summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--cerbero/bootstrap/__init__.py16
-rw-r--r--cerbero/bootstrap/android.py2
-rw-r--r--cerbero/bootstrap/bootstrapper.py6
-rw-r--r--cerbero/bootstrap/build_tools.py34
-rw-r--r--cerbero/bootstrap/ios.py3
-rw-r--r--cerbero/bootstrap/linux.py242
-rw-r--r--cerbero/bootstrap/osx.py7
-rw-r--r--cerbero/bootstrap/rust.py36
-rw-r--r--cerbero/bootstrap/windows.py63
-rw-r--r--cerbero/commands/__init__.py3
-rw-r--r--cerbero/commands/add_package.py231
-rw-r--r--cerbero/commands/add_recipe.py84
-rw-r--r--cerbero/commands/bootstrap.py128
-rw-r--r--cerbero/commands/build.py155
-rw-r--r--cerbero/commands/bundlesource.py24
-rw-r--r--cerbero/commands/cache.py95
-rw-r--r--cerbero/commands/check.py21
-rw-r--r--cerbero/commands/checkpackage.py18
-rw-r--r--cerbero/commands/debugpackages.py26
-rw-r--r--cerbero/commands/deps.py34
-rw-r--r--cerbero/commands/edit_cache.py24
-rw-r--r--cerbero/commands/fetch.py121
-rw-r--r--cerbero/commands/genlibfiles.py18
-rw-r--r--cerbero/commands/gensdkshell.py76
-rw-r--r--cerbero/commands/genvsprops.py30
-rw-r--r--cerbero/commands/genxcconfig.py23
-rw-r--r--cerbero/commands/graph.py47
-rw-r--r--cerbero/commands/info.py41
-rw-r--r--cerbero/commands/list.py22
-rw-r--r--cerbero/commands/package.py146
-rw-r--r--cerbero/commands/rdeps.py11
-rw-r--r--cerbero/commands/runit.py10
-rw-r--r--cerbero/commands/shell.py18
-rw-r--r--cerbero/commands/tag.py35
-rw-r--r--cerbero/commands/wipe.py47
-rw-r--r--cerbero/config.py357
-rw-r--r--cerbero/enums.py108
-rw-r--r--cerbero/errors.py19
-rw-r--r--cerbero/hacks.py33
-rw-r--r--cerbero/ide/pkgconfig.py7
-rw-r--r--cerbero/ide/vs/env.py51
-rw-r--r--cerbero/ide/vs/genlib.py23
-rwxr-xr-xcerbero/ide/vs/pkgconfig2vsprops.py29
-rw-r--r--cerbero/ide/vs/props.py44
-rw-r--r--cerbero/ide/vs/vsprops.py39
-rw-r--r--cerbero/ide/xcode/fwlib.py80
-rwxr-xr-xcerbero/ide/xcode/xcconfig.py15
-rw-r--r--cerbero/main.py86
-rw-r--r--cerbero/packages/__init__.py10
-rw-r--r--cerbero/packages/android.py18
-rw-r--r--cerbero/packages/debian.py124
-rw-r--r--cerbero/packages/disttarball.py51
-rw-r--r--cerbero/packages/linux.py23
-rw-r--r--cerbero/packages/osx/buildtools.py16
-rw-r--r--cerbero/packages/osx/bundles.py99
-rw-r--r--cerbero/packages/osx/distribution.py49
-rw-r--r--cerbero/packages/osx/info_plist.py45
-rw-r--r--cerbero/packages/osx/packager.py267
-rw-r--r--cerbero/packages/package.py82
-rw-r--r--cerbero/packages/packager.py27
-rw-r--r--cerbero/packages/packagesstore.py69
-rw-r--r--cerbero/packages/rpm.py87
-rw-r--r--cerbero/packages/wix.py454
-rw-r--r--cerbero/packages/wix_packager.py72
-rw-r--r--cerbero/tools/depstracker.py29
-rw-r--r--cerbero/tools/libtool.py22
-rwxr-xr-xcerbero/tools/osxrelocator.py29
-rwxr-xr-xcerbero/tools/osxuniversalgenerator.py59
-rw-r--r--cerbero/tools/pkgconfig.py15
-rw-r--r--cerbero/tools/strip.py2
-rw-r--r--cerbero/utils/__init__.py241
-rw-r--r--cerbero/utils/git.py129
-rw-r--r--cerbero/utils/manifest.py12
-rw-r--r--cerbero/utils/messages.py29
-rw-r--r--cerbero/utils/msbuild.py16
-rw-r--r--cerbero/utils/shell.py213
-rw-r--r--cerbero/utils/svn.py20
-rw-r--r--packages/custom.py4
-rw-r--r--recipes/custom.py19
-rw-r--r--setup.py83
-rw-r--r--test/test_build_common.py23
-rw-r--r--test/test_cerbero_build_build.py13
-rw-r--r--test/test_cerbero_build_cookbook.py4
-rw-r--r--test/test_cerbero_build_filesprovider.py61
-rw-r--r--test/test_cerbero_build_recipe.py18
-rw-r--r--test/test_cerbero_config.py118
-rw-r--r--test/test_cerbero_ide_pkgconfig.py27
-rw-r--r--test/test_cerbero_ide_xcode_xcconfig.py20
-rw-r--r--test/test_cerbero_packages_disttarball.py11
-rw-r--r--test/test_cerbero_packages_linux.py46
-rw-r--r--test/test_cerbero_packages_osx_info_plist.py53
-rw-r--r--test/test_cerbero_packages_package.py98
-rw-r--r--test/test_cerbero_packages_packagemaker.py29
-rw-r--r--test/test_cerbero_packages_packagesstore.py39
-rw-r--r--test/test_cerbero_packages_pmdoc.py98
-rw-r--r--test/test_cerbero_packages_wix.py67
-rw-r--r--test/test_cerbero_tools_osxuniversalgenerator.py80
-rw-r--r--test/test_common.py12
-rw-r--r--test/test_packages_common.py51
-rw-r--r--tools/certdata2pem.py47
-rw-r--r--tools/show-coverage.py13
101 files changed, 3322 insertions, 2809 deletions
diff --git a/cerbero/bootstrap/__init__.py b/cerbero/bootstrap/__init__.py
index 62d35650..71d3580c 100644
--- a/cerbero/bootstrap/__init__.py
+++ b/cerbero/bootstrap/__init__.py
@@ -22,7 +22,6 @@ from cerbero.build.source import BaseTarball, Source
class BootstrapTarball(BaseTarball, Source):
-
def __init__(self, config, offline, url, checksum, download_dir, tarball_name=None):
self.config = config
self.offline = offline
@@ -38,7 +37,7 @@ class BootstrapTarball(BaseTarball, Source):
return super().verify(fname, fatal)
-class BootstrapperBase (object):
+class BootstrapperBase(object):
# List of URLs to be fetched
fetch_urls = None
# A function that returns more URLs to fetch and a function to call that
@@ -59,14 +58,15 @@ class BootstrapperBase (object):
raise NotImplemented("'start' must be implemented by subclasses")
async def fetch_urls_impl(self, urls):
- for (url, name, checksum) in urls:
- source = BootstrapTarball(self.config, self.offline, url, checksum,
- self.config.local_sources, tarball_name=name)
+ for url, name, checksum in urls:
+ source = BootstrapTarball(
+ self.config, self.offline, url, checksum, self.config.local_sources, tarball_name=name
+ )
self.sources[url] = source
await source.fetch()
async def fetch(self):
- 'Fetch bootstrap binaries'
+ "Fetch bootstrap binaries"
await self.fetch_urls_impl(self.fetch_urls)
next_func = self.fetch_urls_func
while next_func:
@@ -74,11 +74,11 @@ class BootstrapperBase (object):
await self.fetch_urls_impl(more_urls)
async def fetch_recipes(self, jobs):
- 'Fetch build-tools recipes; only called by fetch-bootstrap'
+ "Fetch build-tools recipes; only called by fetch-bootstrap"
pass
async def extract(self):
- for (url, unpack, unpack_dir) in self.extract_steps:
+ for url, unpack, unpack_dir in self.extract_steps:
if unpack:
await self.sources[url].extract_tarball(unpack_dir)
else:
diff --git a/cerbero/bootstrap/android.py b/cerbero/bootstrap/android.py
index 6720d582..9534b914 100644
--- a/cerbero/bootstrap/android.py
+++ b/cerbero/bootstrap/android.py
@@ -33,8 +33,8 @@ NDK_CHECKSUMS = {
'android-ndk-r25c-windows.zip': 'f70093964f6cbbe19268f9876a20f92d3a593db3ad2037baadd25fd8d71e84e2',
}
-class AndroidBootstrapper (BootstrapperBase):
+class AndroidBootstrapper(BootstrapperBase):
def __init__(self, config, offline, assume_yes):
super().__init__(config, offline)
self.prefix = self.config.toolchain_prefix
diff --git a/cerbero/bootstrap/bootstrapper.py b/cerbero/bootstrap/bootstrapper.py
index 56733876..b4cc7461 100644
--- a/cerbero/bootstrap/bootstrapper.py
+++ b/cerbero/bootstrap/bootstrapper.py
@@ -34,13 +34,14 @@ def register_system_bootstrapper(distro, klass, distro_version=None):
system_bootstrappers[distro] = {}
system_bootstrappers[distro][distro_version] = klass
+
def register_toolchain_bootstrapper(distro, klass, distro_version=None):
if not distro in toolchain_bootstrappers:
toolchain_bootstrappers[distro] = {}
toolchain_bootstrappers[distro][distro_version] = klass
-class Bootstrapper (object):
+class Bootstrapper(object):
def __new__(klass, config, system, toolchains, build_tools, offline, assume_yes):
bs = []
@@ -60,7 +61,7 @@ class Bootstrapper (object):
if system:
d, v = build
if d not in system_bootstrappers:
- raise FatalError(_("No system bootstrapper for %s" % d))
+ raise FatalError(_('No system bootstrapper for %s' % d))
if v not in system_bootstrappers[d]:
v = None
bs.append(system_bootstrappers[d][v](config, offline, assume_yes))
@@ -86,6 +87,7 @@ class Bootstrapper (object):
return bs
+
from cerbero.bootstrap import linux, windows, android, osx, ios
linux.register_all()
diff --git a/cerbero/bootstrap/build_tools.py b/cerbero/bootstrap/build_tools.py
index 2c2b0cf1..569e9205 100644
--- a/cerbero/bootstrap/build_tools.py
+++ b/cerbero/bootstrap/build_tools.py
@@ -35,13 +35,11 @@ from cerbero.enums import Platform, Distro
from pathlib import PurePath
-class BuildTools (BootstrapperBase, Fetch):
- BUILD_TOOLS = ['automake', 'autoconf', 'libtool', 'pkg-config',
- 'orc', 'gettext-m4', 'meson']
+class BuildTools(BootstrapperBase, Fetch):
+ BUILD_TOOLS = ['automake', 'autoconf', 'libtool', 'pkg-config', 'orc', 'gettext-m4', 'meson']
PLAT_BUILD_TOOLS = {
- Platform.DARWIN: ['intltool', 'sed', 'gperf', 'bison', 'flex',
- 'moltenvk-tools'],
+ Platform.DARWIN: ['intltool', 'sed', 'gperf', 'bison', 'flex', 'moltenvk-tools'],
Platform.WINDOWS: ['nasm'],
Platform.LINUX: ['intltool-m4'],
}
@@ -70,35 +68,35 @@ class BuildTools (BootstrapperBase, Fetch):
if not self.config.variants.uwp:
self.PLAT_BUILD_TOOLS[Platform.WINDOWS].append('intltool')
- if self.config.target_platform != Platform.LINUX and not \
- self.config.prefix_is_executable():
+ if self.config.target_platform != Platform.LINUX and not self.config.prefix_is_executable():
# For glib-mkenums and glib-genmarshal
self.BUILD_TOOLS.append('glib-tools')
- if self.config.target_platform == Platform.WINDOWS and \
- self.config.platform == Platform.LINUX:
- self.BUILD_TOOLS.append('wix')
+ if self.config.target_platform == Platform.WINDOWS and self.config.platform == Platform.LINUX:
+ self.BUILD_TOOLS.append('wix')
self.BUILD_TOOLS += self.config.extra_build_tools
self._setup_env()
def check_build_tools(self):
- '''
+ """
Check whether the build tools we have are new enough, and if not, build
them ourselves. On Windows, we always build nasm ourselves, and we tell
the user to install CMake using the installer.
- '''
+ """
ret = []
tools = {
# meson requires ninja >=1.8.2
'ninja': ('1.8.2', None),
}
if self.config.platform in (Platform.LINUX, Platform.DARWIN):
- tools.update({
- # need cmake > 3.10.2 for out-of-source-tree builds.
- 'cmake': ('3.10.2', None),
- # dav1d requires nasm >=2.13.02
- 'nasm': ('2.13.02', '-v'),
- })
+ tools.update(
+ {
+ # need cmake > 3.10.2 for out-of-source-tree builds.
+ 'cmake': ('3.10.2', None),
+ # dav1d requires nasm >=2.13.02
+ 'nasm': ('2.13.02', '-v'),
+ }
+ )
for tool, (version, arg) in tools.items():
_, _, newer = shell.check_tool_version(tool, version, env=None, version_arg=arg)
if newer:
diff --git a/cerbero/bootstrap/ios.py b/cerbero/bootstrap/ios.py
index 25716c6b..0886b5c0 100644
--- a/cerbero/bootstrap/ios.py
+++ b/cerbero/bootstrap/ios.py
@@ -28,8 +28,7 @@ from cerbero.bootstrap.bootstrapper import register_toolchain_bootstrapper
from cerbero.config import Distro
-class IOSBootstrapper (BootstrapperBase):
-
+class IOSBootstrapper(BootstrapperBase):
def __init__(self, config, offline, assume_yes):
super().__init__(config, offline)
diff --git a/cerbero/bootstrap/linux.py b/cerbero/bootstrap/linux.py
index eafc3e3b..aff993a4 100644
--- a/cerbero/bootstrap/linux.py
+++ b/cerbero/bootstrap/linux.py
@@ -26,8 +26,8 @@ from cerbero.utils import messages as m
import shlex
import subprocess
-class UnixBootstrapper (BootstrapperBase):
+class UnixBootstrapper(BootstrapperBase):
tool = []
command = []
yes_arg = []
@@ -37,43 +37,75 @@ class UnixBootstrapper (BootstrapperBase):
def __init__(self, config, offline, assume_yes):
BootstrapperBase.__init__(self, config, offline)
self.assume_yes = assume_yes
- if user_is_root() and 'sudo' in self.tool: # no need for sudo as root user
- self.tool.remove('sudo')
+ if user_is_root() and 'sudo' in self.tool: # no need for sudo as root user
+ self.tool.remove('sudo')
async def start(self, jobs=0):
for c in self.checks:
c()
if self.config.distro_packages_install:
- extra_packages = self.config.extra_bootstrap_packages.get(
- self.config.platform, None)
+ extra_packages = self.config.extra_bootstrap_packages.get(self.config.platform, None)
if extra_packages:
self.packages += extra_packages.get(self.config.distro, [])
self.packages += extra_packages.get(self.config.distro_version, [])
tool = self.tool
if self.assume_yes:
- tool += self.yes_arg;
- tool += self.command;
+ tool += self.yes_arg
+ tool += self.command
cmd = tool + self.packages
m.message("Running command '%s'" % ' '.join(cmd))
shell.new_call(cmd, interactive=True)
-class DebianBootstrapper (UnixBootstrapper):
-
+class DebianBootstrapper(UnixBootstrapper):
tool = ['sudo', 'apt-get']
command = ['install']
yes_arg = ['-y']
packages = [
- 'autotools-dev', 'automake', 'autoconf', 'libtool', 'g++', 'autopoint',
- 'make', 'cmake', 'ninja-build', 'bison', 'flex', 'nasm', 'pkg-config',
- 'libxv-dev', 'libx11-dev', 'libx11-xcb-dev', 'libpulse-dev', 'python3-dev',
- 'gettext', 'build-essential', 'pkg-config', 'libxext-dev', 'libxi-dev',
- 'x11proto-record-dev', 'libxrender-dev', 'libgl1-mesa-dev',
- 'libxfixes-dev', 'libxdamage-dev', 'libxcomposite-dev',
- 'libasound2-dev', 'build-essential', 'gperf', 'wget', 'libxtst-dev',
- 'libxrandr-dev', 'libglu1-mesa-dev', 'libegl1-mesa-dev', 'git',
- 'xutils-dev', 'intltool', 'ccache', 'python3-setuptools', 'libssl-dev'
+ 'autotools-dev',
+ 'automake',
+ 'autoconf',
+ 'libtool',
+ 'g++',
+ 'autopoint',
+ 'make',
+ 'cmake',
+ 'ninja-build',
+ 'bison',
+ 'flex',
+ 'nasm',
+ 'pkg-config',
+ 'libxv-dev',
+ 'libx11-dev',
+ 'libx11-xcb-dev',
+ 'libpulse-dev',
+ 'python3-dev',
+ 'gettext',
+ 'build-essential',
+ 'pkg-config',
+ 'libxext-dev',
+ 'libxi-dev',
+ 'x11proto-record-dev',
+ 'libxrender-dev',
+ 'libgl1-mesa-dev',
+ 'libxfixes-dev',
+ 'libxdamage-dev',
+ 'libxcomposite-dev',
+ 'libasound2-dev',
+ 'build-essential',
+ 'gperf',
+ 'wget',
+ 'libxtst-dev',
+ 'libxrandr-dev',
+ 'libglu1-mesa-dev',
+ 'libegl1-mesa-dev',
+ 'git',
+ 'xutils-dev',
+ 'intltool',
+ 'ccache',
+ 'python3-setuptools',
+ 'libssl-dev',
]
def __init__(self, config, offline, assume_yes):
@@ -83,7 +115,7 @@ class DebianBootstrapper (UnixBootstrapper):
self.packages.append('libc6:i386')
self.checks.append(self.create_debian_arch_check('i386'))
if self.config.arch in [Architecture.X86_64, Architecture.X86]:
- self.packages.append('wine')
+ self.packages.append('wine')
def create_debian_arch_check(self, arch):
def check_arch():
@@ -93,27 +125,60 @@ class DebianBootstrapper (UnixBootstrapper):
foreign_archs = shell.check_output(['dpkg', '--print-foreign-architectures'])
if arch in foreign_archs.split():
return
- raise ConfigurationError(('Architecture %s is missing from your setup. ' + \
- 'You can add it with: "dpkg --add-architecture %s",' + \
- ' then run "apt-get update."') \
- % (arch, arch))
+ raise ConfigurationError(
+ (
+ 'Architecture %s is missing from your setup. '
+ + 'You can add it with: "dpkg --add-architecture %s",'
+ + ' then run "apt-get update."'
+ )
+ % (arch, arch)
+ )
return check_arch
-class RedHatBootstrapper (UnixBootstrapper):
+class RedHatBootstrapper(UnixBootstrapper):
tool = ['dnf']
command = ['install']
yes_arg = ['-y']
packages = [
- 'gcc', 'gcc-c++', 'automake', 'autoconf', 'libtool', 'gettext-devel',
- 'make', 'cmake', 'ninja-build', 'bison', 'flex', 'nasm', 'pkgconfig',
- 'curl', 'intltool', 'rpm-build', 'redhat-rpm-config', 'python3-devel',
- 'libXrender-devel', 'pulseaudio-libs-devel', 'libXv-devel',
- 'mesa-libGL-devel', 'libXcomposite-devel', 'perl-ExtUtils-MakeMaker',
- 'libXi-devel', 'perl-XML-Simple', 'gperf', 'wget', 'libXrandr-devel',
- 'libXtst-devel', 'git', 'xorg-x11-util-macros', 'mesa-libEGL-devel',
- 'ccache', 'openssl-devel', 'alsa-lib-devel', 'perl-FindBin',
+ 'gcc',
+ 'gcc-c++',
+ 'automake',
+ 'autoconf',
+ 'libtool',
+ 'gettext-devel',
+ 'make',
+ 'cmake',
+ 'ninja-build',
+ 'bison',
+ 'flex',
+ 'nasm',
+ 'pkgconfig',
+ 'curl',
+ 'intltool',
+ 'rpm-build',
+ 'redhat-rpm-config',
+ 'python3-devel',
+ 'libXrender-devel',
+ 'pulseaudio-libs-devel',
+ 'libXv-devel',
+ 'mesa-libGL-devel',
+ 'libXcomposite-devel',
+ 'perl-ExtUtils-MakeMaker',
+ 'libXi-devel',
+ 'perl-XML-Simple',
+ 'gperf',
+ 'wget',
+ 'libXrandr-devel',
+ 'libXtst-devel',
+ 'git',
+ 'xorg-x11-util-macros',
+ 'mesa-libEGL-devel',
+ 'ccache',
+ 'openssl-devel',
+ 'alsa-lib-devel',
+ 'perl-FindBin',
]
def __init__(self, config, offline, assume_yes):
@@ -132,42 +197,83 @@ class RedHatBootstrapper (UnixBootstrapper):
if self.config.distro_version in [DistroVersion.FEDORA_24, DistroVersion.FEDORA_25]:
self.packages.append('libncurses-compat-libs.i686')
if self.config.arch in [Architecture.X86_64, Architecture.X86]:
- self.packages.append('wine')
+ self.packages.append('wine')
if user_is_root():
return
self.tool = ['sudo'] + self.tool
-class OpenSuseBootstrapper (UnixBootstrapper):
+class OpenSuseBootstrapper(UnixBootstrapper):
tool = ['sudo', 'zypper']
command = ['install']
yes_arg = ['-y']
packages = [
- 'gcc', 'automake', 'autoconf', 'gcc-c++', 'libtool', 'gettext-tools',
- 'make', 'cmake', 'ninja-build', 'bison', 'flex', 'nasm', 'intltool',
- 'patterns-openSUSE-devel_rpm_build', 'python3-devel',
- 'xorg-x11-libXrender-devel', 'libpulse-devel', 'xorg-x11-libXv-devel',
- 'Mesa-libGL-devel', 'libXcomposite-devel', 'libX11-devel',
- 'alsa-devel', 'libXi-devel', 'Mesa-devel', 'Mesa-libGLESv3-devel',
- 'gperf', 'wget', 'git', 'ccache', 'openssl-devel'
+ 'gcc',
+ 'automake',
+ 'autoconf',
+ 'gcc-c++',
+ 'libtool',
+ 'gettext-tools',
+ 'make',
+ 'cmake',
+ 'ninja-build',
+ 'bison',
+ 'flex',
+ 'nasm',
+ 'intltool',
+ 'patterns-openSUSE-devel_rpm_build',
+ 'python3-devel',
+ 'xorg-x11-libXrender-devel',
+ 'libpulse-devel',
+ 'xorg-x11-libXv-devel',
+ 'Mesa-libGL-devel',
+ 'libXcomposite-devel',
+ 'libX11-devel',
+ 'alsa-devel',
+ 'libXi-devel',
+ 'Mesa-devel',
+ 'Mesa-libGLESv3-devel',
+ 'gperf',
+ 'wget',
+ 'git',
+ 'ccache',
+ 'openssl-devel',
]
def __init__(self, config, offline, assume_yes):
UnixBootstrapper.__init__(self, config, offline, assume_yes)
if self.config.target_platform == Platform.WINDOWS:
- if self.config.arch in [Architecture.X86_64, Architecture.X86]:
- self.packages.append('wine')
+ if self.config.arch in [Architecture.X86_64, Architecture.X86]:
+ self.packages.append('wine')
-class ArchBootstrapper (UnixBootstrapper):
+class ArchBootstrapper(UnixBootstrapper):
tool = ['sudo', 'pacman']
command = ['-S', '--needed']
yes_arg = ['--noconfirm']
packages = [
- 'intltool', 'cmake', 'ninja', 'libtool', 'bison', 'flex', 'automake',
- 'autoconf', 'make', 'gettext', 'nasm', 'gperf', 'libxrender', 'libxv',
- 'mesa', 'python3', 'wget', 'git', 'xorg-util-macros', 'ccache',
- 'openssl', 'alsa-lib',
+ 'intltool',
+ 'cmake',
+ 'ninja',
+ 'libtool',
+ 'bison',
+ 'flex',
+ 'automake',
+ 'autoconf',
+ 'make',
+ 'gettext',
+ 'nasm',
+ 'gperf',
+ 'libxrender',
+ 'libxv',
+ 'mesa',
+ 'python3',
+ 'wget',
+ 'git',
+ 'xorg-util-macros',
+ 'ccache',
+ 'openssl',
+ 'alsa-lib',
]
def __init__(self, config, offline, assume_yes):
@@ -175,9 +281,9 @@ class ArchBootstrapper (UnixBootstrapper):
has_multilib = True
try:
- shell.check_output (["pacman", "-Sp", "gcc-multilib"])
+ shell.check_output(['pacman', '-Sp', 'gcc-multilib'])
except CommandError:
- has_multilib = False
+ has_multilib = False
if self.config.arch == Architecture.X86_64 and has_multilib:
self.packages.append('gcc-multilib')
@@ -187,29 +293,41 @@ class ArchBootstrapper (UnixBootstrapper):
if self.config.arch in [Architecture.X86_64, Architecture.X86]:
self.packages.append('wine')
-class GentooBootstrapper (UnixBootstrapper):
+class GentooBootstrapper(UnixBootstrapper):
tool = ['sudo', 'emerge']
command = ['-u']
- yes_arg = [] # Does not seem interactive
+ yes_arg = [] # Does not seem interactive
packages = [
- 'dev-util/intltool', 'dev-util/cmake', 'dev-util/ninja',
- 'sys-devel/libtool', 'sys-devel/bison', 'sys-devel/flex',
- 'sys-devel/automake', 'sys-devel/autoconf', 'sys-devel/make',
- 'sys-devel/gettext', 'media-sound/pulseaudio', 'dev-lang/nasm',
- 'dev-util/gperf', 'x11-libs/libXrender', 'x11-libs/libXv',
- 'media-libs/mesa', 'net-misc/wget', 'dev-libs/openssl',
- 'media-libs/alsa-lib'
+ 'dev-util/intltool',
+ 'dev-util/cmake',
+ 'dev-util/ninja',
+ 'sys-devel/libtool',
+ 'sys-devel/bison',
+ 'sys-devel/flex',
+ 'sys-devel/automake',
+ 'sys-devel/autoconf',
+ 'sys-devel/make',
+ 'sys-devel/gettext',
+ 'media-sound/pulseaudio',
+ 'dev-lang/nasm',
+ 'dev-util/gperf',
+ 'x11-libs/libXrender',
+ 'x11-libs/libXv',
+ 'media-libs/mesa',
+ 'net-misc/wget',
+ 'dev-libs/openssl',
+ 'media-libs/alsa-lib',
]
def __init__(self, config, offline, assume_yes):
UnixBootstrapper.__init__(self, config, offline, assume_yes)
if self.config.target_platform == Platform.WINDOWS:
- if self.config.arch in [Architecture.X86_64, Architecture.X86]:
- self.packages.append('virtual/wine')
+ if self.config.arch in [Architecture.X86_64, Architecture.X86]:
+ self.packages.append('virtual/wine')
-class NoneBootstrapper (BootstrapperBase):
+class NoneBootstrapper(BootstrapperBase):
async def start(self):
pass
diff --git a/cerbero/bootstrap/osx.py b/cerbero/bootstrap/osx.py
index 0864aa33..884b077a 100644
--- a/cerbero/bootstrap/osx.py
+++ b/cerbero/bootstrap/osx.py
@@ -29,9 +29,8 @@ CPANM_VERSION = '1.7044'
CPANM_URL_TPL = 'https://raw.githubusercontent.com/miyagawa/cpanminus/{}/cpanm'
CPANM_CHECKSUM = '22b92506243649a73cfb55c5990cedd24cdbb20b15b4530064d2496d94d1642b'
-class OSXBootstrapper (BootstrapperBase):
-
+class OSXBootstrapper(BootstrapperBase):
def __init__(self, config, offline, assume_yes):
super().__init__(config, offline)
url = CPANM_URL_TPL.format(CPANM_VERSION)
@@ -43,7 +42,7 @@ class OSXBootstrapper (BootstrapperBase):
return
self._install_perl_deps()
if self.config.arch == Architecture.ARM64:
- m.message("Installing rosetta needed for some package installation scripts")
+ m.message('Installing rosetta needed for some package installation scripts')
shell.new_call(['/usr/sbin/softwareupdate', '--install-rosetta', '--agree-to-license'])
def _install_perl_deps(self):
@@ -51,7 +50,7 @@ class OSXBootstrapper (BootstrapperBase):
shell.new_call(['chmod', '+x', cpanm_installer])
# Install XML::Parser, required for intltool
cmd = ['sudo', cpanm_installer, 'XML::Parser']
- m.message("Installing XML::Parser, may require a password for running \'" + " ".join(cmd) + "\'")
+ m.message("Installing XML::Parser, may require a password for running '" + ' '.join(cmd) + "'")
shell.new_call(cmd, interactive=True)
diff --git a/cerbero/bootstrap/rust.py b/cerbero/bootstrap/rust.py
index 69843de8..1b3fe8a0 100644
--- a/cerbero/bootstrap/rust.py
+++ b/cerbero/bootstrap/rust.py
@@ -30,10 +30,10 @@ from cerbero.enums import Platform, Architecture
class RustBootstrapper(BootstrapperBase):
- '''
+ """
A class for installing a self-contained Rust and Cargo installation inside
Cerbero's home dir
- '''
+ """
SERVER = 'https://static.rust-lang.org'
RUSTUP_VERSION = '1.26.0'
@@ -84,8 +84,7 @@ class RustBootstrapper(BootstrapperBase):
# toolchains, so ensure that we fetch and install both
archs = {Architecture.X86_64, Architecture.X86}
other_arch = (archs - {self.config.arch}).pop()
- arch_triple = self.config.rust_triple(other_arch, self.config.platform,
- self.config.variants.visualstudio)
+ arch_triple = self.config.rust_triple(other_arch, self.config.platform, self.config.variants.visualstudio)
if arch_triple not in self.target_triples:
self.target_triples.append(arch_triple)
self.fetch_urls = self.get_fetch_urls()
@@ -97,10 +96,14 @@ class RustBootstrapper(BootstrapperBase):
self.extract_steps += [(self.TOMLI_URL, True, self.config.rust_prefix)]
def get_fetch_urls(self):
- '''Get Rustup and Rust channel URLs'''
+ """Get Rustup and Rust channel URLs"""
urls = []
- m = {'server': self.SERVER, 'version': self.RUSTUP_VERSION,
- 'triple': self.build_triple, 'exe_suffix': self.config.exe_suffix}
+ m = {
+ 'server': self.SERVER,
+ 'version': self.RUSTUP_VERSION,
+ 'triple': self.build_triple,
+ 'exe_suffix': self.config.exe_suffix,
+ }
# Rustup
url = self.RUSTUP_URL_TPL.format(**m)
name = self.RUSTUP_NAME_TPL.format(**m)
@@ -193,18 +196,27 @@ class RustBootstrapper(BootstrapperBase):
return rustup_env
async def install_toolchain(self):
- '''
+ """
Run rustup to install the downloaded toolchain. We pretend that
RUST_VERSION is the latest stable release. That way when we upgrade the
toolchain, rustup will automatically remove the older toolchain, which
it wouldn't do if we installed a specific version.
- '''
+ """
# Install Rust toolchain with rustup-init
st = os.stat(self.rustup)
os.chmod(self.rustup, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
- rustup_args = [self.rustup, '-y', '-v', '--no-modify-path',
- '--default-host', self.build_triple, '--profile', 'minimal',
- '--component', 'llvm-tools-preview']
+ rustup_args = [
+ self.rustup,
+ '-y',
+ '-v',
+ '--no-modify-path',
+ '--default-host',
+ self.build_triple,
+ '--profile',
+ 'minimal',
+ '--component',
+ 'llvm-tools-preview',
+ ]
for triple in self.target_triples:
rustup_args += ['--target', triple]
rustup_env = self.get_rustup_env()
diff --git a/cerbero/bootstrap/windows.py b/cerbero/bootstrap/windows.py
index 48dff0db..3d36053e 100644
--- a/cerbero/bootstrap/windows.py
+++ b/cerbero/bootstrap/windows.py
@@ -32,10 +32,14 @@ from cerbero.utils import messages as m
# Toolchain
TOOLCHAIN_BASE_URL = 'https://gstreamer.freedesktop.org/data/cerbero/toolchain/windows/'
TOOLCHAIN_PLATFORM = {
- Platform.LINUX: ('mingw-6.0.0-gcc-8.2.0-linux-multilib.tar.xz',
- '396ceb50161720b19971e2c71c87ce08150213b091ed8ffc00782df8759921bf'),
- Platform.WINDOWS: ('mingw-6.0.0-gcc-8.2.0-windows-multilib.tar.xz',
- '77fc1319b13894d7340d4994150e3af615e23a63113a9947412d11be95f4d8a9'),
+ Platform.LINUX: (
+ 'mingw-6.0.0-gcc-8.2.0-linux-multilib.tar.xz',
+ '396ceb50161720b19971e2c71c87ce08150213b091ed8ffc00782df8759921bf',
+ ),
+ Platform.WINDOWS: (
+ 'mingw-6.0.0-gcc-8.2.0-windows-multilib.tar.xz',
+ '77fc1319b13894d7340d4994150e3af615e23a63113a9947412d11be95f4d8a9',
+ ),
}
# MinGW Perl
@@ -56,10 +60,11 @@ XZ_CHECKSUM = 'd83b82ca75dfab39a13dda364367b34970c781a9df4d41264db922ac3a8f622d'
class MSYSBootstrapper(BootstrapperBase):
- '''
+ """
Bootstrapper for native windows builds on top of MSYS
Installs the necessary MSYS packages and fixups
- '''
+ """
+
# MSYS packages needed
packages = ['msys-flex', 'msys-bison', 'msys-perl']
@@ -79,12 +84,12 @@ class MSYSBootstrapper(BootstrapperBase):
# Newer version of xz that supports multithreaded compression. Need
# to extract to a temporary directory, then overwrite the existing
# lzma/xz binaries.
- self.xz_tmp_prefix = tempfile.TemporaryDirectory() # cleaned up on exit
+ self.xz_tmp_prefix = tempfile.TemporaryDirectory() # cleaned up on exit
self.fetch_urls.append((XZ_URL, None, XZ_CHECKSUM))
self.extract_steps.append((XZ_URL, True, self.xz_tmp_prefix.name))
async def start(self, jobs=0):
- self.install_mingwget_deps() # FIXME: This uses the network
+ self.install_mingwget_deps() # FIXME: This uses the network
self.fix_mingw_unused()
self.fix_openssl_mingw_perl()
self.fix_bin_deps()
@@ -104,8 +109,7 @@ class MSYSBootstrapper(BootstrapperBase):
# to get some include dirs (which doesn't looks like a good idea).
# If we only have the host-prefixed cpp, this problem is gone.
if (msys_mingw_bindir / 'cpp.exe').is_file():
- os.replace(msys_mingw_bindir / 'cpp.exe',
- msys_mingw_bindir / 'cpp.exe.bck')
+ os.replace(msys_mingw_bindir / 'cpp.exe', msys_mingw_bindir / 'cpp.exe.bck')
# MSYS's link.exe (for symlinking) overrides MSVC's link.exe (for
# C linking) in new shells, so rename it. No one uses `link` for
# symlinks anyway.
@@ -118,11 +122,11 @@ class MSYSBootstrapper(BootstrapperBase):
os.replace(msys_link_exe, msys_link_bindir / 'link.exe.bck')
def fix_openssl_mingw_perl(self):
- '''
+ """
This perl is only used by openssl; we can't use it everywhere else
because it can't find msys tools, and so perl scripts like autom4te
fail to run, f.ex., m4. Lucky for us, openssl doesn't use those.
- '''
+ """
# Move perl installation from perl-5.xx.y to perl
perldir = os.path.join(self.perl_prefix, 'perl-' + PERL_VERSION)
for d in os.listdir(perldir):
@@ -145,20 +149,26 @@ class MSYSBootstrapper(BootstrapperBase):
# replace /opt/perl/bin/perl in intltool
files = shell.ls_files(['bin/intltool*'], self.prefix)
for f in files:
- shell.replace(os.path.join(self.prefix, f),
- {'/opt/perl/bin/perl': '/bin/perl'})
+ shell.replace(os.path.join(self.prefix, f), {'/opt/perl/bin/perl': '/bin/perl'})
class MSYS2Bootstrapper(BootstrapperBase):
- '''
+ """
Bootstrapper for native windows builds on top of MSYS2
Installs the necessary MSYS2 packages and fixups
- '''
+ """
+
packages = [
- 'flex', 'bison', 'intltool', 'gperf', 'make', 'diffutils',
+ 'flex',
+ 'bison',
+ 'intltool',
+ 'gperf',
+ 'make',
+ 'diffutils',
# OpenSSL needs a perl version using '\' for the MSVC build
# and a perl version using '/' for the MinGW build
- 'mingw-w64-ucrt-x86_64-perl', 'perl',
+ 'mingw-w64-ucrt-x86_64-perl',
+ 'perl',
# Need a ninja that understands '\'
'mingw-w64-ucrt-x86_64-ninja',
]
@@ -167,14 +177,14 @@ class MSYS2Bootstrapper(BootstrapperBase):
super().__init__(config, offline)
async def start(self, jobs=0):
- shell.new_call(['pacman', '-Sy', '--noconfirm', '--needed'] + self.packages)
+ shell.new_call(['pacman', '-Sy', '--noconfirm', '--needed'] + self.packages)
class MinGWBootstrapper(BootstrapperBase):
- '''
+ """
Bootstrapper for windows builds.
Installs the mingw-w64 compiler toolchain and headers for Directx
- '''
+ """
def __init__(self, config, offline, assume_yes):
super().__init__(config, offline)
@@ -194,8 +204,7 @@ class MinGWBootstrapper(BootstrapperBase):
# wglext.h
url = KHRONOS_WGL_TPL.format(OPENGL_COMMIT)
self.fetch_urls.append((url, None, WGL_CHECKSUM))
- sysroot = os.path.join(self.prefix,
- 'x86_64-w64-mingw32/sysroot/usr/x86_64-w64-mingw32')
+ sysroot = os.path.join(self.prefix, 'x86_64-w64-mingw32/sysroot/usr/x86_64-w64-mingw32')
gl_inst_path = os.path.join(sysroot, 'include/GL/')
self.extract_steps.append((url, False, gl_inst_path))
# Fix extraction in MSYS2 with tar if the symlink exists
@@ -205,9 +214,11 @@ class MinGWBootstrapper(BootstrapperBase):
async def start(self, jobs=0):
if not git.check_line_endings(self.config.platform):
- raise ConfigurationError("git is configured to use automatic line "
- "endings conversion. Please change that by running:\n"
- "`git config --global core.autocrlf false` inside the MSYS shell")
+ raise ConfigurationError(
+ 'git is configured to use automatic line '
+ 'endings conversion. Please change that by running:\n'
+ '`git config --global core.autocrlf false` inside the MSYS shell'
+ )
self.check_dirs()
if self.config.platform == Platform.WINDOWS:
self.fix_mingw()
diff --git a/cerbero/commands/__init__.py b/cerbero/commands/__init__.py
index fa393e52..c90080ae 100644
--- a/cerbero/commands/__init__.py
+++ b/cerbero/commands/__init__.py
@@ -56,6 +56,7 @@ def register_command(command_class):
def load_commands(subparsers):
import os
+
commands_dir = os.path.abspath(os.path.dirname(__file__))
for name in os.listdir(commands_dir):
@@ -65,7 +66,7 @@ def load_commands(subparsers):
try:
__import__('cerbero.commands.%s' % name)
except ImportError as e:
- m.warning("Error importing command %s:\n %s" % (name, e))
+ m.warning('Error importing command %s:\n %s' % (name, e))
for command in _commands.values():
command.add_parser(subparsers)
diff --git a/cerbero/commands/add_package.py b/cerbero/commands/add_package.py
index 803a5c8f..917b48f0 100644
--- a/cerbero/commands/add_package.py
+++ b/cerbero/commands/add_package.py
@@ -28,8 +28,7 @@ from cerbero.packages.packagesstore import PackagesStore
# TODO: Add option to create metapackages
-RECEIPT_TPL =\
-'''# -*- Mode: Python -*- vi:si:et:sw=4:sts=4:ts=4:syntax=python
+RECEIPT_TPL = """# -*- Mode: Python -*- vi:si:et:sw=4:sts=4:ts=4:syntax=python
class Package(package.Package):
@@ -37,39 +36,33 @@ class Package(package.Package):
name = '%(name)s'
shortdesc = '%(shortdesc)s'
version = '%(version)s'
-'''
+"""
-VENDOR_TPL = \
-''' vendor = '%(vendor)s'
-'''
+VENDOR_TPL = """ vendor = '%(vendor)s'
+"""
-URL_TPL = \
-''' url = '%(url)s'
-'''
+URL_TPL = """ url = '%(url)s'
+"""
-LICENSE_TPL = \
-''' license = %(license)s
-'''
+LICENSE_TPL = """ license = %(license)s
+"""
-DEPS_TPL = \
-''' deps = %(deps)s
-'''
+DEPS_TPL = """ deps = %(deps)s
+"""
-FILES_TPL = '''
+FILES_TPL = """
files = %(files)s
-'''
+"""
-FILES_DEVEL_TPL = \
-''' files_devel = %(files_devel)s
-'''
+FILES_DEVEL_TPL = """ files_devel = %(files_devel)s
+"""
-PLATFORM_FILES_TPL = '''
+PLATFORM_FILES_TPL = """
platform_files = {%(platform_files)s}
-'''
+"""
-PLATFORM_FILES_DEVEL_TPL = \
-''' platform_files_devel = {%(platform_files_devel)s}
-'''
+PLATFORM_FILES_DEVEL_TPL = """ platform_files_devel = {%(platform_files_devel)s}
+"""
class AddPackage(Command):
@@ -88,59 +81,87 @@ class AddPackage(Command):
self.supported_platforms = {
'linux': 'Platform.LINUX',
'windows': 'Platform.WINDOWS',
- 'darwin': 'Platform.DARWIN'}
-
- Command.__init__(self,
- [ArgparseArgument('name', nargs=1,
- help=_('name of the package')),
- ArgparseArgument('version', nargs=1,
- help=_('version of the package')),
- ArgparseArgument('-s', '--short-desc', default='',
- help=_('a short description of the package')),
- ArgparseArgument('-v', '--vendor', default='',
- help=_('the package vendor')),
- ArgparseArgument('-u', '--url', default='',
- help=_('the package url')),
- ArgparseArgument('-l', '--license', default='',
- help=_('license of the package. '
- 'Supported licenses: %s') %
- ', '.join(list(self.supported_licenses.keys()))),
- ArgparseArgument('-d', '--deps', default='',
- help=_('comma separated list of the package '
- 'dependencies')),
- ArgparseArgument('-i', '--includes', default='',
- help=_('comma separated list of packages to '
- 'include in this package. All files '
- 'from the packages passed as param '
- 'will be added to this package.')),
- ArgparseArgument('--files', default='',
- help=_('comma separated list of recipe files to '
- 'add to the runtime package '
- '(e.g.: recipe1:category1:category2,'
- 'recipe2)')),
- ArgparseArgument('--files-devel', default='',
- help=_('comma separated list of recipe files to '
- 'add to the devel package '
- '(e.g.: recipe1:category1:category2,'
- 'recipe2)')),
- ArgparseArgument('--platform-files', default='',
- help=_('comma separated list of platform:recipe '
- 'files to add to the runtime package '
- '(e.g.: linux:recipe1:category1:category2,'
- 'windows:recipe2) '
- 'Supported platforms: %s') %
- ', '.join(
- list(self.supported_platforms.keys()))),
- ArgparseArgument('--platform-files-devel', default='',
- help=_('comma separated list of platform:recipe '
- 'files to add to the devel package '
- '(e.g.: linux:recipe1:category1:category2,'
- 'windows:recipe2) '
- 'Supported platforms: %s') %
- ', '.join(
- list(self.supported_platforms.keys()))),
- ArgparseArgument('-f', '--force', action='store_true',
- default=False, help=_('Replace package if existing'))])
+ 'darwin': 'Platform.DARWIN',
+ }
+
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('name', nargs=1, help=_('name of the package')),
+ ArgparseArgument('version', nargs=1, help=_('version of the package')),
+ ArgparseArgument('-s', '--short-desc', default='', help=_('a short description of the package')),
+ ArgparseArgument('-v', '--vendor', default='', help=_('the package vendor')),
+ ArgparseArgument('-u', '--url', default='', help=_('the package url')),
+ ArgparseArgument(
+ '-l',
+ '--license',
+ default='',
+ help=_('license of the package. ' 'Supported licenses: %s')
+ % ', '.join(list(self.supported_licenses.keys())),
+ ),
+ ArgparseArgument(
+ '-d', '--deps', default='', help=_('comma separated list of the package ' 'dependencies')
+ ),
+ ArgparseArgument(
+ '-i',
+ '--includes',
+ default='',
+ help=_(
+ 'comma separated list of packages to '
+ 'include in this package. All files '
+ 'from the packages passed as param '
+ 'will be added to this package.'
+ ),
+ ),
+ ArgparseArgument(
+ '--files',
+ default='',
+ help=_(
+ 'comma separated list of recipe files to '
+ 'add to the runtime package '
+ '(e.g.: recipe1:category1:category2,'
+ 'recipe2)'
+ ),
+ ),
+ ArgparseArgument(
+ '--files-devel',
+ default='',
+ help=_(
+ 'comma separated list of recipe files to '
+ 'add to the devel package '
+ '(e.g.: recipe1:category1:category2,'
+ 'recipe2)'
+ ),
+ ),
+ ArgparseArgument(
+ '--platform-files',
+ default='',
+ help=_(
+ 'comma separated list of platform:recipe '
+ 'files to add to the runtime package '
+ '(e.g.: linux:recipe1:category1:category2,'
+ 'windows:recipe2) '
+ 'Supported platforms: %s'
+ )
+ % ', '.join(list(self.supported_platforms.keys())),
+ ),
+ ArgparseArgument(
+ '--platform-files-devel',
+ default='',
+ help=_(
+ 'comma separated list of platform:recipe '
+ 'files to add to the devel package '
+ '(e.g.: linux:recipe1:category1:category2,'
+ 'windows:recipe2) '
+ 'Supported platforms: %s'
+ )
+ % ', '.join(list(self.supported_platforms.keys())),
+ ),
+ ArgparseArgument(
+ '-f', '--force', action='store_true', default=False, help=_('Replace package if existing')
+ ),
+ ],
+ )
def run(self, config, args):
name = args.name[0]
@@ -148,8 +169,7 @@ class AddPackage(Command):
store = PackagesStore(config)
filename = os.path.join(config.packages_dir, '%s.package' % name)
if not args.force and os.path.exists(filename):
- m.warning(_("Package '%s' (%s) already exists, "
- "use -f to replace" % (name, filename)))
+ m.warning(_("Package '%s' (%s) already exists, " 'use -f to replace' % (name, filename)))
return
template_args = {}
@@ -174,8 +194,7 @@ class AddPackage(Command):
if args.license:
self.validate_licenses([args.license])
template += LICENSE_TPL
- template_args['license'] = \
- 'License.' + self.supported_licenses[args.license]
+ template_args['license'] = 'License.' + self.supported_licenses[args.license]
deps = []
if args.deps:
@@ -185,8 +204,7 @@ class AddPackage(Command):
try:
package = store.get_package(dname)
except Exception as ex:
- raise UsageError(_("Error creating package: "
- "dependant package %s does not exist") % dname)
+                    raise UsageError(_('Error creating package: ' 'dependent package %s does not exist') % dname)
template_args['deps'] = deps
include_files = []
@@ -196,22 +214,18 @@ class AddPackage(Command):
if args.includes:
includes = args.includes.split(',')
if list(set(deps) & set(includes)):
- raise UsageError(_("Error creating package: "
- "param --deps intersects with --includes"))
+ raise UsageError(_('Error creating package: ' 'param --deps intersects with --includes'))
for pname in includes:
try:
package = store.get_package(pname)
except Exception as ex:
- raise UsageError(_("Error creating package: "
- "included package %s does not exist") % pname)
+ raise UsageError(_('Error creating package: ' 'included package %s does not exist') % pname)
include_files.extend(package.files)
include_files_devel.extend(package.files_devel)
- platform_include_files = self.merge_dict(
- platform_include_files,
- package.platform_files)
+ platform_include_files = self.merge_dict(platform_include_files, package.platform_files)
platform_include_files_devel = self.merge_dict(
- platform_include_files_devel,
- package.platform_files_devel)
+ platform_include_files_devel, package.platform_files_devel
+ )
include_files = list(set(include_files))
include_files_devel = list(set(include_files_devel))
@@ -236,15 +250,12 @@ class AddPackage(Command):
if args.platform_files or platform_include_files:
template += PLATFORM_FILES_TPL
- platform_files = self.parse_platform_files(
- args.platform_files, platform_include_files)
+ platform_files = self.parse_platform_files(args.platform_files, platform_include_files)
template_args['platform_files'] = platform_files
if args.platform_files_devel or platform_include_files_devel:
template += PLATFORM_FILES_DEVEL_TPL
- platform_files_devel = self.parse_platform_files(
- args.platform_files_devel,
- platform_include_files_devel)
+ platform_files_devel = self.parse_platform_files(args.platform_files_devel, platform_include_files_devel)
template_args['platform_files_devel'] = platform_files_devel
try:
@@ -252,10 +263,9 @@ class AddPackage(Command):
f.write(template % template_args)
f.close()
- m.action(_("Package '%s' successfully created in %s") %
- (name, filename))
+ m.action(_("Package '%s' successfully created in %s") % (name, filename))
except IOError as ex:
- raise FatalError(_("Error creating package: %s") % ex)
+ raise FatalError(_('Error creating package: %s') % ex)
def merge_dict(self, d1, d2):
ret = d1
@@ -269,15 +279,13 @@ class AddPackage(Command):
def validate_licenses(self, licenses):
for l in licenses:
if l and not l in self.supported_licenses:
- raise UsageError(_("Error creating package: "
- "invalid license '%s'") % l)
+ raise UsageError(_('Error creating package: ' "invalid license '%s'") % l)
def validate_platform_files(self, platform_files):
for f in platform_files:
- platform = f[:f.index(':')]
+ platform = f[: f.index(':')]
if not platform in self.supported_platforms:
- raise UsageError(_("Error creating package: "
- "invalid platform '%s'") % platform)
+ raise UsageError(_('Error creating package: ' "invalid platform '%s'") % platform)
def parse_platform_files(self, platform_files, extra_files):
if not platform_files and not extra_files:
@@ -291,7 +299,7 @@ class AddPackage(Command):
for desc in unparsed_files:
platform_index = desc.index(':')
platform = desc[:platform_index]
- files = desc[platform_index + 1:]
+ files = desc[platform_index + 1 :]
if not platform in parsed_files:
parsed_files[platform] = [files]
else:
@@ -305,10 +313,11 @@ class AddPackage(Command):
template_arg = []
for platform, files in parsed_files.items():
template_arg.append(
- self.supported_platforms[platform] + ': [' + \
- ', '.join(['\'' + recipe_files + '\'' \
- for recipe_files in files]) + \
- ']')
+ self.supported_platforms[platform]
+ + ': ['
+ + ', '.join(["'" + recipe_files + "'" for recipe_files in files])
+ + ']'
+ )
return ', '.join(template_arg)
diff --git a/cerbero/commands/add_recipe.py b/cerbero/commands/add_recipe.py
index 55764683..b2a4d4a3 100644
--- a/cerbero/commands/add_recipe.py
+++ b/cerbero/commands/add_recipe.py
@@ -27,31 +27,26 @@ from cerbero.utils import _, N_, ArgparseArgument
from cerbero.utils import messages as m
-RECEIPT_TPL =\
-'''# -*- Mode: Python -*- vi:si:et:sw=4:sts=4:ts=4:syntax=python
+RECEIPT_TPL = """# -*- Mode: Python -*- vi:si:et:sw=4:sts=4:ts=4:syntax=python
class Recipe(recipe.Recipe):
name = '%(name)s'
version = '%(version)s'
-'''
+"""
-LICENSES_TPL = \
-''' licenses = [%(licenses)s]
-'''
+LICENSES_TPL = """ licenses = [%(licenses)s]
+"""
-COMMIT_TPL = \
-''' commit = '%(commit)s'
-'''
+COMMIT_TPL = """ commit = '%(commit)s'
+"""
-ORIGIN_TPL = \
-''' remotes = {'origin': '%(origin)s'}
-'''
+ORIGIN_TPL = """ remotes = {'origin': '%(origin)s'}
+"""
-DEPS_TPL = \
-''' deps = %(deps)s
-'''
+DEPS_TPL = """ deps = %(deps)s
+"""
class AddRecipe(Command):
@@ -67,33 +62,35 @@ class AddRecipe(Command):
continue
self.supported_licenses[attr.acronym] = name
- Command.__init__(self,
- [ArgparseArgument('name', nargs=1,
- help=_('name of the recipe')),
- ArgparseArgument('version', nargs=1,
- help=_('version of the recipe')),
- ArgparseArgument('-l', '--licenses', default='',
- help=_('comma separated list of the recipe '
- 'licenses. Supported licenses: %s') %
- ', '.join(list(self.supported_licenses.keys()))),
- ArgparseArgument('-c', '--commit', default='',
- help=_('commit to use '
- '(default to "sdk-$version")')),
- ArgparseArgument('-o', '--origin', default='',
- help=_('the origin repository of the recipe')),
- ArgparseArgument('-d', '--deps', default='',
- help=_('comma separated list of the recipe '
- 'dependencies')),
- ArgparseArgument('-f', '--force', action='store_true',
- default=False, help=_('Replace recipe if existing'))])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('name', nargs=1, help=_('name of the recipe')),
+ ArgparseArgument('version', nargs=1, help=_('version of the recipe')),
+ ArgparseArgument(
+ '-l',
+ '--licenses',
+ default='',
+ help=_('comma separated list of the recipe ' 'licenses. Supported licenses: %s')
+ % ', '.join(list(self.supported_licenses.keys())),
+ ),
+ ArgparseArgument('-c', '--commit', default='', help=_('commit to use ' '(default to "sdk-$version")')),
+ ArgparseArgument('-o', '--origin', default='', help=_('the origin repository of the recipe')),
+ ArgparseArgument(
+ '-d', '--deps', default='', help=_('comma separated list of the recipe ' 'dependencies')
+ ),
+ ArgparseArgument(
+ '-f', '--force', action='store_true', default=False, help=_('Replace recipe if existing')
+ ),
+ ],
+ )
def run(self, config, args):
name = args.name[0]
version = args.version[0]
filename = os.path.join(config.recipes_dir, '%s.recipe' % name)
if not args.force and os.path.exists(filename):
- m.warning(_("Recipe '%s' (%s) already exists, "
- "use -f to replace" % (name, filename)))
+ m.warning(_("Recipe '%s' (%s) already exists, " 'use -f to replace' % (name, filename)))
return
template_args = {}
@@ -106,9 +103,7 @@ class AddRecipe(Command):
licenses = args.licenses.split(',')
self.validate_licenses(licenses)
template += LICENSES_TPL
- template_args['licenses'] = ', '.join(
- ['License.' + self.supported_licenses[l] \
- for l in licenses])
+ template_args['licenses'] = ', '.join(['License.' + self.supported_licenses[l] for l in licenses])
if args.commit:
template += COMMIT_TPL
@@ -126,8 +121,7 @@ class AddRecipe(Command):
try:
recipe = cookbook.get_recipe(dname)
except RecipeNotFoundError as ex:
- raise UsageError(_("Error creating recipe: "
- "dependant recipe %s does not exist") % dname)
+                    raise UsageError(_('Error creating recipe: ' 'dependent recipe %s does not exist') % dname)
template_args['deps'] = deps
try:
@@ -135,16 +129,14 @@ class AddRecipe(Command):
f.write(template % template_args)
f.close()
- m.action(_("Recipe '%s' successfully created in %s") %
- (name, filename))
+ m.action(_("Recipe '%s' successfully created in %s") % (name, filename))
except IOError as ex:
- raise FatalError(_("Error creating recipe: %s") % ex)
+ raise FatalError(_('Error creating recipe: %s') % ex)
def validate_licenses(self, licenses):
for l in licenses:
if l and not l in self.supported_licenses:
- raise UsageError(_("Error creating recipe: "
- "invalid license '%s'") % l)
+ raise UsageError(_('Error creating recipe: ' "invalid license '%s'") % l)
register_command(AddRecipe)
diff --git a/cerbero/commands/bootstrap.py b/cerbero/commands/bootstrap.py
index 2b57674a..69ce5102 100644
--- a/cerbero/commands/bootstrap.py
+++ b/cerbero/commands/bootstrap.py
@@ -28,32 +28,58 @@ from cerbero.bootstrap.build_tools import BuildTools
NUMBER_OF_JOBS_IF_UNUSED = 2
NUMBER_OF_JOBS_IF_USED = 2 * determine_num_of_cpus()
+
class Bootstrap(Command):
doc = N_('Bootstrap the build system installing all the dependencies')
name = 'bootstrap'
def __init__(self):
args = [
- ArgparseArgument('--build-tools-only', action='store_true',
- default=False, help=argparse.SUPPRESS),
- ArgparseArgument('--system-only', action='store_true',
- default=False, help=argparse.SUPPRESS),
- ArgparseArgument('--system', action=StoreBool,
- default=True, nargs='?', choices=('yes', 'no'),
- help='Setup the system for building, such as by installing system packages'),
- ArgparseArgument('--toolchains', action=StoreBool,
- default=True, nargs='?', choices=('yes', 'no'),
- help='Setup any toolchains needed by the target platform'),
- ArgparseArgument('--build-tools', action=StoreBool,
- default=True, nargs='?', choices=('yes', 'no'),
- help='Compile the build tools needed while building'),
- ArgparseArgument('--offline', action='store_true',
- default=False, help=_('Use only the source cache, no network')),
- ArgparseArgument('-y', '--assume-yes', action='store_true',
- default=False, help=('Automatically say yes to prompts and run non-interactively')),
- ArgparseArgument('--jobs', '-j', action='store', type=int,
- default=0, help=_('How many recipes to build concurrently. '
- '0 = number of CPUs.'))]
+ ArgparseArgument('--build-tools-only', action='store_true', default=False, help=argparse.SUPPRESS),
+ ArgparseArgument('--system-only', action='store_true', default=False, help=argparse.SUPPRESS),
+ ArgparseArgument(
+ '--system',
+ action=StoreBool,
+ default=True,
+ nargs='?',
+ choices=('yes', 'no'),
+ help='Setup the system for building, such as by installing system packages',
+ ),
+ ArgparseArgument(
+ '--toolchains',
+ action=StoreBool,
+ default=True,
+ nargs='?',
+ choices=('yes', 'no'),
+ help='Setup any toolchains needed by the target platform',
+ ),
+ ArgparseArgument(
+ '--build-tools',
+ action=StoreBool,
+ default=True,
+ nargs='?',
+ choices=('yes', 'no'),
+ help='Compile the build tools needed while building',
+ ),
+ ArgparseArgument(
+ '--offline', action='store_true', default=False, help=_('Use only the source cache, no network')
+ ),
+ ArgparseArgument(
+ '-y',
+ '--assume-yes',
+ action='store_true',
+ default=False,
+ help=('Automatically say yes to prompts and run non-interactively'),
+ ),
+ ArgparseArgument(
+ '--jobs',
+ '-j',
+ action='store',
+ type=int,
+ default=0,
+ help=_('How many recipes to build concurrently. ' '0 = number of CPUs.'),
+ ),
+ ]
Command.__init__(self, args)
def run(self, config, args):
@@ -66,12 +92,15 @@ class Bootstrap(Command):
# --system-only meant '--system=yes --toolchains=yes --build-tools=no'
args.build_tools = False
m.deprecation('Replace --system-only with --build-tools=no')
- bootstrappers = Bootstrapper(config, args.system, args.toolchains,
- args.build_tools, args.offline, args.assume_yes)
+ bootstrappers = Bootstrapper(
+ config, args.system, args.toolchains, args.build_tools, args.offline, args.assume_yes
+ )
tasks = []
+
async def bootstrap_fetch_extract(bs):
await bs.fetch()
await bs.extract()
+
for bootstrapper in bootstrappers:
tasks.append(bootstrap_fetch_extract(bootstrapper))
run_until_complete(tasks)
@@ -91,19 +120,42 @@ class FetchBootstrap(Command):
def __init__(self):
args = [
- ArgparseArgument('--build-tools-only', action='store_true',
- default=False, help=argparse.SUPPRESS),
- ArgparseArgument('--system', action=StoreBool,
- default=True, nargs='?', choices=('yes', 'no'),
- help='Fetch sources to setup the system by the target platform'),
- ArgparseArgument('--toolchains', action=StoreBool,
- default=True, nargs='?', choices=('yes', 'no'),
- help='Setup any toolchains needed by the target platform'),
- ArgparseArgument('--build-tools', action=StoreBool,
- default=True, nargs='?', choices=('yes', 'no'),
- help='Compile the build tools needed while building'),
- ArgparseArgument('--jobs', '-j', action='store', nargs='?', type=int,
- const=NUMBER_OF_JOBS_IF_USED, default=NUMBER_OF_JOBS_IF_UNUSED, help=_('number of async jobs'))]
+ ArgparseArgument('--build-tools-only', action='store_true', default=False, help=argparse.SUPPRESS),
+ ArgparseArgument(
+ '--system',
+ action=StoreBool,
+ default=True,
+ nargs='?',
+ choices=('yes', 'no'),
+ help='Fetch sources to setup the system by the target platform',
+ ),
+ ArgparseArgument(
+ '--toolchains',
+ action=StoreBool,
+ default=True,
+ nargs='?',
+ choices=('yes', 'no'),
+ help='Setup any toolchains needed by the target platform',
+ ),
+ ArgparseArgument(
+ '--build-tools',
+ action=StoreBool,
+ default=True,
+ nargs='?',
+ choices=('yes', 'no'),
+ help='Compile the build tools needed while building',
+ ),
+ ArgparseArgument(
+ '--jobs',
+ '-j',
+ action='store',
+ nargs='?',
+ type=int,
+ const=NUMBER_OF_JOBS_IF_USED,
+ default=NUMBER_OF_JOBS_IF_UNUSED,
+ help=_('number of async jobs'),
+ ),
+ ]
Command.__init__(self, args)
def run(self, config, args):
@@ -111,8 +163,9 @@ class FetchBootstrap(Command):
# --build-tools-only meant '--system=no --toolchains=no --build-tools=yes'
args.toolchains = False
m.deprecation('Replace --build-tools-only with --system=no --toolchains=no')
- bootstrappers = Bootstrapper(config, args.system, args.toolchains,
- args.build_tools, offline=False, assume_yes=False)
+ bootstrappers = Bootstrapper(
+ config, args.system, args.toolchains, args.build_tools, offline=False, assume_yes=False
+ )
tasks = []
build_tools_task = None
for bootstrapper in bootstrappers:
@@ -126,5 +179,6 @@ class FetchBootstrap(Command):
if build_tools_task:
run_until_complete(build_tools_task)
+
register_command(Bootstrap)
register_command(FetchBootstrap)
diff --git a/cerbero/commands/build.py b/cerbero/commands/build.py
index 3f2ee1f7..ca374d56 100644
--- a/cerbero/commands/build.py
+++ b/cerbero/commands/build.py
@@ -17,7 +17,7 @@
# Boston, MA 02111-1307, USA.
-#from cerbero.oven import Oven
+# from cerbero.oven import Oven
from cerbero.commands import Command, register_command
from cerbero.build.cookbook import CookBook
from cerbero.build.oven import Oven
@@ -30,42 +30,62 @@ class Build(Command):
name = 'build'
def __init__(self, force=None, no_deps=None, deps_only=False):
- args = [
- ArgparseArgument('recipe', nargs='*',
- help=_('name of the recipe to build')),
- ArgparseArgument('--missing-files', action='store_true',
+ args = [
+ ArgparseArgument('recipe', nargs='*', help=_('name of the recipe to build')),
+ ArgparseArgument(
+ '--missing-files',
+ action='store_true',
+ default=False,
+ help=_('prints a list of files installed that are ' 'listed in the recipe'),
+ ),
+ ArgparseArgument(
+ '--dry-run', action='store_true', default=False, help=_('only print commands instead of running them ')
+ ),
+ ArgparseArgument(
+ '--offline', action='store_true', default=False, help=_('Use only the source cache, no network')
+ ),
+ ArgparseArgument(
+ '--jobs',
+ '-j',
+ action='store',
+ type=int,
+ default=0,
+ help=_('How many recipes to build concurrently. ' '0 = number of CPUs.'),
+ ),
+ ArgparseArgument(
+ '--build-tools',
+ '-b',
+ action='store_true',
+ default=False,
+ help=_('Runs the build command for the build tools of this config.'),
+ ),
+ ArgparseArgument(
+ '--steps',
+ '-s',
+ nargs='*',
+ action='store',
+ type=str,
+ help=_('List of steps to execute, instead of all build steps.'),
+ ),
+ ]
+ if force is None:
+ args.append(
+ ArgparseArgument(
+ '--force',
+ action='store_true',
default=False,
- help=_('prints a list of files installed that are '
- 'listed in the recipe')),
- ArgparseArgument('--dry-run', action='store_true',
- default=False,
- help=_('only print commands instead of running them ')),
- ArgparseArgument('--offline', action='store_true',
- default=False, help=_('Use only the source cache, no network')),
- ArgparseArgument('--jobs', '-j', action='store', type=int,
- default=0, help=_('How many recipes to build concurrently. '
- '0 = number of CPUs.')),
- ArgparseArgument('--build-tools', '-b', action='store_true',
- default=False, help=_('Runs the build command for the build tools of this config.')),
- ArgparseArgument('--steps', '-s', nargs='*', action='store', type=str,
- help=_('List of steps to execute, instead of all build steps.')),
- ]
- if force is None:
- args.append(
- ArgparseArgument('--force', action='store_true',
- default=False,
- help=_('force the build of the recipe ingoring '
- 'its cached state')))
- if no_deps is None:
- args.append(
- ArgparseArgument('--no-deps', action='store_true',
- default=False,
- help=_('do not build dependencies')))
-
- self.force = force
- self.no_deps = no_deps
- self.deps_only = deps_only
- Command.__init__(self, args)
+                        help=_('force the build of the recipe ignoring ' 'its cached state'),
+ )
+ )
+ if no_deps is None:
+ args.append(
+ ArgparseArgument('--no-deps', action='store_true', default=False, help=_('do not build dependencies'))
+ )
+
+ self.force = force
+ self.no_deps = no_deps
+ self.deps_only = deps_only
+ Command.__init__(self, args)
def run(self, config, args):
if self.force is None:
@@ -74,28 +94,55 @@ class Build(Command):
self.no_deps = args.no_deps
if args.build_tools:
config = config.build_tools_config
- self.runargs(config, args.recipe, args.missing_files, self.force,
- self.no_deps, dry_run=args.dry_run, offline=args.offline,
- deps_only=self.deps_only, jobs=args.jobs, steps_filter=args.steps)
-
- def runargs(self, config, fuzzy_recipes, missing_files=False, force=False,
- no_deps=False, cookbook=None, dry_run=False, offline=False,
- deps_only=False, jobs=None, steps_filter=None):
+ self.runargs(
+ config,
+ args.recipe,
+ args.missing_files,
+ self.force,
+ self.no_deps,
+ dry_run=args.dry_run,
+ offline=args.offline,
+ deps_only=self.deps_only,
+ jobs=args.jobs,
+ steps_filter=args.steps,
+ )
+
+ def runargs(
+ self,
+ config,
+ fuzzy_recipes,
+ missing_files=False,
+ force=False,
+ no_deps=False,
+ cookbook=None,
+ dry_run=False,
+ offline=False,
+ deps_only=False,
+ jobs=None,
+ steps_filter=None,
+ ):
if cookbook is None:
cookbook = CookBook(config, offline=offline)
recipes = []
for recipe in fuzzy_recipes:
- found = cookbook.get_closest_recipe(recipe)
- if found:
- recipes.append(found)
- else:
- recipes.append(recipe)
-
- oven = Oven(recipes, cookbook, force=self.force,
- no_deps=self.no_deps, missing_files=missing_files,
- dry_run=dry_run, deps_only=deps_only, jobs=jobs,
- steps_filter=steps_filter)
+ found = cookbook.get_closest_recipe(recipe)
+ if found:
+ recipes.append(found)
+ else:
+ recipes.append(recipe)
+
+ oven = Oven(
+ recipes,
+ cookbook,
+ force=self.force,
+ no_deps=self.no_deps,
+ missing_files=missing_files,
+ dry_run=dry_run,
+ deps_only=deps_only,
+ jobs=jobs,
+ steps_filter=steps_filter,
+ )
run_until_complete(oven.start_cooking())
@@ -106,6 +153,7 @@ class BuildOne(Build):
def __init__(self):
Build.__init__(self, True, True)
+
class BuildDeps(Build):
doc = N_('Build only the dependencies of the specified recipes')
name = 'build-deps'
@@ -113,6 +161,7 @@ class BuildDeps(Build):
def __init__(self):
Build.__init__(self, no_deps=False, deps_only=True)
+
register_command(BuildOne)
register_command(BuildDeps)
register_command(Build)
diff --git a/cerbero/commands/bundlesource.py b/cerbero/commands/bundlesource.py
index ec06f136..68ee7f89 100644
--- a/cerbero/commands/bundlesource.py
+++ b/cerbero/commands/bundlesource.py
@@ -33,16 +33,14 @@ class BundleSource(Command):
def __init__(self, args=[]):
args = [
- ArgparseArgument('bundlepackages', nargs='+',
- help=_('packages to bundle')),
- ArgparseArgument('--add-recipe', action='append',
- default=[],
- help=_('additional recipes to bundle')),
- ArgparseArgument('--no-bootstrap', action='store_true',
- default=False,
- help=_('Don\'t include bootstrep sources')),
- ArgparseArgument('--offline', action='store_true',
- default=False, help=_('Use only the source cache, no network')),
+ ArgparseArgument('bundlepackages', nargs='+', help=_('packages to bundle')),
+ ArgparseArgument('--add-recipe', action='append', default=[], help=_('additional recipes to bundle')),
+ ArgparseArgument(
+                '--no-bootstrap', action='store_true', default=False, help=_("Don't include bootstrap sources")
+ ),
+ ArgparseArgument(
+ '--offline', action='store_true', default=False, help=_('Use only the source cache, no network')
+ ),
]
Command.__init__(self, args)
@@ -54,7 +52,7 @@ class BundleSource(Command):
setup_args = ['sdist']
if not config.uninstalled:
- m.error("Can only be run on cerbero-uninstalled")
+ m.error('Can only be run on cerbero-uninstalled')
store = PackagesStore(config)
cookbook = CookBook(config)
@@ -87,8 +85,7 @@ class BundleSource(Command):
if not args.no_bootstrap:
build_tools = BuildTools(config, args.offline)
- bs_recipes = build_tools.BUILD_TOOLS + \
- build_tools.PLAT_BUILD_TOOLS.get(config.platform, [])
+ bs_recipes = build_tools.BUILD_TOOLS + build_tools.PLAT_BUILD_TOOLS.get(config.platform, [])
b_recipes = []
for r in bs_recipes:
b_recipes += cookbook.list_recipe_deps(r)
@@ -109,4 +106,5 @@ class BundleSource(Command):
command = str(config._relative_path('setup.py'))
run_setup(command, setup_args)
+
register_command(BundleSource)
diff --git a/cerbero/commands/cache.py b/cerbero/commands/cache.py
index f64fc7f4..e6a67414 100644
--- a/cerbero/commands/cache.py
+++ b/cerbero/commands/cache.py
@@ -28,6 +28,7 @@ from cerbero.errors import FatalError
from cerbero.utils import _, N_, ArgparseArgument, git, shell, run_until_complete
from cerbero.utils import messages as m
+
class BaseCache(Command):
base_url = 'https://artifacts.gstreamer-foundation.net/cerbero-deps/%s/%s/%s'
ssh_address = 'cerbero-deps-uploader@artifacts.gstreamer-foundation.net'
@@ -38,10 +39,14 @@ class BaseCache(Command):
log_size = 10
def __init__(self, args=[]):
- args.append(ArgparseArgument('--commit', action='store', type=str,
- default='HEAD', help=_('the commit to pick artifact from')))
- args.append(ArgparseArgument('--branch', action='store', type=str,
- default='main', help=_('Git branch to search from')))
+ args.append(
+ ArgparseArgument(
+ '--commit', action='store', type=str, default='HEAD', help=_('the commit to pick artifact from')
+ )
+ )
+ args.append(
+ ArgparseArgument('--branch', action='store', type=str, default='main', help=_('Git branch to search from'))
+ )
Command.__init__(self, args)
# FIXME: move this to utils
@@ -63,7 +68,7 @@ class BaseCache(Command):
return git.get_hash_is_ancestor(git_dir, commit)
def json_get(self, url):
- m.message("GET %s" % url)
+ m.message('GET %s' % url)
tmpdir = tempfile.mkdtemp()
tmpfile = os.path.join(tmpdir, 'deps.json')
@@ -99,7 +104,7 @@ class BaseCache(Command):
branch = args.branch
distro, arch = self.get_distro_and_arch(config)
base_url = self.base_url % (branch, distro, arch)
- return "%s/%s" % (base_url, filename)
+ return '%s/%s' % (base_url, filename)
def get_deps(self, config, args):
url = self.make_url(config, args, self.log_filename)
@@ -108,7 +113,7 @@ class BaseCache(Command):
try:
deps = self.json_get(url)
except FatalError as e:
- m.warning("Could not get cache list: %s" % e.msg)
+ m.warning('Could not get cache list: %s' % e.msg)
return deps
def get_deps_filepath(self, config):
@@ -119,17 +124,19 @@ class BaseCache(Command):
def run(self, config, args):
if not config.uninstalled:
- raise FatalError(_("fetch-cache is only available with "
- "cerbero-uninstalled"))
+ raise FatalError(_('fetch-cache is only available with ' 'cerbero-uninstalled'))
+
class FetchCache(BaseCache):
- doc = N_('Fetch a cached build from external storage based on cerbero git '
- 'revision.')
+ doc = N_('Fetch a cached build from external storage based on cerbero git ' 'revision.')
name = 'fetch-cache'
def __init__(self, args=[]):
- args.append(ArgparseArgument('--namespace', action='store', type=str,
- default='gstreamer', help=_('GitLab namespace to search from')))
+ args.append(
+ ArgparseArgument(
+ '--namespace', action='store', type=str, default='gstreamer', help=_('GitLab namespace to search from')
+ )
+ )
BaseCache.__init__(self, args)
def find_dep(self, deps, sha, allow_old=False):
@@ -138,14 +145,14 @@ class FetchCache(BaseCache):
m.message(f"Matching cache file is {dep['url']}")
return dep
if allow_old:
- m.message(f"Did not find cache for commit {sha}, looking for an older one...");
+ m.message(f'Did not find cache for commit {sha}, looking for an older one...')
for dep in deps:
if self.get_git_sha_is_ancestor(dep['commit']):
m.message(f"Latest available cache file is {dep['url']}")
return dep
- m.warning(f"Did not find any cache for commit {sha}")
+ m.warning(f'Did not find any cache for commit {sha}')
else:
- m.warning(f"Did not find cache for commit {sha}")
+ m.warning(f'Did not find cache for commit {sha}')
return None
async def fetch_dep(self, config, dep, namespace):
@@ -157,7 +164,7 @@ class FetchCache(BaseCache):
m.action(f'Unpacking deps cache {dep_path}')
await shell.unpack(dep_path, config.home_dir)
else:
- m.warning("Corrupted dependency file, ignoring.")
+ m.warning('Corrupted dependency file, ignoring.')
m.action('Unpack complete, deleting artifact')
os.remove(dep_path)
@@ -168,15 +175,15 @@ class FetchCache(BaseCache):
# and Windows. It should instead be derived from CI env vars.
if config.platform == Platform.LINUX:
origin = self.build_dir % namespace
- m.action("Relocating from %s to %s" % (origin, config.home_dir))
+ m.action('Relocating from %s to %s' % (origin, config.home_dir))
# FIXME: Just a quick hack for now
- shell.call(("grep -lnrIU %(origin)s | xargs "
- "sed \"s#%(origin)s#%(dest)s#g\" -i") % {
- 'origin': origin, 'dest': config.home_dir},
- config.home_dir)
+ shell.call(
+ ('grep -lnrIU %(origin)s | xargs ' 'sed "s#%(origin)s#%(dest)s#g" -i')
+ % {'origin': origin, 'dest': config.home_dir},
+ config.home_dir,
+ )
except FatalError as e:
- m.warning("Could not retrieve dependencies for commit %s: %s" % (
- dep['commit'], e.msg))
+ m.warning('Could not retrieve dependencies for commit %s: %s' % (dep['commit'], e.msg))
def run(self, config, args):
BaseCache.run(self, config, args)
@@ -188,6 +195,7 @@ class FetchCache(BaseCache):
run_until_complete(self.fetch_dep(config, dep, args.namespace))
m.message('All done!')
+
class GenCache(BaseCache):
doc = N_('Generate build cache from current state.')
name = 'gen-cache'
@@ -197,13 +205,16 @@ class GenCache(BaseCache):
def create_tarball_tarfile(self, workdir, out_file, *in_files, exclude=None):
import tarfile
+
m.action(f'Generating cache file with tarfile + xz')
+
def exclude_filter(tarinfo):
for each in exclude:
if each in tarinfo.name:
return None
print(tarinfo.name)
return tarinfo
+
prev_cwd = os.getcwd()
os.chdir(workdir)
out_tar, _ = os.path.splitext(out_file)
@@ -219,7 +230,8 @@ class GenCache(BaseCache):
def create_tarball_tar(self, workdir, out_file, *in_files, exclude=None):
cmd = [
shell.get_tar_cmd(),
- '-C', workdir,
+ '-C',
+ workdir,
'--verbose',
'--use-compress-program=xz --threads=0',
]
@@ -242,22 +254,22 @@ class GenCache(BaseCache):
def gen_dep(self, config, args, deps, sha):
deps_filepath = self.get_deps_filepath(config)
if os.path.exists(deps_filepath):
- os.remove(deps_filepath)
+ os.remove(deps_filepath)
log_filepath = self.get_log_filepath(config)
if os.path.exists(log_filepath):
- os.remove(log_filepath)
+ os.remove(log_filepath)
workdir = config.home_dir
platform_arch = '_'.join(config._get_toolchain_target_platform_arch())
distdir = f'dist/{platform_arch}'
try:
- self.create_tarball(config, workdir, deps_filepath, 'build-tools',
- config.build_tools_cache, distdir,
- config.cache_file)
+ self.create_tarball(
+ config, workdir, deps_filepath, 'build-tools', config.build_tools_cache, distdir, config.cache_file
+ )
url = self.make_url(config, args, '%s-%s' % (sha, self.deps_filename))
deps.insert(0, {'commit': sha, 'checksum': self.checksum(deps_filepath), 'url': url})
- deps = deps[0:self.log_size]
+ deps = deps[0 : self.log_size]
with open(log_filepath, 'w') as outfile:
json.dump(deps, outfile, indent=1)
except FatalError:
@@ -276,6 +288,7 @@ class GenCache(BaseCache):
deps = self.get_deps(config, args)
self.gen_dep(config, args, deps, sha)
+
class UploadCache(BaseCache):
doc = N_('Build build cache to external storage.')
name = 'upload-cache'
@@ -291,7 +304,7 @@ class UploadCache(BaseCache):
return
tmpdir = tempfile.mkdtemp()
- private_key = os.getenv('CERBERO_PRIVATE_SSH_KEY');
+ private_key = os.getenv('CERBERO_PRIVATE_SSH_KEY')
private_key_path = os.path.join(tmpdir, 'id_rsa')
deps_filepath = self.get_deps_filepath(config)
@@ -305,7 +318,7 @@ class UploadCache(BaseCache):
if private_key:
with os.fdopen(os.open(private_key_path, os.O_WRONLY | os.O_CREAT, 0o600), 'w') as f:
f.write(private_key)
- f.write("\n")
+ f.write('\n')
f.close()
ssh_opt += ['-i', private_key_path]
ssh_cmd = ['ssh'] + ssh_opt + [self.ssh_address]
@@ -315,27 +328,26 @@ class UploadCache(BaseCache):
branch = args.branch
distro, arch = self.get_distro_and_arch(config)
base_dir = os.path.join(branch, distro, arch)
- shell.new_call(ssh_cmd + ['mkdir -p %s' % base_dir ], verbose=True)
+ shell.new_call(ssh_cmd + ['mkdir -p %s' % base_dir], verbose=True)
# Upload the deps files first
remote_deps_filepath = os.path.join(base_dir, '%s-%s' % (sha, self.deps_filename))
- shell.new_call(scp_cmd + [deps_filepath, '%s:%s' % (self.ssh_address, remote_deps_filepath)],
- verbose=True)
+ shell.new_call(scp_cmd + [deps_filepath, '%s:%s' % (self.ssh_address, remote_deps_filepath)], verbose=True)
# Upload the new log
remote_tmp_log_filepath = os.path.join(base_dir, '%s-%s' % (sha, self.log_filename))
- shell.new_call(scp_cmd + [log_filepath, '%s:%s' % (self.ssh_address, remote_tmp_log_filepath)],
- verbose=True)
+ shell.new_call(
+ scp_cmd + [log_filepath, '%s:%s' % (self.ssh_address, remote_tmp_log_filepath)], verbose=True
+ )
# Override the new log in a way that we reduce the risk of corrupted
# fetch.
remote_log_filepath = os.path.join(base_dir, self.log_filename)
- shell.new_call(ssh_cmd + ['mv', '-f', remote_tmp_log_filepath, remote_log_filepath],
- verbose=True)
+ shell.new_call(ssh_cmd + ['mv', '-f', remote_tmp_log_filepath, remote_log_filepath], verbose=True)
m.message('New deps cache uploaded and deps log updated')
# Now remove the obsoleted dep file if needed
- for dep in deps[self.log_size - 1:]:
+ for dep in deps[self.log_size - 1 :]:
old_remote_deps_filepath = os.path.join(base_dir, os.path.basename(dep['url']))
shell.new_call(ssh_cmd + ['rm', '-f', old_remote_deps_filepath], verbose=True)
finally:
@@ -347,6 +359,7 @@ class UploadCache(BaseCache):
self.upload_dep(config, args, deps)
m.message('All done!')
+
register_command(FetchCache)
register_command(GenCache)
register_command(UploadCache)
diff --git a/cerbero/commands/check.py b/cerbero/commands/check.py
index c12aa45f..b3841113 100644
--- a/cerbero/commands/check.py
+++ b/cerbero/commands/check.py
@@ -30,12 +30,15 @@ class Check(Command):
name = 'check'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('recipe', nargs=1,
- help=_('name of the recipe to run checks on')),
- ArgparseArgument('--recursive', action='store_true', default=False,
- help=_('Recursively run checks on dependencies')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('recipe', nargs=1, help=_('name of the recipe to run checks on')),
+ ArgparseArgument(
+ '--recursive', action='store_true', default=False, help=_('Recursively run checks on dependencies')
+ ),
+ ],
+ )
def run(self, config, args):
cookbook = CookBook(config)
@@ -51,7 +54,7 @@ class Check(Command):
for recipe in ordered_recipes:
if cookbook.recipe_needs_build(recipe.name):
- raise FatalError(_("Recipe %s is not built yet" % recipe.name))
+ raise FatalError(_('Recipe %s is not built yet' % recipe.name))
for recipe in ordered_recipes:
# call step function
@@ -70,7 +73,7 @@ class Check(Command):
except FatalError as e:
raise e
except Exception as ex:
- raise FatalError(_("Error running %s checks: %s") %
- (recipe.name, ex))
+ raise FatalError(_('Error running %s checks: %s') % (recipe.name, ex))
+
register_command(Check)
diff --git a/cerbero/commands/checkpackage.py b/cerbero/commands/checkpackage.py
index adba5380..bac518cb 100644
--- a/cerbero/commands/checkpackage.py
+++ b/cerbero/commands/checkpackage.py
@@ -31,10 +31,12 @@ class CheckPackage(Command):
name = 'checkpackage'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('package', nargs=1,
- help=_('name of the package to run checks on')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('package', nargs=1, help=_('name of the package to run checks on')),
+ ],
+ )
def run(self, config, args):
cookbook = CookBook(config)
@@ -47,7 +49,7 @@ class CheckPackage(Command):
for recipe in ordered_recipes:
if cookbook.recipe_needs_build(recipe.name):
- raise CommandError(_("Recipe %s is not built yet" % recipe.name))
+ raise CommandError(_('Recipe %s is not built yet' % recipe.name))
for recipe in ordered_recipes:
# call step function
@@ -63,9 +65,9 @@ class CheckPackage(Command):
stepfunc()
except Exception as ex:
failed.append(recipe.name)
- m.warning(_("%s checks failed: %s") % (recipe.name, ex))
+ m.warning(_('%s checks failed: %s') % (recipe.name, ex))
if failed:
- raise CommandError(_("Error running %s checks on:\n " +
- "\n ".join(failed)) % p_name)
+ raise CommandError(_('Error running %s checks on:\n ' + '\n '.join(failed)) % p_name)
+
register_command(CheckPackage)
diff --git a/cerbero/commands/debugpackages.py b/cerbero/commands/debugpackages.py
index a51f9821..df50c5aa 100644
--- a/cerbero/commands/debugpackages.py
+++ b/cerbero/commands/debugpackages.py
@@ -27,21 +27,25 @@ from cerbero.packages.package import Package
class DebugPackages(Command):
- doc = N_('Outputs debug information about package, like duplicates files '
- 'or files that do not belong to any package')
+ doc = N_(
+ 'Outputs debug information about package, like duplicates files ' 'or files that do not belong to any package'
+ )
name = 'debug-packages'
def __init__(self):
- Command.__init__(self, [
- ArgparseArgument('-e', '--exclude', nargs='*', default=[],
- help=_('Filter pattern to exclude files from the search')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument(
+ '-e', '--exclude', nargs='*', default=[], help=_('Filter pattern to exclude files from the search')
+ ),
+ ],
+ )
def run(self, config, args):
store = PackagesStore(config)
- allfiles = [p.all_files_list() for p in store.get_packages_list() if\
- isinstance(p, Package)]
+ allfiles = [p.all_files_list() for p in store.get_packages_list() if isinstance(p, Package)]
allfiles = list(itertools.chain(*allfiles))
self.find_duplicates(allfiles)
@@ -51,8 +55,8 @@ class DebugPackages(Command):
count = collections.Counter(allfiles)
duplicates = [x for x in count if count[x] > 1]
if len(duplicates) > 0:
- m.message("Found duplicates files in packages:")
- m.message("%r" % duplicates)
+ m.message('Found duplicates files in packages:')
+ m.message('%r' % duplicates)
def find_orphan_files(self, allfiles, prefix, excludes=[]):
cmd = ['find', '.', '-type', 'f']
@@ -64,7 +68,7 @@ class DebugPackages(Command):
orphan = sorted(list((set(distfiles) - set(allfiles))))
if len(orphan) > 0:
- m.message("Found orphan files:")
+ m.message('Found orphan files:')
m.message('\n'.join(orphan))
diff --git a/cerbero/commands/deps.py b/cerbero/commands/deps.py
index 1bbd3c29..249c0173 100644
--- a/cerbero/commands/deps.py
+++ b/cerbero/commands/deps.py
@@ -28,15 +28,21 @@ class Deps(Command):
name = 'deps'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('recipe', nargs=1,
- help=_('name of the recipe')),
- ArgparseArgument('--all', action='store_true', default=False,
- help=_('list all dependencies, including the '
- 'build ones')),
- ArgparseArgument('--graph', action='store_true', default=False,
- help=_('show the depencies as a graph')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('recipe', nargs=1, help=_('name of the recipe')),
+ ArgparseArgument(
+ '--all',
+ action='store_true',
+ default=False,
+ help=_('list all dependencies, including the ' 'build ones'),
+ ),
+ ArgparseArgument(
+ '--graph', action='store_true', default=False, help=_('show the depencies as a graph')
+ ),
+ ],
+ )
def run(self, config, args):
cookbook = CookBook(config)
@@ -47,8 +53,7 @@ class Deps(Command):
if all_deps:
recipes = cookbook.list_recipe_deps(recipe_name)
else:
- recipes = [cookbook.get_recipe(x) for x in
- cookbook.get_recipe(recipe_name).list_deps()]
+ recipes = [cookbook.get_recipe(x) for x in cookbook.get_recipe(recipe_name).list_deps()]
if len(recipes) == 0:
m.message(_('%s has 0 dependencies') % recipe_name)
@@ -60,14 +65,17 @@ class Deps(Command):
continue
m.message(recipe.name)
else:
+
def print_dep(cookbook, recipe, level=0, already_shown=[]):
- m.message("%s%s" %( " " * 3 * level, recipe.name))
+ m.message('%s%s' % (' ' * 3 * level, recipe.name))
already_shown.append(recipe)
for r in [cookbook.get_recipe(x) for x in recipe.list_deps()]:
if not r in already_shown:
print_dep(cookbook, r, level + 1, already_shown)
elif not r.name == recipe.name:
- m.message("%s(%s)" % ( " " * 3 * (level + 1), r.name))
+ m.message('%s(%s)' % (' ' * 3 * (level + 1), r.name))
+
print_dep(cookbook, cookbook.get_recipe(recipe_name))
+
register_command(Deps)
diff --git a/cerbero/commands/edit_cache.py b/cerbero/commands/edit_cache.py
index b24ff1ea..d6d67eed 100644
--- a/cerbero/commands/edit_cache.py
+++ b/cerbero/commands/edit_cache.py
@@ -39,24 +39,19 @@ class EditCache(Command):
self.recipe_status = RecipeStatus('filepath')
self.recipe_attributes = list(set(dir(self.recipe_status)) - set(dir(RecipeStatus)))
arguments = [
- ArgparseArgument('recipe', nargs='*',
- help=_('Recipe to work with')),
- ArgparseArgument('--bootstrap', action='store_true', default=False,
- help=_('Use bootstrap\'s cache file')),
- ArgparseArgument('--touch', action='store_true', default=False,
- help=_('Touch recipe modifying its mtime')),
- ArgparseArgument('--reset', action='store_true', default=False,
- help=_('Clean entirely the cache for the recipe'))
+ ArgparseArgument('recipe', nargs='*', help=_('Recipe to work with')),
+ ArgparseArgument('--bootstrap', action='store_true', default=False, help=_("Use bootstrap's cache file")),
+ ArgparseArgument('--touch', action='store_true', default=False, help=_('Touch recipe modifying its mtime')),
+ ArgparseArgument(
+ '--reset', action='store_true', default=False, help=_('Clean entirely the cache for the recipe')
+ ),
]
for attr in self.recipe_attributes:
attr_nargs = '*' if isinstance(getattr(self.recipe_status, attr), list) else None
attr_type = type(getattr(self.recipe_status, attr))
arg_type = str if attr_type == bool or attr_type == list else attr_type
- arguments.append(
- ArgparseArgument('--' + attr, nargs=attr_nargs, type=arg_type,
- help=_('Modify ' + attr))
- )
+ arguments.append(ArgparseArgument('--' + attr, nargs=attr_nargs, type=arg_type, help=_('Modify ' + attr)))
Command.__init__(self, arguments)
def run(self, config, args):
@@ -74,8 +69,9 @@ class EditCache(Command):
global_status = cookbook.status
recipes = args.recipe or list(global_status.keys())
- m.message('{} cache values for recipes: {}'.format(
- 'Showing' if not is_modifying else 'Modifying', ', '.join(recipes)))
+ m.message(
+ '{} cache values for recipes: {}'.format('Showing' if not is_modifying else 'Modifying', ', '.join(recipes))
+ )
for recipe in recipes:
if recipe not in global_status.keys():
diff --git a/cerbero/commands/fetch.py b/cerbero/commands/fetch.py
index 34e381be..f88deaf6 100644
--- a/cerbero/commands/fetch.py
+++ b/cerbero/commands/fetch.py
@@ -23,7 +23,15 @@ from cerbero.build.cookbook import CookBook
from cerbero.enums import LibraryType
from cerbero.errors import FatalError
from cerbero.packages.packagesstore import PackagesStore
-from cerbero.utils import _, N_, ArgparseArgument, remove_list_duplicates, shell, determine_num_of_cpus, run_until_complete
+from cerbero.utils import (
+ _,
+ N_,
+ ArgparseArgument,
+ remove_list_duplicates,
+ shell,
+ determine_num_of_cpus,
+ run_until_complete,
+)
from cerbero.utils import messages as m
from cerbero.utils.shell import BuildStatusPrinter
from cerbero.build.source import Tarball
@@ -31,18 +39,39 @@ from cerbero.build.source import Tarball
NUMBER_OF_JOBS_IF_UNUSED = 2
NUMBER_OF_JOBS_IF_USED = 2 * determine_num_of_cpus()
-class Fetch(Command):
+class Fetch(Command):
def __init__(self, args=[]):
- args.append(ArgparseArgument('--reset-rdeps', action='store_true',
- default=False, help=_('reset the status of reverse '
- 'dependencies too')))
- args.append(ArgparseArgument('--print-only', action='store_true',
- default=False, help=_('print all source URLs to stdout')))
- args.append(ArgparseArgument('--full-reset', action='store_true',
- default=False, help=_('reset to extract step if rebuild is needed')))
- args.append(ArgparseArgument('--jobs', '-j', action='store', nargs='?', type=int,
- const=NUMBER_OF_JOBS_IF_USED, default=NUMBER_OF_JOBS_IF_UNUSED, help=_('number of async jobs')))
+ args.append(
+ ArgparseArgument(
+ '--reset-rdeps',
+ action='store_true',
+ default=False,
+ help=_('reset the status of reverse ' 'dependencies too'),
+ )
+ )
+ args.append(
+ ArgparseArgument(
+ '--print-only', action='store_true', default=False, help=_('print all source URLs to stdout')
+ )
+ )
+ args.append(
+ ArgparseArgument(
+ '--full-reset', action='store_true', default=False, help=_('reset to extract step if rebuild is needed')
+ )
+ )
+ args.append(
+ ArgparseArgument(
+ '--jobs',
+ '-j',
+ action='store',
+ nargs='?',
+ type=int,
+ const=NUMBER_OF_JOBS_IF_USED,
+ default=NUMBER_OF_JOBS_IF_UNUSED,
+ help=_('number of async jobs'),
+ )
+ )
Command.__init__(self, args)
@staticmethod
@@ -55,12 +84,14 @@ class Fetch(Command):
else:
for recipe in recipes:
fetch_recipes += cookbook.list_recipe_deps(recipe)
- fetch_recipes = remove_list_duplicates (fetch_recipes)
- m.message(_("Fetching the following recipes using %s async job(s): %s") %
- (jobs, ' '.join([x.name for x in fetch_recipes])))
+ fetch_recipes = remove_list_duplicates(fetch_recipes)
+ m.message(
+ _('Fetching the following recipes using %s async job(s): %s')
+ % (jobs, ' '.join([x.name for x in fetch_recipes]))
+ )
shell.set_max_non_cpu_bound_calls(jobs)
to_rebuild = []
- printer = BuildStatusPrinter (('fetch',), cookbook.get_config().interactive)
+ printer = BuildStatusPrinter(('fetch',), cookbook.get_config().interactive)
printer.total = len(fetch_recipes)
async def fetch_print_wrapper(recipe_name, stepfunc):
@@ -73,7 +104,7 @@ class Fetch(Command):
if print_only:
# For now just print tarball URLs
if isinstance(recipe, Tarball):
- m.message("TARBALL: {} {}".format(recipe.url, recipe.tarball_name))
+ m.message('TARBALL: {} {}'.format(recipe.url, recipe.tarball_name))
continue
stepfunc = getattr(recipe, 'fetch')
if asyncio.iscoroutinefunction(stepfunc):
@@ -84,7 +115,7 @@ class Fetch(Command):
printer.count += 1
printer.remove_recipe(recipe.name)
- m.message("All async fetch jobs finished")
+ m.message('All async fetch jobs finished')
# Checking the current built version against the fetched one
# needs to be done *after* actually fetching
@@ -104,10 +135,11 @@ class Fetch(Command):
cookbook.reset_recipe_status(r.name)
if to_rebuild:
- to_rebuild = sorted(list(set(to_rebuild)), key=lambda r:r.name)
- m.message(_("These recipes have been updated and will "
- "be rebuilt:\n%s") %
- '\n'.join([x.name for x in to_rebuild]))
+ to_rebuild = sorted(list(set(to_rebuild)), key=lambda r: r.name)
+ m.message(
+ _('These recipes have been updated and will ' 'be rebuilt:\n%s')
+ % '\n'.join([x.name for x in to_rebuild])
+ )
class FetchRecipes(Fetch):
@@ -116,25 +148,25 @@ class FetchRecipes(Fetch):
def __init__(self):
args = [
- ArgparseArgument('recipes', nargs='*',
- help=_('list of the recipes to fetch (fetch all if none '
- 'is passed)')),
- ArgparseArgument('--no-deps', action='store_true',
- default=False, help=_('do not fetch dependencies')),
- ]
+ ArgparseArgument(
+ 'recipes', nargs='*', help=_('list of the recipes to fetch (fetch all if none ' 'is passed)')
+ ),
+ ArgparseArgument('--no-deps', action='store_true', default=False, help=_('do not fetch dependencies')),
+ ]
Fetch.__init__(self, args)
def run(self, config, args):
cookbook = CookBook(config)
recipes = []
for recipe in args.recipes:
- found = cookbook.get_closest_recipe(recipe)
- if found:
- recipes.append(found)
- else:
- recipes.append(recipe)
- task = self.fetch(cookbook, recipes, args.no_deps,
- args.reset_rdeps, args.full_reset, args.print_only, args.jobs)
+ found = cookbook.get_closest_recipe(recipe)
+ if found:
+ recipes.append(found)
+ else:
+ recipes.append(recipe)
+ task = self.fetch(
+ cookbook, recipes, args.no_deps, args.reset_rdeps, args.full_reset, args.print_only, args.jobs
+ )
return run_until_complete(task)
@@ -144,20 +176,25 @@ class FetchPackage(Fetch):
def __init__(self):
args = [
- ArgparseArgument('package', nargs=1,
- help=_('package to fetch')),
- ArgparseArgument('--deps', action='store_false',
- default=True, help=_('also fetch dependencies')),
- ]
+ ArgparseArgument('package', nargs=1, help=_('package to fetch')),
+ ArgparseArgument('--deps', action='store_false', default=True, help=_('also fetch dependencies')),
+ ]
Fetch.__init__(self, args)
def run(self, config, args):
store = PackagesStore(config)
package = store.get_package(args.package[0])
- task = self.fetch(store.cookbook, package.recipes_dependencies(),
- args.deps, args.reset_rdeps, args.full_reset,
- args.print_only, args.jobs)
+ task = self.fetch(
+ store.cookbook,
+ package.recipes_dependencies(),
+ args.deps,
+ args.reset_rdeps,
+ args.full_reset,
+ args.print_only,
+ args.jobs,
+ )
return run_until_complete(task)
+
register_command(FetchRecipes)
register_command(FetchPackage)
diff --git a/cerbero/commands/genlibfiles.py b/cerbero/commands/genlibfiles.py
index 810e5ba8..e29b53f4 100644
--- a/cerbero/commands/genlibfiles.py
+++ b/cerbero/commands/genlibfiles.py
@@ -33,15 +33,18 @@ class GenLibraryFiles(Command):
name = 'genlibfiles'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('-o', '--output_dir', default=None,
- help=_('output directory where .lib files will be saved')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument(
+ '-o', '--output_dir', default=None, help=_('output directory where .lib files will be saved')
+ ),
+ ],
+ )
def run(self, config, args):
if config.target_platform != Platform.WINDOWS:
- raise UsageError(_('%s command can only be used targetting '
- 'Windows platforms') % self.name)
+ raise UsageError(_('%s command can only be used targetting ' 'Windows platforms') % self.name)
if args.output_dir is not None and not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
@@ -52,8 +55,7 @@ class GenLibraryFiles(Command):
try:
recipe.gen_library_file(args.output_dir)
except Exception as e:
- m.message(_("Error generaring library files for %s:\n %s") %
- (recipe.name, e))
+ m.message(_('Error generaring library files for %s:\n %s') % (recipe.name, e))
register_command(GenLibraryFiles)
diff --git a/cerbero/commands/gensdkshell.py b/cerbero/commands/gensdkshell.py
index 9d680e89..739dc886 100644
--- a/cerbero/commands/gensdkshell.py
+++ b/cerbero/commands/gensdkshell.py
@@ -23,13 +23,13 @@ from cerbero.errors import FatalError
from cerbero.utils import _, N_, ArgparseArgument, shell
-SCRIPT_TPL = '''\
+SCRIPT_TPL = """\
#!/bin/bash
%s
%s
-'''
+"""
class GenSdkShell(Command):
@@ -39,16 +39,15 @@ class GenSdkShell(Command):
DEFAULT_CMD = '$SHELL "$@"'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('name', nargs=1, default='sdk-shell',
- help=_('name of the scrips')),
- ArgparseArgument('-o', '--output-dir', default='.',
- help=_('output directory')),
- ArgparseArgument('-p', '--prefix',
- help=_('prefix of the SDK')),
- ArgparseArgument('--cmd', default=self.DEFAULT_CMD,
- help=_('command to run in the script')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('name', nargs=1, default='sdk-shell', help=_('name of the scrips')),
+ ArgparseArgument('-o', '--output-dir', default='.', help=_('output directory')),
+ ArgparseArgument('-p', '--prefix', help=_('prefix of the SDK')),
+ ArgparseArgument('--cmd', default=self.DEFAULT_CMD, help=_('command to run in the script')),
+ ],
+ )
def run(self, config, args):
name = args.name[0]
@@ -58,15 +57,25 @@ class GenSdkShell(Command):
cmd = args.cmd
self.runargs(config, name, output_dir, prefix, self.config.libdir, py_prefixes, cmd)
- def _putvar(self, var, value, append_separator=":"):
+ def _putvar(self, var, value, append_separator=':'):
if var in self._env:
if append_separator is not None:
self._env[var] = self._env[var] + append_separator + value
else:
self._env[var] = value
- def runargs(self, config, name, output_dir, prefix, libdir,
- py_prefixes, cmd=None, env=None, prefix_env_name='GSTREAMER_ROOT'):
+ def runargs(
+ self,
+ config,
+ name,
+ output_dir,
+ prefix,
+ libdir,
+ py_prefixes,
+ cmd=None,
+ env=None,
+ prefix_env_name='GSTREAMER_ROOT',
+ ):
if cmd == None:
cmd = self.DEFAULT_CMD
if env == None:
@@ -77,27 +86,24 @@ class GenSdkShell(Command):
prefix_env = '${%s}' % prefix_env_name
libdir = libdir.replace(prefix, prefix_env)
self._putvar('PATH', '%s/bin${PATH:+:$PATH}' % prefix_env)
- self._putvar('LD_LIBRARY_PATH',
- '%s${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}' % libdir)
- self._putvar('PKG_CONFIG_PATH', '%s/lib/pkgconfig:%s/share/pkgconfig'
- '${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}' % (prefix_env, prefix_env))
- self._putvar('XDG_DATA_DIRS',
- '%s/share${XDG_DATA_DIRS:+:$XDG_DATA_DIRS}' % prefix_env)
- self._putvar('XDG_CONFIG_DIRS',
- '%s/etc/xdg${XDG_CONFIG_DIRS:+:$XDG_CONFIG_DIRS}' % prefix_env)
- self._putvar('GST_REGISTRY_1_0', '${HOME}/.cache/gstreamer-1.0/gstreamer-cerbero-registry',
- None)
- self._putvar('GST_PLUGIN_SCANNER_1_0',
- '%s/libexec/gstreamer-1.0/gst-plugin-scanner' % prefix_env)
+ self._putvar('LD_LIBRARY_PATH', '%s${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}' % libdir)
+ self._putvar(
+ 'PKG_CONFIG_PATH',
+ '%s/lib/pkgconfig:%s/share/pkgconfig' '${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}' % (prefix_env, prefix_env),
+ )
+ self._putvar('XDG_DATA_DIRS', '%s/share${XDG_DATA_DIRS:+:$XDG_DATA_DIRS}' % prefix_env)
+ self._putvar('XDG_CONFIG_DIRS', '%s/etc/xdg${XDG_CONFIG_DIRS:+:$XDG_CONFIG_DIRS}' % prefix_env)
+ self._putvar('GST_REGISTRY_1_0', '${HOME}/.cache/gstreamer-1.0/gstreamer-cerbero-registry', None)
+ self._putvar('GST_PLUGIN_SCANNER_1_0', '%s/libexec/gstreamer-1.0/gst-plugin-scanner' % prefix_env)
self._putvar('GST_PLUGIN_PATH_1_0', '%s/lib/gstreamer-1.0' % prefix_env)
self._putvar('GST_PLUGIN_SYSTEM_PATH_1_0', '%s/lib/gstreamer-1.0' % prefix_env)
self._putvar('PYTHONPATH', '%s${PYTHONPATH:+:$PYTHONPATH}' % (os.pathsep.join(py_prefixes)))
- self._putvar('CFLAGS', '-I%s/include ${CFLAGS}' % prefix_env, " ")
- self._putvar('CXXFLAGS', '-I%s/include ${CXXFLAGS}' % prefix_env, " ")
- self._putvar('CPPFLAGS', '-I%s/include ${CPPFLAGS}' % prefix_env, " ")
- self._putvar('LDFLAGS', '-L%s ${LDFLAGS}' % libdir, " ")
- self._putvar('GIO_EXTRA_MODULES', '%s/gio/modules' % libdir)
- self._putvar('GI_TYPELIB_PATH', '%s/girepository-1.0' % libdir)
+ self._putvar('CFLAGS', '-I%s/include ${CFLAGS}' % prefix_env, ' ')
+ self._putvar('CXXFLAGS', '-I%s/include ${CXXFLAGS}' % prefix_env, ' ')
+ self._putvar('CPPFLAGS', '-I%s/include ${CPPFLAGS}' % prefix_env, ' ')
+ self._putvar('LDFLAGS', '-L%s ${LDFLAGS}' % libdir, ' ')
+ self._putvar('GIO_EXTRA_MODULES', '%s/gio/modules' % libdir)
+ self._putvar('GI_TYPELIB_PATH', '%s/girepository-1.0' % libdir)
envstr = 'export %s="%s"\n' % (prefix_env_name, prefix)
for e, v in env.items():
@@ -106,13 +112,13 @@ class GenSdkShell(Command):
filepath = os.path.join(output_dir, name)
if not os.path.exists(os.path.dirname(filepath)):
- os.mkdir(os.path.dirname(filepath))
+ os.mkdir(os.path.dirname(filepath))
with open(filepath, 'w+') as f:
f.write(SCRIPT_TPL % (envstr, cmd))
shell.new_call(['chmod', '+x', filepath])
except IOError as ex:
- raise FatalError(_("Error creating script: %s" % ex))
+ raise FatalError(_('Error creating script: %s' % ex))
register_command(GenSdkShell)
diff --git a/cerbero/commands/genvsprops.py b/cerbero/commands/genvsprops.py
index 668e7f13..dd802954 100644
--- a/cerbero/commands/genvsprops.py
+++ b/cerbero/commands/genvsprops.py
@@ -33,13 +33,20 @@ class GenVSProps(Command):
name = 'genvsprops'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('-o', '--output_dir', default='.',
- help=_('output directory where .vsprops files will be saved')),
- ArgparseArgument('-p', '--prefix', default=DEFAULT_PREFIX_MACRO,
- help=_('name of the prefix environment variable '
- '(eg:CERBERO_SDK_ROOT_X86)')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument(
+ '-o', '--output_dir', default='.', help=_('output directory where .vsprops files will be saved')
+ ),
+ ArgparseArgument(
+ '-p',
+ '--prefix',
+ default=DEFAULT_PREFIX_MACRO,
+ help=_('name of the prefix environment variable ' '(eg:CERBERO_SDK_ROOT_X86)'),
+ ),
+ ],
+ )
def run(self, config, args):
self.runargs(config, args.output_dir, args.prefix)
@@ -49,17 +56,16 @@ class GenVSProps(Command):
os.makedirs(output_dir)
for pc in PkgConfig.list_all(env=config.env):
- p2v = PkgConfig2VSProps(pc, prefix=config.prefix,
- inherit_common=True,
- prefix_replacement='$(%s)' % prefix, env=config.env)
+ p2v = PkgConfig2VSProps(
+ pc, prefix=config.prefix, inherit_common=True, prefix_replacement='$(%s)' % prefix, env=config.env
+ )
p2v.create(output_dir)
m.action('Created %s.props' % pc)
common = CommonProps(prefix)
common.create(output_dir)
- m.message('Property sheets files were sucessfully created in %s' %
- os.path.abspath(output_dir))
+ m.message('Property sheets files were sucessfully created in %s' % os.path.abspath(output_dir))
register_command(GenVSProps)
diff --git a/cerbero/commands/genxcconfig.py b/cerbero/commands/genxcconfig.py
index 43f7fa3c..e6c2bba4 100644
--- a/cerbero/commands/genxcconfig.py
+++ b/cerbero/commands/genxcconfig.py
@@ -30,14 +30,16 @@ class GenXCodeConfig(Command):
name = 'genxcconfig'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('-o', '--output_dir', default='.',
- help=_('output directory where .xcconfig files will be saved')),
- ArgparseArgument('-f', '--filename', default=None,
- help=_('filename of the .xcconfig file')),
- ArgparseArgument('libraries', nargs='*',
- help=_('List of libraries to include')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument(
+ '-o', '--output_dir', default='.', help=_('output directory where .xcconfig files will be saved')
+ ),
+ ArgparseArgument('-f', '--filename', default=None, help=_('filename of the .xcconfig file')),
+ ArgparseArgument('libraries', nargs='*', help=_('List of libraries to include')),
+ ],
+ )
def run(self, config, args):
self.runargs(config, args.output_dir, args.filename, args.libraries)
@@ -47,7 +49,7 @@ class GenXCodeConfig(Command):
os.makedirs(output_dir)
if len(libraries) == 0:
- raise UsageError("You need to specify at least one library name")
+ raise UsageError('You need to specify at least one library name')
filename = filename or libraries[0]
filepath = os.path.join(output_dir, '%s.xcconfig' % filename)
@@ -56,8 +58,7 @@ class GenXCodeConfig(Command):
xcconfig.create(filepath)
m.action('Created %s.xcconfig' % filename)
- m.message('XCode config file were sucessfully created in %s' %
- os.path.abspath(filepath))
+ m.message('XCode config file were sucessfully created in %s' % os.path.abspath(filepath))
register_command(GenXCodeConfig)
diff --git a/cerbero/commands/graph.py b/cerbero/commands/graph.py
index e8ec9219..167f2ad9 100644
--- a/cerbero/commands/graph.py
+++ b/cerbero/commands/graph.py
@@ -30,8 +30,8 @@ from cerbero.utils import messages as m
class GraphType:
- RECIPE = 'recipe',
- PACKAGE = 'package',
+ RECIPE = ('recipe',)
+ PACKAGE = ('package',)
PACKAGE_RECIPES = 'package_recipes'
@@ -40,23 +40,28 @@ class Graph(Command):
name = 'graph'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('name', nargs=1,
- help=_('name of the recipe or package to generate deps from')),
- ArgparseArgument('-r', '--recipe', action='store_true',
- help=_('generate deps for the given recipe')),
- ArgparseArgument('-p', '--package', action='store_true',
- help=_('generate deps for the given package')),
- ArgparseArgument('-pr', '--package-recipes', action='store_true',
- help=_('generate recipe deps for the given package')),
- ArgparseArgument('-o', '--output', nargs=1,
- help=_('output file for the SVG graph')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('name', nargs=1, help=_('name of the recipe or package to generate deps from')),
+ ArgparseArgument('-r', '--recipe', action='store_true', help=_('generate deps for the given recipe')),
+ ArgparseArgument('-p', '--package', action='store_true', help=_('generate deps for the given package')),
+ ArgparseArgument(
+ '-pr',
+ '--package-recipes',
+ action='store_true',
+ help=_('generate recipe deps for the given package'),
+ ),
+ ArgparseArgument('-o', '--output', nargs=1, help=_('output file for the SVG graph')),
+ ],
+ )
def run(self, config, args):
if args.recipe + args.package + args.package_recipes == 0:
- m.error('Error: You need to specify either recipe, package or package-recipes '
- 'mode to generate the dependency graph')
+ m.error(
+ 'Error: You need to specify either recipe, package or package-recipes '
+ 'mode to generate the dependency graph'
+ )
return
if args.recipe + args.package + args.package_recipes > 1:
@@ -64,8 +69,10 @@ class Graph(Command):
return
if not shutil.which('dot'):
- m.error('Error: dot command not found. Please install graphviz it using '
- 'your package manager. e.g. apt/dnf/brew install graphviz')
+ m.error(
+ 'Error: dot command not found. Please install graphviz it using '
+ 'your package manager. e.g. apt/dnf/brew install graphviz'
+ )
return
label = ''
@@ -77,7 +84,7 @@ class Graph(Command):
label = 'package'
elif args.package_recipes:
self.graph_type = GraphType.PACKAGE_RECIPES
- label = 'package\'s recipes'
+ label = "package's recipes"
if self.graph_type == GraphType.RECIPE or self.graph_type == GraphType.PACKAGE_RECIPES:
self.cookbook = CookBook(config)
@@ -93,7 +100,7 @@ class Graph(Command):
f.write(dot)
shell.new_call(['dot', '-Tsvg', tmp.name, '-o', output])
- m.message("Dependency graph for %s generated at %s" % (name, output))
+ m.message('Dependency graph for %s generated at %s' % (name, output))
def _dot_gen(self, name, graph_type, already_parsed=[]):
already_parsed.append(name)
diff --git a/cerbero/commands/info.py b/cerbero/commands/info.py
index b45e8bfc..f95ae224 100644
--- a/cerbero/commands/info.py
+++ b/cerbero/commands/info.py
@@ -23,14 +23,14 @@ from cerbero.packages.packagesstore import PackagesStore
from cerbero.packages.package import MetaPackage
-INFO_TPL = '''
+INFO_TPL = """
Name: %(name)s
Version: %(version)s
Homepage: %(url)s
Dependencies: %(deps)s
Licences: %(licenses)s
Description: %(desc)s
-'''
+"""
class PackageInfo(Command):
@@ -38,13 +38,19 @@ class PackageInfo(Command):
name = 'packageinfo'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('package', nargs=1,
- help=_('name of the package')),
- ArgparseArgument('-l', '--list-files', action='store_true',
- default=False,
- help=_('List all files installed by this package')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('package', nargs=1, help=_('name of the package')),
+ ArgparseArgument(
+ '-l',
+ '--list-files',
+ action='store_true',
+ default=False,
+ help=_('List all files installed by this package'),
+ ),
+ ],
+ )
def run(self, config, args):
store = PackagesStore(config)
@@ -57,16 +63,19 @@ class PackageInfo(Command):
if not isinstance(p, MetaPackage):
recipes_licenses = p.recipes_licenses()
recipes_licenses.update(p.devel_recipes_licenses())
- for recipe_name, categories_licenses in \
- recipes_licenses.items():
+ for recipe_name, categories_licenses in recipes_licenses.items():
for category_licenses in categories_licenses.values():
licenses.extend(category_licenses)
licenses = sorted(list(set(licenses)))
- d = {'name': p.name, 'version': p.version, 'url': p.url,
- 'licenses': ' and '.join([l.acronym for l in licenses]),
- 'desc': p.shortdesc,
- 'deps': ', '.join([p.name for p in
- store.get_package_deps(p_name, True)])}
+ d = {
+ 'name': p.name,
+ 'version': p.version,
+ 'url': p.url,
+ 'licenses': ' and '.join([l.acronym for l in licenses]),
+ 'desc': p.shortdesc,
+ 'deps': ', '.join([p.name for p in store.get_package_deps(p_name, True)]),
+ }
m.message(INFO_TPL % d)
+
register_command(PackageInfo)
diff --git a/cerbero/commands/list.py b/cerbero/commands/list.py
index 1677cc23..a31babc1 100644
--- a/cerbero/commands/list.py
+++ b/cerbero/commands/list.py
@@ -23,6 +23,7 @@ from cerbero.utils import messages as m
from cerbero.packages.packagesstore import PackagesStore
from cerbero.utils import ArgparseArgument
+
class List(Command):
doc = N_('List all the available recipes')
name = 'list'
@@ -31,14 +32,14 @@ class List(Command):
cookbook = CookBook(config)
recipes = cookbook.get_recipes_list()
if len(recipes) == 0:
- m.message(_("No recipes found"))
+ m.message(_('No recipes found'))
for recipe in recipes:
try:
- current = recipe.built_version().split("\n")[0]
+ current = recipe.built_version().split('\n')[0]
except:
- current = "Not checked out"
+ current = 'Not checked out'
- m.message("%s - %s (current checkout: %s) - %s" % (recipe.name, recipe.version, current, recipe.__file__))
+ m.message('%s - %s (current checkout: %s) - %s' % (recipe.name, recipe.version, current, recipe.__file__))
class ListPackages(Command):
@@ -49,9 +50,10 @@ class ListPackages(Command):
store = PackagesStore(config)
packages = store.get_packages_list()
if len(packages) == 0:
- m.message(_("No packages found"))
+ m.message(_('No packages found'))
for p in packages:
- m.message("%s - %s - %s" % (p.name, p.version, p.__file__))
+ m.message('%s - %s - %s' % (p.name, p.version, p.__file__))
+
class ShowConfig(Command):
doc = N_('Show configuration settings')
@@ -62,13 +64,13 @@ class ShowConfig(Command):
def run(self, config, args):
for n in config._properties:
- if n == "variants":
- print("%25s :" % (n))
+ if n == 'variants':
+ print('%25s :' % (n))
variants = getattr(config, n).__dict__
for v in variants:
- print("%30s : %s" % (v, variants[v]))
+ print('%30s : %s' % (v, variants[v]))
else:
- print("%25s : %s" % (n, getattr(config, n)))
+ print('%25s : %s' % (n, getattr(config, n)))
register_command(List)
diff --git a/cerbero/commands/package.py b/cerbero/commands/package.py
index aca07bd6..3fdd8b9a 100644
--- a/cerbero/commands/package.py
+++ b/cerbero/commands/package.py
@@ -34,50 +34,92 @@ class Package(Command):
name = 'package'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('package', nargs=1,
- help=_('name of the package to create')),
- ArgparseArgument('-o', '--output-dir', default='.',
- help=_('Output directory for the tarball file')),
- ArgparseArgument('-t', '--tarball', action='store_true',
- default=False,
- help=_('Creates a tarball instead of a native package')),
- ArgparseArgument('-n', '--no-split', action='store_true',
- default=False,
- help=_('(only meaningfull when --tarball is set) Create one single '
- 'tarball with devel and runtime files')),
- ArgparseArgument('-f', '--force', action='store_true',
- default=False, help=_('Delete any existing package file')),
- ArgparseArgument('-d', '--no-devel', action='store_false',
- default=True, help=_('Do not create the development version '
- 'of this package')),
- ArgparseArgument('-s', '--skip-deps-build', action='store_true',
- default=False, help=_('Do not build the recipes needed to '
- 'create this package (conflicts with --only-build-deps)')),
- ArgparseArgument('-b', '--only-build-deps', action='store_true',
- default=False, help=_('Only build the recipes needed to '
- 'create this package (conflicts with --skip-deps-build)')),
- ArgparseArgument('-k', '--keep-temp', action='store_true',
- default=False, help=_('Keep temporary files for debug')),
- ArgparseArgument('--offline', action='store_true',
- default=False, help=_('Use only the source cache, no network')),
- ArgparseArgument('--dry-run', action='store_true',
- default=False, help=_('Only print the packages that will be built')),
- ArgparseArgument('--compress-method', type=str,
- choices=['default', 'xz', 'bz2', 'none'], default='default',
- help=_('Select compression method for tarballs')),
- ArgparseArgument('--jobs', '-j', action='store', type=int,
- default=0, help=_('How many recipes to build concurrently. '
- '0 = number of CPUs.')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('package', nargs=1, help=_('name of the package to create')),
+ ArgparseArgument('-o', '--output-dir', default='.', help=_('Output directory for the tarball file')),
+ ArgparseArgument(
+ '-t',
+ '--tarball',
+ action='store_true',
+ default=False,
+ help=_('Creates a tarball instead of a native package'),
+ ),
+ ArgparseArgument(
+ '-n',
+ '--no-split',
+ action='store_true',
+ default=False,
+ help=_(
+ '(only meaningfull when --tarball is set) Create one single '
+ 'tarball with devel and runtime files'
+ ),
+ ),
+ ArgparseArgument(
+ '-f', '--force', action='store_true', default=False, help=_('Delete any existing package file')
+ ),
+ ArgparseArgument(
+ '-d',
+ '--no-devel',
+ action='store_false',
+ default=True,
+ help=_('Do not create the development version ' 'of this package'),
+ ),
+ ArgparseArgument(
+ '-s',
+ '--skip-deps-build',
+ action='store_true',
+ default=False,
+ help=_(
+ 'Do not build the recipes needed to ' 'create this package (conflicts with --only-build-deps)'
+ ),
+ ),
+ ArgparseArgument(
+ '-b',
+ '--only-build-deps',
+ action='store_true',
+ default=False,
+ help=_(
+ 'Only build the recipes needed to ' 'create this package (conflicts with --skip-deps-build)'
+ ),
+ ),
+ ArgparseArgument(
+ '-k', '--keep-temp', action='store_true', default=False, help=_('Keep temporary files for debug')
+ ),
+ ArgparseArgument(
+ '--offline', action='store_true', default=False, help=_('Use only the source cache, no network')
+ ),
+ ArgparseArgument(
+ '--dry-run',
+ action='store_true',
+ default=False,
+ help=_('Only print the packages that will be built'),
+ ),
+ ArgparseArgument(
+ '--compress-method',
+ type=str,
+ choices=['default', 'xz', 'bz2', 'none'],
+ default='default',
+ help=_('Select compression method for tarballs'),
+ ),
+ ArgparseArgument(
+ '--jobs',
+ '-j',
+ action='store',
+ type=int,
+ default=0,
+ help=_('How many recipes to build concurrently. ' '0 = number of CPUs.'),
+ ),
+ ],
+ )
def run(self, config, args):
self.store = PackagesStore(config, offline=args.offline)
p = self.store.get_package(args.package[0])
if args.skip_deps_build and args.only_build_deps:
- raise UsageError(_("Cannot use --skip-deps-build together with "
- "--only-build-deps"))
+ raise UsageError(_('Cannot use --skip-deps-build together with ' '--only-build-deps'))
if not args.skip_deps_build:
self._build_deps(config, p, args.no_devel, args.offline, args.dry_run, args.jobs)
@@ -95,8 +137,7 @@ class Package(Command):
p.pre_package()
packager_class = Packager
if args.tarball:
- if config.target_platform == Platform.ANDROID and \
- config.target_arch == Architecture.UNIVERSAL:
+ if config.target_platform == Platform.ANDROID and config.target_arch == Architecture.UNIVERSAL:
packager_class = AndroidPackager
else:
packager_class = DistTarball
@@ -108,27 +149,30 @@ class Package(Command):
args.no_split = True
packager_class = DistTarball
- m.action(_("Creating package for %s") % p.name)
+ m.action(_('Creating package for %s') % p.name)
pkg = packager_class(config, p, self.store)
output_dir = os.path.abspath(args.output_dir)
if isinstance(pkg, DistTarball):
- paths = pkg.pack(output_dir, args.no_devel, args.force,
- args.keep_temp, split=not args.no_split,
- strip_binaries=p.strip)
+ paths = pkg.pack(
+ output_dir, args.no_devel, args.force, args.keep_temp, split=not args.no_split, strip_binaries=p.strip
+ )
else:
- paths = pkg.pack(output_dir, args.no_devel,
- args.force, args.keep_temp)
+ paths = pkg.pack(output_dir, args.no_devel, args.force, args.keep_temp)
if None in paths:
paths.remove(None)
paths = p.post_package(paths, output_dir) or paths
- m.action(_("Package successfully created in %s") %
- ' '.join([os.path.abspath(x) for x in paths]))
+ m.action(_('Package successfully created in %s') % ' '.join([os.path.abspath(x) for x in paths]))
def _build_deps(self, config, package, has_devel, offline, dry_run, jobs):
build_command = build.Build()
- build_command.runargs(config, package.recipes_dependencies(has_devel),
- cookbook=self.store.cookbook, dry_run=dry_run, offline=offline,
- jobs=jobs)
+ build_command.runargs(
+ config,
+ package.recipes_dependencies(has_devel),
+ cookbook=self.store.cookbook,
+ dry_run=dry_run,
+ offline=offline,
+ jobs=jobs,
+ )
register_command(Package)
diff --git a/cerbero/commands/rdeps.py b/cerbero/commands/rdeps.py
index 8065d5d9..508aafdc 100644
--- a/cerbero/commands/rdeps.py
+++ b/cerbero/commands/rdeps.py
@@ -28,10 +28,12 @@ class RDeps(Command):
name = 'rdeps'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('recipe', nargs=1,
- help=_('name of the recipe')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('recipe', nargs=1, help=_('name of the recipe')),
+ ],
+ )
def run(self, config, args):
cookbook = CookBook(config)
@@ -44,4 +46,5 @@ class RDeps(Command):
for recipe in recipes:
m.message(recipe.name)
+
register_command(RDeps)
diff --git a/cerbero/commands/runit.py b/cerbero/commands/runit.py
index 4f6e2c25..7ecdaf5e 100644
--- a/cerbero/commands/runit.py
+++ b/cerbero/commands/runit.py
@@ -28,10 +28,12 @@ class Run(Command):
name = 'run'
def __init__(self):
- Command.__init__(self,
- [ArgparseArgument('cmd', nargs=argparse.REMAINDER,
- help=_('command to run')),
- ])
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument('cmd', nargs=argparse.REMAINDER, help=_('command to run')),
+ ],
+ )
def run(self, config, args):
sys.exit(shell.new_call(args.cmd, fail=False, env=config.env))
diff --git a/cerbero/commands/shell.py b/cerbero/commands/shell.py
index e3582746..ee5f2717 100644
--- a/cerbero/commands/shell.py
+++ b/cerbero/commands/shell.py
@@ -22,15 +22,16 @@ from cerbero.commands import Command, register_command
from cerbero.build.cookbook import CookBook
from cerbero.utils import _, N_, shell, ArgparseArgument, add_system_libs
+
class Shell(Command):
doc = N_('Starts a shell with the build environment')
name = 'shell'
def __init__(self):
args = [
- ArgparseArgument('--use-system-libs', action='store_true',
- default=False,
- help=_('add system paths to PKG_CONFIG_PATH')),
+ ArgparseArgument(
+ '--use-system-libs', action='store_true', default=False, help=_('add system paths to PKG_CONFIG_PATH')
+ ),
]
Command.__init__(self, args)
@@ -45,9 +46,14 @@ class Shell(Command):
if args.use_system_libs:
add_system_libs(config, env, config.env)
- shell.enter_build_environment(config.target_platform,
- config.target_arch, config.distro, sourcedir=None,
- env=env, bash_completions=config.bash_completions)
+ shell.enter_build_environment(
+ config.target_platform,
+ config.target_arch,
+ config.distro,
+ sourcedir=None,
+ env=env,
+ bash_completions=config.bash_completions,
+ )
register_command(Shell)
diff --git a/cerbero/commands/tag.py b/cerbero/commands/tag.py
index 5c241b76..cfa166d9 100644
--- a/cerbero/commands/tag.py
+++ b/cerbero/commands/tag.py
@@ -24,21 +24,16 @@ from cerbero.utils import messages as m
class Tag(Command):
- doc = N_('Tag a git recipe or all git recipes using their '
- 'sdk-$version branch')
+ doc = N_('Tag a git recipe or all git recipes using their ' 'sdk-$version branch')
name = 'tag'
def __init__(self):
args = [
- ArgparseArgument('recipe',
- help=_('name of the recipe to tag or "all" to '
- 'tag all recipes')),
- ArgparseArgument('tagname',
- help=_('name of the tag to use')),
- ArgparseArgument('tagdescription',
- help=_('description of the tag')),
- ArgparseArgument('-f', '--force', action='store_true',
- default=False, help=_('Replace tag if existing'))]
+ ArgparseArgument('recipe', help=_('name of the recipe to tag or "all" to ' 'tag all recipes')),
+ ArgparseArgument('tagname', help=_('name of the tag to use')),
+ ArgparseArgument('tagdescription', help=_('description of the tag')),
+ ArgparseArgument('-f', '--force', action='store_true', default=False, help=_('Replace tag if existing')),
+ ]
Command.__init__(self, args)
def run(self, config, args):
@@ -48,34 +43,30 @@ class Tag(Command):
else:
recipes = [cookbook.get_recipe(args.recipe)]
if len(recipes) == 0:
- m.message(_("No recipes found"))
+ m.message(_('No recipes found'))
tagname = args.tagname
tagdescription = args.tagdescription
force = args.force
for recipe in recipes:
try:
- if recipe.stype != SourceType.GIT and \
- recipe.stype != SourceType.GIT_TARBALL:
- m.message(_("Recipe '%s' has a custom source repository, "
- "skipping") % recipe.name)
+ if recipe.stype != SourceType.GIT and recipe.stype != SourceType.GIT_TARBALL:
+ m.message(_("Recipe '%s' has a custom source repository, " 'skipping') % recipe.name)
continue
recipe.fetch(checkout=False)
tags = git.list_tags(recipe.repo_dir)
- exists = (tagname in tags)
+ exists = tagname in tags
if exists:
if not force:
- m.warning(_("Recipe '%s' tag '%s' already exists, "
- "not updating" % (recipe.name, tagname)))
+ m.warning(_("Recipe '%s' tag '%s' already exists, " 'not updating' % (recipe.name, tagname)))
continue
git.delete_tag(recipe.repo_dir, tagname)
commit = 'origin/sdk-%s' % recipe.version
- git.create_tag(recipe.repo_dir, tagname, tagdescription,
- commit)
+ git.create_tag(recipe.repo_dir, tagname, tagdescription, commit)
except:
- m.warning(_("Error tagging recipe %s" % recipe.name))
+ m.warning(_('Error tagging recipe %s' % recipe.name))
register_command(Tag)
diff --git a/cerbero/commands/wipe.py b/cerbero/commands/wipe.py
index c2262ad4..172ece87 100644
--- a/cerbero/commands/wipe.py
+++ b/cerbero/commands/wipe.py
@@ -30,18 +30,23 @@ class Wipe(Command):
name = 'wipe'
def __init__(self):
- Command.__init__(self, [
- ArgparseArgument('--force', action='store_true',
+ Command.__init__(
+ self,
+ [
+ ArgparseArgument(
+ '--force',
+ action='store_true',
default=False,
- help=_('force the deletion of everything without user '
- 'input')),
- ArgparseArgument('--build-tools', action='store_true',
- default=False,
- help=_('wipe the build tools too')),
- ArgparseArgument('--keep-sources', action='store_true',
- default=False,
- help=_('keep downloaded source files')),
- ])
+ help=_('force the deletion of everything without user ' 'input'),
+ ),
+ ArgparseArgument(
+ '--build-tools', action='store_true', default=False, help=_('wipe the build tools too')
+ ),
+ ArgparseArgument(
+ '--keep-sources', action='store_true', default=False, help=_('keep downloaded source files')
+ ),
+ ],
+ )
def run(self, config, args):
to_remove = [os.path.join(config.home_dir, config.cache_file)]
@@ -61,25 +66,25 @@ class Wipe(Command):
return
options = ['yes', 'no']
- en_msg = "WARNING!!!\n" \
- "This command will delete cerbero's build cache"
+ en_msg = 'WARNING!!!\n' "This command will delete cerbero's build cache"
if not args.keep_sources:
- en_msg += ", the sources directory,"
- en_msg += " and the builds directory " \
- "to reset the build system to its initial state.\n" \
- "The following paths will be removed:\n%s\n" \
- "Do you want to continue?" % '\n'.join(to_remove)
+ en_msg += ', the sources directory,'
+ en_msg += (
+ ' and the builds directory '
+ 'to reset the build system to its initial state.\n'
+ 'The following paths will be removed:\n%s\n'
+ 'Do you want to continue?' % '\n'.join(to_remove)
+ )
msg = _(en_msg)
# Ask once
if shell.prompt(msg, options) == options[0]:
- msg = _("Are you sure?")
+ msg = _('Are you sure?')
# Ask twice
if shell.prompt(msg, options) == options[0]:
# Start with the Apocalypse
self.wipe(to_remove)
def wipe(self, paths):
-
def _onerror(func, path, exc_info):
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
@@ -88,7 +93,7 @@ class Wipe(Command):
raise
for path in paths:
- m.action(_("Removing path: %s") % path)
+ m.action(_('Removing path: %s') % path)
if not os.path.exists(path):
continue
if os.path.isfile(path):
diff --git a/cerbero/config.py b/cerbero/config.py
index 696781f0..2d47eed4 100644
--- a/cerbero/config.py
+++ b/cerbero/config.py
@@ -40,7 +40,7 @@ USER_CONFIG_FILENAME = 'cerbero.%s' % CONFIG_EXT
USER_CONFIG_FILE = os.path.join(USER_CONFIG_DIR, USER_CONFIG_FILENAME)
DEFAULT_GIT_ROOT = 'https://gitlab.freedesktop.org/gstreamer'
DEFAULT_ALLOW_PARALLEL_BUILD = True
-DEFAULT_PACKAGER = "Default <default@change.me>"
+DEFAULT_PACKAGER = 'Default <default@change.me>'
CERBERO_UNINSTALLED = 'CERBERO_UNINSTALLED'
DEFAULT_MIRRORS = ['https://gstreamer.freedesktop.org/src/mirror/']
RUST_TRIPLE_MAPPING = {
@@ -65,11 +65,11 @@ RUST_TRIPLE_MAPPING = {
def set_nofile_ulimit():
- '''
+ """
Some newer toolchains such as our GCC 8.2 cross toolchain exceed the
1024 file ulimit, so let's increase it.
See: https://gitlab.freedesktop.org/gstreamer/cerbero/issues/165
- '''
+ """
try:
import resource
except ImportError:
@@ -82,11 +82,29 @@ def set_nofile_ulimit():
except (OSError, ValueError):
print('Failed to increase file ulimit, you may see linker failures')
+
class Variants(object):
# Variants that are booleans, and are unset when prefixed with 'no'
- __disabled_variants = ['x11', 'alsa', 'pulse', 'cdparanoia', 'v4l2',
- 'gi', 'unwind', 'rpi', 'visualstudio', 'mingw', 'uwp', 'qt5',
- 'intelmsdk', 'python', 'werror', 'vaapi', 'rust', 'qt6']
+ __disabled_variants = [
+ 'x11',
+ 'alsa',
+ 'pulse',
+ 'cdparanoia',
+ 'v4l2',
+ 'gi',
+ 'unwind',
+ 'rpi',
+ 'visualstudio',
+ 'mingw',
+ 'uwp',
+ 'qt5',
+ 'intelmsdk',
+ 'python',
+ 'werror',
+ 'vaapi',
+ 'rust',
+ 'qt6',
+ ]
__enabled_variants = ['debug', 'optimization', 'testspackage']
__bool_variants = __enabled_variants + __disabled_variants
# Variants that are `key: (values)`, with the first value in the tuple
@@ -142,13 +160,13 @@ class Variants(object):
self.vscrt = 'mdd'
def __setattr__(self, attr, value):
- if '-' in attr:
- raise AssertionError('Variant name {!r} must not contain \'-\''.format(attr))
- super().__setattr__(attr, value)
- # UWP implies Visual Studio
- if attr == 'uwp' and value:
- self.visualstudio = True
- self.mingw = False
+ if '-' in attr:
+ raise AssertionError("Variant name {!r} must not contain '-'".format(attr))
+ super().__setattr__(attr, value)
+ # UWP implies Visual Studio
+ if attr == 'uwp' and value:
+ self.visualstudio = True
+ self.mingw = False
def __getattr__(self, name):
if name.startswith('no') and name[2:] in self.bools():
@@ -167,8 +185,8 @@ class Variants(object):
return sorted(self.__mapping_variants)
-class Config (object):
- '''
+class Config(object):
+ """
Holds the configuration for the build
@ivar build_tools_config: Configuration for build tools
@@ -181,36 +199,95 @@ class Config (object):
@type py_win_prefix: str
@ivar py_prefixes: List of python prefixes
@type py_prefixes: list
- '''
-
- _properties = ['platform', 'target_platform', 'arch', 'target_arch',
- 'prefix', 'recipes_dir', 'host', 'build', 'target',
- 'sources', 'local_sources', 'lib_suffix', 'git_root',
- 'distro', 'target_distro', 'environ_dir', 'cache_file',
- 'toolchain_prefix', 'toolchain_version', 'distro_version',
- 'target_distro_version', 'allow_system_libs',
- 'packages_dir', 'py_prefix', 'logs',
- 'install_dir', 'allow_parallel_build', 'num_of_cpus',
- 'use_configure_cache', 'packages_prefix', 'packager',
- 'data_dir', 'min_osx_sdk_version', 'external_recipes',
- 'external_packages', 'use_ccache', 'force_git_commit',
- 'universal_archs', 'osx_target_sdk_version', 'variants',
- 'build_tools_prefix', 'build_tools_sources', 'build_tools_logs',
- 'build_tools_cache', 'home_dir', 'recipes_commits',
- 'recipes_remotes', 'ios_platform', 'extra_build_tools',
- 'distro_packages_install', 'interactive', 'bash_completions',
- 'target_arch_flags', 'sysroot', 'isysroot',
- 'extra_lib_path', 'cached_sources', 'tools_prefix',
- 'ios_min_version', 'toolchain_path', 'mingw_perl_prefix',
- 'msvc_env_for_toolchain', 'mingw_env_for_toolchain',
- 'msvc_env_for_build_system', 'mingw_env_for_build_system',
- 'msvc_version', 'meson_properties', 'manifest',
- 'extra_properties', 'qt5_qmake_path', 'qt5_pkgconfigdir',
- 'for_shell', 'package_tarball_compression', 'extra_mirrors',
- 'extra_bootstrap_packages', 'moltenvk_prefix',
- 'vs_install_path', 'vs_install_version', 'exe_suffix',
- 'rust_prefix', 'rustup_home', 'cargo_home', 'tomllib_path',
- 'qt6_qmake_path', 'system_build_tools']
+ """
+
+ _properties = [
+ 'platform',
+ 'target_platform',
+ 'arch',
+ 'target_arch',
+ 'prefix',
+ 'recipes_dir',
+ 'host',
+ 'build',
+ 'target',
+ 'sources',
+ 'local_sources',
+ 'lib_suffix',
+ 'git_root',
+ 'distro',
+ 'target_distro',
+ 'environ_dir',
+ 'cache_file',
+ 'toolchain_prefix',
+ 'toolchain_version',
+ 'distro_version',
+ 'target_distro_version',
+ 'allow_system_libs',
+ 'packages_dir',
+ 'py_prefix',
+ 'logs',
+ 'install_dir',
+ 'allow_parallel_build',
+ 'num_of_cpus',
+ 'use_configure_cache',
+ 'packages_prefix',
+ 'packager',
+ 'data_dir',
+ 'min_osx_sdk_version',
+ 'external_recipes',
+ 'external_packages',
+ 'use_ccache',
+ 'force_git_commit',
+ 'universal_archs',
+ 'osx_target_sdk_version',
+ 'variants',
+ 'build_tools_prefix',
+ 'build_tools_sources',
+ 'build_tools_logs',
+ 'build_tools_cache',
+ 'home_dir',
+ 'recipes_commits',
+ 'recipes_remotes',
+ 'ios_platform',
+ 'extra_build_tools',
+ 'distro_packages_install',
+ 'interactive',
+ 'bash_completions',
+ 'target_arch_flags',
+ 'sysroot',
+ 'isysroot',
+ 'extra_lib_path',
+ 'cached_sources',
+ 'tools_prefix',
+ 'ios_min_version',
+ 'toolchain_path',
+ 'mingw_perl_prefix',
+ 'msvc_env_for_toolchain',
+ 'mingw_env_for_toolchain',
+ 'msvc_env_for_build_system',
+ 'mingw_env_for_build_system',
+ 'msvc_version',
+ 'meson_properties',
+ 'manifest',
+ 'extra_properties',
+ 'qt5_qmake_path',
+ 'qt5_pkgconfigdir',
+ 'for_shell',
+ 'package_tarball_compression',
+ 'extra_mirrors',
+ 'extra_bootstrap_packages',
+ 'moltenvk_prefix',
+ 'vs_install_path',
+ 'vs_install_version',
+ 'exe_suffix',
+ 'rust_prefix',
+ 'rustup_home',
+ 'cargo_home',
+ 'tomllib_path',
+ 'qt6_qmake_path',
+ 'system_build_tools',
+ ]
cookbook = None
@@ -219,9 +296,9 @@ class Config (object):
self.build_tools_config = None
self._is_build_tools_config = is_build_tools_config
self.py_prefixes = []
- self.py_prefix = ""
- self.py_plat_prefix = ""
- self.py_win_prefix = ""
+ self.py_prefix = ''
+ self.py_plat_prefix = ''
+ self.py_win_prefix = ''
for a in self._properties:
setattr(self, a, None)
@@ -320,13 +397,13 @@ class Config (object):
config.prefix = os.path.join(self.prefix)
# qmake_path is different for each arch in android-universal, but
# not in ios-universal.
- qtpkgdir, qmake5 = detect_qt5(config.target_platform, config.target_arch,
- self.target_arch == Architecture.UNIVERSAL)
+ qtpkgdir, qmake5 = detect_qt5(
+ config.target_platform, config.target_arch, self.target_arch == Architecture.UNIVERSAL
+ )
config.set_property('qt5_qmake_path', qmake5)
config.set_property('qt5_pkgconfigdir', qtpkgdir)
# Qt6
- qmake6 = detect_qt6(config.target_platform, config.target_arch,
- self.target_arch == Architecture.UNIVERSAL)
+ qmake6 = detect_qt6(config.target_platform, config.target_arch, self.target_arch == Architecture.UNIVERSAL)
config.set_property('qt6_qmake_path', qmake6)
# We already called these functions on `self` above
if config is not self:
@@ -339,8 +416,7 @@ class Config (object):
# Ensure that variants continue to override all other configuration
self.variants.override(variants_override)
if not self.prefix_is_executable() and self.variants.gi:
- m.warning(_("gobject introspection requires an executable "
- "prefix, 'gi' variant will be removed"))
+ m.warning(_('gobject introspection requires an executable ' "prefix, 'gi' variant will be removed"))
self.variants.gi = False
for c in list(self.arch_config.values()):
@@ -348,18 +424,21 @@ class Config (object):
self.do_setup_env()
-
if self._is_build_tools_config:
m.message('Build tools install prefix will be {}'.format(self.prefix))
else:
if self.can_use_msvc():
- m.message('Building recipes with Visual Studio {} whenever possible'
- .format(get_vs_year_version(self.msvc_version)))
+ m.message(
+ 'Building recipes with Visual Studio {} whenever possible'.format(
+ get_vs_year_version(self.msvc_version)
+ )
+ )
if self.vs_install_path:
m.message('Using Visual Studio installed at {!r}'.format(self.vs_install_path))
m.message('Install prefix will be {}'.format(self.prefix))
if self.distro == Distro.MSYS:
import time
+
print('!!!!!!!!!!!!')
print('DEPRECATION: You are using the old MSYS which is deprecated and will be unsupported SOON!')
print('!!!!!!!!!!!!')
@@ -380,7 +459,6 @@ class Config (object):
if arches:
m.message('Building the following arches: ' + ' '.join(arches))
-
def do_setup_env(self):
self._create_paths()
@@ -391,12 +469,12 @@ class Config (object):
self.env = self.get_env(self.prefix, libdir)
def get_wine_runtime_env(self, prefix, env):
- '''
+ """
When we're creating a cross-winXX shell, these runtime environment
variables are only useful if the built binaries will be run using Wine,
so convert them to values that can be understood by programs running
under Wine.
- '''
+ """
runtime_env = (
'GI_TYPELIB_PATH',
'XDG_DATA_DIRS',
@@ -432,8 +510,10 @@ class Config (object):
elif EnvVar.is_path(k) or EnvVar.is_arg(k) or EnvVar.is_cmd(k):
ret_env[k] = new_v
else:
- raise FatalError("Don't know how to combine the environment "
- "variable '%s' with values '%s' and '%s'" % (k, new_v, old_v))
+ raise FatalError(
+ "Don't know how to combine the environment "
+ "variable '%s' with values '%s' and '%s'" % (k, new_v, old_v)
+ )
for k in old_env.keys():
if k not in new_env:
ret_env[k] = old_env[k]
@@ -451,25 +531,20 @@ class Config (object):
xdgconfigdir = os.path.join(prefix, 'etc', 'xdg')
xcursordir = os.path.join(prefix, 'share', 'icons')
aclocalflags = '-I{} -I{}'.format(
- os.path.join(prefix, 'share', 'aclocal'),
- os.path.join(self.build_tools_prefix, 'share', 'aclocal'))
+ os.path.join(prefix, 'share', 'aclocal'), os.path.join(self.build_tools_prefix, 'share', 'aclocal')
+ )
- perlversionpath = os.path.join(libdir, 'perl5', 'site_perl',
- self._perl_version())
+ perlversionpath = os.path.join(libdir, 'perl5', 'site_perl', self._perl_version())
if self.target_platform == Platform.WINDOWS:
# On windows even if perl version is 5.8.8, modules can be
# installed in 5.8
perlversionpath = perlversionpath.rsplit('.', 1)[0]
- perl5lib = ':'.join(
- [to_unixpath(os.path.join(libdir, 'perl5')),
- to_unixpath(perlversionpath)])
+ perl5lib = ':'.join([to_unixpath(os.path.join(libdir, 'perl5')), to_unixpath(perlversionpath)])
gstpluginpath = os.path.join(libdir, 'gstreamer-0.10')
gstpluginpath10 = os.path.join(libdir, 'gstreamer-1.0')
- gstregistry = os.path.join('~', '.gstreamer-0.10',
- 'cerbero-registry-%s' % self.target_arch)
- gstregistry10 = os.path.join('~', '.cache', 'gstreamer-1.0',
- 'cerbero-registry-%s' % self.target_arch)
+ gstregistry = os.path.join('~', '.gstreamer-0.10', 'cerbero-registry-%s' % self.target_arch)
+ gstregistry10 = os.path.join('~', '.cache', 'gstreamer-1.0', 'cerbero-registry-%s' % self.target_arch)
gstregistry = os.path.expanduser(gstregistry)
gstregistry10 = os.path.expanduser(gstregistry10)
@@ -482,7 +557,7 @@ class Config (object):
pythonpath = os.pathsep.join(pythonpath)
if self.platform == Platform.LINUX:
- xdgdatadir += ":/usr/share:/usr/local/share"
+ xdgdatadir += ':/usr/share:/usr/local/share'
ldflags = self.config_env.get('LDFLAGS', '')
ldflags_libdir = '-L%s ' % libdir
@@ -491,8 +566,7 @@ class Config (object):
path = self.config_env.get('PATH', None)
if not self._is_build_tools_config:
- path = self._join_path(
- os.path.join(self.build_tools_config.prefix, 'bin'), path)
+ path = self._join_path(os.path.join(self.build_tools_config.prefix, 'bin'), path)
if self.variants.rust:
path = self._join_path(os.path.join(self.cargo_home, 'bin'), path)
# Add the prefix bindir after the build-tools bindir so that on Windows
@@ -507,36 +581,35 @@ class Config (object):
if self.extra_lib_path is not None:
ld_library_path = self._join_path(ld_library_path, self.extra_lib_path)
if self.toolchain_prefix is not None:
- ld_library_path = self._join_path(ld_library_path,
- os.path.join(self.toolchain_prefix, 'lib'))
- includedir = self._join_path(includedir,
- os.path.join(self.toolchain_prefix, 'include'))
+ ld_library_path = self._join_path(ld_library_path, os.path.join(self.toolchain_prefix, 'lib'))
+ includedir = self._join_path(includedir, os.path.join(self.toolchain_prefix, 'include'))
# Most of these variables are extracted from jhbuild
- env = {'LD_LIBRARY_PATH': ld_library_path,
- 'LDFLAGS': ldflags,
- 'PATH': path,
- 'MANPATH': manpathdir,
- 'INFOPATH': infopathdir,
- 'GI_TYPELIB_PATH': typelibpath,
- 'XDG_DATA_DIRS': xdgdatadir,
- 'XDG_CONFIG_DIRS': xdgconfigdir,
- 'XCURSOR_PATH': xcursordir,
- 'ACLOCAL_FLAGS': aclocalflags,
- 'ACLOCAL': "aclocal",
- 'PERL5LIB': perl5lib,
- 'GST_PLUGIN_PATH': gstpluginpath,
- 'GST_PLUGIN_PATH_1_0': gstpluginpath10,
- 'GST_REGISTRY': gstregistry,
- 'GST_REGISTRY_1_0': gstregistry10,
- 'PYTHONPATH': pythonpath,
- 'MONO_PATH': os.path.join(libdir, 'mono', '4.5'),
- 'MONO_GAC_PREFIX': prefix,
- 'GSTREAMER_ROOT': prefix,
- 'CERBERO_PREFIX': self.prefix,
- 'CERBERO_HOST_SOURCES': self.sources,
- 'RUSTUP_HOME': self.rustup_home,
- 'CARGO_HOME': self.cargo_home,
- }
+ env = {
+ 'LD_LIBRARY_PATH': ld_library_path,
+ 'LDFLAGS': ldflags,
+ 'PATH': path,
+ 'MANPATH': manpathdir,
+ 'INFOPATH': infopathdir,
+ 'GI_TYPELIB_PATH': typelibpath,
+ 'XDG_DATA_DIRS': xdgdatadir,
+ 'XDG_CONFIG_DIRS': xdgconfigdir,
+ 'XCURSOR_PATH': xcursordir,
+ 'ACLOCAL_FLAGS': aclocalflags,
+ 'ACLOCAL': 'aclocal',
+ 'PERL5LIB': perl5lib,
+ 'GST_PLUGIN_PATH': gstpluginpath,
+ 'GST_PLUGIN_PATH_1_0': gstpluginpath10,
+ 'GST_REGISTRY': gstregistry,
+ 'GST_REGISTRY_1_0': gstregistry10,
+ 'PYTHONPATH': pythonpath,
+ 'MONO_PATH': os.path.join(libdir, 'mono', '4.5'),
+ 'MONO_GAC_PREFIX': prefix,
+ 'GSTREAMER_ROOT': prefix,
+ 'CERBERO_PREFIX': self.prefix,
+ 'CERBERO_HOST_SOURCES': self.sources,
+ 'RUSTUP_HOME': self.rustup_home,
+ 'CARGO_HOME': self.cargo_home,
+ }
PkgConfig.set_executable(env, self)
PkgConfig.set_default_search_dir(os.path.join(prefix, 'share', 'pkgconfig'), env, self)
@@ -657,12 +730,13 @@ class Config (object):
# On Windows, building 32-bit on 64-bit is not cross-compilation since
# 32-bit Windows binaries run on 64-bit Windows via WOW64.
if self.platform == Platform.WINDOWS:
- if self.arch == Architecture.X86_64 and \
- self.target_arch == Architecture.X86:
+ if self.arch == Architecture.X86_64 and self.target_arch == Architecture.X86:
return False
- return self.target_platform != self.platform or \
- self.target_arch != self.arch or \
- self.target_distro_version != self.distro_version
+ return (
+ self.target_platform != self.platform
+ or self.target_arch != self.arch
+ or self.target_distro_version != self.distro_version
+ )
def cross_universal_type(self):
if not self.cross_compiling():
@@ -684,8 +758,7 @@ class Config (object):
if self.variants.uwp:
return False
if self.target_arch != self.arch:
- if self.target_arch == Architecture.X86 and \
- self.arch == Architecture.X86_64:
+ if self.target_arch == Architecture.X86 and self.arch == Architecture.X86_64:
return True
return False
return True
@@ -694,7 +767,7 @@ class Config (object):
return self._is_build_tools_config
def target_distro_version_gte(self, distro_version):
- assert distro_version.startswith(self.target_distro + "_")
+ assert distro_version.startswith(self.target_distro + '_')
return self.target_distro_version >= distro_version
def _create_paths(self):
@@ -710,7 +783,6 @@ class Config (object):
if self._is_build_tools_config:
self._create_path(os.path.join(self.prefix, 'var', 'tmp'))
-
def _create_build_tools_config(self):
# Use a common prefix for the build tools for all the configurations
# so that it can be reused
@@ -749,13 +821,13 @@ class Config (object):
# a framework path, but setuptools defaults to a posix prefix
# So just use a posix prefix everywhere consistently.
pyvars = {'base': '.', 'platbase': '.'}
- self.py_prefix = sysconfig.get_path('purelib', 'posix_prefix', vars=pyvars)
+ self.py_prefix = sysconfig.get_path('purelib', 'posix_prefix', vars=pyvars)
self.py_plat_prefix = sysconfig.get_path('platlib', 'posix_prefix', vars=pyvars)
# Make sure we also include the default non-versioned path on
# Windows in addition to the posix path.
self.py_win_prefix = sysconfig.get_path('purelib', 'nt', vars=pyvars)
- self.py_prefixes = [self.py_prefix ,self.py_plat_prefix]
+ self.py_prefixes = [self.py_prefix, self.py_plat_prefix]
if self.platform == Platform.WINDOWS:
self.py_prefixes.append(self.py_win_prefix)
self.py_prefixes = list(set(self.py_prefixes))
@@ -773,8 +845,7 @@ class Config (object):
self._create_path(path)
def _parse(self, filename, reset=True):
- config = {'os': os, '__file__': filename, 'env': self.config_env,
- 'cross': self.cross_compiling()}
+ config = {'os': os, '__file__': filename, 'env': self.config_env, 'cross': self.cross_compiling()}
if not reset:
for prop in self._properties:
if hasattr(self, prop):
@@ -783,20 +854,17 @@ class Config (object):
try:
parse_file(filename, config)
except:
- raise ConfigurationError(_('Could not include config file (%s)') %
- filename)
+ raise ConfigurationError(_('Could not include config file (%s)') % filename)
for key in self._properties:
if key in config:
self.set_property(key, config[key], True)
def _validate_properties(self):
if not validate_packager(self.packager):
- raise FatalError(_('packager "%s" must be in the format '
- '"Name <email>"') % self.packager)
+ raise FatalError(_('packager "%s" must be in the format ' '"Name <email>"') % self.packager)
def _check_windows_is_x86_64(self):
- if self.target_platform == Platform.WINDOWS and \
- self.arch == Architecture.X86:
+ if self.target_platform == Platform.WINDOWS and self.arch == Architecture.X86:
raise ConfigurationError('The GCC/MinGW toolchain requires an x86 64-bit OS.')
def _check_uninstalled(self):
@@ -832,19 +900,16 @@ class Config (object):
# Check if the config specified is a complete path, else search
# in the user config directory
if not os.path.exists(f):
- f = os.path.join(USER_CONFIG_DIR, f + "." + CONFIG_EXT)
+ f = os.path.join(USER_CONFIG_DIR, f + '.' + CONFIG_EXT)
if os.path.exists(f):
self._parse(f, reset=False)
else:
- raise ConfigurationError(_("Configuration file %s doesn't "
- "exist") % f)
+ raise ConfigurationError(_("Configuration file %s doesn't " 'exist') % f)
def _load_platform_config(self):
- platform_config = os.path.join(self.environ_dir, '%s.config' %
- self.target_platform)
- arch_config = os.path.join(self.environ_dir, '%s_%s.config' %
- (self.target_platform, self.target_arch))
+ platform_config = os.path.join(self.environ_dir, '%s.config' % self.target_platform)
+ arch_config = os.path.join(self.environ_dir, '%s_%s.config' % (self.target_platform, self.target_arch))
for config_path in [platform_config, arch_config]:
if os.path.exists(config_path):
@@ -871,19 +936,16 @@ class Config (object):
def _load_last_defaults(self):
# Set build tools defaults
- self.set_property('build_tools_prefix',
- os.path.join(self.home_dir, 'build-tools'))
- self.set_property('build_tools_sources',
- os.path.join(self.home_dir, 'sources', 'build-tools'))
- self.set_property('build_tools_logs',
- os.path.join(self.home_dir, 'logs', 'build-tools'))
+ self.set_property('build_tools_prefix', os.path.join(self.home_dir, 'build-tools'))
+ self.set_property('build_tools_sources', os.path.join(self.home_dir, 'sources', 'build-tools'))
+ self.set_property('build_tools_logs', os.path.join(self.home_dir, 'logs', 'build-tools'))
self.set_property('build_tools_cache', 'build-tools.cache')
# Set target platform defaults
platform_arch = '_'.join(self._get_toolchain_target_platform_arch())
- self.set_property('prefix', os.path.join(self.home_dir, "dist", platform_arch))
- self.set_property('sources', os.path.join(self.home_dir, "sources", platform_arch))
- self.set_property('logs', os.path.join(self.home_dir, "logs", platform_arch))
- self.set_property('cache_file', platform_arch + ".cache")
+ self.set_property('prefix', os.path.join(self.home_dir, 'dist', platform_arch))
+ self.set_property('sources', os.path.join(self.home_dir, 'sources', platform_arch))
+ self.set_property('logs', os.path.join(self.home_dir, 'logs', platform_arch))
+ self.set_property('cache_file', platform_arch + '.cache')
self.set_property('install_dir', self.prefix)
self.set_property('local_sources', self._default_local_sources_dir())
self.set_property('rust_prefix', os.path.join(self.home_dir, 'rust'))
@@ -902,18 +964,16 @@ class Config (object):
def _find_data_dir(self):
if self.uninstalled:
- self.data_dir = os.path.join(os.path.dirname(__file__),
- '..', 'data')
+ self.data_dir = os.path.join(os.path.dirname(__file__), '..', 'data')
self.data_dir = os.path.abspath(self.data_dir)
return
curdir = os.path.dirname(__file__)
- while not os.path.exists(os.path.join(curdir, 'share', 'cerbero',
- 'config')):
+ while not os.path.exists(os.path.join(curdir, 'share', 'cerbero', 'config')):
curdir = os.path.abspath(os.path.join(curdir, '..'))
if curdir == '/' or curdir[1:] == ':/':
# We reached the root without finding the data dir, which
# shouldn't happen
- raise FatalError("Data dir not found")
+ raise FatalError('Data dir not found')
self.data_dir = os.path.join(curdir, 'share', 'cerbero')
def _relative_path(self, path):
@@ -950,9 +1010,9 @@ class Config (object):
@lru_cache()
def _perl_version(self):
try:
- version = shell.check_output("perl -e 'print \"$]\";'")
+ version = shell.check_output('perl -e \'print "$]";\'')
except FatalError:
- m.warning(_("Perl not found, you may need to run bootstrap."))
+ m.warning(_('Perl not found, you may need to run bootstrap.'))
version = '0.000000'
# FIXME: when perl's mayor is >= 10
mayor = str(version[0])
@@ -991,6 +1051,7 @@ class Config (object):
def find_toml_module(self, system_only=False):
import importlib
+
if sys.version_info >= (3, 11, 0):
return importlib.import_module('tomllib')
for m in ('tomli', 'toml', 'tomlkit'):
diff --git a/cerbero/enums.py b/cerbero/enums.py
index 2fc0316b..5f56c73c 100644
--- a/cerbero/enums.py
+++ b/cerbero/enums.py
@@ -22,8 +22,10 @@ from cerbero.errors import FatalError
# Safest place to define this since this file imports very few modules
CERBERO_VERSION = '1.23.0.1'
+
class Platform:
- ''' Enumeration of supported platforms '''
+ """Enumeration of supported platforms"""
+
LINUX = 'linux'
WINDOWS = 'windows'
DARWIN = 'darwin'
@@ -32,7 +34,8 @@ class Platform:
class Architecture:
- ''' Enumeration of supported acrchitectures '''
+ """Enumeration of supported architectures"""
+
X86 = 'x86'
X86_64 = 'x86_64'
UNIVERSAL = 'universal'
@@ -43,12 +46,11 @@ class Architecture:
@staticmethod
def is_arm(arch):
- '''Returns whether the architecture is an ARM based one.
+ """Returns whether the architecture is an ARM based one.
Note that it will include 32bit *and* 64bit ARM targets. If you
wish to do something special for 64bit you should first check for
- that before calling this method.'''
- return arch in [Architecture.ARM, Architecture.ARMv7,
- Architecture.ARMv7S, Architecture.ARM64]
+ that before calling this method."""
+ return arch in [Architecture.ARM, Architecture.ARMv7, Architecture.ARMv7S, Architecture.ARM64]
@staticmethod
def is_arm32(arch):
@@ -56,13 +58,14 @@ class Architecture:
class Distro:
- ''' Enumeration of supported distributions '''
+ """Enumeration of supported distributions"""
+
DEBIAN = 'debian'
REDHAT = 'redhat'
SUSE = 'suse'
- WINDOWS = 'windows' # To be used as target_distro
- MSYS = 'msys' # When running on a native Windows with MSYS
- MSYS2 = 'msys2' # When running on a native Windows with MSYS2
+ WINDOWS = 'windows' # To be used as target_distro
+ MSYS = 'msys' # When running on a native Windows with MSYS
+ MSYS2 = 'msys2' # When running on a native Windows with MSYS2
ARCH = 'arch'
OS_X = 'osx'
IOS = 'ios'
@@ -72,7 +75,8 @@ class Distro:
class DistroVersion:
- ''' Enumeration of supported distribution versions, withing each distro, they must be sortable'''
+ """Enumeration of supported distribution versions, within each distro, they must be sortable"""
+
DEBIAN_SQUEEZE = 'debian_06_squeeze'
DEBIAN_WHEEZY = 'debian_07_wheezy'
DEBIAN_JESSIE = 'debian_08_jessie'
@@ -175,21 +179,21 @@ class DistroVersion:
ANDROID_ICE_CREAM_SANDWICH = 'android_14_ice_cream_sandwich' # API Level 14
ANDROID_JELLY_BEAN = 'android_16_jelly_bean' # API Level 16
ANDROID_KITKAT = 'android_19_kitkat' # API Level 19
- ANDROID_LOLLIPOP = 'android_21_lollipop' # API Level 21
- ANDROID_LOLLIPOP_MR1 = 'android_22_lollipop_mr1' # API Level 22
- ANDROID_MARSHMALLOW = 'android_23_marshmallow' # API Level 23
- ANDROID_NOUGAT = 'android_24_nougat' # API Level 24
- ANDROID_NOUGAT_MR1 = 'android_25_nougat_mr1' # API Level 25
- ANDROID_OREO = 'android_26_oreo' # API Level 26
- ANDROID_OREO_MR1 = 'android_27_oreo_mr1' # API Level 27
- ANDROID_PIE = 'android_28_pie' # API Level 28
- ANDROID_Q = 'android_29_q' # API Level 29
+ ANDROID_LOLLIPOP = 'android_21_lollipop' # API Level 21
+ ANDROID_LOLLIPOP_MR1 = 'android_22_lollipop_mr1' # API Level 22
+ ANDROID_MARSHMALLOW = 'android_23_marshmallow' # API Level 23
+ ANDROID_NOUGAT = 'android_24_nougat' # API Level 24
+ ANDROID_NOUGAT_MR1 = 'android_25_nougat_mr1' # API Level 25
+ ANDROID_OREO = 'android_26_oreo' # API Level 26
+ ANDROID_OREO_MR1 = 'android_27_oreo_mr1' # API Level 27
+ ANDROID_PIE = 'android_28_pie' # API Level 28
+ ANDROID_Q = 'android_29_q' # API Level 29
NONE_UCLIBC = 'none_uclibc'
NONE_GLIBC = 'none_glibc'
@staticmethod
def get_android_api_version(version):
- '''Returns the corresponding android api version'''
+ """Returns the corresponding android api version"""
if version == DistroVersion.ANDROID_GINGERBREAD:
return 9
elif version == DistroVersion.ANDROID_ICE_CREAM_SANDWICH:
@@ -217,7 +221,7 @@ class DistroVersion:
elif version == DistroVersion.ANDROID_Q:
return 29
else:
- raise FatalError("DistroVersion not supported")
+ raise FatalError('DistroVersion not supported')
@staticmethod
def get_ios_sdk_version(version):
@@ -225,8 +229,8 @@ class DistroVersion:
raise FatalError('Not an iOS version: ' + version)
return [int(s) for s in version[4:].split('_')]
-class LicenseDescription:
+class LicenseDescription:
def __init__(self, acronym, pretty_name):
self.acronym = acronym
self.pretty_name = pretty_name
@@ -235,45 +239,31 @@ class LicenseDescription:
return self.acronym < other.acronym
def __repr__(self):
- return "LicenseDescription(%s)" % self.acronym
+ return 'LicenseDescription(%s)' % self.acronym
+
class License:
- ''' Enumeration of licensesversions '''
- Apachev2 = LicenseDescription('Apache-2.0',
- 'Apache License, version 2.0')
- BSD = LicenseDescription('BSD',
- 'BSD License')
- BSD_like = LicenseDescription('BSD-like',
- 'BSD-like License')
- FreeType = LicenseDescription('FreeType',
- 'FreeType License')
- GPLv2Plus = LicenseDescription('GPL-2+',
- 'GNU General Public License, version 2 or later')
- GPLv3Plus = LicenseDescription('GPL-3+',
- 'GNU General Public License, version 3 or later')
- LGPLv2Plus = LicenseDescription('LGPL-2+',
- 'GNU Lesser General Public License, version 2 or later')
- LGPLv2_1Plus = LicenseDescription('LGPL-2.1+',
- 'GNU Lesser General Public License, version 2.1 or later')
- LGPLv3 = LicenseDescription('LGPL-3',
- 'GNU Lesser General Public License, version 3')
- LGPLv3Plus = LicenseDescription('LGPL-3+',
- 'GNU Lesser General Public License, version 3 or later')
- LibPNG = LicenseDescription('LibPNG',
- 'LibPNG License')
- MPLv1_1 = LicenseDescription('MPL-1.1',
- 'Mozilla Public License Version 1.1')
- MPLv2 = LicenseDescription('MPL-2',
- 'Mozilla Public License Version 2.0')
- MIT = LicenseDescription('MIT',
- 'MIT License')
- OPENSSL = LicenseDescription('OpenSSL',
- 'OpenSSL License')
- Proprietary = LicenseDescription('Proprietary',
- 'Proprietary License')
+ """Enumeration of license versions"""
+
+ Apachev2 = LicenseDescription('Apache-2.0', 'Apache License, version 2.0')
+ BSD = LicenseDescription('BSD', 'BSD License')
+ BSD_like = LicenseDescription('BSD-like', 'BSD-like License')
+ FreeType = LicenseDescription('FreeType', 'FreeType License')
+ GPLv2Plus = LicenseDescription('GPL-2+', 'GNU General Public License, version 2 or later')
+ GPLv3Plus = LicenseDescription('GPL-3+', 'GNU General Public License, version 3 or later')
+ LGPLv2Plus = LicenseDescription('LGPL-2+', 'GNU Lesser General Public License, version 2 or later')
+ LGPLv2_1Plus = LicenseDescription('LGPL-2.1+', 'GNU Lesser General Public License, version 2.1 or later')
+ LGPLv3 = LicenseDescription('LGPL-3', 'GNU Lesser General Public License, version 3')
+ LGPLv3Plus = LicenseDescription('LGPL-3+', 'GNU Lesser General Public License, version 3 or later')
+ LibPNG = LicenseDescription('LibPNG', 'LibPNG License')
+ MPLv1_1 = LicenseDescription('MPL-1.1', 'Mozilla Public License Version 1.1')
+ MPLv2 = LicenseDescription('MPL-2', 'Mozilla Public License Version 2.0')
+ MIT = LicenseDescription('MIT', 'MIT License')
+ OPENSSL = LicenseDescription('OpenSSL', 'OpenSSL License')
+ Proprietary = LicenseDescription('Proprietary', 'Proprietary License')
PublicDomain = LicenseDescription('PublicDomain', 'Public Domain')
- Misc = LicenseDescription('Misc',
- 'Miscellaneous license information')
+ Misc = LicenseDescription('Misc', 'Miscellaneous license information')
+
class LibraryType:
NONE = 'none'
diff --git a/cerbero/errors.py b/cerbero/errors.py
index 257a9f9d..d70de181 100644
--- a/cerbero/errors.py
+++ b/cerbero/errors.py
@@ -39,12 +39,15 @@ class UsageError(CerberoException):
class FatalError(CerberoException):
header = 'Fatal Error: '
+
def __init__(self, msg='', arch=''):
self.arch = arch
CerberoException.__init__(self, msg)
+
class CommandError(FatalError):
header = 'Command Error: '
+
def __init__(self, msg, cmd, returncode):
msg = 'Running {!r} returned {}\n{}'.format(cmd, returncode, msg or '')
FatalError.__init__(self, msg)
@@ -58,40 +61,34 @@ class BuildStepError(CerberoException):
self.recipe = recipe
self.step = step
self.arch = arch
- CerberoException.__init__(self, _("Recipe '%s' failed at the build "
- "step '%s'\n%s") % (recipe, step, trace))
+ CerberoException.__init__(self, _("Recipe '%s' failed at the build " "step '%s'\n%s") % (recipe, step, trace))
class RecipeNotFoundError(CerberoException):
-
def __init__(self, recipe):
CerberoException.__init__(self, _("Recipe '%s' not found") % recipe)
class PackageNotFoundError(CerberoException):
-
def __init__(self, package):
CerberoException.__init__(self, _("Package '%s' not found") % package)
class EmptyPackageError(CerberoException):
-
def __init__(self, package):
CerberoException.__init__(self, _("Package '%s' is empty") % package)
class MissingPackageFilesError(CerberoException):
-
def __init__(self, files):
- CerberoException.__init__(self, _("The following files required by "
- "this package are missing:\n %s") % '\n'.join(files))
+ CerberoException.__init__(
+ self, _('The following files required by ' 'this package are missing:\n %s') % '\n'.join(files)
+ )
class InvalidRecipeError(CerberoException):
-
def __init__(self, recipe, message=''):
- CerberoException.__init__(self,
- _("Recipe %s is invalid:\n%s") % (recipe, message))
+ CerberoException.__init__(self, _('Recipe %s is invalid:\n%s') % (recipe, message))
class AbortedError(Exception):
diff --git a/cerbero/hacks.py b/cerbero/hacks.py
index adf51079..1e59e1a7 100644
--- a/cerbero/hacks.py
+++ b/cerbero/hacks.py
@@ -27,6 +27,7 @@ import re
import io
from xml.dom import minidom
from cerbero.utils import etree
+
oldwrite = etree.ElementTree.write
@@ -43,12 +44,12 @@ def write(self, file_or_filename, encoding=None, pretty_print=False):
tmpfile = io.BytesIO()
oldwrite(self, tmpfile, encoding)
tmpfile.seek(0)
- if hasattr(file_or_filename, "write"):
+ if hasattr(file_or_filename, 'write'):
out_file = file_or_filename
else:
- out_file = open(file_or_filename, "wb")
+ out_file = open(file_or_filename, 'wb')
out_file.write(pretify(tmpfile.read()).encode())
- if not hasattr(file_or_filename, "write"):
+ if not hasattr(file_or_filename, 'write'):
out_file.close()
@@ -108,17 +109,19 @@ import shutil
from shutil import rmtree as shutil_rmtree
from cerbero.utils.shell import new_call as shell_call
+
def rmtree(path, ignore_errors=False, onerror=None):
- '''
+ """
shutil.rmtree often fails with access denied. On Windows this happens when
a file is readonly. On Linux this can happen when a directory doesn't have
the appropriate permissions (Ex: chmod 200) and many other cases.
- '''
+ """
+
def force_removal(func, path, excinfo):
- '''
+ """
This is the only way to ensure that readonly files are deleted by
rmtree on Windows. See: http://bugs.python.org/issue19643
- '''
+ """
# Due to the way 'onerror' is implemented in shutil.rmtree, errors
# encountered while listing directories cannot be recovered from. So if
# a directory cannot be listed, shutil.rmtree assumes that it is empty
@@ -131,12 +134,14 @@ def rmtree(path, ignore_errors=False, onerror=None):
func(path)
except OSError:
shell_call('rm -rf ' + path)
+
# We try to not use `rm` because on Windows because it's about 20-30x slower
if not onerror:
shutil_rmtree(path, ignore_errors, onerror=force_removal)
else:
shutil_rmtree(path, ignore_errors, onerror)
+
shutil.rmtree = rmtree
@@ -147,10 +152,11 @@ shutil.rmtree = rmtree
import zipfile
from zipfile import ZipFile as zipfile_ZipFile
+
class ZipFile(zipfile_ZipFile):
def _extract_member(self, member, targetpath, pwd):
"""Extract the ZipInfo object 'member' to a physical
- file on the path targetpath.
+ file on the path targetpath.
"""
if not isinstance(member, zipfile.ZipInfo):
member = self.getinfo(member)
@@ -165,8 +171,7 @@ class ZipFile(zipfile_ZipFile):
# UNC path, redundant separators, "." and ".." components.
arcname = os.path.splitdrive(arcname)[1]
invalid_path_parts = ('', os.path.curdir, os.path.pardir)
- arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
- if x not in invalid_path_parts)
+ arcname = os.path.sep.join(x for x in arcname.split(os.path.sep) if x not in invalid_path_parts)
if os.path.sep == '\\':
# filter illegal characters on Windows
arcname = self._sanitize_windows_name(arcname, os.path.sep)
@@ -197,8 +202,7 @@ class ZipFile(zipfile_ZipFile):
os.symlink(self.read(member), targetpath)
return targetpath
- with self.open(member, pwd=pwd) as source, \
- open(targetpath, "wb") as target:
+ with self.open(member, pwd=pwd) as source, open(targetpath, 'wb') as target:
shutil.copyfileobj(source, target)
attr = member.external_attr >> 16
@@ -207,6 +211,7 @@ class ZipFile(zipfile_ZipFile):
return targetpath
+
zipfile.ZipFile = ZipFile
### Python os.symlink bug ###
@@ -218,10 +223,12 @@ zipfile.ZipFile = ZipFile
from pathlib import WindowsPath
from os import symlink as os_symlink
+
def symlink(src, dst, **kwargs):
src = str(WindowsPath(src))
os_symlink(src, dst, **kwargs)
+
if sys.platform.startswith('win'):
os.symlink = symlink
@@ -234,6 +241,7 @@ if sys.platform.startswith('win'):
import tarfile
+
def symlink_overwrite(src, dst, **kwargs):
# Allow overwriting symlinks
try:
@@ -243,5 +251,6 @@ def symlink_overwrite(src, dst, **kwargs):
pass
symlink(src, dst, **kwargs)
+
if sys.platform.startswith('win'):
tarfile.os.symlink = symlink_overwrite
diff --git a/cerbero/ide/pkgconfig.py b/cerbero/ide/pkgconfig.py
index e8c22e14..774cf24f 100644
--- a/cerbero/ide/pkgconfig.py
+++ b/cerbero/ide/pkgconfig.py
@@ -25,10 +25,11 @@ from cerbero.enums import Distro
from cerbero.errors import FatalError
from cerbero.utils import shell, to_winpath
+
class PkgConfig(object):
- '''
+ """
pkg-config wrapper
- '''
+ """
cmd = 'pkg-config'
@@ -90,7 +91,7 @@ class PkgConfig(object):
d = pkgconfig.include_dirs()
for p in d:
if not os.path.isabs(p):
- raise FatalError("pkg-config file %s contains relative include dir %s" % (pc, p))
+ raise FatalError('pkg-config file %s contains relative include dir %s' % (pc, p))
# Normalize before appending
include_dirs.append(os.path.abspath(p))
return list(set(include_dirs))
diff --git a/cerbero/ide/vs/env.py b/cerbero/ide/vs/env.py
index 2563cb0b..7ef19972 100644
--- a/cerbero/ide/vs/env.py
+++ b/cerbero/ide/vs/env.py
@@ -36,7 +36,7 @@ VCVARSALLS = {
r'Microsoft Visual Studio\2017\BuildTools',
r'Microsoft Visual Studio\2017\Preview',
),
- r'VC\Auxiliary\Build\vcvarsall.bat'
+ r'VC\Auxiliary\Build\vcvarsall.bat',
),
'vs16': (
(
@@ -46,7 +46,7 @@ VCVARSALLS = {
r'Microsoft Visual Studio\2019\BuildTools',
r'Microsoft Visual Studio\2019\Preview',
),
- r'VC\Auxiliary\Build\vcvarsall.bat'
+ r'VC\Auxiliary\Build\vcvarsall.bat',
),
'vs17': (
(
@@ -56,10 +56,11 @@ VCVARSALLS = {
r'Microsoft Visual Studio\2022\BuildTools',
r'Microsoft Visual Studio\2022\Preview',
),
- r'VC\Auxiliary\Build\vcvarsall.bat'
+ r'VC\Auxiliary\Build\vcvarsall.bat',
),
}
+
def get_program_files_dir():
if 'PROGRAMFILES(X86)' in os.environ:
# Windows 64-bit
@@ -69,6 +70,7 @@ def get_program_files_dir():
return Path(os.environ['PROGRAMFILES'])
raise FatalError('Could not find path to 32-bit Program Files directory')
+
def get_vs_year_version(vcver):
if vcver == 'vs15':
return '2017'
@@ -78,6 +80,7 @@ def get_vs_year_version(vcver):
return '2022'
raise RuntimeError('Unknown toolset value {!r}'.format(vcver))
+
def _get_custom_vs_install(vs_version, vs_install_path):
path = Path(vs_install_path)
if not path.is_dir():
@@ -85,21 +88,23 @@ def _get_custom_vs_install(vs_version, vs_install_path):
suffix = VCVARSALLS[vs_version][1]
path = path / suffix
if not path.is_file():
- raise FatalError('Can\'t find vcvarsall.bat inside vs_install_path {!r}'.format(path))
+ raise FatalError("Can't find vcvarsall.bat inside vs_install_path {!r}".format(path))
return path.as_posix(), vs_version
+
def _sort_vs_installs(installs):
return sorted(installs, reverse=True, key=lambda x: x['installationVersion'])
+
def _get_vswhere_vs_install(vswhere, vs_versions):
import json
+
vswhere_exe = str(vswhere)
# Get a list of installation paths for all installed Visual Studio
# instances, from VS 2013 to the latest one, sorted from newest to
# oldest, and including preview releases.
# Will not include BuildTools installations.
- out = check_output([vswhere_exe, '-legacy', '-prerelease', '-format',
- 'json', '-utf8'])
+ out = check_output([vswhere_exe, '-legacy', '-prerelease', '-format', 'json', '-utf8'])
installs = _sort_vs_installs(json.loads(out))
program_files = get_program_files_dir()
for install in installs:
@@ -113,19 +118,26 @@ def _get_vswhere_vs_install(vswhere, vs_versions):
# Find the location of the Visual Studio installation
if path.is_file():
return path.as_posix(), vs_version
- m.warning('vswhere.exe could not find Visual Studio version(s) {}. Falling '
- 'back to manual searching...' .format(', '.join(vs_versions)))
+ m.warning(
+ 'vswhere.exe could not find Visual Studio version(s) {}. Falling ' 'back to manual searching...'.format(
+ ', '.join(vs_versions)
+ )
+ )
return None
+
def get_vcvarsall(vs_version, vs_install_path):
known_vs_versions = sorted(VCVARSALLS.keys(), reverse=True)
if vs_version:
if vs_version not in VCVARSALLS:
- raise FatalError('Requested Visual Studio version {} is not one of: '
- '{}'.format(vs_version, ', '.join(known_vs_versions)))
+ raise FatalError(
+ 'Requested Visual Studio version {} is not one of: ' '{}'.format(
+ vs_version, ', '.join(known_vs_versions)
+ )
+ )
# Do we want to use a specific known Visual Studio installation?
if vs_install_path:
- assert(vs_version)
+ assert vs_version
return _get_custom_vs_install(vs_version, vs_install_path)
# Start searching.
if vs_version:
@@ -154,8 +166,11 @@ def get_vcvarsall(vs_version, vs_install_path):
# Find the location of the Visual Studio installation
if path.is_file():
return path.as_posix(), vs_version
- raise FatalError('Microsoft Visual Studio not found. If you installed it, '
- 'please file a bug. We looked for: ' + ', '.join(vs_versions))
+ raise FatalError(
+ 'Microsoft Visual Studio not found. If you installed it, '
+ 'please file a bug. We looked for: ' + ', '.join(vs_versions)
+ )
+
def append_path(var, path, sep=';'):
if var and not var.endswith(sep):
@@ -165,6 +180,7 @@ def append_path(var, path, sep=';'):
var += path
return var
+
def get_vcvarsall_arg(arch, target_arch):
if target_arch == Architecture.X86:
# If arch is x86_64, this will cause the WOW64 version of MSVC to be
@@ -188,9 +204,10 @@ def get_vcvarsall_arg(arch, target_arch):
elif arch == Architecture.ARM64 and target_arch == Architecture.ARM64:
return 'arm64'
elif Architecture.is_arm(arch) and Architecture.is_arm(target_arch):
- return 'arm'
+ return 'arm'
raise FatalError('Unsupported arch/target_arch: {0}/{1}'.format(arch, target_arch))
+
def run_and_get_env(cmd):
env = os.environ.copy()
env['VSCMD_ARG_no_logo'] = '1'
@@ -199,20 +216,20 @@ def run_and_get_env(cmd):
# GITLAB_USER_NAME when the name of the user triggering the pipeline has
# non-ascii characters.
# The env vars set by MSVC will always be correctly encoded.
- output = subprocess.check_output(cmd, shell=True, env=env,
- universal_newlines=True,
- errors='ignore')
+ output = subprocess.check_output(cmd, shell=True, env=env, universal_newlines=True, errors='ignore')
lines = []
for line in output.split('\n'):
if '=' in line:
lines.append(line)
return lines
+
# For a specific env var, get only the values that were prepended to it by MSVC
def get_envvar_msvc_values(msvc, nomsvc, sep=';'):
index = msvc.index(nomsvc)
return msvc[0:index]
+
@lru_cache()
def get_msvc_env(arch, target_arch, uwp, version=None, vs_install_path=None):
ret_env = {}
diff --git a/cerbero/ide/vs/genlib.py b/cerbero/ide/vs/genlib.py
index 60cc3c8a..674b9b2e 100644
--- a/cerbero/ide/vs/genlib.py
+++ b/cerbero/ide/vs/genlib.py
@@ -28,11 +28,12 @@ from cerbero.errors import FatalError
class GenLib(object):
- '''
+ """
Generates an import library that can be used in Visual Studio from a DLL,
using 'gendef' to create a .def file and then libtool to create the import
library (.lib)
- '''
+ """
+
warned_dlltool = False
filename = 'unknown'
@@ -62,8 +63,9 @@ class GenLib(object):
def gendef(self, dllpath, outputdir, libname):
defname = libname + '.def'
- def_contents = shell.check_output(self.gendef_bin + ['-', dllpath], outputdir,
- logfile=self.logfile, env=self.config.env)
+ def_contents = shell.check_output(
+ self.gendef_bin + ['-', dllpath], outputdir, logfile=self.logfile, env=self.config.env
+ )
# If the output doesn't contain a 'LIBRARY' directive, gendef errored
# out. However, gendef always returns 0 so we need to inspect the
# output and guess.
@@ -105,9 +107,11 @@ class GenLib(object):
shell.new_call(cmd, outputdir, logfile=self.logfile, env=env)
else:
if not GenLib.warned_dlltool:
- m.warning("Using dlltool instead of lib.exe! All generated .lib "
- "files will have problems with Visual Studio, see "
- "http://sourceware.org/bugzilla/show_bug.cgi?id=12633")
+ m.warning(
+ 'Using dlltool instead of lib.exe! All generated .lib '
+ 'files will have problems with Visual Studio, see '
+ 'http://sourceware.org/bugzilla/show_bug.cgi?id=12633'
+ )
GenLib.warned_dlltool = True
self.dlltool(defname, dllname, outputdir)
return os.path.join(outputdir, self.filename)
@@ -122,8 +126,9 @@ class GenLib(object):
paths = self.config.msvc_env_for_toolchain['PATH'].get()
return shutil.which('lib', path=paths), paths
+
class GenGnuLib(GenLib):
- '''
+ """
Generates an import library (libfoo.dll.a; not foo.lib) that is in a format
that allows GNU ld to resolve all symbols exported by a DLL created by MSVC.
@@ -132,7 +137,7 @@ class GenGnuLib(GenLib):
symbols from the import library. It can find them if you pass it the DLL
directly, but that's a terrible idea and breaks how library searching works,
so we create a GNU-compatible import library which will always work.
- '''
+ """
def create(self, libname, dllpath, platform, target_arch, outputdir):
# libfoo.dll.a must start with 'lib'
diff --git a/cerbero/ide/vs/pkgconfig2vsprops.py b/cerbero/ide/vs/pkgconfig2vsprops.py
index 43f7e7a4..08c6b27f 100755
--- a/cerbero/ide/vs/pkgconfig2vsprops.py
+++ b/cerbero/ide/vs/pkgconfig2vsprops.py
@@ -28,15 +28,11 @@ from cerbero.utils import messages as m
class PkgConfig2VSProps(object):
-
generators = {'vs2008': VSProps, 'vs2010': Props}
- def __init__(self, libname, target='vs2010', prefix=None,
- prefix_replacement=None, inherit_common=False, env=None):
-
+ def __init__(self, libname, target='vs2010', prefix=None, prefix_replacement=None, inherit_common=False, env=None):
if target not in self.generators:
- raise FatalError('Target version must be one of %s' %
- list(generators.keys()))
+ raise FatalError('Target version must be one of %s' % list(generators.keys()))
pkgconfig = PkgConfig([libname], False, env=env)
requires = pkgconfig.requires()
@@ -45,25 +41,19 @@ class PkgConfig2VSProps(object):
libs = pkgconfig.libraries()
if None not in [prefix_replacement, prefix]:
- libraries_dirs = [x.replace(prefix, prefix_replacement)
- for x in libraries_dirs]
- include_dirs = [x.replace(prefix, prefix_replacement)
- for x in include_dirs]
- self.vsprops = self.generators[target](libname, requires, include_dirs,
- libraries_dirs, libs, inherit_common)
+ libraries_dirs = [x.replace(prefix, prefix_replacement) for x in libraries_dirs]
+ include_dirs = [x.replace(prefix, prefix_replacement) for x in include_dirs]
+ self.vsprops = self.generators[target](libname, requires, include_dirs, libraries_dirs, libs, inherit_common)
def create(self, outdir):
self.vsprops.create(outdir)
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(description='Creates VS property '
- 'sheets with pkg-config')
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Creates VS property ' 'sheets with pkg-config')
parser.add_argument('library', help='Library name')
- parser.add_argument('-o', type=str, default='.',
- help='Output directory for generated files')
- parser.add_argument('-c', type=str, default='vs2010',
- help='Target version (vs2008 or vs2010) name')
+ parser.add_argument('-o', type=str, default='.', help='Output directory for generated files')
+ parser.add_argument('-c', type=str, default='vs2010', help='Target version (vs2008 or vs2010) name')
generators = {'vs2008': VSProps, 'vs2010': Props}
args = parser.parse_args(sys.argv[1:])
@@ -72,6 +62,7 @@ if __name__ == "__main__":
p2v.create(args.o)
except Exception as e:
import traceback
+
traceback.print_exc()
m.error(str(e))
exit(1)
diff --git a/cerbero/ide/vs/props.py b/cerbero/ide/vs/props.py
index 65e21d44..cf5fca9a 100644
--- a/cerbero/ide/vs/props.py
+++ b/cerbero/ide/vs/props.py
@@ -21,24 +21,21 @@ from cerbero.utils import etree, to_winpath
class PropsBase(object):
-
def __init__(self, name):
self.name = name
self._add_root()
self._add_skeleton()
def _add_root(self):
- self.root = etree.Element('Project', ToolsVersion='4.0',
- xmlns='http://schemas.microsoft.com/developer/msbuild/2003')
+ self.root = etree.Element(
+ 'Project', ToolsVersion='4.0', xmlns='http://schemas.microsoft.com/developer/msbuild/2003'
+ )
def _add_skeleton(self):
- self.import_group = etree.SubElement(self.root, 'ImportGroup',
- Label='PropertySheets')
- self.user_macros_group = etree.SubElement(self.root, 'PropertyGroup',
- Label='UserMacros')
+ self.import_group = etree.SubElement(self.root, 'ImportGroup', Label='PropertySheets')
+ self.user_macros_group = etree.SubElement(self.root, 'PropertyGroup', Label='UserMacros')
self.property_group = etree.SubElement(self.root, 'PropertyGroup')
- self.item_definition_group = etree.SubElement(self.root,
- 'ItemDefinitionGroup')
+ self.item_definition_group = etree.SubElement(self.root, 'ItemDefinitionGroup')
self.item_group = etree.SubElement(self.root, 'ItemGroup')
def _add_macro(self, name, value):
@@ -52,36 +49,29 @@ class PropsBase(object):
def _import_property(self, name):
cond = '$(%sImported)!=true' % self._format_name(name)
- etree.SubElement(self.import_group, 'Import', Condition=cond,
- Project='%s.props' % name)
+ etree.SubElement(self.import_group, 'Import', Condition=cond, Project='%s.props' % name)
def create(self, outdir):
el = etree.ElementTree(self.root)
- el.write(os.path.join(outdir, '%s.props' % self.name),
- encoding='utf-8', pretty_print=True)
+ el.write(os.path.join(outdir, '%s.props' % self.name), encoding='utf-8', pretty_print=True)
def _add_compiler_props(self):
- self.compiler = etree.SubElement(self.item_definition_group,
- 'ClCompile')
+ self.compiler = etree.SubElement(self.item_definition_group, 'ClCompile')
def _add_linker_props(self):
self.linker = etree.SubElement(self.item_definition_group, 'Link')
def _add_include_dirs(self, dirs):
- self._add_var(self.compiler, 'AdditionalIncludeDirectories',
- self._format_paths(dirs))
+ self._add_var(self.compiler, 'AdditionalIncludeDirectories', self._format_paths(dirs))
def _add_libs_dirs(self, dirs):
- self._add_var(self.linker, 'AdditionalLibraryDirectories',
- self._format_paths(dirs))
+ self._add_var(self.linker, 'AdditionalLibraryDirectories', self._format_paths(dirs))
def _add_libs(self, libs):
- self._add_var(self.linker, 'AdditionalDependencies',
- self._format_libs(libs))
+ self._add_var(self.linker, 'AdditionalDependencies', self._format_libs(libs))
def _add_imported_variable(self):
- el = etree.SubElement(self.property_group, '%sImported' %
- self._format_name(self.name))
+ el = etree.SubElement(self.property_group, '%sImported' % self._format_name(self.name))
el.text = 'true'
def _add_var(self, parent, name, content):
@@ -103,7 +93,6 @@ class PropsBase(object):
class CommonProps(PropsBase):
-
def __init__(self, prefix_macro):
PropsBase.__init__(self, 'Common')
self._add_root()
@@ -114,17 +103,16 @@ class CommonProps(PropsBase):
class Props(PropsBase):
- '''
+ """
Creates a MSBUILD properties sheet that imitaties a pkgconfig files to link
against a library from VS:
* inherits from others properties sheets
* add additional includes directories
* add additional libraries directories
* add link libraries
- '''
+ """
- def __init__(self, name, requires, include_dirs, libs_dirs, libs,
- inherit_common=False):
+ def __init__(self, name, requires, include_dirs, libs_dirs, libs, inherit_common=False):
PropsBase.__init__(self, name)
if inherit_common:
requires.append('Common')
diff --git a/cerbero/ide/vs/vsprops.py b/cerbero/ide/vs/vsprops.py
index 67b5b705..0bb47f43 100644
--- a/cerbero/ide/vs/vsprops.py
+++ b/cerbero/ide/vs/vsprops.py
@@ -22,69 +22,62 @@ from cerbero.utils import etree, to_winpath
class VSPropsBase(object):
-
def __init__(self, name):
self.name = name
def _add_root(self, name):
- self.root = etree.Element("VisualStudioPropertySheet",
- ProjectType="Visual C++", Version="8.00", Name=name)
+ self.root = etree.Element('VisualStudioPropertySheet', ProjectType='Visual C++', Version='8.00', Name=name)
def create(self, outdir):
el = etree.ElementTree(self.root)
- el.write(os.path.join(outdir, '%s.vsprops' % self.name),
- encoding='utf-8')
+ el.write(os.path.join(outdir, '%s.vsprops' % self.name), encoding='utf-8')
class CommonVSProps(VSPropsBase):
-
def __init__(self, prefix, prefix_macro):
VSPropsBase.__init__(self, 'Common')
self._add_root('Common')
self._add_sdk_root_macro(prefix, prefix_macro)
def _add_sdk_root_macro(self, prefix, prefix_macro):
- etree.SubElement(self.root, 'Macro', Name=prefix_macro,
- Value=to_winpath(prefix))
+ etree.SubElement(self.root, 'Macro', Name=prefix_macro, Value=to_winpath(prefix))
class VSProps(VSPropsBase):
- '''
+ """
Creates an VS properties sheet that imitaties a pkgconfig files to link
against a library from VS:
* inherits from others properties sheets
* add additional includes directories
* add additional libraries directories
* add link libraries
- '''
+ """
- def __init__(self, name, requires, include_dirs, libs_dirs, libs,
- inherit_common=False):
+ def __init__(self, name, requires, include_dirs, libs_dirs, libs, inherit_common=False):
VSPropsBase.__init__(self, name)
if inherit_common:
requires.append('Common')
self._add_root(name, requires)
- self.root.set('InheritedPropertySheets',
- self._format_requires(requires))
+ self.root.set('InheritedPropertySheets', self._format_requires(requires))
self._add_include_dirs(include_dirs)
self._add_libs(libs, libs_dirs)
def _add_root(self, name, requires):
VSPropsBase._add_root(self, name)
- self.root.set('InheritedPropertySheets',
- self._format_requires(requires))
+ self.root.set('InheritedPropertySheets', self._format_requires(requires))
def _add_include_dirs(self, dirs):
- self._add_tool("VCCLCompilerTool",
- AdditionalIncludeDirectories=self._format_paths(dirs))
+ self._add_tool('VCCLCompilerTool', AdditionalIncludeDirectories=self._format_paths(dirs))
def _add_libs(self, libs, dirs):
- self._add_tool("VCLinkerTool",
- AdditionalDependencies=self._format_libs(libs),
- AdditionalLibraryDirectories=self._format_paths(dirs))
+ self._add_tool(
+ 'VCLinkerTool',
+ AdditionalDependencies=self._format_libs(libs),
+ AdditionalLibraryDirectories=self._format_paths(dirs),
+ )
def _format_requires(self, requires):
- return ';'.join([".\\%s.vsprops" % x for x in requires])
+ return ';'.join(['.\\%s.vsprops' % x for x in requires])
def _format_libs(self, libs):
return ' '.join(['%s.lib' % x for x in libs])
@@ -94,7 +87,7 @@ class VSProps(VSPropsBase):
def _fix_path_and_quote(self, path):
path = to_winpath(path)
- return "&quot;%s&quot;" % path
+ return '&quot;%s&quot;' % path
def _add_tool(self, name, **kwargs):
etree.SubElement(self.root, 'Tool', Name=name, **kwargs)
diff --git a/cerbero/ide/xcode/fwlib.py b/cerbero/ide/xcode/fwlib.py
index fca15785..abd8c8ce 100644
--- a/cerbero/ide/xcode/fwlib.py
+++ b/cerbero/ide/xcode/fwlib.py
@@ -34,12 +34,12 @@ from cerbero.utils import messages as m
class FrameworkLibrary(object):
- '''
+ """
Combine several shared library into a single shared library to be used
as a Framework.
The complete list of shared libraries needed are guessed with pkg-config
but full paths can be used too with use_pkgconfig=False
- '''
+ """
def __init__(self, min_version, target, libname, install_name, libraries, arch, env=None):
self.libname = libname
@@ -69,7 +69,7 @@ class FrameworkLibrary(object):
libspaths = []
for lib in libs:
for libdir in libdirs:
- libpath = os.path.join(libdir, self._get_lib_file_name (lib))
+ libpath = os.path.join(libdir, self._get_lib_file_name(lib))
if not os.path.exists(libpath):
continue
libspaths.append(os.path.realpath(libpath))
@@ -85,8 +85,7 @@ class FrameworkLibrary(object):
class DynamicFrameworkLibrary(FrameworkLibrary):
def _create_framework_library(self, libraries):
- cmdline = ['clang', '-headerpad_max_install_names', '-dynamiclib',
- '-o', self.libname, '-arch', self.arch]
+ cmdline = ['clang', '-headerpad_max_install_names', '-dynamiclib', '-o', self.libname, '-arch', self.arch]
if self.target == Distro.OS_X:
cmdline += ['-mmacosx-version-min=%s' % self.min_version]
@@ -99,12 +98,13 @@ class DynamicFrameworkLibrary(FrameworkLibrary):
def _get_lib_file_name(self, lib):
return 'lib%s.dylib' % lib
+
class BuildStatusPrinter:
def __init__(self, archs, interactive):
self.archs = archs
self.interactive = interactive
- self.arch_total = collections.defaultdict(lambda : 0)
- self.arch_count = collections.defaultdict(lambda : 0)
+ self.arch_total = collections.defaultdict(lambda: 0)
+ self.arch_count = collections.defaultdict(lambda: 0)
def inc_arch(self, arch):
self.arch_count[arch] += 1
@@ -115,34 +115,39 @@ class BuildStatusPrinter:
m.output_status(self._generate_status_line())
def _generate_status_line(self):
- s = "["
- s += ", ".join([str(arch) + ": (" + str(self.arch_count[arch]) + "/" + str(self.arch_total[arch]) + ")" for arch in self.archs])
- s += "]"
+ s = '['
+ s += ', '.join(
+ [
+ str(arch) + ': (' + str(self.arch_count[arch]) + '/' + str(self.arch_total[arch]) + ')'
+ for arch in self.archs
+ ]
+ )
+ s += ']'
return s
+
class StaticFrameworkLibrary(FrameworkLibrary):
def _get_lib_file_name(self, lib):
return 'lib%s.a' % lib
async def _split_static_lib(self, lib, thin_arch=None):
- '''Splits the static lib @lib into its object files
+ """Splits the static lib @lib into its object files
- Splits the static lib @lib into its object files and returns
- a new temporary directory where the .o files should be found.
+ Splits the static lib @lib into its object files and returns
+ a new temporary directory where the .o files should be found.
- if @thin_arch was provided, it considers the @lib to be a fat
- binary and takes its thin version for the @thin_arch specified
- before retrieving the object files.
- '''
+ if @thin_arch was provided, it considers the @lib to be a fat
+ binary and takes its thin version for the @thin_arch specified
+ before retrieving the object files.
+ """
lib_tmpdir = tempfile.mkdtemp()
shutil.copy(lib, lib_tmpdir)
tmplib = os.path.join(lib_tmpdir, os.path.basename(lib))
- if thin_arch: #should be a fat file, split only to the arch we want
+ if thin_arch: # should be a fat file, split only to the arch we want
newname = '%s_%s' % (thin_arch, os.path.basename(lib))
cmd = ['lipo', tmplib, '-thin', thin_arch, '-output', newname]
- proc = await asyncio.create_subprocess_exec(*cmd, cwd=lib_tmpdir,
- stderr=subprocess.PIPE, env=self.env)
+ proc = await asyncio.create_subprocess_exec(*cmd, cwd=lib_tmpdir, stderr=subprocess.PIPE, env=self.env)
(unused_out, output) = await proc.communicate()
@@ -153,7 +158,7 @@ class StaticFrameworkLibrary(FrameworkLibrary):
if 'does not contain the specified architecture' in output:
return None
raise FatalError('Running {!r}, returncode {}:\n{}'.format(cmd, proc.returncode, output))
- tmplib = os.path.join (lib_tmpdir, newname)
+ tmplib = os.path.join(lib_tmpdir, newname)
await shell.async_call(['ar', '-x', tmplib], lib_tmpdir, env=self.env)
@@ -179,8 +184,8 @@ class StaticFrameworkLibrary(FrameworkLibrary):
new_path = os.path.join(lib_tmpdir, 'dup%d_' % x + f)
# The duplicated overwrote the first one, so extract it again
await shell.async_call(['ar', '-x', tmplib, f], lib_tmpdir, env=self.env)
- shutil.move (path, new_path)
- await shell.async_call(['ar', '-d',tmplib, f], lib_tmpdir, env=self.env)
+ shutil.move(path, new_path)
+ await shell.async_call(['ar', '-d', tmplib, f], lib_tmpdir, env=self.env)
return lib_tmpdir
@@ -194,20 +199,20 @@ class StaticFrameworkLibrary(FrameworkLibrary):
if len(s) == 4 and s[2] == 'T':
syms[s[3]].append(s)
dups = {}
- for k,v in syms.items():
+ for k, v in syms.items():
if len(v) > 1:
dups[k] = v
if dups:
- m.warning ("The static library contains duplicated symbols")
+ m.warning('The static library contains duplicated symbols')
for k, v in dups.items():
- m.message (k) # symbol name
+ m.message(k) # symbol name
for l in v:
- m.message (" %s" % l[0]) # file
+ m.message(' %s' % l[0]) # file
def _create_framework_library(self, libraries):
tmpdir = tempfile.mkdtemp()
- libname = os.path.basename (self.libname) # just to make sure
+ libname = os.path.basename(self.libname) # just to make sure
if self.arch == Architecture.UNIVERSAL:
archs = self.universal_archs
@@ -219,7 +224,7 @@ class StaticFrameworkLibrary(FrameworkLibrary):
split_queue = asyncio.Queue()
join_queues = collections.defaultdict(asyncio.Queue)
for thin_arch in archs:
- os.makedirs (os.path.join (tmpdir, thin_arch))
+ os.makedirs(os.path.join(tmpdir, thin_arch))
status = BuildStatusPrinter(archs, m.console_is_interactive())
for lib in libraries:
@@ -234,7 +239,7 @@ class StaticFrameworkLibrary(FrameworkLibrary):
tmpdir_thinarch = os.path.join(tmpdir, thin_arch)
libprefix = os.path.split(lib)[-1].replace('.', '_')
- if len(archs) > 1: #should be a fat file, split only to the arch we want
+ if len(archs) > 1: # should be a fat file, split only to the arch we want
libprefix += '_%s_' % thin_arch
lib_tmpdir = await self._split_static_lib(lib, thin_arch)
else:
@@ -242,7 +247,7 @@ class StaticFrameworkLibrary(FrameworkLibrary):
if lib_tmpdir is None:
# arch is not supported in the static lib, skip it
- status.inc_arch (thin_arch)
+ status.inc_arch(thin_arch)
split_queue.task_done()
continue
@@ -262,7 +267,7 @@ class StaticFrameworkLibrary(FrameworkLibrary):
while True:
lib, lib_tmpdir, obj_dict = await q.get()
- status.inc_arch (thin_arch)
+ status.inc_arch(thin_arch)
tmpdir_thinarch = os.path.join(tmpdir, thin_arch)
libprefix = os.path.split(lib)[-1].replace('.', '_')
@@ -281,11 +286,11 @@ class StaticFrameworkLibrary(FrameworkLibrary):
# If we have a duplicate object, commit any collected ones
if target_name in target_objs:
- m.warning ("Committing %d objects due to dup %s" % (len (target_objs), target_name))
+ m.warning('Committing %d objects due to dup %s' % (len(target_objs), target_name))
await shell.async_call(['ar', '-cqS', libname] + target_objs, tmpdir_thinarch, env=self.env)
target_objs = []
- target_objs.append (target_name)
+ target_objs.append(target_name)
object_files_md5.append(md5)
# Put all the collected target_objs in the archive. cmdline limit is 262k args on OSX.
@@ -303,20 +308,23 @@ class StaticFrameworkLibrary(FrameworkLibrary):
async def split_join_task():
tasks = [asyncio.ensure_future(join_library_worker(join_queues[arch], arch)) for arch in archs]
[tasks.append(asyncio.ensure_future(split_library_worker())) for i in range(len(archs))]
+
async def split_join_queues_done():
await split_queue.join()
for arch in archs:
await join_queues[arch].join()
+
await run_tasks(tasks, split_join_queues_done())
tasks = [asyncio.ensure_future(post_join_worker(thin_arch)) for thin_arch in archs]
await run_tasks(tasks)
+
run_until_complete(split_join_task())
if len(archs) > 1:
- #merge the final libs into a fat file again
+ # merge the final libs into a fat file again
files = [os.path.join(tmpdir, arch, libname) for arch in archs]
- shell.new_call(['lipo'] + files + ['-create' ,'-output', self.install_name], tmpdir, env=self.env)
+ shell.new_call(['lipo'] + files + ['-create', '-output', self.install_name], tmpdir, env=self.env)
else:
shell.new_call(['cp', os.path.join(tmpdir, self.arch, libname), self.install_name], tmpdir, env=self.env)
shutil.rmtree(tmpdir)
diff --git a/cerbero/ide/xcode/xcconfig.py b/cerbero/ide/xcode/xcconfig.py
index b116cf60..3c9ab340 100755
--- a/cerbero/ide/xcode/xcconfig.py
+++ b/cerbero/ide/xcode/xcconfig.py
@@ -22,20 +22,20 @@ from cerbero.ide.pkgconfig import PkgConfig
from functools import reduce
-XCCONFIG_TPL = '''
+XCCONFIG_TPL = """
ALWAYS_SEARCH_USER_PATHS = YES
USER_HEADER_SEARCH_PATHS = %(hsp)s
LIBRARY_SEARCH_PATHS = %(lsp)s
OTHER_LDFLAGS = %(libs)s
-'''
+"""
class XCConfig(object):
- '''
+ """
Creates an xcode config file to compile and link against the SDK using
pkgconfig to guess the headers search path, the libraries search path and
the libraries that need to be linked.
- '''
+ """
def __init__(self, libraries, env=None):
self.pkgconfig = PkgConfig(libraries, env=env)
@@ -49,14 +49,13 @@ class XCConfig(object):
args = dict()
args['hsp'] = ' '.join(self.pkgconfig.include_dirs())
args['lsp'] = ' '.join(self.pkgconfig.libraries_dirs())
- args['libs'] = reduce(lambda x, y: '%s -l%s' % (x, y),
- self.pkgconfig.libraries(), '')
+ args['libs'] = reduce(lambda x, y: '%s -l%s' % (x, y), self.pkgconfig.libraries(), '')
return args
-if __name__ == "__main__":
+if __name__ == '__main__':
if len(sys.argv) < 2:
- print("usage: xcconfig output_file libraries")
+ print('usage: xcconfig output_file libraries')
sys.exit(1)
xcconfig = XCConfig(sys.argv[2:])
xcconfig.create(sys.argv[1])
diff --git a/cerbero/main.py b/cerbero/main.py
index b806864e..fbe39996 100644
--- a/cerbero/main.py
+++ b/cerbero/main.py
@@ -28,20 +28,18 @@ import time
from cerbero import config, commands
from cerbero.enums import Platform
-from cerbero.errors import UsageError, FatalError, BuildStepError, \
- ConfigurationError, CerberoException, AbortedError
+from cerbero.errors import UsageError, FatalError, BuildStepError, ConfigurationError, CerberoException, AbortedError
from cerbero.utils import _, N_, user_is_root, git, run_until_complete
from cerbero.utils import messages as m
from cerbero.utils.manifest import Manifest
-description = N_('Build and package a set of modules to distribute them in '
- 'a SDK')
+description = N_('Build and package a set of modules to distribute them in ' 'a SDK')
-class Main(object):
+class Main(object):
def __init__(self, args):
if user_is_root():
- m.warning(_("Running as root"))
+ m.warning(_('Running as root'))
self.check_in_cerbero_shell()
self.create_parser()
@@ -55,11 +53,10 @@ class Main(object):
def check_in_cerbero_shell(self):
if os.environ.get('CERBERO_PREFIX', '') != '':
- self.log_error(_("ERROR: cerbero can't be run "
- "from a cerbero shell"))
+ self.log_error(_("ERROR: cerbero can't be run " 'from a cerbero shell'))
def log_error(self, msg, print_usage=False, command=None):
- ''' Log an error and exit '''
+ """Log an error and exit"""
if command is not None:
m.error("***** Error running '%s' command:" % command)
m.error('%s' % msg)
@@ -71,14 +68,15 @@ class Main(object):
sys.exit(1)
def init_logging(self):
- ''' Initialize logging '''
+ """Initialize logging"""
if self.args.timestamps:
m.START_TIME = time.monotonic()
logging.getLogger().setLevel(logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler())
def create_parser(self):
- ''' Creates the arguments parser '''
+ """Creates the arguments parser"""
+
class VariantAction(argparse.Action):
def __call__(self, parser, namespace, value, option_string=None):
current = getattr(namespace, self.dest) or []
@@ -87,24 +85,43 @@ class Main(object):
setattr(namespace, self.dest, current + additional)
self.parser = argparse.ArgumentParser(description=_(description))
- self.parser.add_argument('-t', '--timestamps', action='store_true', default=False,
- help=_('Print timestamps with every message printed'))
- self.parser.add_argument('--list-variants', action='store_true', default=False,
- help=_('List available variants'))
- self.parser.add_argument('-v', '--variants', action=VariantAction, default=None,
- help=_('Variants to be used for the build'))
- self.parser.add_argument('-c', '--config', action='append', type=str, default=None,
- help=_('Configuration file used for the build'))
- self.parser.add_argument('-m', '--manifest', action='store', type=str, default=None,
- help=_('Manifest file used to fixate git revisions'))
- self.parser.add_argument('--self-update', action='store', type=str, default=None,
- help=_('Update cerbero git repository from manifest and exit.'))
+ self.parser.add_argument(
+ '-t',
+ '--timestamps',
+ action='store_true',
+ default=False,
+ help=_('Print timestamps with every message printed'),
+ )
+ self.parser.add_argument(
+ '--list-variants', action='store_true', default=False, help=_('List available variants')
+ )
+ self.parser.add_argument(
+ '-v', '--variants', action=VariantAction, default=None, help=_('Variants to be used for the build')
+ )
+ self.parser.add_argument(
+ '-c', '--config', action='append', type=str, default=None, help=_('Configuration file used for the build')
+ )
+ self.parser.add_argument(
+ '-m',
+ '--manifest',
+ action='store',
+ type=str,
+ default=None,
+ help=_('Manifest file used to fixate git revisions'),
+ )
+ self.parser.add_argument(
+ '--self-update',
+ action='store',
+ type=str,
+ default=None,
+ help=_('Update cerbero git repository from manifest and exit.'),
+ )
def parse_arguments(self, args):
- ''' Parse the command line arguments '''
+ """Parse the command line arguments"""
# If no commands, make it show the help by default
if len(args) == 0:
- args = ["-h"]
+ args = ['-h']
self.args = self.parser.parse_args(args)
if self.args.variants is None:
self.args.variants = []
@@ -117,10 +134,10 @@ class Main(object):
sys.exit(0)
def self_update(self):
- '''Update this instance of cerbero git repository'''
+ """Update this instance of cerbero git repository"""
if not self.args.self_update:
- return
+ return
try:
manifest = Manifest(self.args.self_update)
@@ -131,17 +148,15 @@ class Main(object):
run_until_complete(git.fetch(git_dir))
run_until_complete(git.checkout(git_dir, project.revision))
except FatalError as ex:
- self.log_error(_("ERROR: Failed to proceed with self update %s") %
- ex)
+ self.log_error(_('ERROR: Failed to proceed with self update %s') % ex)
sys.exit(0)
def load_commands(self):
- subparsers = self.parser.add_subparsers(help=_('sub-command help'),
- dest='command')
+ subparsers = self.parser.add_subparsers(help=_('sub-command help'), dest='command')
commands.load_commands(subparsers)
def load_config(self):
- ''' Load the configuration '''
+ """Load the configuration"""
try:
self.config = config.Config()
if self.args.command == 'shell':
@@ -151,7 +166,9 @@ class Main(object):
for name in ('orc-0.4-0', 'z-1', 'ffi-7', 'glib-2.0-0'):
dll = os.path.join(self.config.build_tools_prefix, 'bin', f'lib{name}.dll')
if os.path.exists(dll):
- m.warning("'visualstudio' variant is now enabled by default: to build using only MinGW, use -v mingw")
+ m.warning(
+ "'visualstudio' variant is now enabled by default: to build using only MinGW, use -v mingw"
+ )
m.error('MIGRATION: build-tools now use Visual Studio, you need to rebuild:')
print('./cerbero-uninstalled -c config/build-tools.cbc wipe --force', file=sys.stderr)
print('./cerbero-uninstalled bootstrap --build-tools-only', file=sys.stderr)
@@ -191,6 +208,7 @@ class Main(object):
def main():
if 'CERBERO_PROFILING' in os.environ:
import cProfile
+
pfile = 'cerbero-profile.log'
print('Outputting profiling information to {!r}'.format(pfile))
cProfile.runctx('Main(sys.argv[1:])', globals(), locals(), filename=pfile)
@@ -198,5 +216,5 @@ def main():
Main(sys.argv[1:])
-if __name__ == "__main__":
+if __name__ == '__main__':
main()
diff --git a/cerbero/packages/__init__.py b/cerbero/packages/__init__.py
index 61f35b38..ddf23fec 100644
--- a/cerbero/packages/__init__.py
+++ b/cerbero/packages/__init__.py
@@ -24,14 +24,13 @@ from cerbero.utils import _
class PackageType(object):
-
RUNTIME = ''
DEVEL = '-devel'
DEBUG = '-debug'
class PackagerBase(object):
- ''' Base class for packagers '''
+ """Base class for packagers"""
def __init__(self, config, package, store):
self.config = config
@@ -39,7 +38,7 @@ class PackagerBase(object):
self.store = store
def pack(self, output_dir, devel=True, force=False, keep_temp=False):
- '''
+ """
Creates a package and puts it the the output directory
@param output_dir: output directory where the package will be saved
@@ -53,7 +52,7 @@ class PackagerBase(object):
@return: list of filenames for the packages created
@rtype: list
- '''
+ """
self.output_dir = os.path.realpath(output_dir)
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
@@ -73,8 +72,7 @@ class PackagerBase(object):
diff = list(set(files) - set(real_files))
if len(diff) != 0:
if force:
- m.warning(_("Some files required by this package are missing "
- "in the prefix:\n%s" % '\n'.join(diff)))
+ m.warning(_('Some files required by this package are missing ' 'in the prefix:\n%s' % '\n'.join(diff)))
else:
raise MissingPackageFilesError(diff)
if len(real_files) == 0:
diff --git a/cerbero/packages/android.py b/cerbero/packages/android.py
index 443f5539..8a7589a5 100644
--- a/cerbero/packages/android.py
+++ b/cerbero/packages/android.py
@@ -26,10 +26,9 @@ from cerbero.errors import UsageError
class AndroidPackager(DistTarball):
- ''' Creates a distribution tarball for Android '''
+ """Creates a distribution tarball for Android"""
- def _create_tarball(self, output_dir, package_type, files, force,
- package_prefix):
+ def _create_tarball(self, output_dir, package_type, files, force, package_prefix):
# Filter out some unwanted directories for the development package
if package_type == PackageType.DEVEL:
for filt in ['bin/', 'share/aclocal']:
@@ -44,12 +43,19 @@ class AndroidPackager(DistTarball):
elif package_type == PackageType.RUNTIME:
package_type = '-runtime'
- return "%s%s-%s-%s-%s%s.%s" % (self.package_prefix, self.package.name,
- self.config.target_platform, self.config.target_arch,
- self.package.version, package_type, ext)
+ return '%s%s-%s-%s-%s%s.%s' % (
+ self.package_prefix,
+ self.package.name,
+ self.config.target_platform,
+ self.config.target_arch,
+ self.package.version,
+ package_type,
+ ext,
+ )
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
+
register_packager(Distro.ANDROID, AndroidPackager)
diff --git a/cerbero/packages/debian.py b/cerbero/packages/debian.py
index e2473788..23461c50 100644
--- a/cerbero/packages/debian.py
+++ b/cerbero/packages/debian.py
@@ -31,19 +31,17 @@ from cerbero.packages.package import MetaPackage, App
from cerbero.utils import shell, _
from cerbero.utils import messages as m
-CHANGELOG_TPL = \
-'''%(p_prefix)s%(name)s (%(version)s-1) unstable; urgency=low
+CHANGELOG_TPL = """%(p_prefix)s%(name)s (%(version)s-1) unstable; urgency=low
* Release %(version)s
%(changelog_url)s
-- %(packager)s %(datetime)s
-'''
+"""
-COMPAT_TPL = '''7'''
+COMPAT_TPL = """7"""
-CONTROL_TPL = \
-'''Source: %(p_prefix)s%(name)s
+CONTROL_TPL = """Source: %(p_prefix)s%(name)s
Priority: extra
Maintainer: %(packager)s
Build-Depends: debhelper
@@ -51,10 +49,9 @@ Standards-Version: 3.8.4
Section: libs
%(homepage)s
-'''
+"""
-CONTROL_RUNTIME_PACKAGE_TPL = \
-'''Package: %(p_prefix)s%(name)s
+CONTROL_RUNTIME_PACKAGE_TPL = """Package: %(p_prefix)s%(name)s
Section: libs
Architecture: any
Depends: ${shlibs:Depends}, ${misc:Depends} %(requires)s
@@ -63,20 +60,18 @@ Suggests: %(suggests)s
Description: %(shortdesc)s
%(longdesc)s
-'''
+"""
-CONTROL_DBG_PACKAGE_TPL = \
-'''Package: %(p_prefix)s%(name)s-dbg
+CONTROL_DBG_PACKAGE_TPL = """Package: %(p_prefix)s%(name)s-dbg
Section: debug
Architecture: any
Depends: %(p_prefix)s%(name)s (= ${binary:Version})
Description: Debug symbols for %(p_prefix)s%(name)s
Debug symbols for %(p_prefix)s%(name)s
-'''
+"""
-CONTROL_DEVEL_PACKAGE_TPL = \
-'''Package: %(p_prefix)s%(name)s-dev
+CONTROL_DEVEL_PACKAGE_TPL = """Package: %(p_prefix)s%(name)s-dev
Section: libdevel
Architecture: any
Depends: ${shlibs:Depends}, ${misc:Depends} %(requires)s
@@ -84,10 +79,9 @@ Recommends: %(recommends)s
Suggests: %(suggests)s
Description: %(shortdesc)s
%(longdesc)s
-'''
+"""
-COPYRIGHT_TPL = \
-'''This package was debianized by %(packager)s on
+COPYRIGHT_TPL = """This package was debianized by %(packager)s on
%(datetime)s.
%(license_notes)s
@@ -101,10 +95,9 @@ License:
On Debian systems, the complete text of common license(s) can be found in
/usr/share/common-licenses/.
-'''
+"""
-COPYRIGHT_TPL_META = \
-'''This package was debianized by %(packager)s on
+COPYRIGHT_TPL_META = """This package was debianized by %(packager)s on
%(datetime)s.
%(license_notes)s
@@ -116,10 +109,9 @@ License:
On Debian systems, the complete text of common license(s) can be found in
/usr/share/common-licenses/.
-'''
+"""
-RULES_TPL = \
-'''#!/usr/bin/make -f
+RULES_TPL = """#!/usr/bin/make -f
# Uncomment this to turn on verbose mode.
#export DH_VERBOSE=1
@@ -164,9 +156,9 @@ binary-arch: build install
binary: binary-indep binary-arch
.PHONY: build clean binary-indep binary-arch binary install
-'''
+"""
-SOURCE_FORMAT_TPL = '''3.0 (native)'''
+SOURCE_FORMAT_TPL = """3.0 (native)"""
CHANGELOG_URL_TPL = '* Full changelog can be found at %s'
DH_STRIP_TPL = 'dh_strip -a --dbg-package=%(p_prefix)s%(name)s-dbg %(excl)s'
@@ -197,14 +189,11 @@ class DebianPackager(LinuxPackager):
packagedir = os.path.join(srcdir, 'debian')
os.mkdir(packagedir)
os.mkdir(os.path.join(packagedir, 'source'))
- m.action(_('Creating debian package structure at %s for package %s') %
- (srcdir, self.package.name))
+ m.action(_('Creating debian package structure at %s for package %s') % (srcdir, self.package.name))
if os.path.exists(self.package.resources_postinstall):
- shutil.copy(os.path.join(self.package.resources_postinstall),
- os.path.join(packagedir, 'postinst'))
+ shutil.copy(os.path.join(self.package.resources_postinstall), os.path.join(packagedir, 'postinst'))
if os.path.exists(self.package.resources_postremove):
- shutil.copy(os.path.join(self.package.resources_postremove),
- os.path.join(packagedir, 'postrm'))
+ shutil.copy(os.path.join(self.package.resources_postremove), os.path.join(packagedir, 'postrm'))
return (tmpdir, packagedir, srcdir)
def setup_source(self, tarball, tmpdir, packagedir, srcdir):
@@ -243,16 +232,11 @@ class DebianPackager(LinuxPackager):
self._write_debian_file(packagedir, 'copyright', copyright)
rules_path = self._write_debian_file(packagedir, 'rules', rules)
os.chmod(rules_path, 0o755)
- self._write_debian_file(packagedir, os.path.join('source', 'format'),
- source_format)
+ self._write_debian_file(packagedir, os.path.join('source', 'format'), source_format)
if self.package.has_runtime_package:
- self._write_debian_file(packagedir,
- self.package_prefix + self.package.name + '.install',
- runtime_files)
+ self._write_debian_file(packagedir, self.package_prefix + self.package.name + '.install', runtime_files)
if self.devel and self.package.has_devel_package:
- self._write_debian_file(packagedir,
- self.package_prefix + self.package.name + '-dev.install',
- devel_files)
+ self._write_debian_file(packagedir, self.package_prefix + self.package.name + '-dev.install', devel_files)
def build(self, output_dir, tarname, tmpdir, packagedir, srcdir):
if tarname:
@@ -264,17 +248,16 @@ class DebianPackager(LinuxPackager):
# for each dependency, copy the generated shlibs to this
# package debian/shlibs.local, so that dpkg-shlibdeps knows where
# our dependencies are without using Build-Depends:
- package_deps = self.store.get_package_deps(self.package.name,
- recursive=True)
+ package_deps = self.store.get_package_deps(self.package.name, recursive=True)
if package_deps:
shlibs_local_path = os.path.join(packagedir, 'shlibs.local')
f = open(shlibs_local_path, 'w')
for p in package_deps:
- package_shlibs_path = os.path.join(tmpdir,
- self.package_prefix + p.name + '-shlibs')
- m.action(_('Copying generated shlibs file %s for ' \
- 'dependency %s to %s') %
- (package_shlibs_path, p.name, shlibs_local_path))
+ package_shlibs_path = os.path.join(tmpdir, self.package_prefix + p.name + '-shlibs')
+ m.action(
+ _('Copying generated shlibs file %s for ' 'dependency %s to %s')
+ % (package_shlibs_path, p.name, shlibs_local_path)
+ )
if os.path.exists(package_shlibs_path):
shutil.copyfileobj(open(package_shlibs_path, 'r'), f)
f.close()
@@ -286,13 +269,9 @@ class DebianPackager(LinuxPackager):
if tarname:
# copy generated shlibs to tmpdir/$package-shlibs to be used by
# dependent packages
- shlibs_path = os.path.join(packagedir,
- self.package_prefix + self.package.name,
- 'DEBIAN', 'shlibs')
- out_shlibs_path = os.path.join(tmpdir,
- self.package_prefix + self.package.name + '-shlibs')
- m.action(_('Copying generated shlibs file %s to %s') %
- (shlibs_path, out_shlibs_path))
+ shlibs_path = os.path.join(packagedir, self.package_prefix + self.package.name, 'DEBIAN', 'shlibs')
+ out_shlibs_path = os.path.join(tmpdir, self.package_prefix + self.package.name + '-shlibs')
+ m.action(_('Copying generated shlibs file %s to %s') % (shlibs_path, out_shlibs_path))
if os.path.exists(shlibs_path):
shutil.copy(shlibs_path, out_shlibs_path)
@@ -320,8 +299,7 @@ class DebianPackager(LinuxPackager):
if isinstance(self.package, MetaPackage):
return ''
files = self.files_list(package_type)
- return '\n'.join([f + ' ' + os.path.join(self.install_dir.lstrip('/'),
- os.path.dirname(f)) for f in files])
+ return '\n'.join([f + ' ' + os.path.join(self.install_dir.lstrip('/'), os.path.dirname(f)) for f in files])
def _write_debian_file(self, packagedir, filename, content):
path = os.path.join(packagedir, filename)
@@ -336,8 +314,7 @@ class DebianPackager(LinuxPackager):
args['packager'] = self.packager
args['version'] = self.package.version
args['datetime'] = self.datetime
- args['changelog_url'] = CHANGELOG_URL_TPL % self.package.url \
- if self.package.url != 'default' else ''
+ args['changelog_url'] = CHANGELOG_URL_TPL % self.package.url if self.package.url != 'default' else ''
return CHANGELOG_TPL % args
def _deb_control_runtime_and_files(self):
@@ -345,11 +322,9 @@ class DebianPackager(LinuxPackager):
args['name'] = self.package.name
args['p_prefix'] = self.package_prefix
args['packager'] = self.packager
- args['homepage'] = 'Homepage: ' + self.package.url \
- if self.package.url != 'default' else ''
+ args['homepage'] = 'Homepage: ' + self.package.url if self.package.url != 'default' else ''
args['shortdesc'] = self.package.shortdesc
- args['longdesc'] = self.package.longdesc \
- if self.package.longdesc != 'default' else args['shortdesc']
+ args['longdesc'] = self.package.longdesc if self.package.longdesc != 'default' else args['shortdesc']
try:
runtime_files = self._files_list(PackageType.RUNTIME)
@@ -357,8 +332,7 @@ class DebianPackager(LinuxPackager):
runtime_files = ''
if isinstance(self.package, MetaPackage):
- requires, recommends, suggests = \
- self.get_meta_requires(PackageType.RUNTIME, '')
+ requires, recommends, suggests = self.get_meta_requires(PackageType.RUNTIME, '')
requires = ', '.join(requires)
recommends = ', '.join(recommends)
suggests = ', '.join(suggests)
@@ -372,16 +346,14 @@ class DebianPackager(LinuxPackager):
args['recommends'] = ''
args['suggests'] = ''
if runtime_files:
- return (CONTROL_TPL + CONTROL_RUNTIME_PACKAGE_TPL + CONTROL_DBG_PACKAGE_TPL) % \
- args, runtime_files
+ return (CONTROL_TPL + CONTROL_RUNTIME_PACKAGE_TPL + CONTROL_DBG_PACKAGE_TPL) % args, runtime_files
return CONTROL_TPL % args, ''
def _deb_control_devel_and_files(self):
args = {}
args['name'] = self.package.name
args['p_prefix'] = self.package_prefix
- args['shortdesc'] = 'Development files for %s' % \
- self.package_prefix + self.package.name
+ args['shortdesc'] = 'Development files for %s' % self.package_prefix + self.package.name
args['longdesc'] = args['shortdesc']
try:
@@ -390,8 +362,7 @@ class DebianPackager(LinuxPackager):
devel_files = ''
if isinstance(self.package, MetaPackage):
- requires, recommends, suggests = \
- self.get_meta_requires(PackageType.DEVEL, '-dev')
+ requires, recommends, suggests = self.get_meta_requires(PackageType.DEVEL, '-dev')
requires = ', '.join(requires)
recommends = ', '.join(recommends)
suggests = ', '.join(suggests)
@@ -403,7 +374,7 @@ class DebianPackager(LinuxPackager):
requires = self._get_requires(PackageType.DEVEL)
args['requires'] = ', ' + requires if requires else ''
if self.package.has_runtime_package:
- args['requires'] += (', %(p_prefix)s%(name)s (= ${binary:Version})' % args)
+ args['requires'] += ', %(p_prefix)s%(name)s (= ${binary:Version})' % args
args['recommends'] = ''
args['suggests'] = ''
if devel_files:
@@ -422,8 +393,7 @@ class DebianPackager(LinuxPackager):
if isinstance(self.package, MetaPackage):
return COPYRIGHT_TPL_META % args
- args['recipes_licenses'] = ',\n '.join(
- [l.pretty_name for l in self.recipes_licenses()])
+ args['recipes_licenses'] = ',\n '.join([l.pretty_name for l in self.recipes_licenses()])
return COPYRIGHT_TPL % args
def _deb_rules(self):
@@ -432,10 +402,8 @@ class DebianPackager(LinuxPackager):
args['p_prefix'] = self.package_prefix
args['excl'] = ''
if isinstance(self.package, App):
- args['excl'] = ' '.join(['-X%s' % x for x in
- self.package.strip_excludes])
- if not isinstance(self.package, MetaPackage) and \
- self.package.has_runtime_package:
+ args['excl'] = ' '.join(['-X%s' % x for x in self.package.strip_excludes])
+ if not isinstance(self.package, MetaPackage) and self.package.has_runtime_package:
args['dh_strip'] = DH_STRIP_TPL % args
else:
args['dh_strip'] = ''
@@ -443,7 +411,6 @@ class DebianPackager(LinuxPackager):
class Packager(object):
-
def __new__(klass, config, package, store):
return DebianPackager(config, package, store)
@@ -451,4 +418,5 @@ class Packager(object):
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
+
register_packager(Distro.DEBIAN, Packager)
diff --git a/cerbero/packages/disttarball.py b/cerbero/packages/disttarball.py
index b2678734..af391b7a 100644
--- a/cerbero/packages/disttarball.py
+++ b/cerbero/packages/disttarball.py
@@ -30,7 +30,7 @@ from cerbero.tools import strip
class DistTarball(PackagerBase):
- ''' Creates a distribution tarball '''
+ """Creates a distribution tarball"""
def __init__(self, config, package, store):
PackagerBase.__init__(self, config, package, store)
@@ -43,19 +43,20 @@ class DistTarball(PackagerBase):
if self.compress not in ('none', 'bz2', 'xz'):
raise UsageError('Invalid compression type {!r}'.format(self.compress))
- def pack(self, output_dir, devel=True, force=False, keep_temp=False,
- split=True, package_prefix='', strip_binaries=False):
+ def pack(
+ self, output_dir, devel=True, force=False, keep_temp=False, split=True, package_prefix='', strip_binaries=False
+ ):
try:
dist_files = self.files_list(PackageType.RUNTIME, force)
except EmptyPackageError:
- m.warning(_("The runtime package is empty"))
+ m.warning(_('The runtime package is empty'))
dist_files = []
if devel:
try:
devel_files = self.files_list(PackageType.DEVEL, force)
except EmptyPackageError:
- m.warning(_("The development package is empty"))
+ m.warning(_('The development package is empty'))
devel_files = []
else:
devel_files = []
@@ -69,16 +70,15 @@ class DistTarball(PackagerBase):
filenames = []
if dist_files:
if not strip_binaries:
- runtime = self._create_tarball(output_dir, PackageType.RUNTIME,
- dist_files, force, package_prefix)
+ runtime = self._create_tarball(output_dir, PackageType.RUNTIME, dist_files, force, package_prefix)
else:
- runtime = self._create_tarball_stripped(output_dir, PackageType.RUNTIME,
- dist_files, force, package_prefix)
+ runtime = self._create_tarball_stripped(
+ output_dir, PackageType.RUNTIME, dist_files, force, package_prefix
+ )
filenames.append(runtime)
if split and devel and len(devel_files) != 0:
- devel = self._create_tarball(output_dir, PackageType.DEVEL,
- devel_files, force, package_prefix)
+ devel = self._create_tarball(output_dir, PackageType.DEVEL, devel_files, force, package_prefix)
filenames.append(devel)
return filenames
@@ -104,11 +104,17 @@ class DistTarball(PackagerBase):
if self.config.variants.visualstudio and self.config.variants.vscrt == 'mdd':
platform += '+debug'
- return "%s%s-%s-%s-%s%s.%s" % (self.package_prefix, self.package.name, platform,
- self.config.target_arch, self.package.version, package_type, ext)
-
- def _create_tarball_stripped(self, output_dir, package_type, files, force,
- package_prefix):
+ return '%s%s-%s-%s-%s%s.%s' % (
+ self.package_prefix,
+ self.package.name,
+ platform,
+ self.config.target_arch,
+ self.package.version,
+ package_type,
+ ext,
+ )
+
+ def _create_tarball_stripped(self, output_dir, package_type, files, force, package_prefix):
tmpdir = tempfile.mkdtemp(dir=self.config.home_dir)
if hasattr(self.package, 'strip_excludes'):
@@ -127,21 +133,19 @@ class DistTarball(PackagerBase):
prefix_restore = self.prefix
self.prefix = tmpdir
- tarball = self._create_tarball(output_dir, package_type,
- files, force, package_prefix)
+ tarball = self._create_tarball(output_dir, package_type, files, force, package_prefix)
self.prefix = prefix_restore
shutil.rmtree(tmpdir)
return tarball
- def _create_tarball(self, output_dir, package_type, files, force,
- package_prefix):
+ def _create_tarball(self, output_dir, package_type, files, force, package_prefix):
filename = os.path.join(output_dir, self._get_name(package_type))
if os.path.exists(filename):
if force:
os.remove(filename)
else:
- raise UsageError("File %s already exists" % filename)
+ raise UsageError('File %s already exists' % filename)
if self.config.distro == Distro.MSYS:
self._write_tar_windows(filename, package_prefix, files)
else:
@@ -203,7 +207,7 @@ class DistTarball(PackagerBase):
# a plain tar using bsdtar first, then compress it with xz later.
filename = os.path.splitext(filename)[0]
elif self.compress != 'none':
- raise AssertionError("Unknown tar compression: {}".format(self.compress))
+ raise AssertionError('Unknown tar compression: {}'.format(self.compress))
tar_cmd += ['-cf', filename]
with tempfile.TemporaryDirectory(ignore_cleanup_errors=True) as d:
@@ -224,14 +228,13 @@ class DistTarball(PackagerBase):
class Packager(object):
-
def __new__(klass, config, package, store):
return DistTarball(config, package, store)
-
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
+
register_packager(Distro.NONE, Packager)
register_packager(Distro.GENTOO, Packager)
diff --git a/cerbero/packages/linux.py b/cerbero/packages/linux.py
index acd3357f..d8b577e5 100644
--- a/cerbero/packages/linux.py
+++ b/cerbero/packages/linux.py
@@ -31,7 +31,6 @@ import shutil
class LinuxPackager(PackagerBase):
-
def __init__(self, config, package, store):
PackagerBase.__init__(self, config, package, store)
self.package_prefix = ''
@@ -40,8 +39,7 @@ class LinuxPackager(PackagerBase):
self.packager = self.config.packager
self._check_packager()
- def pack(self, output_dir, devel=True, force=False, keep_temp=False,
- pack_deps=True, tmpdir=None):
+ def pack(self, output_dir, devel=True, force=False, keep_temp=False, pack_deps=True, tmpdir=None):
self.install_dir = self.package.get_install_dir()
self.devel = devel
self.force = force
@@ -58,10 +56,10 @@ class LinuxPackager(PackagerBase):
if not isinstance(self.package, MetaPackage):
# create a tarball with all the package's files
- tarball_packager = DistTarball(self.config, self.package,
- self.store)
- tarball = tarball_packager.pack(tmpdir, devel, True, split=False,
- package_prefix=self.full_package_name, strip_binaries=False)[0]
+ tarball_packager = DistTarball(self.config, self.package, self.store)
+ tarball = tarball_packager.pack(
+ tmpdir, devel, True, split=False, package_prefix=self.full_package_name, strip_binaries=False
+ )[0]
tarname = self.setup_source(tarball, tmpdir, packagedir, srcdir)
else:
# metapackages only contains Requires dependencies with
@@ -108,8 +106,7 @@ class LinuxPackager(PackagerBase):
# already built, skipping
continue
- m.action(_('Packing dependency %s for package %s') %
- (p.name, self.package.name))
+ m.action(_('Packing dependency %s for package %s') % (p.name, self.package.name))
packager = self.__class__(self.config, p, self.store)
try:
packager.pack(output_dir, self.devel, force, True, True, tmpdir)
@@ -160,7 +157,7 @@ class LinuxPackager(PackagerBase):
# Development packages should depend on the runtime package
if package_type == PackageType.DEVEL:
if self._has_runtime_package(self.package):
- deps.append("%s%s" % (self._package_prefix(self.package), self.package.name))
+ deps.append('%s%s' % (self._package_prefix(self.package), self.package.name))
deps.extend(self.package.get_sys_deps(package_type))
return sorted(deps)
@@ -186,13 +183,11 @@ class LinuxPackager(PackagerBase):
return ''
def _full_package_name(self):
- return '%s%s-%s' % (self.package_prefix, self.package.name,
- self.package.version)
+ return '%s%s-%s' % (self.package_prefix, self.package.name, self.package.version)
def _check_packager(self):
if self.packager == DEFAULT_PACKAGER:
- m.warning(_('No packager defined, using default '
- 'packager "%s"') % self.packager)
+ m.warning(_('No packager defined, using default ' 'packager "%s"') % self.packager)
def _has_runtime_package(self, package):
if hasattr(package, 'has_runtime_package'):
diff --git a/cerbero/packages/osx/buildtools.py b/cerbero/packages/osx/buildtools.py
index 21615ca5..61042ec9 100644
--- a/cerbero/packages/osx/buildtools.py
+++ b/cerbero/packages/osx/buildtools.py
@@ -21,13 +21,12 @@ from cerbero.utils import shell
class PackageBuild(object):
- ''' Wrapper for the packagebuild application '''
+ """Wrapper for the packagebuild application"""
CMD = 'pkgbuild'
- def create_package(self, root, pkg_id, version, title, output_file,
- destination='/opt/', scripts_path=None):
- '''
+ def create_package(self, root, pkg_id, version, title, output_file, destination='/opt/', scripts_path=None):
+ """
Creates an osx flat package, where all files are properly bundled in a
directory that is set as the package root
@@ -45,9 +44,8 @@ class PackageBuild(object):
@type destination: str
@param scripts_path: relative path for package scripts
@type scripts_path: str
- '''
- args = {'root': root, 'identifier': pkg_id, 'version': version,
- 'install-location': destination}
+ """
+ args = {'root': root, 'identifier': pkg_id, 'version': version, 'install-location': destination}
if scripts_path is not None:
args['scripts'] = scripts_path
shell.new_call(self._cmd_with_args(args, output_file))
@@ -59,8 +57,8 @@ class PackageBuild(object):
return [self.CMD] + args_arr + [output]
-class ProductBuild (object):
- ''' Wrapper for the packagebuild application '''
+class ProductBuild(object):
+ """Wrapper for the packagebuild application"""
CMD = 'productbuild'
diff --git a/cerbero/packages/osx/bundles.py b/cerbero/packages/osx/bundles.py
index daa447a4..89eb7890 100644
--- a/cerbero/packages/osx/bundles.py
+++ b/cerbero/packages/osx/bundles.py
@@ -29,10 +29,10 @@ from cerbero.utils import shell
class BundlePackagerBase(PackagerBase):
- '''
+ """
Creates a package with the basic structure of a bundle, to be included
in a MetaPackage.
- '''
+ """
def __init__(self, package, pkgname, desc, uuid):
self.package = Package(package.config, package.store, None)
@@ -55,45 +55,41 @@ class BundlePackagerBase(PackagerBase):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
- path = self._create_package(output_dir, self.package.get_install_dir(),
- self.package.version, root)
+ path = self._create_package(output_dir, self.package.get_install_dir(), self.package.version, root)
return [path, None]
-
def _create_package(self, output_dir, install_dir, version, root):
- output_file = os.path.join(output_dir, '%s-%s-%s.pkg' %
- (self.name, self.package.version,
- self.config.target_arch))
+ output_file = os.path.join(
+ output_dir, '%s-%s-%s.pkg' % (self.name, self.package.version, self.config.target_arch)
+ )
if not root:
root = self.create_bundle()
resources = tempfile.mkdtemp()
if os.path.exists(self.package.resources_preinstall):
- shutil.copy(os.path.join(self.package.resources_preinstall),
- os.path.join(resources, 'preinstall'))
+ shutil.copy(os.path.join(self.package.resources_preinstall), os.path.join(resources, 'preinstall'))
if os.path.exists(self.package.resources_postinstall):
- shutil.copy(os.path.join(self.package.resources_postinstall),
- os.path.join(resources, 'postinstall'))
+ shutil.copy(os.path.join(self.package.resources_postinstall), os.path.join(resources, 'postinstall'))
packagebuild = PackageBuild()
- packagebuild.create_package(root, self.package.identifier(),
- self.package.version, self.title, output_file, install_dir,
- resources)
+ packagebuild.create_package(
+ root, self.package.identifier(), self.package.version, self.title, output_file, install_dir, resources
+ )
shutil.rmtree(resources)
return output_file
def create_bundle(self, target_dir=None):
- '''
+ """
Creates the bundle structure
- '''
+ """
raise NotImplemented('Subclasses should implement create_bundle')
class FrameworkBundlePackager(BundlePackagerBase):
- '''
+ """
Creates a package with the basic structure of a framework bundle,
adding links for Headears, Libraries, Commands, and Current Versions,
and the Framework info.
- '''
+ """
name = 'osx-framework'
title = 'Framework Bundle'
@@ -103,7 +99,7 @@ class FrameworkBundlePackager(BundlePackagerBase):
self.name = filename
def create_bundle(self, target_dir=None):
- '''
+ """
Creates the bundle structure
Commands -> Versions/Current/Commands
@@ -113,33 +109,38 @@ class FrameworkBundlePackager(BundlePackagerBase):
Resources -> Versions/Current/Resources
Versions/Current -> Version/$VERSION/$ARCH
Framework -> Versions/Current/Famework
- '''
+ """
if target_dir:
tmp = target_dir
else:
tmp = tempfile.mkdtemp()
- #if self.config.target_arch == Architecture.UNIVERSAL:
+ # if self.config.target_arch == Architecture.UNIVERSAL:
# arch_dir = ''
- #else:
+ # else:
# arch_dir = self.config.target_arch
- vdir = os.path.join('Versions', self.package.sdk_version) #, arch_dir)
+ vdir = os.path.join('Versions', self.package.sdk_version) # , arch_dir)
rdir = '%s/Resources/' % vdir
- os.makedirs (os.path.join(tmp, rdir), exist_ok=True if target_dir else False)
+ os.makedirs(os.path.join(tmp, rdir), exist_ok=True if target_dir else False)
- links = {'Versions/Current': '%s' % self.package.sdk_version,
- 'Resources': 'Versions/Current/Resources',
- 'Commands': 'Versions/Current/Commands',
- 'Headers': 'Versions/Current/Headers',
- 'Libraries': 'Versions/Current/Libraries'}
- inner_links = {'Commands': 'bin',
- 'Libraries': 'lib'}
+ links = {
+ 'Versions/Current': '%s' % self.package.sdk_version,
+ 'Resources': 'Versions/Current/Resources',
+ 'Commands': 'Versions/Current/Commands',
+ 'Headers': 'Versions/Current/Headers',
+ 'Libraries': 'Versions/Current/Libraries',
+ }
+ inner_links = {'Commands': 'bin', 'Libraries': 'lib'}
# Create the frameworks Info.plist file
- framework_plist = FrameworkPlist(self.package.name,
- self.package.org, self.package.version, self.package.shortdesc,
- self.package.config.min_osx_sdk_version)
+ framework_plist = FrameworkPlist(
+ self.package.name,
+ self.package.org,
+ self.package.version,
+ self.package.shortdesc,
+ self.package.config.min_osx_sdk_version,
+ )
framework_plist.save(os.path.join(tmp, rdir, 'Info.plist'))
# Add a link from Framework to Versions/Current/Framework
@@ -150,34 +151,32 @@ class FrameworkBundlePackager(BundlePackagerBase):
# Create all links
for dest, src in links.items():
- shell.symlink (src, dest, tmp)
+ shell.symlink(src, dest, tmp)
inner_tmp = os.path.join(tmp, vdir)
for dest, src in inner_links.items():
- shell.symlink (src, dest, inner_tmp)
+ shell.symlink(src, dest, inner_tmp)
# Copy the framework library to Versions/$VERSION/$ARCH/Framework
- if self.package.osx_framework_library is not None \
- and os.path.exists(os.path.join(self.config.prefix, link)):
- shutil.copy(os.path.join(self.config.prefix, link),
- os.path.join(tmp, vdir, name))
+ if self.package.osx_framework_library is not None and os.path.exists(os.path.join(self.config.prefix, link)):
+ shutil.copy(os.path.join(self.config.prefix, link), os.path.join(tmp, vdir, name))
return tmp
class ApplicationBundlePackager(object):
- '''
+ """
Creates a package with the basic structure of an Application bundle.
- '''
+ """
def __init__(self, package):
self.package = package
def create_bundle(self, tmp=None):
- '''
+ """
Creates the Application bundle structure
Contents/MacOS/MainExectuable -> Contents/Home/bin/main-executable
Contents/Info.plist
- '''
+ """
tmp = tmp or tempfile.mkdtemp()
contents = os.path.join(tmp, 'Contents')
@@ -192,11 +191,15 @@ class ApplicationBundlePackager(object):
plist_tpl = None
if os.path.exists(self.package.resources_info_plist):
plist_tpl = open(self.package.resources_info_plist).read()
- framework_plist = ApplicationPlist(self.package.app_name,
- self.package.org, self.package.version, self.package.shortdesc,
+ framework_plist = ApplicationPlist(
+ self.package.app_name,
+ self.package.org,
+ self.package.version,
+ self.package.shortdesc,
self.package.config.min_osx_sdk_version,
os.path.basename(self.package.resources_icon_icns),
- plist_tpl)
+ plist_tpl,
+ )
framework_plist.save(os.path.join(contents, 'Info.plist'))
# Copy app icon to Resources
diff --git a/cerbero/packages/osx/distribution.py b/cerbero/packages/osx/distribution.py
index 64b63b39..a11a37f5 100644
--- a/cerbero/packages/osx/distribution.py
+++ b/cerbero/packages/osx/distribution.py
@@ -18,7 +18,7 @@
import os
from cerbero.enums import Architecture
-DISTRO_XML_TPL = '''\
+DISTRO_XML_TPL = """\
<?xml version="1.0"?>
<installer-gui-script minSpecVesion="1">
<options require-scripts="false" hostArchitectures="%(archs)s"/>
@@ -33,10 +33,11 @@ DISTRO_XML_TPL = '''\
%(choices_desc)s
%(pkg_refs)s
</installer-gui-script>
-'''
+"""
+
class DistributionXML(object):
- ''' Creates a Distribution.xml for productbuild '''
+ """Creates a Distribution.xml for productbuild"""
TAG_CHOICE = 'choice'
TAG_CHOICES_OUTLINE = 'choices-outline'
@@ -45,8 +46,9 @@ class DistributionXML(object):
PROP_ENABLE_ANYWHERE = 'false'
- def __init__(self, package, store, out_dir, packages_paths, emptypkgs,
- package_type, target_arch, home_folder=False):
+ def __init__(
+ self, package, store, out_dir, packages_paths, emptypkgs, package_type, target_arch, home_folder=False
+ ):
self.package = package
self.store = store
self.out_dir = out_dir
@@ -72,16 +74,18 @@ class DistributionXML(object):
archs = 'arm64,x86_64'
else:
archs = self.target_arch
- return self.template % {'background': self.package.resources_background,
- 'background_dark': self.package.resources_background_dark,
- 'archs': archs,
- 'license': self.package.resources_license_rtf,
- 'ehome': self.enable_user_home,
- 'elocal': self.enable_local_system,
- 'title': self.package.shortdesc,
- 'choices': self.choices,
- 'choices_desc': self.choices_desc,
- 'pkg_refs': self.pkg_refs}
+ return self.template % {
+ 'background': self.package.resources_background,
+ 'background_dark': self.package.resources_background_dark,
+ 'archs': archs,
+ 'license': self.package.resources_license_rtf,
+ 'ehome': self.enable_user_home,
+ 'elocal': self.enable_local_system,
+ 'title': self.package.shortdesc,
+ 'choices': self.choices,
+ 'choices_desc': self.choices_desc,
+ 'pkg_refs': self.pkg_refs,
+ }
def _add_choices(self):
self.choices = ''
@@ -100,9 +104,11 @@ class DistributionXML(object):
self._add_choice(package, not required, selected)
def _add_choice(self, package, enabled, selected):
- self.choices_desc += '<choice description="default" id="%s" start_enabled="%s"'\
- ' title="%s">\n' % \
- (package.identifier(), enabled, package.longdesc)
+ self.choices_desc += '<choice description="default" id="%s" start_enabled="%s"' ' title="%s">\n' % (
+ package.identifier(),
+ enabled,
+ package.longdesc,
+ )
packages = [package] + self.store.get_package_deps(package)
for package in packages:
@@ -111,8 +117,11 @@ class DistributionXML(object):
package.set_mode(self.package_type)
self.choices_desc += '<pkg-ref id="%s"/>\n' % package.identifier()
if package not in self.packagerefs:
- self.pkg_refs += '<pkg-ref id="%s" version="%s">%s</pkg-ref>\n' % \
- (package.identifier(), package.version, self.packages_paths[package])
+ self.pkg_refs += '<pkg-ref id="%s" version="%s">%s</pkg-ref>\n' % (
+ package.identifier(),
+ package.version,
+ self.packages_paths[package],
+ )
self.packagerefs.append(package)
self.choices_desc += '</choice>\n'
diff --git a/cerbero/packages/osx/info_plist.py b/cerbero/packages/osx/info_plist.py
index 017b87c6..564daf73 100644
--- a/cerbero/packages/osx/info_plist.py
+++ b/cerbero/packages/osx/info_plist.py
@@ -1,4 +1,3 @@
-
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
@@ -17,7 +16,7 @@
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
-INFO_PLIST_TPL='''\
+INFO_PLIST_TPL = """\
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
@@ -43,16 +42,15 @@ INFO_PLIST_TPL='''\
%(extra)s
</dict>
</plist>
-'''
+"""
class InfoPlist(object):
- ''' Create a Info.plist file '''
+ """Create a Info.plist file"""
package_type = ''
- def __init__(self, name, identifier, version, info, minosxversion,
- icon=None, plist_tpl=None):
+ def __init__(self, name, identifier, version, info, minosxversion, icon=None, plist_tpl=None):
self.name = name
self.identifier = identifier
self.version = version
@@ -66,34 +64,38 @@ class InfoPlist(object):
f.write(self.plist_tpl % self._get_properties())
def _get_properties(self):
- properties = {'id': self.identifier, 'name': self.name,
- 'desc': self.info, 'ptype': self.package_type,
- 'icon': self.icon, 'version_str': self.version,
- 'version': self.version.replace('.', ''),
- 'minosxversion': self.minosxversion, 'extra':''}
+ properties = {
+ 'id': self.identifier,
+ 'name': self.name,
+ 'desc': self.info,
+ 'ptype': self.package_type,
+ 'icon': self.icon,
+ 'version_str': self.version,
+ 'version': self.version.replace('.', ''),
+ 'minosxversion': self.minosxversion,
+ 'extra': '',
+ }
if self.icon:
- properties['extra'] = '<key>CFBundleIconFile</key>\n' \
- '<string>%s</string>' % self.icon
+ properties['extra'] = '<key>CFBundleIconFile</key>\n' '<string>%s</string>' % self.icon
return properties
class FrameworkPlist(InfoPlist):
- ''' Create a Info.plist file for frameworks '''
+ """Create a Info.plist file for frameworks"""
package_type = 'FMWK'
-
class ApplicationPlist(InfoPlist):
- ''' Create a Info.plist file for applications '''
+ """Create a Info.plist file for applications"""
package_type = 'APPL'
-class ComponentPropertyPlist():
- ''' Create a component property list to be used with pkgbuild '''
+class ComponentPropertyPlist:
+ """Create a component property list to be used with pkgbuild"""
- COMPONENT_TPL='''\
+ COMPONENT_TPL = """\
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
@@ -110,11 +112,12 @@ class ComponentPropertyPlist():
</dict>
</array>
</plist>
-'''
+"""
+
def __init__(self, description, rel_path):
self.desc = description
self.rel_path = rel_path
def save(self, filename):
with open(filename, 'w+') as f:
- f.write(INFO_PLIST_TPL % (self.rel_path, self.key))
+ f.write(INFO_PLIST_TPL % (self.rel_path, self.key))
diff --git a/cerbero/packages/osx/packager.py b/cerbero/packages/osx/packager.py
index c2b89ebc..ff88f856 100644
--- a/cerbero/packages/osx/packager.py
+++ b/cerbero/packages/osx/packager.py
@@ -25,11 +25,9 @@ from cerbero.ide.pkgconfig import PkgConfig
from cerbero.ide.xcode.fwlib import StaticFrameworkLibrary
from cerbero.errors import EmptyPackageError, FatalError
from cerbero.packages import PackagerBase, PackageType
-from cerbero.packages.package import Package, MetaPackage, App,\
- PackageBase, SDKPackage
+from cerbero.packages.package import Package, MetaPackage, App, PackageBase, SDKPackage
from cerbero.packages.osx.distribution import DistributionXML
-from cerbero.packages.osx.bundles import FrameworkBundlePackager,\
- ApplicationBundlePackager
+from cerbero.packages.osx.bundles import FrameworkBundlePackager, ApplicationBundlePackager
from cerbero.packages.osx.buildtools import PackageBuild, ProductBuild
from cerbero.utils import shell, _
from cerbero.tools import strip
@@ -37,9 +35,8 @@ from cerbero.utils import messages as m
class FrameworkHeadersMixin(object):
-
def _create_framework_headers(self, prefix, include_dirs, tmp):
- '''
+ """
To create a real OS X Framework we need to get rid of the versioned
directories for headers.
We should still keep the current tree in $PREFIX/include/ so that it
@@ -48,15 +45,13 @@ class FrameworkHeadersMixin(object):
directories with the help of pkg-config getting something like:
include/gstreamer-0.10/gst/gst.h -> Headers/gst/gst.h
include/zlib.h -> Headers/zlib.h
- '''
+ """
# Replace prefix path with the temporal directory one
- include_dirs = [x.replace(os.path.abspath(prefix), tmp)
- for x in include_dirs]
+ include_dirs = [x.replace(os.path.abspath(prefix), tmp) for x in include_dirs]
# Remove trailing /
include_dirs = [os.path.abspath(x) for x in include_dirs]
# Remove 'include' dir
- include_dirs = [x for x in include_dirs if not
- x.endswith(os.path.join(tmp, 'include'))]
+ include_dirs = [x for x in include_dirs if not x.endswith(os.path.join(tmp, 'include'))]
include_dirs = [x for x in include_dirs if os.path.isdir(x)]
include = os.path.join(tmp, 'include/')
@@ -82,8 +77,7 @@ class FrameworkHeadersMixin(object):
elif os.path.isdir(src):
shell.copy_dir(src, dest)
- def _copy_unversioned_headers(self, dirname, include, headers,
- include_dirs):
+ def _copy_unversioned_headers(self, dirname, include, headers, include_dirs):
if not os.path.exists(dirname):
return
@@ -103,24 +97,31 @@ class FrameworkHeadersMixin(object):
continue
else:
# Copy the directory
- self._copy_unversioned_headers(path, include,
- headers, include_dirs)
+ self._copy_unversioned_headers(path, include, headers, include_dirs)
class OSXPackage(PackagerBase, FrameworkHeadersMixin):
- '''
+ """
Creates an osx package from a L{cerbero.packages.package.Package}
@ivar package: package used to create the osx package
@type package: L{cerbero.packages.package.Package}
- '''
+ """
def __init__(self, config, package, store):
PackagerBase.__init__(self, config, package, store)
- def pack(self, output_dir, devel=True, force=False, keep_temp=False,
- version=None, install_dir=None, include_dirs=None,
- sdk_version=None):
+ def pack(
+ self,
+ output_dir,
+ devel=True,
+ force=False,
+ keep_temp=False,
+ version=None,
+ install_dir=None,
+ include_dirs=None,
+ sdk_version=None,
+ ):
PackagerBase.pack(self, output_dir, devel, force, keep_temp)
self.install_dir = install_dir or self.package.get_install_dir()
@@ -130,8 +131,7 @@ class OSXPackage(PackagerBase, FrameworkHeadersMixin):
# create the runtime package
try:
- runtime_path = self._create_package(PackageType.RUNTIME,
- output_dir, force)
+ runtime_path = self._create_package(PackageType.RUNTIME, output_dir, force)
except EmptyPackageError as e:
if not devel:
raise e
@@ -142,8 +142,7 @@ class OSXPackage(PackagerBase, FrameworkHeadersMixin):
try:
# create the development package
- devel_path = self._create_package(PackageType.DEVEL, output_dir,
- force)
+ devel_path = self._create_package(PackageType.DEVEL, output_dir, force)
except EmptyPackageError as e:
if runtime_path is None:
raise e
@@ -152,39 +151,44 @@ class OSXPackage(PackagerBase, FrameworkHeadersMixin):
return [runtime_path, devel_path]
def _get_install_dir(self):
- #if self.config.target_arch != Architecture.UNIVERSAL:
+ # if self.config.target_arch != Architecture.UNIVERSAL:
# arch_dir = self.config.target_arch
- #else:
+ # else:
# arch_dir = ''
- return os.path.join(self.install_dir, 'Versions',
- self.sdk_version) #, arch_dir)
+ return os.path.join(self.install_dir, 'Versions', self.sdk_version) # , arch_dir)
def _create_package(self, package_type, output_dir, force):
self.package.set_mode(package_type)
files = self.files_list(package_type, force)
- output_file = os.path.join(output_dir, '%s-%s-%s.pkg' %
- (self.package.name, self.version, self.config.target_arch))
+ output_file = os.path.join(
+ output_dir, '%s-%s-%s.pkg' % (self.package.name, self.version, self.config.target_arch)
+ )
tmp, root, resources = self._create_bundle(files, package_type)
packagebuild = PackageBuild()
- packagebuild.create_package(root, self.package.identifier(),
- self.package.version, self.package.shortdesc, output_file,
- self._get_install_dir(), scripts_path=resources)
+ packagebuild.create_package(
+ root,
+ self.package.identifier(),
+ self.package.version,
+ self.package.shortdesc,
+ output_file,
+ self._get_install_dir(),
+ scripts_path=resources,
+ )
shutil.rmtree(tmp)
return output_file
def _create_bundle(self, files, package_type):
- '''
+ """
Moves all the files that are going to be packaged to a temporary
directory to create the bundle
- '''
+ """
tmp = tempfile.mkdtemp()
root = os.path.join(tmp, 'Root')
resources = os.path.join(tmp, 'Resources')
for f in files:
in_path = os.path.join(self.config.prefix, f)
if not os.path.exists(in_path):
- m.warning("File %s is missing and won't be added to the "
- "package" % in_path)
+ m.warning("File %s is missing and won't be added to the " 'package' % in_path)
continue
out_path = os.path.join(root, f)
out_dir = os.path.split(out_path)[0]
@@ -197,23 +201,20 @@ class OSXPackage(PackagerBase, FrameworkHeadersMixin):
# Copy scripts to the Resources directory
os.makedirs(resources)
if os.path.exists(self.package.resources_preinstall):
- shutil.copy(os.path.join(self.package.resources_preinstall),
- os.path.join(resources, 'preinstall'))
+ shutil.copy(os.path.join(self.package.resources_preinstall), os.path.join(resources, 'preinstall'))
if os.path.exists(self.package.resources_postinstall):
- shutil.copy(os.path.join(self.package.resources_postinstall),
- os.path.join(resources, 'postinstall'))
+ shutil.copy(os.path.join(self.package.resources_postinstall), os.path.join(resources, 'postinstall'))
return tmp, root, resources
-
class ProductPackage(PackagerBase):
- '''
+ """
Creates an osx package from a L{cerbero.package.package.MetaPackage} using
productbuild.
@ivar package: package with the info to build the installer package
@type package: L{cerbero.packages.package.MetaPackage}
- '''
+ """
PKG_EXT = '.pkg'
home_folder = False
@@ -262,17 +263,18 @@ class ProductPackage(PackagerBase):
self.empty_packages = {PackageType.RUNTIME: [], PackageType.DEVEL: []}
self.packages_paths = {PackageType.RUNTIME: {}, PackageType.DEVEL: {}}
-
def _package_name(self, suffix):
- return '%s-%s-%s%s' % (self.package.name, self.package.version,
- self.config.target_arch, suffix)
+ return '%s-%s-%s%s' % (self.package.name, self.package.version, self.config.target_arch, suffix)
def _create_framework_bundle_packager(self):
- m.action(_("Creating framework package"))
- packager = FrameworkBundlePackager(self.package, 'osx-framework',
- 'GStreamer',
- 'GStreamer OSX Framework Bundle Version %s' % (self.package.version),
- '3ffe67c2-4565-411f-8287-e8faa892f853')
+ m.action(_('Creating framework package'))
+ packager = FrameworkBundlePackager(
+ self.package,
+ 'osx-framework',
+ 'GStreamer',
+ 'GStreamer OSX Framework Bundle Version %s' % (self.package.version),
+ '3ffe67c2-4565-411f-8287-e8faa892f853',
+ )
return packager
def _create_framework_bundle_layout(self, packager):
@@ -294,12 +296,18 @@ class ProductPackage(PackagerBase):
def _create_product(self, package_type):
self.package.set_mode(package_type)
- m.action(_("Creating Distribution.xml for package %s " % self.package))
- distro = DistributionXML(self.package, self.store, self.tmp,
+ m.action(_('Creating Distribution.xml for package %s ' % self.package))
+ distro = DistributionXML(
+ self.package,
+ self.store,
+ self.tmp,
self.packages_paths[package_type],
- self.empty_packages[package_type], package_type,
- self.config.target_arch, home_folder=self.home_folder)
- distro_path = os.path.join(self.tmp, "Distribution.xml")
+ self.empty_packages[package_type],
+ package_type,
+ self.config.target_arch,
+ home_folder=self.home_folder,
+ )
+ distro_path = os.path.join(self.tmp, 'Distribution.xml')
distro.write(distro_path)
output_file = os.path.join(self.output_dir, self._package_name('.pkg'))
output_file = os.path.abspath(output_file)
@@ -309,15 +317,20 @@ class ProductPackage(PackagerBase):
def _create_packages(self):
for p in self.packages:
- m.action(_("Creating package %s ") % p)
+ m.action(_('Creating package %s ') % p)
packager = OSXPackage(self.config, p, self.store)
try:
- paths = packager.pack(self.output_dir, self.devel, self.force,
- self.keep_temp, self.package.version,
- install_dir=self.package.get_install_dir(),
- include_dirs=self.include_dirs,
- sdk_version=self.package.sdk_version)
- m.action(_("Package created sucessfully"))
+ paths = packager.pack(
+ self.output_dir,
+ self.devel,
+ self.force,
+ self.keep_temp,
+ self.package.version,
+ install_dir=self.package.get_install_dir(),
+ include_dirs=self.include_dirs,
+ sdk_version=self.package.sdk_version,
+ )
+                m.action(_('Package created successfully'))
except EmptyPackageError:
paths = [None, None]
@@ -332,12 +345,11 @@ class ProductPackage(PackagerBase):
def _create_packages_dmg(self):
paths = list(self.packages_paths[PackageType.RUNTIME].values())
- dmg_file = os.path.join(self.output_dir,
- self._package_name('-packages.dmg'))
+ dmg_file = os.path.join(self.output_dir, self._package_name('-packages.dmg'))
- m.action(_("Creating image %s ") % dmg_file)
+ m.action(_('Creating image %s ') % dmg_file)
# create a temporary directory to store packages
- workdir = os.path.join (self.tmp, "hdidir")
+ workdir = os.path.join(self.tmp, 'hdidir')
os.makedirs(workdir)
try:
for p in paths:
@@ -350,12 +362,12 @@ class ProductPackage(PackagerBase):
class ApplicationPackage(PackagerBase):
- '''
+ """
Creates an osx package from a L{cerbero.packages.package.Package}
@ivar package: package used to create the osx package
@type package: L{cerbero.packages.package.Package}
- '''
+ """
def __init__(self, config, package, store):
PackagerBase.__init__(self, config, package, store)
@@ -385,17 +397,16 @@ class ApplicationPackage(PackagerBase):
return [dmg, pkg]
def _create_bundle(self):
- '''
+ """
Moves all the files that are going to be packaged to the bundle's
temporary directory
- '''
+ """
out_dir = os.path.join(self.appdir, 'Contents', 'Home')
os.makedirs(out_dir)
for f in self.package.files_list():
in_path = os.path.join(self.config.prefix, f)
if not os.path.exists(in_path):
- m.warning("File %s is missing and won't be added to the "
- "package" % in_path)
+ m.warning("File %s is missing and won't be added to the " 'package' % in_path)
continue
out_path = os.path.join(out_dir, f)
odir = os.path.split(out_path)[0]
@@ -404,7 +415,7 @@ class ApplicationPackage(PackagerBase):
shutil.copy(in_path, out_path)
def _create_app_bundle(self):
- ''' Creates the OS X Application bundle in temporary directory '''
+ """Creates the OS X Application bundle in temporary directory"""
packager = ApplicationBundlePackager(self.package)
return packager.create_bundle(self.appdir)
@@ -421,19 +432,16 @@ class ApplicationPackage(PackagerBase):
shell.symlink('/Applications', applications_link)
def _package_name(self, suffix):
- return '%s-%s-%s%s' % (self.package.name, self.package.version,
- self.config.target_arch, suffix)
+ return '%s-%s-%s%s' % (self.package.name, self.package.version, self.config.target_arch, suffix)
def _copy_scripts(self):
resources = os.path.join(self.tmp, 'Resources')
# Copy scripts to the Resources directory
os.makedirs(resources)
if os.path.exists(self.package.resources_preinstall):
- shutil.copy(os.path.join(self.package.resources_preinstall),
- os.path.join(resources, 'preinstall'))
+ shutil.copy(os.path.join(self.package.resources_preinstall), os.path.join(resources, 'preinstall'))
if os.path.exists(self.package.resources_postinstall):
- shutil.copy(os.path.join(self.package.resources_postinstall),
- os.path.join(resources, 'postinstall'))
+ shutil.copy(os.path.join(self.package.resources_postinstall), os.path.join(resources, 'postinstall'))
return resources
def _create_product(self):
@@ -441,28 +449,39 @@ class ApplicationPackage(PackagerBase):
resources = self._copy_scripts()
app_pkg_name = self._package_name('.pkg')
app_pkg = os.path.join(self.tmp, app_pkg_name)
- packagebuild.create_package(self.approot, self.package.identifier(),
- self.package.version, self.package.shortdesc, app_pkg,
- '/Applications', scripts_path=resources)
+ packagebuild.create_package(
+ self.approot,
+ self.package.identifier(),
+ self.package.version,
+ self.package.shortdesc,
+ app_pkg,
+ '/Applications',
+ scripts_path=resources,
+ )
self.package.packages = [(self.package.name, True, True)]
- m.action(_("Creating Distribution.xml for package %s " % self.package))
- distro = DistributionXML(self.package, self.store, self.tmp,
+ m.action(_('Creating Distribution.xml for package %s ' % self.package))
+ distro = DistributionXML(
+ self.package,
+ self.store,
+ self.tmp,
{self.package: app_pkg_name},
self.store.get_package_deps(self.package),
PackageType.RUNTIME,
- self.config.target_arch, home_folder=False)
+ self.config.target_arch,
+ home_folder=False,
+ )
distro_path = tempfile.NamedTemporaryFile().name
distro.write(distro_path)
output_file = os.path.join(self.output_dir, self._package_name('.pkg'))
output_file = os.path.abspath(output_file)
pb = ProductBuild()
- pb.create_package(distro_path, output_file,
- [self.package.relative_path('.'), self.tmp])
+ pb.create_package(distro_path, output_file, [self.package.relative_path('.'), self.tmp])
return output_file
def _create_dmg(self):
- dmg_file = os.path.join(self.output_dir, '%s-%s-%s.dmg' % (
- self.package.app_name, self.package.version, self.config.target_arch))
+ dmg_file = os.path.join(
+ self.output_dir, '%s-%s-%s.dmg' % (self.package.app_name, self.package.version, self.config.target_arch)
+ )
# Create Disk Image
cmd = ['hdiutil', 'create', dmg_file, '-volname', self.package.app_name, '-ov', '-srcfolder', self.approot]
shell.new_call(cmd)
@@ -470,7 +489,7 @@ class ApplicationPackage(PackagerBase):
class IOSPackage(ProductPackage, FrameworkHeadersMixin):
- '''
+ """
Creates an ios Framework package from a
L{cerbero.package.package.MetaPackage} using productbuild.
@@ -480,7 +499,7 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
listed in this package and the headers are copied unversionned to
the 'Headers' directory of the framework bundle.
The product package will only contain the ios-framework package
- '''
+ """
home_folder = True
user_resources = []
@@ -493,8 +512,7 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
self.fw_path = os.path.join(self.tmp, '%s.framework' % framework_name)
os.mkdir(self.fw_path)
- files = [os.path.join(self.config.prefix, x) for x in
- self.package.all_files_list()]
+ files = [os.path.join(self.config.prefix, x) for x in self.package.all_files_list()]
version_dir = os.path.join(self.fw_path, 'Versions', self.package.sdk_version)
libname = os.path.join(version_dir, framework_name)
@@ -502,8 +520,7 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
self._create_framework_bundle_layout(packager)
self._copy_templates(files)
self._copy_headers(files, version_dir)
- self._create_framework_headers(self.config.prefix,
- self.include_dirs, version_dir)
+ self._create_framework_headers(self.config.prefix, self.include_dirs, version_dir)
if os.path.exists(os.path.join(version_dir, 'include')):
shutil.rmtree(os.path.join(version_dir, 'include'))
if os.path.exists(os.path.join(version_dir, 'lib')):
@@ -517,24 +534,22 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
if isinstance(self.package, SDKPackage):
pkg_path = self._create_product(PackageType.DEVEL)
if self.package.user_resources:
- pkg_path = self._create_dmg (pkg_path,
- pkg_path.replace('.pkg', '.dmg'))
+ pkg_path = self._create_dmg(pkg_path, pkg_path.replace('.pkg', '.dmg'))
else:
- pkg_path = self._create_dmg (self.fw_path,
- os.path.join(output_dir, self._package_name('.dmg')))
+ pkg_path = self._create_dmg(self.fw_path, os.path.join(output_dir, self._package_name('.dmg')))
if not keep_temp:
shutil.rmtree(self.tmp)
return [pkg_path]
- def _copy_files (self, files, root):
+ def _copy_files(self, files, root):
for f in files:
out_path = f.replace(self.config.prefix, root)
out_dir = os.path.split(out_path)[0]
if not os.path.exists(out_dir):
os.makedirs(out_dir)
if os.path.isdir(f):
- shell.copy_dir (f, out_path)
+ shell.copy_dir(f, out_path)
else:
shutil.copy(f, out_path)
@@ -542,8 +557,7 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
templates_prefix = 'share/xcode/templates/ios'
templates = [x for x in files if templates_prefix in x]
for f in templates:
- out_path = f.replace(self.config.prefix,
- os.path.join(self.tmp, 'Templates'))
+ out_path = f.replace(self.config.prefix, os.path.join(self.tmp, 'Templates'))
out_path = out_path.replace(templates_prefix, '')
out_dir = os.path.split(out_path)[0]
if not os.path.exists(out_dir):
@@ -559,30 +573,45 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
include_dirs.append(os.path.join(self.config.prefix, 'include'))
for d in include_dirs:
include_files += [x for x in files if d in x]
- self._copy_files (include_files, version_dir)
+ self._copy_files(include_files, version_dir)
def _create_framework_bundle_packager(self):
- m.action(_("Creating framework package"))
- packager = FrameworkBundlePackager(self.package, 'ios-framework',
- 'GStreamer',
- 'GStreamer iOS Framework Bundle Version %s' % (self.package.version),
- '3ffe67c2-3421-411f-8287-e8faa892f853')
+ m.action(_('Creating framework package'))
+ packager = FrameworkBundlePackager(
+ self.package,
+ 'ios-framework',
+ 'GStreamer',
+ 'GStreamer iOS Framework Bundle Version %s' % (self.package.version),
+ '3ffe67c2-3421-411f-8287-e8faa892f853',
+ )
return packager
def _create_merged_lib(self, libname, files):
# Get the list of static libraries
static_files = [x for x in files if x.endswith('.a')]
- fwlib = StaticFrameworkLibrary(self.config.ios_min_version, self.config.target_distro,
- libname, libname, static_files, self.config.target_arch, env=self.config.env)
+ fwlib = StaticFrameworkLibrary(
+ self.config.ios_min_version,
+ self.config.target_distro,
+ libname,
+ libname,
+ static_files,
+ self.config.target_arch,
+ env=self.config.env,
+ )
fwlib.use_pkgconfig = False
if self.config.target_arch == Architecture.UNIVERSAL:
fwlib.universal_archs = self.config.universal_archs
fwlib.create()
def _package_name(self, suffix):
- return '%s-%s-%s-%s%s' % (self.package.name, self.package.version,
- self.config.target_platform, self.config.target_arch, suffix)
+ return '%s-%s-%s-%s%s' % (
+ self.package.name,
+ self.package.version,
+ self.config.target_platform,
+ self.config.target_arch,
+ suffix,
+ )
def _create_dmg(self, pkg_path, dmg_file):
# Create a new folder with the pkg and the user resources
@@ -591,7 +620,7 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
for r in self.package.user_resources:
r = os.path.join(self.config.prefix, r)
r_dir = os.path.split(r)[1]
- shell.copy_dir (r, os.path.join(dmg_dir, r_dir))
+ shell.copy_dir(r, os.path.join(dmg_dir, r_dir))
shutil.move(pkg_path, dmg_dir)
# Create Disk Image
@@ -599,13 +628,12 @@ class IOSPackage(ProductPackage, FrameworkHeadersMixin):
shell.new_call(cmd)
return dmg_file
-class Packager(object):
+class Packager(object):
def __new__(klass, config, package, store):
if config.target_platform == Platform.IOS:
if not isinstance(package, MetaPackage):
- raise FatalError ("iOS platform only support packages",
- "for MetaPackage")
+ raise FatalError('iOS platform only support packages', 'for MetaPackage')
return IOSPackage(config, package, store)
if isinstance(package, Package):
return OSXPackage(config, package, store)
@@ -618,5 +646,6 @@ class Packager(object):
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
+
register_packager(Distro.OS_X, Packager)
register_packager(Distro.IOS, Packager)
diff --git a/cerbero/packages/package.py b/cerbero/packages/package.py
index 5452dbcf..49228fa7 100644
--- a/cerbero/packages/package.py
+++ b/cerbero/packages/package.py
@@ -26,7 +26,7 @@ from cerbero.utils import remove_list_duplicates, messages as m
class PackageBase(object):
- '''
+ """
Base class for packages with the common field to describe a package
@cvar name: name of the package
@@ -82,7 +82,8 @@ class PackageBase(object):
@type strip: list
@cvar strip_excludes: files that won't be stripped
@type strip_excludes: list
- '''
+ """
+
name = 'default'
shortdesc = 'default'
longdesc = 'default'
@@ -116,51 +117,49 @@ class PackageBase(object):
self.package_mode = PackageType.RUNTIME
def prepare(self):
- '''
+ """
Can be overrided by subclasses to modify conditionally the package
- '''
+ """
pass
def load_files(self):
pass
def package_dir(self):
- '''
+ """
Gets the directory path where this package is stored
@return: directory path
@rtype: str
- '''
+ """
return os.path.dirname(self.__file__)
def relative_path(self, path):
- '''
+ """
Gets a path relative to the package's directory
@return: absolute path relative to the pacakge's directory
@rtype: str
- '''
+ """
return os.path.abspath(os.path.join(self.package_dir(), path))
def files_list(self):
raise NotImplemented("'files_list' must be implemented by subclasses")
def devel_files_list(self):
- raise NotImplemented("'devel_files_list' must be implemented by "
- "subclasses")
+ raise NotImplemented("'devel_files_list' must be implemented by " 'subclasses')
def all_files_list(self):
- raise NotImplemented("'all_files_list' must be implemented by "
- "subclasses")
+ raise NotImplemented("'all_files_list' must be implemented by " 'subclasses')
def pre_package(self):
- '''
+ """
Subclasses can override to to perform actions before packaging
- '''
+ """
pass
def post_package(self, paths, output_dir):
- '''
+ """
Subclasses can override it to perform actions after packaging.
@param paths: list of paths for the files created during packaging
@@ -170,9 +169,9 @@ class PackageBase(object):
@type output_dir: str
@return: list of paths with created files
@rtype: list
- '''
+ """
if hasattr(self, 'post_install'):
- m.warning("Package.post_install is deprecated, use Package.post_package instead.")
+ m.warning('Package.post_install is deprecated, use Package.post_package instead.')
return self.post_install(paths)
return paths
@@ -229,7 +228,7 @@ class PackageBase(object):
class Package(PackageBase):
- '''
+ """
Describes a set of files to produce disctribution packages for the
different target platforms. It provides the first level of packaging
allowing to create modular installers by aggregating several of them.
@@ -256,7 +255,7 @@ class Package(PackageBase):
@type platform_files_Devel: dict
@cvar osx_framework_library: name and link for the Framework library
@type osx_framework_library: tuple
- '''
+ """
deps = list()
files = list()
@@ -270,10 +269,8 @@ class Package(PackageBase):
self.cookbook = cookbook
def load_files(self):
- self._files = self.files + \
- self.platform_files.get(self.config.target_platform, [])
- self._files_devel = self.files_devel + \
- self.platform_files_devel.get(self.config.target_platform, [])
+ self._files = self.files + self.platform_files.get(self.config.target_platform, [])
+ self._files_devel = self.files_devel + self.platform_files_devel.get(self.config.target_platform, [])
self._parse_files()
def recipes_dependencies(self, use_devel=True):
@@ -297,11 +294,9 @@ class Package(PackageBase):
if len(categories) == 0 or FilesProvider.LIBS_CAT in categories:
r = self.cookbook.get_recipe(recipe_name)
if recipe_name in licenses:
- licenses[recipe_name].update(
- r.list_licenses_by_categories(categories))
+ licenses[recipe_name].update(r.list_licenses_by_categories(categories))
else:
- licenses[recipe_name] = \
- r.list_licenses_by_categories(categories)
+ licenses[recipe_name] = r.list_licenses_by_categories(categories)
return licenses
def files_list(self):
@@ -368,7 +363,7 @@ class Package(PackageBase):
class MetaPackage(PackageBase):
- '''
+ """
Group of L{cerbero.packages.package.Package} used to build a a modular
installer package.
@@ -396,7 +391,7 @@ class MetaPackage(PackageBase):
@type resources_distribution: string
@cvar user_resources: folders included in the .dmg for iOS packages
@type user_resources: list
- '''
+ """
packages = []
root_env_var = 'CERBERO_SDK_ROOT'
@@ -446,8 +441,7 @@ class MetaPackage(PackageBase):
ret = attr[:]
platform_attr_name = 'platform_%s' % name
if hasattr(self, platform_attr_name):
- platform_attr = PackageBase.__getattribute__(self,
- platform_attr_name)
+ platform_attr = PackageBase.__getattribute__(self, platform_attr_name)
if self.config.target_platform in platform_attr:
platform_list = platform_attr[self.config.target_platform]
# Add to packages list, but do not duplicate
@@ -460,7 +454,7 @@ class MetaPackage(PackageBase):
class SDKPackage(MetaPackage):
- '''
+ """
Creates an installer for SDK's.
On Windows the installer will add a new enviroment variable set in
@@ -480,7 +474,7 @@ class SDKPackage(MetaPackage):
@cvar osx_framework_library: (name, path) of the lib used for the Framework
@type osx_framework_library: tuple
- '''
+ """
# Can be overriden by the package file, f.ex.
# packages/gstreamer-1.0/gstreamer-1.0.package
@@ -495,12 +489,12 @@ class SDKPackage(MetaPackage):
class InstallerPackage(MetaPackage):
- '''
+ """
Creates an installer for a target SDK to extend it.
@cvar windows_sdk_reg: name of the required SDK
@type windows_sdk_reg: str
- '''
+ """
windows_sdk_reg = None
@@ -509,7 +503,7 @@ class InstallerPackage(MetaPackage):
class App(Package):
- '''
+ """
Create packages for applications.
An App package will not include development files and binaries could
be stripped when required. The App packager will not create a development
@@ -542,7 +536,7 @@ class App(Package):
@type osx_create_dmg: bool
@cvar osx_create_pkg: Packages the app in a pkg
@type osx_create_pkg: bool
- '''
+ """
app_name = None
app_recipe = None
@@ -623,17 +617,18 @@ class App(Package):
if wrapper is not None:
wrapper_file = self.relative_path('%s_%s' % (platform, wrapper))
else:
- wrapper_file = os.path.join(self.config.data_dir, 'templates',
- '%s_%s' % (self.wrapper, platform))
+ wrapper_file = os.path.join(self.config.data_dir, 'templates', '%s_%s' % (self.wrapper, platform))
if not os.path.exists(wrapper_file):
return None
with open(wrapper_file, 'r') as f:
content = f.read()
- content = content % {'prefix': self.config.prefix,
- 'py_prefix': self.config.py_prefix,
- 'cmd': self.config.prefix}
+ content = content % {
+ 'prefix': self.config.prefix,
+ 'py_prefix': self.config.py_prefix,
+ 'cmd': self.config.prefix,
+ }
return content
@@ -643,8 +638,7 @@ class App(Package):
ret = attr[:]
platform_attr_name = 'platform_%s' % name
if hasattr(self, platform_attr_name):
- platform_attr = PackageBase.__getattribute__(self,
- platform_attr_name)
+ platform_attr = PackageBase.__getattribute__(self, platform_attr_name)
if self.config.target_platform in platform_attr:
platform_list = platform_attr[self.config.target_platform]
ret.extend(platform_list)
diff --git a/cerbero/packages/packager.py b/cerbero/packages/packager.py
index c49ccaca..b2675a32 100644
--- a/cerbero/packages/packager.py
+++ b/cerbero/packages/packager.py
@@ -18,8 +18,8 @@
from cerbero.config import Distro, Platform
from cerbero.errors import FatalError
-from cerbero.utils import _, get_wix_prefix
-from cerbero.utils import messages as m
+from cerbero.utils import _, get_wix_prefix
+from cerbero.utils import messages as m
_packagers = {}
@@ -31,25 +31,30 @@ def register_packager(distro, klass, distro_version=None):
_packagers[distro][distro_version] = klass
-class Packager (object):
-
+class Packager(object):
def __new__(klass, config, package, store):
d = config.target_distro
v = config.target_distro_version
if d not in _packagers:
- raise FatalError(_("No packager available for the distro %s" % d))
+ raise FatalError(_('No packager available for the distro %s' % d))
if v not in _packagers[d]:
# Be tolerant with the distro version
- m.warning(_("No specific packager available for the distro "
- "version %s, using generic packager for distro %s" % (v, d)))
+ m.warning(
+ _(
+ 'No specific packager available for the distro '
+ 'version %s, using generic packager for distro %s' % (v, d)
+ )
+ )
v = None
if d == Distro.DEBIAN:
- m.warning('Creation of Debian packages is currently broken, please see '
- 'https://gitlab.freedesktop.org/gstreamer/cerbero/issues/56\n'
- 'Creating tarballs instead...')
+ m.warning(
+ 'Creation of Debian packages is currently broken, please see '
+ 'https://gitlab.freedesktop.org/gstreamer/cerbero/issues/56\n'
+ 'Creating tarballs instead...'
+ )
d = Distro.NONE
v = None
@@ -57,7 +62,7 @@ class Packager (object):
try:
get_wix_prefix(config)
except:
- m.warning("Cross-compiling for Windows and WIX not found, overriding Packager")
+ m.warning('Cross-compiling for Windows and WIX not found, overriding Packager')
d = Distro.NONE
return _packagers[d][v](config, package, store)
diff --git a/cerbero/packages/packagesstore.py b/cerbero/packages/packagesstore.py
index d2002aa3..5546f7a8 100644
--- a/cerbero/packages/packagesstore.py
+++ b/cerbero/packages/packagesstore.py
@@ -21,18 +21,17 @@ import traceback
from collections import defaultdict
from cerbero.build.cookbook import CookBook
-from cerbero.config import Platform, Architecture, Distro, DistroVersion,\
- License
+from cerbero.config import Platform, Architecture, Distro, DistroVersion, License
from cerbero.packages import package, PackageType
from cerbero.errors import FatalError, PackageNotFoundError
from cerbero.utils import _, shell, remove_list_duplicates, parse_file, imp_load_source
from cerbero.utils import messages as m
-class PackagesStore (object):
- '''
+class PackagesStore(object):
+ """
Stores a list of L{cerbero.packages.package.Package}
- '''
+ """
PKG_EXT = '.package'
@@ -47,43 +46,42 @@ class PackagesStore (object):
return
if not os.path.exists(config.packages_dir):
- raise FatalError(_("Packages dir %s not found") %
- config.packages_dir)
+ raise FatalError(_('Packages dir %s not found') % config.packages_dir)
self._load_packages()
def get_packages_list(self):
- '''
+ """
Gets the list of packages
@return: list of packages
@rtype: list
- '''
+ """
packages = list(self._packages.values())
packages.sort(key=lambda x: x.name)
return packages
def get_package(self, name):
- '''
+ """
Gets a recipe from its name
@param name: name of the package
@type name: str
@return: the package instance
@rtype: L{cerbero.packages.package.Package}
- '''
+ """
if name not in self._packages:
raise PackageNotFoundError(name)
return self._packages[name]
def get_package_deps(self, pkg, recursive=False):
- '''
+ """
Gets the dependencies of a package
@param package: name of the package or package instance
@type package: L{cerbero.packages.package.Package}
@return: a list with the package dependencies
@rtype: list
- '''
+ """
if isinstance(pkg, str):
pkg = self.get_package(pkg)
if isinstance(pkg, package.MetaPackage):
@@ -97,14 +95,14 @@ class PackagesStore (object):
return remove_list_duplicates(ret)
def get_package_files_list(self, name):
- '''
+ """
Gets the list of files provided by a package
@param name: name of the package
@type name: str
@return: the package instance
@rtype: L{cerbero.packages.package.PackageBase}
- '''
+ """
p = self.get_package(name)
if isinstance(p, package.MetaPackage):
@@ -113,16 +111,15 @@ class PackagesStore (object):
return sorted(p.files_list())
def add_package(self, package):
- '''
+ """
Adds a new package to the store
@param package: the package to add
@type package: L{cerbero.packages.package.PackageBase}
- '''
+ """
self._packages[package.name] = package
def _list_metapackage_deps(self, metapackage):
-
def get_package_deps(package_name, visited=[], depslist=[]):
if package_name in visited:
return
@@ -150,8 +147,7 @@ class PackagesStore (object):
packages = defaultdict(dict)
repos = self._config.get_packages_repos()
for reponame, (repodir, priority) in repos.items():
- packages[int(priority)].update(
- self._load_packages_from_dir(repodir))
+ packages[int(priority)].update(self._load_packages_from_dir(repodir))
# Add recipes by asceding pripority
for key in sorted(packages.keys()):
self._packages.update(packages[key])
@@ -173,7 +169,7 @@ class PackagesStore (object):
for f in packages:
file_packages = self._load_packages_from_file(f, custom)
if file_packages is None:
- m.warning(_("Could not found a valid package in %s") % f)
+                m.warning(_('Could not find a valid package in %s') % f)
continue
for p in file_packages:
packages_dict[p.name] = p
@@ -181,13 +177,19 @@ class PackagesStore (object):
def _load_packages_from_file(self, filepath, custom=None):
packages = []
- d = {'Platform': Platform, 'Architecture': Architecture,
- 'Distro': Distro, 'DistroVersion': DistroVersion,
- 'License': License, 'package': package,
- 'PackageType': PackageType, 'custom': custom}
+ d = {
+ 'Platform': Platform,
+ 'Architecture': Architecture,
+ 'Distro': Distro,
+ 'DistroVersion': DistroVersion,
+ 'License': License,
+ 'package': package,
+ 'PackageType': PackageType,
+ 'custom': custom,
+ }
d_keys = set(list(d.keys()))
try:
- new_d = d.copy ()
+ new_d = d.copy()
parse_file(filepath, new_d)
# List new objects parsed added to the globals dict
diff_vals = [new_d[x] for x in set(new_d.keys()) - d_keys]
@@ -197,18 +199,18 @@ class PackagesStore (object):
if pkg is not None:
packages.append(pkg)
except Exception:
- m.warning("Error loading package from file %s" % filepath)
+ m.warning('Error loading package from file %s' % filepath)
traceback.print_exc()
return packages
def _load_package_from_file(self, package_cls, filepath, custom=None):
- if issubclass (package_cls, package.App):
+ if issubclass(package_cls, package.App):
p = package_cls(self._config, self, self.cookbook)
- elif issubclass (package_cls, package.SDKPackage):
+ elif issubclass(package_cls, package.SDKPackage):
p = package_cls(self._config, self)
- elif issubclass (package_cls, package.InstallerPackage):
+ elif issubclass(package_cls, package.InstallerPackage):
p = package_cls(self._config, self)
- elif issubclass (package_cls, package.Package):
+ elif issubclass(package_cls, package.Package):
p = package_cls(self._config, self, self.cookbook)
else:
raise Exception('Uknown package type %s' % package_cls)
@@ -222,11 +224,8 @@ class PackagesStore (object):
p.load_files()
return p
-
def _is_package_class(self, cls):
# The final check for 'builtins' is to make sure we only take in
# account classes defined in the package file and not the imported
# ones in the, for example base classes that inherit from PackageBase
- return isinstance(cls, type) and \
- issubclass (cls, package.PackageBase) and \
- cls.__module__ == 'builtins' \ No newline at end of file
+ return isinstance(cls, type) and issubclass(cls, package.PackageBase) and cls.__module__ == 'builtins'
diff --git a/cerbero/packages/rpm.py b/cerbero/packages/rpm.py
index 21955824..7f4a66fa 100644
--- a/cerbero/packages/rpm.py
+++ b/cerbero/packages/rpm.py
@@ -30,7 +30,7 @@ from cerbero.utils import shell, _
from functools import reduce
-SPEC_TPL = '''
+SPEC_TPL = """
%%define _topdir %(topdir)s
%%define _package_name %(package_name)s
%%undefine _debugsource_packages
@@ -88,10 +88,10 @@ rm -rf $RPM_BUILD_ROOT
%(files)s
%(devel_files)s
-'''
+"""
-DEVEL_PACKAGE_TPL = '''
+DEVEL_PACKAGE_TPL = """
%%package devel
%(requires)s
Summary: %(summary)s
@@ -99,9 +99,9 @@ Provides: %(p_prefix)s%(name)s-devel
%%description devel
%(description)s
-'''
+"""
-META_SPEC_TPL = '''
+META_SPEC_TPL = """
%%define _topdir %(topdir)s
%%define _package_name %(package_name)s
@@ -134,7 +134,7 @@ rm -rf $RPM_BUILD_ROOT
%%files
%(devel_files)s
-'''
+"""
REQUIRE_TPL = 'Requires: %s\n'
DEVEL_TPL = '%%files devel \n%s'
@@ -145,7 +145,6 @@ POSTUN_TPL = '%%postun\n%s\n'
class RPMPackager(LinuxPackager):
-
def __init__(self, config, package, store):
LinuxPackager.__init__(self, config, package, store)
@@ -155,8 +154,7 @@ class RPMPackager(LinuxPackager):
tmpdir = tempfile.mkdtemp(dir=self.config.home_dir)
for d in ['BUILD', 'SOURCES', 'RPMS', 'SRPMS', 'SPECS']:
os.mkdir(os.path.join(tmpdir, d))
- return (tmpdir, os.path.join(tmpdir, 'RPMS'),
- os.path.join(tmpdir, 'SOURCES'))
+ return (tmpdir, os.path.join(tmpdir, 'RPMS'), os.path.join(tmpdir, 'SOURCES'))
def setup_source(self, tarball, tmpdir, packagedir, srcdir):
# move the tarball to SOURCES
@@ -182,8 +180,7 @@ class RPMPackager(LinuxPackager):
if isinstance(self.package, MetaPackage):
template = META_SPEC_TPL
- requires = \
- self._get_meta_requires(PackageType.RUNTIME)
+ requires = self._get_meta_requires(PackageType.RUNTIME)
self.package.has_devel_package = True
else:
self.package.has_devel_package = bool(devel_files)
@@ -197,34 +194,31 @@ class RPMPackager(LinuxPackager):
scripts = ''
if os.path.exists(self.package.resources_postinstall):
- scripts += POST_TPL % \
- open(self.package.resources_postinstall).read()
+ scripts += POST_TPL % open(self.package.resources_postinstall).read()
if os.path.exists(self.package.resources_postremove):
- scripts += POSTUN_TPL % \
- open(self.package.resources_postremove).read()
+ scripts += POSTUN_TPL % open(self.package.resources_postremove).read()
self._spec_str = template % {
- 'name': self.package.name,
- 'p_prefix': self.package_prefix,
- 'version': self.package.version,
- 'package_name': self.full_package_name,
- 'summary': self.package.shortdesc,
- 'description': self.package.longdesc != 'default' and \
- self.package.longdesc or self.package.shortdesc,
- 'licenses': ' and '.join([l.acronym for l in licenses]),
- 'packager': self.packager,
- 'vendor': self.package.vendor,
- 'url': URL_TPL % self.package.url if \
- self.package.url != 'default' else '',
- 'requires': requires,
- 'prefix': self.install_dir,
- 'source': tarname,
- 'topdir': tmpdir,
- 'devel_package': devel_package,
- 'devel_files': devel_files,
- 'files': runtime_files,
- 'sources_dir': self.config.sources,
- 'scripts': scripts}
+ 'name': self.package.name,
+ 'p_prefix': self.package_prefix,
+ 'version': self.package.version,
+ 'package_name': self.full_package_name,
+ 'summary': self.package.shortdesc,
+ 'description': self.package.longdesc != 'default' and self.package.longdesc or self.package.shortdesc,
+ 'licenses': ' and '.join([l.acronym for l in licenses]),
+ 'packager': self.packager,
+ 'vendor': self.package.vendor,
+ 'url': URL_TPL % self.package.url if self.package.url != 'default' else '',
+ 'requires': requires,
+ 'prefix': self.install_dir,
+ 'source': tarname,
+ 'topdir': tmpdir,
+ 'devel_package': devel_package,
+ 'devel_files': devel_files,
+ 'files': runtime_files,
+ 'sources_dir': self.config.sources,
+ 'scripts': scripts,
+ }
self.spec_path = os.path.join(tmpdir, '%s.spec' % self.package.name)
with open(self.spec_path, 'w') as f:
@@ -236,15 +230,15 @@ class RPMPackager(LinuxPackager):
elif self.config.target_arch == Architecture.X86_64:
target = 'x86_64-redhat-linux'
else:
- raise FatalError(_('Architecture %s not supported') % \
- self.config.target_arch)
+ raise FatalError(_('Architecture %s not supported') % self.config.target_arch)
extra_options = ''
if self._rpmbuild_support_nodebuginfo():
extra_options = '--nodebuginfo'
- shell.new_call('rpmbuild -bb %s --buildroot %s/buildroot --target %s %s' % (
- extra_options, tmpdir, target, self.spec_path))
+ shell.new_call(
+ 'rpmbuild -bb %s --buildroot %s/buildroot --target %s %s' % (extra_options, tmpdir, target, self.spec_path)
+ )
paths = []
for d in os.listdir(packagedir):
@@ -260,12 +254,10 @@ class RPMPackager(LinuxPackager):
if not self.config.distro == Distro.REDHAT:
return False
- if ("fedora" in self.config.distro_version
- and self.config.distro_version > DistroVersion.FEDORA_26):
+ if 'fedora' in self.config.distro_version and self.config.distro_version > DistroVersion.FEDORA_26:
return True
- if ("redhat" in self.config.distro_version
- and self.config.distro_version > DistroVersion.REDHAT_7):
+ if 'redhat' in self.config.distro_version and self.config.distro_version > DistroVersion.REDHAT_7:
return True
return False
@@ -274,8 +266,7 @@ class RPMPackager(LinuxPackager):
devel_suffix = ''
if package_type == PackageType.DEVEL:
devel_suffix = '-devel'
- requires, recommends, suggests = \
- self.get_meta_requires(package_type, devel_suffix)
+ requires, recommends, suggests = self.get_meta_requires(package_type, devel_suffix)
requires = ''.join([REQUIRE_TPL % x for x in requires + recommends])
return requires
@@ -295,7 +286,7 @@ class RPMPackager(LinuxPackager):
files.append(f + 'c')
if f + 'o' not in files:
files.append(f + 'o')
- return '\n'.join([os.path.join('%{prefix}', x) for x in files])
+ return '\n'.join([os.path.join('%{prefix}', x) for x in files])
def _devel_package_and_files(self):
args = {}
@@ -315,7 +306,6 @@ class RPMPackager(LinuxPackager):
class Packager(object):
-
def __new__(klass, config, package, store):
return RPMPackager(config, package, store)
@@ -323,5 +313,6 @@ class Packager(object):
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
+
register_packager(Distro.REDHAT, Packager)
register_packager(Distro.SUSE, Packager)
diff --git a/cerbero/packages/wix.py b/cerbero/packages/wix.py
index 92e5a79f..259f404f 100644
--- a/cerbero/packages/wix.py
+++ b/cerbero/packages/wix.py
@@ -26,11 +26,11 @@ from cerbero.config import Platform, Architecture
from cerbero.packages import PackageType
from cerbero.packages.package import Package, SDKPackage, App, InstallerPackage
-WIX_SCHEMA = "http://schemas.microsoft.com/wix/2006/wi"
+WIX_SCHEMA = 'http://schemas.microsoft.com/wix/2006/wi'
class VSTemplatePackage(Package):
- '''
+ """
A Package for Visual Studio templates
@cvar: vs_template_name: name of the template
@@ -39,7 +39,7 @@ class VSTemplatePackage(Package):
@type vs_template_dir: string
@cvar: vs_wizard_dir: directory of the wizard files
@type vs_wizard_dir: string
- '''
+ """
vs_template_dir = None
vs_wizard_dir = None
@@ -51,13 +51,11 @@ class VSTemplatePackage(Package):
def devel_files_list(self):
files = []
for f in [self.vs_template_dir, self.vs_wizard_dir]:
- files += shell.ls_dir(os.path.join(self.config.prefix, f),
- self.config.prefix)
+ files += shell.ls_dir(os.path.join(self.config.prefix, f), self.config.prefix)
return files
-class WixBase():
-
+class WixBase:
def __init__(self, config, package):
self.config = config
self.package = package
@@ -87,7 +85,7 @@ class WixBase():
return required and 'disallow' or 'allow'
def _add_root(self):
- self.root = etree.Element("Wix", xmlns=WIX_SCHEMA)
+ self.root = etree.Element('Wix', xmlns=WIX_SCHEMA)
def _format_id(self, string, replace_dots=False):
ret = string
@@ -120,8 +118,7 @@ class WixBase():
return ret
def _format_dir_id(self, string, path, replace_dots=False):
- ret = self._format_id(string, replace_dots) + '_' +\
- self._format_path_id(path, replace_dots)
+ ret = self._format_id(string, replace_dots) + '_' + self._format_path_id(path, replace_dots)
ret = self._make_unique_id(ret)
return ret
@@ -129,31 +126,33 @@ class WixBase():
return self._format_id(string, replace_dots) + '_group'
def _get_uuid(self):
- return "%s" % uuid.uuid1()
+ return '%s' % uuid.uuid1()
def _format_version(self, version):
# The heuristics to generate a valid version can get
# very complicated, so we leave it to the user
- url = "https://docs.microsoft.com/en-us/windows/desktop/Msi/productversion"
- versions = (version.split(".", 3) + ["0", "0", "0"])[:3]
+ url = 'https://docs.microsoft.com/en-us/windows/desktop/Msi/productversion'
+ versions = (version.split('.', 3) + ['0', '0', '0'])[:3]
for idx, val in enumerate(versions):
i = int(val)
if idx in [0, 1] and i > 255:
- raise FatalError("Invalid version string, major and minor"
- "must have a maximum value of 255.\nSee: {}".format(url))
+ raise FatalError(
+ 'Invalid version string, major and minor ' 'must have a maximum value of 255.\nSee: {}'.format(url)
+ )
elif idx in [2] and i > 65535:
- raise FatalError("Invalid version string, build "
- "must have a maximum value of 65535.\nSee: {}".format(url))
- return ".".join(versions)
+ raise FatalError(
+ 'Invalid version string, build ' 'must have a maximum value of 65535.\nSee: {}'.format(url)
+ )
+ return '.'.join(versions)
class MergeModule(WixBase):
- '''
+ """
Creates WiX merge modules from cerbero packages
@ivar package: package with the info to build the merge package
@type pacakge: L{cerbero.packages.package.Package}
- '''
+ """
def __init__(self, config, files_list, package):
WixBase.__init__(self, config, package)
@@ -168,21 +167,26 @@ class MergeModule(WixBase):
self._add_files()
def _add_module(self):
- self.module = etree.SubElement(self.root, "Module",
- Id=self._format_id(self.package.name),
- Version=self._format_version(self.package.version),
- Language='1033')
+ self.module = etree.SubElement(
+ self.root,
+ 'Module',
+ Id=self._format_id(self.package.name),
+ Version=self._format_version(self.package.version),
+ Language='1033',
+ )
def _add_package(self):
- self.pkg = etree.SubElement(self.module, "Package",
- Id=self.package.uuid or self._get_uuid(),
- Description=self.package.shortdesc,
- Comments=self.package.longdesc,
- Manufacturer=self.package.vendor)
+ self.pkg = etree.SubElement(
+ self.module,
+ 'Package',
+ Id=self.package.uuid or self._get_uuid(),
+ Description=self.package.shortdesc,
+ Comments=self.package.longdesc,
+ Manufacturer=self.package.vendor,
+ )
def _add_root_dir(self):
- self.rdir = etree.SubElement(self.module, "Directory",
- Id='TARGETDIR', Name='SourceDir')
+ self.rdir = etree.SubElement(self.module, 'Directory', Id='TARGETDIR', Name='SourceDir')
self._dirnodes[''] = self.rdir
def _add_files(self):
@@ -200,9 +204,9 @@ class MergeModule(WixBase):
self._add_directory(parentpath)
parent = self._dirnodes[parentpath]
- dirnode = etree.SubElement(parent, "Directory",
- Id=self._format_path_id(dirpath),
- Name=os.path.split(dirpath)[1])
+ dirnode = etree.SubElement(
+ parent, 'Directory', Id=self._format_path_id(dirpath), Name=os.path.split(dirpath)[1]
+ )
self._dirnodes[dirpath] = dirnode
def _add_file(self, filepath):
@@ -210,24 +214,22 @@ class MergeModule(WixBase):
self._add_directory(dirpath)
dirnode = self._dirnodes[dirpath]
- component = etree.SubElement(dirnode, 'Component',
- Id=self._format_path_id(filepath), Guid=self._get_uuid())
+ component = etree.SubElement(dirnode, 'Component', Id=self._format_path_id(filepath), Guid=self._get_uuid())
filepath = os.path.join(self.prefix, filepath)
p_id = self._format_path_id(filepath, True)
if self._with_wine:
filepath = to_winepath(filepath)
- etree.SubElement(component, 'File', Id=p_id, Name=filename,
- Source=filepath)
+ etree.SubElement(component, 'File', Id=p_id, Name=filename, Source=filepath)
class Fragment(WixBase):
- '''
+ """
Creates WiX fragment from cerbero packages
@ivar package: package with the info to build the merge package
@type pacakge: L{cerbero.packages.package.Package}
- '''
+ """
def __init__(self, config, files_list, package):
WixBase.__init__(self, config, package)
@@ -243,15 +245,15 @@ class Fragment(WixBase):
self._add_files()
def _add_fragment(self):
- self.fragment = etree.SubElement(self.root, "Fragment")
+ self.fragment = etree.SubElement(self.root, 'Fragment')
def _add_component_group(self):
- self.component_group = etree.SubElement(self.fragment, "ComponentGroup",
- Id=self._format_group_id(self.package.name))
+ self.component_group = etree.SubElement(
+ self.fragment, 'ComponentGroup', Id=self._format_group_id(self.package.name)
+ )
def _add_root_dir(self):
- self.rdir = etree.SubElement(self.fragment, "DirectoryRef",
- Id='SDKROOTDIR')
+ self.rdir = etree.SubElement(self.fragment, 'DirectoryRef', Id='SDKROOTDIR')
self._dirnodes[''] = self.rdir
def _add_files(self):
@@ -271,9 +273,7 @@ class Fragment(WixBase):
parent = self._dirnodes[parentpath]
dirid = self._format_dir_id(self.package.name, dirpath)
- dirnode = etree.SubElement(parent, "Directory",
- Id=dirid,
- Name=os.path.split(dirpath)[1])
+ dirnode = etree.SubElement(parent, 'Directory', Id=dirid, Name=os.path.split(dirpath)[1])
self._dirnodes[dirpath] = dirnode
self._dirids[dirpath] = dirid
@@ -282,24 +282,27 @@ class Fragment(WixBase):
self._add_directory(dirpath)
dirid = self._dirids[dirpath]
- component = etree.SubElement(self.component_group, 'Component',
- Id=self._format_dir_id(self.package.name, filepath),
- Guid=self._get_uuid(), Directory=dirid)
+ component = etree.SubElement(
+ self.component_group,
+ 'Component',
+ Id=self._format_dir_id(self.package.name, filepath),
+ Guid=self._get_uuid(),
+ Directory=dirid,
+ )
filepath = os.path.join(self.prefix, filepath)
p_id = self._format_dir_id(self.package.name, filepath, True)
if self._with_wine:
filepath = to_winepath(filepath)
- etree.SubElement(component, 'File', Id=p_id, Name=filename,
- Source=filepath)
+ etree.SubElement(component, 'File', Id=p_id, Name=filename, Source=filepath)
class VSMergeModule(MergeModule):
- '''
+ """
Creates a Merge Module for Visual Studio templates
@ivar package: package with the info to build the merge package
@type pacakge: L{cerbero.packages.package.Package}
- '''
+ """
def __init__(self, config, files_list, package):
MergeModule.__init__(self, config, files_list, package)
@@ -309,35 +312,34 @@ class VSMergeModule(MergeModule):
self._add_vs_templates()
def _add_vs_templates(self):
- etree.SubElement(self.module, 'PropertyRef',
- Id='VS_PROJECTTEMPLATES_DIR')
- etree.SubElement(self.module, 'PropertyRef',
- Id='VS_WIZARDS_DIR')
- etree.SubElement(self.module, 'CustomActionRef',
- Id='VS2010InstallVSTemplates')
- etree.SubElement(self.module, 'CustomActionRef',
- Id='VC2010InstallVSTemplates')
- prop = etree.SubElement(self.module, 'SetProperty',
- Id="VSPROJECTTEMPLATESDIR", After="AppSearch",
- Value="[VS_PROJECTTEMPLATES_DIR]\\%s" %
- self.package.vs_template_name or "")
- prop.text = "VS_PROJECTTEMPLATES_DIR"
- prop = etree.SubElement(self.module, 'SetProperty',
- Id="VSWIZARDSDIR", After="AppSearch",
- Value="[VS_WIZARDS_DIR]\\%s" %
- os.path.split(self.package.vs_template_dir)[1])
- prop.text = "VS_WIZARDS_DIR"
-
- self._wizard_dir = etree.SubElement(self.rdir, 'Directory',
- Id='VSPROJECTTEMPLATESDIR')
- self._tpl_dir = etree.SubElement(self.rdir, 'Directory',
- Id='VSWIZARDSDIR')
+ etree.SubElement(self.module, 'PropertyRef', Id='VS_PROJECTTEMPLATES_DIR')
+ etree.SubElement(self.module, 'PropertyRef', Id='VS_WIZARDS_DIR')
+ etree.SubElement(self.module, 'CustomActionRef', Id='VS2010InstallVSTemplates')
+ etree.SubElement(self.module, 'CustomActionRef', Id='VC2010InstallVSTemplates')
+ prop = etree.SubElement(
+ self.module,
+ 'SetProperty',
+ Id='VSPROJECTTEMPLATESDIR',
+ After='AppSearch',
+ Value='[VS_PROJECTTEMPLATES_DIR]\\%s' % self.package.vs_template_name or '',
+ )
+ prop.text = 'VS_PROJECTTEMPLATES_DIR'
+ prop = etree.SubElement(
+ self.module,
+ 'SetProperty',
+ Id='VSWIZARDSDIR',
+ After='AppSearch',
+ Value='[VS_WIZARDS_DIR]\\%s' % os.path.split(self.package.vs_template_dir)[1],
+ )
+ prop.text = 'VS_WIZARDS_DIR'
+
+ self._wizard_dir = etree.SubElement(self.rdir, 'Directory', Id='VSPROJECTTEMPLATESDIR')
+ self._tpl_dir = etree.SubElement(self.rdir, 'Directory', Id='VSWIZARDSDIR')
self._dirnodes[self.package.vs_template_dir] = self._tpl_dir
self._dirnodes[self.package.vs_wizard_dir] = self._wizard_dir
class WixConfig(WixBase):
-
wix_config = 'wix/Config.wxi'
def __init__(self, config, package):
@@ -350,23 +352,21 @@ class WixConfig(WixBase):
self.ui_type = 'WixUI_Mondo'
def write(self, output_dir):
- config_out_path = os.path.join(output_dir,
- os.path.basename(self.wix_config))
- shutil.copy(self.config_path, os.path.join(output_dir,
- os.path.basename(self.wix_config)))
+ config_out_path = os.path.join(output_dir, os.path.basename(self.wix_config))
+ shutil.copy(self.config_path, os.path.join(output_dir, os.path.basename(self.wix_config)))
replacements = {
- "@ProductID@": '*',
- "@UpgradeCode@": self.package.get_wix_upgrade_code(),
- "@Language@": '1033',
- "@Manufacturer@": self.package.vendor,
- "@Version@": self._format_version(self.package.version),
- "@PackageComments@": self.package.longdesc,
- "@Description@": self.package.shortdesc,
- "@ProjectURL": self.package.url,
- "@ProductName@": self._product_name(),
- "@ProgramFilesFolder@": self._program_folder(),
- "@Platform@": self._platform(),
- "@UIType@": self.ui_type
+ '@ProductID@': '*',
+ '@UpgradeCode@': self.package.get_wix_upgrade_code(),
+ '@Language@': '1033',
+ '@Manufacturer@': self.package.vendor,
+ '@Version@': self._format_version(self.package.version),
+ '@PackageComments@': self.package.longdesc,
+ '@Description@': self.package.shortdesc,
+ '@ProjectURL': self.package.url,
+ '@ProductName@': self._product_name(),
+ '@ProgramFilesFolder@': self._program_folder(),
+ '@Platform@': self._platform(),
+ '@UIType@': self.ui_type,
}
shell.replace(config_out_path, replacements)
return config_out_path
@@ -386,12 +386,12 @@ class WixConfig(WixBase):
class MSI(WixBase):
- '''Creates an installer package from a
+ """Creates an installer package from a
L{cerbero.packages.package.MetaPackage}
@ivar package: meta package used to create the installer package
@type package: L{cerbero.packages.package.MetaPackage}
- '''
+ """
wix_sources = 'wix/installer.wxs'
REG_ROOT = 'HKLM'
@@ -408,16 +408,15 @@ class MSI(WixBase):
self._parse_sources()
self._add_include()
self._customize_ui()
- self.product = self.root.find(".//Product")
+ self.product = self.root.find('.//Product')
self._add_vs_properties()
def _parse_sources(self):
- sources_path = self.package.resources_wix_installer or \
- os.path.join(self.config.data_dir, self.wix_sources)
+ sources_path = self.package.resources_wix_installer or os.path.join(self.config.data_dir, self.wix_sources)
with open(sources_path, 'r') as f:
self.root = etree.fromstring(f.read())
for element in self.root.iter():
- element.tag = element.tag[len(WIX_SCHEMA) + 2:]
+ element.tag = element.tag[len(WIX_SCHEMA) + 2 :]
self.root.set('xmlns', WIX_SCHEMA)
self.product = self.root.find('Product')
@@ -442,33 +441,47 @@ class MSI(WixBase):
self._add_get_install_dir_from_registry()
def _add_application_merge_module(self):
- self.main_feature = etree.SubElement(self.product, "Feature",
- Id=self._format_id(self.package.name + '_app'),
- Title=self.package.title, Level='1', Display="expand",
- AllowAdvertise="no", ConfigurableDirectory="INSTALLDIR")
+ self.main_feature = etree.SubElement(
+ self.product,
+ 'Feature',
+ Id=self._format_id(self.package.name + '_app'),
+ Title=self.package.title,
+ Level='1',
+ Display='expand',
+ AllowAdvertise='no',
+ ConfigurableDirectory='INSTALLDIR',
+ )
if self.package.wix_use_fragment:
- etree.SubElement(self.main_feature, 'ComponentGroupRef',
- Id=self._format_group_id(self.package.name))
+ etree.SubElement(self.main_feature, 'ComponentGroupRef', Id=self._format_group_id(self.package.name))
else:
self._add_merge_module(self.package, True, True, [])
- etree.SubElement(self.installdir, 'Merge',
- Id=self._package_id(self.package.name), Language='1033',
- SourceFile=self.packages_deps[self.package], DiskId='1')
+ etree.SubElement(
+ self.installdir,
+ 'Merge',
+ Id=self._package_id(self.package.name),
+ Language='1033',
+ SourceFile=self.packages_deps[self.package],
+ DiskId='1',
+ )
def _add_merge_modules(self):
- self.main_feature = etree.SubElement(self.product, "Feature",
- Id=self._format_id(self.package.name),
- Title=self.package.title, Level='1', Display="expand",
- AllowAdvertise="no", ConfigurableDirectory="INSTALLDIR")
-
- packages = [(self.store.get_package(x[0]), x[1], x[2]) for x in
- self.package.packages]
+ self.main_feature = etree.SubElement(
+ self.product,
+ 'Feature',
+ Id=self._format_id(self.package.name),
+ Title=self.package.title,
+ Level='1',
+ Display='expand',
+ AllowAdvertise='no',
+ ConfigurableDirectory='INSTALLDIR',
+ )
+
+ packages = [(self.store.get_package(x[0]), x[1], x[2]) for x in self.package.packages]
# Remove empty packages
packages = [x for x in packages if x[0] in list(self.packages_deps.keys())]
if len(packages) == 0:
- raise FatalError("All packages are empty: %s" %
- [x[0] for x in self.package.packages])
+ raise FatalError('All packages are empty: %s' % [x[0] for x in self.package.packages])
# Fill the list of required packages, which are the ones installed by
# a package that is always installed
@@ -480,38 +493,37 @@ class MSI(WixBase):
if not self.package.wix_use_fragment:
for package, required, selected in packages:
if package in self.packages_deps:
- self._add_merge_module(package, required, selected,
- required_packages)
+ self._add_merge_module(package, required, selected, required_packages)
# Add a merge module ref for all the packages or use ComponentGroupRef when using
# wix_use_fragment
for package, path in self.packages_deps.items():
if self.package.wix_use_fragment:
- etree.SubElement(self.main_feature, 'ComponentGroupRef',
- Id=self._format_group_id(package.name))
+ etree.SubElement(self.main_feature, 'ComponentGroupRef', Id=self._format_group_id(package.name))
else:
- etree.SubElement(self.installdir, 'Merge',
- Id=self._package_id(package.name), Language='1033',
- SourceFile=path, DiskId='1')
+ etree.SubElement(
+ self.installdir,
+ 'Merge',
+ Id=self._package_id(package.name),
+ Language='1033',
+ SourceFile=path,
+ DiskId='1',
+ )
def _add_dir(self, parent, dir_id, name):
- tdir = etree.SubElement(parent, "Directory",
- Id=dir_id, Name=name)
+ tdir = etree.SubElement(parent, 'Directory', Id=dir_id, Name=name)
return tdir
def _add_install_dir(self):
self.target_dir = self._add_dir(self.product, 'TARGETDIR', 'SourceDir')
# FIXME: Add a way to install to ProgramFilesFolder
if isinstance(self.package, App):
- installdir = self._add_dir(self.target_dir,
- '$(var.PlatformProgramFilesFolder)', 'ProgramFilesFolder')
- self.installdir = self._add_dir(installdir, 'INSTALLDIR',
- '$(var.ProductName)')
+ installdir = self._add_dir(self.target_dir, '$(var.PlatformProgramFilesFolder)', 'ProgramFilesFolder')
+ self.installdir = self._add_dir(installdir, 'INSTALLDIR', '$(var.ProductName)')
self.bindir = self._add_dir(self.installdir, 'INSTALLBINDIR', 'bin')
else:
- installdir = self._add_dir(self.target_dir, 'INSTALLDIR',
- self.package.get_install_dir())
- versiondir = self._add_dir(installdir, "Version", self.package.sdk_version)
+ installdir = self._add_dir(self.target_dir, 'INSTALLDIR', self.package.get_install_dir())
+ versiondir = self._add_dir(installdir, 'Version', self.package.sdk_version)
# archdir has to be toolchain-specific: mingw_x86_64, uwp-debug_arm64, etc
platform_arch = '_'.join(self.config._get_toolchain_target_platform_arch())
archdir = self._add_dir(versiondir, 'Architecture', platform_arch)
@@ -532,34 +544,40 @@ class MSI(WixBase):
def _customize_ui(self):
# Banner Dialog and License
- for path, var in [(self.BANNER_BMP, 'BannerBmp'),
- (self.DIALOG_BMP, 'DialogBmp'),
- (self.LICENSE_RTF, 'LicenseRtf')]:
+ for path, var in [
+ (self.BANNER_BMP, 'BannerBmp'),
+ (self.DIALOG_BMP, 'DialogBmp'),
+ (self.LICENSE_RTF, 'LicenseRtf'),
+ ]:
path = self.package.relative_path(path)
if self._with_wine:
path = to_winepath(path)
if os.path.exists(path):
- etree.SubElement(self.product, 'WixVariable',
- Id='WixUI%s' % var, Value=path)
+ etree.SubElement(self.product, 'WixVariable', Id='WixUI%s' % var, Value=path)
# Icon
path = self.package.relative_path(self.ICON)
if self._with_wine:
path = to_winepath(path)
if os.path.exists(path):
- etree.SubElement(self.product, 'Icon',
- Id='MainIcon', SourceFile=path)
+ etree.SubElement(self.product, 'Icon', Id='MainIcon', SourceFile=path)
def _add_sdk_root_env_variable(self):
- envcomponent = etree.SubElement(self.installdir, 'Component',
- Id='EnvironmentVariables', Guid=self._get_uuid())
+ envcomponent = etree.SubElement(self.installdir, 'Component', Id='EnvironmentVariables', Guid=self._get_uuid())
# archdir has to be toolchain-specific: mingw_x86_64, uwp-debug_arm64, etc
platform_arch = '_'.join(self.config._get_toolchain_target_platform_arch())
root_env_var = self.package.get_root_env_var(platform_arch)
- env = etree.SubElement(envcomponent, 'Environment', Id="SdkRootEnv",
- Action="set", Part="all", Name=root_env_var, System="yes",
- Permanent="no", Value='[SDKROOTDIR]')
- etree.SubElement(self.main_feature, 'ComponentRef',
- Id='EnvironmentVariables')
+ env = etree.SubElement(
+ envcomponent,
+ 'Environment',
+ Id='SdkRootEnv',
+ Action='set',
+ Part='all',
+ Name=root_env_var,
+ System='yes',
+ Permanent='no',
+ Value='[SDKROOTDIR]',
+ )
+ etree.SubElement(self.main_feature, 'ComponentRef', Id='EnvironmentVariables')
def _add_registry_install_dir(self):
# Get the package name. Both devel and runtime will share the same
@@ -568,27 +586,43 @@ class MSI(WixBase):
# Add INSTALLDIR in the registry only for the runtime package
if self.package.package_mode == PackageType.RUNTIME:
- regcomponent = etree.SubElement(self.installdir, 'Component',
- Id='RegistryInstallDir', Guid=self._get_uuid())
- regkey = etree.SubElement(regcomponent, 'RegistryKey',
- Id='RegistryInstallDirRoot',
- ForceCreateOnInstall='yes',
- ForceDeleteOnUninstall='yes',
- Key=self._registry_key(name),
- Root=self.REG_ROOT)
- etree.SubElement(regkey, 'RegistryValue',
- Id='RegistryInstallDirValue',
- Type='string', Name='InstallDir', Value='[INSTALLDIR]')
- etree.SubElement(regkey, 'RegistryValue',
- Id='RegistryVersionValue',
- Type='string', Name='Version',
- Value=self.package.version)
- etree.SubElement(regkey, 'RegistryValue',
- Id='RegistrySDKVersionValue',
- Type='string', Name='SdkVersion',
- Value=self.package.sdk_version)
- etree.SubElement(self.main_feature, 'ComponentRef',
- Id='RegistryInstallDir')
+ regcomponent = etree.SubElement(
+ self.installdir, 'Component', Id='RegistryInstallDir', Guid=self._get_uuid()
+ )
+ regkey = etree.SubElement(
+ regcomponent,
+ 'RegistryKey',
+ Id='RegistryInstallDirRoot',
+ ForceCreateOnInstall='yes',
+ ForceDeleteOnUninstall='yes',
+ Key=self._registry_key(name),
+ Root=self.REG_ROOT,
+ )
+ etree.SubElement(
+ regkey,
+ 'RegistryValue',
+ Id='RegistryInstallDirValue',
+ Type='string',
+ Name='InstallDir',
+ Value='[INSTALLDIR]',
+ )
+ etree.SubElement(
+ regkey,
+ 'RegistryValue',
+ Id='RegistryVersionValue',
+ Type='string',
+ Name='Version',
+ Value=self.package.version,
+ )
+ etree.SubElement(
+ regkey,
+ 'RegistryValue',
+ Id='RegistrySDKVersionValue',
+ Type='string',
+ Name='SdkVersion',
+ Value=self.package.sdk_version,
+ )
+ etree.SubElement(self.main_feature, 'ComponentRef', Id='RegistryInstallDir')
def _add_get_install_dir_from_registry(self):
name = self._package_var().replace(' ', '')
@@ -598,18 +632,22 @@ class MSI(WixBase):
key = self._registry_key(name)
# Get INSTALLDIR from the registry key
- installdir_prop = etree.SubElement(self.product, 'Property',
- Id='INSTALLDIR')
- etree.SubElement(installdir_prop, 'RegistrySearch', Id=name,
- Type="raw", Root=self.REG_ROOT, Key=key, Name='InstallDir')
+ installdir_prop = etree.SubElement(self.product, 'Property', Id='INSTALLDIR')
+ etree.SubElement(
+ installdir_prop, 'RegistrySearch', Id=name, Type='raw', Root=self.REG_ROOT, Key=key, Name='InstallDir'
+ )
- def _add_merge_module(self, package, required, selected,
- required_packages):
+ def _add_merge_module(self, package, required, selected, required_packages):
# Create a new feature for this package
- feature = etree.SubElement(self.main_feature, 'Feature',
- Id=self._format_id(package.name), Title=package.shortdesc,
- Level=self._format_level(selected),
- Display='expand', Absent=self._format_absent(required))
+ feature = etree.SubElement(
+ self.main_feature,
+ 'Feature',
+ Id=self._format_id(package.name),
+ Title=package.shortdesc,
+ Level=self._format_level(selected),
+ Display='expand',
+ Absent=self._format_absent(required),
+ )
deps = self.store.get_package_deps(package, True)
# Add all the merge modules required by this package, but excluding
@@ -623,39 +661,43 @@ class MSI(WixBase):
mergerefs = [x for x in mergerefs if x in list(self.packages_deps.keys())]
for p in mergerefs:
- etree.SubElement(feature, "MergeRef",
- Id=self._package_id(p.name))
- etree.SubElement(feature, "MergeRef",
- Id=self._package_id(package.name))
+ etree.SubElement(feature, 'MergeRef', Id=self._package_id(p.name))
+ etree.SubElement(feature, 'MergeRef', Id=self._package_id(package.name))
if isinstance(package, VSTemplatePackage):
- c = etree.SubElement(feature, "Condition", Level="0")
- c.text = "NOT VS2010DEVENV AND NOT VC2010EXPRESS_IDE"
+ c = etree.SubElement(feature, 'Condition', Level='0')
+ c.text = 'NOT VS2010DEVENV AND NOT VC2010EXPRESS_IDE'
def _add_start_menu_shortcuts(self):
# Create a folder with the application name in the Start Menu folder
- programs = etree.SubElement(self.target_dir, 'Directory',
- Id='ProgramMenuFolder')
- etree.SubElement(programs, 'Directory', Id='ApplicationProgramsFolder',
- Name='$(var.ProductName)')
+ programs = etree.SubElement(self.target_dir, 'Directory', Id='ProgramMenuFolder')
+ etree.SubElement(programs, 'Directory', Id='ApplicationProgramsFolder', Name='$(var.ProductName)')
# Add the shortcut to the installer package
- appf = etree.SubElement(self.product, 'DirectoryRef',
- Id='ApplicationProgramsFolder')
- apps = etree.SubElement(appf, 'Component', Id='ApplicationShortcut',
- Guid=self._get_uuid())
+ appf = etree.SubElement(self.product, 'DirectoryRef', Id='ApplicationProgramsFolder')
+ apps = etree.SubElement(appf, 'Component', Id='ApplicationShortcut', Guid=self._get_uuid())
for desc, path, _, _ in self.package.commands[self.config.target_platform]:
- etree.SubElement(apps, 'Shortcut',
- Id='ApplicationStartMenuShortcut', Name=desc,
- Description=desc, Target='[INSTALLBINDIR]' + path,
- WorkingDirectory='INSTALLBINDIR',
- Icon='MainIcon')
- etree.SubElement(apps, 'RemoveFolder', Id='ApplicationProgramsFolder',
- On='uninstall')
- etree.SubElement(apps, 'RegistryValue', Root='HKCU',
- Key=r'Software\Microsoft\%s' % self.package.name,
- Name='installed', Type='integer', Value='1', KeyPath='yes')
+ etree.SubElement(
+ apps,
+ 'Shortcut',
+ Id='ApplicationStartMenuShortcut',
+ Name=desc,
+ Description=desc,
+ Target='[INSTALLBINDIR]' + path,
+ WorkingDirectory='INSTALLBINDIR',
+ Icon='MainIcon',
+ )
+ etree.SubElement(apps, 'RemoveFolder', Id='ApplicationProgramsFolder', On='uninstall')
+ etree.SubElement(
+ apps,
+ 'RegistryValue',
+ Root='HKCU',
+ Key=r'Software\Microsoft\%s' % self.package.name,
+ Name='installed',
+ Type='integer',
+ Value='1',
+ KeyPath='yes',
+ )
# Ref it in the main feature
- etree.SubElement(self.main_feature, 'ComponentRef',
- Id='ApplicationShortcut')
+ etree.SubElement(self.main_feature, 'ComponentRef', Id='ApplicationShortcut')
def _add_vs_properties(self):
etree.SubElement(self.product, 'PropertyRef', Id='VS2010DEVENV')
diff --git a/cerbero/packages/wix_packager.py b/cerbero/packages/wix_packager.py
index 2522081f..0f262c94 100644
--- a/cerbero/packages/wix_packager.py
+++ b/cerbero/packages/wix_packager.py
@@ -33,7 +33,6 @@ from cerbero.config import Platform
class MergeModulePackager(PackagerBase):
-
def __init__(self, config, package, store):
PackagerBase.__init__(self, config, package, store)
self._with_wine = config.platform != Platform.WINDOWS
@@ -45,19 +44,16 @@ class MergeModulePackager(PackagerBase):
paths = []
# create runtime package
- p = self.create_merge_module(output_dir, PackageType.RUNTIME, force,
- self.package.version, keep_temp)
+ p = self.create_merge_module(output_dir, PackageType.RUNTIME, force, self.package.version, keep_temp)
paths.append(p)
if devel:
- p = self.create_merge_module(output_dir, PackageType.DEVEL, force,
- self.package.version, keep_temp)
+ p = self.create_merge_module(output_dir, PackageType.DEVEL, force, self.package.version, keep_temp)
paths.append(p)
return paths
- def create_merge_module(self, output_dir, package_type, force, version,
- keep_temp, keep_strip_temp_dir=False):
+ def create_merge_module(self, output_dir, package_type, force, version, keep_temp, keep_strip_temp_dir=False):
self.package.set_mode(package_type)
files_list = self.files_list(package_type, force)
if isinstance(self.package, VSTemplatePackage):
@@ -82,21 +78,20 @@ class MergeModulePackager(PackagerBase):
package_name = self._package_name(version)
if self.package.wix_use_fragment:
mergemodule = Fragment(self.config, files_list, self.package)
- sources = [os.path.join(output_dir, "%s-fragment.wxs" % package_name)]
- wixobjs = [os.path.join(output_dir, "%s-fragment.wixobj" % package_name)]
+ sources = [os.path.join(output_dir, '%s-fragment.wxs' % package_name)]
+ wixobjs = [os.path.join(output_dir, '%s-fragment.wixobj' % package_name)]
else:
mergemodule = MergeModule(self.config, files_list, self.package)
- sources = [os.path.join(output_dir, "%s.wxs" % package_name)]
- wixobjs = [os.path.join(output_dir, "%s.wixobj" % package_name)]
+ sources = [os.path.join(output_dir, '%s.wxs' % package_name)]
+ wixobjs = [os.path.join(output_dir, '%s.wixobj' % package_name)]
if tmpdir:
mergemodule.prefix = tmpdir
mergemodule.write(sources[0])
for x in ['utils']:
- wixobjs.append(os.path.join(output_dir, "%s.wixobj" % x))
- sources.append(os.path.join(os.path.abspath(self.config.data_dir),
- 'wix/%s.wxs' % x))
+ wixobjs.append(os.path.join(output_dir, '%s.wixobj' % x))
+ sources.append(os.path.join(os.path.abspath(self.config.data_dir), 'wix/%s.wxs' % x))
if self._with_wine:
final_wixobjs = ['"{}"'.format(to_winepath(x)) for x in wixobjs]
final_sources = ['"{}"'.format(to_winepath(x)) for x in sources]
@@ -140,12 +135,10 @@ class MergeModulePackager(PackagerBase):
platform = 'mingw'
if self.config.variants.visualstudio and self.config.variants.vscrt == 'mdd':
platform += '+debug'
- return "%s-%s-%s-%s" % (self.package.name, platform,
- self.config.target_arch, version)
+ return '%s-%s-%s-%s' % (self.package.name, platform, self.config.target_arch, version)
class MSIPackager(PackagerBase):
-
UI_EXT = '-ext WixUIExtension'
UTIL_EXT = '-ext WixUtilExtension'
@@ -176,8 +169,7 @@ class MSIPackager(PackagerBase):
# create zip with merge modules
if not self.package.wix_use_fragment:
self.package.set_mode(PackageType.RUNTIME)
- zipf = ZipFile(os.path.join(self.output_dir, '%s-merge-modules.zip' %
- self._package_name()), 'w')
+ zipf = ZipFile(os.path.join(self.output_dir, '%s-merge-modules.zip' % self._package_name()), 'w')
for p in self.merge_modules[PackageType.RUNTIME]:
zipf.write(p)
zipf.close()
@@ -198,8 +190,7 @@ class MSIPackager(PackagerBase):
platform = 'mingw'
if self.config.variants.visualstudio and self.config.variants.vscrt == 'mdd':
platform += '+debug'
- return "%s-%s-%s-%s" % (self.package.name, platform,
- self.config.target_arch, self.package.version)
+ return '%s-%s-%s-%s' % (self.package.name, platform, self.config.target_arch, self.package.version)
def _create_msi_installer(self, package_type):
self.package.set_mode(package_type)
@@ -216,17 +207,17 @@ class MSIPackager(PackagerBase):
for package in self.packagedeps:
package.set_mode(package_type)
package.wix_use_fragment = self.package.wix_use_fragment
- m.action("Creating Merge Module for %s" % package)
+ m.action('Creating Merge Module for %s' % package)
packager = MergeModulePackager(self.config, package, self.store)
try:
- path = packager.create_merge_module(self.output_dir,
- package_type, self.force, self.package.version,
- self.keep_temp, True)
+ path = packager.create_merge_module(
+ self.output_dir, package_type, self.force, self.package.version, self.keep_temp, True
+ )
packagedeps[package] = path[0]
if path[1]:
tmp_dirs.append(path[1])
except EmptyPackageError:
- m.warning("Package %s is empty" % package)
+ m.warning('Package %s is empty' % package)
self.packagedeps = packagedeps
self.merge_modules[package_type] = list(packagedeps.values())
return tmp_dirs
@@ -237,21 +228,17 @@ class MSIPackager(PackagerBase):
return config_path
def _create_msi(self, config_path, tmp_dirs):
- sources = [os.path.join(self.output_dir, "%s.wxs" %
- self._package_name())]
- msi = MSI(self.config, self.package, self.packagedeps, config_path,
- self.store)
+ sources = [os.path.join(self.output_dir, '%s.wxs' % self._package_name())]
+ msi = MSI(self.config, self.package, self.packagedeps, config_path, self.store)
msi.write(sources[0])
- wixobjs = [os.path.join(self.output_dir, "%s.wixobj" %
- self._package_name())]
+ wixobjs = [os.path.join(self.output_dir, '%s.wixobj' % self._package_name())]
if self.package.wix_use_fragment:
wixobjs.extend(self.merge_modules[self.package.package_mode])
for x in ['utils']:
- wixobjs.append(os.path.join(self.output_dir, "%s.wixobj" % x))
- sources.append(os.path.join(os.path.abspath(self.config.data_dir),
- 'wix/%s.wxs' % x))
+ wixobjs.append(os.path.join(self.output_dir, '%s.wixobj' % x))
+ sources.append(os.path.join(os.path.abspath(self.config.data_dir), 'wix/%s.wxs' % x))
if self._with_wine:
final_wixobjs = ['"{}"'.format(to_winepath(x)) for x in wixobjs]
@@ -262,8 +249,7 @@ class MSIPackager(PackagerBase):
candle = Candle(self.wix_prefix, self._with_wine)
candle.compile(' '.join(final_sources), self.output_dir, env=self.config.env)
- light = Light(self.wix_prefix, self._with_wine,
- "%s %s" % (self.UI_EXT, self.UTIL_EXT))
+ light = Light(self.wix_prefix, self._with_wine, '%s %s' % (self.UI_EXT, self.UTIL_EXT))
path = light.compile(final_wixobjs, self._package_name(), self.output_dir, env=self.config.env)
# Clean up
@@ -284,7 +270,6 @@ class MSIPackager(PackagerBase):
class Packager(object):
-
def __new__(klass, config, package, store):
if isinstance(package, Package):
return MergeModulePackager(config, package, store)
@@ -293,7 +278,7 @@ class Packager(object):
class Candle(object):
- ''' Compile WiX objects with candle '''
+ """Compile WiX objects with candle"""
cmd = '%(wine)s %(q)s%(prefix)s/candle.exe%(q)s %(source)s'
@@ -314,10 +299,9 @@ class Candle(object):
class Light(object):
- ''' Compile WiX objects with light'''
+ """Compile WiX objects with light"""
- cmd = '%(wine)s %(q)s%(prefix)s/light.exe%(q)s %(objects)s -o '\
- '%(msi)s.%(ext)s -sval %(extra)s'
+ cmd = '%(wine)s %(q)s%(prefix)s/light.exe%(q)s %(objects)s -o ' '%(msi)s.%(ext)s -sval %(extra)s'
def __init__(self, wix_prefix, with_wine, extra=''):
self.options = {}
@@ -338,8 +322,7 @@ class Light(object):
else:
self.options['ext'] = 'msi'
shell.new_call(self.cmd % self.options, output_dir, env=env)
- msi_file_path = os.path.join(output_dir,
- '%(msi)s.%(ext)s' % self.options)
+ msi_file_path = os.path.join(output_dir, '%(msi)s.%(ext)s' % self.options)
if self.options['wine'] == 'wine':
shell.new_call(['chmod', '0755', msi_file_path])
return msi_file_path
@@ -348,4 +331,5 @@ class Light(object):
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
+
register_packager(Distro.WINDOWS, Packager)
diff --git a/cerbero/tools/depstracker.py b/cerbero/tools/depstracker.py
index 0b0f0109..65bd5deb 100644
--- a/cerbero/tools/depstracker.py
+++ b/cerbero/tools/depstracker.py
@@ -22,8 +22,7 @@ from cerbero.config import Platform
from cerbero.utils import shell
-class RecursiveLister():
-
+class RecursiveLister:
def list_file_deps(self, prefix, path):
raise NotImplemented()
@@ -45,41 +44,33 @@ class RecursiveLister():
class ObjdumpLister(RecursiveLister):
-
def list_file_deps(self, prefix, path):
env = os.environ.copy()
env['LC_ALL'] = 'C'
files = shell.check_output(['objdump', '-xw', path], env=env).splitlines()
- prog = re.compile(r"(?i)^.*DLL[^:]*: (\S+\.dll)$")
- files = [prog.sub(r"\1", x) for x in files if prog.match(x) is not None]
- files = [os.path.join(prefix, 'bin', x) for x in files if
- x.lower().endswith('dll')]
+ prog = re.compile(r'(?i)^.*DLL[^:]*: (\S+\.dll)$')
+ files = [prog.sub(r'\1', x) for x in files if prog.match(x) is not None]
+ files = [os.path.join(prefix, 'bin', x) for x in files if x.lower().endswith('dll')]
return [os.path.realpath(x) for x in files if os.path.exists(x)]
class OtoolLister(RecursiveLister):
-
def list_file_deps(self, prefix, path):
files = shell.check_output(['otool', '-L', path]).splitlines()[1:]
# Shared libraries might be relocated, we look for files with the
# prefix or starting with @rpath
- files = [x.strip().split(' ')[0] for x in files if prefix in x or "@rpath" in x]
- return [x.replace("@rpath/", prefix) for x in files]
+ files = [x.strip().split(' ')[0] for x in files if prefix in x or '@rpath' in x]
+ return [x.replace('@rpath/', prefix) for x in files]
-class LddLister():
-
- def list_deps(self, prefix, path):
+class LddLister:
+ def list_deps(self, prefix, path):
files = shell.check_output(['ldd', path]).splitlines()
return [x.split(' ')[2] for x in files if prefix in x]
-class DepsTracker():
-
- BACKENDS = {
- Platform.WINDOWS: ObjdumpLister,
- Platform.LINUX: LddLister,
- Platform.DARWIN: OtoolLister}
+class DepsTracker:
+ BACKENDS = {Platform.WINDOWS: ObjdumpLister, Platform.LINUX: LddLister, Platform.DARWIN: OtoolLister}
def __init__(self, platform, prefix):
self.libs_deps = {}
diff --git a/cerbero/tools/libtool.py b/cerbero/tools/libtool.py
index 90014e13..3f5071ee 100644
--- a/cerbero/tools/libtool.py
+++ b/cerbero/tools/libtool.py
@@ -22,11 +22,11 @@ from cerbero.enums import Platform
from cerbero.utils import shell
from cerbero.errors import FatalError
+
def get_libtool_versions(version, soversion=0):
parts = version.split('.')
if not parts or len(parts) > 3:
- raise FatalError('Version must contain three or fewer parts: {!r}'
- ''.format(version))
+ raise FatalError('Version must contain three or fewer parts: {!r}' ''.format(version))
try:
major = int(parts[0])
minor = 0
@@ -43,12 +43,13 @@ def get_libtool_versions(version, soversion=0):
binary_age = (100 * minor) + micro
return (soversion, binary_age - interface_age, interface_age)
+
class LibtoolLibrary(object):
- '''
+ """
Helper class to create libtool libraries files (.la)
- '''
+ """
- LIBTOOL_TPL = '''\
+ LIBTOOL_TPL = """\
# %(libname)s - a libtool library file
# Generated by libtool (GNU libtool) 2.4.2 Debian-2.4.2-1ubuntu1
#
@@ -90,10 +91,9 @@ dlpreopen=''
# Directory that this library needs to be installed in:
libdir='%(libdir)s'
-'''
+"""
- def __init__(self, libname, major, minor, micro, libdir, platform,
- deps=None, static_only=False):
+ def __init__(self, libname, major, minor, micro, libdir, platform, deps=None, static_only=False):
self.libtool_vars = {
'libname': '',
'dlname': '',
@@ -103,7 +103,8 @@ libdir='%(libdir)s'
'current': '',
'age': '',
'revision': '',
- 'libdir': ''}
+ 'libdir': '',
+ }
if platform == Platform.WINDOWS:
shared_ext = 'dll.a'
@@ -139,8 +140,7 @@ libdir='%(libdir)s'
self.change_value('libname', self.laname)
if not static_only:
self.change_value('dlname', dlname)
- self.change_value('library_names', '%s %s %s' % (dlname_all, dlname,
- dlname_base))
+ self.change_value('library_names', '%s %s %s' % (dlname_all, dlname, dlname_base))
self.change_value('old_library', old_library)
self.change_value('current', minor_str)
self.change_value('age', minor_str)
diff --git a/cerbero/tools/osxrelocator.py b/cerbero/tools/osxrelocator.py
index 51c6b3c8..68f4800d 100755
--- a/cerbero/tools/osxrelocator.py
+++ b/cerbero/tools/osxrelocator.py
@@ -27,14 +27,14 @@ OTOOL_CMD = 'otool'
class OSXRelocator(object):
- '''
+ """
Wrapper for OS X's install_name_tool and otool commands to help
relocating shared libraries.
It parses lib/ /libexec and bin/ directories, changes the prefix path of
the shared libraries that an object file uses and changes it's library
ID if the file is a shared library.
- '''
+ """
def __init__(self, root, lib_prefix, recursive, logfile=None):
self.root = root
@@ -108,8 +108,7 @@ class OSXRelocator(object):
def parse_dir(self, dir_path, filters=None):
for dirpath, dirnames, filenames in os.walk(dir_path):
for f in filenames:
- if filters is not None and \
- os.path.splitext(f)[1] not in filters:
+ if filters is not None and os.path.splitext(f)[1] not in filters:
continue
self.change_libs_path(os.path.join(dirpath, f))
if not self.recursive:
@@ -162,19 +161,25 @@ class OSXRelocator(object):
class Main(object):
-
def run(self):
# We use OptionParser instead of ArgumentsParse because this script
# might be run in OS X 10.6 or older, which do not provide the argparse
# module
import optparse
- usage = "usage: %prog [options] library_path old_prefix new_prefix"
- description = 'Rellocates object files changing the dependant '\
- ' dynamic libraries location path with a new one'
+
+ usage = 'usage: %prog [options] library_path old_prefix new_prefix'
+ description = (
+ 'Rellocates object files changing the dependant ' ' dynamic libraries location path with a new one'
+ )
parser = optparse.OptionParser(usage=usage, description=description)
- parser.add_option('-r', '--recursive', action='store_true',
- default=False, dest='recursive',
- help='Scan directories recursively')
+ parser.add_option(
+ '-r',
+ '--recursive',
+ action='store_true',
+ default=False,
+ dest='recursive',
+ help='Scan directories recursively',
+ )
options, args = parser.parse_args()
if len(args) != 3:
@@ -185,6 +190,6 @@ class Main(object):
exit(0)
-if __name__ == "__main__":
+if __name__ == '__main__':
main = Main()
main.run()
diff --git a/cerbero/tools/osxuniversalgenerator.py b/cerbero/tools/osxuniversalgenerator.py
index c4b20def..9123d763 100755
--- a/cerbero/tools/osxuniversalgenerator.py
+++ b/cerbero/tools/osxuniversalgenerator.py
@@ -25,7 +25,7 @@ import sys
import asyncio
import os.path
-if __name__ == "__main__":
+if __name__ == '__main__':
# Add cerbero dir to path when invoked as a script so
# that the cerbero imports below resolve correctly.
parent = os.path.dirname(__file__)
@@ -36,12 +36,14 @@ if __name__ == "__main__":
from cerbero.utils import shell, run_tasks, run_until_complete
from cerbero.tools.osxrelocator import OSXRelocator
+
def get_parent_prefix(f, dirs):
dirs = dirs[:]
while dirs:
dir_ = os.path.join(os.path.realpath(dirs.pop(0)), '')
if f.startswith(dir_):
- yield(dir_)
+ yield (dir_)
+
file_types = [
('Mach-O', 'merge'),
@@ -69,8 +71,9 @@ file_types = [
('directory', 'recurse'),
]
+
class OSXUniversalGenerator(object):
- '''
+ """
Wrapper for OS X's lipo command to help generating universal binaries
from single arch binaries.
@@ -85,16 +88,16 @@ class OSXUniversalGenerator(object):
as they should be results from building the same project to different
architectures
- '''
+ """
LIPO_CMD = 'lipo'
FILE_CMD = 'file'
def __init__(self, output_root, logfile=None):
- '''
+ """
@output_root: the output directory where the result will be generated
- '''
+ """
self.output_root = output_root
if self.output_root.endswith('/'):
self.output_root = self.output_root[:-1]
@@ -124,8 +127,7 @@ class OSXUniversalGenerator(object):
tmp_inputs.append(tmp)
shutil.copy(f, tmp.name)
prefix_to_replace = [d for d in dirs if d in f][0]
- relocator = OSXRelocator (self.output_root, prefix_to_replace,
- False, logfile=self.logfile)
+ relocator = OSXRelocator(self.output_root, prefix_to_replace, False, logfile=self.logfile)
# since we are using a temporary file, we must force the library id
# name to real one and not based on the filename
relocator.relocate_file(tmp.name, f)
@@ -136,14 +138,14 @@ class OSXUniversalGenerator(object):
tmp.close()
def get_file_type(self, filepath):
- return shell.check_output([self.FILE_CMD, '-bh', filepath])[:-1] #remove trailing \n
+ return shell.check_output([self.FILE_CMD, '-bh', filepath])[:-1] # remove trailing \n
async def _detect_merge_action(self, files_list):
actions = []
for f in files_list:
if not os.path.exists(f):
- continue #TODO what can we do here? fontconfig has
- #some random generated filenames it seems
+ continue # TODO what can we do here? fontconfig has
+ # some random generated filenames it seems
ftype = self.get_file_type(f)
action = ''
for ft in file_types:
@@ -161,18 +163,17 @@ class OSXUniversalGenerator(object):
raise Exception('Unexpected file type %s %s' % (str(ftype), f))
actions.append(action)
if len(actions) == 0:
- return 'skip' #we should skip this one, the file doesn't exist
+ return 'skip' # we should skip this one, the file doesn't exist
all_same = all(x == actions[0] for x in actions)
if not all_same:
- raise Exception('Different file types found: %s : %s' \
- % (str(ftype), str(files_list)))
+ raise Exception('Different file types found: %s : %s' % (str(ftype), str(files_list)))
return actions[0]
async def do_merge(self, filepath, dirs):
full_filepaths = [os.path.join(d, filepath) for d in dirs]
action = await self._detect_merge_action(full_filepaths)
- #pick the first file as the base one in case of copying/linking
+ # pick the first file as the base one in case of copying/linking
current_file = full_filepaths[0]
output_file = os.path.join(self.output_root, filepath)
output_dir = os.path.dirname(output_file)
@@ -188,9 +189,9 @@ class OSXUniversalGenerator(object):
os.makedirs(output_dir)
await self.create_universal_file(output_file, full_filepaths, dirs)
elif action == 'skip':
- pass #just pass
+ pass # just pass
elif action == 'recurse':
- self.merge_dirs (full_filepaths, output_file)
+ self.merge_dirs(full_filepaths, output_file)
else:
raise Exception('unexpected action %s' % action)
@@ -198,11 +199,13 @@ class OSXUniversalGenerator(object):
self.missing = []
queue = asyncio.Queue()
+
async def parse_dirs_worker():
while True:
current_file, dirs = await queue.get()
await self.do_merge(current_file, dirs)
queue.task_done()
+
async def queue_done():
await queue.join()
@@ -226,15 +229,15 @@ class OSXUniversalGenerator(object):
if filters is not None and os.path.splitext(f)[1] not in filters:
continue
current_file = os.path.join(current_dir, f)
- queue.put_nowait ((current_file, dirs))
+ queue.put_nowait((current_file, dirs))
async def parse_dirs_main():
tasks = []
for i in range(4):
- tasks.append(asyncio.ensure_future (parse_dirs_worker()))
- await run_tasks (tasks, queue_done())
+ tasks.append(asyncio.ensure_future(parse_dirs_worker()))
+ await run_tasks(tasks, queue_done())
- print ("parsing dirs")
+ print('parsing dirs')
run_until_complete(parse_dirs_main())
def _copy(self, src, dest):
@@ -246,14 +249,14 @@ class OSXUniversalGenerator(object):
self._copy(src, dest)
replacements = {}
for d in dirs:
- replacements[d]=self.output_root
+ replacements[d] = self.output_root
shell.replace(dest, replacements)
def _link(self, src, dest, filepath):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
if os.path.lexists(dest):
- return #link exists, skip it
+ return # link exists, skip it
# read the link, and extract the relative filepath
target = os.readlink(src)
@@ -270,14 +273,13 @@ class OSXUniversalGenerator(object):
class Main(object):
-
def run(self):
# We use OptionParser instead of ArgumentsParse because this script might
# be run in OS X 10.6 or older, which do not provide the argparse module
import optparse
- usage = "usage: %prog [options] outputdir inputdir1 inputdir2 ..."
- description='Merges multiple architecture build trees into a single '\
- 'universal binary build tree'
+
+ usage = 'usage: %prog [options] outputdir inputdir1 inputdir2 ...'
+ description = 'Merges multiple architecture build trees into a single ' 'universal binary build tree'
parser = optparse.OptionParser(usage=usage, description=description)
options, args = parser.parse_args()
if len(args) < 3:
@@ -287,6 +289,7 @@ class Main(object):
generator.merge_dirs(args[1:])
exit(0)
-if __name__ == "__main__":
+
+if __name__ == '__main__':
main = Main()
main.run()
diff --git a/cerbero/tools/pkgconfig.py b/cerbero/tools/pkgconfig.py
index 968a6f67..14880895 100644
--- a/cerbero/tools/pkgconfig.py
+++ b/cerbero/tools/pkgconfig.py
@@ -20,16 +20,15 @@ import os
class PkgConfigWritter(object):
-
- VARIABLES_TPL = '''\
+ VARIABLES_TPL = """\
prefix=%(prefix)s
exec_prefix=${prefix}
libdir=${prefix}/%(rel_libdir)s
includedir=${prefix}/%(rel_incldir)s
datarootdir=${prefix}/%(rel_sharedir)s
datadir=${datarootdir}
-'''
- BODY_TPL = '''\
+"""
+ BODY_TPL = """\
Name: %(name)s
Description: %(desc)s
@@ -39,7 +38,7 @@ Requires.private: %(req_priv)s
Libs: %(libs)s
Libs.private: %(libs_priv)s
Cflags: %(cflags)s
-'''
+"""
rel_incldir = 'include'
rel_sharedir = 'share'
@@ -68,7 +67,8 @@ Cflags: %(cflags)s
'prefix': self.prefix,
'rel_libdir': self.rel_libdir,
'rel_incldir': self.rel_incldir,
- 'rel_sharedir': self.rel_sharedir}
+ 'rel_sharedir': self.rel_sharedir,
+ }
def _get_body(self):
return self.BODY_TPL % {
@@ -79,4 +79,5 @@ Cflags: %(cflags)s
'req_priv': self.req_priv,
'libs': self.libs,
'libs_priv': self.libs_priv,
- 'cflags': self.cflags}
+ 'cflags': self.cflags,
+ }
diff --git a/cerbero/tools/strip.py b/cerbero/tools/strip.py
index f5ac030b..edba7259 100644
--- a/cerbero/tools/strip.py
+++ b/cerbero/tools/strip.py
@@ -25,7 +25,7 @@ from cerbero.utils import shell, run_until_complete, messages as m
class Strip(object):
- '''Wrapper for the strip tool'''
+ """Wrapper for the strip tool"""
def __init__(self, config, excludes=None, keep_symbols=None):
self.config = config
diff --git a/cerbero/utils/__init__.py b/cerbero/utils/__init__.py
index 1d65b432..6b57aa2d 100644
--- a/cerbero/utils/__init__.py
+++ b/cerbero/utils/__init__.py
@@ -25,6 +25,7 @@ import shutil
import pathlib
import argparse
import importlib
+
try:
import sysconfig
except:
@@ -50,7 +51,6 @@ CYGPATH = shutil.which('cygpath')
class ArgparseArgument(object):
-
def __init__(self, *name, **kwargs):
self.name = name
self.args = kwargs
@@ -60,7 +60,6 @@ class ArgparseArgument(object):
class StoreBool(argparse.Action):
-
def __init__(self, option_strings, dest, nargs=None, **kwargs):
super().__init__(option_strings, dest, **kwargs)
@@ -75,23 +74,26 @@ class StoreBool(argparse.Action):
def user_is_root():
- ''' Check if the user running the process is root '''
- return hasattr(os, 'getuid') and os.getuid() == 0
+ """Check if the user running the process is root"""
+ return hasattr(os, 'getuid') and os.getuid() == 0
+
@functools.lru_cache()
def determine_num_of_cpus() -> int:
- ''' Number of virtual or logical CPUs on this system '''
+ """Number of virtual or logical CPUs on this system"""
# Python 2.6+
try:
import multiprocessing
+
return multiprocessing.cpu_count()
except (ImportError, NotImplementedError):
return 1
+
@functools.lru_cache()
def determine_total_ram() -> int:
- ''' Total amount of RAM in this system, in bytes '''
+ """Total amount of RAM in this system, in bytes"""
platform = system_info()[0]
@@ -100,7 +102,9 @@ def determine_total_ram() -> int:
if ram_size_query.returncode() == 0:
return int(ram_size_query.stdout.strip())
elif platform == Platform.WINDOWS:
- ram_size_query = subprocess.run([shutil.which('wmic'), 'computersystem', 'get', 'totalphysicalmemory'], stdout=subprocess.PIPE, text=True)
+ ram_size_query = subprocess.run(
+ [shutil.which('wmic'), 'computersystem', 'get', 'totalphysicalmemory'], stdout=subprocess.PIPE, text=True
+ )
if ram_size_query.returncode() == 0:
return int(ram_size_query.stdout.strip())
elif platform == Platform.LINUX:
@@ -108,7 +112,8 @@ def determine_total_ram() -> int:
if ram_size_query.returncode() == 0:
return int(re.split(r'\s+', ram_size_query.stdout.splitlines()[1]))
- return 4 << 30 # Assume 4GB
+ return 4 << 30 # Assume 4GB
+
def to_winpath(path):
if path.startswith('/'):
@@ -154,12 +159,13 @@ def windows_arch():
raise FatalError(_('Unable to detect Windows architecture'))
return arch
+
def system_info():
- '''
+ """
Get the system information.
Return a tuple with the platform type, the architecture and the
distribution
- '''
+ """
# Get the platform info
platform = os.environ.get('OS', '').lower()
if not platform:
@@ -171,7 +177,7 @@ def system_info():
elif platform.startswith('linux'):
platform = Platform.LINUX
else:
- raise FatalError(_("Platform %s not supported") % platform)
+ raise FatalError(_('Platform %s not supported') % platform)
# Get the architecture info
if platform == Platform.WINDOWS:
@@ -181,7 +187,7 @@ def system_info():
elif arch == 'x86':
arch = Architecture.X86
else:
- raise FatalError(_("Windows arch %s is not supported") % arch)
+ raise FatalError(_('Windows arch %s is not supported') % arch)
else:
uname = os.uname()
arch = uname[4]
@@ -196,7 +202,7 @@ def system_info():
elif arch.startswith('arm'):
arch = Architecture.ARM
else:
- raise FatalError(_("Architecture %s not supported") % arch)
+ raise FatalError(_('Architecture %s not supported') % arch)
# Get the distro info
if platform == Platform.LINUX:
@@ -204,9 +210,12 @@ def system_info():
try:
import distro
except ImportError:
- print('''Python >= 3.8 detected and the 'distro' python package was not found.
+ print(
+ """Python >= 3.8 detected and the 'distro' python package was not found.
Please install the 'python3-distro' or 'python-distro' package from your linux package manager or from pypi using pip.
-Terminating.''', file=sys.stderr)
+Terminating.""",
+ file=sys.stderr,
+ )
sys.exit(1)
d = distro.linux_distribution()
else:
@@ -226,12 +235,12 @@ Terminating.''', file=sys.stderr)
for line in f:
# skip empty lines and comment lines
if line.strip() and not line.lstrip().startswith('#'):
- k,v = line.rstrip().split("=")
+ k, v = line.rstrip().split('=')
if k == 'NAME':
name = v.strip('"')
elif k == 'VERSION_ID':
version = v.strip('"')
- d = (name, version, '');
+ d = (name, version, '')
if d[0] in ['Ubuntu', 'debian', 'Debian GNU/Linux', 'LinuxMint', 'Linux Mint']:
distro = Distro.DEBIAN
@@ -293,12 +302,21 @@ Terminating.''', file=sys.stderr)
distro_version = DistroVersion.DEBIAN_SID
elif d[0] in ['debian', 'Debian GNU/Linux']:
number = int(d[1]) if d[1].isnumeric() else 0
- distro_version = "debian_{number:02d}_{name}".format(number=number, name=d[2])
+ distro_version = 'debian_{number:02d}_{name}'.format(number=number, name=d[2])
elif d[0] in ['Ubuntu']:
- distro_version = "ubuntu_{number}_{name}".format(number=d[1].replace('.', '_'), name=distro_version)
+ distro_version = 'ubuntu_{number}_{name}'.format(number=d[1].replace('.', '_'), name=distro_version)
else:
raise FatalError("Distribution '%s' not supported" % str(d))
- elif d[0] in ['RedHat', 'Fedora', 'Fedora Linux', 'CentOS', 'Red Hat Enterprise Linux Server', 'CentOS Linux', 'Amazon Linux', 'Rocky Linux']:
+ elif d[0] in [
+ 'RedHat',
+ 'Fedora',
+ 'Fedora Linux',
+ 'CentOS',
+ 'Red Hat Enterprise Linux Server',
+ 'CentOS Linux',
+ 'Amazon Linux',
+ 'Rocky Linux',
+ ]:
distro = Distro.REDHAT
if d[1] == '16':
distro_version = DistroVersion.FEDORA_16
@@ -361,8 +379,7 @@ Terminating.''', file=sys.stderr)
distro_version = DistroVersion.OPENSUSE_42_3
else:
# FIXME Fill this
- raise FatalError("Distribution OpenSuse '%s' "
- "not supported" % str(d))
+ raise FatalError("Distribution OpenSuse '%s' " 'not supported' % str(d))
elif d[0].strip() in ['openSUSE Tumbleweed']:
distro = Distro.SUSE
distro_version = DistroVersion.OPENSUSE_TUMBLEWEED
@@ -380,15 +397,17 @@ Terminating.''', file=sys.stderr)
else:
distro = Distro.MSYS2
win32_ver = pplatform.win32_ver()[0]
- dmap = {'xp': DistroVersion.WINDOWS_XP,
- 'vista': DistroVersion.WINDOWS_VISTA,
- '7': DistroVersion.WINDOWS_7,
- 'post2008Server': DistroVersion.WINDOWS_8,
- '8': DistroVersion.WINDOWS_8,
- 'post2012Server': DistroVersion.WINDOWS_8_1,
- '8.1': DistroVersion.WINDOWS_8_1,
- '10': DistroVersion.WINDOWS_10,
- '11': DistroVersion.WINDOWS_11}
+ dmap = {
+ 'xp': DistroVersion.WINDOWS_XP,
+ 'vista': DistroVersion.WINDOWS_VISTA,
+ '7': DistroVersion.WINDOWS_7,
+ 'post2008Server': DistroVersion.WINDOWS_8,
+ '8': DistroVersion.WINDOWS_8,
+ 'post2012Server': DistroVersion.WINDOWS_8_1,
+ '8.1': DistroVersion.WINDOWS_8_1,
+ '10': DistroVersion.WINDOWS_10,
+ '11': DistroVersion.WINDOWS_11,
+ }
if win32_ver in dmap:
distro_version = dmap[win32_ver]
else:
@@ -415,7 +434,7 @@ Terminating.''', file=sys.stderr)
elif ver.startswith('10.8'):
distro_version = DistroVersion.OS_X_MOUNTAIN_LION
else:
- distro_version = "osx_%s" % ver
+ distro_version = 'osx_%s' % ver
num_of_cpus = determine_num_of_cpus()
@@ -424,8 +443,7 @@ Terminating.''', file=sys.stderr)
def validate_packager(packager):
# match packager in the form 'Name <email>'
- expr = r'(.*\s)*[<]([a-zA-Z0-9+_\-\.]+@'\
- '[0-9a-zA-Z][.-0-9a-zA-Z]*.[a-zA-Z]+)[>]$'
+ expr = r'(.*\s)*[<]([a-zA-Z0-9+_\-\.]+@' '[0-9a-zA-Z][.-0-9a-zA-Z]*.[a-zA-Z]+)[>]$'
return bool(re.match(expr, packager))
@@ -442,15 +460,15 @@ def copy_files(origdir, destdir, files, extensions, target_platform, logfile=Non
relprefix = destdir[1:]
orig = os.path.join(origdir, relprefix, f)
dest = os.path.join(destdir, f)
- m.action("copying %s to %s" % (orig, dest), logfile=logfile)
+ m.action('copying %s to %s' % (orig, dest), logfile=logfile)
try:
shutil.copy(orig, dest)
except IOError:
- m.warning("Could not copy %s to %s" % (orig, dest))
+ m.warning('Could not copy %s to %s' % (orig, dest))
def remove_list_duplicates(seq):
- ''' Remove list duplicates maintaining the order '''
+ """Remove list duplicates maintaining the order"""
seen = set()
seen_add = seen.add
return [x for x in seq if x not in seen and not seen_add(x)]
@@ -463,6 +481,7 @@ def parse_file(filename, dict):
exec(compile(open(filename).read(), filename, 'exec'), dict)
except Exception as ex:
import traceback
+
traceback.print_exc()
raise ex
@@ -489,11 +508,12 @@ def get_wix_prefix(config):
raise FatalError("The required packaging tool 'WiX' was not found")
return escape_path(to_unixpath(wix_prefix))
+
def add_system_libs(config, new_env, old_env=None):
- '''
+ """
Add /usr/lib/pkgconfig to PKG_CONFIG_PATH so the system's .pc file
can be found.
- '''
+ """
arch = config.target_arch
libdir = 'lib'
@@ -519,12 +539,10 @@ def add_system_libs(config, new_env, old_env=None):
search_paths = []
if old_env.get('PKG_CONFIG_LIBDIR', None):
- search_paths += [old_env['PKG_CONFIG_LIBDIR']]
+ search_paths += [old_env['PKG_CONFIG_LIBDIR']]
if old_env.get('PKG_CONFIG_PATH', None):
- search_paths += [old_env['PKG_CONFIG_PATH']]
- search_paths += [
- os.path.join(sysroot, 'usr', libdir, 'pkgconfig'),
- os.path.join(sysroot, 'usr/share/pkgconfig')]
+ search_paths += [old_env['PKG_CONFIG_PATH']]
+ search_paths += [os.path.join(sysroot, 'usr', libdir, 'pkgconfig'), os.path.join(sysroot, 'usr/share/pkgconfig')]
if config.target_distro == Distro.DEBIAN:
host = None
@@ -543,20 +561,21 @@ def add_system_libs(config, new_env, old_env=None):
new_env['PKG_CONFIG_PATH'] = ':'.join(search_paths)
- search_paths = [os.environ.get('ACLOCAL_PATH', ''),
- os.path.join(sysroot, 'usr/share/aclocal')]
+ search_paths = [os.environ.get('ACLOCAL_PATH', ''), os.path.join(sysroot, 'usr/share/aclocal')]
new_env['ACLOCAL_PATH'] = ':'.join(search_paths)
+
def split_version(s):
return tuple(int(e) for e in s.split('.'))
+
def needs_xcode8_sdk_workaround(config):
- '''
+ """
Returns whether the XCode 8 clock_gettime, mkostemp, getentropy workaround
from https://bugzilla.gnome.org/show_bug.cgi?id=772451 is needed
These symbols are only available on macOS 10.12+ and iOS 10.0+
- '''
+ """
if config.target_platform == Platform.DARWIN:
if split_version(config.min_osx_sdk_version) < (10, 12):
return True
@@ -565,23 +584,25 @@ def needs_xcode8_sdk_workaround(config):
return True
return False
+
def _qmake_or_pkgdir(qmake):
qmake_path = Path(qmake)
if not qmake_path.is_file():
m.warning('QMAKE={!r} does not exist'.format(str(qmake_path)))
return (None, None)
- pkgdir = (qmake_path.parent.parent / 'lib/pkgconfig')
+ pkgdir = qmake_path.parent.parent / 'lib/pkgconfig'
if pkgdir.is_dir():
return (pkgdir.as_posix(), qmake_path.as_posix())
return (None, qmake_path.as_posix())
+
def detect_qt5(platform, arch, is_universal):
- '''
+ """
Returns both the path to the pkgconfig directory and the path to qmake:
(pkgdir, qmake). If `pkgdir` could not be found, it will be None
Returns (None, None) if nothing was found.
- '''
+ """
path = None
qt5_prefix = os.environ.get('QT5_PREFIX', None)
qmake_path = os.environ.get('QMAKE', None)
@@ -600,12 +621,12 @@ def detect_qt5(platform, arch, is_universal):
if len(qt_version) >= 1 and qt_version[0] != 5:
# QMAKE is not for Qt5
return (None, None)
- if len(qt_version) >= 2 and qt_version[:2] < [5, 14] and \
- is_universal and platform == Platform.ANDROID:
+ if len(qt_version) >= 2 and qt_version[:2] < [5, 14] and is_universal and platform == Platform.ANDROID:
# require QT5_PREFIX before Qt 5.14 with android universal
if not qt5_prefix:
- m.warning('Please set QT5_PREFIX if you want to build '
- 'the Qt5 plugin for android-universal with Qt < 5.14')
+ m.warning(
+ 'Please set QT5_PREFIX if you want to build ' 'the Qt5 plugin for android-universal with Qt < 5.14'
+ )
return (None, None)
else:
ret = _qmake_or_pkgdir(qmake_path)
@@ -644,12 +665,13 @@ def detect_qt5(platform, arch, is_universal):
m.warning('Unsupported arch {!r} on platform {!r}'.format(arch, platform))
return ret
+
def detect_qt6(platform, arch, is_universal):
- '''
+ """
Returns the path to qmake:
Returns None if qmake could not be found.
- '''
+ """
path = None
qmake6_path = os.environ.get('QMAKE6', None)
if not qmake6_path:
@@ -665,6 +687,7 @@ def detect_qt6(platform, arch, is_universal):
return None
return qmake6_path
+
def imp_load_source(modname, fname):
loader = importlib.machinery.SourceFileLoader(modname, fname)
spec = importlib.util.spec_from_file_location(modname, fname, loader=loader)
@@ -673,15 +696,17 @@ def imp_load_source(modname, fname):
loader.exec_module(module)
return module
+
# asyncio.Semaphore classes set their working event loop internally on
# creation, so we need to ensure the proper loop has already been set by then.
# This is especially important if we create global semaphores that are
# initialized at the very beginning, since on Windows, the default
# SelectorEventLoop is not available.
def CerberoSemaphore(value=1):
- get_event_loop() # this ensures the proper event loop is already created
+ get_event_loop() # this ensures the proper event loop is already created
return asyncio.Semaphore(value)
+
def get_event_loop():
try:
loop = asyncio.get_event_loop()
@@ -691,21 +716,20 @@ def get_event_loop():
# On Windows the default SelectorEventLoop is not available:
# https://docs.python.org/3.5/library/asyncio-subprocess.html#windows-event-loop
- if sys.platform == 'win32' and \
- not isinstance(loop, asyncio.ProactorEventLoop):
+ if sys.platform == 'win32' and not isinstance(loop, asyncio.ProactorEventLoop):
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
# Avoid spammy BlockingIOError warnings with older python versions
- if sys.platform != 'win32' and \
- sys.version_info < (3, 8, 0):
+ if sys.platform != 'win32' and sys.version_info < (3, 8, 0):
asyncio.set_child_watcher(asyncio.FastChildWatcher())
asyncio.get_child_watcher().attach_loop(loop)
return loop
+
def run_until_complete(tasks):
- '''
+ """
Runs one or many tasks, blocking until all of them have finished.
@param tasks: A single Future or a list of Futures to run
@type tasks: Future or list of Futures
@@ -713,7 +737,7 @@ def run_until_complete(tasks):
one task) or a list of all results in case of multiple
tasks. Result is None if operation is cancelled.
@rtype: any type or list of any types in case of multiple tasks
- '''
+ """
loop = get_event_loop()
try:
@@ -725,21 +749,24 @@ def run_until_complete(tasks):
except asyncio.CancelledError:
return None
+
async def run_tasks(tasks, done_async=None):
"""
Runs @tasks until completion or until @done_async returns
"""
+
class QueueDone(Exception):
pass
if done_async:
+
async def queue_done():
# This is how we exit the asyncio.wait once everything is done
# as otherwise asyncio.wait will wait for our tasks to complete
await done_async
raise QueueDone()
- task = asyncio.ensure_future (queue_done())
+ task = asyncio.ensure_future(queue_done())
tasks.append(task)
async def shutdown(abnormal=True):
@@ -752,9 +779,7 @@ async def run_tasks(tasks, done_async=None):
for e in ret:
if isinstance(e, asyncio.CancelledError):
cancelled = e
- if isinstance(e, Exception) \
- and not isinstance(e, asyncio.CancelledError) \
- and not isinstance(e, QueueDone):
+ if isinstance(e, Exception) and not isinstance(e, asyncio.CancelledError) and not isinstance(e, QueueDone):
raise e
if abnormal and cancelled:
# use cancelled as a last resort we would prefer to throw any
@@ -775,28 +800,67 @@ async def run_tasks(tasks, done_async=None):
class EnvVar:
@staticmethod
def is_path(var):
- return var in ('LD_LIBRARY_PATH', 'PATH', 'MANPATH', 'INFOPATH',
- 'PKG_CONFIG_PATH', 'PKG_CONFIG_LIBDIR', 'GI_TYPELIB_PATH',
- 'XDG_DATA_DIRS', 'XDG_CONFIG_DIRS', 'GST_PLUGIN_PATH',
- 'GST_PLUGIN_PATH_1_0', 'PYTHONPATH', 'MONO_PATH', 'LIB',
- 'INCLUDE', 'PATHEXT', 'PERL5LIB')
+ return var in (
+ 'LD_LIBRARY_PATH',
+ 'PATH',
+ 'MANPATH',
+ 'INFOPATH',
+ 'PKG_CONFIG_PATH',
+ 'PKG_CONFIG_LIBDIR',
+ 'GI_TYPELIB_PATH',
+ 'XDG_DATA_DIRS',
+ 'XDG_CONFIG_DIRS',
+ 'GST_PLUGIN_PATH',
+ 'GST_PLUGIN_PATH_1_0',
+ 'PYTHONPATH',
+ 'MONO_PATH',
+ 'LIB',
+ 'INCLUDE',
+ 'PATHEXT',
+ 'PERL5LIB',
+ )
@staticmethod
def is_arg(var):
- return var in ('CFLAGS', 'CPPFLAGS', 'CXXFLAGS', 'LDFLAGS',
- 'OBJCFLAGS', 'OBJCXXFLAGS', 'OBJLDFLAGS', 'CCASFLAGS')
+ return var in (
+ 'CFLAGS',
+ 'CPPFLAGS',
+ 'CXXFLAGS',
+ 'LDFLAGS',
+ 'OBJCFLAGS',
+ 'OBJCXXFLAGS',
+ 'OBJLDFLAGS',
+ 'CCASFLAGS',
+ )
@staticmethod
def is_cmd(var):
- return var in ('AR', 'AS', 'CC', 'CPP', 'CXX', 'DLLTOOL', 'GENDEF',
- 'LD', 'NM', 'OBJC', 'OBJCOPY', 'OBJCXX', 'PERL', 'PYTHON',
- 'RANLIB', 'RC', 'STRIP', 'WINDRES')
+ return var in (
+ 'AR',
+ 'AS',
+ 'CC',
+ 'CPP',
+ 'CXX',
+ 'DLLTOOL',
+ 'GENDEF',
+ 'LD',
+ 'NM',
+ 'OBJC',
+ 'OBJCOPY',
+ 'OBJCXX',
+ 'PERL',
+ 'PYTHON',
+ 'RANLIB',
+ 'RC',
+ 'STRIP',
+ 'WINDRES',
+ )
class EnvValue(list):
- '''
+ """
Env var value (list of strings) with an associated separator
- '''
+ """
def __init__(self, sep, *values):
self.sep = sep
@@ -817,9 +881,9 @@ class EnvValue(list):
class EnvValueSingle(EnvValue):
- '''
+ """
Env var with a single value
- '''
+ """
def __init__(self, *values):
if len(values) == 1:
@@ -839,9 +903,9 @@ class EnvValueSingle(EnvValue):
class EnvValueArg(EnvValue):
- '''
+ """
Env var containing a list of quoted arguments separated by space
- '''
+ """
def __init__(self, *values):
if len(values) == 1 and not isinstance(values[0], list):
@@ -853,9 +917,9 @@ class EnvValueArg(EnvValue):
class EnvValueCmd(EnvValueArg):
- '''
+ """
Env var containing a command and a list of arguments separated by space
- '''
+ """
def __iadd__(self, new):
if isinstance(new, EnvValueCmd):
@@ -864,9 +928,10 @@ class EnvValueCmd(EnvValueArg):
class EnvValuePath(EnvValue):
- '''
+ """
Env var containing a list of paths separated by os.pathsep, which is `:` or `;`
- '''
+ """
+
def __init__(self, *values):
if len(values) == 1 and not isinstance(values[0], list):
values = (values[0].split(os.pathsep),)
diff --git a/cerbero/utils/git.py b/cerbero/utils/git.py
index 4a6a7107..13e6e9b4 100644
--- a/cerbero/utils/git.py
+++ b/cerbero/utils/git.py
@@ -36,39 +36,38 @@ def ensure_user_is_set(git_dir, logfile=None):
try:
shell.new_call([GIT, 'config', 'user.email'], git_dir, logfile=logfile)
except FatalError:
- shell.new_call([GIT, 'config', 'user.email', 'cerbero@gstreamer.freedesktop.org'],
- git_dir, logfile=logfile)
+ shell.new_call([GIT, 'config', 'user.email', 'cerbero@gstreamer.freedesktop.org'], git_dir, logfile=logfile)
try:
shell.new_call([GIT, 'config', 'user.name'], git_dir, logfile=logfile)
except FatalError:
- shell.new_call([GIT, 'config', 'user.name', 'Cerbero Build System'],
- git_dir, logfile=logfile)
+ shell.new_call([GIT, 'config', 'user.name', 'Cerbero Build System'], git_dir, logfile=logfile)
+
def init(git_dir, logfile=None):
- '''
+ """
Initialize a git repository with 'git init'
@param git_dir: path of the git repository
@type git_dir: str
- '''
+ """
os.makedirs(git_dir, exist_ok=True)
shell.new_call([GIT, 'init'], git_dir, logfile=logfile)
ensure_user_is_set(git_dir, logfile=logfile)
def clean(git_dir, logfile=None):
- '''
+ """
Clean a git repository with clean -dfx
@param git_dir: path of the git repository
@type git_dir: str
- '''
+ """
return shell.new_call([GIT, 'clean', '-dfx'], git_dir, logfile=logfile)
def list_tags(git_dir):
- '''
+ """
List all tags
@param git_dir: path of the git repository
@@ -77,12 +76,12 @@ def list_tags(git_dir):
@type fail: false
@return: list of tag names (str)
@rtype: list
- '''
+ """
return shell.check_output([GIT, 'tag', '-l'], cmd_dir=git_dir).strip().splitlines()
def create_tag(git_dir, tagname, tagdescription, commit, logfile=None):
- '''
+ """
Create a tag using commit
@param git_dir: path of the git repository
@@ -95,16 +94,14 @@ def create_tag(git_dir, tagname, tagdescription, commit, logfile=None):
@type commit: str
@param fail: raise an error if the command failed
@type fail: false
- '''
+ """
- shell.new_call([GIT, 'tag', '-s', tagname, '-m', tagdescription, commit],
- cmd_dir=git_dir, logfile=logfile)
- return shell.new_call([GIT, 'push', 'origin', tagname], cmd_dir=git_dir,
- logfile=logfile)
+ shell.new_call([GIT, 'tag', '-s', tagname, '-m', tagdescription, commit], cmd_dir=git_dir, logfile=logfile)
+ return shell.new_call([GIT, 'push', 'origin', tagname], cmd_dir=git_dir, logfile=logfile)
def delete_tag(git_dir, tagname, logfile=None):
- '''
+ """
Delete a tag
@param git_dir: path of the git repository
@@ -113,19 +110,19 @@ def delete_tag(git_dir, tagname, logfile=None):
@type tagname: str
@param fail: raise an error if the command failed
@type fail: false
- '''
+ """
return shell.new_call([GIT, '-d', tagname], cmd_dir=git_dir, logfile=logfile)
async def fetch(git_dir, fail=True, logfile=None):
- '''
+ """
Fetch all refs from all the remotes
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: false
- '''
+ """
# git 1.9 introduced the possibility to fetch both branches and tags at the
# same time when using --tags: https://stackoverflow.com/a/20608181.
# CentOS 7 ships with git 1.8.3.1, hence for old git versions, we need to
@@ -139,8 +136,9 @@ async def fetch(git_dir, fail=True, logfile=None):
cmd.append('-f')
return await shell.async_call(cmd, cmd_dir=git_dir, fail=fail, logfile=logfile, cpu_bound=False)
+
async def submodules_update(git_dir, src_dir=None, fail=True, offline=False, logfile=None):
- '''
+ """
Update submodules asynchronously from local directory
@param git_dir: path of the git repository
@@ -151,52 +149,62 @@ async def submodules_update(git_dir, src_dir=None, fail=True, offline=False, log
@type fail: false
@param offline: don't use the network
@type offline: false
- '''
+ """
if not os.path.exists(os.path.join(git_dir, '.gitmodules')):
- m.log(_(".gitmodules does not exist in %s. No need to fetch submodules.") % git_dir, logfile)
+ m.log(_('.gitmodules does not exist in %s. No need to fetch submodules.') % git_dir, logfile)
return
if src_dir:
- config = shell.check_output([GIT, 'config', '--file=.gitmodules', '--list'],
- fail=False, cmd_dir=git_dir, logfile=logfile)
+ config = shell.check_output(
+ [GIT, 'config', '--file=.gitmodules', '--list'], fail=False, cmd_dir=git_dir, logfile=logfile
+ )
config_array = [s.split('=', 1) for s in config.splitlines()]
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.path'):
- submodule = c[0][len('submodule.'):-len('.path')]
- shell.new_call([GIT, 'config', '--file=.gitmodules', 'submodule.{}.url'.format(submodule),
- os.path.join(src_dir, c[1])], cmd_dir=git_dir, logfile=logfile)
+ submodule = c[0][len('submodule.') : -len('.path')]
+ shell.new_call(
+ [
+ GIT,
+ 'config',
+ '--file=.gitmodules',
+ 'submodule.{}.url'.format(submodule),
+ os.path.join(src_dir, c[1]),
+ ],
+ cmd_dir=git_dir,
+ logfile=logfile,
+ )
shell.new_call([GIT, 'submodule', 'init'], cmd_dir=git_dir, logfile=logfile)
if src_dir or not offline:
- await shell.async_call([GIT, 'submodule', 'sync'], cmd_dir=git_dir, logfile=logfile,
- cpu_bound=False)
- await shell.async_call([GIT, 'submodule', 'update'], cmd_dir=git_dir, fail=fail,
- logfile=logfile, cpu_bound=False)
+ await shell.async_call([GIT, 'submodule', 'sync'], cmd_dir=git_dir, logfile=logfile, cpu_bound=False)
+ await shell.async_call(
+ [GIT, 'submodule', 'update'], cmd_dir=git_dir, fail=fail, logfile=logfile, cpu_bound=False
+ )
else:
- await shell.async_call([GIT, 'submodule', 'update', '--no-fetch'], cmd_dir=git_dir,
- fail=fail, logfile=logfile, cpu_bound=False)
+ await shell.async_call(
+ [GIT, 'submodule', 'update', '--no-fetch'], cmd_dir=git_dir, fail=fail, logfile=logfile, cpu_bound=False
+ )
if src_dir:
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.url'):
- shell.new_call([GIT, 'config', '--file=.gitmodules', c[0], c[1]],
- cmd_dir=git_dir, logfile=logfile)
- await shell.async_call([GIT, 'submodule', 'sync'], cmd_dir=git_dir, logfile=logfile,
- cpu_bound=False)
+ shell.new_call([GIT, 'config', '--file=.gitmodules', c[0], c[1]], cmd_dir=git_dir, logfile=logfile)
+ await shell.async_call([GIT, 'submodule', 'sync'], cmd_dir=git_dir, logfile=logfile, cpu_bound=False)
+
async def checkout(git_dir, commit, logfile=None):
- '''
+ """
Reset a git repository to a given commit
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to checkout
@type commit: str
- '''
+ """
cmd = [GIT, 'reset', '--hard', commit]
return await shell.async_call(cmd, git_dir, logfile=logfile, cpu_bound=False)
def get_hash(git_dir, commit, logfile=None):
- '''
+ """
Get a commit hash from a valid commit.
Can be used to check if a commit exists
@@ -204,25 +212,28 @@ def get_hash(git_dir, commit, logfile=None):
@type git_dir: str
@param commit: the commit to log
@type commit: str
- '''
+ """
if not os.path.isdir(os.path.join(git_dir, '.git')):
# If a recipe's source type is switched from tarball to git, then we
# can get called from built_version() when the directory isn't git.
# Return a fixed string + unix time to trigger a full fetch.
return 'not-git-' + str(time.time())
- return shell.check_output([GIT, 'rev-parse', commit], cmd_dir=git_dir,
- fail=False, quiet=True, logfile=logfile).rstrip()
+ return shell.check_output(
+ [GIT, 'rev-parse', commit], cmd_dir=git_dir, fail=False, quiet=True, logfile=logfile
+ ).rstrip()
+
def get_hash_is_ancestor(git_dir, commit, logfile=None):
if not os.path.isdir(os.path.join(git_dir, '.git')):
return False
- ret = shell.new_call([GIT, 'merge-base', '--is-ancestor', commit, 'HEAD'],
- cmd_dir=git_dir, fail=False, logfile=logfile)
+ ret = shell.new_call(
+ [GIT, 'merge-base', '--is-ancestor', commit, 'HEAD'], cmd_dir=git_dir, fail=False, logfile=logfile
+ )
return ret == 0
async def local_checkout(git_dir, local_git_dir, commit, logfile=None, use_submodules=True):
- '''
+ """
Clone a repository for a given commit in a different location
@param git_dir: destination path of the git repository
@@ -231,17 +242,17 @@ async def local_checkout(git_dir, local_git_dir, commit, logfile=None, use_submo
@type local_git_dir: str
@param commit: the commit to checkout
@type commit: false
- '''
+ """
branch_name = 'cerbero_build'
await shell.async_call([GIT, 'checkout', commit, '-B', branch_name], local_git_dir, logfile=logfile)
- await shell.async_call([GIT, 'clone', local_git_dir, '-s', '-b', branch_name, '.'],
- git_dir, logfile=logfile)
+ await shell.async_call([GIT, 'clone', local_git_dir, '-s', '-b', branch_name, '.'], git_dir, logfile=logfile)
ensure_user_is_set(git_dir, logfile=logfile)
if use_submodules:
await submodules_update(git_dir, local_git_dir, logfile=logfile)
+
def add_remote(git_dir, name, url, logfile=None):
- '''
+ """
Add a remote to a git repository
@param git_dir: destination path of the git repository
@@ -250,7 +261,7 @@ def add_remote(git_dir, name, url, logfile=None):
@type name: str
@param url: url of the remote
@type url: str
- '''
+ """
try:
shell.new_call([GIT, 'remote', 'add', name, url], git_dir, logfile=logfile)
except:
@@ -258,7 +269,7 @@ def add_remote(git_dir, name, url, logfile=None):
def check_line_endings(platform):
- '''
+ """
Checks if on windows we don't use the automatic line endings conversion
as it breaks everything
@@ -266,23 +277,23 @@ def check_line_endings(platform):
@type platform: L{cerbero.config.Platform}
@return: true if git config is core.autocrlf=false
@rtype: bool
- '''
+ """
if platform != Platform.WINDOWS:
return True
val = shell.check_output([GIT, 'config', '--get', 'core.autocrlf'], fail=False)
- if ('false' in val.lower()):
+ if 'false' in val.lower():
return True
return False
def init_directory(git_dir, logfile=None):
- '''
+ """
Initialize a git repository with the contents
of a directory
@param git_dir: path of the git repository
@type git_dir: str
- '''
+ """
init(git_dir, logfile=logfile)
shell.new_call([GIT, 'add', '--force', '-A', '.'], git_dir, logfile=logfile)
# Check if we need to commit anything. This can happen when extract failed
@@ -294,7 +305,7 @@ def init_directory(git_dir, logfile=None):
def apply_patch(patch, git_dir, logfile=None):
- '''
+ """
Applies a commit patch using 'git am'
of a directory
@@ -302,5 +313,5 @@ def apply_patch(patch, git_dir, logfile=None):
@type git_dir: str
@param patch: path of the patch file
@type patch: str
- '''
+ """
shell.new_call([GIT, 'am', '--ignore-whitespace', patch], git_dir, logfile=logfile)
diff --git a/cerbero/utils/manifest.py b/cerbero/utils/manifest.py
index 010afb27..afae447d 100644
--- a/cerbero/utils/manifest.py
+++ b/cerbero/utils/manifest.py
@@ -4,10 +4,11 @@ import xml.etree.ElementTree as ET
from cerbero.utils import _
from cerbero.errors import FatalError
+
class Manifest(object):
- '''
+ """
Parse and store the content of a manifest file
- '''
+ """
remotes = {}
projects = {}
@@ -21,7 +22,7 @@ class Manifest(object):
try:
tree = ET.parse(self.manifest_path)
except Exception as ex:
- raise FatalError(_("Error loading manifest in file %s") % ex)
+ raise FatalError(_('Error loading manifest in file %s') % ex)
root = tree.getroot()
@@ -32,8 +33,7 @@ class Manifest(object):
self.default_remote = child.attrib['remote'] or self.default_remote
self.default_revision = child.attrib['revision'] or self.default_revision
if child.tag == 'project':
- project = namedtuple('Project', ['name', 'remote',
- 'revision', 'fetch_uri'])
+ project = namedtuple('Project', ['name', 'remote', 'revision', 'fetch_uri'])
project.name = child.attrib['name']
if project.name.endswith('.git'):
@@ -48,7 +48,7 @@ class Manifest(object):
try:
return self.projects[name]
except KeyError as ex:
- raise FatalError(_("Could not find project %s in manifes") % name)
+ raise FatalError(_('Could not find project %s in manifes') % name)
def get_fetch_uri(self, project, remote):
fetch = self.remotes[remote]
diff --git a/cerbero/utils/messages.py b/cerbero/utils/messages.py
index 85620214..45d863fc 100644
--- a/cerbero/utils/messages.py
+++ b/cerbero/utils/messages.py
@@ -26,6 +26,7 @@ import shutil
if sys.platform == 'win32':
import ctypes
+
kernel32 = ctypes.windll.kernel32
@@ -33,8 +34,8 @@ ACTION_TPL = '-----> %s'
DONE_STEP_TPL = '[(%s/%s) %s -> %s]'
STEP_TPL = '[(%s/%s @ %d%%) %s -> %s]'
START_TIME = None
-SHELL_CLEAR_LINE = "\r\033[K"
-SHELL_MOVE_UP = "\033[F"
+SHELL_CLEAR_LINE = '\r\033[K'
+SHELL_MOVE_UP = '\033[F'
# Enable support for VT-100 escapes in Windows 10
@@ -60,9 +61,10 @@ def log(msg, logfile):
else:
logfile.write(msg + '\n')
+
class StdoutManager:
def __init__(self):
- self.status_line = ""
+ self.status_line = ''
self.clear_lines = 0
def output(self, msg):
@@ -70,12 +72,14 @@ class StdoutManager:
self.clear_status()
sys.stdout.write(msg)
sys.stdout.flush()
- self.status_line = ""
+ self.status_line = ''
self.clear_lines = 0
- def clear_status (self):
+ def clear_status(self):
if console_is_interactive():
- clear_prev_status = SHELL_CLEAR_LINE + "".join((SHELL_CLEAR_LINE + SHELL_MOVE_UP for i in range(self.clear_lines)))
+ clear_prev_status = SHELL_CLEAR_LINE + ''.join(
+ (SHELL_CLEAR_LINE + SHELL_MOVE_UP for i in range(self.clear_lines))
+ )
sys.stdout.write(clear_prev_status)
sys.stdout.flush()
@@ -87,10 +91,12 @@ class StdoutManager:
self.status_line = status
if console_is_interactive():
- self.clear_lines = len (status) // shutil.get_terminal_size().columns
+ self.clear_lines = len(status) // shutil.get_terminal_size().columns
+
STDOUT = StdoutManager()
+
def prepend_time(end=' '):
global START_TIME
s = ''
@@ -99,6 +105,7 @@ def prepend_time(end=' '):
s += end
return s
+
def output(msg, fd, end='\n'):
prefix = prepend_time()
if fd == sys.stdout:
@@ -107,6 +114,7 @@ def output(msg, fd, end='\n'):
fd.write(prefix + msg + end)
fd.flush()
+
def output_status(msg):
prefix = prepend_time()
STDOUT.output_status(prefix + msg)
@@ -127,11 +135,11 @@ def error(msg, logfile=None):
def deprecation(msg, logfile=None):
- error("DEPRECATION: %s" % msg, logfile=logfile)
+ error('DEPRECATION: %s' % msg, logfile=logfile)
def warning(msg, logfile=None):
- error("WARNING: %s" % msg, logfile=logfile)
+ error('WARNING: %s' % msg, logfile=logfile)
def action(msg, logfile=None):
@@ -141,5 +149,6 @@ def action(msg, logfile=None):
def build_step(recipe_i, total_recipes, completion_percent, recipe, step, logfile=None):
message(STEP_TPL % (recipe_i, total_recipes, completion_percent, recipe, step), logfile=logfile)
-def build_recipe_done (recipe_i, total_recipes, recipe, msg, logfile=None):
+
+def build_recipe_done(recipe_i, total_recipes, recipe, msg, logfile=None):
message(DONE_STEP_TPL % (recipe_i, total_recipes, recipe, msg), logfile=logfile)
diff --git a/cerbero/utils/msbuild.py b/cerbero/utils/msbuild.py
index b7a6f15d..28116c5e 100644
--- a/cerbero/utils/msbuild.py
+++ b/cerbero/utils/msbuild.py
@@ -24,9 +24,7 @@ from cerbero.utils import fix_winpath, shell
class MSBuild(object):
-
- def __init__(self, solution, arch=Architecture.X86, config='Release',
- sdk='Windows7.1SDK', **properties):
+ def __init__(self, solution, arch=Architecture.X86, config='Release', sdk='Windows7.1SDK', **properties):
self.properties = {}
if arch == Architecture.X86:
self.properties['Platform'] = 'Win32'
@@ -43,16 +41,14 @@ class MSBuild(object):
@staticmethod
def get_msbuild_tools_path():
reg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
- key = winreg.OpenKey(reg,
- r"SOFTWARE\Microsoft\MSBuild\ToolsVersions\4.0")
+ key = winreg.OpenKey(reg, r'SOFTWARE\Microsoft\MSBuild\ToolsVersions\4.0')
path = winreg.QueryValueEx(key, 'MSBuildToolsPath')[0]
return fix_winpath(path)
@staticmethod
def get_vs_path():
reg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
- key = winreg.OpenKey(reg,
- r"SOFTWARE\Microsoft\VisualStudio\SxS\VC7")
+ key = winreg.OpenKey(reg, r'SOFTWARE\Microsoft\VisualStudio\SxS\VC7')
path = winreg.QueryValueEx(key, '10.0')[0]
path = str(path)
path = path.replace('\\VC', '\\Common7\\IDE')
@@ -66,11 +62,9 @@ class MSBuild(object):
if self.properties['Platform'] == 'Win32':
os.environ['PATH'] = '%s;%s' % (os.environ['PATH'], vs_path)
try:
- shell.new_call(['msbuild.exe', self.solution, *properties, '/target:%s' %
- (command,)], msbuildpath)
+ shell.new_call(['msbuild.exe', self.solution, *properties, '/target:%s' % (command,)], msbuildpath)
finally:
os.environ['PATH'] = old_path
def _format_properties(self):
- return ['/property:%s=%s' % (k, v) for k, v in
- self.properties.items()]
+ return ['/property:%s=%s' % (k, v) for k, v in self.properties.items()]
diff --git a/cerbero/utils/shell.py b/cerbero/utils/shell.py
index a1257fbf..1b2faa07 100644
--- a/cerbero/utils/shell.py
+++ b/cerbero/utils/shell.py
@@ -53,6 +53,7 @@ CPU_BOUND_SEMAPHORE = CerberoSemaphore(info[4])
NON_CPU_BOUND_SEMAPHORE = CerberoSemaphore(2)
DRY_RUN = False
+
def _fix_mingw_cmd(path):
reserved = ['/', ' ', '\\', ')', '(', '"']
l_path = list(path)
@@ -62,14 +63,15 @@ def _fix_mingw_cmd(path):
l_path[i] = '/'
return ''.join(l_path)
+
def _resolve_cmd(cmd, env):
- '''
+ """
On Windows, we can't pass the PATH variable through the env= kwarg to
subprocess.* and expect it to use that value to search for the command,
because Python uses CreateProcess directly. Unlike execvpe, CreateProcess
does not use the PATH env var in the env supplied to search for the
executable. Hence, we need to search for it manually.
- '''
+ """
if PLATFORM != Platform.WINDOWS or env is None or 'PATH' not in env:
return cmd
if not os.path.isabs(cmd[0]):
@@ -79,10 +81,11 @@ def _resolve_cmd(cmd, env):
cmd[0] = resolved_cmd
return cmd
+
def _cmd_string_to_array(cmd, env):
if isinstance(cmd, list):
return _resolve_cmd(cmd, env)
- assert(isinstance(cmd, str))
+ assert isinstance(cmd, str)
if PLATFORM == Platform.WINDOWS:
# fix paths with backslashes
cmd = _fix_mingw_cmd(cmd)
@@ -91,16 +94,19 @@ def _cmd_string_to_array(cmd, env):
# platforms.
return ['sh', '-c', cmd]
+
def set_max_cpu_bound_calls(number):
global CPU_BOUND_SEMAPHORE
CPU_BOUND_SEMAPHORE = CerberoSemaphore(number)
+
def set_max_non_cpu_bound_calls(number):
global NON_CPU_BOUND_SEMAPHORE
NON_CPU_BOUND_SEMAPHORE = CerberoSemaphore(number)
+
def call(cmd, cmd_dir='.', fail=True, verbose=False, logfile=None, env=None):
- '''
+ """
Run a shell command
DEPRECATED: Use new_call and a cmd array wherever possible
@@ -110,7 +116,7 @@ def call(cmd, cmd_dir='.', fail=True, verbose=False, logfile=None, env=None):
@param cmd_dir: str
@param fail: whether or not to raise an exception if the command fails
@type fail: bool
- '''
+ """
try:
if logfile is None:
if verbose:
@@ -133,7 +139,7 @@ def call(cmd, cmd_dir='.', fail=True, verbose=False, logfile=None, env=None):
shell = False
if DRY_RUN:
# write to stderr so it's filtered more easily
- m.error("cd %s && %s && cd %s" % (cmd_dir, cmd, os.getcwd()))
+ m.error('cd %s && %s && cd %s' % (cmd_dir, cmd, os.getcwd()))
ret = 0
else:
if env is not None:
@@ -144,11 +150,17 @@ def call(cmd, cmd_dir='.', fail=True, verbose=False, logfile=None, env=None):
# Force python scripts to print their output on newlines instead
# of on exit. Ensures that we get continuous output in log files.
env['PYTHONUNBUFFERED'] = '1'
- ret = subprocess.check_call(cmd, cwd=cmd_dir, bufsize=1,
- stderr=subprocess.STDOUT, stdout=stream,
- stdin=subprocess.DEVNULL,
- universal_newlines=True,
- env=env, shell=shell)
+ ret = subprocess.check_call(
+ cmd,
+ cwd=cmd_dir,
+ bufsize=1,
+ stderr=subprocess.STDOUT,
+ stdout=stream,
+ stdin=subprocess.DEVNULL,
+ universal_newlines=True,
+ env=env,
+ shell=shell,
+ )
except SUBPROCESS_EXCEPTIONS as e:
if fail:
msg = ''
@@ -198,9 +210,9 @@ def new_call(cmd, cmd_dir=None, fail=True, logfile=None, env=None, verbose=False
else:
stdin = None
try:
- subprocess.check_call(cmd, cwd=cmd_dir, env=env,
- stdout=logfile, stderr=subprocess.STDOUT,
- stdin=stdin, shell=shell)
+ subprocess.check_call(
+ cmd, cwd=cmd_dir, env=env, stdout=logfile, stderr=subprocess.STDOUT, stdin=stdin, shell=shell
+ )
except SUBPROCESS_EXCEPTIONS as e:
returncode = getattr(e, 'returncode', -1)
if not fail:
@@ -218,14 +230,14 @@ def new_call(cmd, cmd_dir=None, fail=True, logfile=None, env=None, verbose=False
async def async_call(cmd, cmd_dir='.', fail=True, logfile=None, cpu_bound=True, env=None):
- '''
+ """
Run a shell command
@param cmd: the command to run
@type cmd: str
@param cmd_dir: directory where the command will be run
@param cmd_dir: str
- '''
+ """
global CPU_BOUND_SEMAPHORE, NON_CPU_BOUND_SEMAPHORE
semaphore = CPU_BOUND_SEMAPHORE if cpu_bound else NON_CPU_BOUND_SEMAPHORE
@@ -241,16 +253,16 @@ async def async_call(cmd, cmd_dir='.', fail=True, logfile=None, cpu_bound=True,
if DRY_RUN:
# write to stderr so it's filtered more easily
- m.error("cd %s && %s && cd %s" % (cmd_dir, cmd, os.getcwd()))
+ m.error('cd %s && %s && cd %s' % (cmd_dir, cmd, os.getcwd()))
return
env = os.environ.copy() if env is None else env.copy()
# Force python scripts to print their output on newlines instead
# of on exit. Ensures that we get continuous output in log files.
env['PYTHONUNBUFFERED'] = '1'
- proc = await asyncio.create_subprocess_exec(*cmd, cwd=cmd_dir,
- stderr=subprocess.STDOUT, stdout=stream,
- stdin=subprocess.DEVNULL, env=env)
+ proc = await asyncio.create_subprocess_exec(
+ *cmd, cwd=cmd_dir, stderr=subprocess.STDOUT, stdout=stream, stdin=subprocess.DEVNULL, env=env
+ )
await proc.wait()
if proc.returncode != 0 and fail:
msg = ''
@@ -262,14 +274,14 @@ async def async_call(cmd, cmd_dir='.', fail=True, logfile=None, cpu_bound=True,
async def async_call_output(cmd, cmd_dir=None, logfile=None, cpu_bound=True, env=None):
- '''
+ """
Run a shell command and get the output
@param cmd: the command to run
@type cmd: str
@param cmd_dir: directory where the command will be run
@param cmd_dir: str
- '''
+ """
global CPU_BOUND_SEMAPHORE, NON_CPU_BOUND_SEMAPHORE
semaphore = CPU_BOUND_SEMAPHORE if cpu_bound else NON_CPU_BOUND_SEMAPHORE
@@ -282,6 +294,7 @@ async def async_call_output(cmd, cmd_dir=None, logfile=None, cpu_bound=True, env
if PLATFORM == Platform.WINDOWS:
import cerbero.hacks
+
# On Windows, create_subprocess_exec with a PIPE fails while creating
# a named pipe using tempfile.mktemp because we override os.path.join
# to use / on Windows. Override the tempfile module's reference to the
@@ -293,9 +306,9 @@ async def async_call_output(cmd, cmd_dir=None, logfile=None, cpu_bound=True, env
# used instead.
tempfile.tempdir = str(PurePath(tempfile.gettempdir()))
- proc = await asyncio.create_subprocess_exec(*cmd, cwd=cmd_dir,
- stdout=subprocess.PIPE, stderr=logfile,
- stdin=subprocess.DEVNULL, env=env)
+ proc = await asyncio.create_subprocess_exec(
+ *cmd, cwd=cmd_dir, stdout=subprocess.PIPE, stderr=logfile, stdin=subprocess.DEVNULL, env=env
+ )
(output, unused_err) = await proc.communicate()
if PLATFORM == Platform.WINDOWS:
@@ -311,7 +324,7 @@ async def async_call_output(cmd, cmd_dir=None, logfile=None, cpu_bound=True, env
def apply_patch(patch, directory, strip=1, logfile=None):
- '''
+ """
Apply a patch
@param patch: path of the patch file
@@ -320,13 +333,13 @@ def apply_patch(patch, directory, strip=1, logfile=None):
@type: directory: str
@param strip: strip
@type strip: int
- '''
- m.log("Applying patch {}".format(patch), logfile)
+ """
+ m.log('Applying patch {}'.format(patch), logfile)
new_call([PATCH, f'-p{strip}', '-f', '-i', patch], cmd_dir=directory, logfile=logfile)
async def unpack(filepath, output_dir, logfile=None, force_tarfile=False):
- '''
+ """
Extracts a tarball
@param filepath: path of the tarball
@@ -335,7 +348,7 @@ async def unpack(filepath, output_dir, logfile=None, force_tarfile=False):
@type output_dir: str
@param force_tarfile: forces use of tarfile
@type force_tarfile: bool
- '''
+ """
m.log('Unpacking {} in {}'.format(filepath, output_dir), logfile)
if filepath.endswith(TARBALL_SUFFIXES):
@@ -351,7 +364,7 @@ async def unpack(filepath, output_dir, logfile=None, force_tarfile=False):
await async_call([get_tar_cmd(), '-C', output_dir, '-xf', filepath, '--no-same-owner'])
elif filepath.endswith('.zip'):
- zf = zipfile.ZipFile(filepath, "r")
+ zf = zipfile.ZipFile(filepath, 'r')
zf.extractall(path=output_dir)
elif filepath.endswith('.dmg'):
out_dir_name = os.path.splitext(os.path.split(filepath)[1])[0]
@@ -359,15 +372,17 @@ async def unpack(filepath, output_dir, logfile=None, force_tarfile=False):
output_dir = os.path.join(output_dir, out_dir_name)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
- await async_call(['hdiutil', 'attach', '-readonly', '-mountpoint', vol_name, filepath], logfile=logfile, cpu_bound=False)
+ await async_call(
+ ['hdiutil', 'attach', '-readonly', '-mountpoint', vol_name, filepath], logfile=logfile, cpu_bound=False
+ )
await async_call(['cp', '-r', vol_name + '/', output_dir], logfile=logfile, cpu_bound=False)
await async_call(['hdiutil', 'detach', vol_name], logfile=logfile, cpu_bound=False)
else:
- raise FatalError("Unknown tarball format %s" % filepath)
+ raise FatalError('Unknown tarball format %s' % filepath)
async def download(url, dest, check_cert=True, overwrite=False, logfile=None, mirrors=None):
- '''
+ """
Downloads a file
@param url: url to download
@@ -382,17 +397,17 @@ async def download(url, dest, check_cert=True, overwrite=False, logfile=None, mi
@type logfile: str
@param mirrors: list of mirrors to use as fallback
@type logfile: list
- '''
+ """
user_agent = 'GStreamerCerbero/' + CERBERO_VERSION
if not overwrite and os.path.exists(dest):
if logfile is None:
- logging.info("File %s already downloaded." % dest)
+ logging.info('File %s already downloaded.' % dest)
return
else:
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
- m.log("Downloading {}".format(url), logfile)
+ m.log('Downloading {}'.format(url), logfile)
urls = [url]
if mirrors is not None:
@@ -402,19 +417,34 @@ async def download(url, dest, check_cert=True, overwrite=False, logfile=None, mi
urls += [urllib.parse.urljoin(u + '/', filename) for u in mirrors]
if sys.platform.startswith('win'):
- cmd = ['powershell', '-Command', 'Set-Variable -Name ' \
- 'ProgressPreference -Value \'SilentlyContinue\'; ' \
- f'Invoke-WebRequest -UserAgent {user_agent} -OutFile {dest} ' \
- '-Method Get -Uri %s']
+ cmd = [
+ 'powershell',
+ '-Command',
+ 'Set-Variable -Name '
+ "ProgressPreference -Value 'SilentlyContinue'; "
+ f'Invoke-WebRequest -UserAgent {user_agent} -OutFile {dest} '
+ '-Method Get -Uri %s',
+ ]
elif shutil.which('wget'):
- cmd = ['wget', '--user-agent', user_agent, '--tries=2', '--timeout=20',
- '--progress=dot:giga', '-O', dest]
+ cmd = ['wget', '--user-agent', user_agent, '--tries=2', '--timeout=20', '--progress=dot:giga', '-O', dest]
if not check_cert:
cmd += ['--no-check-certificate']
cmd += ['%s']
elif shutil.which('curl'):
- cmd = ['curl', '-L', '--fail', '--user-agent', user_agent, '--retry', '2',
- '--connect-timeout', '20', '--progress-bar', '-o', dest]
+ cmd = [
+ 'curl',
+ '-L',
+ '--fail',
+ '--user-agent',
+ user_agent,
+ '--retry',
+ '2',
+ '--connect-timeout',
+ '20',
+ '--progress-bar',
+ '-o',
+ dest,
+ ]
if not check_cert:
cmd += ['-k']
cmd += ['%s']
@@ -428,8 +458,7 @@ async def download(url, dest, check_cert=True, overwrite=False, logfile=None, mi
tries = 2
while tries > 0:
try:
- return await async_call(cmd + [url_fmt % murl], cpu_bound=False,
- logfile=logfile)
+ return await async_call(cmd + [url_fmt % murl], cpu_bound=False, logfile=logfile)
except Exception as ex:
if os.path.exists(dest):
os.remove(dest)
@@ -445,10 +474,11 @@ def _splitter(string, base_url):
lines = string.split('\n')
for line in lines:
try:
- yield "%s/%s" % (base_url, line.split(' ')[2])
+ yield '%s/%s' % (base_url, line.split(' ')[2])
except:
continue
+
def ls_files(files, prefix):
if not files:
return []
@@ -458,6 +488,7 @@ def ls_files(files, prefix):
sfiles.update([i.relative_to(prefix).as_posix() for i in prefix.glob(f)])
return list(tuple(sfiles))
+
def ls_dir(dirpath, prefix):
files = []
for root, dirnames, filenames in os.walk(dirpath):
@@ -475,7 +506,7 @@ def find_newer_files(prefix, compfile):
def replace(filepath, replacements):
- ''' Replaces keys in the 'replacements' dict with their values in file '''
+ """Replaces keys in the 'replacements' dict with their values in file"""
with open(filepath, 'r') as f:
content = f.read()
for k, v in replacements.items():
@@ -489,9 +520,9 @@ def find_files(pattern, prefix):
def prompt(message, options=[]):
- ''' Prompts the user for input with the message and options '''
+ """Prompts the user for input with the message and options"""
if len(options) != 0:
- message = "%s [%s] " % (message, '/'.join(options))
+ message = '%s [%s] ' % (message, '/'.join(options))
res = input(message)
while res not in [str(x) for x in options]:
res = input(message)
@@ -499,10 +530,10 @@ def prompt(message, options=[]):
def prompt_multiple(message, options):
- ''' Prompts the user for input with using a list of string options'''
+ """Prompts the user for input with using a list of string options"""
output = message + '\n'
for i in range(len(options)):
- output += "[%s] %s\n" % (i, options[i])
+ output += '[%s] %s\n' % (i, options[i])
res = input(output)
while res not in [str(x) for x in range(len(options))]:
res = input(output)
@@ -534,21 +565,21 @@ def touch(path, create_if_not_exists=False, offset=0):
def file_hash(path):
- '''
+ """
Get the file md5 hash
- '''
+ """
return hashlib.md5(open(path, 'rb').read()).digest()
def files_checksum(paths):
- '''
+ """
Get the md5 checksum of the files
@paths: list of paths
@type: list
@return: the md5 checksum
@rtype: str
- '''
+ """
m = hashlib.md5()
for f in paths:
m.update(open(f, 'rb').read())
@@ -556,10 +587,10 @@ def files_checksum(paths):
def enter_build_environment(platform, arch, distro, sourcedir=None, bash_completions=None, env=None):
- '''
+ """
Enters to a new shell with the build environment
- '''
- SHELLRC = '''
+ """
+ SHELLRC = """
if [ -e ~/{rc_file} ]; then
source ~/{rc_file}
fi
@@ -574,13 +605,13 @@ BASH_COMPLETION_PATH="$CERBERO_PREFIX/share/bash-completion/completions"
for f in $BASH_COMPLETION_SCRIPTS; do
[ -f "$BASH_COMPLETION_PATH/$f" ] && . "$BASH_COMPLETION_PATH/$f"
done
-'''
- MSYSBAT = '''
+"""
+ MSYSBAT = """
C:\\MinGW\\msys\\1.0\\bin\\bash.exe --rcfile %s
-'''
- MSYS2BAT = '''
+"""
+ MSYS2BAT = """
C:\\msys64\\msys2_shell.cmd -ucrt64 -defterm -no-start -here -use-full-path -c 'bash --rcfile %s'
-'''
+"""
if sourcedir:
sourcedirsh = 'cd ' + sourcedir
else:
@@ -597,15 +628,18 @@ C:\\msys64\\msys2_shell.cmd -ucrt64 -defterm -no-start -here -use-full-path -c '
rc_opt = '--rcs'
prompt = os.environ.get('PROMPT', '')
prompt = 'PROMPT="%{{$fg[green]%}}[cerbero-{platform}-{arch}]%{{$reset_color%}} $PROMPT"'.format(
- platform=platform, arch=arch)
+ platform=platform, arch=arch
+ )
else:
rc_file = '.bashrc'
rc_opt = '--rcfile'
prompt = os.environ.get('PS1', '')
prompt = r'PS1="\[\033[01;32m\][cerbero-{platform}-{arch}]\[\033[00m\] $PS1"'.format(
- platform=platform, arch=arch)
- shellrc = SHELLRC.format(rc_file=rc_file, sourcedirsh=sourcedirsh,
- prompt=prompt, bash_completions=bash_completions, path=env['PATH'])
+ platform=platform, arch=arch
+ )
+ shellrc = SHELLRC.format(
+ rc_file=rc_file, sourcedirsh=sourcedirsh, prompt=prompt, bash_completions=bash_completions, path=env['PATH']
+ )
if PLATFORM == Platform.WINDOWS:
if distro == Distro.MSYS:
@@ -613,8 +647,8 @@ C:\\msys64\\msys2_shell.cmd -ucrt64 -defterm -no-start -here -use-full-path -c '
else:
bat_tpl = MSYS2BAT
msysbatdir = tempfile.mkdtemp()
- msysbat = os.path.join(msysbatdir, "msys.bat")
- bashrc = os.path.join(msysbatdir, "bash.rc")
+ msysbat = os.path.join(msysbatdir, 'msys.bat')
+ bashrc = os.path.join(msysbatdir, 'bash.rc')
with open(msysbat, 'w+') as f:
f.write(bat_tpl % bashrc)
with open(bashrc, 'w+') as f:
@@ -628,24 +662,24 @@ C:\\msys64\\msys2_shell.cmd -ucrt64 -defterm -no-start -here -use-full-path -c '
rc_tmp.write(shellrc)
rc_tmp.flush()
if 'zsh' in shell:
- env["ZDOTDIR"] = tmp.name
+ env['ZDOTDIR'] = tmp.name
os.execlpe(shell, shell, env)
else:
# Check if the shell supports passing the rcfile
if os.system("%s %s %s -c echo 'test' > /dev/null 2>&1" % (shell, rc_opt, rc_tmp.name)) == 0:
os.execlpe(shell, shell, rc_opt, rc_tmp.name, env)
else:
- env["CERBERO_ENV"] = "[cerbero-%s-%s]" % (platform, arch)
+ env['CERBERO_ENV'] = '[cerbero-%s-%s]' % (platform, arch)
os.execlpe(shell, shell, env)
def get_tar_cmd():
- '''
+ """
Returns the tar command to use
@return: the tar command
@rtype: str
- '''
+ """
# Use bsdtar with MSYS2 since tar hangs
# https://github.com/msys2/MSYS2-packages/issues/1548
if DISTRO == Distro.MSYS2:
@@ -689,13 +723,14 @@ def check_tool_version(tool_name, needed, env, version_arg=None):
return tool, found, newer
+
def windows_proof_rename(from_name, to_name):
- '''
+ """
On Windows, if you try to rename a file or a directory that you've newly
created, an anti-virus may be holding a lock on it, and renaming it will
yield a PermissionError. In this case, the only thing we can do is try and
try again.
- '''
+ """
delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
if PLATFORM == Platform.WINDOWS:
for d in delays:
@@ -708,6 +743,7 @@ def windows_proof_rename(from_name, to_name):
# Try one last time and throw an error if it fails again
os.rename(from_name, to_name)
+
def symlink(src, dst, working_dir=None):
prev_wd = os.getcwd()
if working_dir:
@@ -727,6 +763,7 @@ def symlink(src, dst, working_dir=None):
finally:
os.chdir(prev_wd)
+
class BuildStatusPrinter:
def __init__(self, steps, interactive):
self.steps = steps[:]
@@ -749,23 +786,23 @@ class BuildStatusPrinter:
def built(self, count, recipe_name):
self.count += 1
if self.interactive:
- m.build_recipe_done(self.count, self.total, recipe_name, _("built"))
+ m.build_recipe_done(self.count, self.total, recipe_name, _('built'))
self.remove_recipe(recipe_name)
def already_built(self, count, recipe_name):
self.count += 1
if self.interactive:
- m.build_recipe_done(self.count, self.total, recipe_name, _("already built"))
+ m.build_recipe_done(self.count, self.total, recipe_name, _('already built'))
else:
- m.build_recipe_done(count, self.total, recipe_name, _("already built"))
+ m.build_recipe_done(count, self.total, recipe_name, _('already built'))
self.output_status_line()
- def _get_completion_percent (self):
- one_recipe = 100. / float (self.total)
- one_step = one_recipe / len (self.steps)
+ def _get_completion_percent(self):
+ one_recipe = 100.0 / float(self.total)
+ one_step = one_recipe / len(self.steps)
completed = float(self.count) * one_recipe
- for i, step in enumerate (self.steps):
- completed += len(self.step_to_recipe[step]) * (i+1) * one_step
+ for i, step in enumerate(self.steps):
+ completed += len(self.step_to_recipe[step]) * (i + 1) * one_step
return int(completed)
def update_recipe_step(self, count, recipe_name, step):
@@ -778,11 +815,11 @@ class BuildStatusPrinter:
self.output_status_line()
def generate_status_line(self):
- s = "[(" + str(self.count) + "/" + str(self.total) + " @ " + str(self._get_completion_percent()) + "%)"
+ s = '[(' + str(self.count) + '/' + str(self.total) + ' @ ' + str(self._get_completion_percent()) + '%)'
for step in self.steps:
if self.step_to_recipe[step]:
- s += " " + str(step).upper() + ": " + ", ".join(self.step_to_recipe[step])
- s += "]"
+ s += ' ' + str(step).upper() + ': ' + ', '.join(self.step_to_recipe[step])
+ s += ']'
return s
def output_status_line(self):
diff --git a/cerbero/utils/svn.py b/cerbero/utils/svn.py
index c4b634c1..7d7ea54a 100644
--- a/cerbero/utils/svn.py
+++ b/cerbero/utils/svn.py
@@ -20,48 +20,50 @@ from cerbero.utils import shell
async def checkout(url, dest):
- '''
+ """
Checkout a url to a given destination
@param url: url to checkout
@type url: string
@param dest: path where to do the checkout
@type url: string
- '''
+ """
await shell.async_call(['svn', 'co', '--non-interactive', '--trust-server-cert', url, dest], cpu_bound=False)
async def update(repo, revision='HEAD'):
- '''
+ """
Update a repositry to a given revision
@param repo: repository path
@type revision: str
@param revision: the revision to checkout
@type revision: str
- '''
- await shell.async_call(['svn', 'up', '--non-interactive', '--trust-server-cert', '-r', revision], repo, cpu_bound=False)
+ """
+ await shell.async_call(
+ ['svn', 'up', '--non-interactive', '--trust-server-cert', '-r', revision], repo, cpu_bound=False
+ )
def checkout_file(url, out_path):
- '''
+ """
Checkout a single file to out_path
@param url: file URL
@type url: str
@param out_path: output path
@type revision: str
- '''
+ """
shell.new_call(['svn', 'export', '--force', url, out_path])
def revision(repo):
- '''
+ """
Get the current revision of a repository with svnversion
@param repo: the path to the repository
@type repo: str
- '''
+ """
rev = shell.check_output(['svnversion'], cmd_dir=repo).splitlines()[0]
if rev[-1] == 'M':
rev = rev[:-1]
diff --git a/packages/custom.py b/packages/custom.py
index e775559e..5b47bd90 100644
--- a/packages/custom.py
+++ b/packages/custom.py
@@ -3,9 +3,9 @@
from cerbero.packages import package
from cerbero.enums import License
-class GStreamer:
- url = "http://gstreamer.freedesktop.org"
+class GStreamer:
+ url = 'http://gstreamer.freedesktop.org'
version = '1.23.0.1'
vendor = 'GStreamer Project'
licenses = [License.LGPLv2Plus]
diff --git a/recipes/custom.py b/recipes/custom.py
index 99c51e55..80229097 100644
--- a/recipes/custom.py
+++ b/recipes/custom.py
@@ -7,6 +7,7 @@ from cerbero.build.source import SourceType
from cerbero.build.cookbook import CookBook
from cerbero.enums import License, FatalError
+
def running_on_cerbero_ci():
if os.environ.get('CI_PROJECT_NAME', '') != 'cerbero':
return False
@@ -17,6 +18,7 @@ def running_on_cerbero_ci():
return False
return True
+
class GStreamer(recipe.Recipe):
licenses = [License.LGPLv2Plus]
version = '1.23.0.1'
@@ -134,9 +136,16 @@ class GStreamer(recipe.Recipe):
def list_gstreamer_1_0_plugins_by_category(config):
cookbook = CookBook(config)
plugins = defaultdict(list)
- recipes = ['gstreamer-1.0', 'gst-plugins-base-1.0', 'gst-plugins-good-1.0',
- 'gst-plugins-bad-1.0', 'gst-plugins-ugly-1.0', 'libnice',
- 'gst-libav-1.0', 'gst-editing-services-1.0', 'gst-rtsp-server-1.0'
+ recipes = [
+ 'gstreamer-1.0',
+ 'gst-plugins-base-1.0',
+ 'gst-plugins-good-1.0',
+ 'gst-plugins-bad-1.0',
+ 'gst-plugins-ugly-1.0',
+ 'libnice',
+ 'gst-libav-1.0',
+ 'gst-editing-services-1.0',
+ 'gst-rtsp-server-1.0',
]
if config.variants.rust:
recipes.append('gst-plugins-rs')
@@ -144,10 +153,10 @@ def list_gstreamer_1_0_plugins_by_category(config):
r = cookbook.get_recipe(r)
for attr_name in dir(r):
if attr_name.startswith('files_plugins_') and attr_name.endswith('devel'):
- cat_name = attr_name[len('files_plugins_'):-len('_devel')]
+ cat_name = attr_name[len('files_plugins_') : -len('_devel')]
plugins_list = getattr(r, attr_name)
elif attr_name.startswith('platform_files_plugins_') and attr_name.endswith('devel'):
- cat_name = attr_name[len('platform_files_plugins_'):-len('_devel')]
+ cat_name = attr_name[len('platform_files_plugins_') : -len('_devel')]
plugins_dict = getattr(r, attr_name)
plugins_list = plugins_dict.get(config.target_platform, [])
else:
diff --git a/setup.py b/setup.py
index 8bb1863c..94dc2344 100644
--- a/setup.py
+++ b/setup.py
@@ -45,26 +45,25 @@ def datafiles(prefix):
files.append((os.path.join(datadir, dirpath), [f]))
return files
+
# Intercept packages and recipes
-packages = [x[len('--package='):] for x in sys.argv
- if x.startswith('--package=')]
-recipes = [x[len('--recipe='):] for x in sys.argv if x.startswith('--recipe=')]
+packages = [x[len('--package=') :] for x in sys.argv if x.startswith('--package=')]
+recipes = [x[len('--recipe=') :] for x in sys.argv if x.startswith('--recipe=')]
if len(packages) == 0:
packages = None
if len(recipes) == 0:
recipes = None
-sys.argv = [x for x in sys.argv if not x.startswith('--package=') and
- not x.startswith('--recipe=')]
+sys.argv = [x for x in sys.argv if not x.startswith('--package=') and not x.startswith('--recipe=')]
-#Fill manifest
+# Fill manifest
shutil.copy('MANIFEST.in.in', 'MANIFEST.in')
with open('MANIFEST.in', 'a+') as f:
for dirname in ['data', 'config', 'tools']:
f.write('\n'.join(['include %s' % x for x in parse_dir(dirname)]))
f.write('\n')
- for (dirname, suffix) in [('packages', '.package'), ('recipes', '.recipe')]:
+ for dirname, suffix in [('packages', '.package'), ('recipes', '.recipe')]:
filenames = parse_dir(dirname)
requested = globals()[dirname]
if requested:
@@ -75,19 +74,19 @@ with open('MANIFEST.in', 'a+') as f:
requested_dir = requested + ['gstreamer-1.0']
else:
requested_dir = requested + ['build-tools', 'toolchain']
- requested_directories = tuple(os.path.join(dirname, x, "")
- for x in requested_dir)
-
- filenames = [p for p in filenames
- if p.startswith(requested_directories) or
- p.endswith(requested_filenames) or
- p.endswith('.py')]
-
- missing_files = [p for p in requested_filenames if
- not [True for m in filenames if m.endswith(p)]]
- assert not missing_files, \
- "Not all %s from the command line (%s) exist" % \
- (dirname, ", ".join(missing_files))
+ requested_directories = tuple(os.path.join(dirname, x, '') for x in requested_dir)
+
+ filenames = [
+ p
+ for p in filenames
+ if p.startswith(requested_directories) or p.endswith(requested_filenames) or p.endswith('.py')
+ ]
+
+ missing_files = [p for p in requested_filenames if not [True for m in filenames if m.endswith(p)]]
+ assert not missing_files, 'Not all %s from the command line (%s) exist' % (
+ dirname,
+ ', '.join(missing_files),
+ )
f.write('\n'.join(['include %s' % x for x in filenames]))
f.write('\n')
@@ -99,14 +98,12 @@ if len(prefix) == 1:
else:
prefix = '/usr/local'
+
class extended_sdist(setuptools_sdist.sdist):
user_options = setuptools_sdist.sdist.user_options + [
- ('source-dirs=', None,
- "Comma-separated list of source directories to add to the package"),
- ('package=', None,
- "Specific package to include, other packages are not included"),
- ('recipe=', None,
- "Specific recipe to include, other recipes are not included"),
+ ('source-dirs=', None, 'Comma-separated list of source directories to add to the package'),
+ ('package=', None, 'Specific package to include, other packages are not included'),
+ ('recipe=', None, 'Specific recipe to include, other recipes are not included'),
]
def initialize_options(self):
@@ -122,30 +119,28 @@ class extended_sdist(setuptools_sdist.sdist):
for d in self.source_dirs:
src = d.rstrip().rstrip(os.sep)
dest = os.path.join(base_dir, 'sources', os.path.basename(src))
- distutils.log.info("Copying %s -> %s", src, dest)
- copy_tree(src, dest, update=not self.force, verbose=0,
- dry_run=self.dry_run)
+ distutils.log.info('Copying %s -> %s', src, dest)
+ copy_tree(src, dest, update=not self.force, verbose=0, dry_run=self.dry_run)
+
setup(
- name = "cerbero",
- version = CERBERO_VERSION,
- author = "Andoni Morales",
- author_email = "amorales@fluendo.com",
- description = ("Multi platform build system for Open Source projects"),
- license = "LGPL",
- url = "http://gstreamer.freedesktop.org/",
- packages = find_packages(exclude=['tests']),
+ name='cerbero',
+ version=CERBERO_VERSION,
+ author='Andoni Morales',
+ author_email='amorales@fluendo.com',
+ description=('Multi platform build system for Open Source projects'),
+ license='LGPL',
+ url='http://gstreamer.freedesktop.org/',
+ packages=find_packages(exclude=['tests']),
long_description=read('README.md'),
- zip_safe = False,
+ zip_safe=False,
include_package_data=True,
- data_files = datafiles(prefix),
- entry_points = """
+ data_files=datafiles(prefix),
+ entry_points="""
[console_scripts]
cerbero = cerbero.main:main""",
classifiers=[
- "License :: OSI Approved :: LGPL License",
+ 'License :: OSI Approved :: LGPL License',
],
- cmdclass = {
- 'sdist' : extended_sdist
- }
+ cmdclass={'sdist': extended_sdist},
)
diff --git a/test/test_build_common.py b/test/test_build_common.py
index 2ba8a1cd..225578c7 100644
--- a/test/test_build_common.py
+++ b/test/test_build_common.py
@@ -25,29 +25,21 @@ from cerbero.utils import shell
class Recipe1(recipe.Recipe):
-
name = 'recipe1'
licence = 'LGPL'
uuid = '1'
files_misc = ['README', 'libexec/gstreamer-0.10/pluginsloader%(bext)s']
- platform_files_misc = {
- Platform.WINDOWS: ['windows'],
- Platform.LINUX: ['linux']}
+ platform_files_misc = {Platform.WINDOWS: ['windows'], Platform.LINUX: ['linux']}
files_bins = ['gst-launch']
- platform_files_bins = {
- Platform.WINDOWS: ['windows'],
- Platform.LINUX: ['linux']}
+ platform_files_bins = {Platform.WINDOWS: ['windows'], Platform.LINUX: ['linux']}
files_libs = ['libgstreamer-0.10']
- platform_files_libs = {
- Platform.WINDOWS: ['libgstreamer-win32'],
- Platform.LINUX: ['libgstreamer-x11']}
+ platform_files_libs = {Platform.WINDOWS: ['libgstreamer-win32'], Platform.LINUX: ['libgstreamer-x11']}
class Recipe2(recipe.Recipe):
-
name = 'recipe2'
licence = 'GPL'
@@ -55,7 +47,6 @@ class Recipe2(recipe.Recipe):
class Recipe3(recipe.Recipe):
-
name = 'recipe3'
licences = 'BSD'
@@ -63,7 +54,6 @@ class Recipe3(recipe.Recipe):
class Recipe4(recipe.Recipe):
-
name = 'recipe4'
licence = 'LGPL'
@@ -71,7 +61,6 @@ class Recipe4(recipe.Recipe):
class Recipe5(recipe.Recipe):
-
name = 'recipe5'
licence = 'LGPL'
@@ -85,7 +74,8 @@ def add_files(tmp):
os.makedirs(bindir)
os.makedirs(libdir)
os.makedirs(gstlibdir)
- shell.call('touch '
+ shell.call(
+ 'touch '
'windows '
'linux '
'README '
@@ -126,7 +116,8 @@ def add_files(tmp):
'lib/libgstreamer-x11.la '
'libexec/gstreamer-0.10/pluginsloader '
'libexec/gstreamer-0.10/pluginsloader.exe ',
- tmp)
+ tmp,
+ )
def create_cookbook(config):
diff --git a/test/test_cerbero_build_build.py b/test/test_cerbero_build_build.py
index 941addf0..02c917d6 100644
--- a/test/test_cerbero_build_build.py
+++ b/test/test_cerbero_build_build.py
@@ -24,7 +24,6 @@ from cerbero.build import build
class MakefilesBase(build.MakefilesBase):
-
srcdir = ''
build_dir = ''
@@ -44,7 +43,6 @@ class MakefilesBase(build.MakefilesBase):
class ModifyEnvTest(unittest.TestCase):
-
def setUp(self):
self.var = 'TEST_VAR'
self.val1 = 'test'
@@ -55,7 +53,7 @@ class ModifyEnvTest(unittest.TestCase):
os.environ[self.var] = self.val1
self.mk.append_env = {self.var: self.val2}
val = self.mk.get_env_var(self.var)
- self.assertEqual(val, "%s %s" % (self.val1, self.val2))
+ self.assertEqual(val, '%s %s' % (self.val1, self.val2))
def testAppendNonExistentEnv(self):
if self.var in os.environ:
@@ -83,15 +81,14 @@ class ModifyEnvTest(unittest.TestCase):
self.mk.config.allow_system_libs = True
self.mk.use_system_libs = True
val = self.mk.get_env_var('PKG_CONFIG_PATH')
- self.assertEqual(val,'/path/2:/usr/lib/pkgconfig:'
- '/usr/share/pkgconfig:/usr/lib/i386-linux-gnu/pkgconfig')
+ self.assertEqual(val, '/path/2:/usr/lib/pkgconfig:' '/usr/share/pkgconfig:/usr/lib/i386-linux-gnu/pkgconfig')
val = self.mk.get_env_var('PKG_CONFIG_LIBDIR')
- self.assertEqual(val,'/path/2')
+ self.assertEqual(val, '/path/2')
def testNestedModif(self):
os.environ[self.var] = self.val1
self.mk.append_env = {self.var: self.val2}
val = self.mk.get_env_var(self.var)
- self.assertEqual(val, "%s %s" % (self.val1, self.val2))
+ self.assertEqual(val, '%s %s' % (self.val1, self.val2))
val = self.mk.get_env_var_nested(self.var)
- self.assertEqual(val, "%s %s" % (self.val1, self.val2))
+ self.assertEqual(val, '%s %s' % (self.val1, self.val2))
diff --git a/test/test_cerbero_build_cookbook.py b/test/test_cerbero_build_cookbook.py
index 6fe6d601..d4f18230 100644
--- a/test/test_cerbero_build_cookbook.py
+++ b/test/test_cerbero_build_cookbook.py
@@ -27,7 +27,6 @@ from test.test_build_common import Recipe1
class PackageTest(unittest.TestCase):
-
def setUp(self):
self.config = Config()
self.config.cache_file = '/dev/null'
@@ -65,8 +64,7 @@ class PackageTest(unittest.TestCase):
def testAddGetRecipe(self):
recipe = Recipe1(self.config)
- self.assertRaises(RecipeNotFoundError, self.cookbook.get_recipe,
- recipe.name)
+ self.assertRaises(RecipeNotFoundError, self.cookbook.get_recipe, recipe.name)
self.cookbook.add_recipe(recipe)
self.assertEqual(recipe, self.cookbook.recipes[recipe.name])
self.assertEqual(recipe, self.cookbook.get_recipe(recipe.name))
diff --git a/test/test_cerbero_build_filesprovider.py b/test/test_cerbero_build_filesprovider.py
index fdd36d5a..e7261927 100644
--- a/test/test_cerbero_build_filesprovider.py
+++ b/test/test_cerbero_build_filesprovider.py
@@ -27,29 +27,22 @@ from test.test_common import DummyConfig
class Config(DummyConfig):
-
def __init__(self, tmp, platform):
self.prefix = tmp
self.target_platform = platform
class FilesProvider(filesprovider.FilesProvider):
-
files_misc = ['README', 'libexec/gstreamer-0.10/pluginsloader%(bext)s']
files_libs = ['libgstreamer-0.10']
files_bins = ['gst-launch']
files_devel = ['include/gstreamer.h']
licenses_devel = [License.LGPL]
- platform_files_bins = {
- Platform.WINDOWS: ['windows'],
- Platform.LINUX: ['linux']}
- platform_files_libs = {
- Platform.WINDOWS: ['libgstreamer-win32'],
- Platform.LINUX: ['libgstreamer-x11']}
+ platform_files_bins = {Platform.WINDOWS: ['windows'], Platform.LINUX: ['linux']}
+ platform_files_libs = {Platform.WINDOWS: ['libgstreamer-win32'], Platform.LINUX: ['libgstreamer-x11']}
class PackageTest(unittest.TestCase):
-
def setUp(self):
self.tmp = tempfile.mkdtemp()
win32config = Config(self.tmp, Platform.WINDOWS)
@@ -63,44 +56,44 @@ class PackageTest(unittest.TestCase):
self.linuxlib = ['lib/libgstreamer-0.10.so.1', 'lib/libgstreamer-x11.so.1']
self.winmisc = ['README', 'libexec/gstreamer-0.10/pluginsloader.exe']
self.linuxmisc = ['README', 'libexec/gstreamer-0.10/pluginsloader']
- devfiles = ['include/gstreamer.h', 'lib/libgstreamer-0.10.a',
- 'lib/libgstreamer-0.10.la']
-
- self.windevfiles = devfiles + ['lib/libgstreamer-win32.a',
- 'lib/libgstreamer-win32.la', 'lib/libgstreamer-win32.dll.a',
- 'lib/libgstreamer-win32.def', 'lib/gstreamer-win32.lib',
- 'lib/libgstreamer-0.10.dll.a', 'lib/libgstreamer-0.10.def',
- 'lib/gstreamer-0.10.lib']
- self.lindevfiles = devfiles + ['lib/libgstreamer-0.10.so',
- 'lib/libgstreamer-x11.a', 'lib/libgstreamer-x11.la',
- 'lib/libgstreamer-x11.so']
+ devfiles = ['include/gstreamer.h', 'lib/libgstreamer-0.10.a', 'lib/libgstreamer-0.10.la']
+
+ self.windevfiles = devfiles + [
+ 'lib/libgstreamer-win32.a',
+ 'lib/libgstreamer-win32.la',
+ 'lib/libgstreamer-win32.dll.a',
+ 'lib/libgstreamer-win32.def',
+ 'lib/gstreamer-win32.lib',
+ 'lib/libgstreamer-0.10.dll.a',
+ 'lib/libgstreamer-0.10.def',
+ 'lib/gstreamer-0.10.lib',
+ ]
+ self.lindevfiles = devfiles + [
+ 'lib/libgstreamer-0.10.so',
+ 'lib/libgstreamer-x11.a',
+ 'lib/libgstreamer-x11.la',
+ 'lib/libgstreamer-x11.so',
+ ]
def tearDown(self):
shutil.rmtree(self.tmp)
def testFilesCategories(self):
- self.assertEqual(sorted(['bins', 'libs', 'misc', 'devel']),
- self.win32recipe._files_categories())
+ self.assertEqual(sorted(['bins', 'libs', 'misc', 'devel']), self.win32recipe._files_categories())
def testListBinaries(self):
- self.assertEqual(self.win32recipe.files_list_by_category('bins'),
- sorted(self.winbin))
- self.assertEqual(self.linuxrecipe.files_list_by_category('bins'),
- sorted(self.linuxbin))
+ self.assertEqual(self.win32recipe.files_list_by_category('bins'), sorted(self.winbin))
+ self.assertEqual(self.linuxrecipe.files_list_by_category('bins'), sorted(self.linuxbin))
def testListLibraries(self):
add_files(self.tmp)
- self.assertEqual(self.win32recipe.files_list_by_category('libs'),
- sorted(self.winlib))
- self.assertEqual(self.linuxrecipe.files_list_by_category('libs'),
- sorted(self.linuxlib))
+ self.assertEqual(self.win32recipe.files_list_by_category('libs'), sorted(self.winlib))
+ self.assertEqual(self.linuxrecipe.files_list_by_category('libs'), sorted(self.linuxlib))
def testDevelFiles(self):
add_files(self.tmp)
- self.assertEqual(self.win32recipe.devel_files_list(),
- sorted(self.windevfiles))
- self.assertEqual(self.linuxrecipe.devel_files_list(),
- sorted(self.lindevfiles))
+ self.assertEqual(self.win32recipe.devel_files_list(), sorted(self.windevfiles))
+ self.assertEqual(self.linuxrecipe.devel_files_list(), sorted(self.lindevfiles))
def testDistFiles(self):
win32files = self.winlib + self.winbin + self.winmisc
diff --git a/test/test_cerbero_build_recipe.py b/test/test_cerbero_build_recipe.py
index 2b2a643c..03194d59 100644
--- a/test/test_cerbero_build_recipe.py
+++ b/test/test_cerbero_build_recipe.py
@@ -27,7 +27,6 @@ from test.test_build_common import Recipe1
class Class1(object):
-
test = None
def __init__(self):
@@ -38,13 +37,11 @@ class Class1(object):
class Class2(object):
-
class2_method = lambda x: None
fetch = lambda x: 'CODEPASS'
class Recipe(recipe.Recipe):
-
btype = Class1
stype = Class2
@@ -63,13 +60,11 @@ class Recipe(recipe.Recipe):
class Class3(object, metaclass=recipe.MetaUniversalRecipe):
-
def _do_step(self, name):
return name
class TestReceiptMetaClass(unittest.TestCase):
-
def setUp(self):
self.config = DummyConfig()
self.config.local_sources = ''
@@ -99,7 +94,6 @@ class TestReceiptMetaClass(unittest.TestCase):
class TestReceipt(unittest.TestCase):
-
def setUp(self):
self.config = DummyConfig()
self.config.local_sources = 'path1'
@@ -131,7 +125,6 @@ class TestReceipt(unittest.TestCase):
class TestLicenses(unittest.TestCase):
-
def setUp(self):
self.config = DummyConfig()
self.config.local_sources = ''
@@ -156,7 +149,6 @@ class TestLicenses(unittest.TestCase):
class TestMetaUniveralRecipe(unittest.TestCase):
-
def testBuildSteps(self):
obj = Class3()
for _, step in recipe.BuildSteps():
@@ -166,7 +158,6 @@ class TestMetaUniveralRecipe(unittest.TestCase):
class TestUniversalRecipe(unittest.TestCase):
-
def setUp(self):
self.config = DummyConfig()
self.config.target_platform = Platform.LINUX
@@ -194,10 +185,8 @@ class TestUniversalRecipe(unittest.TestCase):
def testAddRecipe(self):
self.recipe.add_recipe(self.recipe_x86)
- self.assertEqual(self.recipe._recipes[Architecture.X86],
- self.recipe_x86)
- self.assertEqual(self.recipe._proxy_recipe,
- self.recipe_x86)
+ self.assertEqual(self.recipe._recipes[Architecture.X86], self.recipe_x86)
+ self.assertEqual(self.recipe._proxy_recipe, self.recipe_x86)
def testDifferentRecipe(self):
self.recipe.add_recipe(self.recipe_x86)
@@ -209,5 +198,4 @@ class TestUniversalRecipe(unittest.TestCase):
self.assertEqual(self.recipe.steps, [])
self.recipe.add_recipe(self.recipe_x86)
self.recipe.add_recipe(self.recipe_x86_64)
- self.assertEqual(self.recipe.steps,
- recipe.BuildSteps() + [recipe.BuildSteps.MERGE])
+ self.assertEqual(self.recipe.steps, recipe.BuildSteps() + [recipe.BuildSteps.MERGE])
diff --git a/test/test_cerbero_config.py b/test/test_cerbero_config.py
index f4465f83..ec43b363 100644
--- a/test/test_cerbero_config.py
+++ b/test/test_cerbero_config.py
@@ -25,13 +25,13 @@ from cerbero import config as cconfig
from cerbero.enums import Platform
from cerbero.errors import FatalError, ConfigurationError
from cerbero.utils import system_info
+
Config = cconfig.Config
class LinuxPackagesTest(unittest.TestCase):
-
def setUp(self):
- os.environ[cconfig.CERBERO_UNINSTALLED]='1'
+ os.environ[cconfig.CERBERO_UNINSTALLED] = '1'
def _checkLoadConfig(self, config, func, filename, properties):
with open(filename, 'w+') as f:
@@ -53,45 +53,44 @@ class LinuxPackagesTest(unittest.TestCase):
data_dir = os.path.join(os.path.dirname(__file__), '..', 'data')
data_dir = os.path.abspath(data_dir)
props = {
- 'platform': platform,
- 'target_platform': platform,
- 'distro': distro,
- 'distro_version': distro_version,
- 'target_distro': distro,
- 'target_distro_version': distro_version,
- 'arch': arch,
- 'target_arch': arch,
- 'num_of_cpus': num_of_cpus,
- 'host': None,
- 'build': None,
- 'target': None,
- 'prefix': None,
- 'sources': None,
- 'local_sources': None,
- 'min_osx_sdk_version': None,
- 'lib_suffix': '',
- 'cache_file': None,
- 'toolchain_prefix': None,
- 'install_dir': None,
- 'packages_prefix': None,
- 'data_dir': data_dir,
- 'environ_dir': config._relative_path('config'),
- 'recipes_dir': config._relative_path('recipes'),
- 'packages_dir': config._relative_path('packages'),
- 'git_root': cconfig.DEFAULT_GIT_ROOT,
- 'wix_prefix': cconfig.DEFAULT_WIX_PREFIX,
- 'packager': cconfig.DEFAULT_PACKAGER,
- 'py_prefix': 'lib/python%s.%s' % (sys.version_info[0],
- sys.version_info[1]),
- 'allow_parallel_build': cconfig.DEFAULT_ALLOW_PARALLEL_BUILD,
- 'use_configure_cache': False,
- 'allow_system_libs': True,
- 'external_packages': {},
- 'external_recipes': {},
- 'use_ccache': None,
- 'force_git_commit': None,
- 'universal_archs': [cconfig.Architecture.X86, cconfig.Architecture.X86_64],
- }
+ 'platform': platform,
+ 'target_platform': platform,
+ 'distro': distro,
+ 'distro_version': distro_version,
+ 'target_distro': distro,
+ 'target_distro_version': distro_version,
+ 'arch': arch,
+ 'target_arch': arch,
+ 'num_of_cpus': num_of_cpus,
+ 'host': None,
+ 'build': None,
+ 'target': None,
+ 'prefix': None,
+ 'sources': None,
+ 'local_sources': None,
+ 'min_osx_sdk_version': None,
+ 'lib_suffix': '',
+ 'cache_file': None,
+ 'toolchain_prefix': None,
+ 'install_dir': None,
+ 'packages_prefix': None,
+ 'data_dir': data_dir,
+ 'environ_dir': config._relative_path('config'),
+ 'recipes_dir': config._relative_path('recipes'),
+ 'packages_dir': config._relative_path('packages'),
+ 'git_root': cconfig.DEFAULT_GIT_ROOT,
+ 'wix_prefix': cconfig.DEFAULT_WIX_PREFIX,
+ 'packager': cconfig.DEFAULT_PACKAGER,
+ 'py_prefix': 'lib/python%s.%s' % (sys.version_info[0], sys.version_info[1]),
+ 'allow_parallel_build': cconfig.DEFAULT_ALLOW_PARALLEL_BUILD,
+ 'use_configure_cache': False,
+ 'allow_system_libs': True,
+ 'external_packages': {},
+ 'external_recipes': {},
+ 'use_ccache': None,
+ 'force_git_commit': None,
+ 'universal_archs': [cconfig.Architecture.X86, cconfig.Architecture.X86_64],
+ }
self.assertEqual(sorted(config._properties), sorted(props.keys()))
for p, v in props.items():
self.assertEqual(getattr(config, p), v)
@@ -107,8 +106,7 @@ class LinuxPackagesTest(unittest.TestCase):
self.assertIsNone(getattr(config, p))
config.load_defaults()
- self._checkLoadConfig(config, config._load_main_config,
- tmpconfig.name, config._properties)
+ self._checkLoadConfig(config, config._load_main_config, tmpconfig.name, config._properties)
def testLoadPlatformConfig(self):
config = Config()
@@ -116,11 +114,9 @@ class LinuxPackagesTest(unittest.TestCase):
config.environ_dir = tmpdir
config.load_defaults()
config._load_platform_config()
- platform_config = os.path.join(tmpdir, '%s.config' %
- config.target_platform)
+ platform_config = os.path.join(tmpdir, '%s.config' % config.target_platform)
config.load_defaults()
- self._checkLoadConfig(config, config._load_platform_config,
- platform_config, config._properties)
+ self._checkLoadConfig(config, config._load_platform_config, platform_config, config._properties)
def testFindDataDir(self):
config = Config()
@@ -133,7 +129,7 @@ class LinuxPackagesTest(unittest.TestCase):
del os.environ[cconfig.CERBERO_UNINSTALLED]
config._check_uninstalled()
self.assertFalse(config.uninstalled)
- os.environ[cconfig.CERBERO_UNINSTALLED]='1'
+ os.environ[cconfig.CERBERO_UNINSTALLED] = '1'
config._check_uninstalled()
self.assertTrue(config.uninstalled)
@@ -143,8 +139,7 @@ class LinuxPackagesTest(unittest.TestCase):
config.prefix = tmpdir
config.load_defaults()
config.do_setup_env()
- env = config.get_env(tmpdir, os.path.join(tmpdir, 'lib'),
- config.py_prefix)
+ env = config.get_env(tmpdir, os.path.join(tmpdir, 'lib'), config.py_prefix)
for k, v in env.items():
self.assertEqual(os.environ[k], v)
@@ -167,8 +162,7 @@ class LinuxPackagesTest(unittest.TestCase):
config.filename = None
config._load_cmd_config(None)
self.assertIsNone(config.filename)
- self.assertRaises(ConfigurationError, config._load_cmd_config,
- '/foo/bar')
+ self.assertRaises(ConfigurationError, config._load_cmd_config, '/foo/bar')
tmpfile = tempfile.NamedTemporaryFile()
config._load_cmd_config(tmpfile.name)
self.assertEqual(config.filename, cconfig.DEFAULT_CONFIG_FILE)
@@ -179,27 +173,19 @@ class LinuxPackagesTest(unittest.TestCase):
cerbero_home = os.path.expanduser('~/cerbero')
self.assertEqual(config.prefix, os.path.join(cerbero_home, 'dist'))
self.assertEqual(config.install_dir, config.prefix)
- self.assertEqual(config.sources,
- os.path.join(cerbero_home, 'sources'))
- self.assertEqual(config.local_sources,
- os.path.join(cerbero_home, 'sources', 'local'))
+ self.assertEqual(config.sources, os.path.join(cerbero_home, 'sources'))
+ self.assertEqual(config.local_sources, os.path.join(cerbero_home, 'sources', 'local'))
def testRecipesExternalRepositories(self):
config = Config()
config.recipes_dir = 'test'
- config.external_recipes = {'test1': ('/path/to/repo', 1),
- 'test2': ('/path/to/other/repo', 2)}
- expected = {'default': ('test', 0),
- 'test1': ('/path/to/repo', 1),
- 'test2': ('/path/to/other/repo', 2)}
+ config.external_recipes = {'test1': ('/path/to/repo', 1), 'test2': ('/path/to/other/repo', 2)}
+ expected = {'default': ('test', 0), 'test1': ('/path/to/repo', 1), 'test2': ('/path/to/other/repo', 2)}
self.assertEqual(config.get_recipes_repos(), expected)
def testPakcagesExternalRepositories(self):
config = Config()
config.packages_dir = 'test'
- config.external_packages = {'test1': ('/path/to/repo', 1),
- 'test2': ('/path/to/other/repo', 2)}
- expected = {'default': ('test', 0),
- 'test1': ('/path/to/repo', 1),
- 'test2': ('/path/to/other/repo', 2)}
+ config.external_packages = {'test1': ('/path/to/repo', 1), 'test2': ('/path/to/other/repo', 2)}
+ expected = {'default': ('test', 0), 'test1': ('/path/to/repo', 1), 'test2': ('/path/to/other/repo', 2)}
self.assertEqual(config.get_packages_repos(), expected)
diff --git a/test/test_cerbero_ide_pkgconfig.py b/test/test_cerbero_ide_pkgconfig.py
index 85d31f72..b484789a 100644
--- a/test/test_cerbero_ide_pkgconfig.py
+++ b/test/test_cerbero_ide_pkgconfig.py
@@ -24,7 +24,6 @@ from cerbero.ide.pkgconfig import PkgConfig
class TestPkgConfig(unittest.TestCase):
-
def setUp(self):
pc_path = os.path.join(os.path.dirname(__file__), 'pkgconfig')
os.environ['PKG_CONFIG_LIBDIR'] = pc_path
@@ -33,14 +32,24 @@ class TestPkgConfig(unittest.TestCase):
self.pkgconfig2 = PkgConfig('gstreamer-0.10', False)
def testListAll(self):
- expected = ['gobject-2.0', 'gmodule-2.0', 'libxml-2.0', 'gthread-2.0',
- 'glib-2.0', 'gmodule-no-export-2.0', 'gstreamer-0.10']
+ expected = [
+ 'gobject-2.0',
+ 'gmodule-2.0',
+ 'libxml-2.0',
+ 'gthread-2.0',
+ 'glib-2.0',
+ 'gmodule-no-export-2.0',
+ 'gstreamer-0.10',
+ ]
self.assertEqual(sorted(PkgConfig.list_all()), sorted(expected))
def testIncludeDirs(self):
- expected = ['/usr/include/gstreamer-0.10', '/usr/include/glib-2.0',
- '/usr/lib/glib-2.0/include',
- '/usr/include/libxml2']
+ expected = [
+ '/usr/include/gstreamer-0.10',
+ '/usr/include/glib-2.0',
+ '/usr/lib/glib-2.0/include',
+ '/usr/include/libxml2',
+ ]
self.assertEqual(self.pkgconfig.include_dirs(), expected)
expected = ['/usr/include/gstreamer-0.10']
self.assertEqual(self.pkgconfig2.include_dirs(), expected)
@@ -58,15 +67,13 @@ class TestPkgConfig(unittest.TestCase):
self.assertEqual(self.pkgconfig2.libraries_dirs(), expected)
def testLibraries(self):
- expected = ['gstreamer-0.10', 'gobject-2.0', 'gmodule-2.0', 'xml2',
- 'gthread-2.0', 'rt', 'glib-2.0']
+ expected = ['gstreamer-0.10', 'gobject-2.0', 'gmodule-2.0', 'xml2', 'gthread-2.0', 'rt', 'glib-2.0']
self.assertEqual(self.pkgconfig.libraries(), expected)
expected = ['gstreamer-0.10']
self.assertEqual(self.pkgconfig2.libraries(), expected)
def testRequires(self):
- expected = ['glib-2.0', 'gobject-2.0', 'gmodule-no-export-2.0',
- 'gthread-2.0', 'libxml-2.0']
+ expected = ['glib-2.0', 'gobject-2.0', 'gmodule-no-export-2.0', 'gthread-2.0', 'libxml-2.0']
self.assertEqual(self.pkgconfig.requires(), expected)
self.assertEqual(self.pkgconfig2.requires(), expected)
diff --git a/test/test_cerbero_ide_xcode_xcconfig.py b/test/test_cerbero_ide_xcode_xcconfig.py
index 5ce7d522..e5adf388 100644
--- a/test/test_cerbero_ide_xcode_xcconfig.py
+++ b/test/test_cerbero_ide_xcode_xcconfig.py
@@ -23,7 +23,7 @@ import tempfile
from cerbero.ide.xcode.xcconfig import XCConfig
-XCCONFIG = '''
+XCCONFIG = """
ALWAYS_SEARCH_USER_PATHS = YES
USER_HEADER_SEARCH_PATHS = /usr/include/gstreamer-0.10\
/usr/include/glib-2.0 /usr/lib/glib-2.0/include\
@@ -31,10 +31,10 @@ USER_HEADER_SEARCH_PATHS = /usr/include/gstreamer-0.10\
LIBRARY_SEARCH_PATHS =
OTHER_LDFLAGS = -lgstreamer-0.10 \
-lgobject-2.0 -lgmodule-2.0 -lxml2 -lgthread-2.0 -lrt -lglib-2.0
-'''
+"""
-class TestPkgConfig(unittest.TestCase):
+class TestPkgConfig(unittest.TestCase):
def setUp(self):
pc_path = os.path.join(os.path.dirname(__file__), 'pkgconfig')
os.environ['PKG_CONFIG_LIBDIR'] = pc_path
@@ -42,13 +42,13 @@ class TestPkgConfig(unittest.TestCase):
def testFill(self):
xcconfig = XCConfig('gstreamer-0.10')
- expected = \
- {'libs': ' -lgstreamer-0.10 -lgobject-2.0 -lgmodule-2.0 '
- '-lxml2 -lgthread-2.0 -lrt -lglib-2.0',
- 'hsp': '/usr/include/gstreamer-0.10 /usr/include/glib-2.0 '
- '/usr/lib/glib-2.0/include '
- '/usr/include/libxml2',
- 'lsp': ''}
+ expected = {
+ 'libs': ' -lgstreamer-0.10 -lgobject-2.0 -lgmodule-2.0 ' '-lxml2 -lgthread-2.0 -lrt -lglib-2.0',
+ 'hsp': '/usr/include/gstreamer-0.10 /usr/include/glib-2.0 '
+ '/usr/lib/glib-2.0/include '
+ '/usr/include/libxml2',
+ 'lsp': '',
+ }
self.assertEqual(expected, xcconfig._fill())
def testXCConfig(self):
diff --git a/test/test_cerbero_packages_disttarball.py b/test/test_cerbero_packages_disttarball.py
index 8db2c98d..b7fb1edf 100644
--- a/test/test_cerbero_packages_disttarball.py
+++ b/test/test_cerbero_packages_disttarball.py
@@ -28,12 +28,11 @@ from test.test_build_common import add_files
class DistTarballTest(unittest.TestCase):
-
def setUp(self):
self.config = DummyConfig()
self.tmp = tempfile.mkdtemp()
self.config.prefix = self.tmp
- self.store = create_store(self.config)
+ self.store = create_store(self.config)
self.package = self.store.get_package('gstreamer-runtime')
self.packager = DistTarball(self.config, self.package, self.store)
add_files(self.tmp)
@@ -45,7 +44,7 @@ class DistTarballTest(unittest.TestCase):
# Creates one package with the runtime files
filenames = self.packager.pack(self.tmp, devel=False)
self.assertEqual(len(filenames), 1)
- tar = tarfile.open(filenames[0], "r:bz2")
+ tar = tarfile.open(filenames[0], 'r:bz2')
tarfiles = sorted([x.path for x in tar.getmembers()])
self.assertEqual(tarfiles, self.package.files_list())
@@ -54,10 +53,10 @@ class DistTarballTest(unittest.TestCase):
# devel files
filenames = self.packager.pack(self.tmp, devel=True)
self.assertEqual(len(filenames), 2)
- tar = tarfile.open(filenames[0], "r:bz2")
+ tar = tarfile.open(filenames[0], 'r:bz2')
tarfiles = sorted([x.path for x in tar.getmembers()])
self.assertEqual(tarfiles, self.package.files_list())
- tar = tarfile.open(filenames[1], "r:bz2")
+ tar = tarfile.open(filenames[1], 'r:bz2')
tarfiles = sorted([x.path for x in tar.getmembers()])
self.assertEqual(tarfiles, self.package.devel_files_list())
@@ -65,6 +64,6 @@ class DistTarballTest(unittest.TestCase):
# Creates 1 package, with the runtime files and the devel files
filenames = self.packager.pack(self.tmp, devel=True, split=False)
self.assertEqual(len(filenames), 1)
- tar = tarfile.open(filenames[0], "r:bz2")
+ tar = tarfile.open(filenames[0], 'r:bz2')
tarfiles = sorted([x.path for x in tar.getmembers()])
self.assertEqual(tarfiles, self.package.all_files_list())
diff --git a/test/test_cerbero_packages_linux.py b/test/test_cerbero_packages_linux.py
index 9ac86fca..16118e6d 100644
--- a/test/test_cerbero_packages_linux.py
+++ b/test/test_cerbero_packages_linux.py
@@ -30,16 +30,13 @@ packed = []
class LoggerPackager(linux.LinuxPackager):
-
def pack(self, output_dir, devel, force, keep_temp, pack_deps, tmpdir):
packed.append(self.package.name)
class DummyPackager(linux.LinuxPackager):
-
def build(self, output_dir, tarname, tmpdir, packagedir, srcdir):
- linux.LinuxPackager.build(self, output_dir, tarname, tmpdir,
- packagedir, srcdir)
+ linux.LinuxPackager.build(self, output_dir, tarname, tmpdir, packagedir, srcdir)
return ['test']
def create_tree(self, tmpdir):
@@ -48,9 +45,7 @@ class DummyPackager(linux.LinuxPackager):
class DummyTarballPackager(PackagerBase):
-
- def pack(self, output_dir, devel=True, force=False, split=True,
- package_prefix=''):
+ def pack(self, output_dir, devel=True, force=False, split=True, package_prefix=''):
return ['test']
@@ -58,12 +53,10 @@ linux.DistTarball = DummyTarballPackager
class LinuxPackagesTest(unittest.TestCase):
-
def setUp(self):
self.config = Config()
self.store = create_store(self.config)
- self.packager = linux.LinuxPackager(self.config,
- self.store.get_package('gstreamer-runtime'), self.store)
+ self.packager = linux.LinuxPackager(self.config, self.store.get_package('gstreamer-runtime'), self.store)
def testInit(self):
config = Config()
@@ -80,8 +73,7 @@ class LinuxPackagesTest(unittest.TestCase):
config.packager = 'Pin <pan@p.un>'
packager = linux.LinuxPackager(config, package, None)
self.assertEqual(packager.package_prefix, 'test-')
- self.assertEqual(packager.full_package_name,
- 'test-gstreamer-test1-1.0')
+ self.assertEqual(packager.full_package_name, 'test-gstreamer-test1-1.0')
self.assertEqual(packager.packager, 'Pin <pan@p.un>')
# Test ignore package
@@ -92,10 +84,7 @@ class LinuxPackagesTest(unittest.TestCase):
def testRequires(self):
self.packager._empty_packages = []
- expected = sorted(['gstreamer-test-bindings',
- 'gstreamer-test2',
- 'gstreamer-test3',
- 'gstreamer-test1'])
+ expected = sorted(['gstreamer-test-bindings', 'gstreamer-test2', 'gstreamer-test3', 'gstreamer-test1'])
requires = self.packager.get_requires(PackageType.RUNTIME, '-dev')
self.assertEqual(expected, requires)
@@ -112,17 +101,14 @@ class LinuxPackagesTest(unittest.TestCase):
self.assertEqual([x + '-dev' for x in expected], requires)
# test empty packages
- self.packager._empty_packages = \
- [self.store.get_package('gstreamer-test2')]
+ self.packager._empty_packages = [self.store.get_package('gstreamer-test2')]
requires = self.packager.get_requires(PackageType.RUNTIME, '-dev')
expected.remove('gstreamer-test2')
self.assertEqual(expected, requires)
def testMetaPackageRequires(self):
self.packager._empty_packages = []
- expected = (['gstreamer-test1'],
- ['gstreamer-test3'],
- ['gstreamer-test-bindings'])
+ expected = (['gstreamer-test1'], ['gstreamer-test3'], ['gstreamer-test-bindings'])
self.store.get_package('gstreamer-test1').has_runtime_package = True
self.store.get_package('gstreamer-test3').has_runtime_package = True
self.store.get_package('gstreamer-test-bindings').has_runtime_package = True
@@ -141,18 +127,12 @@ class LinuxPackagesTest(unittest.TestCase):
for p in [self.store.get_package(x[0]) for x in expected]:
p.has_devel_package = True
requires = self.packager.get_meta_requires(PackageType.DEVEL, '-dev')
- expected = (['gstreamer-test1-dev'],
- ['gstreamer-test3-dev'],
- ['gstreamer-test-bindings-dev'])
+ expected = (['gstreamer-test1-dev'], ['gstreamer-test3-dev'], ['gstreamer-test-bindings-dev'])
self.assertEqual(expected, requires)
def testPackDeps(self):
- expected = sorted(['gstreamer-test-bindings',
- 'gstreamer-test2',
- 'gstreamer-test3',
- 'gstreamer-test1'])
- self.packager = LoggerPackager(self.config,
- self.store.get_package('gstreamer-runtime'), self.store)
+ expected = sorted(['gstreamer-test-bindings', 'gstreamer-test2', 'gstreamer-test3', 'gstreamer-test1'])
+ self.packager = LoggerPackager(self.config, self.store.get_package('gstreamer-runtime'), self.store)
self.packager.devel = False
self.packager.force = False
global packed
@@ -167,15 +147,13 @@ class LinuxPackagesTest(unittest.TestCase):
packed = []
def testPack(self):
- self.packager = DummyPackager(self.config,
- self.store.get_package('gstreamer-runtime'), self.store)
+ self.packager = DummyPackager(self.config, self.store.get_package('gstreamer-runtime'), self.store)
paths = self.packager.pack('', False, True, True, False, None)
self.assertTrue(os.path.exists('gstreamer-runtime-stamp'))
os.remove('gstreamer-runtime-stamp')
self.assertEqual(paths, ['test'])
- self.packager = DummyPackager(self.config,
- self.store.get_package('gstreamer-test1'), self.store)
+ self.packager = DummyPackager(self.config, self.store.get_package('gstreamer-test1'), self.store)
paths = self.packager.pack('', False, True, True, False, None)
self.assertTrue(os.path.exists('gstreamer-test1-stamp'))
os.remove('gstreamer-test1-stamp')
diff --git a/test/test_cerbero_packages_osx_info_plist.py b/test/test_cerbero_packages_osx_info_plist.py
index e3394853..8734b2f5 100644
--- a/test/test_cerbero_packages_osx_info_plist.py
+++ b/test/test_cerbero_packages_osx_info_plist.py
@@ -19,30 +19,28 @@
import unittest
import tempfile
-from cerbero.packages.osx.info_plist import InfoPlist, FrameworkPlist,\
- ApplicationPlist, INFO_PLIST_TPL
+from cerbero.packages.osx.info_plist import InfoPlist, FrameworkPlist, ApplicationPlist, INFO_PLIST_TPL
class InfoPlistTest(unittest.TestCase):
-
- PROPS_TPL = ('%(icon)s<key>CFBundleIdentifier</key>\n'
- '<string>test.org</string>\n'
- '<key>CFBundleName</key>\n'
- '<string>test</string>\n'
- '<key>CFBundlePackageGetInfoString</key>\n'
- '<string>Test package</string>\n'
- '<key>CFBundlePackageType</key>\n'
- '<string>%(ptype)s</string>\n'
- '<key>CFBundleVersion</key>\n'
- '<string>1.0</string>')
+ PROPS_TPL = (
+ '%(icon)s<key>CFBundleIdentifier</key>\n'
+ '<string>test.org</string>\n'
+ '<key>CFBundleName</key>\n'
+ '<string>test</string>\n'
+ '<key>CFBundlePackageGetInfoString</key>\n'
+ '<string>Test package</string>\n'
+ '<key>CFBundlePackageType</key>\n'
+ '<string>%(ptype)s</string>\n'
+ '<key>CFBundleVersion</key>\n'
+ '<string>1.0</string>'
+ )
def setUp(self):
- self.info_plist = InfoPlist('test', 'test.org', '1.0',
- 'Test package')
+ self.info_plist = InfoPlist('test', 'test.org', '1.0', 'Test package')
def testFormatProperty(self):
- self.assertEqual('<key>Key</key>\n<string>Value</string>',
- self.info_plist._format_property('Key', 'Value'))
+ self.assertEqual('<key>Key</key>\n<string>Value</string>', self.info_plist._format_property('Key', 'Value'))
def testGetPropertiesString(self):
result = self.info_plist._get_properties_string()
@@ -50,15 +48,13 @@ class InfoPlistTest(unittest.TestCase):
self.assertEqual(result, expected)
def testFrameworkPackageType(self):
- self.info_plist = FrameworkPlist('test', 'test.org', '1.0',
- 'Test package')
+ self.info_plist = FrameworkPlist('test', 'test.org', '1.0', 'Test package')
result = self.info_plist._get_properties_string()
expected = self.PROPS_TPL % {'ptype': 'FMWK', 'icon': ''}
self.assertEqual(result, expected)
def testApplicationPackageType(self):
- self.info_plist = ApplicationPlist('test', 'test.org', '1.0',
- 'Test package')
+ self.info_plist = ApplicationPlist('test', 'test.org', '1.0', 'Test package')
result = self.info_plist._get_properties_string()
expected = self.PROPS_TPL % {'ptype': 'APPL', 'icon': ''}
self.assertEqual(result, expected)
@@ -66,9 +62,10 @@ class InfoPlistTest(unittest.TestCase):
def testGetPropertiesStringWithIcon(self):
self.info_plist.icon = 'test.ico'
result = self.info_plist._get_properties_string()
- expected = self.PROPS_TPL % {'ptype': '', 'icon':
- self.info_plist._format_property('CFBundleIconFile', 'test.ico') +
- '\n'}
+ expected = self.PROPS_TPL % {
+ 'ptype': '',
+ 'icon': self.info_plist._format_property('CFBundleIconFile', 'test.ico') + '\n',
+ }
self.info_plist.icon = None
self.assertEqual(result, expected)
@@ -77,7 +74,9 @@ class InfoPlistTest(unittest.TestCase):
self.info_plist.save(tmp.name)
with open(tmp.name, 'r') as f:
result = f.read()
- expected = INFO_PLIST_TPL % (self.info_plist.BEGIN,
- self.info_plist._get_properties_string(),
- self.info_plist.END)
+ expected = INFO_PLIST_TPL % (
+ self.info_plist.BEGIN,
+ self.info_plist._get_properties_string(),
+ self.info_plist.END,
+ )
self.assertEqual(result, expected)
diff --git a/test/test_cerbero_packages_package.py b/test/test_cerbero_packages_package.py
index f4cd8839..1a3b5273 100644
--- a/test/test_cerbero_packages_package.py
+++ b/test/test_cerbero_packages_package.py
@@ -29,22 +29,18 @@ from test.test_common import DummyConfig
class Config(DummyConfig):
-
def __init__(self, tmp, platform):
self.prefix = tmp
self.target_platform = platform
class PackageTest(unittest.TestCase):
-
def setUp(self):
self.tmp = tempfile.mkdtemp()
win32config = Config(self.tmp, Platform.WINDOWS)
linuxconfig = Config(self.tmp, Platform.LINUX)
- self.win32package = Package1(win32config, create_store(win32config),
- create_cookbook(win32config))
- self.linuxpackage = Package1(linuxconfig, create_store(linuxconfig),
- create_cookbook(linuxconfig))
+ self.win32package = Package1(win32config, create_store(win32config), create_cookbook(win32config))
+ self.linuxpackage = Package1(linuxconfig, create_store(linuxconfig), create_cookbook(linuxconfig))
def tearDown(self):
shutil.rmtree(self.tmp)
@@ -55,47 +51,65 @@ class PackageTest(unittest.TestCase):
self.linuxpackage.set_mode(PackageType.DEVEL)
self.assertEqual(self.linuxpackage.package_mode, PackageType.DEVEL)
self.assertEqual(self.linuxpackage.name, 'gstreamer-test1-devel')
- self.assertEqual(self.linuxpackage.shortdesc,
- 'GStreamer Test (Development Files)')
+ self.assertEqual(self.linuxpackage.shortdesc, 'GStreamer Test (Development Files)')
def testParseFiles(self):
- self.assertEqual(self.win32package._recipes_files['recipe1'],
- ['misc', 'libs', 'bins'])
+ self.assertEqual(self.win32package._recipes_files['recipe1'], ['misc', 'libs', 'bins'])
self.assertEqual(self.win32package._recipes_files['recipe5'], ['libs'])
def testListRecipesDeps(self):
- self.assertEqual(self.win32package.recipes_dependencies(),
- ['recipe1', 'recipe5', 'recipe2'])
- self.assertEqual(self.linuxpackage.recipes_dependencies(),
- ['recipe1', 'recipe2'])
+ self.assertEqual(self.win32package.recipes_dependencies(), ['recipe1', 'recipe5', 'recipe2'])
+ self.assertEqual(self.linuxpackage.recipes_dependencies(), ['recipe1', 'recipe2'])
def testFilesList(self):
add_files(self.tmp)
- winfiles = ['README', 'bin/gst-launch.exe', 'bin/libgstreamer-win32.dll',
- 'bin/libgstreamer-0.10.dll', 'bin/windows.exe',
- 'libexec/gstreamer-0.10/pluginsloader.exe',
- 'windows', 'bin/libtest.dll']
- linuxfiles = ['README', 'bin/gst-launch', 'bin/linux',
- 'lib/libgstreamer-x11.so.1', 'lib/libgstreamer-0.10.so.1',
- 'libexec/gstreamer-0.10/pluginsloader', 'linux']
-
- self.assertEqual(sorted(winfiles),
- sorted(self.win32package.files_list()))
- self.assertEqual(sorted(linuxfiles),
- sorted(self.linuxpackage.files_list()))
+ winfiles = [
+ 'README',
+ 'bin/gst-launch.exe',
+ 'bin/libgstreamer-win32.dll',
+ 'bin/libgstreamer-0.10.dll',
+ 'bin/windows.exe',
+ 'libexec/gstreamer-0.10/pluginsloader.exe',
+ 'windows',
+ 'bin/libtest.dll',
+ ]
+ linuxfiles = [
+ 'README',
+ 'bin/gst-launch',
+ 'bin/linux',
+ 'lib/libgstreamer-x11.so.1',
+ 'lib/libgstreamer-0.10.so.1',
+ 'libexec/gstreamer-0.10/pluginsloader',
+ 'linux',
+ ]
+
+ self.assertEqual(sorted(winfiles), sorted(self.win32package.files_list()))
+ self.assertEqual(sorted(linuxfiles), sorted(self.linuxpackage.files_list()))
def testDevelFilesList(self):
add_files(self.tmp)
devfiles = ['lib/libgstreamer-0.10.a', 'lib/libgstreamer-0.10.la']
- linuxdevfiles = devfiles + ['lib/libgstreamer-0.10.so',
- 'lib/libgstreamer-x11.a', 'lib/libgstreamer-x11.la',
- 'lib/libgstreamer-x11.so']
- windevfiles = devfiles + ['lib/libgstreamer-win32.a',
- 'lib/libgstreamer-win32.dll.a', 'lib/libgstreamer-win32.la',
- 'lib/libgstreamer-win32.def', 'lib/gstreamer-win32.lib',
- 'lib/libtest.a', 'lib/libtest.dll.a', 'lib/libtest.la',
- 'lib/libtest.def', 'lib/test.lib', 'lib/libgstreamer-0.10.dll.a',
- 'lib/libgstreamer-0.10.def', 'lib/gstreamer-0.10.lib']
+ linuxdevfiles = devfiles + [
+ 'lib/libgstreamer-0.10.so',
+ 'lib/libgstreamer-x11.a',
+ 'lib/libgstreamer-x11.la',
+ 'lib/libgstreamer-x11.so',
+ ]
+ windevfiles = devfiles + [
+ 'lib/libgstreamer-win32.a',
+ 'lib/libgstreamer-win32.dll.a',
+ 'lib/libgstreamer-win32.la',
+ 'lib/libgstreamer-win32.def',
+ 'lib/gstreamer-win32.lib',
+ 'lib/libtest.a',
+ 'lib/libtest.dll.a',
+ 'lib/libtest.la',
+ 'lib/libtest.def',
+ 'lib/test.lib',
+ 'lib/libgstreamer-0.10.dll.a',
+ 'lib/libgstreamer-0.10.def',
+ 'lib/gstreamer-0.10.lib',
+ ]
self.assertEqual(sorted(windevfiles), self.win32package.devel_files_list())
self.assertEqual(sorted(linuxdevfiles), self.linuxpackage.devel_files_list())
@@ -112,7 +126,6 @@ class PackageTest(unittest.TestCase):
class TestMetaPackages(unittest.TestCase):
-
def setUp(self):
self.tmp = tempfile.mkdtemp()
config = Config(self.tmp, Platform.LINUX)
@@ -124,8 +137,7 @@ class TestMetaPackages(unittest.TestCase):
def _compareList(self, func_name):
list_func = getattr(self.package, func_name)
- packages = [self.store.get_package(x) for x in \
- self.package.list_packages()]
+ packages = [self.store.get_package(x) for x in self.package.list_packages()]
files = []
for package in packages:
list_func = getattr(package, func_name)
@@ -135,18 +147,15 @@ class TestMetaPackages(unittest.TestCase):
self.assertEqual(sorted(files), list_func())
def testListPackages(self):
- expected = ['gstreamer-test1', 'gstreamer-test3',
- 'gstreamer-test-bindings', 'gstreamer-test2']
+ expected = ['gstreamer-test1', 'gstreamer-test3', 'gstreamer-test-bindings', 'gstreamer-test2']
self.assertEqual(self.package.list_packages(), expected)
def testPlatfromPackages(self):
packages_attr = object.__getattribute__(self.package, 'packages')
self.assertEqual(len(packages_attr), 3)
- platform_packages_attr = object.__getattribute__(self.package,
- 'platform_packages')
+ platform_packages_attr = object.__getattribute__(self.package, 'platform_packages')
self.assertEqual(len(platform_packages_attr), 1)
- self.assertEqual(len(self.package.packages),
- len(packages_attr) + len(platform_packages_attr))
+ self.assertEqual(len(self.package.packages), len(packages_attr) + len(platform_packages_attr))
def testFilesList(self):
self._compareList('files_list')
@@ -159,7 +168,6 @@ class TestMetaPackages(unittest.TestCase):
class AppPackageTest(unittest.TestCase):
-
def setUp(self):
self.tmp = tempfile.mkdtemp()
config = Config(self.tmp, Platform.LINUX)
diff --git a/test/test_cerbero_packages_packagemaker.py b/test/test_cerbero_packages_packagemaker.py
index 9e0ce4e1..c728546f 100644
--- a/test/test_cerbero_packages_packagemaker.py
+++ b/test/test_cerbero_packages_packagemaker.py
@@ -31,7 +31,6 @@ from test.test_common import DummyConfig
class PackageMakerTest(unittest.TestCase):
-
def setUp(self):
self.tmp = tempfile.mkdtemp()
self.config = DummyConfig()
@@ -60,7 +59,8 @@ class PackageMakerTest(unittest.TestCase):
os.makedirs(bindir)
os.makedirs(libdir)
os.makedirs(os.path.join(self.tmp, 'libexec', 'gstreamer-0.10'))
- shell.call('touch '
+ shell.call(
+ 'touch '
'README '
'linux '
'libexec/gstreamer-0.10/pluginsloader '
@@ -69,31 +69,30 @@ class PackageMakerTest(unittest.TestCase):
'lib/libgstreamer-0.10.so.1 '
'lib/libgstreamer-x11.so.1 '
'lib/notincluded1 '
- 'notincluded2 ', self.tmp)
+ 'notincluded2 ',
+ self.tmp,
+ )
class DummyPackageMaker(PackageMaker):
-
def _execute(self, cmd):
self.cmd = cmd
class TestPackageMaker(unittest.TestCase):
-
-
def testFillArgs(self):
pm = PackageMaker()
- args = {'r': 'root', 'i': 'pkg_id', 'n': 'version', 't': 'title',
- 'l': 'destination', 'o': 'output_file'}
+ args = {'r': 'root', 'i': 'pkg_id', 'n': 'version', 't': 'title', 'l': 'destination', 'o': 'output_file'}
cmd = pm._cmd_with_args(args)
- self.assertEqual(cmd,
- "./PackageMaker -i 'pkg_id' -l 'destination' -o 'output_file' "
- "-n 'version' -r 'root' -t 'title'")
+ self.assertEqual(
+ cmd, "./PackageMaker -i 'pkg_id' -l 'destination' -o 'output_file' " "-n 'version' -r 'root' -t 'title'"
+ )
def testCreatePackage(self):
pm = DummyPackageMaker()
- pm.create_package('root', 'pkg_id', 'version', 'title',
- 'output_file', 'destination')
- self.assertEqual(pm.cmd,
+ pm.create_package('root', 'pkg_id', 'version', 'title', 'output_file', 'destination')
+ self.assertEqual(
+ pm.cmd,
"./PackageMaker -g '10.6' -i 'pkg_id' -l 'destination' -o 'output_file' "
- "-n 'version' -r 'root' -t 'title'")
+ "-n 'version' -r 'root' -t 'title'",
+ )
diff --git a/test/test_cerbero_packages_packagesstore.py b/test/test_cerbero_packages_packagesstore.py
index 5f932933..088b4d12 100644
--- a/test/test_cerbero_packages_packagesstore.py
+++ b/test/test_cerbero_packages_packagesstore.py
@@ -21,13 +21,12 @@ import tempfile
from cerbero.config import Platform
from cerbero.errors import PackageNotFoundError
-from cerbero.packages.package import Package, MetaPackage, SDKPackage,\
- InstallerPackage, App
+from cerbero.packages.package import Package, MetaPackage, SDKPackage, InstallerPackage, App
from cerbero.packages.packagesstore import PackagesStore
from test import test_packages_common as common
-PACKAGE = '''
+PACKAGE = """
class Package(package.Package):
name = 'test-package'
@@ -37,22 +36,22 @@ class Package(package.Package):
Distro.WINDOWS
DistroVersion.WINDOWS_7
Architecture.X86
-'''
+"""
-SDKPACKAGE = '''
+SDKPACKAGE = """
class SDKPackage(package.SDKPackage):
name = 'test-package'
-'''
+"""
-INSTALLERPACKAGE = '''
+INSTALLERPACKAGE = """
class InstallerPackage(package.InstallerPackage):
name = 'test-package'
-'''
+"""
-class PackageTest(unittest.TestCase):
+class PackageTest(unittest.TestCase):
def setUp(self):
self.config = common.DummyConfig()
self.config.packages_dir = '/test'
@@ -72,8 +71,7 @@ class PackageTest(unittest.TestCase):
self.assertEqual(package, self.store.get_package(package.name))
def testPackageNotFound(self):
- self.assertRaises(PackageNotFoundError, self.store.get_package,
- 'unknown')
+ self.assertRaises(PackageNotFoundError, self.store.get_package, 'unknown')
def testPackagesList(self):
package = common.Package1(self.config, None, None)
@@ -84,28 +82,24 @@ class PackageTest(unittest.TestCase):
self.assertEqual(l, self.store.get_packages_list())
def testPackageDeps(self):
- package = common.Package1(self.config, None, None)
+ package = common.Package1(self.config, None, None)
package2 = common.Package2(self.config, None, None)
self.store.add_package(package)
self.store.add_package(package2)
- self.assertEqual(package.deps,
- [x.name for x in self.store.get_package_deps(package.name)])
+ self.assertEqual(package.deps, [x.name for x in self.store.get_package_deps(package.name)])
def testMetaPackageDeps(self):
metapackage = common.MetaPackage(self.config, None)
self.store.add_package(metapackage)
# the metapackage depends on package that are not yet in the store
- self.assertRaises(PackageNotFoundError,
- self.store.get_package_deps, metapackage.name)
- for klass in [common.Package1, common.Package2, common.Package3,
- common.Package4]:
+ self.assertRaises(PackageNotFoundError, self.store.get_package_deps, metapackage.name)
+ for klass in [common.Package1, common.Package2, common.Package3, common.Package4]:
p = klass(self.config, None, None)
self.store.add_package(p)
for klass in [common.MetaPackage]:
p = klass(self.config, None)
self.store.add_package(p)
- deps = ['gstreamer-test-bindings', 'gstreamer-test1',
- 'gstreamer-test2', 'gstreamer-test3']
+ deps = ['gstreamer-test-bindings', 'gstreamer-test1', 'gstreamer-test2', 'gstreamer-test3']
res = [x.name for x in self.store.get_package_deps(metapackage.name)]
self.assertEqual(sorted(deps), sorted(res))
@@ -118,8 +112,7 @@ class PackageTest(unittest.TestCase):
self.assertEqual('test-package', p.name)
def testLoadMetaPackageFromFile(self):
- for x, t in [(SDKPACKAGE, SDKPackage),
- (INSTALLERPACKAGE, InstallerPackage)]:
+ for x, t in [(SDKPACKAGE, SDKPackage), (INSTALLERPACKAGE, InstallerPackage)]:
package_file = tempfile.NamedTemporaryFile()
package_file.write(x)
package_file.flush()
@@ -137,4 +130,4 @@ class PackageTest(unittest.TestCase):
try:
p.test_imports()
except ImportError as e:
- self.fail("Import error raised, %s", e)
+ self.fail('Import error raised, %s', e)
diff --git a/test/test_cerbero_packages_pmdoc.py b/test/test_cerbero_packages_pmdoc.py
index 0694aefa..fb2d4f2e 100644
--- a/test/test_cerbero_packages_pmdoc.py
+++ b/test/test_cerbero_packages_pmdoc.py
@@ -31,14 +31,12 @@ from test.test_common import XMLMixin, DummyConfig
class IndexTest(unittest.TestCase, XMLMixin):
-
def setUp(self):
self.config = DummyConfig()
self.store = create_store(self.config)
self.package = self.store.get_package('gstreamer-runtime')
self.outdir = '/test'
- self.index = Index(self.package, self.store, self.outdir, [],
- PackageType.RUNTIME, False)
+ self.index = Index(self.package, self.store, self.outdir, [], PackageType.RUNTIME, False)
def testAddRoot(self):
self.index._add_root()
@@ -55,30 +53,24 @@ class IndexTest(unittest.TestCase, XMLMixin):
self.assertEqual(len(properties.getchildren()), 6)
self.check_text(properties, Index.TAG_ORGANIZATION, self.package.org)
self.check_text(properties, Index.TAG_TITLE, self.package.title)
- self.check_text(properties, Index.TAG_BUILD,
- os.path.join(self.outdir, '%s.pkg' % self.package.name))
- self.check_attrib(properties, Index.TAG_USER_SEES, 'ui',
- Index.PROP_USER_SEES)
- self.check_attrib(properties, Index.TAG_MIN_TARGET, 'os',
- Index.PROP_MIN_TARGET)
- self.check_attrib(properties, Index.TAG_DOMAIN, 'system',
- Index.PROP_DOMAIN)
+ self.check_text(properties, Index.TAG_BUILD, os.path.join(self.outdir, '%s.pkg' % self.package.name))
+ self.check_attrib(properties, Index.TAG_USER_SEES, 'ui', Index.PROP_USER_SEES)
+ self.check_attrib(properties, Index.TAG_MIN_TARGET, 'os', Index.PROP_MIN_TARGET)
+ self.check_attrib(properties, Index.TAG_DOMAIN, 'system', Index.PROP_DOMAIN)
def testAddDistribution(self):
self.index._add_root()
self.index._add_distribution()
children = self.index.root.getchildren()
self.assertEqual(len(children), 1)
- dist =children[0]
+ dist = children[0]
self.find_one(dist, Index.TAG_SCRIPTS)
- self.check_attrib(dist, Index.TAG_VERSION, Index.ATTR_MIN_SPEC,
- Index.MIN_SPEC)
+ self.check_attrib(dist, Index.TAG_VERSION, Index.ATTR_MIN_SPEC, Index.MIN_SPEC)
def testAddDescription(self):
self.index._add_root()
self.index._add_description()
- self.check_text(self.index.root, Index.TAG_DESCRIPTION,
- self.package.shortdesc)
+ self.check_text(self.index.root, Index.TAG_DESCRIPTION, self.package.shortdesc)
def testAddFlags(self):
self.index._add_root()
@@ -92,7 +84,7 @@ class IndexTest(unittest.TestCase, XMLMixin):
# 1 choice + 4 item
self.assertEqual(len(children), 5)
contents = self.find_one(self.index.root, Index.TAG_CONTENTS)
- packages =[]
+ packages = []
for choice in contents.iterfind(Index.TAG_CHOICE):
if choice.attrib['id'] == 'gstreamer-test1':
@@ -104,21 +96,16 @@ class IndexTest(unittest.TestCase, XMLMixin):
elif choice.attrib['id'] == 'gstreamer-test-bindings':
pkrefs = ['gstreamer-test-bindings']
else:
- self.fail("Incorrect choice %s" % choice)
- elpkrefs = [x.attrib['id'] for x in \
- choice.iterfind(Index.TAG_PKGREF)]
- self.assertEqual(sorted(["default.%s.%s" %
- (self.config.target_arch, x) for x in pkrefs]),
- sorted(elpkrefs))
+ self.fail('Incorrect choice %s' % choice)
+ elpkrefs = [x.attrib['id'] for x in choice.iterfind(Index.TAG_PKGREF)]
+ self.assertEqual(sorted(['default.%s.%s' % (self.config.target_arch, x) for x in pkrefs]), sorted(elpkrefs))
packages.extend(pkrefs)
- items = [x.text[:-4] for x in self.index.root.iterfind(Index.TAG_ITEM) if
- x.attrib['type']=='pkgref']
+ items = [x.text[:-4] for x in self.index.root.iterfind(Index.TAG_ITEM) if x.attrib['type'] == 'pkgref']
self.assertEqual(sorted(packages), sorted(items))
class PkgRefTest(unittest.TestCase, XMLMixin):
-
def setUp(self):
self.config = DummyConfig()
self.config.target_platform = Platform.LINUX
@@ -137,8 +124,7 @@ class PkgRefTest(unittest.TestCase, XMLMixin):
self.pkgref._add_root()
self.pkgref._add_scripts()
scripts = self.find_one(self.pkgref.root, PkgRef.TAG_SCRIPTS)
- self.check_text(scripts, PkgRef.TAG_SCRIPTS_DIR,
- os.path.join(self.package_path, 'Contents', 'Resources'))
+ self.check_text(scripts, PkgRef.TAG_SCRIPTS_DIR, os.path.join(self.package_path, 'Contents', 'Resources'))
def testAddExtra(self):
self.pkgref._add_root()
@@ -151,8 +137,7 @@ class PkgRefTest(unittest.TestCase, XMLMixin):
self.pkgref._add_root()
self.pkgref._add_contents()
contents = self.find_one(self.pkgref.root, PkgRef.TAG_CONTENTS)
- self.check_text(contents, PkgRef.TAG_FILE_LIST, '%s-contents.xml' %
- self.package.name)
+ self.check_text(contents, PkgRef.TAG_FILE_LIST, '%s-contents.xml' % self.package.name)
def testAddConfig(self):
self.pkgref._add_root()
@@ -166,21 +151,24 @@ class PkgRefTest(unittest.TestCase, XMLMixin):
self.check_attrib(config, PkgRef.TAG_INSTALL_TO, 'mod', 'true')
self.check_text(config, PkgRef.TAG_INSTALL_TO, '.')
self.find_one(config, PkgRef.TAG_REQ_AUTH)
- mods = ['installTo.isAbsoluteType', 'installTo.path',
- 'parent', 'installTo.isRelativeType', 'installTo',
- 'version', 'identifier']
+ mods = [
+ 'installTo.isAbsoluteType',
+ 'installTo.path',
+ 'parent',
+ 'installTo.isRelativeType',
+ 'installTo',
+ 'version',
+ 'identifier',
+ ]
docmods = [x.text for x in config.iterfind(PkgRef.TAG_MOD)]
self.assertEqual(sorted(mods), sorted(docmods))
flags = self.find_one(config, PkgRef.TAG_FLAGS)
self.find_one(flags, PkgRef.TAG_FOLLOW_SYMLINKS)
-
class PkgContentsWrap(PkgContents):
-
dirs = ['.', './bin', './lib', './lib/gstreamer-0.10', '']
- files = ['./bin/gst-inspect', './lib/libgstreamer.so.1.0',
- './lib/gstreamer-0.10/libgstplugin.so', './README', '']
+ files = ['./bin/gst-inspect', './lib/libgstreamer.so.1.0', './lib/gstreamer-0.10/libgstplugin.so', './README', '']
def _list_bom_dirs(self):
return '\n'.join(self.dirs)
@@ -191,7 +179,6 @@ class PkgContentsWrap(PkgContents):
class PkgContentsTest(unittest.TestCase, XMLMixin):
-
def setUp(self):
self.tmp = tempfile.mkdtemp()
self.pkgcontents = PkgContentsWrap(self.tmp)
@@ -204,23 +191,26 @@ class PkgContentsTest(unittest.TestCase, XMLMixin):
def testAddRoot(self):
self.pkgcontents._add_root()
- self.assertEqual(self.pkgcontents.root.tag,
- PkgContents.TAG_PKG_CONTENTS)
+ self.assertEqual(self.pkgcontents.root.tag, PkgContents.TAG_PKG_CONTENTS)
self.assertEqual(self.pkgcontents.root.attrib['spec'], PkgContents.SPEC_VERSION)
self.assertEqual(len(self.pkgcontents.root.getchildren()), 0)
def testAddPackageRoot(self):
self.pkgcontents._add_root()
self.pkgcontents._add_package_root()
- for k, v in [('n', 'PackageRoot'), ('o', PkgContents.OWNER),
- ('g', PkgContents.GROUP), ('pt', '.'), ('m', 'true'),
- ('t', 'bom')]:
+ for k, v in [
+ ('n', 'PackageRoot'),
+ ('o', PkgContents.OWNER),
+ ('g', PkgContents.GROUP),
+ ('pt', '.'),
+ ('m', 'true'),
+ ('t', 'bom'),
+ ]:
self.check_attrib(self.pkgcontents.root, PkgContents.TAG_F, k, v)
def testFill(self):
self.pkgcontents._fill()
- children = [x for x in self.pkgcontents.proot.getchildren()
- if x.tag == PkgContents.TAG_F]
+ children = [x for x in self.pkgcontents.proot.getchildren() if x.tag == PkgContents.TAG_F]
children_path = [x.attrib['n'] for x in children]
self.assertEqual(sorted(children_path), sorted(['bin', 'lib', 'README']))
for c in children:
@@ -229,16 +219,15 @@ class PkgContentsTest(unittest.TestCase, XMLMixin):
elif c.attrib['n'] == 'lib':
for c in c.getchildren():
if c.attrib['n'] == 'gstreamer-0.10':
- self.check_attrib(c, PkgContents.TAG_F, 'n',
- 'libgstplugin.so')
+ self.check_attrib(c, PkgContents.TAG_F, 'n', 'libgstplugin.so')
else:
self.assertEqual(c.attrib['n'], 'libgstreamer.so.1.0')
else:
self.assertEqual(c.attrib['n'], 'README')
-class TestPMDoc(unittest.TestCase):
- #if not sys.platform.startswith('darwin'):
+class TestPMDoc(unittest.TestCase):
+ # if not sys.platform.startswith('darwin'):
def setUp(self):
self.config = DummyConfig()
@@ -252,13 +241,12 @@ class TestPMDoc(unittest.TestCase):
def tearDown(self):
shutil.rmtree(self.tmp)
- @unittest.skipUnless(sys.platform.startswith("darwin"), "requires OSX")
+ @unittest.skipUnless(sys.platform.startswith('darwin'), 'requires OSX')
def testAllFilesCreated(self):
d = dict()
- packages = ['gstreamer-test1', 'gstreamer-test3',
- 'gstreamer-test-bindings', 'gstreamer-test2']
+ packages = ['gstreamer-test1', 'gstreamer-test3', 'gstreamer-test-bindings', 'gstreamer-test2']
for name in packages:
- p =self.store.get_package(name)
+ p = self.store.get_package(name)
d[p] = self.packages_path
self.package.__file__ = ''
pmdoc = PMDoc(self.package, self.store, self.tmp, d)
@@ -267,6 +255,6 @@ class TestPMDoc(unittest.TestCase):
expected_files = ['index.xml']
for p in packages:
- expected_files.append("%s.xml" % p)
- expected_files.append("%s-contents.xml" % p)
+ expected_files.append('%s.xml' % p)
+ expected_files.append('%s-contents.xml' % p)
self.assertEqual(sorted(files), sorted(expected_files))
diff --git a/test/test_cerbero_packages_wix.py b/test/test_cerbero_packages_wix.py
index 4b0a55ba..81564d67 100644
--- a/test/test_cerbero_packages_wix.py
+++ b/test/test_cerbero_packages_wix.py
@@ -32,12 +32,17 @@ from test.test_common import DummyConfig
class Recipe1(recipe.Recipe):
name = 'recipe-test'
- files_misc = ['bin/test.exe', 'bin/test2.exe', 'bin/test3.exe',
- 'README', 'lib/libfoo.dll', 'lib/gstreamer-0.10/libgstplugins.dll']
+ files_misc = [
+ 'bin/test.exe',
+ 'bin/test2.exe',
+ 'bin/test3.exe',
+ 'README',
+ 'lib/libfoo.dll',
+ 'lib/gstreamer-0.10/libgstplugins.dll',
+ ]
class Package(package.Package):
-
name = 'gstreamer-test'
shortdesc = 'GStreamer Test'
longdesc = 'test'
@@ -48,7 +53,7 @@ class Package(package.Package):
files = ['recipe-test:misc']
-MERGE_MODULE = '''\
+MERGE_MODULE = """\
<?xml version="1.0" ?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
<Module Id="_gstreamer_test" Language="1033" Version="1.0">
@@ -81,33 +86,33 @@ MERGE_MODULE = '''\
</Directory>
</Module>
</Wix>
-'''
+"""
class MergeModuleTest(unittest.TestCase):
-
def setUp(self):
self.config = DummyConfig()
- cb = create_cookbook(self.config)
- store = create_store(self.config)
+ cb = create_cookbook(self.config)
+ store = create_store(self.config)
cb.add_recipe(Recipe1(self.config))
self.package = Package(self.config, store, cb)
- self.mergemodule = MergeModule(self.config,
- self.package.files_list(), self.package)
+ self.mergemodule = MergeModule(self.config, self.package.files_list(), self.package)
def test_add_root(self):
self.mergemodule._add_root()
self.assertEqual(
- '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi" />',
- etree.tostring(self.mergemodule.root))
+ '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi" />', etree.tostring(self.mergemodule.root)
+ )
def test_add_module(self):
self.mergemodule._add_root()
self.mergemodule._add_module()
self.assertEqual(
'<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
- '<Module Id="_gstreamer_test" Language="1033" Version="1.0" />'
- '</Wix>', etree.tostring(self.mergemodule.root))
+ '<Module Id="_gstreamer_test" Language="1033" Version="1.0" />'
+ '</Wix>',
+ etree.tostring(self.mergemodule.root),
+ )
def test_add_package(self):
self.mergemodule._add_root()
@@ -115,11 +120,13 @@ class MergeModuleTest(unittest.TestCase):
self.mergemodule._add_package()
self.assertEqual(
'<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
- '<Module Id="_gstreamer_test" Language="1033" Version="1.0">'
- '<Package Comments="test" Description="GStreamer Test" Id="1" '
- 'Manufacturer="GStreamer Project" />'
- '</Module>'
- '</Wix>', etree.tostring(self.mergemodule.root))
+ '<Module Id="_gstreamer_test" Language="1033" Version="1.0">'
+ '<Package Comments="test" Description="GStreamer Test" Id="1" '
+ 'Manufacturer="GStreamer Project" />'
+ '</Module>'
+ '</Wix>',
+ etree.tostring(self.mergemodule.root),
+ )
def test_add_root_dir(self):
self.mergemodule._add_root()
@@ -128,12 +135,14 @@ class MergeModuleTest(unittest.TestCase):
self.mergemodule._add_root_dir()
self.assertEqual(
'<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
- '<Module Id="_gstreamer_test" Language="1033" Version="1.0">'
- '<Package Comments="test" Description="GStreamer Test" Id="1" '
- 'Manufacturer="GStreamer Project" />'
- '<Directory Id="TARGETDIR" Name="SourceDir" />'
- '</Module>'
- '</Wix>', etree.tostring(self.mergemodule.root))
+ '<Module Id="_gstreamer_test" Language="1033" Version="1.0">'
+ '<Package Comments="test" Description="GStreamer Test" Id="1" '
+ 'Manufacturer="GStreamer Project" />'
+ '<Directory Id="TARGETDIR" Name="SourceDir" />'
+ '</Module>'
+ '</Wix>',
+ etree.tostring(self.mergemodule.root),
+ )
def test_add_directory(self):
self.mergemodule._add_root()
@@ -168,11 +177,11 @@ class MergeModuleTest(unittest.TestCase):
def test_render_xml(self):
self.config.platform = Platform.WINDOWS
- self.mergemodule._get_uuid = lambda : '1'
+ self.mergemodule._get_uuid = lambda: '1'
self.mergemodule.fill()
tmp = io.StringIO()
self.mergemodule.write(tmp)
- #self._compstr(tmp.getvalue(), MERGE_MODULE)
+ # self._compstr(tmp.getvalue(), MERGE_MODULE)
self.assertEqual(MERGE_MODULE, tmp.getvalue())
def _compstr(self, str1, str2):
@@ -182,12 +191,10 @@ class MergeModuleTest(unittest.TestCase):
if str1[i] != str2[i]:
print(str1[i])
print(str2[i])
- print("")
+ print('')
class InstallerTest(unittest.TestCase):
-
-
def setUp(self):
pass
diff --git a/test/test_cerbero_tools_osxuniversalgenerator.py b/test/test_cerbero_tools_osxuniversalgenerator.py
index 3259238b..2abf7868 100644
--- a/test/test_cerbero_tools_osxuniversalgenerator.py
+++ b/test/test_cerbero_tools_osxuniversalgenerator.py
@@ -27,7 +27,7 @@ from cerbero.tools.osxuniversalgenerator import OSXUniversalGenerator
from cerbero.tools.osxrelocator import OSXRelocator
-TEST_APP = '''\
+TEST_APP = """\
#include<stdio.h>
extern int foo1(int r);
@@ -36,28 +36,26 @@ int main(int arg_count,char ** arg_values)
{
printf("Hello World %%d\\n", foo1(1));
return 0;
-}'''
+}"""
-TEST_LIB = '''\
+TEST_LIB = """\
int foo1(int r);
int foo1(int r) {
return r;
-}'''
+}"""
SHARED_LIBRARY = {
Architecture.X86: 'Mach-O dynamically linked shared library i386',
- Architecture.X86_64: 'Mach-O 64-bit dynamically linked shared library x86_64'}
-EXECUTABLE = {
- Architecture.X86: 'Mach-O executable i386',
- Architecture.X86_64: 'Mach-O 64-bit executable x86_64'}
+ Architecture.X86_64: 'Mach-O 64-bit dynamically linked shared library x86_64',
+}
+EXECUTABLE = {Architecture.X86: 'Mach-O executable i386', Architecture.X86_64: 'Mach-O 64-bit executable x86_64'}
-class OSXUniversalGeneratorTest(unittest.TestCase):
-
+class OSXUniversalGeneratorTest(unittest.TestCase):
def setUp(self):
self.tmp = tempfile.mkdtemp()
self._create_tree()
@@ -73,7 +71,7 @@ class OSXUniversalGeneratorTest(unittest.TestCase):
os.makedirs(self.tmp_sources)
def _compile(self, arch):
- main_c = os.path.join(self.tmp_sources, 'main.c')
+ main_c = os.path.join(self.tmp_sources, 'main.c')
foo_c = os.path.join(self.tmp_sources, 'foo.c')
libdir = os.path.join(self.tmp, arch, 'lib')
@@ -86,9 +84,7 @@ class OSXUniversalGeneratorTest(unittest.TestCase):
if arch == Architecture.X86:
arch = 'i386'
shell.call('gcc -arch %s -o %s -shared %s' % (arch, libfoo, foo_c))
- shell.call('gcc -arch %s -o %s %s -L%s -lfoo' %
- (arch, test_app, main_c, libdir))
-
+ shell.call('gcc -arch %s -o %s %s -L%s -lfoo' % (arch, test_app, main_c, libdir))
def _get_file_type(self, path):
cmd = 'file -bh %s'
@@ -96,62 +92,50 @@ class OSXUniversalGeneratorTest(unittest.TestCase):
def _check_compiled_files(self):
for arch in [Architecture.X86, Architecture.X86_64]:
- res = self._get_file_type(
- os.path.join(self.tmp, arch, 'lib', 'libfoo.so'))
+ res = self._get_file_type(os.path.join(self.tmp, arch, 'lib', 'libfoo.so'))
self.assertEqual(res, SHARED_LIBRARY[arch])
- res = self._get_file_type(
- os.path.join(self.tmp, arch, 'bin', 'test_app'))
+ res = self._get_file_type(os.path.join(self.tmp, arch, 'bin', 'test_app'))
self.assertEqual(res, EXECUTABLE[arch])
def testMergeDirs(self):
self._compile(Architecture.X86)
self._compile(Architecture.X86_64)
self._check_compiled_files()
- gen = OSXUniversalGenerator(
- os.path.join(self.tmp, Architecture.UNIVERSAL))
- gen.merge_dirs([
- os.path.join(self.tmp, Architecture.X86),
- os.path.join(self.tmp, Architecture.X86_64)])
+ gen = OSXUniversalGenerator(os.path.join(self.tmp, Architecture.UNIVERSAL))
+ gen.merge_dirs([os.path.join(self.tmp, Architecture.X86), os.path.join(self.tmp, Architecture.X86_64)])
- # bash-3.2$ file libfoo.so
+ # bash-3.2$ file libfoo.so
# libfoo.so: Mach-O universal binary with 2 architectures
# libfoo.so (for architecture i386): Mach-O dynamically linked shared library i386
# libfoo.so (for architecture x86_64): Mach-O 64-bit dynamically linked shared library x86_64
- ftype = self._get_file_type(os.path.join(self.tmp,
- Architecture.UNIVERSAL, 'lib', 'libfoo.so'))
+ ftype = self._get_file_type(os.path.join(self.tmp, Architecture.UNIVERSAL, 'lib', 'libfoo.so'))
for arch in [Architecture.X86, Architecture.X86_64]:
self.assertTrue(SHARED_LIBRARY[arch] in ftype)
- ftype = self._get_file_type(os.path.join(self.tmp,
- Architecture.UNIVERSAL, 'bin', 'test_app'))
+ ftype = self._get_file_type(os.path.join(self.tmp, Architecture.UNIVERSAL, 'bin', 'test_app'))
for arch in [Architecture.X86, Architecture.X86_64]:
self.assertTrue(EXECUTABLE[arch] in ftype)
def testMergeFiles(self):
for arch in [Architecture.X86, Architecture.X86_64]:
with open(os.path.join(self.tmp, arch, 'share', 'test'), 'w') as f:
- f.write("test")
- gen = OSXUniversalGenerator(
- os.path.join(self.tmp, Architecture.UNIVERSAL))
- gen.merge_files(['share/test'],
- [os.path.join(self.tmp, Architecture.X86),
- os.path.join(self.tmp, Architecture.X86_64)])
- self.assertTrue(os.path.exists(os.path.join(self.tmp,
- Architecture.UNIVERSAL, 'share', 'test')))
+ f.write('test')
+ gen = OSXUniversalGenerator(os.path.join(self.tmp, Architecture.UNIVERSAL))
+ gen.merge_files(
+ ['share/test'], [os.path.join(self.tmp, Architecture.X86), os.path.join(self.tmp, Architecture.X86_64)]
+ )
+ self.assertTrue(os.path.exists(os.path.join(self.tmp, Architecture.UNIVERSAL, 'share', 'test')))
def testMergeCopyAndLink(self):
for arch in [Architecture.X86, Architecture.X86_64]:
file1 = os.path.join(self.tmp, arch, 'share', 'test1')
file2 = os.path.join(self.tmp, arch, 'share', 'test2')
with open(file1, 'w') as f:
- f.write("test")
+ f.write('test')
os.symlink(file1, file2)
- gen = OSXUniversalGenerator(
- os.path.join(self.tmp, Architecture.UNIVERSAL))
- gen.merge_dirs([
- os.path.join(self.tmp, Architecture.X86),
- os.path.join(self.tmp, Architecture.X86_64)])
+ gen = OSXUniversalGenerator(os.path.join(self.tmp, Architecture.UNIVERSAL))
+ gen.merge_dirs([os.path.join(self.tmp, Architecture.X86), os.path.join(self.tmp, Architecture.X86_64)])
file1 = os.path.join(self.tmp, Architecture.UNIVERSAL, 'share', 'test1')
file2 = os.path.join(self.tmp, Architecture.UNIVERSAL, 'share', 'test2')
@@ -165,14 +149,12 @@ class OSXUniversalGeneratorTest(unittest.TestCase):
pc_file = os.path.join(self.tmp, arch, 'test.pc')
with open(pc_file, 'w') as f:
f.write(os.path.join(self.tmp, arch, 'lib', 'test'))
- gen = OSXUniversalGenerator(
- os.path.join(self.tmp, Architecture.UNIVERSAL))
- gen.merge_files(['test.pc'],
- [os.path.join(self.tmp, Architecture.X86),
- os.path.join(self.tmp, Architecture.X86_64)])
+ gen = OSXUniversalGenerator(os.path.join(self.tmp, Architecture.UNIVERSAL))
+ gen.merge_files(
+ ['test.pc'], [os.path.join(self.tmp, Architecture.X86), os.path.join(self.tmp, Architecture.X86_64)]
+ )
pc_file = os.path.join(self.tmp, Architecture.UNIVERSAL, 'test.pc')
- self.assertEqual(open(pc_file).readline(),
- os.path.join(self.tmp, Architecture.UNIVERSAL, 'lib', 'test'))
+ self.assertEqual(open(pc_file).readline(), os.path.join(self.tmp, Architecture.UNIVERSAL, 'lib', 'test'))
def testMergedLibraryPaths(self):
def check_prefix(path):
diff --git a/test/test_common.py b/test/test_common.py
index 7904b753..72d8caa0 100644
--- a/test/test_common.py
+++ b/test/test_common.py
@@ -28,7 +28,7 @@ class DummyConfig(object):
sources = ''
local_sources = ''
wix_prefix = ''
- py_prefix= ''
+ py_prefix = ''
git_root = ''
allow_parallel_build = False
num_of_cpus = 1
@@ -39,21 +39,19 @@ class DummyConfig(object):
install_dir = ''
-class XMLMixin():
-
+class XMLMixin:
def find_one(self, el, tag):
children = list(el.iterfind(tag))
if len(children) == 0:
- self.fail("Element with tag %s not found in parent %s" % (tag, el))
- return children[0]
+ self.fail('Element with tag %s not found in parent %s' % (tag, el))
+ return children[0]
def check_attrib(self, parent, tag, attrib, value):
n = self.find_one(parent, tag)
if attrib not in n.attrib:
- self.fail("Attribute %s not found in %s" % (attrib, n))
+ self.fail('Attribute %s not found in %s' % (attrib, n))
self.assertEqual(n.attrib[attrib], value)
def check_text(self, parent, tag, value):
n = self.find_one(parent, tag)
self.assertEqual(n.text, value)
-
diff --git a/test/test_packages_common.py b/test/test_packages_common.py
index ff0e71c5..ae9af809 100644
--- a/test/test_packages_common.py
+++ b/test/test_packages_common.py
@@ -23,7 +23,6 @@ from test.test_build_common import create_cookbook
class Package1(package.Package):
-
name = 'gstreamer-test1'
shortdesc = 'GStreamer Test'
version = '1.0'
@@ -33,13 +32,10 @@ class Package1(package.Package):
deps = ['gstreamer-test2']
files = ['recipe1:misc:libs:bins']
- platform_files = {
- Platform.WINDOWS: ['recipe5:libs']
- }
+ platform_files = {Platform.WINDOWS: ['recipe5:libs']}
class Package2(package.Package):
-
name = 'gstreamer-test2'
shortdesc = 'GStreamer Test 2'
version = '1.0'
@@ -51,7 +47,6 @@ class Package2(package.Package):
class Package3(package.Package):
-
name = 'gstreamer-test3'
shortdesc = 'GStreamer Test 3'
version = '1.0'
@@ -63,53 +58,49 @@ class Package3(package.Package):
class Package4(package.Package):
-
name = 'gstreamer-test-bindings'
shortdesc = 'GStreamer Bindings'
version = '1.0'
licences = ['LGPL']
uuid = '1'
vendor = 'GStreamer Project'
- sys_deps = {Distro.DEBIAN: ['python'],
- DistroVersion.FEDORA_16: ['python27']}
+ sys_deps = {Distro.DEBIAN: ['python'], DistroVersion.FEDORA_16: ['python27']}
files = ['recipe4:misc']
class MetaPackage(package.MetaPackage):
-
- name = "gstreamer-runtime"
- shortdesc = "GStreamer runtime"
- longdesc = "GStreamer runtime"
- title = "GStreamer runtime"
- url = "http://www.gstreamer.net"
+ name = 'gstreamer-runtime'
+ shortdesc = 'GStreamer runtime'
+ longdesc = 'GStreamer runtime'
+ title = 'GStreamer runtime'
+ url = 'http://www.gstreamer.net'
version = '1.0'
uuid = '3ffe67b2-4565-411f-8287-e8faa892f853'
- vendor = "GStreamer Project"
+ vendor = 'GStreamer Project'
org = 'net.gstreamer'
packages = [
- ('gstreamer-test1', True, True),
- ('gstreamer-test3', False, True),
- ('gstreamer-test-bindings', False, False)]
- platform_packages = {
- Platform.LINUX: [('gstreamer-test2', False, False)]}
- icon = "gstreamer.ico"
+ ('gstreamer-test1', True, True),
+ ('gstreamer-test3', False, True),
+ ('gstreamer-test-bindings', False, False),
+ ]
+ platform_packages = {Platform.LINUX: [('gstreamer-test2', False, False)]}
+ icon = 'gstreamer.ico'
class App(package.App):
-
- name = "gstreamer-app"
- shortdesc = "GStreamer sample app"
- longdesc = "GStreamer sample app"
- title = "GStreamer sample app"
- url = "http://www.gstreamer.net"
+ name = 'gstreamer-app'
+ shortdesc = 'GStreamer sample app'
+ longdesc = 'GStreamer sample app'
+ title = 'GStreamer sample app'
+ url = 'http://www.gstreamer.net'
version = '1.0'
uuid = '3ffe67b2-4565-411f-8287-e8faa892f853'
- vendor = "GStreamer Project"
+ vendor = 'GStreamer Project'
org = 'net.gstreamer'
app_recipe = 'recipe3'
deps = ['gstreamer-test1']
- icon = "share/images/gstreamer.png"
+ icon = 'share/images/gstreamer.png'
embed_deps = True
diff --git a/tools/certdata2pem.py b/tools/certdata2pem.py
index fa8246ba..cce7d2b9 100644
--- a/tools/certdata2pem.py
+++ b/tools/certdata2pem.py
@@ -73,7 +73,7 @@ for line in open('certdata.txt', 'r'):
raise NotImplementedError('line_parts < 2 not supported.')
if type == 'MULTILINE_OCTAL':
in_multiline = True
- value = ""
+ value = ''
continue
obj[field] = value
if len(list(obj.items())) > 0:
@@ -95,35 +95,36 @@ for obj in objects:
if obj['CKA_CLASS'] not in ('CKO_NETSCAPE_TRUST', 'CKO_NSS_TRUST'):
continue
if obj['CKA_LABEL'] in blacklist:
- print("Certificate %s blacklisted, ignoring." % obj['CKA_LABEL'])
- elif obj['CKA_TRUST_SERVER_AUTH'] in ('CKT_NETSCAPE_TRUSTED_DELEGATOR',
- 'CKT_NSS_TRUSTED_DELEGATOR'):
+ print('Certificate %s blacklisted, ignoring.' % obj['CKA_LABEL'])
+ elif obj['CKA_TRUST_SERVER_AUTH'] in ('CKT_NETSCAPE_TRUSTED_DELEGATOR', 'CKT_NSS_TRUSTED_DELEGATOR'):
trust[obj['CKA_LABEL']] = True
- elif obj['CKA_TRUST_EMAIL_PROTECTION'] in ('CKT_NETSCAPE_TRUSTED_DELEGATOR',
- 'CKT_NSS_TRUSTED_DELEGATOR'):
+ elif obj['CKA_TRUST_EMAIL_PROTECTION'] in ('CKT_NETSCAPE_TRUSTED_DELEGATOR', 'CKT_NSS_TRUSTED_DELEGATOR'):
trust[obj['CKA_LABEL']] = True
- elif obj['CKA_TRUST_SERVER_AUTH'] in ('CKT_NETSCAPE_UNTRUSTED',
- 'CKT_NSS_NOT_TRUSTED'):
- print('!'*74)
- print("UNTRUSTED BUT NOT BLACKLISTED CERTIFICATE FOUND: %s" % obj['CKA_LABEL'])
- print('!'*74)
+ elif obj['CKA_TRUST_SERVER_AUTH'] in ('CKT_NETSCAPE_UNTRUSTED', 'CKT_NSS_NOT_TRUSTED'):
+ print('!' * 74)
+ print('UNTRUSTED BUT NOT BLACKLISTED CERTIFICATE FOUND: %s' % obj['CKA_LABEL'])
+ print('!' * 74)
else:
- print("Ignoring certificate %s. SAUTH=%s, EPROT=%s" % \
- (obj['CKA_LABEL'], obj['CKA_TRUST_SERVER_AUTH'],
- obj['CKA_TRUST_EMAIL_PROTECTION']))
+ print(
+ 'Ignoring certificate %s. SAUTH=%s, EPROT=%s'
+ % (obj['CKA_LABEL'], obj['CKA_TRUST_SERVER_AUTH'], obj['CKA_TRUST_EMAIL_PROTECTION'])
+ )
for obj in objects:
if obj['CKA_CLASS'] == 'CKO_CERTIFICATE':
if not obj['CKA_LABEL'] in trust or not trust[obj['CKA_LABEL']]:
continue
- fname = obj['CKA_LABEL'][1:-1].replace('/', '_')\
- .replace(' ', '_')\
- .replace('(', '=')\
- .replace(')', '=')\
- .replace(',', '_') + '.crt'
+ fname = (
+ obj['CKA_LABEL'][1:-1]
+ .replace('/', '_')
+ .replace(' ', '_')
+ .replace('(', '=')
+ .replace(')', '=')
+ .replace(',', '_')
+ + '.crt'
+ )
fname = fname.decode('string_escape')
f = open(fname, 'w')
- f.write("-----BEGIN CERTIFICATE-----\n")
- f.write("\n".join(textwrap.wrap(base64.b64encode(obj['CKA_VALUE']), 64)))
- f.write("\n-----END CERTIFICATE-----\n")
-
+ f.write('-----BEGIN CERTIFICATE-----\n')
+ f.write('\n'.join(textwrap.wrap(base64.b64encode(obj['CKA_VALUE']), 64)))
+ f.write('\n-----END CERTIFICATE-----\n')
diff --git a/tools/show-coverage.py b/tools/show-coverage.py
index 28cf4c4e..3b37ff69 100644
--- a/tools/show-coverage.py
+++ b/tools/show-coverage.py
@@ -4,7 +4,6 @@ import sys
class Presentation:
-
def __init__(self, name, lines, covered):
self.name = name
self.lines = lines
@@ -21,7 +20,6 @@ class Presentation:
class Coverage:
-
def __init__(self):
self.files = []
self.total_lines = 0
@@ -66,20 +64,16 @@ class Coverage:
def show_one(self, filename):
f = open(filename)
# Grab all executables lines
- lines = [line for line in f.readlines()
- if self.codeline_matcher.match(line)]
+ lines = [line for line in f.readlines() if self.codeline_matcher.match(line)]
# Find out which of them were not executed
- uncovered_lines = [line for line in lines
- if line.startswith('>>>>>>')]
+ uncovered_lines = [line for line in lines if line.startswith('>>>>>>')]
if not lines:
return
filename = self._strip_filename(filename)
- p = Presentation(filename,
- len(lines),
- len(lines) - len(uncovered_lines))
+ p = Presentation(filename, len(lines), len(lines) - len(uncovered_lines))
p.show(self.maxlen)
self.total_lines += p.lines
@@ -99,5 +93,6 @@ def main(args):
c.show_results()
+
if __name__ == '__main__':
sys.exit(main(sys.argv))