#!/usr/bin/python2.6
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
#

import errno
import fnmatch
import os
import platform
import stat
import sys
import shutil
import re
import subprocess
import tarfile
import tempfile
import urllib
import py_compile
import hashlib
import time
import StringIO

from distutils.errors import DistutilsError, DistutilsFileError
from distutils.core import setup
from distutils.cmd import Command
from distutils.command.install import install as _install
from distutils.command.install_data import install_data as _install_data
from distutils.command.install_lib import install_lib as _install_lib
from distutils.command.build import build as _build
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.build_py import build_py as _build_py
from distutils.command.bdist import bdist as _bdist
from distutils.command.clean import clean as _clean
from distutils.dist import Distribution
from distutils import log

from distutils.sysconfig import get_python_inc
import distutils.dep_util as dep_util
import distutils.dir_util as dir_util
import distutils.file_util as file_util
import distutils.util as util
import distutils.ccompiler
from distutils.unixccompiler import UnixCCompiler

osname = platform.uname()[0].lower()
ostype = arch = 'unknown'
if osname == 'sunos':
        arch = platform.processor()
        ostype = "posix"
elif osname == 'linux':
        arch = "linux_" + platform.machine()
        ostype = "posix"
elif osname == 'windows':
        arch = osname
        ostype = "windows"
elif osname == 'darwin':
        arch = osname
        ostype = "posix"
elif osname == 'aix':
        arch = "aix"
        ostype = "posix"

pwd = os.path.normpath(sys.path[0])

# the version of pylint that we must have in order to run the pylint checks.
req_pylint_version = "0.25.2"

#
# Unbuffer stdout and stderr. This helps to ensure that subprocess output
# is properly interleaved with output from this program.
#
sys.stdout = os.fdopen(sys.stdout.fileno(), "w", 0)
sys.stderr = os.fdopen(sys.stderr.fileno(), "w", 0)

dist_dir = os.path.normpath(os.path.join(pwd, os.pardir, "proto", "dist_" + arch))
build_dir = os.path.normpath(os.path.join(pwd, os.pardir, "proto", "build_" + arch))
if "ROOT" in os.environ and os.environ["ROOT"] != "":
        root_dir = os.environ["ROOT"]
else:
        root_dir = os.path.normpath(os.path.join(pwd, os.pardir, "proto", "root_" + arch))
pkgs_dir = os.path.normpath(os.path.join(pwd, os.pardir, "packages", arch))
extern_dir = os.path.normpath(os.path.join(pwd, "extern"))

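# Install locations below are expressed relative to the install root
# (root_dir); they are used as destination directories for data_files
# and scripts.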
py_install_dir = 'usr/lib/python2.6/vendor-packages'

scripts_dir = 'usr/bin'
lib_dir = 'usr/lib'
svc_method_dir = 'lib/svc/method'
svc_share_dir = 'lib/svc/share'

man1_dir = 'usr/share/man/man1'
man1m_dir = 'usr/share/man/man1m'
man5_dir = 'usr/share/man/man5'
man1_ja_JP_dir = 'usr/share/man/ja_JP.UTF-8/man1'
man1m_ja_JP_dir = 'usr/share/man/ja_JP.UTF-8/man1m'
man5_ja_JP_dir = 'usr/share/man/ja_JP.UTF-8/man5'
man1_zh_CN_dir = 'usr/share/man/zh_CN.UTF-8/man1'
man1m_zh_CN_dir = 'usr/share/man/zh_CN.UTF-8/man1m'
man5_zh_CN_dir = 'usr/share/man/zh_CN.UTF-8/man5'

resource_dir = 'usr/share/lib/pkg'
transform_dir = 'usr/share/pkg/transforms'
smf_app_dir = 'lib/svc/manifest/application/pkg'
execattrd_dir = 'etc/security/exec_attr.d'
authattrd_dir = 'etc/security/auth_attr.d'
userattrd_dir = 'etc/user_attr.d'
sysrepo_dir = 'etc/pkg/sysrepo'
sysrepo_logs_dir = 'var/log/pkg/sysrepo'
sysrepo_cache_dir = 'var/cache/pkg/sysrepo'
depot_dir = 'etc/pkg/depot'
depot_conf_dir = 'etc/pkg/depot/conf.d'
depot_logs_dir = 'var/log/pkg/depot'
depot_cache_dir = 'var/cache/pkg/depot'
autostart_dir = 'etc/xdg/autostart'
desktop_dir = 'usr/share/applications'
gconf_dir = 'etc/gconf/schemas'
help_dir = 'usr/share/gnome/help/package-manager'
omf_dir = 'usr/share/omf/package-manager'
startpage_dir = 'usr/share/package-manager/data/startpagebase'
um_lib_dir = 'usr/lib/update-manager'
um_share_dir = 'usr/share/update-manager'
pm_share_dir = 'usr/share/package-manager'
locale_dir = 'usr/share/locale'
mirror_logs_dir = 'var/log/pkg/mirror'
mirror_cache_dir = 'var/cache/pkg/mirror'


# A list of source, destination tuples of modules which should be hardlinked
# together if the os supports it and otherwise copied.
hardlink_modules = []

scripts_sunos = {
        scripts_dir: [
                ['client.py', 'pkg'],
                ['pkgdep.py', 'pkgdepend'],
                ['pkgrepo.py', 'pkgrepo'],
                ['util/publish/pkgdiff.py', 'pkgdiff'],
                ['util/publish/pkgfmt.py', 'pkgfmt'],
                ['util/publish/pkglint.py', 'pkglint'],
                ['util/publish/pkgmerge.py', 'pkgmerge'],
                ['util/publish/pkgmogrify.py', 'pkgmogrify'],
                ['util/publish/pkgsurf.py', 'pkgsurf'],
                ['publish.py', 'pkgsend'],
                ['pull.py', 'pkgrecv'],
                ['sign.py', 'pkgsign'],
                ['packagemanager.py', 'packagemanager'],
                ['updatemanager.py', 'pm-updatemanager'],
                ],
        lib_dir: [
                ['depot.py', 'pkg.depotd'],
                ['checkforupdates.py', 'pm-checkforupdates'],
                ['updatemanagernotifier.py', 'updatemanagernotifier'],
                ['launch.py', 'pm-launch'],
                ['sysrepo.py', 'pkg.sysrepo'],
                ['depot-config.py', "pkg.depot-config"]
                ],
        um_lib_dir: [
                ['um/update-refresh.sh', 'update-refresh.sh'],
                ],
        svc_method_dir: [
                ['svc/svc-pkg-depot', 'svc-pkg-depot'],
                ['svc/svc-pkg-mdns', 'svc-pkg-mdns'],
                ['svc/svc-pkg-mirror', 'svc-pkg-mirror'],
                ['svc/svc-pkg-repositories-setup',
                    'svc-pkg-repositories-setup'],
                ['svc/svc-pkg-server', 'svc-pkg-server'],
                ['svc/svc-pkg-sysrepo', 'svc-pkg-sysrepo'],
                ['svc/svc-pkg-update', 'svc-pkg-update'],
                ],
        svc_share_dir: [
                ['svc/pkg5_include.sh', 'pkg5_include.sh'],
                ],
        }

scripts_windows = {
        scripts_dir: [
                ['client.py', 'client.py'],
                ['pkgrepo.py', 'pkgrepo.py'],
                ['publish.py', 'publish.py'],
                ['pull.py', 'pull.py'],
                ['scripts/pkg.bat', 'pkg.bat'],
                ['scripts/pkgsend.bat', 'pkgsend.bat'],
                ['scripts/pkgrecv.bat', 'pkgrecv.bat'],
                ],
        lib_dir: [
                ['depot.py', 'depot.py'],
                ['scripts/pkg.depotd.bat', 'pkg.depotd.bat'],
                ],
        }

scripts_other_unix = {
        scripts_dir: [
                ['client.py', 'client.py'],
                ['pkgdep.py', 'pkgdep'],
                ['util/publish/pkgdiff.py', 'pkgdiff'],
                ['util/publish/pkgfmt.py', 'pkgfmt'],
                ['util/publish/pkgmogrify.py', 'pkgmogrify'],
                ['pull.py', 'pull.py'],
                ['publish.py', 'publish.py'],
                ['scripts/pkg.sh', 'pkg'],
                ['scripts/pkgsend.sh', 'pkgsend'],
                ['scripts/pkgrecv.sh', 'pkgrecv'],
                ],
        lib_dir: [
                ['depot.py', 'depot.py'],
                ['scripts/pkg.depotd.sh', 'pkg.depotd'],
                ],
        }

# indexed by 'osname'
scripts = {
        "sunos": scripts_sunos,
        "linux": scripts_other_unix,
        "windows": scripts_windows,
        "darwin": scripts_other_unix,
        "aix" : scripts_other_unix,
        "unknown": scripts_sunos,
        }

man1_files = [
        'man/packagemanager.1',
        'man/pkg.1',
        'man/pkgdepend.1',
        'man/pkgdiff.1',
        'man/pkgfmt.1',
        'man/pkglint.1',
        'man/pkgmerge.1',
        'man/pkgmogrify.1',
        'man/pkgsend.1',
        'man/pkgsign.1',
        'man/pkgsurf.1',
        'man/pkgrecv.1',
        'man/pkgrepo.1',
        'man/pm-updatemanager.1',
        ]
man1m_files = [
        'man/pkg.depotd.1m',
        'man/pkg.depot-config.1m',
        'man/pkg.sysrepo.1m'
        ]
man5_files = [
        'man/pkg.5'
        ]

man1_ja_files = [
        'man/ja_JP/packagemanager.1',
        'man/ja_JP/pkg.1',
        'man/ja_JP/pkgdepend.1',
        'man/ja_JP/pkgdiff.1',
        'man/ja_JP/pkgfmt.1',
        'man/ja_JP/pkglint.1',
        'man/ja_JP/pkgmerge.1',
        'man/ja_JP/pkgmogrify.1',
        'man/ja_JP/pkgsend.1',
        'man/ja_JP/pkgsign.1',
        'man/ja_JP/pkgrecv.1',
        'man/ja_JP/pkgrepo.1',
        'man/ja_JP/pm-updatemanager.1',
        ]
man1m_ja_files = [
        'man/ja_JP/pkg.depotd.1m',
        'man/ja_JP/pkg.sysrepo.1m'
        ]
man5_ja_files = [
        'man/ja_JP/pkg.5'
        ]

man1_zh_CN_files = [
        'man/zh_CN/packagemanager.1',
        'man/zh_CN/pkg.1',
        'man/zh_CN/pkgdepend.1',
        'man/zh_CN/pkgdiff.1',
        'man/zh_CN/pkgfmt.1',
        'man/zh_CN/pkglint.1',
        'man/zh_CN/pkgmerge.1',
        'man/zh_CN/pkgmogrify.1',
        'man/zh_CN/pkgsend.1',
        'man/zh_CN/pkgsign.1',
        'man/zh_CN/pkgrecv.1',
        'man/zh_CN/pkgrepo.1',
        'man/zh_CN/pm-updatemanager.1',
        ]
man1m_zh_CN_files = [
        'man/zh_CN/pkg.depotd.1m',
        'man/zh_CN/pkg.sysrepo.1m'
        ]
man5_zh_CN_files = [
        'man/zh_CN/pkg.5'
        ]

packages = [
        'pkg',
        'pkg.actions',
        'pkg.bundle',
        'pkg.client',
        'pkg.client.linkedimage',
        'pkg.client.transport',
        'pkg.file_layout',
        'pkg.flavor',
        'pkg.gui',
        'pkg.lint',
        'pkg.portable',
        'pkg.publish',
        'pkg.server'
        ]

pylint_targets = [
        'pkg.altroot',
        'pkg.client.__init__',
        'pkg.client.api',
        'pkg.client.linkedimage',
        'pkg.client.pkgdefs',
        'pkg.client.pkgremote',
        'pkg.client.plandesc',
        'pkg.client.printengine',
        'pkg.client.progress',
        'pkg.misc',
        'pkg.pipeutils',
        'checkforupdates',
        ]

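# Collect the depot web UI files under web/ (excluding Makefiles) so they can
# be installed under resource_dir; the "en" pages are installed a second time
# under a "__LOCALE__" directory for the localizable file package.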
web_files = []
for entry in os.walk("web"):
        web_dir, dirs, files = entry
        if not files:
                continue
        web_files.append((os.path.join(resource_dir, web_dir), [
            os.path.join(web_dir, f) for f in files
            if f != "Makefile"
            ]))
        # install same set of files in "en/" in "__LOCALE__/" as well
        # for localizable file package (regarding themes, install
        # theme "oracle.com" only)
        if os.path.basename(web_dir) == "en" and \
            os.path.dirname(web_dir) in ("web", "web/_themes/oracle.com"):
                web_files.append((os.path.join(resource_dir,
                    os.path.dirname(web_dir), "__LOCALE__"), [
                        os.path.join(web_dir, f) for f in files
                        if f != "Makefile"
                        ]))

smf_app_files = [
        'svc/pkg-depot.xml',
        'svc/pkg-mdns.xml',
        'svc/pkg-mirror.xml',
        'svc/pkg-repositories-setup.xml',
        'svc/pkg-server.xml',
        'svc/pkg-system-repository.xml',
        'svc/pkg-update.xml',
        'svc/zoneproxy-client.xml',
        'svc/zoneproxyd.xml'
        ]
resource_files = [
        'util/opensolaris.org.sections',
        'util/pkglintrc',
        ]
transform_files = [
        'util/publish/transforms/developer',
        'util/publish/transforms/documentation',
        'util/publish/transforms/locale',
        'util/publish/transforms/smf-manifests'
        ]
sysrepo_files = [
        'util/apache2/sysrepo/sysrepo_p5p.py',
        'util/apache2/sysrepo/sysrepo_httpd.conf.mako',
        'util/apache2/sysrepo/sysrepo_publisher_response.mako',
        ]
sysrepo_log_stubs = [
        'util/apache2/sysrepo/logs/access_log',
        'util/apache2/sysrepo/logs/error_log',
        'util/apache2/sysrepo/logs/rewrite.log',
        ]
depot_files = [
        'util/apache2/depot/depot.conf.mako',
        'util/apache2/depot/depot_httpd.conf.mako',
        'util/apache2/depot/depot_index.py',
        ]
depot_log_stubs = [
        'util/apache2/depot/logs/access_log',
        'util/apache2/depot/logs/error_log',
        'util/apache2/depot/logs/rewrite.log',
        ]
# The apache-based depot includes an shtml file we add to the resource dir
web_files.append((os.path.join(resource_dir, "web"),
    ["util/apache2/depot/repos.shtml"]))
execattrd_files = [
        'util/misc/exec_attr.d/package:pkg',
        'util/misc/exec_attr.d/package:pkg:package-manager'
]
authattrd_files = ['util/misc/auth_attr.d/package:pkg']
userattrd_files = ['util/misc/user_attr.d/package:pkg']
autostart_files = [
        'um/data/updatemanagernotifier.desktop',
]
desktop_files = [
        'gui/data/addmoresoftware.desktop',
        'gui/data/packagemanager.desktop',
        'um/data/updatemanager.desktop',
]
gconf_files = [
        'gui/data/packagemanager-preferences.schemas',
        'um/data/updatemanager-preferences.schemas',
]
intl_files = [
        'gui/data/addmoresoftware.desktop.in',
        'gui/data/packagemanager-info.xml.in',
        'gui/data/packagemanager-preferences.schemas.in',
        'gui/data/packagemanager.desktop.in',
        'um/data/updatemanager-preferences.schemas.in',
        'um/data/updatemanager.desktop.in',
        'um/data/updatemanagernotifier.desktop.in',
]
help_locales = \
    'C ar ca cs de es fr hu id it ja ko pl pt_BR ru sv zh_CN zh_HK zh_TW'.split()
help_files = {
        'C': ['gui/help/C/package-manager.xml'],
        'C/figures': [
            'gui/help/C/figures/%s.png' % n
            for n in 'pkgmgr-main startpage_new update_all_new webinstall'.split()
            ]
}
help_files.update(
    (locale, ['gui/help/%s/package-manager.xml' % locale])
    for locale in help_locales[1:]
)
# add package-manager-__LOCALE__.omf for localizable file package
omf_files = [
    'gui/help/package-manager-%s.omf' % locale
    for locale in help_locales + [ "__LOCALE__" ]
]
startpage_locales = \
    'C ar ca cs de es fr hu id it ja ko nl pt_BR ru sv zh_CN zh_HK zh_TW'.split()
startpage_files = {
        'C': [
            'gui/data/startpagebase/C/%s.png' % n
            for n in [
                'dialog-information', 'dialog-warning', 'hc_dialog-information',
                'hc_dialog-warning', 'hc_install', 'hc_opensolaris',
                'hci_dialog-information', 'hci_dialog-warning', 'hci_install',
                'hci_opensolaris', 'install', 'opensolaris'
            ]
        ] + ['gui/data/startpagebase/C/startpage.html']
}
startpage_files.update(
    (locale, ['gui/data/startpagebase/%s/startpage.html' % locale])
    for locale in startpage_locales[1:]
)
pkg_locales = \
    'ar ca cs de es fr he hu id it ja ko nl pl pt_BR ru sk sv zh_CN zh_HK zh_TW'.split()

syscallat_srcs = [
        'modules/syscallat.c'
        ]
pspawn_srcs = [
        'modules/pspawn.c'
        ]
elf_srcs = [
        'modules/elf.c',
        'modules/elfextract.c',
        'modules/liblist.c',
        ]
arch_srcs = [
        'modules/arch.c'
        ]
_actions_srcs = [
        'modules/actions/_actions.c'
        ]
_actcomm_srcs = [
        'modules/actions/_common.c'
        ]
_varcet_srcs = [
        'modules/_varcet.c'
        ]
solver_srcs = [
        'modules/solver/solver.c',
        'modules/solver/py_solver.c'
        ]
solver_link_args = ["-lm", "-lc"]
if osname == 'sunos':
        solver_link_args = ["-ztext"] + solver_link_args

# Runs pylint over the IPS Python source code
class pylint_func(Command):
        description = "Runs pylint tools over IPS python source code"
        user_options = []

        def initialize_options(self):
                pass

        def finalize_options(self):
                pass

        # Make string shell-friendly
        @staticmethod
        def escape(astring):
                return astring.replace(' ', '\\ ')

        def run(self, quiet=False):

                def supported_pylint_ver(version):
                        """Compare the installed version against the version
                        we require to build with, returning False if the version
                        is too old. It's tempting to use pkg.version.Version
                        here, but since that's a build artifact, we'll do it
                        the long way."""
                        inst_pylint_ver = version.split(".")
                        req_pylint_ver = req_pylint_version.split(".")

                        # if the lists are of different lengths, we just
                        # compare with the precision we have.
                        vers_comp = zip(inst_pylint_ver, req_pylint_ver)
                        for inst, req in vers_comp:
                                try:
                                        if int(inst) < int(req):
                                                return False
                                except ValueError:
                                        # if we somehow get non-numeric version
                                        # components, we ignore them.
                                        continue
                        return True

                # it's fine to default to the required version - the build will
                # break if the installed version is incompatible and $PYLINT_VER
                # didn't get set, somehow.
                pylint_ver_str = os.environ.get("PYLINT_VER",
                    req_pylint_version)
                if pylint_ver_str == "":
                        pylint_ver_str = req_pylint_version

                if os.environ.get("PKG_SKIP_PYLINT"):
                        log.warn("WARNING: skipping pylint checks: "
                            "$PKG_SKIP_PYLINT was set")
                        return
                elif not pylint_ver_str or \
                    not supported_pylint_ver(pylint_ver_str):
                        log.warn("WARNING: skipping pylint checks: the "
                            "installed version %s is older than version %s" %
                            (pylint_ver_str, req_pylint_version))
                        return

                proto = os.path.join(root_dir, py_install_dir)
                sys.path.insert(0, proto)

                # Insert tests directory onto sys.path so any custom checkers
                # can be found.
                sys.path.insert(0, os.path.join(pwd, 'tests'))
                # assumes pylint is accessible on the sys.path
                from pylint import lint

                #
                # For some reason, the load-plugins option, when used in the
                # rcfile, does not work, so we put it here instead, to load
                # our custom checkers.
                #
                # Unfortunately, pylint seems pretty fragile and will crash if
                # we try to run it over all the current pkg source. Hence for
                # now we only run it over a subset of the source. As source
                # files are made pylint clean they should be added to the
                # pylint_targets list.
                #
                args = ['--load-plugins=multiplatform']
                if quiet:
                        args += ['--reports=no']
                args += ['--rcfile', os.path.join(pwd, 'tests', 'pylintrc')]
                args += pylint_targets
                lint.Run(args)


class pylint_func_quiet(pylint_func):

        def run(self, quiet=False):
                pylint_func.run(self, quiet=True)


include_dirs = [ 'modules' ]
lint_flags = [ '-u', '-axms', '-erroff=E_NAME_DEF_NOT_USED2' ]

# Runs lint on the extension module source code
class clint_func(Command):
        description = "Runs lint tools over IPS C extension source code"
        user_options = []

        def initialize_options(self):
                pass

        def finalize_options(self):
                pass

        # Make string shell-friendly
        @staticmethod
        def escape(astring):
                return astring.replace(' ', '\\ ')

        def run(self):
                if "LINT" in os.environ and os.environ["LINT"] != "":
                        lint = [os.environ["LINT"]]
                else:
                        lint = ['lint']
                if osname == 'sunos' or osname == "linux":
                        archcmd = lint + lint_flags + \
                            ['-D_FILE_OFFSET_BITS=64'] + \
                            ["%s%s" % ("-I", k) for k in include_dirs] + \
                            ['-I' + self.escape(get_python_inc())] + \
                            arch_srcs
                        elfcmd = lint + lint_flags + \
                            ["%s%s" % ("-I", k) for k in include_dirs] + \
                            ['-I' + self.escape(get_python_inc())] + \
                            ["%s%s" % ("-l", k) for k in elf_libraries] + \
                            elf_srcs
                        _actionscmd = lint + lint_flags + \
                            ["%s%s" % ("-I", k) for k in include_dirs] + \
                            ['-I' + self.escape(get_python_inc())] + \
                            _actions_srcs
                        _actcommcmd = lint + lint_flags + \
                            ["%s%s" % ("-I", k) for k in include_dirs] + \
                            ['-I' + self.escape(get_python_inc())] + \
                            _actcomm_srcs
                        _varcetcmd = lint + lint_flags + \
                            ["%s%s" % ("-I", k) for k in include_dirs] + \
                            ['-I' + self.escape(get_python_inc())] + \
                            _varcet_srcs
                        pspawncmd = lint + lint_flags + \
                            ['-D_FILE_OFFSET_BITS=64'] + \
                            ["%s%s" % ("-I", k) for k in include_dirs] + \
                            ['-I' + self.escape(get_python_inc())] + \
                            pspawn_srcs
                        syscallatcmd = lint + lint_flags + \
                            ['-D_FILE_OFFSET_BITS=64'] + \
                            ["%s%s" % ("-I", k) for k in include_dirs] + \
                            ['-I' + self.escape(get_python_inc())] + \
                            syscallat_srcs

                        print(" ".join(archcmd))
                        os.system(" ".join(archcmd))
                        print(" ".join(elfcmd))
                        os.system(" ".join(elfcmd))
                        print(" ".join(_actionscmd))
                        os.system(" ".join(_actionscmd))
                        print(" ".join(_actcommcmd))
                        os.system(" ".join(_actcommcmd))
                        print(" ".join(_varcetcmd))
                        os.system(" ".join(_varcetcmd))
                        print(" ".join(pspawncmd))
                        os.system(" ".join(pspawncmd))
                        print(" ".join(syscallatcmd))
                        os.system(" ".join(syscallatcmd))


# Runs both C and Python lint
class lint_func(Command):
        description = "Runs C and Python lint checkers"
        user_options = []

        def initialize_options(self):
                pass

        def finalize_options(self):
                pass

        # Make string shell-friendly
        @staticmethod
        def escape(astring):
                return astring.replace(' ', '\\ ')

        def run(self):
                clint_func(Distribution()).run()
                pylint_func(Distribution()).run()


class install_func(_install):
        def initialize_options(self):
                _install.initialize_options(self)

                # PRIVATE_BUILD set in the environment tells us to put the build
                # directory into the .pyc files, rather than the final
                # installation directory.
                private_build = os.getenv("PRIVATE_BUILD", None)

                if private_build is None:
                        self.install_lib = py_install_dir
                        self.install_data = os.path.sep
                        self.root = root_dir
                else:
                        self.install_lib = os.path.join(root_dir, py_install_dir)
                        self.install_data = root_dir

                # This is used when installing scripts, below, but it isn't a
                # standard distutils variable.
                self.root_dir = root_dir

        def run(self):
                """At the end of the install function, we need to rename some
                files because distutils provides no way to rename files as they
                are placed in their install locations.
                """
                _install.run(self)

                for o_src, o_dest in hardlink_modules:
                        for e in [".py", ".pyc"]:
                                src = util.change_root(self.root_dir, o_src + e)
                                dest = util.change_root(
                                    self.root_dir, o_dest + e)
                                if ostype == "posix":
                                        if os.path.exists(dest) and \
                                            os.stat(src)[stat.ST_INO] != \
                                            os.stat(dest)[stat.ST_INO]:
                                                os.remove(dest)
                                        file_util.copy_file(src, dest,
                                            link="hard", update=1)
                                else:
                                        file_util.copy_file(src, dest, update=1)

                for d, files in scripts[osname].iteritems():
                        for (srcname, dstname) in files:
                                dst_dir = util.change_root(self.root_dir, d)
                                dst_path = util.change_root(self.root_dir,
                                    os.path.join(d, dstname))
                                dir_util.mkpath(dst_dir, verbose=True)
                                file_util.copy_file(srcname, dst_path, update=True)
                                # make scripts executable
                                os.chmod(dst_path,
                                    os.stat(dst_path).st_mode
                                    | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)


class install_lib_func(_install_lib):
        """Remove the target files prior to the standard install_lib procedure
        if the build_py module has determined that they've actually changed.
        This may be needed when a module's timestamp goes backwards in time, if
        a working-directory change is reverted, or an older changeset is checked
        out.
        """

        def install(self):
                build_py = self.get_finalized_command("build_py")
                prefix_len = len(self.build_dir) + 1
                for p in build_py.copied:
                        id_p = os.path.join(self.install_dir, p[prefix_len:])
                        rm_f(id_p)
                        if self.compile:
                                rm_f(id_p + "c")
                        if self.optimize > 0:
                                rm_f(id_p + "o")
                return _install_lib.install(self)


class install_data_func(_install_data):
        """Enhance the standard install_data subcommand to take not only a list
        of filenames, but a list of source and destination filename tuples, for
        the cases where a filename needs to be renamed between the two
        locations."""

        def run(self):
                self.mkpath(self.install_dir)
                for f in self.data_files:
                        dir, files = f
                        dir = util.convert_path(dir)
                        if not os.path.isabs(dir):
                                dir = os.path.join(self.install_dir, dir)
                        elif self.root:
                                dir = util.change_root(self.root, dir)
                        self.mkpath(dir)

                        if not files:
                                self.outfiles.append(dir)
                        else:
                                for file in files:
                                        if isinstance(file, basestring):
                                                infile = file
                                                outfile = os.path.join(dir,
                                                    os.path.basename(file))
                                        else:
                                                infile, outfile = file
                                        infile = util.convert_path(infile)
                                        outfile = util.convert_path(outfile)
                                        if os.path.sep not in outfile:
                                                outfile = os.path.join(dir,
                                                    outfile)
                                        self.copy_file(infile, outfile)
                                        self.outfiles.append(outfile)

                # Don't bother making this generic for the one symlink.
                src = "HighContrastInverse"
                dst = os.path.join(self.install_dir, pm_share_dir,
                    "icons/HighContrastLargePrintInverse")
                try:
                        targ = os.readlink(dst)
                except OSError, e:
                        if e.errno in (errno.ENOENT, errno.EINVAL):
                                targ = None
                        else:
                                raise
                if src != targ:
                        log.info("linking %s -> %s" % (src, dst))
                        rm_f(dst)
                        os.symlink(src, dst)
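
# run_cmd() is a small wrapper around subprocess.Popen(): it runs "args" in
# the directory "swdir", optionally with an updated environment ("updenv"),
# with stderr discarded ("ignerr") or redirected to a file ("savestderr"),
# and exits the build with an error message if the command returns a
# non-zero status.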
def run_cmd(args, swdir, updenv=None, ignerr=False, savestderr=None):
        if updenv:
                # use temp environment modified with the given dict
                env = os.environ.copy()
                env.update(updenv)
        else:
                # just use environment of this (parent) process as is
                env = os.environ

        if ignerr:
                # send stderr to devnull
                stderr = open(os.devnull)
        elif savestderr:
                stderr = savestderr
        else:
                # just use stderr of this (parent) process
                stderr = None

        ret = subprocess.Popen(args, cwd=swdir, env=env,
            stderr=stderr).wait()
        if ret != 0:
                if stderr:
                        stderr.close()
                print >> sys.stderr, \
                    "install failed and returned %d." % ret
                print >> sys.stderr, \
                    "Command was: %s" % " ".join(args)
                sys.exit(1)
        if stderr:
                stderr.close()

def _copy_file_contents(src, dst, buffer_size=16*1024):
        """A clone of distutils.file_util._copy_file_contents() that strips the
        CDDL text. For Python files, we replace the CDDL text with an equal
        number of empty comment lines so that line numbers match between the
        source and destination files."""

        # Match the lines between and including the CDDL header signposts, as
        # well as empty comment lines before and after, if they exist.
        cddl_re = re.compile("\n(#\s*\n)?^[^\n]*CDDL HEADER START.+"
            "CDDL HEADER END[^\n]*$(\n#\s*$)?", re.MULTILINE|re.DOTALL)

        with file(src, "r") as sfp:
                try:
                        os.unlink(dst)
                except EnvironmentError, e:
                        if e.errno != errno.ENOENT:
                                raise DistutilsFileError("could not delete "
                                    "'%s': %s" % (dst, e))

                with file(dst, "w") as dfp:
                        while True:
                                buf = sfp.read(buffer_size)
                                if not buf:
                                        break
                                if src.endswith(".py"):
                                        match = cddl_re.search(buf)
                                        if match:
                                                # replace the CDDL expression
                                                # with the same number of empty
                                                # comment lines as the cddl_re
                                                # matched.
                                                substr = buf[
                                                    match.start():match.end()]
                                                count = len(
                                                    substr.split("\n")) - 2
                                                blanks = "#\n" * count
                                                buf = cddl_re.sub("\n" + blanks,
                                                    buf)
                                else:
                                        buf = cddl_re.sub("", buf)
                                dfp.write(buf)

# Make file_util use our version of _copy_file_contents
file_util._copy_file_contents = _copy_file_contents

def intltool_update_maintain():
        """Check if scope of localization looks up-to-date or possibly not,
        by comparing file set described in po/POTFILES.{in,skip} and
        actual source files (e.g. .py) detected.
        """
        rm_f("po/missing")
        rm_f("po/notexist")

        args = [
            "/usr/bin/intltool-update", "--maintain"
        ]
        print " ".join(args)
        podir = os.path.join(os.getcwd(), "po")
        run_cmd(args, podir, updenv={"LC_ALL": "C"}, ignerr=True)

        if os.path.exists("po/missing"):
                print >> sys.stderr, \
                    "New file(s) with translatable strings detected:"
                missing = open("po/missing", "r")
                print >> sys.stderr, "--------"
                for fn in missing:
                        print >> sys.stderr, "%s" % fn.strip()
                print >> sys.stderr, "--------"
                missing.close()
                print >> sys.stderr, \
"""Please evaluate whether any of the above file(s) needs localization.
If so, please add its name to po/POTFILES.in. If not (e.g., it's not
delivered), please add its name to po/POTFILES.skip.
Please be sure to maintain alphabetical ordering in both files."""
                sys.exit(1)

        if os.path.exists("po/notexist"):
                print >> sys.stderr, \
"""The following files are listed in po/POTFILES.in, but no longer exist
in the workspace:"""
                notexist = open("po/notexist", "r")
                print >> sys.stderr, "--------"
                for fn in notexist:
                        print >> sys.stderr, "%s" % fn.strip()
                print >> sys.stderr, "--------"
                notexist.close()
                print >> sys.stderr, \
                    "Please remove the file names from po/POTFILES.in"
                sys.exit(1)

def intltool_update_pot():
        """Generate pkg.pot by extracting localizable strings from source
        files (e.g. .py)
        """
        rm_f("po/pkg.pot")

        args = [
            "/usr/bin/intltool-update", "--pot"
        ]
        print " ".join(args)
        podir = os.path.join(os.getcwd(), "po")
        run_cmd(args, podir,
            updenv={"LC_ALL": "C", "XGETTEXT": "/usr/gnu/bin/xgettext"})

        if not os.path.exists("po/pkg.pot"):
                print >> sys.stderr, \
                    "Failed in generating pkg.pot."
                sys.exit(1)

def intltool_merge(src, dst):
        if not dep_util.newer(src, dst):
                return

        args = [
            "/usr/bin/intltool-merge", "-d", "-u",
            "-c", "po/.intltool-merge-cache", "po", src, dst
        ]
        print " ".join(args)
        run_cmd(args, os.getcwd(), updenv={"LC_ALL": "C"})

def i18n_check():
        """Checks for common i18n messaging bugs in the source."""
        src_files = []
        # A list of the i18n errors we check for in the code
        common_i18n_errors = [
            # This checks that messages with multiple parameters are always
            # written using "%(name)s" format, rather than just "%s"
            "format string with unnamed arguments cannot be properly localized"
        ]

        for line in open("po/POTFILES.in", "r").readlines():
                if line.startswith("["):
                        continue
                if line.startswith("#"):
                        continue
                src_files.append(line.rstrip())

        args = [
            "/usr/gnu/bin/xgettext", "--from-code=UTF-8", "-o", "/dev/null"]
        args += src_files

        xgettext_output_path = tempfile.mkstemp()[1]
        xgettext_output = open(xgettext_output_path, "w")
        run_cmd(args, os.getcwd(), updenv={"LC_ALL": "C"},
            savestderr=xgettext_output)

        found_errs = False
        i18n_errs = open("po/i18n_errs.txt", "w")
        for line in open(xgettext_output_path, "r").readlines():
                for err in common_i18n_errors:
                        if err in line:
                                i18n_errs.write(line)
                                found_errs = True
        i18n_errs.close()

        if found_errs:
                print >> sys.stderr, \
                    "The following i18n errors were detected and should be corrected:\n" \
                    "(this list is saved in po/i18n_errs.txt)\n"
                for line in open("po/i18n_errs.txt", "r"):
                        print >> sys.stderr, line.rstrip()
                sys.exit(1)
        os.remove(xgettext_output_path)

def msgfmt(src, dst):
        if not dep_util.newer(src, dst):
                return

        args = ["/usr/bin/msgfmt", "-o", dst, src]
        print " ".join(args)
        run_cmd(args, os.getcwd())

def localizablexml(src, dst):
        """create XML help for localization, where French part of legalnotice
        is stripped off
        """
        if not dep_util.newer(src, dst):
                return

        fsrc = open(src, "r")
        fdst = open(dst, "w")

        # indicates currently in French part of legalnotice
        in_fr = False

        for l in fsrc:
                if in_fr: # in French part
                        if l.startswith('</legalnotice>'):
                                # reached end of legalnotice
                                print >> fdst, l,
                                in_fr = False
                elif l.startswith('<para lang="fr"/>') or \
                    l.startswith('<para lang="fr"></para>'):
                        in_fr = True
                else:
                        # not in French part
                        print >> fdst, l,

        fsrc.close()
        fdst.close()

def xml2po_gen(src, dst):
        """Input is English XML file. Output is pkg_help.pot, message
        source for next translation update.
        """
        if not dep_util.newer(src, dst):
                return

        args = ["/usr/bin/xml2po", "-o", dst, src]
        print " ".join(args)
        run_cmd(args, os.getcwd())

def xml2po_merge(src, dst, mofile):
        """Input is English XML file and <lang>.po file (which contains
        translations). Output is translated XML file.
        """
        msgfmt(mofile[:-3] + ".po", mofile)

        monewer = dep_util.newer(mofile, dst)
        srcnewer = dep_util.newer(src, dst)

        if not srcnewer and not monewer:
                return

        args = ["/usr/bin/xml2po", "-t", mofile, "-o", dst, src]
        print " ".join(args)
        run_cmd(args, os.getcwd())
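
# The installfile command copies a single file into place (see the "file",
# "dest" and "mode" options below); because file_util._copy_file_contents has
# been replaced above, the copy also strips the CDDL block from the copied
# file.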
class installfile(Command):
        user_options = [
            ("file=", "f", "source file to copy"),
            ("dest=", "d", "destination directory"),
            ("mode=", "m", "file mode"),
        ]

        description = "De-CDDLing file copy"

        def initialize_options(self):
                self.file = None
                self.dest = None
                self.mode = None

        def finalize_options(self):
                if self.mode is None:
                        self.mode = 0644
                elif isinstance(self.mode, basestring):
                        try:
                                self.mode = int(self.mode, 8)
                        except ValueError:
                                self.mode = 0644

        def run(self):
                dest_file = os.path.join(self.dest, os.path.basename(self.file))
                ret = self.copy_file(self.file, dest_file)

                os.chmod(dest_file, self.mode)
                os.utime(dest_file, None)

                return ret

class build_func(_build):
        sub_commands = _build.sub_commands + [('build_data', None)]

        def initialize_options(self):
                _build.initialize_options(self)
                self.build_base = build_dir
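
# get_hg_version() returns the Mercurial changeset id of the workspace
# ("hg id -i"); build_py_func.build_module() below substitutes it into the
# VERSION string in pkg/__init__.py.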
def get_hg_version():
        try:
                p = subprocess.Popen(['hg', 'id', '-i'], stdout = subprocess.PIPE)
                return p.communicate()[0].strip()
        except OSError:
                print >> sys.stderr, "ERROR: unable to obtain mercurial version"
                return "unknown"

def syntax_check(filename):
        """ Run python's compiler over the file, and discard the results.
            Arrange to generate an exception if the file does not compile.
            This is needed because distutil's own use of pycompile (in the
            distutils.utils module) is broken, and doesn't stop on error. """
        try:
                py_compile.compile(filename, os.devnull, doraise=True)
        except py_compile.PyCompileError, e:
                res = ""
                for err in e.exc_value:
                        if isinstance(err, basestring):
                                res += err + "\n"
                                continue

                        # Assume it's a tuple of (filename, lineno, col, code)
                        fname, line, col, code = err
                        res += "line %d, column %s, in %s:\n%s" % (line,
                            col or "unknown", fname, code)

                raise DistutilsError(res)

# On Solaris, ld inserts the full argument to the -o option into the symbol
# table. This means that the resulting object will be different depending on
# the path at which the workspace lives, and not just on the interesting content
# of the object.
#
# In order to work around that bug (7076871), we create a new compiler class
# that looks at the argument indicating the output file, chdirs to its
# directory, and runs the real link with the output file set to just the base
# name of the file.
#
# Unfortunately, distutils isn't too customizable in this regard, so we have to
# twiddle with a couple of the names in the distutils.ccompiler namespace: we
# have to add a new entry to the compiler_class dict, and we have to override
# the new_compiler() function to point to our own. Luckily, our copy of
# new_compiler() gets to be very simple, since we always know what we want to
# return.
class MyUnixCCompiler(UnixCCompiler):

        def link(self, *args, **kwargs):

                output_filename = args[2]
                output_dir = kwargs.get('output_dir')
                cwd = os.getcwd()

                assert(not output_dir)
                output_dir = os.path.join(cwd, os.path.dirname(output_filename))
                output_filename = os.path.basename(output_filename)
                nargs = args[:2] + (output_filename,) + args[3:]
                if not os.path.exists(output_dir):
                        os.mkdir(output_dir, 0755)
                os.chdir(output_dir)

                UnixCCompiler.link(self, *nargs, **kwargs)

                os.chdir(cwd)

distutils.ccompiler.compiler_class['myunix'] = (
    'unixccompiler', 'MyUnixCCompiler',
    'standard Unix-style compiler with a link stage modified for Solaris'
)

def my_new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
        return MyUnixCCompiler(None, dry_run, force)

if osname == 'sunos':
        distutils.ccompiler.new_compiler = my_new_compiler
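
# build_ext_func builds each extension twice when the Extension object has
# build_64 set: once as the normal 32-bit object and once with -m64 added to
# the compile and link flags, placing the 64-bit objects under a "64/"
# subdirectory (see get_ext_fullpath()).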
class build_ext_func(_build_ext):

        def initialize_options(self):
                _build_ext.initialize_options(self)
                self.build64 = False

                if osname == 'sunos':
                        self.compiler = 'myunix'

        def build_extension(self, ext):
                # Build 32-bit
                _build_ext.build_extension(self, ext)
                if not ext.build_64:
                        return

                # Set up for 64-bit
                old_build_temp = self.build_temp
                d, f = os.path.split(self.build_temp)

                # store our 64-bit extensions elsewhere
                self.build_temp = d + "/temp64.%s" % \
                    os.path.basename(self.build_temp).replace("temp.", "")

                ext.extra_compile_args += ["-m64"]
                ext.extra_link_args += ["-m64"]
                self.build64 = True

                # Build 64-bit
                _build_ext.build_extension(self, ext)

                # Reset to 32-bit
                self.build_temp = old_build_temp
                ext.extra_compile_args.remove("-m64")
                ext.extra_link_args.remove("-m64")
                self.build64 = False

        def get_ext_fullpath(self, ext_name):
                path = _build_ext.get_ext_fullpath(self, ext_name)
                if not self.build64:
                        return path

                dpath, fpath = os.path.split(path)
                return os.path.join(dpath, "64", fpath)

class build_py_func(_build_py):

        def __init__(self, dist):
                ret = _build_py.__init__(self, dist)

                self.copied = []

                # Gather the timestamps of the .py files in the gate, so we can
                # force the mtimes of the built and delivered copies to be
                # consistent across builds, causing their corresponding .pyc
                # files to be unchanged unless the .py file content changed.
                self.timestamps = {}

                p = subprocess.Popen(
                    [sys.executable, os.path.join(pwd, "pydates")],
                    stdout=subprocess.PIPE)

                for line in p.stdout:
                        stamp, path = line.split()
                        stamp = float(stamp)
                        self.timestamps[path] = stamp

                if p.wait() != 0:
                        print >> sys.stderr, "ERROR: unable to gather .py " \
                            "timestamps"
                        sys.exit(1)

                return ret

        # override the build_module method to do VERSION substitution on
        # pkg/__init__.py
        def build_module(self, module, module_file, package):

                if module == "__init__" and package == "pkg":
                        versionre = '(?m)^VERSION[^"]*"([^"]*)"'

                        # Grab the previously-built version out of the build
                        # tree.
                        try:
                                ocontent = \
                                    file(self.get_module_outfile(self.build_lib,
                                        [package], module)).read()
                                ov = re.search(versionre, ocontent).group(1)
                        except IOError:
                                ov = None
                        v = get_hg_version()
                        vstr = 'VERSION = "%s"' % v
                        # If the versions haven't changed, there's no need to
                        # recompile.
                        if v == ov:
                                return

                        mcontent = file(module_file).read()
                        mcontent = re.sub(versionre, vstr, mcontent)
                        tmpfd, tmp_file = tempfile.mkstemp()
                        os.write(tmpfd, mcontent)
                        os.close(tmpfd)
                        print "doing version substitution: ", v
                        rv = _build_py.build_module(self, module, tmp_file, package)
                        os.unlink(tmp_file)
                        return rv

                # Will raise a DistutilsError on failure.
                syntax_check(module_file)

                return _build_py.build_module(self, module, module_file, package)

        def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
            link=None, level=1):

                # If the timestamp on the source file (coming from mercurial if
                # unchanged, or from the filesystem if changed) doesn't match
                # the filesystem timestamp on the destination, then force the
                # copy to make sure the right data is in place.
                try:
                        dst_mtime = os.stat(outfile).st_mtime
                except OSError, e:
                        if e.errno != errno.ENOENT:
                                raise
                        dst_mtime = time.time()

                # The timestamp for __init__.py is the timestamp for the
                # workspace itself.
                if outfile.endswith("/pkg/__init__.py"):
                        src_mtime = self.timestamps["."]
                else:
                        src_mtime = self.timestamps.get(
                            os.path.join("src", infile), self.timestamps["."])

                # Force a copy of the file if the source timestamp is different
                # from that of the destination, not just if it's newer. This
                # allows timestamps in the working directory to regress (for
                # instance, following the reversion of a change).
                if dst_mtime != src_mtime:
                        f = self.force
                        self.force = True
                        dst, copied = _build_py.copy_file(self, infile, outfile,
                            preserve_mode, preserve_times, link, level)
                        self.force = f
                else:
                        dst, copied = outfile, 0

                # If we copied the file, then we need to go and readjust the
                # timestamp on the file to match what we have in our database.
                # Save the filename aside for our version of install_lib.
                if copied and dst.endswith(".py"):
                        os.utime(dst, (src_mtime, src_mtime))
                        self.copied.append(dst)

                return dst, copied

class build_data_func(Command):
        description = "build data files whose source isn't in deliverable form"
        user_options = []

        # As a subclass of distutils.cmd.Command, these methods are required to
        # be implemented.
        def initialize_options(self):
                pass

        def finalize_options(self):
                pass

        def run(self):
                # Anything that gets created here should get deleted in
                # clean_func.run() below.
                i18n_check()

                for f in intl_files:
                        intltool_merge(f, f[:-3])

                for l in help_locales:
                        path = "gui/help/%s/" % l
                        xml2po_merge(path + "package-manager.xml.in",
                            path + "package-manager.xml",
                            path + "%s.mo" % l)

                # create xml for localization
                localizablexml("gui/help/C/package-manager.xml",
                    "gui/help/C/package-manager.localizable.xml")
                # generate pkg_help.pot for next translation
                xml2po_gen("gui/help/C/package-manager.localizable.xml",
                    "gui/help/C/pkg_help.pot")

                for l in pkg_locales:
                        msgfmt("po/%s.po" % l, "po/%s.mo" % l)

                # generate pkg.pot for next translation
                intltool_update_maintain()
                intltool_update_pot()

                # create __LOCALE__ -> C symlink for omf file
                # to make installation with data_files list work
                locomf = "gui/help/package-manager-__LOCALE__.omf"
                if not os.path.exists(locomf):
                        os.symlink("package-manager-C.omf", locomf)

def rm_f(filepath):
        """Remove a file without caring whether it exists."""
        try:
                os.unlink(filepath)
        except OSError, e:
                if e.errno != errno.ENOENT:
                        raise

class clean_func(_clean):
        def initialize_options(self):
                _clean.initialize_options(self)
                self.build_base = build_dir

        def run(self):
                _clean.run(self)

                rm_f("po/.intltool-merge-cache")

                for f in intl_files:
                        rm_f(f[:-3])

                for l in pkg_locales:
                        rm_f("po/%s.mo" % l)
                rm_f("po/pkg.pot")

                for l in help_locales:
                        path = "gui/help/%s/" % l
                        rm_f(path + "package-manager.xml")
                        rm_f(path + "%s.mo" % l)
                rm_f("gui/help/C/pkg_help.pot")
                rm_f("gui/help/package-manager-__LOCALE__.omf")

                rm_f("po/i18n_errs.txt")

class clobber_func(Command):
        user_options = []
        description = "Deletes any and all files created by setup"

        def initialize_options(self):
                pass

        def finalize_options(self):
                pass

        def run(self):
                # nuke everything
                print("deleting " + dist_dir)
                shutil.rmtree(dist_dir, True)
                print("deleting " + build_dir)
                shutil.rmtree(build_dir, True)
                print("deleting " + root_dir)
                shutil.rmtree(root_dir, True)
                print("deleting " + pkgs_dir)
                shutil.rmtree(pkgs_dir, True)
                print("deleting " + extern_dir)
                shutil.rmtree(extern_dir, True)

class test_func(Command):
        # NOTE: these options need to be in sync with tests/run.py and the
        # list of options stored in initialize_options below. The first entry
        # in each tuple must be the exact name of a member variable.
        user_options = [
            ("archivedir=", 'a', "archive failed tests <dir>"),
            ("baselinefile=", 'b', "baseline file <file>"),
            ("coverage", "c", "collect code coverage data"),
            ("genbaseline", 'g', "generate test baseline"),
            ("only=", "o", "only <regex>"),
            ("parseable", 'p', "parseable output"),
            ("port=", "z", "lowest port to start a depot on"),
            ("timing", "t", "timing file <file>"),
            ("verbosemode", 'v', "run tests in verbose mode"),
            ("enableguitests", 'u', "enable IPS GUI tests, disabled by default"),
            ("stoponerr", 'x', "stop when a baseline mismatch occurs"),
            ("debugoutput", 'd', "emit debugging output"),
            ("showonexpectedfail", 'f',
                "show all failure info, even for expected fails"),
            ("startattest=", 's', "start at indicated test"),
            ("jobs=", 'j', "number of parallel processes to use"),
            ("quiet", "q", "use the dots as the output format"),
        ]
        description = "Runs unit and functional tests"

        def initialize_options(self):
                self.only = ""
                self.baselinefile = ""
                self.verbosemode = 0
                self.parseable = 0
                self.genbaseline = 0
                self.timing = 0
                self.coverage = 0
                self.stoponerr = 0
                self.debugoutput = 0
                self.showonexpectedfail = 0
                self.startattest = ""
                self.archivedir = ""
                self.port = 12001
                self.jobs = 1
                self.quiet = False

        def finalize_options(self):
                pass

        def run(self):
                os.putenv('PYEXE', sys.executable)
                os.chdir(os.path.join(pwd, "tests"))

                # Reconstruct the cmdline and send that to run.py
                cmd = [sys.executable, "run.py"]
                args = ""
                if "test" in sys.argv:
                        args = sys.argv[sys.argv.index("test")+1:]
                cmd.extend(args)
                subprocess.call(cmd)

class dist_func(_bdist):
        def initialize_options(self):
                _bdist.initialize_options(self)
                self.dist_dir = dist_dir

class Extension(distutils.core.Extension):
        # This class wraps the distutils Extension class, allowing us to set
        # build_64 in the object constructor instead of being forced to add it
        # after the object has been created.
        def __init__(self, name, sources, build_64=False, **kwargs):
                distutils.core.Extension.__init__(self, name, sources, **kwargs)
                self.build_64 = build_64

# These are set to real values based on the platform, down below
compile_args = None
if osname in ("sunos", "linux", "darwin"):
        compile_args = [ "-O3" ]
if osname == "sunos":
        link_args = [ "-zstrip-class=nonalloc" ]
else:
        link_args = []

# We don't support 64-bit yet, but 64-bit _actions.so, _common.so, and
# _varcet.so are needed for a system repository mod_wsgi application,
# sysrepo_p5p.py.
ext_modules = [
        Extension(
            'actions._actions',
            _actions_srcs,
            include_dirs = include_dirs,
            extra_compile_args = compile_args,
            extra_link_args = link_args,
            build_64 = True
        ),
        Extension(
            'actions._common',
            _actcomm_srcs,
            include_dirs = include_dirs,
            extra_compile_args = compile_args,
            extra_link_args = link_args,
            build_64 = True
        ),
        Extension(
            '_varcet',
            _varcet_srcs,
            include_dirs = include_dirs,
            extra_compile_args = compile_args,
            extra_link_args = link_args,
            build_64 = True
        ),
        Extension(
            'solver',
            solver_srcs,
            include_dirs = include_dirs + ["."],
            extra_compile_args = compile_args,
            extra_link_args = link_args + solver_link_args,
            define_macros = [('_FILE_OFFSET_BITS', '64')]
        ),
        ]
elf_libraries = None
data_files = web_files
cmdclasses = {
        'install': install_func,
        'install_data': install_data_func,
        'install_lib': install_lib_func,
        'build': build_func,
        'build_data': build_data_func,
        'build_ext': build_ext_func,
        'build_py': build_py_func,
        'bdist': dist_func,
        'lint': lint_func,
        'clint': clint_func,
        'pylint': pylint_func,
        'pylint_quiet': pylint_func_quiet,
        'clean': clean_func,
        'clobber': clobber_func,
        'test': test_func,
        'installfile': installfile,
        }

# all builds of IPS should have manpages
data_files += [
        (man1_dir, man1_files),
        (man1m_dir, man1m_files),
        (man5_dir, man5_files),
        (man1_ja_JP_dir, man1_ja_files),
        (man1m_ja_JP_dir, man1m_ja_files),
        (man5_ja_JP_dir, man5_ja_files),
        (man1_zh_CN_dir, man1_zh_CN_files),
        (man1m_zh_CN_dir, man1m_zh_CN_files),
        (man5_zh_CN_dir, man5_zh_CN_files),
        (resource_dir, resource_files),
        ]

# add transforms
data_files += [
        (transform_dir, transform_files)
        ]

if osname == 'sunos':
        # Solaris-specific extensions are added here
        data_files += [
                (smf_app_dir, smf_app_files),
                (execattrd_dir, execattrd_files),
                (authattrd_dir, authattrd_files),
                (userattrd_dir, userattrd_files),
                (sysrepo_dir, sysrepo_files),
                (sysrepo_logs_dir, sysrepo_log_stubs),
                (sysrepo_cache_dir, {}),
                (depot_dir, depot_files),
                (depot_conf_dir, {}),
                (depot_logs_dir, depot_log_stubs),
                (depot_cache_dir, {}),
                (autostart_dir, autostart_files),
                (desktop_dir, desktop_files),
                (gconf_dir, gconf_files),
                (omf_dir, omf_files),
                ('usr/share/icons/hicolor/48x48/mimetypes',
                    ['gui/data/gnome-mime-application-vnd.pkg5.info.png']),
                ('usr/share/mime/packages', ['gui/data/packagemanager-info.xml']),
                (pm_share_dir, ['gui/data/packagemanager.ui']),
                (mirror_cache_dir, {}),
                (mirror_logs_dir, {}),
                ]
        data_files += [
            (os.path.join(startpage_dir, locale), files)
            for locale, files in startpage_files.iteritems()
        ]
        data_files += [
            (os.path.join(help_dir, locale), files)
            for locale, files in help_files.iteritems()
        ]
        # install localizable .xml and its .pot file to put into localizable file package
        data_files += [
            (os.path.join(help_dir, '__LOCALE__'),
                [('gui/help/C/package-manager.localizable.xml', 'package-manager.xml'),
                ('gui/help/C/pkg_help.pot', 'pkg_help.pot')])
        ]
        data_files += [
            (os.path.join(locale_dir, locale, 'LC_MESSAGES'),
                [('po/%s.mo' % locale, 'pkg.mo')])
            for locale in pkg_locales
        ]
        # install English .pot file to put into localizable file package
        data_files += [
            (os.path.join(locale_dir, '__LOCALE__', 'LC_MESSAGES'),
                [('po/pkg.pot', 'pkg.pot')])
        ]
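
        # Generate the icon data_files entries for the update-manager and
        # package-manager themes (hicolor, HighContrast, HighContrastInverse)
        # at each of the icon sizes delivered below.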
        for t in 'HighContrast', 'HighContrastInverse', '':
                for px in '24', '36', '48':
                        data_files += [(
                            '%s/icons/%s/%sx%s/actions' % (um_share_dir, t or 'hicolor', px, px),
                            ['um/data/icons/%s/%sx%s/updatemanager.png' % (t, px, px)]
                        )]
                data_files += [(
                    '%s/icons/%s/16x16/actions' % (pm_share_dir, t or 'hicolor'),
                    [
                        'gui/data/icons/%s/16x16/%s.png' % (t, n)
                        for n in ('filter_all', 'filter_selected', 'progress_checkmark',
                            'selection', 'status_checkmark', 'status_installed',
                            'status_newupdate', 'status_notinstalled')
                    ]
                )]
                data_files += [
                    ('%s/icons/%s/%sx%s/actions' % (pm_share_dir, t or 'hicolor', px, px),
                    [
                        'gui/data/icons/%s/%sx%s/%s.png' % (t, px, px, n)
                        for n in ('pm-install_update', 'pm-refresh',
                            'pm-remove', 'pm-update_all')
                    ])
                    for px in (24, 48)
                ]
                data_files += [(
                    '%s/icons/%s/48x48/actions' % (pm_share_dir, t or 'hicolor'),
                    ['gui/data/icons/%s/48x48/packagemanager.png' % t]
                )]
                data_files += [
                    ('usr/share/icons/%s/48x48/apps' % (t or 'hicolor'),
                    [
                        'um/data/icons/%s/48x48/updatemanager.png' % t,
                        'gui/data/icons/%s/48x48/packagemanager.png' % t
                    ]),
                ]

        # These two icons don't fit any patterns.
        data_files += [
            (os.path.join(pm_share_dir, 'icons/hicolor/16x16/actions'), [
                'gui/data/icons/16x16/progress_blank.png']),
            (os.path.join(pm_share_dir, 'icons/hicolor/24x24/actions'), [
                'gui/data/icons/24x24/pm-check.png']),
        ]

if osname == 'sunos' or osname == "linux":
        # Unix platforms which the elf extension has been ported to
        # are specified here, so they are built automatically
        elf_libraries = ['elf']
        ext_modules += [
                Extension(
                    'elf',
                    elf_srcs,
                    include_dirs = include_dirs,
                    libraries = elf_libraries,
                    extra_compile_args = compile_args,
                    extra_link_args = link_args,
                ),
                ]

        # Solaris has built-in md library and Solaris-specific arch extension
        # All others use OpenSSL and cross-platform arch module
        if osname == 'sunos':
                elf_libraries += [ 'md' ]
                ext_modules += [
                        Extension(
                            'arch',
                            arch_srcs,
                            include_dirs = include_dirs,
                            extra_compile_args = compile_args,
                            extra_link_args = link_args,
                            define_macros = [('_FILE_OFFSET_BITS', '64')]
                        ),
                        Extension(
                            'pspawn',
                            pspawn_srcs,
                            include_dirs = include_dirs,
                            extra_compile_args = compile_args,
                            extra_link_args = link_args,
                            define_macros = [('_FILE_OFFSET_BITS', '64')]
                        ),
                        Extension(
                            'syscallat',
                            syscallat_srcs,
                            include_dirs = include_dirs,
                            extra_compile_args = compile_args,
                            extra_link_args = link_args,
                            define_macros = [('_FILE_OFFSET_BITS', '64')]
                        ),
                        ]
        else:
                elf_libraries += [ 'ssl' ]

setup(cmdclass = cmdclasses,
    name = 'pkg',
    version = '0.1',
    package_dir = {'pkg':'modules'},
    packages = packages,
    data_files = data_files,
    ext_package = 'pkg',
    ext_modules = ext_modules,
    )