# setup.py revision 3245
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
from __future__ import print_function
import errno
import fnmatch
import os
import platform
import six
import stat
import sys
import shutil
import re
import subprocess
import tarfile
import tempfile
import py_compile
import hashlib
import time
# Map the detected platform name to a broad OS type used for install
# layout decisions below: everything we support except Windows is POSIX.
# NOTE(review): 'osname' is assigned earlier in the file (not in this
# chunk).  The leading indentation of this chain was lost in this
# revision, making it a syntax error; it has been restored here.
if osname == 'sunos':
    ostype = "posix"
elif osname == 'linux':
    ostype = "posix"
elif osname == 'windows':
    ostype = "windows"
elif osname == 'darwin':
    ostype = "posix"
elif osname == 'aix':
    # AIX additionally pins the architecture name.
    arch = "aix"
    ostype = "posix"
# the version of pylint that we must have in order to run the pylint checks.
req_pylint_version = "0.25.2"
#
# Unbuffer stdout and stderr. This helps to ensure that subprocess output
# is properly interleaved with output from this program.
#
# NOTE(review): the 'else:' below has no matching 'if' in this chunk; the
# conditional it belonged to (and most of both branches) appears to have
# been stripped from this revision.
else:
# Extract Python minor version.
# Install-image-relative directories into which deliverables are placed.
scripts_dir = 'usr/bin'
svc_method_dir = 'lib/svc/method'
svc_share_dir = 'lib/svc/share'
resource_dir = 'usr/share/lib/pkg'
sysrepo_dir = 'etc/pkg/sysrepo'
sysrepo_logs_dir = 'var/log/pkg/sysrepo'
depot_dir_22 = 'etc/pkg/depot/apache22'
depot_conf_dir = 'etc/pkg/depot/conf.d'
depot_logs_dir = 'var/log/pkg/depot'
depot_cache_dir = 'var/cache/pkg/depot'
locale_dir = 'usr/share/locale'
mirror_logs_dir = 'var/log/pkg/mirror'
# A list of source, destination tuples of modules which should be hardlinked
# together if the os supports it and otherwise copied.
hardlink_modules = []
# Scripts delivered on Solaris, keyed by destination directory; each
# entry pairs an in-tree source path with its installed name.
# NOTE(review): this dict is corrupted in this revision -- several
# opening '[' lines and destination-directory keys (likely
# svc_method_dir / svc_share_dir) were stripped, leaving unbalanced
# brackets around the 'svc/*' entries below.
scripts_sunos = {
scripts_dir: [
['client.py', 'pkg'],
['pkgdep.py', 'pkgdepend'],
['pkgrepo.py', 'pkgrepo'],
['util/publish/pkgdiff.py', 'pkgdiff'],
['util/publish/pkglint.py', 'pkglint'],
['util/publish/pkgmerge.py', 'pkgmerge'],
['util/publish/pkgmogrify.py', 'pkgmogrify'],
['util/publish/pkgsurf.py', 'pkgsurf'],
['publish.py', 'pkgsend'],
['pull.py', 'pkgrecv'],
['sign.py', 'pkgsign'],
],
lib_dir: [
['depot.py', 'pkg.depotd'],
['sysrepo.py', 'pkg.sysrepo'],
['depot-config.py', "pkg.depot-config"]
],
# NOTE(review): the key and opening bracket for the SMF method scripts
# below are missing from this revision.
['svc/svc-pkg-depot', 'svc-pkg-depot'],
['svc/svc-pkg-mdns', 'svc-pkg-mdns'],
['svc/svc-pkg-mirror', 'svc-pkg-mirror'],
# NOTE(review): entry below lost its opening "['svc/...," half.
'svc-pkg-repositories-setup'],
['svc/svc-pkg-server', 'svc-pkg-server'],
['svc/svc-pkg-sysrepo', 'svc-pkg-sysrepo'],
# NOTE(review): entry below lost its opening "['svc/...," half.
'svc-pkg-sysrepo-cache'],
],
['svc/pkg5_include.sh', 'pkg5_include.sh'],
],
rad_dir: [
["rad-invoke.py", "rad-invoke"],
],
}
# Scripts delivered on Windows: the Python sources plus the batch
# wrappers that invoke them.  Keys are destination directories; each
# entry pairs the in-tree source path with the installed file name.
scripts_windows = {
    scripts_dir: [
        ['pkgrepo.py', 'pkgrepo.py'],
        ['publish.py', 'publish.py'],
        ['scripts/pkgsend.bat', 'pkgsend.bat'],
        ['scripts/pkgrecv.bat', 'pkgrecv.bat'],
    ],
    lib_dir: [
        ['scripts/pkg.depotd.bat', 'pkg.depotd.bat'],
    ],
}
# Scripts delivered on generic Unix platforms (Linux, OS X, AIX),
# keyed by destination directory; each entry pairs the in-tree source
# path with its installed name.
# NOTE(review): the opening 'scripts_other_unix = {' line was missing
# from this revision even though the name is referenced by the
# per-platform 'scripts' table; it has been restored here.
scripts_other_unix = {
    scripts_dir: [
        ['pkgdep.py', 'pkgdep'],
        ['util/publish/pkgdiff.py', 'pkgdiff'],
        ['util/publish/pkgmogrify.py', 'pkgmogrify'],
        ['publish.py', 'publish.py'],
        ['scripts/pkgsend.sh', 'pkgsend'],
        ['scripts/pkgrecv.sh', 'pkgrecv'],
    ],
    lib_dir: [
        ['scripts/pkg.depotd.sh', 'pkg.depotd'],
    ],
    rad_dir: [
        ["rad-invoke.py", "rad-invoke"],
    ],
}
# Per-platform script table, indexed by 'osname'.  Platforms we cannot
# identify fall back to the Solaris layout.
scripts = {
    "sunos": scripts_sunos,
    "linux": scripts_other_unix,
    "windows": scripts_windows,
    "darwin": scripts_other_unix,
    "aix": scripts_other_unix,
    "unknown": scripts_sunos,
}
# Man page deliverables, expressed as full paths under the man page
# output root so the build and install steps can share the same lists.
# Section 1: user commands.
man1_files = [
    '{0}/man1/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.1',
        'pkgdepend.1',
        'pkgdiff.1',
        'pkgfmt.1',
        'pkglint.1',
        'pkgmerge.1',
        'pkgmogrify.1',
        'pkgrecv.1',
        'pkgrepo.1',
        'pkgsend.1',
        'pkgsign.1',
        'pkgsurf.1',
    ]
]

# Section 1M: administrative commands.
man1m_files = [
    '{0}/man1m/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.depotd.1m',
        'pkg.depot-config.1m',
        'pkg.sysrepo.1m',
    ]
]

# Section 5: file formats and standards.
man5_files = [
    '{0}/man5/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.5'
    ]
]
# Japanese localized man pages (sections 1, 1M and 5), as full paths
# under the man page output root.
man1_ja_files = [
    '{0}/ja_JP.UTF-8/man1/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.1',
        'pkgdepend.1',
        'pkgdiff.1',
        'pkgfmt.1',
        'pkglint.1',
        'pkgmerge.1',
        'pkgmogrify.1',
        'pkgrecv.1',
        'pkgrepo.1',
        'pkgsend.1',
        'pkgsign.1',
    ]
]
man1m_ja_files = [
    '{0}/ja_JP.UTF-8/man1m/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.depotd.1m',
        'pkg.sysrepo.1m',
    ]
]
man5_ja_files = [
    '{0}/ja_JP.UTF-8/man5/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.5'
    ]
]

# Simplified Chinese localized section 1 man pages.
man1_zh_CN_files = [
    '{0}/zh_CN.UTF-8/man1/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.1',
        'pkgdepend.1',
        'pkgdiff.1',
        'pkgfmt.1',
        'pkglint.1',
        'pkgmerge.1',
        'pkgmogrify.1',
        'pkgrecv.1',
        'pkgrepo.1',
        'pkgsend.1',
        'pkgsign.1',
    ]
]
# Simplified Chinese localized section 1M man pages.
# NOTE(review): the opening 'man1m_zh_CN_files = [' line was missing
# from this revision (the structure mirrors man1m_ja_files above); it
# has been restored here.
man1m_zh_CN_files = [
    MANPAGE_OUTPUT_ROOT + '/zh_CN.UTF-8/man1m/' + f
    for f in [
        'pkg.depotd.1m',
        'pkg.sysrepo.1m',
    ]
]
# Simplified Chinese localized section 5 man pages.
man5_zh_CN_files = [
    '{0}/zh_CN.UTF-8/man5/{1}'.format(MANPAGE_OUTPUT_ROOT, page)
    for page in [
        'pkg.5'
    ]
]
# Python packages delivered by this build.
packages = [
    'pkg',
    'pkg.actions',
    'pkg.bundle',
    'pkg.client',
    'pkg.client.linkedimage',
    'pkg.client.transport',
    'pkg.file_layout',
    'pkg.flavor',
    'pkg.lint',
    'pkg.portable',
    'pkg.publish',
    'pkg.server'
]

# Modules known to be pylint-clean; the 'pylint' command runs over
# exactly this set (see the comment in the pylint command class).
pylint_targets = [
    'pkg.altroot',
    'pkg.client.__init__',
    'pkg.client.api',
    'pkg.client.linkedimage',
    'pkg.client.pkg_solver',
    'pkg.client.pkgdefs',
    'pkg.client.pkgremote',
    'pkg.client.plandesc',
    'pkg.client.printengine',
    'pkg.client.progress',
    'pkg.misc',
    'pkg.pipeutils',
]
# Web UI deliverables.
# NOTE(review): the os.walk loop that populated web_files appears to
# have been stripped from this revision; the fragments below
# ('if not files', 'continue', stray ']))' closers) are its remnants
# and are not valid at module scope.
web_files = []
if not files:
continue
if f != "Makefile"
]))
# install same set of files in "en/" in "__LOCALE__/ as well"
# for localizable file package (regarding themes, install
# theme "oracle.com" only)
if f != "Makefile"
]))
# SMF service manifests delivered on Solaris.
smf_app_files = [
    'svc/pkg-depot.xml',
    'svc/pkg-mdns.xml',
]
# NOTE(review): the following lists are empty in this revision; their
# contents look to have been stripped -- confirm against a complete
# copy of this file.
resource_files = []
transform_files = []
sysrepo_files = []
sysrepo_files_22 = []
# NOTE(review): stray ']' -- the list it closed was stripped from this
# revision.
]
# Depot (pkg.depotd / apache depot) deliverables; contents appear to
# have been stripped, leaving empty lists.
depot_files = [
]
depot_files_22 = [
]
depot_log_stubs = [
]
ignored_deps_files = []
# The apache-based depot includes an shtml file we add to the resource dir
# NOTE(review): the call this fragment closed (extending a file list
# with repos.shtml) lost its opening line in this revision.
["util/apache2/depot/repos.shtml"]))
# RBAC attribute fragments delivered into the exec_attr.d, auth_attr.d
# and user_attr.d directories.
execattrd_files = [
    'util/misc/exec_attr.d/package:pkg',
]
authattrd_files = ['util/misc/auth_attr.d/package:pkg']
userattrd_files = ['util/misc/user_attr.d/package:pkg']
# Locales for which translations are delivered.
pkg_locales = (
    'ar ca cs de es fr he hu id it ja ko nl pl pt_BR ru sk sv '
    'zh_CN zh_HK zh_TW'
).split()
sha512_t_srcs = [
]
sysattr_srcs = [
]
syscallat_srcs = [
]
pspawn_srcs = [
]
elf_srcs = [
]
arch_srcs = [
]
_actions_srcs = [
]
_actcomm_srcs = [
]
_varcet_srcs = [
'modules/_varcet.c'
]
solver_srcs = [
]
# NOTE(review): the entire lint/pylint command section below is heavily
# truncated in this revision -- run() bodies, the version-comparison
# loop, the pylint invocation and the C lint command lines are missing,
# and the indentation of class/def bodies has been lost.  The comments
# added below only annotate the surviving fragments.
if osname == 'sunos':
# Runs lint on the extension module source code
class pylint_func(Command):
description = "Runs pylint tools over IPS python source code"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
# Make string shell-friendly
def supported_pylint_ver(version):
"""Compare the installed version against the version
we require to build with, returning False if the version
is too old. It's tempting to use pkg.version.Version
here, but since that's a build artifact, we'll do it
the long way."""
# if the lists are of different lengths, we just
# compare with the precision we have.
try:
return False
except ValueError:
# if we somehow get non-numeric version
# components, we ignore them.
continue
return True
# it's fine to default to the required version - the build will
# break if the installed version is incompatible and $PYLINT_VER
# didn't get set, somehow.
if pylint_ver_str == "":
"$PKG_SKIP_PYLINT was set")
return
elif not pylint_ver_str or \
"installed version {0} is older than version {1}".format(
return
# Insert tests directory onto sys.path so any custom checkers
# can be found.
# assumes pylint is accessible on the sys.path
#
# For some reason, the load-plugins option, when used in the
# rcfile, does not work, so we put it here instead, to load
# our custom checkers.
#
# Unfortunately, pylint seems pretty fragile and will crash if
# we try to run it over all the current pkg source. Hence for
# now we only run it over a subset of the source. As source
# files are made pylint clean they should be added to the
# pylint_targets list.
#
args = ['--load-plugins=multiplatform']
if quiet:
args += ['--reports=no']
# Quiet variant of the pylint command (reports suppressed).
class pylint_func_quiet(pylint_func):
# NOTE(review): 'include_dirs' looks misplaced here; it presumably
# belonged to the C extension lint setup -- confirm against a
# complete copy of this file.
include_dirs = [ 'modules' ]
# Runs lint on the extension module source code
class clint_func(Command):
description = "Runs lint tools over IPS C extension source code"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
# Make string shell-friendly
# NOTE(review): orphaned 'else:' -- the matching 'if' (platform lint
# tool selection) is missing from this revision.
else:
lint = ['lint']
# NOTE(review): repeated continuation fragments from the per-extension
# lint invocations; their surrounding expressions are missing.
['-D_FILE_OFFSET_BITS=64'] + \
['-D_FILE_OFFSET_BITS=64'] + \
['-D_FILE_OFFSET_BITS=64'] + \
['-D_FILE_OFFSET_BITS=64'] + \
['-D_FILE_OFFSET_BITS=64'] + \
# Runs both C and Python lint
description = "Runs C and Python lint checkers"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
# Make string shell-friendly
# NOTE(review): the install command classes below are heavily truncated
# in this revision -- most statements are missing and indentation has
# been lost; only annotate, do not rely on these bodies as written.
class install_func(_install):
def initialize_options(self):
# PRIVATE_BUILD set in the environment tells us to put the build
# directory into the .pyc files, rather than the final
# installation directory.
if private_build is None:
else:
# This is used when installing scripts, below, but it isn't a
# standard distutils variable.
"""At the end of the install function, we need to rename some
files because distutils provides no way to rename files as they
are placed in their install locations.
"""
for e in [".py", ".pyc"]:
if ostype == "posix":
else:
# XXX Uncomment it when we need to deliver python 3.4 version
# of modules.
# Don't install the scripts for python 3.4.
# if py_version == '3.4':
# return
# make scripts executable
class install_lib_func(_install_lib):
"""Remove the target files prior to the standard install_lib procedure
if the build_py module has determined that they've actually changed.
This may be needed when a module's timestamp goes backwards in time, if
a working-directory change is reverted, or an older changeset is checked
out.
"""
class install_data_func(_install_data):
"""Enhance the standard install_data subcommand to take not only a list
of filenames, but a list of source and destination filename tuples, for
the cases where a filename needs to be renamed between the two
locations."""
for f in self.data_files:
if not files:
else:
else:
# NOTE(review): the fragments below appear to come from a
# subprocess-running helper (env/stderr handling) whose definition was
# stripped from this revision.
if updenv:
# use temp environment modified with the given dict
else:
# just use environment of this (parent) process as is
if ignerr:
# send stderr to devnull
elif savestderr:
else:
# just use stderr of this (parent) process
stderr = None
if ret != 0:
if stderr:
if stderr:
"""A clone of distutils.file_util._copy_file_contents() that strips the
CDDL text. For Python files, we replace the CDDL text with an equal
number of empty comment lines so that line numbers match between the
source and destination files."""
# Match the lines between and including the CDDL header signposts, as
# well as empty comment lines before and after, if they exist.
try:
except EnvironmentError as e:
raise DistutilsFileError("could not delete "
while True:
if not buf:
break
if match:
# replace the CDDL expression
# with the same number of empty
# comment lines as the cddl_re
# matched.
buf)
else:
# Make file_util use our version of _copy_file_contents
def intltool_update_maintain():
"""Check if scope of localization looks up-to-date or possibly not,
actual source files (e.g. .py) detected.
"""
args = [
"/usr/bin/intltool-update", "--maintain"
]
print("New file(s) with translatable strings detected:",
print("""\
Please evaluate whether any of the above file(s) needs localization.
If so, please add its name to po/POTFILES.in. If not (e.g., it's not
delivered), please add its name to po/POTFILES.skip.
print("""\
The following files are listed in po/POTFILES.in, but no longer exist
print("Please remove the file names from po/POTFILES.in",
def intltool_update_pot():
"""Generate pkg.pot by extracting localizable strings from source
files (e.g. .py)
"""
args = [
"/usr/bin/intltool-update", "--pot"
]
return
args = [
]
def i18n_check():
"""Checks for common i18n messaging bugs in the source."""
src_files = []
# A list of the i18n errors we check for in the code
# This checks that messages with multiple parameters are always
# written using "{name}" format, rather than just "{0}"
"format string with unnamed arguments cannot be properly localized"
]
continue
continue
args = [
for err in common_i18n_errors:
if found_errs:
print("""\
The following i18n errors were detected and should be corrected:
(this list is saved in po/i18n_errs.txt)
return
"""create XML help for localization, where French part of legalnotice
is stripped off
"""
return
# indicates currently in French part of legalnotice
for l in fsrc:
if in_fr: # in French part
if l.startswith('</legalnotice>'):
# reached end of legalnotice
elif l.startswith('<para lang="fr"/>') or \
l.startswith('<para lang="fr"></para>'):
else:
# not in French part
"""Input is English XML file. Output is pkg_help.pot, message
source for next translation update.
"""
return
"""Input is English XML file and <lang>.po file (which contains
translations). Output is translated XML file.
"""
return
# A distutils command that copies a single file through the
# CDDL-stripping copy routine above.
# NOTE(review): the bodies of initialize_options/finalize_options and
# run are truncated in this revision.
class installfile(Command):
user_options = [
("file=", "f", "source file to copy"),
("dest=", "d", "destination directory"),
("mode=", "m", "file mode"),
]
description = "De-CDDLing file copy"
def initialize_options(self):
def finalize_options(self):
try:
except ValueError:
return ret
class build_func(_build):
def initialize_options(self):
def get_hg_version():
try:
except OSError:
print("ERROR: unable to obtain mercurial version",
return "unknown"
def syntax_check(filename):
""" Run python's compiler over the file, and discard the results.
Arrange to generate an exception if the file does not compile.
This is needed because distutil's own use of pycompile (in the
distutils.utils module) is broken, and doesn't stop on error. """
try:
except py_compile.PyCompileError as e:
res = ""
continue
# Assume it's a tuple of (filename, lineno, col, code)
raise DistutilsError(res)
# On Solaris, ld inserts the full argument to the -o option into the symbol
# table. This means that the resulting object will be different depending on
# the path at which the workspace lives, and not just on the interesting content
# of the object.
#
# In order to work around that bug (7076871), we create a new compiler class
# that looks at the argument indicating the output file, chdirs to its
# directory, and runs the real link with the output file set to just the base
# name of the file.
#
# Unfortunately, distutils isn't too customizable in this regard, so we have to
# twiddle with a couple of the names in the distutils.ccompiler namespace: we
# have to add a new entry to the compiler_class dict, and we have to override
# the new_compiler() function to point to our own. Luckily, our copy of
# new_compiler() gets to be very simple, since we always know what we want to
# return.
# NOTE(review): the link() override and the compiler_class registration
# are truncated in this revision; only fragments survive below.
class MyUnixCCompiler(UnixCCompiler):
assert(not output_dir)
'unixccompiler', 'MyUnixCCompiler',
'standard Unix-style compiler with a link stage modified for Solaris'
)
if osname == 'sunos':
# build_ext override that (on Solaris) builds extensions twice, once
# 32-bit and once 64-bit.  Bodies are truncated in this revision.
class build_ext_func(_build_ext):
def initialize_options(self):
if osname == 'sunos':
# Build 32-bit
return
# Set up for 64-bit
# store our 64-bit extensions elsewhere
# Build 64-bit
# Reset to 32-bit
return path
# build_py override that does VERSION substitution on pkg/__init__.py
# and keeps .py mtimes stable across builds.
# NOTE(review): method signatures and most statements are truncated in
# this revision; only fragments survive below.
class build_py_func(_build_py):
# Gather the timestamps of the .py files in the gate, so we can
# force the mtimes of the built and delivered copies to be
# consistent across builds, causing their corresponding .pyc
# files to be unchanged unless the .py file content changed.
self.timestamps = {}
p = subprocess.Popen(
if p.wait() != 0:
print("ERROR: unable to gather .py timestamps",
return ret
# override the build_module method to do VERSION substitution on
# pkg/__init__.py
versionre = '(?m)^VERSION[^"]*"([^"]*)"'
# Grab the previously-built version out of the build
# tree.
try:
ocontent = \
except IOError:
ov = None
v = get_hg_version()
# If the versions haven't changed, there's no need to
# recompile.
if v == ov:
return
print("doing version substitution: ", v)
return rv
# Will raise a DistutilsError on failure.
# If the timestamp on the source file (coming from mercurial if
# unchanged, or from the filesystem if changed) doesn't match
# the filesystem timestamp on the destination, then force the
# copy to make sure the right data is in place.
try:
except OSError as e:
raise
# The timestamp for __init__.py is the timestamp for the
# workspace itself.
else:
# Force a copy of the file if the source timestamp is different
# from that of the destination, not just if it's newer. This
# allows timestamps in the working directory to regress (for
# instance, following the reversion of a change).
else:
# If we copied the file, then we need to go and readjust the
# timestamp on the file to match what we have in our database.
# Save the filename aside for our version of install_lib.
def manpage_input_dir(path):
"""Convert a manpage output path to the directory where its source lives."""
loc = ""
else:
raise RuntimeError("bad manpage path")
"""Convert XML manpages to ROFF for delivery.
The input should be a list of the output file paths. The corresponding
inputs will be generated from this. We do it in this way so that we can
share the paths with the install code.
All paths should have a common manpath root. In particular, pages
belonging to different localizations should be run through this function
separately.
"""
do_files = [
for f in files
]
if do_files:
# Get the output dir by removing the filename and the manX
# directory
# NOTE(review): the build_data/clean/clobber/test command classes below
# are truncated in this revision; run() bodies and several method
# signatures are missing.
class build_data_func(Command):
description = "build data files whose source isn't in deliverable form"
user_options = []
# As a subclass of distutils.cmd.Command, these methods are required to
# be implemented.
def initialize_options(self):
pass
def finalize_options(self):
pass
# Anything that gets created here should get deleted in
# clean_func.run() below.
for l in pkg_locales:
# generate pkg.pot for next translation
"""Remove a file without caring whether it exists."""
try:
except OSError as e:
raise
class clean_func(_clean):
def initialize_options(self):
rm_f("po/.intltool-merge-cache")
for l in pkg_locales:
rm_f("po/i18n_errs.txt")
class clobber_func(Command):
user_options = []
description = "Deletes any and all files created by setup"
def initialize_options(self):
pass
def finalize_options(self):
pass
# nuke everything
print("deleting " + dist_dir)
print("deleting " + build_dir)
print("deleting " + root_dir)
print("deleting " + pkgs_dir)
print("deleting " + extern_dir)
# NOTE(review): the options below belong to the test command class,
# whose 'class test_func(...)' header is missing from this revision.
# list of options stored in initialize_options below. The first entry
# in each tuple must be the exact name of a member variable.
user_options = [
("archivedir=", 'a', "archive failed tests <dir>"),
("baselinefile=", 'b', "baseline file <file>"),
("coverage", "c", "collect code coverage data"),
("genbaseline", 'g', "generate test baseline"),
("only=", "o", "only <regex>"),
("parseable", 'p', "parseable output"),
("port=", "z", "lowest port to start a depot on"),
("timing", "t", "timing file <file>"),
("verbosemode", 'v', "run tests in verbose mode"),
("stoponerr", 'x', "stop when a baseline mismatch occurs"),
("debugoutput", 'd', "emit debugging output"),
("showonexpectedfail", 'f',
"show all failure info, even for expected fails"),
("startattest=", 's', "start at indicated test"),
("jobs=", 'j', "number of parallel processes to use"),
("quiet", "q", "use the dots as the output format"),
("livesystem", 'l', "run tests on live system"),
]
description = "Runs unit and functional tests"
def initialize_options(self):
def finalize_options(self):
pass
# Reconstruct the cmdline and send that to run.py
args = ""
# NOTE(review): orphaned method fragment; its enclosing class is
# missing from this revision.
def initialize_options(self):
# This class wraps the distutils Extension class, allowing us to set
# build_64 in the object constructor instead of being forced to add it
# after the object has been created.
# NOTE(review): the wrapper class definition itself is missing here.
# These are set to real values based on the platform, down below
compile_args = None
compile_args = [ "-O3" ]
if osname == "sunos":
link_args = [ "-zstrip-class=nonalloc" ]
else:
link_args = []
# NOTE(review): the Extension constructor calls were stripped from
# this list, leaving only the module names and closing parens.
ext_modules = [
'actions._actions',
),
'actions._common',
),
'_varcet',
),
'solver',
),
]
# Native libraries linked into the extensions; the platform-specific
# sections further down fill these in where the corresponding
# extension is actually built.
elf_libraries = None
sysattr_libraries = None
sha512_t_libraries = None

# distutils command name -> implementing class for this build.
cmdclasses = {
    'install': install_func,
    'install_data': install_data_func,
    'install_lib': install_lib_func,
    'build': build_func,
    'build_data': build_data_func,
    'build_ext': build_ext_func,
    'build_py': build_py_func,
    'bdist': dist_func,
    'lint': lint_func,
    'clint': clint_func,
    'pylint': pylint_func,
    'pylint_quiet': pylint_func_quiet,
    'clean': clean_func,
    'clobber': clobber_func,
    'test': test_func,
    'installfile': installfile,
}
# NOTE(review): the data_files/ext_modules assembly and the final
# setup() call below are truncated in this revision -- several list
# entries, the Extension constructor calls, and the 'setup(' opener
# itself are missing.
# all builds of IPS should have manpages
data_files += [
(man1_dir, man1_files),
(man1m_dir, man1m_files),
(man5_dir, man5_files),
]
# add transforms
data_files += [
]
# add ignored deps
data_files += [
]
if osname == 'sunos':
# Solaris-specific extensions are added here
data_files += [
(sysrepo_cache_dir, {}),
(depot_dir, depot_files),
(depot_conf_dir, {}),
(depot_cache_dir, {}),
(mirror_cache_dir, {}),
(mirror_logs_dir, {}),
]
# install localizable .xml and its .pot file to put into localizable file package
data_files += [
for locale in pkg_locales
]
# install English .pot file to put into localizable file package
data_files += [
]
# Unix platforms which the elf extension has been ported to
# are specified here, so they are built automatically
elf_libraries = ['elf']
ext_modules += [
'elf',
),
]
# Solaris has built-in md library and Solaris-specific arch extension
# All others use OpenSSL and cross-platform arch module
if osname == 'sunos':
elf_libraries += [ 'md' ]
sysattr_libraries = [ 'nvpair' ]
sha512_t_libraries = [ 'md' ]
ext_modules += [
'arch',
),
'pspawn',
),
'syscallat',
),
'sysattr',
),
'sha512_t',
),
]
else:
elf_libraries += [ 'ssl' ]
# NOTE(review): keyword arguments of the final setup() call; the
# 'setup(' line itself is missing from this revision.
name = 'pkg',
version = '0.1',
ext_package = 'pkg',
classifiers = [
'Programming Language :: Python :: 2 :: Only',
'Programming Language :: Python :: 2.7',
]
)