#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
#
"""Provides the interfaces and exceptions needed to determine which packages
should be installed, updated, or removed to perform a requested operation."""
import operator
import six
import time
# Redefining built-in; pylint: disable=W0622
# Imports from package six are not grouped: pylint: disable=C0412
#
# Constants representing reasons why packages were trimmed from possible set.
# The reasons listed below do *not* always map 1:1 to the error text produced;
# instead, they indicate the 'type' of trim applied. Values below must be
# unique, but can be changed at any time.
#
"""local exception used to pass failure to match
dependencies in packages out of nested evaluation"""
"""The FMRIs related to the exception."""
"""A constant indicating why the related FMRIs were rejected."""
return self.__reason_id
"""A string describing why the related FMRIs were rejected."""
"""Provides a SAT-based solution solver to determine which packages
should be installed, updated, or removed to perform a requested
operation."""
"""Create a PkgSolver instance; catalog should contain all
known pkgs, installed fmris should be a dict of fmris indexed
by name that define pkgs current installed in the image.
Pub_ranks dict contains (rank, stickiness, enabled) for each
publisher. variants are the current image variants; avoids is
the set of pkg stems being avoided in the image due to
administrator action (e.g. --reject, uninstall)."""
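# Illustrative shapes for the arguments described above (hypothetical
# publisher and package names; a sketch only, not values used by this
# module):
#   pub_ranks       -> {"example.com": (1, True, True),
#                       "mirror.example": (2, False, True)}
#   installed fmris -> {"web/server": <PkgFmri for web/server@1.0>}
#   avoids          -> set(["legacy/oldpkg"])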
# Value 'DebugValues' is unsubscriptable;
# pylint: disable=E1136
# check if we're allowed to use the solver
if DebugValues["no_solver"]:
raise RuntimeError("no_solver set, but solver invoked")
#
# Get rank indexed by pub
#
self.__pub_ranks = {}
# To ease cross-publisher (i.e. consolidation) flag days, treat
# adjacent, non-sticky publishers as having the same rank so
# that any of them may be used to satisfy package dependencies.
last_rank = None
last_sticky = None
else:
# any dependencies with
# timestamps were seen; used in
# error output generation
# consideration
# trimmed by pub.
# going to remove
# present in solution by spec.
self.__actcache = {}
# trimming
# bits so we can print something
# reasonable
self.__subphasename = None
self.__inc_list = []
self.__dependents = None
# set of fmris installed in root image; used for origin
# dependencies
self.__root_fmris = None
# set of stems avoided by admin (e.g. --reject, uninstall)
# set of stems avoided by solver due to dependency constraints
# (e.g. all fmris that satisfy group dependency trimmed); this
# intentionally starts empty for every new solver invocation and
# is only stored in image configuration for diagnostic purposes.
# set of obsolete stems
# set of stems we're rejecting
# pkgs that have parent deps
# Internal cache of created fmri objects. Used so that the same
# PkgFmri doesn't need to be created more than once. This isn't
# a weakref dictionary because in two of the four places where
# PkgFmris are created, the name is extracted and the PkgFmri
# object is immediately discarded.
self.__fmridict = {}
# Packages with explicit install action set to true.
self.__expl_install_dict = {}
if self.__parent_pkgs is not None:
(f.pkg_name, f)
for f in self.__parent_pkgs
])
# cache of firmware and cpu dependencies
self.__triggered_ops = {
PKG_OP_UNINSTALL : {
PKG_OP_UPDATE : set(),
PKG_OP_UNINSTALL : set(),
},
}
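# Illustrative reading of the structure above (an assumption based on the
# pkg-actuator docstring later in this module): __triggered_ops maps a
# triggering operation to the operation that should be executed, and then
# to the set of affected FMRIs, e.g.
# self.__triggered_ops[PKG_OP_UNINSTALL][PKG_OP_UPDATE] would be the set
# of FMRIs whose update was triggered by an uninstall.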
# incorp deps
s = "Solver: ["
s += (" Variables: {0:d} Clauses: {1:d} Iterations: "
s += "\nTimings: ["
s += ", ".join([
"{0}: {1: 6.3f}".format(*a)
])
s += "]"
if self.__inc_list:
else:
incs = "None"
return s
"""Discards all solver information except for that needed to
show failure information or to stringify the solver object.
This allows early garbage collection to take place, and should
be performed after a solution is successfully returned."""
self.__installed_dict = {}
self.__publisher = {}
self.__possible_dict = {}
self.__pub_ranks = None
self.__pub_trim = {}
self.__progtrack = None
self.__variant_dict = None
self.__variants = None
self.__actcache = None
self.__trimdone = None
self.__fmri_state = None
self.__start_time = None
self.__dependents = None
self.__fmridict = {}
self.__firmware = None
self.__allowed_downgrades = None
self.__dg_incorp_cache = None
# Value 'DebugValues' is unsubscriptable;
# pylint: disable=E1136
if DebugValues["plan"]:
# Remaining data must be kept.
return rval
self.__trim_dict = None
return rval
"""Bump progress tracker to indicate processing is active."""
assert self.__progitem
"""Add timing records and tickle progress tracker. Ends
previous subphase if ongoing."""
if reset:
if self.__subphasename is not None:
"""Mark the end of a solver subphase, recording time taken."""
self.__start_time = None
self.__subphasename = None
"""Trim any packages we cannot update due to freezes."""
for f, r, _t in existing_freezes:
if r:
"freeze on {0} at version {1}. The "
"reason for the freeze is: {2}"),
include_build=False), r))
else:
"freeze on {0} at version {1}."),
"""Raise a plan exception due to solution errors."""
solver_errors = None
# Value 'DebugValues' is unsubscriptable;
# pylint: disable=E1136
if DebugValues["plan"]:
"""Remove any versions from proposed_dict that are in trim_dict
and raise an exception if no matching version of a proposed
package can be installed at this point."""
if proposed_dict is None:
# Nothing to do.
return
# Used to de-dup errors.
already_seen = set()
ret = []
for name in proposed_dict:
if tv:
continue
# continue processing and accumulate all errors
if ret:
"""Sets the list of package to be removed from the image, the
list of packages to reject, the list of packages to avoid
during the operation, and the list of packages that must not be
removed from the image.
'rejected' is a set of package stems to reject.
'proposed' is an optional set of FMRI objects representing
packages to install or update.
Upon return:
* self.__removal_fmris will contain the list of FMRIs to be
removed from the image due to user request or due to past
bugs that caused the wrong variant to be installed by mistake.
* self.__reject_set will contain the list of packages to avoid
or that were rejected by user request as appropriate."""
if proposed is None:
else:
# remove packages to be installed from avoid sets
] + [
f
for f in self.__installed_fmris
if not self.__trim_nonmatching_variants(f)
])
# trim fmris that user explicitly disallowed
N_("This version rejected by user request"))
f.pkg_name
for f in self.__removal_fmris
)
"""Add the common set of conditional, group, and require
dependencies of proposed packages to the list of package stems
known to be a required part of the solution. This will improve
error messaging if no solution is found."""
if proposed_dict is None:
return
req_dep_names = set()
for name in proposed_dict:
# Find intersection of the set of conditional, group,
# and require dependencies for all proposed versions of
# the proposed package. The result is the set of
# package stems we know will be required to be part of
# the solution.
comm_deps = None
for f in propvers:
f, excludes)
)
if dtype != "group" or
)
if comm_deps is None:
else:
if comm_deps:
"""Update the provided possible set of fmris with the transitive
closure of dependencies that can be satisfied, trimming those
packages that cannot be installed.
'possible' is a set of FMRI objects representing all possible
versions of packages to consider for the operation.
'full_trim' is an optional boolean indicating whether a full
trim of the dependency graph should be performed. This is NOT
required for the solver to find a solution. Trimming is only
needed to reduce the size of clauses and to provide error
messages. This requires multiple passes to determine if the
transitive closure of dependencies can be satisfied. This is
not required for correctness (and it greatly increases runtime).
However, it does greatly improve error messaging for some error
cases.
'filter_explicit' is an optional boolean indicating whether
packages with pkg.depend.explicit-install set to true will be
filtered out.
'proposed_dict' contains user specified FMRI objects indexed by
pkg_name that should be installed or updated within an image.
An example of a case where full_trim will be useful (dueling
incorporations):
Installed:
entire
incorporates java-7-incorporation
Proposed:
osnet-incorporation
incorporates system/resource-mgmt/dynamic-resource-pools
requires new version of java not allowed by installed
java-7-incorporation"""
while True:
if first:
# The first pass will return the transitive
# closure of all dependencies; subsequent passes
# are only done for trimming, so need to update
# the possible set only on first pass.
# Nothing more to trim.
break
# Remove trimmed items from possible_set.
"""Constrain the solver solution so that only one version of
each package can be installed and generate dependency clauses
for possible packages."""
# Generate clauses for only one version of each package, and
# for dependencies for each package. Do so for all possible
# fmris.
# Ensure only one version of a package is installed
# generate dependency clauses for each pkg
da))
proposed_dict=None):
"""Generate initial solver clauses for the proposed packages (if
any) and installed packages.
'proposed' is a set of FMRI objects representing packages to
install or update.
'proposed_dict' contains user specified FMRI objects indexed by
pkg_name that should be installed or updated within an image."""
assert ((proposed is None and proposed_dict is None) or
(proposed is not None and proposed_dict is not None))
if proposed is None:
if proposed_dict is None:
# Generate clauses for proposed and installed pkgs; note that we
# create clauses that require one of the proposed pkgs to work;
# this allows the possible_set to always contain the existing
# pkgs.
for name in proposed_dict:
# we're uninstalling this package
continue
"""Prepares solver for solution creation returning a
ProgressTracker object to be used for the operation."""
# Once solution has been returned or failure has occurred, a new
# solver must be used.
# Check to see if we were invoked by solve_uninstall, in
# which case we don't want to restart what we've already
# started.
if self.__progitem is None:
return pt
"""Returns the solution result to the caller after completing
all necessary solution cleanup."""
"""Raises a PlanCreationException if the proposed operation
would require the removal of installed packages that are not
marked for removal by the proposed operation."""
if proposed is None:
uninstall_fmris = []
# we're uninstalling this package
continue
continue
# no version of this package is allowed
# Used to de-dup errors.
already_seen = set()
ret = []
"if the requested operation is to be performed.")
# First check for solver failures caused by missing parent
# dependencies. We do this because missing parent dependency
# failures cause other cascading failures, so it's better to
# just emit these failures first, have the user fix them, and
# have them re-run the operation, so that we can then provide more
# concise error output about other problems.
for fmri in uninstall_fmris:
# Unused variable; pylint: disable=W0612
if reason_id == _TRIM_PARENT_MISSING:
break
else:
continue
assert res
if ret:
for fmri in uninstall_fmris:
# If no errors returned, that implies that all of the
# reasons the FMRI was rejected aren't interesting.
if res:
if ret:
proposed_dict=None):
"""Raises a PlanCreationException if any further trims would
prevent the installation or update of proposed or
'proposed' is an optional set of FMRI objects representing
packages to install or update.
'proposed_dict' contains user specified FMRIs indexed by
pkg_name that should be installed within an image.
'possible_set' is the set of FMRIs potentially allowed for use
in the proposed operation."""
# make sure all package trims appear
# Ensure required dependencies of proposed packages are flagged
# to improve error messaging when parsing the transitive
# closure of all dependencies.
# First, perform a full trim of the package version space; this
# is normally skipped for performance reasons as it's not
# required for correctness.
# Now try re-asserting that proposed (if any) and installed
# packages are allowed after the trimming; these calls will
# raise an exception if all the proposed or any of the
"""Private logic for solve_install() to process a
PlanCreationException and re-raise as appropriate.
'exp' is the related exception object raised by the solver when
no solution was found.
'inc_list' is a list of package FMRIs representing installed
incorporations that are being maintained.
'proposed_dict' contains user specified FMRIs indexed by
pkg_name that should be installed within an image.
'possible_set' is the set of FMRIs potentially allowed for use
in the proposed operation.
"""
# Before making a guess, apply extra trimming to see if we can
# reject the operation based on changing packages.
# Despite all of the trimming done, we still don't know why the
# solver couldn't find a solution, so make a best effort guess
# at the reason why.
info = []
incs = []
if inc_list:
else:
b for a in proposed_dict.values()
for b in a
if ms:
"proposed packages:"))
for s in ms:
if ms:
"packages due to proposed changes:"))
for s in ms:
if not info: # both error detection methods insufficient.
"unable to compute solution."))
"determine exact cause."))
"obtain more detailed error messages."))
"you wish installed."))
# Value 'DebugValues' is unsubscriptable;
# pylint: disable=E1136
if DebugValues["plan"]:
raise exp
"""Add the set of FMRIs in 'fmris' to the internal dict of
pkg-actuators. 'trigger_op' is the operation which triggered
the pkg change, 'exec_op' is the operation which is supposed to
be executed."""
"not a valid trigger op for pkg actuators".format(
"""Logic to install packages, change variants, and/or change
facets.
Returns FMRIs to be installed / upgraded in system and a new
set of packages to be avoided.
'existing_freezes' is a list of incorp. style FMRIs that
constrain package motion.
'proposed_dict' contains user specified FMRIs indexed by
pkg_name that should be installed within an image.
'new_variants' a dictionary containing variants which are
being updated. (It should not contain existing variants which
are not changing.)
'reject_set' contains user specified package names that should
not be present within the final image. (These packages may or
may not be currently installed.)
'trim_proposed_installed' is a boolean indicating whether the
solver should elide versions of proposed packages older than
those installed from the set of possible solutions. If False,
package downgrades are allowed, but only for installed
packages matching those in the proposed_dict.
'relax_all' indicates if the solver should relax all install
holds, or only install holds specified by proposed packages.
'ignore_inst_parent_deps' indicates if the solver should
ignore parent dependencies for installed packages. This
allows us to modify images with unsatisfied parent
dependencies (i.e., out-of-sync images). Any packaging
operation which needs to guarantee that we have an in sync
image (for example, sync-linked operations, or any recursive
packaging operations) should NOT enable this behavior.
'exact_install' is a flag to indicate whether we treat the
current image as an empty one. Any previously installed
packages that are neither specified in proposed_dict nor a
dependency (require, origin, or parent) of those packages
will be removed.
'installed_dict_tmp' is a dictionary containing the current
installed FMRIs indexed by pkg_name. Used when exact_install
is on.
'insync' is a flag indicating if the image is currently in sync."""
# reject_set is a frozenset(), need to make copy to modify
if f.pkg_name in proposed_dict:
else:
proposed_dict[f.pkg_name] = [f]
for f in \
# re-freeze reject set
if new_variants:
#
# Entire packages can be tagged with variants, thereby
# making those packages uninstallable in certain
# images. So if we're changing variants such that
# some currently installed packages are becoming
# uninstallable, add them to the removal package set.
#
for f in self.__installed_fmris:
d = self.__get_variant_dict(f)
for k in new_variants:
if k in d and \
new_variants[k] not in d[k]:
# proposed_dict already contains publisher selection logic,
# so prevent any further trimming of named packages based
# on publisher if they are installed.
for name in proposed_dict:
else:
# Determine which packages are to be removed, rejected, and
# avoided and also determine which ones must not be removed
# during the operation.
# find list of incorps we don't let change as a side effect of
# other changes; exclude any specified on command line if the
# proposed version is already installed and is not being removed
# translate proposed_dict into a set
if relax_all:
else:
relax_pkgs = set(
for name in proposed_pkgs
if not any(
f for f in proposed_dict[name]
f in (self.__installed_fmris -
)
)
relax_pkgs |= \
# generate set of possible fmris
#
# ensure existing pkgs stay installed; explicitly add in
# installed fmris in case publisher change has occurred and
# some pkgs aren't part of new publisher
possible_set = set()
# Add the proposed fmris, populate self.__expl_install_dict and
# check for allowed downgrades.
for f in flist:
self.__allowed_downgrades |= \
if self.__is_explicit_install(f):
# For linked image sync we have to analyze all pkgs of the
# possible_set because no proposed pkgs will be given. However,
# that takes more time so only do this for syncs. The relax_all
# flag is an indicator of a sync operation.
for f in possible_set:
self.__allowed_downgrades |= \
# If requested, trim any proposed fmris older than those of
# corresponding installed packages.
# Because we have introduced exact-install, where
# self.__installed_fmris will be empty, we need to look up the
# full installed dictionary stored in installed_dict_tmp in
# order to prevent downgrading.
if exact_install:
else:
for f in candidate_fmris:
if not trim_proposed_installed and \
f.pkg_name in proposed_dict:
# Don't trim versions if newest version in
# proposed dict is older than installed
# version.
# Assume downgrade is intentional.
continue
if valid_trigger:
continue
self.__trim_older(f)
# trim fmris we excluded via proposed_fmris
for name in proposed_dict:
N_("This version excluded by specified "
"installation version"))
# trim packages excluded by incorps in proposed.
# Trim packages with unsatisfied parent dependencies. Then
# if the current image is out of sync, for packages with
# satisfied parent dependencies (which will include
# incorporations), call __trim_recursive_incorps() to trim out
# more packages that are disallowed due to the synced
# incorporations.
if self.__is_child():
for f in possible_set.copy():
if not self.__trim_nonmatching_parents(f,
continue
for name in possible_linked:
# calling __trim_recursive_incorps can be
# expensive so don't call it unnecessarily.
if name in proposed_dict:
possible_linked[name] -= \
if not possible_linked[name]:
continue
del possible_linked
# now trim pkgs we cannot update due to maintained
# incorporations
"incorporation {0}"), (i.get_short_fmri(
for f in flist:
# dotrim=False here as we only want to trim
# packages that don't satisfy the incorporation.
# now trim any pkgs we cannot update due to freezes
# elide any proposed versions that don't match variants (arch
# usually)
for name in proposed_dict:
# remove any versions from proposed_dict that are in trim_dict
try:
# One or more proposed packages have been rejected.
# Ensure required dependencies of proposed packages are flagged
# to improve error messaging when parsing the transitive
# closure of all dependencies.
# Update the set of possible fmris with the transitive closure
# of all dependencies.
# trim any non-matching variants, origins or parents
for f in possible_set:
continue
if not self.__trim_nonmatching_variants(f):
continue
# remove all trimmed fmris from consideration
# remove any versions from proposed_dict that are in trim_dict
# as trim dict has been updated w/ missing dependencies
try:
# One or more proposed packages have been rejected.
#
# Generate ids, possible_dict for clause generation. Prepare
# the solver for invocation.
#
# Constrain the solution so that only one version of each
# package can be installed.
# Add proposed and installed packages to solver.
try:
# One or more installed packages can't be retained or
# upgraded.
# save a solver instance so we can come back here
# this is where errors happen...
try:
# no solution can be found.
# we have a solution that works... attempt to
# reduce collateral damage to other packages
# while still keeping command line pkgs at their
# optimum level
# fix the fmris that were specified on the cmd line
# at their optimum (newest) level along with the
# new dependencies, but try to avoid upgrading
# already installed pkgs or adding unneeded new pkgs.
for fmri in saved_solution:
# save context
# Now we have the oldest possible original fmris,
# but we may have some that are not original.
# Since we want to move as far forward as possible
# when we have to move a package, fix the originals
# and drive forward again w/ the remainder.
reject_set=frozenset()):
"""Logic to update all packages within an image to the latest
versions possible.
Returns FMRIs to be installed / upgraded in system and a new
set of packages to be avoided.
'existing_freezes' is a list of incorp. style FMRIs that
constrain pkg motion
'reject_set' contains user specified FMRIs that should not be
present within the final image. (These packages may or may
not be currently installed.)
"""
# Determine which packages are to be removed, rejected, and
# avoided and also determine which ones must not be removed
# during the operation.
# generate set of possible fmris
possible_set = set()
if not matching: # disabled publisher...
for f in possible_set:
self.__allowed_downgrades |= \
# trim fmris we cannot install because they're older
for f in self.__installed_fmris:
self.__trim_older(f)
# now trim any pkgs we cannot update due to freezes
# Trim packages with unsatisfied parent dependencies.
if self.__is_child():
for f in possible_set.copy():
if not self.__trim_nonmatching_parents(f,
excludes):
# Update the set of possible FMRIs with the transitive closure
# of all dependencies.
# trim any non-matching origins or parents
for f in possible_set:
if self.__trim_nonmatching_variants(f):
# remove all trimmed fmris from consideration
#
# Generate ids, possible_dict for clause generation. Prepare
# the solver for invocation.
#
# Constrain the solution so that only one version of each
# package can be installed.
# Add installed packages to solver.
try:
except api_errors.PlanCreationException:
# Attempt a full trim to see if we can raise a sensible
# error. If not, re-raise.
raise
try:
except api_errors.PlanCreationException:
# No solution can be found; attempt a full trim to see
# if we can raise a sensible error. If not, re-raise.
raise
if self.__fmri_is_obsolete(f):
# If solution doesn't match installed set of packages, then an
# upgrade solution was found (heuristic):
# If there are no installed, upgradeable incorporations,
# then assume that no updates were available. Also if
# we're a linked image child we may not be able to
# update to the latest available incorporations due to
# parent constraints, so don't generate an error.
# Before making a guess, apply extra trimming to see if we can
# reject the operation based on changing packages.
# Despite all of the trimming done, we still don't know why the
# solver couldn't find a solution, so make a best-effort guess
# at the reason why.
info = []
"versions."))
"packages are installed."))
" {0}".format(f)
))
if ms:
"cannot update to the latest version:"))
for s in ms:
else:
"determine the cause."))
"obtain more detailed error messages."))
"""Compute changes needed for uninstall"""
# generate list of installed pkgs w/ possible renames removed to
# forestall failing removal due to presence of unneeded renamed
# pkg
triggered_set = set()
triggered_set.add(f)
# check for dependents
for pfmri in proposed_removals:
# Check if any of the dependents are going to be updated
# to a different version which might not have the same
# dependency constraints. If so, remove from dependents
# list.
# Example:
# A@1 depends on B
# A@2 does not depend on B
#
# A@1 is currently installed, B is requested for removal
# -> not allowed
# pkg actuator updates A to 2
# -> now removal of B is allowed
candidates = dict(
(tf, f)
for f in dependents
for tf in triggered_set
)
for tf in candidates:
excludes):
continue
break
if remove:
if dependents:
# Run it through the solver; with more complex dependencies
# we're going to be out of luck without it.
"""Update avoid sets w/ any missing packages (due to reject).
Remove obsolete packages from solution. Keep track of which
obsolete packages have group dependencies so that verification
of group packages w/ obsolete members works."""
tracked_stems = set()
continue
for t in a.attrlist("fmri"):
try:
except KeyError:
# Add stems omitted by solution and explicitly rejected.
for f in solution:
if self.__fmri_is_obsolete(f):
# Add stems omitted by solution but not explicitly rejected, not
# previously avoided, and not avoided due to obsoletion.
return ret
"""Duplicate current current solver state and return it."""
return (self.__addclause_failure,
"""Set the current solver state to the previously saved one"""
"""Perform iterative solution; try for newest pkgs unless
older=True"""
solution_vector = []
eliminated = set()
break
if not solution_vector:
break
# prevent the selection of any older pkgs except for
# those that are part of the set of allowed downgrades;
for fid in solution_vector:
if not older:
# without subtraction of allowed
# downgrades, an initial solution will
# exclude any solutions containing
# earlier versions of downgradeable
# packages
else:
for f in remove:
# prevent the selection of this exact combo;
# permit [] solution
if not self.__iterations:
return solution
"""Return solution vector from solver"""
return frozenset([
])
"""Assign __possible_dict of possible package FMRIs by pkg stem
and mark trimming complete."""
# generate dictionary of possible pkg fmris by pkg stem
for f in possible_set:
""" give a set of possible fmris, assign ids"""
# assign clause numbers (ids) to possible pkgs
pkgid = 1
pkgid += 1
"""Translate fmri to variable number (id)"""
"""Translate variable number (id) to fmris"""
"""Cache for catalog entries; helps performance"""
t
]
""" return the list of fmris in catalog for this pkg name"""
if self.__trimdone:
return [
f
for f in tp[1]
]
"""Returns tuple of set of fmris that are matched within
CONSTRAINT.NONE of specified version and set of remaining
fmris."""
"""Underlying impl. of other comb routines"""
# determine if the data is cacheable or cached:
# use frozensets so callers don't inadvertently update
# these sets (which may be cached).
f
for f in all_fmris
])
# if we haven't finished trimming, don't cache this
if not self.__trimdone:
# cache the result
"""Returns tuple of set of fmris that are older than
specified version and set of remaining fmris."""
if not dotrim:
# we're going to return the older packages, so we need
# to make sure that any trimmed packages are removed
# from the matching set and added to the non-matching
# ones.
trimmed_older = set([
f
for f in older
])
"""Returns tuple of set of fmris that are match within
CONSTRAINT.AUTO of specified version and set of remaining
fmris."""
"""load fmri state (obsolete == True, renamed == True)"""
try:
if a.name == "set" and \
"pkg.obsolete"]
])
except api_errors.InvalidPackageErrors:
# Trim package entries that have unparseable action data
# so that they can be filtered out later.
return
"""check to see if fmri is obsolete"""
"""check to see if fmri is renamed"""
"""Return list of actions of type 'name' for this 'fmri' in
Catalog.DEPENDENCY section."""
try:
except KeyError:
pass
try:
acts = [
a
]
if name == "depend":
for a in acts:
continue
raise api_errors.InvalidPackageErrors([
"Unknown dependency type {0}".
return acts
except api_errors.InvalidPackageErrors:
if not trim_invalid:
raise
# Trim package entries that have unparseable action
# data so that they can be filtered out later.
return []
"""Return list of all dependency actions for this fmri."""
"""Return list of all set actions for this fmri in
Catalog.DEPENDENCY section."""
"""Return dictionary of variants suppported by fmri"""
try:
except api_errors.InvalidPackageErrors:
# Trim package entries that have unparseable action data
# so that they can be filtered out later.
"""check if given fmri has explicit install actions."""
return True
return False
"""Check packages which have 'pkg.depend.explicit-install'
action set to true, and prepare to filter."""
# Filter out fmris with 'pkg.depend.explicit-install' set to
# true and not present in self.__expl_install_dict or
# self.__installed_dict.
else:
return will_filter
proposed_dict=None):
"""return set of all fmris the set of specified fmris could
depend on, while trimming those packages that cannot be
installed"""
# Use a copy of the set provided by the caller to prevent
# unexpected modification!
already_processed = set()
while needs_processing:
# Trim filtered packages.
if filter_explicit and \
"only be installed if explicitly "
"requested"), (fmri,))
continue
return already_processed
"""return set of direct (possible) dependencies of this pkg;
trim those packages whose dependencies cannot be satisfied"""
try:
return set([
f
# check most common ones first; what is checked
# here is a matter of optimization / messaging, not
# correctness.
(full_trim and (
])
except DependencyException as e:
return set([])
"""Return fmri list (which must be self-complete) with all
renamed fmris that have no other fmris depending on them
removed"""
# figure out which have been renamed
renamed_fmris = set([
])
# return if nothing has been renamed
if not renamed_fmris:
)
# figure out which renamed fmris have dependencies; compute
# transitively so we can handle multiple renames
already_processed = set()
while needs_processing:
("incorporate", "optional", "origin"):
try:
except KeyError:
tmp = \
if name not in fmris_by_name:
continue
# since new_fmri will not be
# treated as renamed, make sure
# we check any dependencies it
# has
if new_fmri not in \
"""return set of installed fmris that have require dependencies
on specified installed fmri"""
if self.__dependents is None:
self.__dependents = {}
for f in self.__installed_fmris:
excludes):
continue
"""trim packages affected by incorporations"""
if reason_id == _TRIM_PROPOSED_INC:
"Excluded by proposed incorporation '{0}'")
elif reason_id == _TRIM_SYNCED_INC:
"Excluded by synced parent incorporation '{0}'")
else:
raise AssertionError(
while work:
for name in d:
"""Given a list of fmris, one of which must be present, produce
a dictionary indexed by package name, which contains a tuple
of two sets (matching fmris, nonmatching)"""
dict_list = [
for f in fmri_list
]
# The following ignores constraints that appear in only some of
# the versions. This also handles obsoletions & renames.
return dict(
(k,
for d in dict_list)),
for d in dict_list))))
for k in all_keys
)
"""Given a fmri with incorporation dependencies, produce a
dictionary containing (matching, non-matching fmris),
indexed by pkg name. Note that some fmris may be
incorporated more than once at different levels of
specificity"""
continue
# Collect all incorp. dependencies affecting
# a package in a list. Note that it is
# possible for both matching and non-matching
# sets to be NULL, and we'll need at least
# one item in the list for reduce to work.
# For each of the packages constrained, combine multiple
# incorporation dependencies. Matches are intersected,
# non-matches form a union.
return ret
"""Returns (matching, nonmatching) fmris for given list of group
dependencies."""
matching = []
nonmatching = []
for f in fmris:
# remove version explicitly; don't
# modify cached fmri
if f.version is not None:
else:
fmri = f
"""Return tuple of (disallowed fmri list, allowed fmri list,
conditional_list, dependency_type, required)"""
fmris = []
try:
except KeyError:
if not self.__depend_ts:
# Include timestamp in all error
# output for dependencies.
# true if match is required for containing pkg
# if this dependency has conditional fmris
conditional = None
# true if obsolete pkgs satisfy this dependency
if dtype == "require":
matching, nonmatching = \
elif dtype == "optional":
matching, nonmatching = \
elif dtype == "exclude":
matching, nonmatching = \
elif dtype == "incorporate":
matching, nonmatching = \
# Track packages that deliver incorporate deps.
elif dtype == "conditional":
# Required is only really helpful for solver error
# messaging. The only time we know that this dependency
# is required is when the predicate package must be part
# of the solution.
matching, nonmatching = \
elif dtype == "require-any":
matching = []
nonmatching = []
for f in fmris:
elif dtype == "parent":
# Parent dependency fmris must exist outside of the
# current image, so we don't report any new matching
# or nonmatching requirements for the solver.
elif dtype == "origin":
matching, nonmatching = \
# Determine potential fmris for matching.
potential = [
]
# Determine matching fmris.
if required:
matching, nonmatching = \
if not matching and not nonmatching:
# No possible stems at all? Ignore
# dependency.
# If more than one stem matched, prefer stems for which
# no obsoletion exists.
for f in matching:
if self.__fmri_is_obsolete(f):
# If not all matching stems had an obsolete
# version, remove the obsolete fmris from
# consideration. This makes the assumption that
# at least one of the remaining, non-obsolete
# stems will be installable. If that is not
# true, the solver may not find anything to do,
# or may not find a solution if the system is
# overly constrained. This is believed
# unlikely, so seems a reasonable compromise.
# In that scenario, a client can move forward by
# using --reject to remove the related group
# dependencies.
else: # only way this happens is if a new dependency type is not fully handled
raise api_errors.InvalidPackageErrors([
# check if we're throwing exceptions and we didn't find any
# matches on a required package
dtype == "incorporate":
# This is an incorporation package that will not be
# removed, so if dependencies can't be satisfied, try
# again with dotrim=False to ignore rejections due to
# proposed packages.
# Neither build nor publisher is interesting for dependencies.
# we're going to toss an exception
if dtype == "exclude":
# If we reach this point, we know that a required
# package (already installed or proposed) was excluded.
# Determine if excluded package is already installed.
for f in nonmatching:
if f in self.__installed_fmris:
break
# The exclude dependency doesn't allow the
# version of the package that is already
# installed.
raise DependencyException(
(N_("Package contains 'exclude' dependency "
"{0} on installed package"), (fstr,)))
# The exclude dependency doesn't allow any
# version of the package that is proposed.
raise DependencyException(
(N_("Package contains 'exclude' dependency "
"{0} on proposed package"), (fstr,)))
else:
# All versions of the package allowed by the
# exclude dependency were trimmed by other
# dependencies. If changed, update _fmri_errors
# _TRIM_DEP_TRIMMED.
raise DependencyException(
(N_("No version allowed by 'exclude' "
"dependency {0} could be installed"),
# not reached
elif dtype == "incorporate":
matching, nonmatching = \
# check if allowing obsolete packages helps
elif not obsolete_ok:
# see if allowing obsolete pkgs gets us some matches
matching, nonmatching = \
else:
matching = []
nonmatching = []
for f in fmris:
if matching:
raise DependencyException(
(N_("All acceptable versions of "
"'{0}' dependency on {1} are "
else:
for f in fmris
])
raise DependencyException(
(N_("All acceptable versions of "
"'{0}' dependencies on {1} are "
# something else is wrong
else:
# try w/o trimming anything
if not matching:
(N_("No version for '{0}' dependency on {1} can "
# If this is a dependency of a proposed package for which only
# one version is possible, then mark all other versions as
# rejected by this package. This ensures that other proposed
# packages will be included in error messaging if their
# dependencies can only be satisfied if this one is not
# proposed.
(N_("Rejected by '{0}' dependency in proposed "
# If changed, update _fmri_errors _TRIM_DEP_TRIMMED.
(N_("No version matching '{0}' dependency {1} can be "
"""If we're a child image then we need to relax packages
that are dependent upon themselves in the parent image. This
is necessary to keep those packages in sync."""
relax_pkgs = set()
# check if we're a child image.
if not self.__is_child():
return relax_pkgs
# if we're ignoring parent dependencies there is no reason to
# relax install-holds in packages constrained by those
# dependencies.
return relax_pkgs
for f in self.__installed_fmris:
continue
break
return relax_pkgs
""" Returns a list of strings describing why fmris cannot
be installed, or returns an empty list if installation
is possible. """
ret = []
already_processed = set()
already_seen = set()
while needs_processing:
return ret
"""Returns a list of strings for all FMRIs evaluated by the
solver explaining why they were rejected. (All packages
found in solver's trim database.)"""
# At a minimum, a solve_*() method must have been called first.
# Value 'DebugValues' is unsubscriptable;
# pylint: disable=E1136
assert DebugValues["plan"]
"""Generate list of strings describing why currently
installed packages cannot be installed, or an empty list"""
# Used to de-dup errors.
already_seen = set()
ret = []
if matching:
continue
# no matches when disallowed packages are excluded
return ret
"""Given a list of FMRIs, return indented strings indicating why
they were rejected."""
ret = []
if omit is None:
fmri_reasons = []
# If None was returned, that implies that all of the
# reasons the FMRI was rejected aren't interesting.
if res is not None:
last_run = []
def collapse_fmris():
"""Collapse a range of FMRIs into format:
first_fmri
to
last_fmri
...based on verbose state."""
if last_run:
elif (not self.__depend_ts and
# If timestamps are not being displayed
# and the last FMRI is the same as the
# first in the range then we only need
# to show the first.
pass
else:
last_run[::] = []
last_reason = None
if reason == last_reason:
if not last_run:
continue
else: # ends run
if last_reason:
if last_reason:
return ret
"""return a list of strings w/ indents why this fmri is not
suitable"""
if already_seen is None:
already_seen = set()
if omit is None:
# Exclude build and timestamp for brevity.
else:
# Include timestamp for clarity if any dependency
# included a timestamp; exclude build for brevity.
tag = _("Reason:")
if fmri in already_seen:
return
# note to translators: 'indent' will be a series of
# whitespaces.
reason = _("{indent} {tag} [already rejected; see "
if not verbose:
# By default, omit packages from errors that were only
# rejected due to a newer version being installed, or
# because they didn't match user-specified input. It's
# tempting to omit _TRIM_REJECT here as well, but that
# leads to some very mysterious errors for
# administrators if the only reason an operation failed
# is because a required dependency was rejected.
if reason_id not in (_TRIM_INSTALLED_NEWER,
break
else:
return
ms = []
if not verbose:
if reason_id in (_TRIM_INSTALLED_NEWER,
continue
else:
if reason in already_seen:
# If we've already explained why something was
# rejected before, skip it.
continue
# Use the reason text and not the id, as the text is
# specific to a particular rejection.
# By default, don't include error output for
# dependencies on incorporation packages that don't
# specify a version since any version-specific
# dependencies will have caused a rejection elsewhere.
if (not verbose and
reason_id == _TRIM_DEP_TRIMMED and
# Assumes fstr does not include
# publisher or scheme.
continue
# Add the reasons why each package version that
# satisfied a dependency was rejected.
f
if f not in already_seen
],
indent + " ",
)
if res:
"""Private helper function used by __generate_dependency_errors
to determine why packages were rejected."""
needs_processing = set()
try:
except DependencyException as e:
s = _("No suitable version of required package "
set())
return [], needs_processing
# clause generation routines
"""Return clauses to implement this dependency"""
if not m:
return [] # no clauses needed; pkg avoided
else:
elif dtype == "conditional":
cond)
# handled by trimming proposed set, not by solver
return []
else:
"""Return a list of clauses that specifies only one or zero
of the fmris in fmri_list may be installed. This prevents
multiple versions of the same package from being installed
at once"""
# pairwise negation
# if a has 4 versions, we need
# [
# [-a.1, -a.2],
# [-a.1, -a.3],
# [-a.1, -a.4],
# [-a.2, -a.3],
# [-a.2, -a.4],
# [-a.3, -a.4]
# ]
# n*(n-1)/2 algorithms suck
return []
return [
for i in range(l-1)
for j in range(i+1, l)
]
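# A worked example of the pairwise encoding above, assuming the four
# versions map to (hypothetical) solver variable ids 1..4:
#   [[-1, -2], [-1, -3], [-1, -4], [-2, -3], [-2, -4], [-3, -4]]
# i.e. for every pair of versions at least one must be absent, so at
# most one version of the package can be installed.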
"""generate clause for require dependency: if fmri is
installed, one of fmri_list is required"""
# if a.1 requires b.2, b.3 or b.4:
# !a.1 | b.2 | b.3 | b.4
return [
]
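# A worked example of the require clause above, assuming (hypothetical)
# ids a.1=1, b.2=2, b.3=3, b.4=4: the single generated clause is
#   [[-1, 2, 3, 4]]
# which is violated only when a.1 is installed and none of b.2-b.4 are.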
"""Generate clauses for conditional dependency: if
fmri is installed and one of conditional_fmri_list is installed,
one of fmri list is required"""
# if a.1 requires c.2, c.3, c.4 if b.2 or newer is installed:
# !a.1 | !b.2 | c.2 | c.3 | c.4
# !a.1 | !b.3 | c.2 | c.3 | c.4
return [
for c in conditional_fmri_list
]
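# A worked example of the conditional clauses above, assuming
# (hypothetical) ids a.1=1, b.2=2, b.3=3, c.2=4, c.3=5, c.4=6: one
# clause is emitted per conditional fmri,
#   [[-1, -2, 4, 5, 6], [-1, -3, 4, 5, 6]]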
""" generate clauses for optional, incorporate and
exclude dependencies to exclude non-acceptable versions"""
# if present, fmri must match ok list
# if a.1 optionally requires b.3:
# [
# [!a.1 | !b.1],
# [!a.1 | !b.2]
# ]
return [
for f in non_matching_fmri_list
]
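# A worked example of the negation clauses above, assuming (hypothetical)
# ids a.1=1, b.1=2, b.2=3: one two-literal clause per non-acceptable
# version,
#   [[-1, -2], [-1, -3]]
# so installing a.1 forbids b.1 and b.2 while still allowing b.3.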
"""generate clauses such that at least one of the fmri_list
members gets installed"""
# If a has four versions,
# a.1|a.2|a.3|a.4
# plus highlander clauses
assert fmri_list, "Empty list of which one is required"
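# A worked example of the "at least one" requirement described above,
# assuming the four versions of 'a' map to (hypothetical) ids 1..4: the
# clause list would be [[1, 2, 3, 4]], i.e. at least one version must be
# installed; the pairwise clauses above then limit it to exactly one.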
"""add list of clause lists to solver"""
for c in clauses:
try:
except TypeError:
raise TypeError(_("List of integers, not {0}, "
"expected").format(c))
"""Returns the list of installed packages that are incorporated
by packages, delivering an install-hold, and that do not have an
install-hold but incorporate packages.
'install_holds' is a dict of installed package stems indicating
the pkg.depend.install-hold delivered by the package that are
not being removed.
'pkg_cons' is a dict of installed package fmris and the
incorporate constraints they deliver.
'inc_set' is a list of packages that incorporate other packages
and deliver install-hold actions. It acts as the starting point
where we fan out to find "child" packages that incorporate other
packages."""
while unprocessed:
# This package will be removed, so
# nothing to do.
continue
# If this package incorporates other
# packages and does not deliver an
# install-hold, then consider it a
# 'child' hold.
# Find all incorporation constraints that result
# in only one possible match. If there is only
# one possible match for an incorporation
# constraint then that package will not be
# upgraded and should be checked for
# incorporation constraints.
# Already handled.
continue
else:
# Track which constraints have
# already been processed
# separately from which
# package FMRIs have been
# processed to avoid (unlikely)
# collision.
return incorps
"""Return the latest version of installed upgradeable
incorporations w/ install holds"""
installed_incs = []
"pkg.depend.install-hold"):
ret = []
for f in installed_incs:
if latest != f:
return ret
"""Return the list of incorporations that are to not to change
during this install operation, and the lists of fmris they
constrain."""
pkg_cons = {}
install_holds = {}
# Determine installed packages that contain incorporation
# dependencies, those packages that are depended on by explicit
# version, and those that have pkg.depend.install-hold values.
if d.name == "depend":
fmris = []
try:
fl]
except KeyError:
fl)
)
"pkg.depend.install-hold"):
install_holds[f.pkg_name] = \
d.attrs["value"]
# find install holds that appear on command line and are thus
# relaxed
relaxed_holds = set([
for name in proposed_pkgs
if name in install_holds
])
# add any other install holds that are relaxed because they have
# values that start w/ the relaxed ones...
relaxed_holds |= set([
])
# Expand the list of install holds to include packages that are
# incorporated by packages delivering an install-hold and that
# do not have an install-hold, but incorporate packages.
)
)
for child_hold in child_holds:
# versioned_dependents contains all the packages that are
# depended on w/ an explicit version. We now modify this list so
# that it does not contain any packages w/ install_holds, unless
# those holds were relaxed.
versioned_dependents -= set([
if hold_value not in relaxed_holds
])
# Build the list of fmris that 1) contain incorp. dependencies
# 2) are not in the set of versioned_dependents and 3) do not
# explicitly appear on the install command line.
ret = [
if pkg_name not in proposed_pkgs
]
# For each incorporation above that will not change, return a
# list of the fmris that incorporation constrains
con_lists = [
]
"""Record that a given package stem has been trimmed based on
publisher."""
"""Given a list of fmris for various versions of
a package from various publishers, trim those
that are not suitable"""
return
"'{0}' is from sticky publisher '{1}'."),
else:
"than specified one.")
else:
# order by pub_rank; choose highest possible tier for
# pkgs; guard against unconfigured publishers in known
# catalog
for p in pubs_found
])
acceptable_pubs = [
r[1]
for r in ranked
]
if acceptable_pubs:
else:
"in search order")
# allow installed packages to co-exist to meet dependency reqs
# in case the new publisher is not a proper superset of the
# original. avoid multiple publishers w/ the exact same fmri to
# prevent thrashing in the solver due to many equiv. solutions.
f
for f in fmri_list
if (f.publisher not in acceptable_pubs and
# routines to manage the trim dictionary
# trim dictionary contains the reasons an fmri was rejected for
# consideration. reason is a tuple of a string w/ format chars and args,
# or just a string. fmri_adds are any fmris that caused the rejection.
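# Illustrative (assumed) example of a reason value: either a plain
# string, or a tuple such as
#   (N_("Newer version {0} is already installed"), (fmri_string,))
# where the format arguments are applied when the error text is
# rendered for the user.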
"""Remove specified fmri(s) from consideration for specified
reason."""
"""Trim any fmris older than this one"""
"""Trim packages that don't support image architecture or other
image variant."""
reason = ""
if vd == "variant.arch":
"image architecture")
else:
"variant {0}={1} but doesn't "
"support this image's {0}={2}"),
return reason == ""
"""Private helper function for __trim_nonmatching_parents that
trims any pkg_fmri that matches a parent dependency and that is
not installed in the parent image, that is from a different
publisher than the parent image, or that is a different version
than the parent image."""
# exact fmri installed in parent
return True
# package is not installed in parent
else:
return False
# package is from a different publisher in the parent
"a different publisher: {0}"), (pf,))
else:
"different publisher: {0}"), (pf,))
return False
# parent dependency is satisfied
return True
# version mismatch
"newer version: {0}"), (pf,))
else:
"newer version: {0}"), (pf,))
else:
"version of package: {0}"), (pf,))
else:
"version of package: {0}"), (pf,))
return False
"""Trim any pkg_fmri that contains a parent dependency that
is not satisfied by the parent image."""
# the fmri for the package should include a publisher
# if we're not a child then ignore "parent" dependencies.
if not self.__is_child():
return True
# check if we're ignoring parent dependencies for installed
# packages.
if ignore_inst_parent_deps and \
return True
# Find all the fmris that we depend on in our parent.
# Use a set() to eliminate any dups.
])
if not pkg_deps:
# no parent dependencies.
return True
for f in pkg_deps:
fmri = f
# check if this package depends on itself.
return allowed
"""Trim any fmri that contains a origin dependency that is
not satisfied by the current image or root-image"""
continue
# Are firmware (driver) updates needed?
if not fw_ok:
return False
continue
# Check that the CPU is supported in the
# new root-image
if not cpu_ok:
return False
continue
if self.__root_fmris is None:
(f.pkg_name, f)
for f in img.gen_installed_pkgs()
])
"is too old for origin " "dependency {0}"),
(req_fmri,))
else:
# Always use the full installed dict for origin
# dependency.
if exact_install:
else:
"being upgraded is too old for origin "
"dependency {0}"), (req_fmri,))
# assumption is that for root-image, publishers align;
# otherwise these sorts of cross-environment
# dependencies don't work well
continue
return False
return True
"""Indicate given package FMRI is unsupported."""
N_("Package contains invalid or unsupported actions"))
"""Get all incorporated pkgs for the given 'fmri' whose versions
are older than what is currently installed in the image."""
if not candidates:
candidates = set()
return candidates
if depth > 10:
# Safeguard against circular dependencies.
# If it happens, just end the recursion tree.
return candidates
# Get all matching incorporated packages for this fmri; this is
# a list of sets, where each set represents all of the fmris
# matching the incorporate dependency for a single package stem.
#
# Only add potential FMRIs to the list of allowed downgrades if
# the currently installed version is not allowed by the related
# incorporate dependency. This prevents two undesirable
# behaviours:
#
# - downgrades when a package is no longer incorporated in
# a newer version of an incorporating package and an older
# version is otherwise allowed
# - upgrades of packages that are no longer incorporated
# in a newer version of an incorporating package and a newer
# version is otherwise allowed
if (not match or
continue
continue
# Ignore pkgs incorporated at a higher or same
# version.
continue
# Do not allow implicit publisher switches.
continue
# Do not allow pkgs marked for removal.
continue
# Do not allow pkgs with install-holds, but
# filter out child holds.
for ha in [
"pkg.depend.install-hold"
]:
for h in install_holds:
h):
# This is a child hold
# of an incorporating
# pkg, ignore.
break
if not install_hold:
break
if install_hold:
continue
# Check if this pkg has incorporate deps of its
# own.
return candidates
"""Find packages which have lower versions than installed but
are incorporated by a package in the proposed list."""
install_holds = set([
])
# Get all pkgs which are incorporated by 'fmri',
# including nested incorps.
return candidates
"""Return fmri_list trimmed of any fmris in self.__trim_dict"""
return [
f
for f in fmri_list
]
"""Return True if this image is a linked image child."""
return self.__parent_pkgs is not None
"""Return True if image is a nonglobal zone"""
'nonglobal'
else:
return False