author     Otavio Pontes <otaviobp@gmail.com>	2010-08-10 00:20:43 +0000
committer  Otavio Pontes <otaviobp@gmail.com>	2010-08-10 00:20:43 +0000
commit     d37836e51ef63963dc56a30eedae521b29cbadd9 (patch)
tree       20955e89ab24ad94ed94ddb6309b2a7fdf5d5208
parent     Revert "Fixing portage bug. If more than one line in package.license is used ... (diff)
parent     Fix /etc/portage/package.keywords handling so that more specific (diff)
download   portage-multirepo-d37836e51ef63963dc56a30eedae521b29cbadd9.tar.gz
           portage-multirepo-d37836e51ef63963dc56a30eedae521b29cbadd9.tar.bz2
           portage-multirepo-d37836e51ef63963dc56a30eedae521b29cbadd9.zip
Merge branch 'master' of git://git.overlays.gentoo.org/proj/portage
Conflicts:
	pym/_emerge/depgraph.py
	pym/portage/package/ebuild/config.py
	pym/portage/package/ebuild/getmaskingstatus.py
-rwxr-xr-x  bin/ebuild.sh                                                |   5
-rwxr-xr-x  bin/misc-functions.sh                                        |  24
-rwxr-xr-x  bin/repoman                                                  |  20
-rw-r--r--  man/ebuild.5                                                 |  10
-rw-r--r--  man/emerge.1                                                 |   9
-rw-r--r--  man/repoman.1                                                |   7
-rw-r--r--  pym/_emerge/PackageUninstall.py                              |   4
-rw-r--r--  pym/_emerge/PipeReader.py                                    |   2
-rw-r--r--  pym/_emerge/Scheduler.py                                     |   1
-rw-r--r--  pym/_emerge/SpawnProcess.py                                  |   3
-rw-r--r--  pym/_emerge/actions.py                                       |  25
-rw-r--r--  pym/_emerge/depgraph.py                                      | 378
-rw-r--r--  pym/_emerge/help.py                                          |  11
-rw-r--r--  pym/_emerge/main.py                                          |  13
-rw-r--r--  pym/_emerge/unmerge.py                                       |   4
-rw-r--r--  pym/portage/dbapi/vartree.py                                 | 110
-rw-r--r--  pym/portage/dep/__init__.py                                  |  16
-rw-r--r--  pym/portage/dep/dep_check.py                                 |   6
-rw-r--r--  pym/portage/package/ebuild/config.py                         | 158
-rw-r--r--  pym/portage/package/ebuild/doebuild.py                       |   2
-rw-r--r--  pym/portage/package/ebuild/getmaskingstatus.py               |  37
-rw-r--r--  pym/portage/tests/__init__.py                                |  27
-rw-r--r--  pym/portage/tests/dep/testAtom.py                            |  50
-rw-r--r--  pym/portage/tests/ebuild/test_array_fromfile_eof.py          |   2
-rw-r--r--  pym/portage/tests/ebuild/test_spawn.py                       |   8
-rw-r--r--  pym/portage/tests/lazyimport/__init__.py                     |   0
-rw-r--r--  pym/portage/tests/lazyimport/__test__                        |   0
-rw-r--r--  pym/portage/tests/lazyimport/test_preload_portage_submodules.py |  16
-rw-r--r--  pym/portage/tests/resolver/ResolverPlayground.py             | 227
-rw-r--r--  pym/portage/tests/resolver/__init__.py                       |   2
-rw-r--r--  pym/portage/tests/resolver/__test__                          |   0
-rw-r--r--  pym/portage/tests/resolver/test_autounmask.py                | 116
-rw-r--r--  pym/portage/tests/resolver/test_eapi.py                      | 102
-rw-r--r--  pym/portage/tests/resolver/test_simple.py                    |  34
-rwxr-xr-x  pym/portage/tests/runTests                                   |   7
-rw-r--r--  pym/portage/util/_pty.py                                     |   5
-rw-r--r--  pym/repoman/checks.py                                        |  55
-rw-r--r--  pym/repoman/errors.py                                        |   1
38 files changed, 1283 insertions, 214 deletions
diff --git a/bin/ebuild.sh b/bin/ebuild.sh
index 9c599c01..2affc923 100755
--- a/bin/ebuild.sh
+++ b/bin/ebuild.sh
@@ -479,6 +479,11 @@ econf() {
: ${ECONF_SOURCE:=.}
if [ -x "${ECONF_SOURCE}/configure" ]; then
+ if [[ -n $CONFIG_SHELL && \
+ "$(head -n1 "$ECONF_SOURCE/configure")" =~ ^'#!'[[:space:]]*/bin/sh([[:space:]]|$) ]] ; then
+ sed -e "1s:^#![[:space:]]*/bin/sh:#!$CONFIG_SHELL:" -i "$ECONF_SOURCE/configure" || \
+ die "Substition of shebang in '$ECONF_SOURCE/configure' failed"
+ fi
if [ -e /usr/share/gnuconfig/ ]; then
find "${WORKDIR}" -type f '(' \
-name config.guess -o -name config.sub ')' -print0 | \
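
The new econf() lines rewrite a plain '#!/bin/sh' shebang in the configure script to use CONFIG_SHELL before anything else runs. A rough standalone sketch of that substitution, in Python rather than the head/sed used above (the function name and return value are illustrative only):

    import re

    def substitute_configure_shebang(configure_path, config_shell):
        # Only rewrite scripts whose first line is a plain /bin/sh shebang,
        # mirroring the head/sed pattern in the econf() hunk above.
        with open(configure_path) as f:
            first_line = f.readline()
        if not config_shell or not re.match(r"^#!\s*/bin/sh(\s|$)", first_line):
            return False
        with open(configure_path) as f:
            text = f.read()
        text = re.sub(r"^#!\s*/bin/sh", "#!" + config_shell, text, count=1)
        with open(configure_path, "w") as f:
            f.write(text)
        return True
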
diff --git a/bin/misc-functions.sh b/bin/misc-functions.sh
index 7726b9f2..10d19312 100755
--- a/bin/misc-functions.sh
+++ b/bin/misc-functions.sh
@@ -41,6 +41,8 @@ install_symlink_html_docs() {
}
install_qa_check() {
+ local f
+
cd "${D}" || die "cd failed"
export STRIP_MASK
@@ -49,6 +51,7 @@ install_qa_check() {
ecompress --dequeue
# Now we look for all world writable files.
+ local i
for i in $(find "${D}/" -type f -perm -2); do
vecho -ne '\a'
vecho "QA Security Notice:"
@@ -60,7 +63,7 @@ install_qa_check() {
if type -P scanelf > /dev/null && ! hasq binchecks ${RESTRICT}; then
local qa_var insecure_rpath=0 tmp_quiet=${PORTAGE_QUIET}
- local f x
+ local x
# display warnings when using stricter because we die afterwards
if has stricter ${FEATURES} ; then
@@ -331,6 +334,7 @@ install_qa_check() {
fi
# Sanity check syntax errors in init.d scripts
+ local d
for d in /etc/conf.d /etc/init.d ; do
[[ -d ${D}/${d} ]] || continue
for i in "${D}"/${d}/* ; do
@@ -343,6 +347,7 @@ install_qa_check() {
# this should help to ensure that all (most?) shared libraries are executable
# and that all libtool scripts / static libraries are not executable
+ local j
for i in "${D}"opt/*/lib{,32,64} \
"${D}"lib{,32,64} \
"${D}"usr/lib{,32,64} \
@@ -384,6 +389,7 @@ install_qa_check() {
# the static library, or gcc will utilize the static lib when linking :(.
# http://bugs.gentoo.org/4411
abort="no"
+ local a s
for a in "${D}"usr/lib*/*.a ; do
s=${a%.a}.so
if [[ ! -e ${s} ]] ; then
@@ -532,13 +538,12 @@ install_qa_check() {
fi
fi
- # Compiled python objects do not belong in /usr/share (FHS violation)
- # and can be a pain when upgrading python
- f=$([ -d "${D}"/usr/share ] && \
- find "${D}"usr/share -name '*.py[co]' | sed "s:${D}:/:")
+ f=$(find "${D}" -name '*.py[co]' | sed "s:${D}:/:")
if [[ -n ${f} ]] ; then
vecho -ne '\a\n'
- eqawarn "QA Notice: Precompiled python object files do not belong in /usr/share"
+ eqawarn "QA Notice: Byte-compiled Python modules have been found. python_mod_optimize()"
+ eqawarn " and python_mod_cleanup() functions python.eclass should be used to"
+ eqawarn " handle byte-compiled Python modules."
eqawarn "${f}"
vecho -ne '\a\n'
fi
@@ -550,7 +555,7 @@ install_qa_check() {
[[ -x /usr/bin/file && -x /usr/bin/find ]] && \
[[ -n ${MULTILIB_STRICT_DIRS} && -n ${MULTILIB_STRICT_DENY} ]]
then
- local abort=no firstrun=yes
+ local abort=no dir file firstrun=yes
MULTILIB_STRICT_EXEMPT=$(echo ${MULTILIB_STRICT_EXEMPT} | sed -e 's:\([(|)]\):\\\1:g')
for dir in ${MULTILIB_STRICT_DIRS} ; do
[[ -d ${D}/${dir} ]] || continue
@@ -578,6 +583,7 @@ install_mask() {
# we don't want globbing for initial expansion, but afterwards, we do
local shopts=$-
set -o noglob
+ local no_inst
for no_inst in ${install_mask}; do
set +o noglob
quiet_mode || einfo "Removing ${no_inst}"
@@ -620,6 +626,7 @@ preinst_mask() {
cd "${T}"
# remove man pages, info pages, docs if requested
+ local f
for f in man info doc; do
if hasq no${f} $FEATURES; then
INSTALL_MASK="${INSTALL_MASK} /usr/share/${f}"
@@ -676,7 +683,7 @@ preinst_suid_scan() {
fi
# total suid control.
if hasq suidctl $FEATURES; then
- local sfconf
+ local i sfconf x
sfconf=${PORTAGE_CONFIGROOT}etc/portage/suidctl.conf
# sandbox prevents us from writing directly
# to files outside of the sandbox, but this
@@ -845,6 +852,7 @@ if [ -n "${MISC_FUNCTIONS_ARGS}" ]; then
for x in ${MISC_FUNCTIONS_ARGS}; do
${x}
done
+ unset x
fi
[ -n "${EBUILD_EXIT_STATUS_FILE}" ] && \
diff --git a/bin/repoman b/bin/repoman
index 53b9ad0c..0115c288 100755
--- a/bin/repoman
+++ b/bin/repoman
@@ -21,8 +21,6 @@ import sys
import tempfile
import time
import platform
-import xml.etree.ElementTree
-from xml.parsers.expat import ExpatError
try:
from urllib.request import urlopen as urllib_request_urlopen
@@ -32,9 +30,6 @@ except ImportError:
from itertools import chain
from stat import S_ISDIR
-if not hasattr(__builtins__, "set"):
- from sets import Set as set
-
try:
import portage
except ImportError:
@@ -42,6 +37,18 @@ except ImportError:
sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
import portage
portage._disable_legacy_globals()
+
+try:
+ import xml.etree.ElementTree
+ from xml.parsers.expat import ExpatError
+except ImportError:
+ msg = ["Please enable python's \"xml\" USE flag in order to use repoman."]
+ from portage.output import EOutput
+ out = EOutput()
+ for line in msg:
+ out.eerror(line)
+ sys.exit(1)
+
from portage import os
from portage import subprocess_getstatusoutput
from portage import _encodings
@@ -328,7 +335,7 @@ qahelp={
"IUSE.undefined":"This ebuild does not define IUSE (style guideline says to define IUSE even when empty)",
"LICENSE.invalid":"This ebuild is listing a license that doesnt exist in portages license/ dir.",
"KEYWORDS.invalid":"This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for which no valid profile was found",
- "RDEPEND.implicit":"RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND assignment",
+ "RDEPEND.implicit":"RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND assignment (prior to EAPI 4)",
"RDEPEND.suspect":"RDEPEND contains a package that usually only belongs in DEPEND.",
"RESTRICT.invalid":"This ebuild contains invalid RESTRICT values.",
"digest.assumed":"Existing digest must be assumed correct (Package level only)",
@@ -339,6 +346,7 @@ qahelp={
"ebuild.majorsyn":"This ebuild has a major syntax error that may cause the ebuild to fail partially or fully",
"ebuild.minorsyn":"This ebuild has a minor syntax error that contravenes gentoo coding style",
"ebuild.badheader":"This ebuild has a malformed header",
+ "eprefixify.defined":"The ebuild uses eprefixify, but does not inherit the prefix eclass",
"manifest.bad":"Manifest has missing or incorrect digests",
"metadata.missing":"Missing metadata.xml files",
"metadata.bad":"Bad metadata.xml files",
diff --git a/man/ebuild.5 b/man/ebuild.5
index 5c71cbea..94f49d70 100644
--- a/man/ebuild.5
+++ b/man/ebuild.5
@@ -286,9 +286,15 @@ and explicitly disallow them from being temporarily installed
simultaneously during a series of upgrades. This syntax is supported
beginning with \fBEAPI 2\fR.
.br
-\fI*\fR means match any version of the package so long as the specified base
-is matched. So with a version of '2*', we can match '2.1', '2.2', '2.2.1',
+\fI*\fR means match any version of the package so long
+as the specified string prefix is matched. So with a
+version of '2*', we can match '2.1', '2.2', '2.2.1',
etc... and not match version '1.0', '3.0', '4.1', etc...
+Beware that, due to the string matching nature, '20'
+will also be matched by '2*'. The version part
+that comes before the '*' must be a valid version in the absence of the '*'.
+For example, '2' is a valid version and '2.' is not. Therefore, '2*' is
+allowed and '2.*' is not.
.br
\fI~\fR means match any revision of the base version specified. So in the
above example, we would match versions '1.0.2a', '1.0.2a\-r1', '1.0.2a\-r2',
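
The man page wording above stresses that '*' is string-prefix matching, not numeric matching, which is why '20' is also matched by '2*'. A tiny illustrative check of that semantics (this is not Portage's actual matching code):

    def wildcard_version_match(pattern, version):
        # '2*' matches any version string starting with '2', so '20' matches too.
        assert pattern.endswith("*")
        return version.startswith(pattern[:-1])

    for v in ("2.1", "2.2.1", "20", "1.0", "3.0"):
        print(v, wildcard_version_match("2*", v))
    # 2.1 True, 2.2.1 True, 20 True, 1.0 False, 3.0 False
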
diff --git a/man/emerge.1 b/man/emerge.1
index 572002d3..73c90d3f 100644
--- a/man/emerge.1
+++ b/man/emerge.1
@@ -284,6 +284,15 @@ acceptance of the first choice. This option is
intended to be set in the \fBmake.conf\fR(5)
\fBEMERGE_DEFAULT_OPTS\fR variable.
.TP
+.BR "\-\-autounmask[=n]"
+Automatically unmask packages. If any configuration
+changes are required, then they will be displayed
+after the merge list and emerge will immediately
+abort. If the displayed configuration changes are
+satisfactory, you should copy and paste them into
+the specified configuration file(s). Currently,
+this only works for unstable KEYWORDS masks.
+.TP
.BR \-\-backtrack=COUNT
Specifies an integer number of times to backtrack if
dependency calculation fails due to a conflict or an
diff --git a/man/repoman.1 b/man/repoman.1
index 58165bb7..ad4c74cb 100644
--- a/man/repoman.1
+++ b/man/repoman.1
@@ -210,6 +210,10 @@ Masked ebuilds with RDEPEND settings (matched against *all* ebuilds) in developi
.B RDEPEND.badtilde
RDEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)
.TP
+.B RDEPEND.implicit
+RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND
+assignment (prior to EAPI 4)
+.TP
.B RDEPEND.suspect
RDEPEND contains a package that usually only belongs in DEPEND
.TP
@@ -282,6 +286,9 @@ PATCHES variable should be a bash array to ensure white space safety
Error generating cache entry for ebuild; typically caused by ebuild syntax error
or digest verification failure.
.TP
+.B eprefixify.defined
+The ebuild uses eprefixify, but does not inherit the prefix eclass
+.TP
.B file.UTF8
File is not UTF8 compliant
.TP
diff --git a/pym/_emerge/PackageUninstall.py b/pym/_emerge/PackageUninstall.py
index 58a1717a..6f528663 100644
--- a/pym/_emerge/PackageUninstall.py
+++ b/pym/_emerge/PackageUninstall.py
@@ -27,8 +27,8 @@ class PackageUninstall(AsynchronousTask):
else:
self.returncode = os.EX_OK
- if retval == 1:
- self.world_atom(self.pkg)
+ if retval == 1:
+ self.world_atom(self.pkg)
self.wait()
diff --git a/pym/_emerge/PipeReader.py b/pym/_emerge/PipeReader.py
index be958bcb..604fbf29 100644
--- a/pym/_emerge/PipeReader.py
+++ b/pym/_emerge/PipeReader.py
@@ -71,7 +71,7 @@ class PipeReader(AbstractPollTask):
buf = array.array('B')
try:
buf.fromfile(f, self._bufsize)
- except EOFError:
+ except (EOFError, IOError):
pass
if buf:
diff --git a/pym/_emerge/Scheduler.py b/pym/_emerge/Scheduler.py
index 0de656c7..067aa1a0 100644
--- a/pym/_emerge/Scheduler.py
+++ b/pym/_emerge/Scheduler.py
@@ -345,6 +345,7 @@ class Scheduler(PollScheduler):
def _set_digraph(self, digraph):
if "--nodeps" in self.myopts or \
+ digraph is None or \
(self._max_jobs is not True and self._max_jobs < 2):
# save some memory
self._digraph = None
diff --git a/pym/_emerge/SpawnProcess.py b/pym/_emerge/SpawnProcess.py
index bacbc2f3..51cd04fc 100644
--- a/pym/_emerge/SpawnProcess.py
+++ b/pym/_emerge/SpawnProcess.py
@@ -148,7 +148,6 @@ class SpawnProcess(SubProcess):
buf = array.array('B')
try:
buf.fromfile(files.process, self._bufsize)
- # EOFError was raised in Python <2.6.6 and <2.7.1.
except (EOFError, IOError):
pass
@@ -210,7 +209,7 @@ class SpawnProcess(SubProcess):
buf = array.array('B')
try:
buf.fromfile(self._files.process, self._bufsize)
- except EOFError:
+ except (EOFError, IOError):
pass
if buf:
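
The same EOFError to (EOFError, IOError) widening appears in PipeReader.py, SpawnProcess.py and, further down, doebuild.py. A minimal sketch of the shared read pattern, assuming (as the removed comment above indicates) that older Python raised EOFError on a short read while newer versions raise IOError when a non-blocking pipe has no data ready:

    import array

    def _read_chunk(fileobj, bufsize):
        buf = array.array('B')
        try:
            # A short read before EOF raises EOFError (the bytes read so far
            # are still appended); an empty non-blocking read raises IOError
            # on newer Python versions, so both are swallowed and whatever
            # data arrived is returned.
            buf.fromfile(fileobj, bufsize)
        except (EOFError, IOError):
            pass
        return buf
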
diff --git a/pym/_emerge/actions.py b/pym/_emerge/actions.py
index 70b091e5..bd167a0c 100644
--- a/pym/_emerge/actions.py
+++ b/pym/_emerge/actions.py
@@ -515,7 +515,7 @@ def action_config(settings, trees, myopts, myfiles):
print()
def action_depclean(settings, trees, ldpath_mtimes,
- myopts, action, myfiles, spinner):
+ myopts, action, myfiles, spinner, scheduler=None):
# Kill packages that aren't explicitly merged or are required as a
# dependency of another package. World file is explicit.
@@ -576,7 +576,8 @@ def action_depclean(settings, trees, ldpath_mtimes,
if cleanlist:
unmerge(root_config, myopts, "unmerge",
- cleanlist, ldpath_mtimes, ordered=ordered)
+ cleanlist, ldpath_mtimes, ordered=ordered,
+ scheduler=scheduler)
if action == "prune":
return
@@ -2650,7 +2651,7 @@ def action_uninstall(settings, trees, ldpath_mtimes,
for line in textwrap.wrap(msg, 70):
writemsg_level("!!! %s\n" % (line,),
level=logging.ERROR, noiselevel=-1)
- for i in e[0]:
+ for i in e.args[0]:
writemsg_level(" %s\n" % colorize("INFORM", i),
level=logging.ERROR, noiselevel=-1)
writemsg_level("\n", level=logging.ERROR, noiselevel=-1)
@@ -2723,18 +2724,28 @@ def action_uninstall(settings, trees, ldpath_mtimes,
for line in textwrap.wrap(msg, 72):
out.ewarn(line)
+ if action == 'deselect':
+ return action_deselect(settings, trees, opts, valid_atoms)
+
+ # Create a Scheduler for calls to unmerge(), in order to cause
+ # redirection of ebuild phase output to logs as required for
+ # options such as --quiet.
+ sched = Scheduler(settings, trees, None, opts,
+ spinner, [], [], None)
+ sched._background = sched._background_mode()
+ sched._status_display.quiet = True
+
if action in ('clean', 'unmerge') or \
(action == 'prune' and "--nodeps" in opts):
# When given a list of atoms, unmerge them in the order given.
ordered = action == 'unmerge'
unmerge(trees[settings["ROOT"]]['root_config'], opts, action,
- valid_atoms, ldpath_mtimes, ordered=ordered)
+ valid_atoms, ldpath_mtimes, ordered=ordered,
+ scheduler=sched._sched_iface)
rval = os.EX_OK
- elif action == 'deselect':
- rval = action_deselect(settings, trees, opts, valid_atoms)
else:
rval = action_depclean(settings, trees, ldpath_mtimes,
- opts, action, valid_atoms, spinner)
+ opts, action, valid_atoms, spinner, scheduler=sched._sched_iface)
return rval
diff --git a/pym/_emerge/depgraph.py b/pym/_emerge/depgraph.py
index 684b5b64..d31fcb8f 100644
--- a/pym/_emerge/depgraph.py
+++ b/pym/_emerge/depgraph.py
@@ -1,4 +1,4 @@
-# Copyright 1999-2009 Gentoo Foundation
+# Copyright 1999-2010 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import print_function
@@ -20,6 +20,8 @@ from portage.dep import Atom
from portage.output import bold, blue, colorize, create_color_func, darkblue, \
darkgreen, green, nc_len, red, teal, turquoise, yellow
bad = create_color_func("BAD")
+from portage.package.ebuild.getmaskingstatus import \
+ _getmaskingstatus, _MaskReason
from portage.sets import SETPREFIX
from portage.sets.base import InternalPackageSet
from portage.util import cmp_sort_key, writemsg, writemsg_stdout
@@ -66,7 +68,10 @@ class _frozen_depgraph_config(object):
if settings.get("PORTAGE_DEBUG", "") == "1":
self.edebug = 1
self.spinner = spinner
- self._running_root = trees["/"]["root_config"]
+ if "_test_" in myopts and "/" not in trees:
+ self._running_root = trees[self.target_root]["root_config"]
+ else:
+ self._running_root = trees["/"]["root_config"]
self._opts_no_restart = frozenset(["--buildpkgonly",
"--fetchonly", "--fetch-all-uri", "--pretend"])
self.pkgsettings = {}
@@ -106,7 +111,7 @@ class _frozen_depgraph_config(object):
class _dynamic_depgraph_config(object):
def __init__(self, depgraph, myparams, allow_backtracking,
- runtime_pkg_mask):
+ runtime_pkg_mask, needed_unstable_keywords, needed_use_config_changes):
self.myparams = myparams.copy()
self._vdb_loaded = False
self._allow_backtracking = allow_backtracking
@@ -182,6 +187,21 @@ class _dynamic_depgraph_config(object):
else:
runtime_pkg_mask = dict((k, v.copy()) for (k, v) in \
runtime_pkg_mask.items())
+
+ if needed_unstable_keywords is None:
+ self._needed_unstable_keywords = set()
+ else:
+ self._needed_unstable_keywords = needed_unstable_keywords.copy()
+
+ if needed_use_config_changes is None:
+ self._needed_use_config_changes = {}
+ else:
+ self._needed_use_config_changes = \
+ dict((k.copy(), (v[0].copy(), v[1].copy())) for (k, v) in \
+ needed_use_config_changes.items())
+
+ self._autounmask = depgraph._frozen_config.myopts.get('--autounmask', 'n') == True
+
self._runtime_pkg_mask = runtime_pkg_mask
self._need_restart = False
@@ -252,13 +272,14 @@ class depgraph(object):
_dep_keys = ["DEPEND", "RDEPEND", "PDEPEND"]
def __init__(self, settings, trees, myopts, myparams, spinner,
- frozen_config=None, runtime_pkg_mask=None, allow_backtracking=False):
+ frozen_config=None, runtime_pkg_mask=None, needed_unstable_keywords=None, \
+ needed_use_config_changes=None, allow_backtracking=False):
if frozen_config is None:
frozen_config = _frozen_depgraph_config(settings, trees,
myopts, spinner)
self._frozen_config = frozen_config
self._dynamic_config = _dynamic_depgraph_config(self, myparams,
- allow_backtracking, runtime_pkg_mask)
+ allow_backtracking, runtime_pkg_mask, needed_unstable_keywords, needed_use_config_changes)
self._select_atoms = self._select_atoms_highest_available
self._select_package = self._select_pkg_highest_available
@@ -1109,7 +1130,7 @@ class depgraph(object):
myroot = pkg.root
mykey = pkg.cpv
metadata = pkg.metadata
- myuse = pkg.use.enabled
+ myuse = self._pkg_use_enabled(pkg)
jbigkey = pkg
depth = pkg.depth + 1
removal_action = "remove" in self._dynamic_config.myparams
@@ -1190,7 +1211,7 @@ class depgraph(object):
dep_string = portage.dep.paren_normalize(
portage.dep.use_reduce(
portage.dep.paren_reduce(dep_string),
- uselist=pkg.use.enabled))
+ uselist=self._pkg_use_enabled(pkg)))
dep_string = list(self._queue_disjunctive_deps(
pkg, dep_root, dep_priority, dep_string))
@@ -1252,7 +1273,7 @@ class depgraph(object):
try:
selected_atoms = self._select_atoms(dep_root,
- dep_string, myuse=pkg.use.enabled, parent=pkg,
+ dep_string, myuse=self._pkg_use_enabled(pkg), parent=pkg,
strict=strict, priority=dep_priority)
except portage.exception.InvalidDependString as e:
show_invalid_depstring_notice(pkg, dep_string, str(e))
@@ -1282,7 +1303,7 @@ class depgraph(object):
inst_pkgs = vardb.match_pkgs(atom)
if inst_pkgs:
for inst_pkg in inst_pkgs:
- if inst_pkg.visible:
+ if self._pkg_visibility_check(inst_pkg):
# highest visible
mypriority.satisfied = inst_pkg
break
@@ -1328,7 +1349,7 @@ class depgraph(object):
inst_pkgs = vardb.match_pkgs(atom)
if inst_pkgs:
for inst_pkg in inst_pkgs:
- if inst_pkg.visible:
+ if self._pkg_visibility_check(inst_pkg):
# highest visible
mypriority.satisfied = inst_pkg
break
@@ -1971,6 +1992,13 @@ class depgraph(object):
except self._unknown_internal_error:
return False, myfavorites
+ if set(self._dynamic_config.digraph.nodes.keys()).intersection( \
+ set(self._dynamic_config._needed_unstable_keywords)) or \
+ set(self._dynamic_config.digraph.nodes.keys()).intersection( \
+ set(self._dynamic_config._needed_use_config_changes.keys())) :
+ #We failed if the user needs to change the configuration
+ return False, myfavorites
+
# We're true here unless we are missing binaries.
return (not missing,myfavorites)
@@ -2052,7 +2080,7 @@ class depgraph(object):
dep_str = " ".join(pkg.metadata[k] for k in blocker_dep_keys)
try:
selected_atoms = self._select_atoms(
- pkg.root, dep_str, pkg.use.enabled,
+ pkg.root, dep_str, self._pkg_use_enabled(pkg),
parent=pkg, strict=True)
except portage.exception.InvalidDependString:
continue
@@ -2107,11 +2135,18 @@ class depgraph(object):
myuse=None, parent=None, strict=True, trees=None, priority=None):
"""This will raise InvalidDependString if necessary. If trees is
None then self._dynamic_config._filtered_trees is used."""
+
pkgsettings = self._frozen_config.pkgsettings[root]
if trees is None:
trees = self._dynamic_config._filtered_trees
+ mytrees = trees[root]
atom_graph = digraph()
if True:
+ # Temporarily disable autounmask so that || preferences
+ # account for masking and USE settings.
+ _autounmask_backup = self._dynamic_config._autounmask
+ self._dynamic_config._autounmask = False
+ mytrees["pkg_use_enabled"] = self._pkg_use_enabled
try:
if parent is not None:
trees[root]["parent"] = parent
@@ -2124,6 +2159,8 @@ class depgraph(object):
pkgsettings, myuse=myuse,
myroot=root, trees=trees)
finally:
+ self._dynamic_config._autounmask = _autounmask_backup
+ del mytrees["pkg_use_enabled"]
if parent is not None:
trees[root].pop("parent")
trees[root].pop("atom_graph")
@@ -2221,7 +2258,7 @@ class depgraph(object):
masked_pkg_instances.add(pkg)
if atom.unevaluated_atom.use:
if not pkg.iuse.is_valid_flag(atom.unevaluated_atom.use.required) \
- or atom.violated_conditionals(pkg.use.enabled).use:
+ or atom.violated_conditionals(self._pkg_use_enabled(pkg)).use:
missing_use.append(pkg)
if not mreasons:
continue
@@ -2239,7 +2276,7 @@ class depgraph(object):
missing_use_reasons = []
missing_iuse_reasons = []
for pkg in missing_use:
- use = pkg.use.enabled
+ use = self._pkg_use_enabled(pkg)
missing_iuse = pkg.iuse.get_missing_iuse(atom.use.required)
mreasons = []
if missing_iuse:
@@ -2261,7 +2298,7 @@ class depgraph(object):
# Lets see if the violated use deps are conditional.
# If so, suggest to change them on the parent.
mreasons = []
- violated_atom = atom.unevaluated_atom.violated_conditionals(pkg.use.enabled, myparent.use.enabled)
+ violated_atom = atom.unevaluated_atom.violated_conditionals(self._pkg_use_enabled(pkg), myparent.use.enabled)
if not (violated_atom.use.enabled or violated_atom.use.disabled):
#all violated use deps are conditional
changes = []
@@ -2296,11 +2333,14 @@ class depgraph(object):
break
elif unmasked_iuse_reasons:
- if missing_use_reasons:
- # All packages with required IUSE are masked,
- # so display a normal masking message.
- pass
- else:
+ masked_with_iuse = False
+ for pkg in masked_pkg_instances:
+ if not pkg.iuse.get_missing_iuse(atom.use.required):
+ # Package(s) with required IUSE are masked,
+ # so display a normal masking message.
+ masked_with_iuse = True
+ break
+ if not masked_with_iuse:
show_missing_use = unmasked_iuse_reasons
mask_docs = False
@@ -2467,12 +2507,162 @@ class depgraph(object):
pkg, existing = ret
if pkg is not None:
settings = pkg.root_config.settings
- if pkg.visible and not (pkg.installed and \
+ if self._pkg_visibility_check(pkg) and not (pkg.installed and \
settings._getMissingKeywords(pkg.cpv, pkg.metadata)):
self._dynamic_config._visible_pkgs[pkg.root].cpv_inject(pkg)
return ret
+ def _want_installed_pkg(self, pkg):
+ """
+ Given an installed package returned from select_pkg, return
+ True if the user has not explicitly requested for this package
+ to be replaced (typically via an atom on the command line).
+ """
+ if "selective" not in self._dynamic_config.myparams and \
+ pkg.root == self._frozen_config.target_root:
+ try:
+ next(self._iter_atoms_for_pkg(pkg))
+ except StopIteration:
+ pass
+ except portage.exception.InvalidDependString:
+ pass
+ else:
+ return False
+ return True
+
def _select_pkg_highest_available_imp(self, root, atom, onlydeps=False):
+ pkg, existing = self._wrapped_select_pkg_highest_available_imp(root, atom, onlydeps=onlydeps)
+
+ default_selection = (pkg, existing)
+
+ if self._dynamic_config._autounmask is True:
+ if pkg is not None and \
+ pkg.installed and \
+ not self._want_installed_pkg(pkg):
+ pkg = None
+
+ for allow_unstable_keywords in False, True:
+ if pkg is not None:
+ break
+
+ pkg, existing = \
+ self._wrapped_select_pkg_highest_available_imp(
+ root, atom, onlydeps=onlydeps,
+ allow_use_changes=True, allow_unstable_keywords=allow_unstable_keywords)
+
+ if pkg is not None and \
+ pkg.installed and \
+ not self._want_installed_pkg(pkg):
+ pkg = None
+
+ if pkg is not None and not pkg.visible:
+ self._dynamic_config._needed_unstable_keywords.add(pkg)
+
+ if self._dynamic_config._need_restart:
+ return None, None
+
+ if pkg is None:
+ # This ensures that we can fall back to an installed package
+ # that may have been rejected in the autounmask path above.
+ return default_selection
+
+ return pkg, existing
+
+ def _pkg_visibility_check(self, pkg, allow_unstable_keywords=False):
+ if pkg.visible:
+ return True
+
+ if pkg in self._dynamic_config._needed_unstable_keywords:
+ return True
+
+ if not allow_unstable_keywords:
+ return False
+
+ pkgsettings = self._frozen_config.pkgsettings[pkg.root]
+ root_config = self._frozen_config.roots[pkg.root]
+ mreasons = _get_masking_status(pkg, pkgsettings, root_config)
+ if len(mreasons) == 1 and \
+ mreasons[0].hint == 'unstable keyword':
+ return True
+ else:
+ return False
+
+ def _pkg_use_enabled(self, pkg, target_use=None):
+ """
+ If target_use is None, returns pkg.use.enabled + changes in _needed_use_config_changes.
+ If target_use is given, the need changes are computed to make the package useable.
+ Example: target_use = { "foo": True, "bar": False }
+ The flags target_use must be in the pkg's IUSE.
+ """
+ needed_use_config_change = self._dynamic_config._needed_use_config_changes.get(pkg)
+
+ if target_use is None:
+ if needed_use_config_change is None:
+ return pkg.use.enabled
+ else:
+ return needed_use_config_change[0]
+
+ if needed_use_config_change is not None:
+ old_use = needed_use_config_change[0]
+ new_use = set()
+ old_changes = needed_use_config_change[1]
+ new_changes = old_changes.copy()
+ else:
+ old_use = pkg.use.enabled
+ new_use = set()
+ old_changes = {}
+ new_changes = {}
+
+ for flag, state in target_use.items():
+ if state:
+ if flag not in old_use:
+ if new_changes.get(flag) == False:
+ return old_use
+ new_changes[flag] = True
+ new_use.add(flag)
+ else:
+ if flag in old_use:
+ if new_changes.get(flag) == True:
+ return old_use
+ new_changes[flag] = False
+ new_use.update(old_use.difference(target_use.keys()))
+
+ def want_restart_for_use_change(pkg, new_use):
+ if pkg not in self._dynamic_config.digraph.nodes:
+ return False
+
+ for key in "DEPEND", "RDEPEND", "PDEPEND", "LICENSE":
+ dep = pkg.metadata[key]
+ old_val = set(portage.dep.paren_normalize( \
+ portage.dep.use_reduce(portage.dep.paren_reduce(dep), pkg.use.enabled)))
+ new_val = set(portage.dep.paren_normalize( \
+ portage.dep.use_reduce(portage.dep.paren_reduce(dep), new_use)))
+
+ if old_val != new_val:
+ return True
+
+ parent_atoms = self._dynamic_config._parent_atoms.get(pkg)
+ if not parent_atoms:
+ return False
+
+ new_use, changes = self._dynamic_config._needed_use_config_changes.get(pkg)
+ for ppkg, atom in parent_atoms:
+ if not atom.use or \
+ not atom.use.required.intersection(changes.keys()):
+ continue
+ else:
+ return True
+
+ return False
+
+ if new_changes != old_changes:
+ self._dynamic_config._needed_use_config_changes[pkg] = (new_use, new_changes)
+ if want_restart_for_use_change(pkg, new_use):
+ self._dynamic_config._need_restart = True
+ return new_use
+
+ def _wrapped_select_pkg_highest_available_imp(self, root, atom, onlydeps=False, \
+ allow_use_changes=False, allow_unstable_keywords=False):
root_config = self._frozen_config.roots[root]
pkgsettings = self._frozen_config.pkgsettings[root]
dbs = self._dynamic_config._filtered_trees[root]["dbs"]
@@ -2569,7 +2759,7 @@ class depgraph(object):
# were installed can be automatically downgraded
# to an unmasked version.
- if not pkg.visible:
+ if not self._pkg_visibility_check(pkg, allow_unstable_keywords=allow_unstable_keywords):
continue
# Enable upgrade or downgrade to a version
@@ -2603,7 +2793,8 @@ class depgraph(object):
except portage.exception.PackageNotFound:
continue
else:
- if not pkg_eb.visible:
+ if not self._pkg_visibility_check(pkg_eb, \
+ allow_unstable_keywords=allow_unstable_keywords):
continue
# Calculation of USE for unbuilt ebuilds is relatively
@@ -2630,11 +2821,21 @@ class depgraph(object):
# since IUSE cannot be adjusted by the user.
continue
- if atom.use.enabled.difference(pkg.use.enabled):
+ if allow_use_changes:
+ target_use = {}
+ for flag in atom.use.enabled:
+ target_use[flag] = True
+ for flag in atom.use.disabled:
+ target_use[flag] = False
+ use = self._pkg_use_enabled(pkg, target_use)
+ else:
+ use = self._pkg_use_enabled(pkg)
+
+ if atom.use.enabled.difference(use):
if not pkg.built:
packages_with_invalid_use_config.append(pkg)
continue
- if atom.use.disabled.intersection(pkg.use.enabled):
+ if atom.use.disabled.intersection(use):
if not pkg.built:
packages_with_invalid_use_config.append(pkg)
continue
@@ -2675,7 +2876,7 @@ class depgraph(object):
"--reinstall" in self._frozen_config.myopts or \
"--binpkg-respect-use" in self._frozen_config.myopts):
iuses = pkg.iuse.all
- old_use = pkg.use.enabled
+ old_use = self._pkg_use_enabled(pkg)
if myeb:
pkgsettings.setcpv(myeb)
else:
@@ -2704,7 +2905,7 @@ class depgraph(object):
old_use = vardb.aux_get(cpv, ["USE"])[0].split()
old_iuse = set(filter_iuse_defaults(
vardb.aux_get(cpv, ["IUSE"])[0].split()))
- cur_use = pkg.use.enabled
+ cur_use = self._pkg_use_enabled(pkg)
cur_iuse = pkg.iuse.all
reinstall_for_flags = \
self._reinstall_for_flags(
@@ -2787,11 +2988,13 @@ class depgraph(object):
if avoid_update:
for pkg in matched_packages:
- if pkg.installed and pkg.visible:
+ if pkg.installed and self._pkg_visibility_check(pkg, \
+ allow_unstable_keywords=allow_unstable_keywords):
return pkg, existing_node
bestmatch = portage.best(
- [pkg.cpv for pkg in matched_packages if pkg.visible])
+ [pkg.cpv for pkg in matched_packages \
+ if self._pkg_visibility_check(pkg, allow_unstable_keywords=allow_unstable_keywords)])
if not bestmatch:
# all are masked, so ignore visibility
bestmatch = portage.best(
@@ -2958,7 +3161,7 @@ class depgraph(object):
self._frozen_config._pkg_cache[pkg] = pkg
- if not pkg.visible and \
+ if not self._pkg_visibility_check(pkg) and \
'LICENSE' in pkg.masks and len(pkg.masks) == 1:
slot_key = (pkg.root, pkg.slot_atom)
other_pkg = self._frozen_config._highest_license_masked.get(slot_key)
@@ -3014,7 +3217,7 @@ class depgraph(object):
# packages masked by license, since the user likely wants
# to adjust ACCEPT_LICENSE.
if pkg in final_db:
- if not pkg.visible and \
+ if not self._pkg_visibility_check(pkg) and \
(pkg_in_graph or 'LICENSE' in pkg.masks):
self._dynamic_config._masked_installed.add(pkg)
else:
@@ -3083,7 +3286,7 @@ class depgraph(object):
portage.dep._dep_check_strict = False
try:
success, atoms = portage.dep_check(depstr,
- final_db, pkgsettings, myuse=pkg.use.enabled,
+ final_db, pkgsettings, myuse=self._pkg_use_enabled(pkg),
trees=self._dynamic_config._graph_trees, myroot=myroot)
except Exception as e:
if isinstance(e, SystemExit):
@@ -4394,7 +4597,7 @@ class depgraph(object):
os.path.dirname(ebuild_path)))
else:
repo_path_real = portdb.getRepositoryPath(repo_name)
- pkg_use = list(pkg.use.enabled)
+ pkg_use = list(self._pkg_use_enabled(pkg))
if not pkg.built and pkg.operation == 'merge' and \
'fetch' in pkg.metadata.restrict:
fetch = red("F")
@@ -4485,7 +4688,7 @@ class depgraph(object):
forced_flags.update(pkgsettings.useforce)
forced_flags.update(pkgsettings.usemask)
- cur_use = [flag for flag in pkg.use.enabled \
+ cur_use = [flag for flag in self._pkg_use_enabled(pkg) \
if flag in pkg.iuse.all]
cur_iuse = sorted(pkg.iuse.all)
@@ -5049,6 +5252,74 @@ class depgraph(object):
else:
self._show_missed_update()
+ def get_dep_chain(pkg):
+ traversed_nodes = set()
+ msg = "#"
+ node = pkg
+ first = True
+ while node is not None:
+ traversed_nodes.add(node)
+ if node is not pkg:
+ if first:
+ first = False
+ else:
+ msg += ", "
+ msg += 'required by =%s' % node.cpv
+
+ if node not in self._dynamic_config.digraph:
+ # The parent is not in the graph due to backtracking.
+ break
+
+ # When traversing to parents, prefer arguments over packages
+ # since arguments are root nodes. Never traverse the same
+ # package twice, in order to prevent an infinite loop.
+ selected_parent = None
+ for parent in self._dynamic_config.digraph.parent_nodes(node):
+ if isinstance(parent, DependencyArg):
+ if first:
+ first = False
+ else:
+ msg += ", "
+ msg += 'required by %s (argument)' % str(parent)
+ selected_parent = None
+ break
+ if parent not in traversed_nodes:
+ selected_parent = parent
+ node = selected_parent
+ msg += "\n"
+ return msg
+
+ unstable_keyword_msg = []
+ for pkg in self._dynamic_config._needed_unstable_keywords:
+ self._show_merge_list()
+ if pkg in self._dynamic_config.digraph.nodes.keys():
+ pkgsettings = self._frozen_config.pkgsettings[pkg.root]
+ unstable_keyword_msg.append(get_dep_chain(pkg))
+ unstable_keyword_msg.append("=%s ~%s\n" % (pkg.cpv, pkgsettings["ACCEPT_KEYWORDS"]))
+
+ use_changes_msg = []
+ for pkg, needed_use_config_change in self._dynamic_config._needed_use_config_changes.items():
+ self._show_merge_list()
+ if pkg in self._dynamic_config.digraph.nodes.keys():
+ changes = needed_use_config_change[1]
+ adjustments = []
+ for flag, state in changes.items():
+ if state:
+ adjustments.append(flag)
+ else:
+ adjustments.append("-" + flag)
+ use_changes_msg.append("=%s %s\n" % (pkg.cpv, " ".join(adjustments)))
+
+ if unstable_keyword_msg:
+ writemsg_stdout("\nThe following " + colorize("BAD", "keyword changes") + \
+ " are necessary to proceed:\n", noiselevel=-1)
+ writemsg_stdout("".join(unstable_keyword_msg), noiselevel=-1)
+
+ if use_changes_msg:
+ writemsg_stdout("\nThe following " + colorize("BAD", "USE changes") + \
+ " are necessary to proceed:\n", noiselevel=-1)
+ writemsg_stdout("".join(use_changes_msg), noiselevel=-1)
+
# TODO: Add generic support for "set problem" handlers so that
# the below warnings aren't special cases for world only.
@@ -5247,7 +5518,7 @@ class depgraph(object):
self._frozen_config.excluded_pkgs.findAtomForPackage(pkg):
continue
- if "merge" == pkg.operation and not pkg.visible:
+ if "merge" == pkg.operation and not self._pkg_visibility_check(pkg):
if skip_masked:
masked_tasks.append(Dependency(root=pkg.root, parent=pkg))
else:
@@ -5440,8 +5711,16 @@ class depgraph(object):
def need_restart(self):
return self._dynamic_config._need_restart
- def get_runtime_pkg_mask(self):
- return self._dynamic_config._runtime_pkg_mask.copy()
+ def get_backtrack_parameters(self):
+ return {
+ "needed_unstable_keywords":
+ self._dynamic_config._needed_unstable_keywords.copy(), \
+ "runtime_pkg_mask":
+ self._dynamic_config._runtime_pkg_mask.copy(),
+ "needed_use_config_changes":
+ self._dynamic_config._needed_use_config_changes.copy()
+ }
+
class _dep_check_composite_db(dbapi):
"""
@@ -5672,7 +5951,8 @@ def _backtrack_depgraph(settings, trees, myopts, myparams,
myaction, myfiles, spinner):
backtrack_max = myopts.get('--backtrack', 5)
- runtime_pkg_mask = None
+ backtrack_parameters = {}
+ needed_unstable_keywords = None
allow_backtracking = backtrack_max > 0
backtracked = 0
frozen_config = _frozen_depgraph_config(settings, trees,
@@ -5681,11 +5961,11 @@ def _backtrack_depgraph(settings, trees, myopts, myparams,
mydepgraph = depgraph(settings, trees, myopts, myparams, spinner,
frozen_config=frozen_config,
allow_backtracking=allow_backtracking,
- runtime_pkg_mask=runtime_pkg_mask)
+ **backtrack_parameters)
success, favorites = mydepgraph.select_files(myfiles)
if not success:
if mydepgraph.need_restart() and backtracked < backtrack_max:
- runtime_pkg_mask = mydepgraph.get_runtime_pkg_mask()
+ backtrack_parameters = mydepgraph.get_backtrack_parameters()
backtracked += 1
elif backtracked and allow_backtracking:
if "--debug" in myopts:
@@ -5695,7 +5975,10 @@ def _backtrack_depgraph(settings, trees, myopts, myparams,
# Backtracking failed, so disable it and do
# a plain dep calculation + error message.
allow_backtracking = False
- runtime_pkg_mask = None
+ #Don't reset needed_unstable_keywords here, since we don't want to
+ #send the user through a "one step at a time" unmasking session for
+ #no good reason.
+ backtrack_parameters.pop('runtime_pkg_mask', None)
else:
break
else:
@@ -5884,21 +6167,26 @@ def show_blocker_docs_link():
print()
def get_masking_status(pkg, pkgsettings, root_config, myrepo=None):
+ return [mreason.message for \
+ mreason in _get_masking_status(pkg, pkgsettings, root_config, myrepo)]
- mreasons = portage.getmaskingstatus(
+def _get_masking_status(pkg, pkgsettings, root_config, myrepo=None):
+ mreasons = _getmaskingstatus(
pkg, settings=pkgsettings,
portdb=root_config.trees["porttree"].dbapi, myrepo=myrepo)
if not pkg.installed:
if not pkgsettings._accept_chost(pkg.cpv, pkg.metadata):
- mreasons.append("CHOST: %s" % \
- pkg.metadata["CHOST"])
+ mreasons.append(_MaskReason("CHOST", "CHOST: %s" % \
+ pkg.metadata["CHOST"]))
if pkg.invalid:
for msg_type, msgs in pkg.invalid.items():
for msg in msgs:
- mreasons.append("invalid: %s" % (msg,))
+ mreasons.append(
+ _MaskReason("invalid", "invalid: %s" % (msg,)))
if not pkg.metadata["SLOT"]:
- mreasons.append("invalid: SLOT is undefined")
+ mreasons.append(
+ _MaskReason("invalid", "SLOT: undefined"))
return mreasons
diff --git a/pym/_emerge/help.py b/pym/_emerge/help.py
index 43e9794e..72bb56d4 100644
--- a/pym/_emerge/help.py
+++ b/pym/_emerge/help.py
@@ -291,6 +291,17 @@ def help(myopts, havecolor=1):
"EMERGE_DEFAULT_OPTS variable."
for line in wrap(desc, desc_width):
print(desc_indent + line)
+ print()
+ print(" " + green("--autounmask") + "[=%s]" % turquoise("n"))
+ desc = "Automatically unmask packages. If any configuration " + \
+ "changes are required, then they will be displayed " + \
+ "after the merge list and emerge will immediately " + \
+ "abort. If the displayed configuration changes are " + \
+ "satisfactory, you should copy and paste them into " + \
+ "the specified configuration file(s). Currently, " + \
+ "this only works for unstable KEYWORDS masks."
+ for line in wrap(desc, desc_width):
+ print(desc_indent + line)
print()
print(" " + green("--backtrack") + " " + turquoise("COUNT"))
desc = "Specifies an integer number of times to backtrack if " + \
diff --git a/pym/_emerge/main.py b/pym/_emerge/main.py
index b7008658..974b5cf3 100644
--- a/pym/_emerge/main.py
+++ b/pym/_emerge/main.py
@@ -388,6 +388,7 @@ def insert_optional_args(args):
new_args = []
default_arg_opts = {
+ '--autounmask' : ('n',),
'--complete-graph' : ('n',),
'--deep' : valid_integers,
'--depclean-lib-check' : ('n',),
@@ -515,6 +516,13 @@ def parse_opts(tmpcmdline, silent=False):
longopt_aliases = {"--cols":"--columns", "--skip-first":"--skipfirst"}
argument_options = {
+
+ "--autounmask": {
+ "help" : "automatically unmask packages",
+ "type" : "choice",
+ "choices" : ("True", "n")
+ },
+
"--accept-properties": {
"help":"temporarily override ACCEPT_PROPERTIES",
"action":"store"
@@ -735,6 +743,9 @@ def parse_opts(tmpcmdline, silent=False):
myoptions, myargs = parser.parse_args(args=tmpcmdline)
+ if myoptions.autounmask in ("True",):
+ myoptions.autounmask = True
+
if myoptions.changed_use is not False:
myoptions.reinstall = "changed-use"
myoptions.changed_use = False
@@ -1604,7 +1615,7 @@ def emerge_main():
for line in textwrap.wrap(msg, 70):
writemsg_level("!!! %s\n" % (line,),
level=logging.ERROR, noiselevel=-1)
- for i in e[0]:
+ for i in e.args[0]:
writemsg_level(" %s\n" % colorize("INFORM", i),
level=logging.ERROR, noiselevel=-1)
writemsg_level("\n", level=logging.ERROR, noiselevel=-1)
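
Both here and in actions.py above, e[0] becomes e.args[0]. The change is for portability: indexing an exception instance was removed in Python 3, while the args attribute works on both Python 2 and 3. A small illustration (the exception type and payload are made up):

    try:
        raise ValueError(["candidate atom 1", "candidate atom 2"])
    except ValueError as e:
        details = e.args[0]   # works on Python 2 and 3
        # details = e[0]      # TypeError under Python 3
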
diff --git a/pym/_emerge/unmerge.py b/pym/_emerge/unmerge.py
index c8ad7a2e..5b466e2d 100644
--- a/pym/_emerge/unmerge.py
+++ b/pym/_emerge/unmerge.py
@@ -40,9 +40,9 @@ def unmerge(root_config, myopts, unmerge_action,
def _pkg(cpv):
pkg = pkg_cache.get(cpv)
if pkg is None:
- pkg = Package(cpv=cpv, installed=True,
+ pkg = Package(built=True, cpv=cpv, installed=True,
metadata=zip(db_keys, vartree.dbapi.aux_get(cpv, db_keys)),
- root_config=root_config,
+ operation="uninstall", root_config=root_config,
type_name="installed")
pkg_cache[cpv] = pkg
return pkg
diff --git a/pym/portage/dbapi/vartree.py b/pym/portage/dbapi/vartree.py
index b95d8c2e..dc8648b3 100644
--- a/pym/portage/dbapi/vartree.py
+++ b/pym/portage/dbapi/vartree.py
@@ -1878,14 +1878,14 @@ class dblink(object):
"""
import re
- _normalize_needed = re.compile(r'.*//.*|^[^/]|.+/$|(^|.*/)\.\.?(/.*|$)')
- _contents_split_counts = {
- "dev": 2,
- "dir": 2,
- "fif": 2,
- "obj": 4,
- "sym": 5
- }
+ _normalize_needed = re.compile(r'//|^[^/]|./$|(^|/)\.\.?(/|$)')
+
+ _contents_re = re.compile(r'^(' + \
+ r'(?P<dir>(dev|dir|fif) (.+))|' + \
+ r'(?P<obj>(obj) (.+) (\S+) (\d+))|' + \
+ r'(?P<sym>(sym) (.+) -> (.+) (\d+))' + \
+ r')$'
+ )
# When looping over files for merge/unmerge, temporarily yield to the
# scheduler each time this many files are processed.
@@ -2033,7 +2033,10 @@ class dblink(object):
myc.close()
null_byte = "\0"
normalize_needed = self._normalize_needed
- contents_split_counts = self._contents_split_counts
+ contents_re = self._contents_re
+ obj_index = contents_re.groupindex['obj']
+ dir_index = contents_re.groupindex['dir']
+ sym_index = contents_re.groupindex['sym']
myroot = self.myroot
if myroot == os.path.sep:
myroot = None
@@ -2045,63 +2048,40 @@ class dblink(object):
errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
continue
line = line.rstrip("\n")
- # Split on " " so that even file paths that
- # end with spaces can be handled.
- mydat = line.split(" ")
- entry_type = mydat[0] # empty string if line is empty
- correct_split_count = contents_split_counts.get(entry_type)
- if correct_split_count and len(mydat) > correct_split_count:
- # Apparently file paths contain spaces, so reassemble
- # the split have the correct_split_count.
- newsplit = [entry_type]
- spaces_total = len(mydat) - correct_split_count
- if entry_type == "sym":
- try:
- splitter = mydat.index("->", 2, len(mydat) - 2)
- except ValueError:
- errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
- continue
- spaces_in_path = splitter - 2
- spaces_in_target = spaces_total - spaces_in_path
- newsplit.append(" ".join(mydat[1:splitter]))
- newsplit.append("->")
- target_end = splitter + spaces_in_target + 2
- newsplit.append(" ".join(mydat[splitter + 1:target_end]))
- newsplit.extend(mydat[target_end:])
- else:
- path_end = spaces_total + 2
- newsplit.append(" ".join(mydat[1:path_end]))
- newsplit.extend(mydat[path_end:])
- mydat = newsplit
-
- # we do this so we can remove from non-root filesystems
- # (use the ROOT var to allow maintenance on other partitions)
- try:
- if normalize_needed.match(mydat[1]):
- mydat[1] = normalize_path(mydat[1])
- if not mydat[1].startswith(os.path.sep):
- mydat[1] = os.path.sep + mydat[1]
- if myroot:
- mydat[1] = os.path.join(myroot, mydat[1].lstrip(os.path.sep))
- if mydat[0] == "obj":
- #format: type, mtime, md5sum
- pkgfiles[mydat[1]] = [mydat[0], mydat[3], mydat[2]]
- elif mydat[0] == "dir":
- #format: type
- pkgfiles[mydat[1]] = [mydat[0]]
- elif mydat[0] == "sym":
- #format: type, mtime, dest
- pkgfiles[mydat[1]] = [mydat[0], mydat[4], mydat[3]]
- elif mydat[0] == "dev":
- #format: type
- pkgfiles[mydat[1]] = [mydat[0]]
- elif mydat[0]=="fif":
- #format: type
- pkgfiles[mydat[1]] = [mydat[0]]
- else:
- errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
- except (KeyError, IndexError):
+ m = contents_re.match(line)
+ if m is None:
errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
+ continue
+
+ if m.group(obj_index) is not None:
+ base = obj_index
+ #format: type, mtime, md5sum
+ data = (m.group(base+1), m.group(base+4), m.group(base+3))
+ elif m.group(dir_index) is not None:
+ base = dir_index
+ #format: type
+ data = (m.group(base+1),)
+ elif m.group(sym_index) is not None:
+ base = sym_index
+ #format: type, mtime, dest
+ data = (m.group(base+1), m.group(base+4), m.group(base+3))
+ else:
+ # This won't happen as long the regular expression
+ # is written to only match valid entries.
+ raise AssertionError(_("required group not found " + \
+ "in CONTENTS entry: '%s'") % line)
+
+ path = m.group(base+2)
+ if normalize_needed.search(path) is not None:
+ path = normalize_path(path)
+ if not path.startswith(os.path.sep):
+ path = os.path.sep + path
+
+ if myroot is not None:
+ path = os.path.join(myroot, path.lstrip(os.path.sep))
+
+ pkgfiles[path] = data
+
if errors:
writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
for pos, e in errors:
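
For reference, the new CONTENTS regular expression from the hunk above, exercised against a few invented sample entries; base + 2 is the path group for each entry type, exactly as the new parsing code assumes (paths, digest and mtime values here are illustrative):

    import re

    contents_re = re.compile(r'^(' + \
        r'(?P<dir>(dev|dir|fif) (.+))|' + \
        r'(?P<obj>(obj) (.+) (\S+) (\d+))|' + \
        r'(?P<sym>(sym) (.+) -> (.+) (\d+))' + \
        r')$')

    samples = [
        "dir /usr/share/doc",
        "obj /usr/bin/foo d41d8cd98f00b204e9800998ecf8427e 1281398400",
        "sym /usr/lib/libfoo.so -> libfoo.so.1 1281398400",
    ]
    for line in samples:
        m = contents_re.match(line)
        for kind in ("dir", "obj", "sym"):
            base = contents_re.groupindex[kind]
            if m.group(base) is not None:
                print(kind, m.group(base + 2))   # base + 2 is the path group
                break
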
diff --git a/pym/portage/dep/__init__.py b/pym/portage/dep/__init__.py
index bc77ec54..a048558a 100644
--- a/pym/portage/dep/__init__.py
+++ b/pym/portage/dep/__init__.py
@@ -519,9 +519,9 @@ class _use_dep(object):
tokens.extend(x for x in self.enabled if x not in other_use)
tokens.extend("-" + x for x in self.disabled if x in other_use)
if conditional:
- if not parent_use:
+ if parent_use is None:
raise InvalidAtom("violated_conditionals needs 'parent_use'" + \
- " parameter for conditional flags: '%s'" % (token,))
+ " parameter for conditional flags.")
tokens.extend(x + "?" for x in conditional.enabled if x in parent_use and not x in other_use)
tokens.extend("!" + x + "?" for x in conditional.disabled if x not in parent_use and x in other_use)
tokens.extend(x + "=" for x in conditional.equal if x in parent_use and x not in other_use)
@@ -791,6 +791,18 @@ class ExtendedAtomDict(portage.cache.mappings.MutableMapping):
self._normal = {}
self._value_class = value_class
+ def __iter__(self):
+ for k in self._normal:
+ yield k
+ for k in self._extended:
+ yield k
+
+ if sys.hexversion >= 0x3000000:
+ keys = __iter__
+
+ def __len__(self):
+ return len(self._normal) + len(self._extended)
+
def setdefault(self, cp, default=None):
if "*" in cp:
return self._extended.setdefault(cp, default)
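
The new __iter__ and __len__ methods make ExtendedAtomDict report keys from both of its backing mappings. A simplified stand-in (not the real class) showing why both the normal and the extended '*' wildcard dictionaries have to be chained:

    class _TwoDictView(object):
        # Assumed simplified stand-in for ExtendedAtomDict, only to show why
        # iteration and length chain the normal and extended ('*') mappings.
        def __init__(self, normal, extended):
            self._normal = normal
            self._extended = extended
        def __iter__(self):
            for k in self._normal:
                yield k
            for k in self._extended:
                yield k
        def __len__(self):
            return len(self._normal) + len(self._extended)

    d = _TwoDictView({"dev-libs/glib": 1}, {"x11-libs/*": 2})
    print(sorted(d), len(d))   # ['dev-libs/glib', 'x11-libs/*'] 2
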
diff --git a/pym/portage/dep/dep_check.py b/pym/portage/dep/dep_check.py
index f4a44611..8747bb14 100644
--- a/pym/portage/dep/dep_check.py
+++ b/pym/portage/dep/dep_check.py
@@ -28,6 +28,7 @@ def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/",
newsplit = []
mytrees = trees[myroot]
portdb = mytrees["porttree"].dbapi
+ pkg_use_enabled = mytrees.get("pkg_use_enabled")
atom_graph = mytrees.get("atom_graph")
parent = mytrees.get("parent")
virt_parent = mytrees.get("virt_parent")
@@ -100,7 +101,8 @@ def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/",
atom_graph.add(x, graph_parent)
continue
- if repoman or not hasattr(portdb, 'match_pkgs'):
+ if repoman or not hasattr(portdb, 'match_pkgs') or \
+ pkg_use_enabled is None:
if portdb.cp_list(x.cp):
newsplit.append(x)
else:
@@ -149,7 +151,7 @@ def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/",
# should enforce this.
depstring = pkg.metadata['RDEPEND']
pkg_kwargs = kwargs.copy()
- pkg_kwargs["myuse"] = pkg.use.enabled
+ pkg_kwargs["myuse"] = pkg_use_enabled(pkg)
if edebug:
writemsg_level(_("Virtual Parent: %s\n") \
% (pkg,), noiselevel=-1, level=logging.DEBUG)
diff --git a/pym/portage/package/ebuild/config.py b/pym/portage/package/ebuild/config.py
index 2ffad468..4f5f9413 100644
--- a/pym/portage/package/ebuild/config.py
+++ b/pym/portage/package/ebuild/config.py
@@ -843,12 +843,8 @@ class config(object):
else:
self.configdict["conf"]["ACCEPT_LICENSE"] = " ".join(v)
for k, v in licdict.items():
- cp = k.cp
- cp_dict = self._plicensedict.get(cp)
- if not cp_dict:
- cp_dict = {}
- self._plicensedict[cp] = cp_dict
- cp_dict[k] = self.expandLicenseTokens(v)
+ self._plicensedict.setdefault(k.cp, {})[k] = \
+ self.expandLicenseTokens(v)
#package.properties
propdict = grabdict_package(os.path.join(
@@ -860,12 +856,7 @@ class config(object):
else:
self.configdict["conf"]["ACCEPT_PROPERTIES"] = " ".join(v)
for k, v in propdict.items():
- cp = k.cp
- cp_dict = self._ppropertiesdict.get(cp)
- if not cp_dict:
- cp_dict = {}
- self._ppropertiesdict[cp] = cp_dict
- cp_dict[k] = v
+ self._ppropertiesdict.setdefault(k.cp, {})[k] = v
#getting categories from an external file now
categories = [grabfile(os.path.join(x, "categories")) for x in locations]
@@ -1425,22 +1416,24 @@ class config(object):
has_changed = True
defaults = []
- pos = 0
for i, pkgprofileuse_dict in enumerate(self.pkgprofileuse):
+ if self.make_defaults_use[i]:
+ defaults.append(self.make_defaults_use[i])
cpdict = pkgprofileuse_dict.get(cp)
if cpdict:
+ pkg_defaults = []
keys = list(cpdict)
while keys:
bestmatch = best_match_to_list(cpv_slot, keys)
if bestmatch:
keys.remove(bestmatch)
- defaults.insert(pos, cpdict[bestmatch])
+ pkg_defaults.append(cpdict[bestmatch])
else:
break
- del keys
- if self.make_defaults_use[i]:
- defaults.insert(pos, self.make_defaults_use[i])
- pos = len(defaults)
+ if pkg_defaults:
+ # reverse, so the most specific atoms come last
+ pkg_defaults.reverse()
+ defaults.extend(pkg_defaults)
defaults = " ".join(defaults)
if defaults != self.configdict["defaults"].get("USE",""):
self.configdict["defaults"]["USE"] = defaults
@@ -1614,22 +1607,24 @@ class config(object):
if cp is None:
cp = cpv_getkey(remove_slot(pkg))
usemask = []
- pos = 0
for i, pusemask_dict in enumerate(self.pusemask_list):
+ if self.usemask_list[i]:
+ usemask.append(self.usemask_list[i])
cpdict = pusemask_dict.get(cp)
if cpdict:
+ pkg_usemask = []
keys = list(cpdict)
while keys:
best_match = best_match_to_list(pkg, keys)
if best_match:
keys.remove(best_match)
- usemask.insert(pos, cpdict[best_match])
+ pkg_usemask.append(cpdict[best_match])
else:
break
- del keys
- if self.usemask_list[i]:
- usemask.insert(pos, self.usemask_list[i])
- pos = len(usemask)
+ if pkg_usemask:
+ # reverse, so the most specific atoms come last
+ pkg_usemask.reverse()
+ usemask.extend(pkg_usemask)
return set(stack_lists(usemask, incremental=True))
def _getUseForce(self, pkg):
@@ -1637,22 +1632,24 @@ class config(object):
if cp is None:
cp = cpv_getkey(remove_slot(pkg))
useforce = []
- pos = 0
for i, puseforce_dict in enumerate(self.puseforce_list):
+ if self.useforce_list[i]:
+ useforce.append(self.useforce_list[i])
cpdict = puseforce_dict.get(cp)
if cpdict:
+ pkg_useforce = []
keys = list(cpdict)
while keys:
best_match = best_match_to_list(pkg, keys)
if best_match:
keys.remove(best_match)
- useforce.insert(pos, cpdict[best_match])
+ pkg_useforce.append(cpdict[best_match])
else:
break
- del keys
- if self.useforce_list[i]:
- useforce.insert(pos, self.useforce_list[i])
- pos = len(useforce)
+ if pkg_useforce:
+ # reverse, so the most specific atoms come last
+ pkg_useforce.reverse()
+ useforce.extend(pkg_useforce)
return set(stack_lists(useforce, incremental=True))
def _getMaskAtom(self, cpv, metadata):
@@ -1721,19 +1718,22 @@ class config(object):
if 'repository' in metadata:
pkg = "".join((pkg, _repo_separator, metadata['repository']))
keywords = [[x for x in metadata["KEYWORDS"].split() if x != "-*"]]
- pos = len(keywords)
for pkeywords_dict in self._pkeywords_list:
cpdict = pkeywords_dict.get(cp)
if cpdict:
+ pkg_keywords = []
keys = list(cpdict)
while keys:
best_match = best_match_to_list(pkg, keys)
if best_match:
keys.remove(best_match)
- keywords.insert(pos, cpdict[best_match])
+ pkg_keywords.append(cpdict[best_match])
else:
break
- pos = len(keywords)
+ if pkg_keywords:
+ # reverse, so the most specific atoms come last
+ pkg_keywords.reverse()
+ keywords.extend(pkg_keywords)
return stack_lists(keywords, incremental=True)
def _getMissingKeywords(self, cpv, metadata):
@@ -1765,14 +1765,24 @@ class config(object):
pkgdict = self.pkeywordsdict.get(cp)
matches = False
if pkgdict:
- pkg = "".join((cpv, _slot_separator, metadata["SLOT"]))
+ cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
if 'repository' in metadata:
- pkg = "".join((pkg, _repo_separator, metadata['repository']))
- cpv_slot_list = [pkg]
- for atom, pkgkeywords in pkgdict.items():
- if match_from_list(atom, cpv_slot_list):
- matches = True
- pgroups.extend(pkgkeywords)
+ cpv_slot = "".join((cpv_slot, _repo_separator, metadata['repository']))
+ pkg_accept_keywords = []
+ keys = list(pkgdict)
+ while keys:
+ best_match = best_match_to_list(cpv_slot, keys)
+ if best_match:
+ keys.remove(best_match)
+ pkg_accept_keywords.append(pkgdict[best_match])
+ else:
+ break
+ if pkg_accept_keywords:
+ # reverse, so the most specific atoms come last
+ pkg_accept_keywords.reverse()
+ for x in pkg_accept_keywords:
+ pgroups.extend(x)
+ matches = True
if matches or egroups:
pgroups.extend(egroups)
inc_pgroups = set()
@@ -1834,12 +1844,24 @@ class config(object):
cp = cpv_getkey(cpv)
cpdict = self._plicensedict.get(cp)
if cpdict:
- accept_license = list(self._accept_license)
- cpv_slot = "".join((cpv, _slot_separator, metadata["SLOT"]))
+ cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
if 'repository' in metadata:
cpv_slot = "".join((cpv_slot, _repo_separator, metadata['repository']))
- for atom in match_to_list(cpv_slot, list(cpdict)):
- accept_license.extend(cpdict[atom])
+ keys = list(cpdict)
+ plicence_list = []
+ while keys:
+ bestmatch = best_match_to_list(cpv_slot, keys)
+ if bestmatch:
+ keys.remove(bestmatch)
+ plicence_list.append(cpdict[bestmatch])
+ else:
+ break
+ if plicence_list:
+ # reverse, so the most specific atoms come last
+ plicence_list.reverse()
+ accept_license = list(self._accept_license)
+ for x in plicence_list:
+ accept_license.extend(x)
licenses = set(flatten(use_reduce(paren_reduce(
metadata["LICENSE"]), matchall=1)))
@@ -1916,12 +1938,24 @@ class config(object):
cp = cpv_getkey(cpv)
cpdict = self._ppropertiesdict.get(cp)
if cpdict:
- accept_properties = list(self._accept_properties)
- cpv_slot = "".join((cpv, _slot_separator, metadata["SLOT"]))
+ cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
if 'repository' in metadata:
cpv_slot = "".join((cpv_slot, _repo_separator, metadata['repository']))
- for atom in match_to_list(cpv_slot, list(cpdict)):
- accept_properties.extend(cpdict[atom])
+ keys = list(cpdict)
+ pproperties_list = []
+ while keys:
+ bestmatch = best_match_to_list(cpv_slot, keys)
+ if bestmatch:
+ keys.remove(bestmatch)
+ pproperties_list.append(cpdict[bestmatch])
+ else:
+ break
+ if pproperties_list:
+ # reverse, so the most specific atoms come last
+ pproperties_list.reverse()
+ accept_properties = list(self._accept_properties)
+ for x in pproperties_list:
+ accept_properties.extend(x)
properties = set(flatten(use_reduce(paren_reduce(
metadata["PROPERTIES"]), matchall=1)))
@@ -2228,6 +2262,9 @@ class config(object):
# For optimal performance, use slice
# comparison instead of startswith().
+ iuse = self.configdict["pkg"].get("IUSE")
+ if iuse is not None:
+ iuse = [x.lstrip("+-") for x in iuse.split()]
myflags = set()
for curdb in self.uvlist:
cur_use_expand = [x for x in use_expand if x in curdb]
@@ -2247,10 +2284,33 @@ class config(object):
continue
if x[0] == "-":
+ if x[-2:] == '_*':
+ prefix = x[1:-1]
+ prefix_len = len(prefix)
+ myflags.difference_update(
+ [y for y in myflags if \
+ y[:prefix_len] == prefix])
myflags.discard(x[1:])
continue
- myflags.add(x)
+ if iuse is not None and x[-2:] == '_*':
+ # Expand wildcards here, so that cases like
+ # USE="linguas_* -linguas_en_US" work correctly.
+ prefix = x[:-1]
+ prefix_len = len(prefix)
+ has_iuse = False
+ for y in iuse:
+ if y[:prefix_len] == prefix:
+ has_iuse = True
+ myflags.add(y)
+ if not has_iuse:
+ # There are no matching IUSE, so allow the
+ # wildcard to pass through. This allows
+ # linguas_* to trigger unset LINGUAS in
+ # cases when no linguas_ flags are in IUSE.
+ myflags.add(x)
+ else:
+ myflags.add(x)
for var in cur_use_expand:
var_lower = var.lower()
diff --git a/pym/portage/package/ebuild/doebuild.py b/pym/portage/package/ebuild/doebuild.py
index 78d1360c..afffd45d 100644
--- a/pym/portage/package/ebuild/doebuild.py
+++ b/pym/portage/package/ebuild/doebuild.py
@@ -1268,7 +1268,7 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero
buf = array.array('B')
try:
buf.fromfile(f, buffsize)
- except EOFError:
+ except (EOFError, IOError):
pass
if not buf:
eof = True
diff --git a/pym/portage/package/ebuild/getmaskingstatus.py b/pym/portage/package/ebuild/getmaskingstatus.py
index 8fbbff02..5e5a13f1 100644
--- a/pym/portage/package/ebuild/getmaskingstatus.py
+++ b/pym/portage/package/ebuild/getmaskingstatus.py
@@ -15,12 +15,26 @@ from portage.versions import catpkgsplit, cpv_getkey
if sys.hexversion >= 0x3000000:
basestring = str
+class _MaskReason(object):
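+	"""
+	Records why a package is masked: a category (e.g. "KEYWORDS"),
+	a human readable message and an optional hint.
+	"""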
+
+ __slots__ = ('category', 'message', 'hint')
+
+ def __init__(self, category, message, hint=None):
+ self.category = category
+ self.message = message
+ self.hint = hint
+
def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
if settings is None:
settings = config(clone=portage.settings)
if portdb is None:
portdb = portage.portdb
+ return [mreason.message for \
+		mreason in _getmaskingstatus(mycpv, settings, portdb, myrepo)]
+
+def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
+
metadata = None
installed = False
if not isinstance(mycpv, basestring):
@@ -40,7 +54,7 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
except KeyError:
if not portdb.cpv_exists(mycpv):
raise
- return ["corruption"]
+ return [_MaskReason("corruption", "corruption")]
if "?" in metadata["LICENSE"]:
settings.setcpv(mycpv, mydb=metadata)
metadata["USE"] = settings["PORTAGE_USE"]
@@ -51,11 +65,11 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
# profile checking
if settings._getProfileMaskAtom(mycpv, metadata):
- rValue.append("profile")
+ rValue.append(_MaskReason("profile", "profile"))
# package.mask checking
if settings._getMaskAtom(mycpv, metadata):
- rValue.append("package.mask")
+ rValue.append(_MaskReason("package.mask", "package.mask"))
# keywords checking
eapi = metadata["EAPI"]
@@ -65,9 +79,9 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
if eapi.startswith("-"):
eapi = eapi[1:]
if not eapi_is_supported(eapi):
- return ["EAPI %s" % eapi]
+ return [_MaskReason("EAPI", "EAPI %s" % eapi)]
elif _eapi_is_deprecated(eapi) and not installed:
- return ["EAPI %s" % eapi]
+ return [_MaskReason("EAPI", "EAPI %s" % eapi)]
egroups = settings.configdict["backupenv"].get(
"ACCEPT_KEYWORDS", "").split()
pgroups = settings["ACCEPT_KEYWORDS"].split()
@@ -104,6 +118,7 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
del inc_pgroups
kmask = "missing"
+ kmask_hint = None
if '**' in pgroups:
kmask = None
@@ -123,6 +138,7 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
break
elif gp=="~"+myarch and myarch in pgroups:
kmask="~"+myarch
+ kmask_hint = "unstable keyword"
break
try:
@@ -135,9 +151,9 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
if x in allowed_tokens]
msg = license_split[:]
msg.append("license(s)")
- rValue.append(" ".join(msg))
+ rValue.append(_MaskReason("LICENSE", " ".join(msg)))
except portage.exception.InvalidDependString as e:
- rValue.append("LICENSE: "+str(e))
+ rValue.append(_MaskReason("invalid", "LICENSE: "+str(e)))
try:
missing_properties = settings._getMissingProperties(mycpv, metadata)
@@ -149,13 +165,14 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
if x in allowed_tokens]
msg = properties_split[:]
msg.append("properties")
- rValue.append(" ".join(msg))
+ rValue.append(_MaskReason("PROPERTIES", " ".join(msg)))
except portage.exception.InvalidDependString as e:
- rValue.append("PROPERTIES: "+str(e))
+ rValue.append(_MaskReason("invalid", "PROPERTIES: "+str(e)))
# Only show KEYWORDS masks for installed packages
# if they're not masked for any other reason.
if kmask and (not installed or not rValue):
- rValue.append(kmask+" keyword")
+ rValue.append(_MaskReason("KEYWORDS",
+ kmask + " keyword", hint=kmask_hint))
return rValue
diff --git a/pym/portage/tests/__init__.py b/pym/portage/tests/__init__.py
index 393ecf78..bd41f1ee 100644
--- a/pym/portage/tests/__init__.py
+++ b/pym/portage/tests/__init__.py
@@ -26,6 +26,10 @@ def main():
basedir = os.path.dirname(os.path.realpath(__file__))
testDirs = []
+ if len(sys.argv) > 1:
+ suite.addTests(getTestFromCommandLine(sys.argv[1:], basedir))
+ return TextTestRunner(verbosity=2).run(suite)
+
# the os.walk help mentions relative paths as being quirky
# I was tired of adding dirs to the list, so now we add __test__
# to each dir we want tested.
@@ -52,6 +56,29 @@ def my_import(name):
mod = getattr(mod, comp)
return mod
+def getTestFromCommandLine(args, base_path):
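+	# Run only the test modules that were named on the command line,
+	# e.g. ./runTests pym/portage/tests/dep/testAtom.py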
+ ret = []
+ for arg in args:
+ realpath = os.path.realpath(arg)
+ path = os.path.dirname(realpath)
+ f = realpath[len(path)+1:]
+
+ if not f.startswith("test") or not f.endswith(".py"):
+ raise Exception("Invalid argument: '%s'" % arg)
+
+ mymodule = f[:-3]
+
+		parent_path = path[len(base_path)+1:]
+
+		# Convert the path separators to dots for module importing.
+		parent_module = ".".join(("portage", "tests", parent_path))
+		parent_module = parent_module.replace('/', '.')
+
+ modname = ".".join((parent_module, mymodule))
+ mod = my_import(modname)
+ ret.append(unittest.TestLoader().loadTestsFromModule(mod))
+ return ret
+
def getTests(path, base_path):
"""
diff --git a/pym/portage/tests/dep/testAtom.py b/pym/portage/tests/dep/testAtom.py
index 9345711f..14a4e0f6 100644
--- a/pym/portage/tests/dep/testAtom.py
+++ b/pym/portage/tests/dep/testAtom.py
@@ -96,3 +96,53 @@ class TestAtom(TestCase):
for atom, allow_wildcard in tests_xfail:
self.assertRaisesMsg(atom, portage.exception.InvalidAtom, Atom, atom)
+
+ def test_violated_conditionals(self):
+ test_cases = (
+ ("dev-libs/A", ["foo"], None, "dev-libs/A"),
+ ("dev-libs/A[foo]", [], None, "dev-libs/A[foo]"),
+ ("dev-libs/A[foo]", ["foo"], None, "dev-libs/A"),
+ ("dev-libs/A[foo]", [], [], "dev-libs/A[foo]"),
+ ("dev-libs/A[foo]", ["foo"], [], "dev-libs/A"),
+
+ ("dev-libs/A:0[foo]", ["foo"], [], "dev-libs/A:0"),
+
+ ("dev-libs/A[foo,-bar]", [], None, "dev-libs/A[foo]"),
+ ("dev-libs/A[-foo,bar]", [], None, "dev-libs/A[bar]"),
+
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], [], "dev-libs/A[a,!c=]"),
+
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["a"], [], "dev-libs/A[!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["b"], [], "dev-libs/A[a,b=,!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["c"], [], "dev-libs/A[a]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["d"], [], "dev-libs/A[a,!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["e"], [], "dev-libs/A[a,!e?,!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["f"], [], "dev-libs/A[a,-f,!c=]"),
+
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["a"], ["a"], "dev-libs/A[!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["b"], ["b"], "dev-libs/A[a,!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["c"], ["c"], "dev-libs/A[a,!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["d"], ["d"], "dev-libs/A[a,!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["e"], ["e"], "dev-libs/A[a,!c=]"),
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["f"], ["f"], "dev-libs/A[a,-f,!c=]"),
+ )
+
+ test_cases_xfail = (
+ ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], None),
+ )
+
+ for atom, other_use, parent_use, expected_violated_atom in test_cases:
+ a = Atom(atom)
+ violated_atom = a.violated_conditionals(other_use, parent_use)
+ if parent_use is None:
+ fail_msg = "Atom: %s, other_use: %s, parent_use: %s, got: %s, expected: %s" % \
+ (atom, " ".join(other_use), "None", str(violated_atom), expected_violated_atom)
+ else:
+ fail_msg = "Atom: %s, other_use: %s, parent_use: %s, got: %s, expected: %s" % \
+ (atom, " ".join(other_use), " ".join(parent_use), str(violated_atom), expected_violated_atom)
+ self.assertEqual(str(violated_atom), expected_violated_atom, fail_msg)
+
+ for atom, other_use, parent_use in test_cases_xfail:
+ a = Atom(atom)
+ self.assertRaisesMsg(atom, portage.exception.InvalidAtom, \
+ a.violated_conditionals, other_use, parent_use)
diff --git a/pym/portage/tests/ebuild/test_array_fromfile_eof.py b/pym/portage/tests/ebuild/test_array_fromfile_eof.py
index 3f2a6c7c..d8277f27 100644
--- a/pym/portage/tests/ebuild/test_array_fromfile_eof.py
+++ b/pym/portage/tests/ebuild/test_array_fromfile_eof.py
@@ -28,7 +28,7 @@ class ArrayFromfileEofTestCase(TestCase):
a = array.array('B')
try:
a.fromfile(f, len(input_bytes) + 1)
- except EOFError:
+ except (EOFError, IOError):
# python-3.0 lost data here
eof = True
diff --git a/pym/portage/tests/ebuild/test_spawn.py b/pym/portage/tests/ebuild/test_spawn.py
index f37a0c5a..bb9fb262 100644
--- a/pym/portage/tests/ebuild/test_spawn.py
+++ b/pym/portage/tests/ebuild/test_spawn.py
@@ -4,16 +4,19 @@
import codecs
import errno
import sys
+from tempfile import mkstemp
from portage import os
from portage import _encodings
from portage import _unicode_encode
+from portage import spawn
from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
class SpawnTestCase(TestCase):
def testLogfile(self):
- from portage import settings, spawn
- from tempfile import mkstemp
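+		# Use a throw-away ResolverPlayground configuration so that the
+		# test does not rely on the host's global portage settings.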
+ playground = ResolverPlayground()
+ settings = playground.settings
logfile = None
try:
fd, logfile = mkstemp()
@@ -42,6 +45,7 @@ class SpawnTestCase(TestCase):
# may occur.
self.assertEqual(test_string, log_content)
finally:
+ playground.cleanup()
if logfile:
try:
os.unlink(logfile)
diff --git a/pym/portage/tests/lazyimport/__init__.py b/pym/portage/tests/lazyimport/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/pym/portage/tests/lazyimport/__init__.py
diff --git a/pym/portage/tests/lazyimport/__test__ b/pym/portage/tests/lazyimport/__test__
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/pym/portage/tests/lazyimport/__test__
diff --git a/pym/portage/tests/lazyimport/test_preload_portage_submodules.py b/pym/portage/tests/lazyimport/test_preload_portage_submodules.py
new file mode 100644
index 00000000..9d20ebac
--- /dev/null
+++ b/pym/portage/tests/lazyimport/test_preload_portage_submodules.py
@@ -0,0 +1,16 @@
+# Copyright 2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import portage
+from portage.tests import TestCase
+
+class PreloadPortageSubmodulesTestCase(TestCase):
+
+ def testPreloadPortageSubmodules(self):
+ """
+ Verify that _preload_portage_submodules() doesn't leave any
+ remaining proxies that refer to the portage.* namespace.
+ """
+ portage.proxy.lazyimport._preload_portage_submodules()
+ for name in portage.proxy.lazyimport._module_proxies:
+ self.assertEqual(name.startswith('portage.'), False)
diff --git a/pym/portage/tests/resolver/ResolverPlayground.py b/pym/portage/tests/resolver/ResolverPlayground.py
new file mode 100644
index 00000000..b4ff4180
--- /dev/null
+++ b/pym/portage/tests/resolver/ResolverPlayground.py
@@ -0,0 +1,227 @@
+# Copyright 2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from itertools import chain
+import shutil
+import tempfile
+import portage
+from portage import os
+from portage.dbapi.vartree import vartree
+from portage.dbapi.porttree import portagetree
+from portage.dbapi.bintree import binarytree
+from portage.dep import Atom
+from portage.package.ebuild.config import config
+from portage.sets import load_default_config
+from portage.versions import catsplit
+
+from _emerge.Blocker import Blocker
+from _emerge.create_depgraph_params import create_depgraph_params
+from _emerge.depgraph import backtrack_depgraph
+from _emerge.RootConfig import RootConfig
+
+class ResolverPlayground(object):
+ """
+	This class helps to create the necessary files on disk and
+	the needed settings instances, etc. for the resolver to do
+	its work.
+ """
+
+ def __init__(self, ebuilds={}, installed={}, profile={}):
+ """
+		ebuilds: cpv -> metadata mapping simulating available ebuilds.
+ installed: cpv -> metadata mapping simulating installed packages.
+ If a metadata key is missing, it gets a default value.
+ profile: settings defined by the profile.
+ """
+ self.root = tempfile.mkdtemp() + os.path.sep
+ self.portdir = os.path.join(self.root, "usr/portage")
+ self.vdbdir = os.path.join(self.root, "var/db/pkg")
+ os.makedirs(self.portdir)
+ os.makedirs(self.vdbdir)
+
+ self._create_ebuilds(ebuilds)
+ self._create_installed(installed)
+ self._create_profile(ebuilds, installed, profile)
+
+ self.settings, self.trees = self._load_config()
+
+ self._create_ebuild_manifests(ebuilds)
+
+ def _create_ebuilds(self, ebuilds):
+ for cpv in ebuilds:
+ a = Atom("=" + cpv)
+ ebuild_dir = os.path.join(self.portdir, a.cp)
+ ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild")
+ try:
+ os.makedirs(ebuild_dir)
+ except os.error:
+ pass
+
+ metadata = ebuilds[cpv]
+ eapi = metadata.get("EAPI", 0)
+ slot = metadata.get("SLOT", 0)
+ keywords = metadata.get("KEYWORDS", "x86")
+ iuse = metadata.get("IUSE", "")
+ depend = metadata.get("DEPEND", "")
+ rdepend = metadata.get("RDEPEND", None)
+ pdepend = metadata.get("PDEPEND", None)
+
+ f = open(ebuild_path, "w")
+ f.write('EAPI="' + str(eapi) + '"\n')
+ f.write('SLOT="' + str(slot) + '"\n')
+ f.write('KEYWORDS="' + str(keywords) + '"\n')
+ f.write('IUSE="' + str(iuse) + '"\n')
+ f.write('DEPEND="' + str(depend) + '"\n')
+ if rdepend is not None:
+ f.write('RDEPEND="' + str(rdepend) + '"\n')
+			if pdepend is not None:
+ f.write('PDEPEND="' + str(pdepend) + '"\n')
+ f.close()
+
+ def _create_ebuild_manifests(self, ebuilds):
+ for cpv in ebuilds:
+ a = Atom("=" + cpv)
+ ebuild_dir = os.path.join(self.portdir, a.cp)
+ ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild")
+
+ portage.util.noiselimit = -1
+ tmpsettings = config(clone=self.settings)
+ portdb = self.trees[self.root]["porttree"].dbapi
+ portage.doebuild(ebuild_path, "digest", self.root, tmpsettings,
+ tree="porttree", mydbapi=portdb)
+ portage.util.noiselimit = 0
+
+ def _create_installed(self, installed):
+ for cpv in installed:
+ a = Atom("=" + cpv)
+ vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
+ try:
+ os.makedirs(vdb_pkg_dir)
+ except os.error:
+ pass
+
+ metadata = installed[cpv]
+ eapi = metadata.get("EAPI", 0)
+ slot = metadata.get("SLOT", 0)
+ keywords = metadata.get("KEYWORDS", "~x86")
+ iuse = metadata.get("IUSE", "")
+ use = metadata.get("USE", "")
+ depend = metadata.get("DEPEND", "")
+ rdepend = metadata.get("RDEPEND", None)
+ pdepend = metadata.get("PDEPEND", None)
+
+ def write_key(key, value):
+ f = open(os.path.join(vdb_pkg_dir, key), "w")
+ f.write(str(value) + "\n")
+ f.close()
+
+ write_key("EAPI", eapi)
+ write_key("SLOT", slot)
+ write_key("KEYWORDS", keywords)
+ write_key("IUSE", iuse)
+ write_key("USE", use)
+ write_key("DEPEND", depend)
+ if rdepend is not None:
+ write_key("RDEPEND", rdepend)
+			if pdepend is not None:
+ write_key("PDEPEND", pdepend)
+
+ def _create_profile(self, ebuilds, installed, profile):
+ #Create $PORTDIR/profiles/categories
+ categories = set()
+ for cpv in chain(ebuilds.keys(), installed.keys()):
+ categories.add(catsplit(cpv)[0])
+
+ profile_dir = os.path.join(self.portdir, "profiles")
+ try:
+ os.makedirs(profile_dir)
+ except os.error:
+ pass
+
+ categories_file = os.path.join(profile_dir, "categories")
+
+ f = open(categories_file, "w")
+ for cat in categories:
+ f.write(cat + "\n")
+ f.close()
+
+ #Create $PORTDIR/eclass (we fail to digest the ebuilds if it's not there)
+ os.makedirs(os.path.join(self.portdir, "eclass"))
+
+ if profile:
+			#This is meant to allow the consumer to set up their own
+			#profile, with package.mask and so on.
+			raise NotImplementedError()
+
+ def _load_config(self):
+ env = { "PORTDIR": self.portdir, "ROOT": self.root, "ACCEPT_KEYWORDS": "x86"}
+ settings = config(config_root=self.root, target_root=self.root, local_config=False, env=env)
+ settings.lock()
+
+ trees = {
+ self.root: {
+ "virtuals": settings.getvirtuals(),
+ "vartree": vartree(self.root, categories=settings.categories, settings=settings),
+ "porttree": portagetree(self.root, settings=settings),
+ "bintree": binarytree(self.root, os.path.join(self.root, "usr/portage/packages"), settings=settings)
+ }
+ }
+
+ for root, root_trees in trees.items():
+ settings = root_trees["vartree"].settings
+ settings._init_dirs()
+ setconfig = load_default_config(settings, root_trees)
+ root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
+
+ return settings, trees
+
+	def run(self, myfiles, myopts=None, myaction=None):
+		# Copy so that the caller's options dict is not modified.
+		myopts = dict(myopts or {})
+		myopts["--pretend"] = True
+ myopts["--quiet"] = True
+ myopts["--root"] = self.root
+ myopts["--config-root"] = self.root
+ myopts["--root-deps"] = "rdeps"
+ # Add a fake _test_ option that can be used for
+ # conditional test code.
+ myopts["_test_"] = True
+
+ portage.util.noiselimit = -2
+ myparams = create_depgraph_params(myopts, myaction)
+ success, mydepgraph, favorites = backtrack_depgraph(
+ self.settings, self.trees, myopts, myparams, myaction, myfiles, None)
+ result = ResolverPlaygroundResult(success, mydepgraph, favorites)
+ portage.util.noiselimit = 0
+
+ return result
+
+ def cleanup(self):
+ shutil.rmtree(self.root)
+
+class ResolverPlaygroundResult(object):
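+	"""
+	Captures the outcome of a depgraph run (success flag, merge list,
+	needed USE changes and unstable keywords) for comparison in tests.
+	"""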
+ def __init__(self, success, mydepgraph, favorites):
+ self.success = success
+ self.depgraph = mydepgraph
+ self.favorites = favorites
+ self.mergelist = None
+ self.use_changes = None
+ self.unstable_keywords = None
+
+ if self.depgraph._dynamic_config._serialized_tasks_cache is not None:
+ self.mergelist = []
+ for x in self.depgraph._dynamic_config._serialized_tasks_cache:
+ if isinstance(x, Blocker):
+ self.mergelist.append(x.atom)
+ else:
+ self.mergelist.append(x.cpv)
+
+ if self.depgraph._dynamic_config._needed_use_config_changes:
+ self.use_changes = {}
+ for pkg, needed_use_config_changes in \
+ self.depgraph._dynamic_config._needed_use_config_changes.items():
+ new_use, changes = needed_use_config_changes
+ self.use_changes[pkg.cpv] = changes
+
+ if self.depgraph._dynamic_config._needed_unstable_keywords:
+ self.unstable_keywords = set()
+ for pkg in self.depgraph._dynamic_config._needed_unstable_keywords:
+ self.unstable_keywords.add(pkg.cpv)
diff --git a/pym/portage/tests/resolver/__init__.py b/pym/portage/tests/resolver/__init__.py
new file mode 100644
index 00000000..21a391ae
--- /dev/null
+++ b/pym/portage/tests/resolver/__init__.py
@@ -0,0 +1,2 @@
+# Copyright 2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
diff --git a/pym/portage/tests/resolver/__test__ b/pym/portage/tests/resolver/__test__
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/pym/portage/tests/resolver/__test__
diff --git a/pym/portage/tests/resolver/test_autounmask.py b/pym/portage/tests/resolver/test_autounmask.py
new file mode 100644
index 00000000..d528f907
--- /dev/null
+++ b/pym/portage/tests/resolver/test_autounmask.py
@@ -0,0 +1,116 @@
+# Copyright 2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+
+class AutounmaskTestCase(TestCase):
+
+ def testAutounmask(self):
+ ebuilds = {
+ #ebuilds to test use changes
+ "dev-libs/A-1": { "SLOT": 1, "DEPEND": "dev-libs/B[foo]", "EAPI": 2},
+ "dev-libs/A-2": { "SLOT": 2, "DEPEND": "dev-libs/B[bar]", "EAPI": 2},
+ "dev-libs/B-1": { "DEPEND": "foo? ( dev-libs/C ) bar? ( dev-libs/D )", "IUSE": "foo bar"},
+ "dev-libs/C-1": {},
+ "dev-libs/D-1": {},
+
+ #ebuilds to test keyword changes
+ "app-misc/Z-1": { "KEYWORDS": "~x86", "DEPEND": "app-misc/Y" },
+ "app-misc/Y-1": { "KEYWORDS": "~x86" },
+ "app-misc/W-1": {},
+ "app-misc/W-2": { "KEYWORDS": "~x86" },
+ "app-misc/V-1": { "KEYWORDS": "~x86", "DEPEND": ">=app-misc/W-2"},
+
+ #ebuilds for mixed test for || dep handling
+ "sci-libs/K-1": { "DEPEND": " || ( sci-libs/L[bar] || ( sci-libs/M sci-libs/P ) )", "EAPI": 2},
+ "sci-libs/K-2": { "DEPEND": " || ( sci-libs/L[bar] || ( sci-libs/P sci-libs/M ) )", "EAPI": 2},
+ "sci-libs/K-3": { "DEPEND": " || ( sci-libs/M || ( sci-libs/L[bar] sci-libs/P ) )", "EAPI": 2},
+ "sci-libs/K-4": { "DEPEND": " || ( sci-libs/M || ( sci-libs/P sci-libs/L[bar] ) )", "EAPI": 2},
+ "sci-libs/K-5": { "DEPEND": " || ( sci-libs/P || ( sci-libs/L[bar] sci-libs/M ) )", "EAPI": 2},
+ "sci-libs/K-6": { "DEPEND": " || ( sci-libs/P || ( sci-libs/M sci-libs/L[bar] ) )", "EAPI": 2},
+ "sci-libs/K-7": { "DEPEND": " || ( sci-libs/M sci-libs/L[bar] )", "EAPI": 2},
+ "sci-libs/K-8": { "DEPEND": " || ( sci-libs/L[bar] sci-libs/M )", "EAPI": 2},
+
+ "sci-libs/L-1": { "IUSE": "bar" },
+ "sci-libs/M-1": { "KEYWORDS": "~x86" },
+ "sci-libs/P-1": { },
+ }
+
+ requests = (
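+			# Each request is a tuple of: atoms, options, action,
+			# expected success, expected merge list, expected USE
+			# changes and expected unstable keywords.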
+ #Test USE changes.
+ #The simple case.
+
+ (["dev-libs/A:1"], {"--autounmask": "n"}, None, False, None, None, None),
+ (["dev-libs/A:1"], {"--autounmask": True}, None, False, \
+ ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"], { "dev-libs/B-1": {"foo": True} }, None),
+
+ #Make sure we restart if needed.
+ (["dev-libs/B", "dev-libs/A:1"], {"--autounmask": True}, None, False, \
+ ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"], { "dev-libs/B-1": {"foo": True} }, None),
+ (["dev-libs/A:1", "dev-libs/B"], {"--autounmask": True}, None, False, \
+ ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"], { "dev-libs/B-1": {"foo": True} }, None),
+ (["dev-libs/A:1", "dev-libs/A:2"], {"--autounmask": True}, None, False, \
+ ["dev-libs/D-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", "dev-libs/A-2"], \
+ { "dev-libs/B-1": {"foo": True, "bar": True} }, None),
+ (["dev-libs/B", "dev-libs/A:1", "dev-libs/A:2"], {"--autounmask": True}, None, False, \
+ ["dev-libs/D-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", "dev-libs/A-2"], \
+ { "dev-libs/B-1": {"foo": True, "bar": True} }, None),
+ (["dev-libs/A:1", "dev-libs/B", "dev-libs/A:2"], {"--autounmask": True}, None, False, \
+ ["dev-libs/D-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", "dev-libs/A-2"], \
+ { "dev-libs/B-1": {"foo": True, "bar": True} }, None),
+ (["dev-libs/A:1", "dev-libs/A:2", "dev-libs/B"], {"--autounmask": True}, None, False, \
+ ["dev-libs/D-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", "dev-libs/A-2"], \
+ { "dev-libs/B-1": {"foo": True, "bar": True} }, None),
+
+ #Test keywording.
+ #The simple case.
+
+ (["app-misc/Z"], {"--autounmask": "n"}, None, False, None, None, None),
+ (["app-misc/Z"], {"--autounmask": True}, None, False, \
+ ["app-misc/Y-1", "app-misc/Z-1"], None, ["app-misc/Y-1", "app-misc/Z-1"]),
+
+ #Make sure that the backtracking for slot conflicts handles our mess.
+
+ (["=app-misc/V-1", "app-misc/W"], {"--autounmask": True}, None, False, \
+ ["app-misc/W-2", "app-misc/V-1"], None, ["app-misc/W-2", "app-misc/V-1"]),
+ (["app-misc/W", "=app-misc/V-1"], {"--autounmask": True}, None, False, \
+ ["app-misc/W-2", "app-misc/V-1"], None, ["app-misc/W-2", "app-misc/V-1"]),
+
+ #Mixed testing
+ #Make sure we don't change use for something in a || dep if there is another choice
+ #that needs no change.
+
+ (["=sci-libs/K-1"], {"--autounmask": True}, None, True, \
+ ["sci-libs/P-1", "sci-libs/K-1"], None, None),
+ (["=sci-libs/K-2"], {"--autounmask": True}, None, True, \
+ ["sci-libs/P-1", "sci-libs/K-2"], None, None),
+ (["=sci-libs/K-3"], {"--autounmask": True}, None, True, \
+ ["sci-libs/P-1", "sci-libs/K-3"], None, None),
+ (["=sci-libs/K-4"], {"--autounmask": True}, None, True, \
+ ["sci-libs/P-1", "sci-libs/K-4"], None, None),
+ (["=sci-libs/K-5"], {"--autounmask": True}, None, True, \
+ ["sci-libs/P-1", "sci-libs/K-5"], None, None),
+ (["=sci-libs/K-6"], {"--autounmask": True}, None, True, \
+ ["sci-libs/P-1", "sci-libs/K-6"], None, None),
+
+ #Make sure we prefer use changes over keyword changes.
+ (["=sci-libs/K-7"], {"--autounmask": True}, None, False, \
+ ["sci-libs/L-1", "sci-libs/K-7"], { "sci-libs/L-1": { "bar": True } }, None),
+ (["=sci-libs/K-8"], {"--autounmask": True}, None, False, \
+ ["sci-libs/L-1", "sci-libs/K-8"], { "sci-libs/L-1": { "bar": True } }, None),
+ )
+
+ playground = ResolverPlayground(ebuilds=ebuilds)
+ try:
+ for atoms, options, action, expected_result, expected_mergelist, \
+ expected_use_changes, expected_unstable_keywords in requests:
+ result = playground.run(atoms, options, action)
+ if expected_unstable_keywords is not None:
+ expected_unstable_keywords = set(expected_unstable_keywords)
+ self.assertEqual(
+ (result.success, result.mergelist, result.use_changes, result.unstable_keywords),
+ (expected_result, expected_mergelist, expected_use_changes, expected_unstable_keywords)
+ )
+ finally:
+ playground.cleanup()
diff --git a/pym/portage/tests/resolver/test_eapi.py b/pym/portage/tests/resolver/test_eapi.py
new file mode 100644
index 00000000..d9f34f43
--- /dev/null
+++ b/pym/portage/tests/resolver/test_eapi.py
@@ -0,0 +1,102 @@
+# Copyright 2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+
+class EAPITestCase(TestCase):
+
+ def testEAPI(self):
+ ebuilds = {
+ #EAPI-1: IUSE-defaults
+ "dev-libs/A-1.0": { "EAPI": 0, "IUSE": "+foo" },
+ "dev-libs/A-1.1": { "EAPI": 1, "IUSE": "+foo" },
+ "dev-libs/A-1.2": { "EAPI": 2, "IUSE": "+foo" },
+ "dev-libs/A-1.3": { "EAPI": 3, "IUSE": "+foo" },
+ #~ "dev-libs/A-1.4": { "EAPI": 4, "IUSE": "+foo" },
+
+ #EAPI-1: slot deps
+ "dev-libs/A-2.0": { "EAPI": 0, "DEPEND": "dev-libs/B:0" },
+ "dev-libs/A-2.1": { "EAPI": 1, "DEPEND": "dev-libs/B:0" },
+ "dev-libs/A-2.2": { "EAPI": 2, "DEPEND": "dev-libs/B:0" },
+ "dev-libs/A-2.3": { "EAPI": 3, "DEPEND": "dev-libs/B:0" },
+ #~ "dev-libs/A-2.4": { "EAPI": 4, "DEPEND": "dev-libs/B:0" },
+
+ #EAPI-2: use deps
+ "dev-libs/A-3.0": { "EAPI": 0, "DEPEND": "dev-libs/B[foo]" },
+ "dev-libs/A-3.1": { "EAPI": 1, "DEPEND": "dev-libs/B[foo]" },
+ "dev-libs/A-3.2": { "EAPI": 2, "DEPEND": "dev-libs/B[foo]" },
+ "dev-libs/A-3.3": { "EAPI": 3, "DEPEND": "dev-libs/B[foo]" },
+ #~ "dev-libs/A-3.4": { "EAPI": 4, "DEPEND": "dev-libs/B[foo]" },
+
+ #EAPI-2: strong blocks
+ "dev-libs/A-4.0": { "EAPI": 0, "DEPEND": "!!dev-libs/B" },
+ "dev-libs/A-4.1": { "EAPI": 1, "DEPEND": "!!dev-libs/B" },
+ "dev-libs/A-4.2": { "EAPI": 2, "DEPEND": "!!dev-libs/B" },
+ "dev-libs/A-4.3": { "EAPI": 3, "DEPEND": "!!dev-libs/B" },
+ #~ "dev-libs/A-4.4": { "EAPI": 4, "DEPEND": "!!dev-libs/B" },
+
+ #EAPI-4: slot operator deps
+ #~ "dev-libs/A-5.0": { "EAPI": 0, "DEPEND": "dev-libs/B:*" },
+ #~ "dev-libs/A-5.1": { "EAPI": 1, "DEPEND": "dev-libs/B:*" },
+ #~ "dev-libs/A-5.2": { "EAPI": 2, "DEPEND": "dev-libs/B:*" },
+ #~ "dev-libs/A-5.3": { "EAPI": 3, "DEPEND": "dev-libs/B:*" },
+ #~ "dev-libs/A-5.4": { "EAPI": 4, "DEPEND": "dev-libs/B:*" },
+
+			#EAPI-4: use dep defaults
+ #~ "dev-libs/A-6.0": { "EAPI": 0, "DEPEND": "dev-libs/B[bar(+)]" },
+ #~ "dev-libs/A-6.1": { "EAPI": 1, "DEPEND": "dev-libs/B[bar(+)]" },
+ #~ "dev-libs/A-6.2": { "EAPI": 2, "DEPEND": "dev-libs/B[bar(+)]" },
+ #~ "dev-libs/A-6.3": { "EAPI": 3, "DEPEND": "dev-libs/B[bar(+)]" },
+ #~ "dev-libs/A-6.4": { "EAPI": 4, "DEPEND": "dev-libs/B[bar(+)]" },
+
+ "dev-libs/B-1": {"EAPI": 1, "IUSE": "+foo"},
+ }
+
+ requests = (
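+			# Each request: atoms, options, action, expected success
+			# and expected merge list.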
+ #~ (["=dev-libs/A-1.0"], {}, None, False, None),
+ (["=dev-libs/A-1.1"], {}, None, True, ["dev-libs/A-1.1"]),
+ (["=dev-libs/A-1.2"], {}, None, True, ["dev-libs/A-1.2"]),
+ (["=dev-libs/A-1.3"], {}, None, True, ["dev-libs/A-1.3"]),
+ #~ (["=dev-libs/A-1.4"], {}, None, True, ["dev-libs/A-1.4"]),
+
+ #~ (["=dev-libs/A-2.0"], {}, None, False, None),
+ (["=dev-libs/A-2.1"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-2.1"]),
+ (["=dev-libs/A-2.2"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-2.2"]),
+ (["=dev-libs/A-2.3"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-2.3"]),
+ #~ (["=dev-libs/A-2.4"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-2.4"]),
+
+ (["=dev-libs/A-3.0"], {}, None, False, None),
+ (["=dev-libs/A-3.1"], {}, None, False, None),
+ (["=dev-libs/A-3.2"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-3.2"]),
+ (["=dev-libs/A-3.3"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-3.3"]),
+ #~ (["=dev-libs/A-3.4"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-3.4"]),
+
+ (["=dev-libs/A-4.0"], {}, None, False, None),
+ (["=dev-libs/A-4.1"], {}, None, False, None),
+ (["=dev-libs/A-4.2"], {}, None, True, ["dev-libs/A-4.2"]),
+ (["=dev-libs/A-4.3"], {}, None, True, ["dev-libs/A-4.3"]),
+ #~ (["=dev-libs/A-4.4"], {}, None, True, ["dev-libs/A-4.4"]),
+
+ #~ (["=dev-libs/A-5.0"], {}, None, False, None),
+ #~ (["=dev-libs/A-5.1"], {}, None, False, None),
+ #~ (["=dev-libs/A-5.2"], {}, None, False, None),
+ #~ (["=dev-libs/A-5.3"], {}, None, False, None),
+ #~ (["=dev-libs/A-5.4"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-5.4"]),
+
+ #~ (["=dev-libs/A-6.0"], {}, None, False, None),
+ #~ (["=dev-libs/A-6.1"], {}, None, False, None),
+ #~ (["=dev-libs/A-6.2"], {}, None, False, None),
+ #~ (["=dev-libs/A-6.3"], {}, None, False, None),
+ #~ (["=dev-libs/A-6.4"], {}, None, True, ["dev-libs/B-1", "dev-libs/A-6.4"]),
+ )
+
+ playground = ResolverPlayground(ebuilds=ebuilds)
+ try:
+ for atoms, options, action, \
+ expected_result, expected_mergelist in requests:
+ result = playground.run(atoms, options, action)
+ self.assertEqual((result.success, result.mergelist),
+ (expected_result, expected_mergelist))
+ finally:
+ playground.cleanup()
diff --git a/pym/portage/tests/resolver/test_simple.py b/pym/portage/tests/resolver/test_simple.py
new file mode 100644
index 00000000..ef19abe1
--- /dev/null
+++ b/pym/portage/tests/resolver/test_simple.py
@@ -0,0 +1,34 @@
+# Copyright 2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+
+class SimpleResolverTestCase(TestCase):
+
+ def testSimple(self):
+ ebuilds = {
+ "dev-libs/A-1": {},
+ "dev-libs/A-2": { "KEYWORDS": "~x86" },
+ "dev-libs/B-1.2": {},
+ }
+ installed = {
+ "dev-libs/B-1.1": {},
+ }
+
+ requests = (
+ (["dev-libs/A"], {}, None, True, ["dev-libs/A-1"]),
+ (["=dev-libs/A-2"], {}, None, False, None),
+ (["dev-libs/B"], {"--noreplace": True}, None, True, []),
+ (["dev-libs/B"], {"--update": True}, None, True, ["dev-libs/B-1.2"]),
+ )
+
+ playground = ResolverPlayground(ebuilds=ebuilds, installed=installed)
+ try:
+ for atoms, options, action, \
+ expected_result, expected_mergelist in requests:
+ result = playground.run(atoms, options, action)
+ self.assertEqual((result.success, result.mergelist),
+ (expected_result, expected_mergelist))
+ finally:
+ playground.cleanup()
diff --git a/pym/portage/tests/runTests b/pym/portage/tests/runTests
index 8e8a3a44..a6f3b8f1 100755
--- a/pym/portage/tests/runTests
+++ b/pym/portage/tests/runTests
@@ -10,6 +10,12 @@ import os.path as osp
# This line courtesy of Marienz and Pkgcore ;)
sys.path.insert(0, osp.dirname(osp.dirname(osp.dirname(osp.abspath(__file__)))))
+import portage
+
+# Ensure that we don't instantiate portage.settings, so that tests work
+# the same regardless of global configuration file state/existence.
+portage._disable_legacy_globals()
+
import portage.tests as tests
from portage.const import PORTAGE_BIN_PATH
path = os.environ.get("PATH", "").split(":")
@@ -19,6 +25,7 @@ if not path or not os.path.samefile(path[0], PORTAGE_BIN_PATH):
os.environ["PATH"] = ":".join(path)
del path
+
if __name__ == "__main__":
result = tests.main()
if not result.wasSuccessful():
diff --git a/pym/portage/util/_pty.py b/pym/portage/util/_pty.py
index 7fba0e2b..877430e9 100644
--- a/pym/portage/util/_pty.py
+++ b/pym/portage/util/_pty.py
@@ -95,10 +95,7 @@ def _test_pty_eof():
buf = array.array('B')
try:
buf.fromfile(master_file, 1024)
- except EOFError:
- eof = True
- except IOError:
- # This is where data loss occurs.
+ except (EOFError, IOError):
eof = True
if not buf:
diff --git a/pym/repoman/checks.py b/pym/repoman/checks.py
index d403044b..7e76bf78 100644
--- a/pym/repoman/checks.py
+++ b/pym/repoman/checks.py
@@ -306,6 +306,57 @@ class EbuildQuotedA(LineCheck):
if match:
return "Quoted \"${A}\" on line: %d"
+class EprefixifyDefined(LineCheck):
+	"""Check that prefix.eclass is inherited if needed."""
+
+ repoman_check_name = 'eprefixify.defined'
+
+ _eprefixify_re = re.compile(r'\beprefixify\b')
+ _inherit_prefix_re = re.compile(r'^\s*inherit\s(.*\s)?prefix\b')
+
+ def new(self, pkg):
+ self._prefix_inherited = False
+
+ def check(self, num, line):
+ if self._eprefixify_re.search(line) is not None:
+ if not self._prefix_inherited:
+ return errors.EPREFIXIFY_MISSING_INHERIT
+ elif self._inherit_prefix_re.search(line) is not None:
+ self._prefix_inherited = True
+
+class ImplicitRuntimeDeps(LineCheck):
+ """
+ Detect the case where DEPEND is set and RDEPEND is unset in the ebuild,
+ since this triggers implicit RDEPEND=$DEPEND assignment (prior to EAPI 4).
+ """
+
+ repoman_check_name = 'RDEPEND.implicit'
+ _assignment_re = re.compile(r'^\s*(R?DEPEND)=')
+
+ def new(self, pkg):
+ self._rdepend = False
+ self._depend = False
+
+ def check_eapi(self, eapi):
+ # Beginning with EAPI 4, there is no
+ # implicit RDEPEND=$DEPEND assignment
+ # to be concerned with.
+ return eapi in ('0', '1', '2', '3')
+
+ def check(self, num, line):
+ if not self._rdepend:
+ m = self._assignment_re.match(line)
+ if m is None:
+ pass
+ elif m.group(1) == "RDEPEND":
+ self._rdepend = True
+ elif m.group(1) == "DEPEND":
+ self._depend = True
+
+ def end(self):
+ if self._depend and not self._rdepend:
+ yield 'RDEPEND is not explicitly assigned'
+
class InheritAutotools(LineCheck):
"""
Make sure appropriate functions are called in
@@ -493,8 +544,8 @@ _constant_checks = tuple((c() for c in (
EbuildHeader, EbuildWhitespace, EbuildBlankLine, EbuildQuote,
EbuildAssignment, Eapi3EbuildAssignment, EbuildUselessDodoc,
EbuildUselessCdS, EbuildNestedDie,
- EbuildPatches, EbuildQuotedA, EapiDefinition,
- IUseUndefined, InheritAutotools,
+ EbuildPatches, EbuildQuotedA, EapiDefinition, EprefixifyDefined,
+ ImplicitRuntimeDeps, InheritAutotools, IUseUndefined,
EMakeParallelDisabled, EMakeParallelDisabledViaMAKEOPTS, NoAsNeeded,
DeprecatedBindnowFlags, SrcUnpackPatches, WantAutoDefaultValue,
SrcCompileEconf, Eapi3DeprecatedFuncs,
diff --git a/pym/repoman/errors.py b/pym/repoman/errors.py
index 97bd2829..8a28d4fd 100644
--- a/pym/repoman/errors.py
+++ b/pym/repoman/errors.py
@@ -19,3 +19,4 @@ EAPI_DEFINED_AFTER_INHERIT = 'EAPI defined after inherit on line: %d'
NO_AS_NEEDED = 'Upstream asneeded linking bug (no-as-needed on line: %d)'
PRESERVE_OLD_LIB = 'Upstream ABI change workaround on line: %d'
BUILT_WITH_USE = 'built_with_use on line: %d'
+EPREFIXIFY_MISSING_INHERIT = "prefix.eclass is not inherited, but eprefixify is used on line: %d"