about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  Changelog                         8
-rwxr-xr-x  bin/euscan                       42
-rw-r--r--  pym/euscan/handlers/generic.py   25
-rw-r--r--  pym/euscan/handlers/php.py        5
-rw-r--r--  pym/euscan/handlers/rubygem.py    5
-rw-r--r--  pym/euscan/helpers.py            21
-rw-r--r--  pym/euscan/scan.py               91
7 files changed, 110 insertions, 87 deletions
diff --git a/Changelog b/Changelog
index 9490ec5..3b74270 100644
--- a/Changelog
+++ b/Changelog
@@ -2,6 +2,14 @@
Change history
================
+.. _version-0.2.0:
+
+0.2.0
+=====
+:release-date: ???
+
+* Better --quiet mode
+
.. _version-0.1.0:
0.1.0
diff --git a/bin/euscan b/bin/euscan
index 0284685..b70600e 100755
--- a/bin/euscan
+++ b/bin/euscan
@@ -29,7 +29,8 @@ from gentoolkit import pprinter as pp
from gentoolkit.eclean.search import (port_settings)
from gentoolkit.errors import GentoolkitException
-from euscan import CONFIG, output
+import euscan
+from euscan import CONFIG
from euscan.scan import scan_upstream
""" Globals """
@@ -198,7 +199,7 @@ def main():
sys.exit(errno.EINVAL)
""" Change euscan's output """
- output = EOutput(CONFIG['quiet'])
+ euscan.output = EOutput(CONFIG['quiet'])
if CONFIG['verbose'] > 2:
httplib.HTTPConnection.debuglevel = 1
@@ -208,29 +209,34 @@ def main():
try:
ret = scan_upstream(package)
except AmbiguousPackageName as e:
- pkgs = e.args[0]
- for candidate in pkgs:
- print(candidate)
+ pkgs = e.args[0]
+ for candidate in pkgs:
+ print(candidate)
- from os.path import basename # To get the short name
+ from os.path import basename # To get the short name
- print(file=sys.stderr)
- print(pp.error("The short ebuild name '%s' is ambiguous. Please specify" % basename(pkgs[0])),
- file=sys.stderr, end="")
- pp.die(1, "one of the above fully-qualified ebuild names instead.")
+ print(file=sys.stderr)
+ print(pp.error("The short ebuild name '%s' is ambiguous. Please specify" % basename(pkgs[0])),
+ file=sys.stderr, end="")
+ pp.die(1, "one of the above fully-qualified ebuild names instead.")
except GentoolkitException as err:
- pp.die(1, str(err))
+ pp.die(1, str(err))
except Exception as err:
- pp.die(1, str(err))
+ pp.die(1, str(err))
- print ()
+ if not CONFIG['quiet']:
+ print ()
- for url, version in ret:
- print ("Upstream Version: "
- + pp.number("%s" % version)
- + pp.path(" %s" % url))
+ for cp, url, version in ret:
+ if not CONFIG['quiet']:
+ print ("Upstream Version: "
+ + pp.number("%s" % version)
+ + pp.path(" %s" % url))
+ else:
+ print (pp.cpv("%s-%s" % (cp, version))
+ + ": " + pp.path(url))
- if not len(ret):
+ if not len(ret) and not CONFIG['quiet']:
print (pp.warn("Didn't find any new version, "
+ "check package's homepage for "
+ "more informations"));
diff --git a/pym/euscan/handlers/generic.py b/pym/euscan/handlers/generic.py
index 1584042..9a4b7c3 100644
--- a/pym/euscan/handlers/generic.py
+++ b/pym/euscan/handlers/generic.py
@@ -6,8 +6,9 @@ from BeautifulSoup import BeautifulSoup
import portage
-from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS, output
+from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS
from euscan import helpers
+import euscan
def scan_html(data, url, pattern):
soup = BeautifulSoup(data)
@@ -48,7 +49,7 @@ def scan_directory_recursive(cpv, url, steps):
steps = steps[1:]
- output.einfo("Scanning: %s" % url)
+ euscan.output.einfo("Scanning: %s" % url)
try:
fp = helpers.urlopen(url)
@@ -91,7 +92,7 @@ def scan_directory_recursive(cpv, url, steps):
def scan(cpv, url):
for bu in SCANDIR_BLACKLIST_URLS:
if re.match(bu, url):
- output.einfo("%s is blacklisted by rule %s" % (url, bu))
+ euscan.output.einfo("%s is blacklisted by rule %s" % (url, bu))
return []
resolved_url = helpers.parse_mirror(url)
@@ -100,11 +101,11 @@ def scan(cpv, url):
template = helpers.template_from_url(resolved_url, ver)
if '${' not in template:
- output.einfo("Url doesn't seems to depend on version: %s not found in %s"
+ euscan.output.einfo("Url doesn't seems to depend on version: %s not found in %s"
% (ver, resolved_url))
return []
else:
- output.einfo("Scanning: %s" % template)
+ euscan.output.einfo("Scanning: %s" % template)
steps = helpers.generate_scan_paths(template)
return scan_directory_recursive(cpv, "", steps)
@@ -116,15 +117,15 @@ def brute_force(cpv, url):
for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
if re.match(bp, cp):
- output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+ euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
return []
for bp in BRUTEFORCE_BLACKLIST_URLS:
if re.match(bp, url):
- output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+ euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
return []
- output.einfo("Generating version from " + ver)
+ euscan.output.einfo("Generating version from " + ver)
components = helpers.split_version(ver)
versions = helpers.gen_versions(components, CONFIG["brute-force"])
@@ -135,17 +136,17 @@ def brute_force(cpv, url):
versions.remove(v)
if not versions:
- output.einfo("Can't generate new versions from " + ver)
+ euscan.output.einfo("Can't generate new versions from " + ver)
return []
template = helpers.template_from_url(url, ver)
if '${PV}' not in template:
- output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
+ euscan.output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
% (ver, url))
return []
else:
- output.einfo("Brute forcing: %s" % template)
+ euscan.output.einfo("Brute forcing: %s" % template)
result = []
@@ -173,7 +174,7 @@ def brute_force(cpv, url):
result.append([url, version])
if len(result) > CONFIG['brute-force-false-watermark']:
- output.einfo("Broken server detected ! Skipping brute force.")
+ euscan.output.einfo("Broken server detected ! Skipping brute force.")
return []
if CONFIG["brute-force-recursive"]:
diff --git a/pym/euscan/handlers/php.py b/pym/euscan/handlers/php.py
index 5ec673b..bf99cce 100644
--- a/pym/euscan/handlers/php.py
+++ b/pym/euscan/handlers/php.py
@@ -3,7 +3,8 @@ import portage
import urllib2
import xml.dom.minidom
-from euscan import helpers, output
+from euscan import helpers
+import euscan
def can_handle(cpv, url):
if url.startswith('http://pear.php.net/get/'):
@@ -29,7 +30,7 @@ def scan(cpv, url):
orig_url = url
url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())
- output.einfo("Using: " + url)
+ euscan.output.einfo("Using: " + url)
try:
fp = helpers.urlopen(url)
diff --git a/pym/euscan/handlers/rubygem.py b/pym/euscan/handlers/rubygem.py
index 0b7bdc8..cf37891 100644
--- a/pym/euscan/handlers/rubygem.py
+++ b/pym/euscan/handlers/rubygem.py
@@ -3,7 +3,8 @@ import portage
import json
import urllib2
-from euscan import helpers, output
+from euscan import helpers
+import euscan
def can_handle(cpv, url):
return url.startswith('mirror://rubygems/')
@@ -24,7 +25,7 @@ def scan(cpv, url):
gem = guess_gem(cpv, url)
url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
- output.einfo("Using: " + url)
+ euscan.output.einfo("Using: " + url)
try:
fp = helpers.urlopen(url)
diff --git a/pym/euscan/helpers.py b/pym/euscan/helpers.py
index e987ba2..49d1fe5 100644
--- a/pym/euscan/helpers.py
+++ b/pym/euscan/helpers.py
@@ -15,7 +15,8 @@ except ImportError:
import portage
from portage import dep
-from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS, output
+from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS
+import euscan
def htop_vercmp(a, b):
def fixver(v):
@@ -87,7 +88,7 @@ def version_blacklisted(cp, version):
None
if rule:
- output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
+ euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
return rule is not None
def version_filtered(cp, base, version):
@@ -254,7 +255,7 @@ def urlallowed(url):
def urlopen(url, timeout=None, verb="GET"):
if not urlallowed(url):
- output.einfo("Url '%s' blocked by robots.txt" % url)
+ euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
return None
if not timeout:
@@ -287,17 +288,17 @@ def tryurl(fileurl, template):
result = True
if not urlallowed(fileurl):
- output.einfo("Url '%s' blocked by robots.txt" % fileurl)
+ euscan.output.einfo("Url '%s' blocked by robots.txt" % fileurl)
return None
- output.ebegin("Trying: " + fileurl)
+ euscan.output.ebegin("Trying: " + fileurl)
try:
basename = os.path.basename(fileurl)
fp = urlopen(fileurl, verb='HEAD')
if not fp:
- output.eend(errno.EPERM)
+ euscan.output.eend(errno.EPERM)
return None
headers = fp.info()
@@ -328,7 +329,7 @@ def tryurl(fileurl, template):
except IOError:
result = None
- output.eend(errno.ENOENT if not result else 0)
+ euscan.output.eend(errno.ENOENT if not result else 0)
return result
@@ -383,8 +384,8 @@ def parse_mirror(uri):
eidx = uri.find("/", 9)
if eidx == -1:
- output.einfo("Invalid mirror definition in SRC_URI:\n")
- output.einfo(" %s\n" % (uri))
+ euscan.output.einfo("Invalid mirror definition in SRC_URI:\n")
+ euscan.output.einfo(" %s\n" % (uri))
return None
mirrorname = uri[9:eidx]
@@ -395,7 +396,7 @@ def parse_mirror(uri):
shuffle(mirrors)
uri = mirrors[0].strip("/") + "/" + path
else:
- output.einfo("No known mirror by the name: %s\n" % (mirrorname))
+ euscan.output.einfo("No known mirror by the name: %s\n" % (mirrorname))
return None
return uri
diff --git a/pym/euscan/scan.py b/pym/euscan/scan.py
index 4ea49be..aae180d 100644
--- a/pym/euscan/scan.py
+++ b/pym/euscan/scan.py
@@ -11,10 +11,12 @@ from gentoolkit import errors
from gentoolkit.query import Query
from gentoolkit.eclean.search import (port_settings)
-from euscan import CONFIG, BLACKLIST_PACKAGES, output
+from euscan import CONFIG, BLACKLIST_PACKAGES
from euscan import handlers
from euscan import helpers
+import euscan
+
def filter_versions(cp, versions):
filtered = {}
@@ -31,18 +33,19 @@ def filter_versions(cp, versions):
filtered[version] = url
- return [ (filtered[version], version) for version in filtered ]
+ return [ (cp, filtered[version], version) for version in filtered ]
def scan_upstream_urls(cpv, urls):
versions = []
for filename in urls:
for url in urls[filename]:
- pp.uprint()
- output.einfo("SRC_URI is '%s'" % url)
+ if not CONFIG['quiet']:
+ pp.uprint()
+ euscan.output.einfo("SRC_URI is '%s'" % url)
if '://' not in url:
- output.einfo("Invalid url '%s'" % url)
+ euscan.output.einfo("Invalid url '%s'" % url)
continue
''' Try normal scan '''
@@ -64,57 +67,59 @@ def scan_upstream_urls(cpv, urls):
def scan_upstream(query):
- matches = Query(query).find(
- include_masked=True,
- in_installed=False
- )
+ matches = Query(query).find(
+ include_masked=True,
+ in_installed=False
+ )
- if not matches:
- sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
- return []
+ if not matches:
+ sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
+ return []
- matches = sorted(matches)
- pkg = matches.pop()
+ matches = sorted(matches)
+ pkg = matches.pop()
- while '9999' in pkg.version and len(matches):
- pkg = matches.pop()
+ while '9999' in pkg.version and len(matches):
+ pkg = matches.pop()
- if not pkg:
- sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp)))
- return []
+ if not pkg:
+ sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)"
+ % pp.pkgquery(pkg.cp)))
+ return []
- if pkg.cp in BLACKLIST_PACKAGES:
- sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
- return []
+ if pkg.cp in BLACKLIST_PACKAGES:
+ sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
+ return []
+ if not CONFIG['quiet']:
pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
pp.uprint()
ebuild_path = pkg.ebuild_path()
if ebuild_path:
- pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))
+ pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))
pp.uprint('Repository: ' + pkg.repo_name())
pp.uprint('Homepage: ' + pkg.environment("HOMEPAGE"))
pp.uprint('Description: ' + pkg.environment("DESCRIPTION"))
- cpv = pkg.cpv
- metadata = {
- "EAPI" : port_settings["EAPI"],
- "SRC_URI" : pkg.environment("SRC_URI", False),
- }
- use = frozenset(port_settings["PORTAGE_USE"].split())
- try:
- alist = porttree._parse_uri_map(cpv, metadata, use=use)
- aalist = porttree._parse_uri_map(cpv, metadata)
- except Exception as e:
- sys.stderr.write(pp.warn("%s\n" % str(e)))
- sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
- return []
-
- if "mirror" in portage.settings.features:
- urls = aalist
- else:
- urls = alist
-
- return scan_upstream_urls(pkg.cpv, urls)
+ cpv = pkg.cpv
+ metadata = {
+ "EAPI" : port_settings["EAPI"],
+ "SRC_URI" : pkg.environment("SRC_URI", False),
+ }
+ use = frozenset(port_settings["PORTAGE_USE"].split())
+ try:
+ alist = porttree._parse_uri_map(cpv, metadata, use=use)
+ aalist = porttree._parse_uri_map(cpv, metadata)
+ except Exception as e:
+ sys.stderr.write(pp.warn("%s\n" % str(e)))
+ sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
+ return []
+
+ if "mirror" in portage.settings.features:
+ urls = aalist
+ else:
+ urls = alist
+
+ return scan_upstream_urls(pkg.cpv, urls)