[Lsb-messages] /var/www/bzr/lsb/devel/appbat r1036: switch entitychecker to argparse, a few other cleanups
Mats Wichmann
mats at linuxfoundation.org
Tue May 2 21:43:17 UTC 2017
------------------------------------------------------------
revno: 1036
committer: Mats Wichmann <mats at linuxfoundation.org>
branch nick: appbat
timestamp: Tue 2017-05-02 15:43:17 -0600
message:
switch entitychecker to argparse, a few other cleanups
modified:
extras/entitycheck.py
extras/entitycheck_new.py
-------------- next part --------------
=== modified file 'extras/entitycheck.py'
--- a/extras/entitycheck.py 2017-03-18 21:42:22 +0000
+++ b/extras/entitycheck.py 2017-05-02 21:43:17 +0000
@@ -1,48 +1,27 @@
#!/usr/bin/env python
#
-# entitycheck.py
-#
-# Check for entitites, and optionally retrieve them if missing/out of date.
-# retrieval is done using a combination of ftp and http, depending
-# on how the files are made available by their providers.
-# If you need to use a proxy, first set 'http_proxy' and 'ftp_proxy'
-# in the environment, using the form: xxx_proxy=host:port
-# Calling an external command (wget) is also possible
-# Note: the internal fetcher may go away if wget works out
-#
"""Entity checker
-Checks a build area for required packages and patches ("entities")
-as described by an entity file, and fetching missing ones if requested.
-
-Command line options:
-
--q, --quiet -- operate silently
--e FILE, --entityfile=FILE -- use entity file FILE [%s]
--p PATH, --packagepath=PATH -- use PATH for packages [%s]
--d PATH, --patchpath=PATH -- use PATH for patches [%s]
--g, --gensum -- generate crypto hashes for found entities
--c, --checksum -- check crypto hashes against file
---delete-bad -- delete files with bad checksums
--s FILE, --sumfile=FILE -- use crypto hash file FILE [%s]
--f, --fetch -- fetch missing pkgs
--n, --dryrun -- test what pkgs would be retrieved
---show-extras -- report on unused pkgs/patches
--u FILE, --updatefile=FILE -- use FILE for pkg locations [%s]
--z URL, --fallback=URL -- use URL as fallback for pkg retrieval
- [%s]
---prefer-fallback -- prefer the fallback URL over upstream
--w, --wget -- use wget to fetch files
--h, --help -- print this text and exit
+Validates a build area. An entity file defines "entities", which
+in this context are packages and patches declared to be necessary
+for the build to proceed. Entities are checked for presence,
+and optionally for valid cryptographic hash, and optionally if
+there are missing/invalid entities, attempt to retrieve them.
+Retrieval is done using a combination of ftp and http, depending
+on how the files are made available by their providers.
+If you need to use a proxy, first set 'http_proxy' and 'ftp_proxy'
+in the environment, using the form: xxx_proxy=host:port
+Calling an external command (wget) is also possible
+Note: the internal fetcher may go away if wget works out
"""
-import getopt
import os
import re
import sys
import time
import traceback
import urllib
+from argparse import ArgumentParser
# Handle hashing module (md5 is the old way, hashlib is the new)
try:
@@ -74,14 +53,9 @@
prefer_fallback = False
-def usage(code, msg=""):
- """Print usage, possibly with an error message preceding."""
- if msg:
- print "ERROR:", msg
- print
- print __doc__ % (epaths['entity_file'], epaths['package_path'],
- epaths['patch_path'], epaths['md5sum_file'],
- epaths['update_file'], epaths['fallback_url'])
+def usage(code, msg):
+ """Print error message and quit."""
+ print "ERROR:", msg
sys.exit(code)
@@ -527,7 +501,7 @@
hashfile.close()
-def retrieve_packages(missing_packages):
+def retrieve_packages(pkg_missing):
"""Retrieve packages identified as missing."""
locations = parse_locations()
retrieved = 0
@@ -538,7 +512,7 @@
getter = urlget_package
if os.access(epaths['package_path'], os.W_OK):
print "Retrieving..."
- for pkg in missing_packages:
+ for pkg in pkg_missing:
if getter(pkg, locations):
retrieved += 1
else:
@@ -563,61 +537,169 @@
os.unlink(entity.fullpath)
+def fetch_opts(ep):
+ longdesc="""
+ Checks a build area for required packages and patches ("entities")
+ as described by an entity file, fetching missing ones if requested."""
+
+ parser = ArgumentParser(description="Entity checker: " +longdesc)
+ parser.add_argument(
+ '-q',
+ '--quiet',
+ default=False,
+ action='store_true',
+ dest='quiet',
+ help='operate silently')
+ parser.add_argument(
+ '-g',
+ '--gensum',
+ default=False,
+ action='store_true',
+ dest='gensum',
+ help='generate crypto hashes for found entities')
+ sums = parser.add_mutually_exclusive_group()
+ sums.add_argument(
+ '-c',
+ '--checksum',
+ default=False,
+ action='store_true',
+ dest='checksum',
+ help='check crypto hashes')
+ sums.add_argument(
+ '--nochecksum',
+ action='store_false',
+ dest='checksum',
+ help='do not check crypto hashes')
+ delete = parser.add_mutually_exclusive_group()
+ delete.add_argument(
+ '-d',
+ '--delete',
+        default=False,
+ action='store_true',
+ dest='delete',
+ help='delete files with invalid sums')
+ delete.add_argument(
+ '--nodelete',
+ action='store_false',
+ dest='delete',
+ help='do not delete files with invalid sums')
+ fetch = parser.add_mutually_exclusive_group()
+ fetch.add_argument(
+ '-f',
+ '--fetch',
+ default=False,
+ action='store_true',
+ dest='fetch',
+ help='fetch missing pkgs')
+ fetch.add_argument(
+ '--nofetch',
+        default=False,
+ action='store_false',
+ dest='fetch',
+        help='do not fetch missing pkgs')
+ parser.add_argument(
+ '-n',
+ '--dryrun',
+ default=False,
+ action='store_true',
+ dest='dryrun',
+ help='show what entities would be retrieved')
+ parser.add_argument(
+ '-w',
+ '--wget',
+ default=True,
+ action='store_true',
+ dest='wget',
+ help='use wget to retrieve instead of internal fetcher')
+ parser.add_argument(
+ '-x',
+ '--extras',
+ default=False,
+ action='store_true',
+ dest='extras',
+ help='report on unused entities')
+ parser.add_argument(
+ '--prefer-fallback',
+ default=False,
+ action='store_true',
+ dest='usefallback',
+ help='prefer the fallback URL over upstream')
+ parser.add_argument(
+ '--entityfile',
+ dest='entity_file',
+ action='store',
+ default=ep['entity_file'],
+ help='set entity file (default: %(default)s)')
+ parser.add_argument(
+ '--packagepath',
+ dest='package_path',
+ action='store',
+ default=ep['package_path'],
+ help='set packages location (default: %(default)s)')
+ parser.add_argument(
+ '--patchpath',
+ dest='patch_path',
+ action='store',
+ default=ep['patch_path'],
+ help='set patches location (default: %(default)s)')
+ parser.add_argument(
+ '--sumfile',
+ dest='md5sum_file',
+ action='store',
+ default=ep['md5sum_file'],
+ help='set crypto hash file (default: %(default)s)')
+ parser.add_argument(
+ '--updatefile',
+ dest='update_file',
+ default=ep['update_file'],
+ help='set list of package fetch locations (default: %(default)s)')
+ parser.add_argument(
+ '--fallback',
+ action='store',
+ dest='fallback_url',
+ default=ep['fallback_url'],
+ help='set fallback for package retrieval (default: %(default)s)')
+ return parser.parse_args()
+
+
def main():
# 1. Process command-line arguments
- global generate_sums, check_sums, fetch_files, use_wget, dry_run, noisy, show_extras, delete_bad, prefer_fallback
-
- shortopts = 'qe:p:d:gcs:fun:z:wh'
- longopts = [
- 'quiet', 'entityfile=', 'packagepath=', 'patchpath=', 'gensum',
- 'checksum', 'sumfile=', 'fetch', 'updatefile=', 'dryrun',
- 'fallback=', 'wget', 'help', 'show-extras', 'delete-bad',
- 'prefer-fallback'
- ]
- try:
- opts, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
- except getopt.error, msg:
- usage(2, msg)
-
- if opts:
- for opt, arg in opts:
- if opt in ('--help', '-h'):
- usage(0)
- if opt in ('--entityfile', '-e'):
- epaths['entity_file'] = arg
- if opt in ('--packagepath', '-p'):
- epaths['package_path'] = arg
- if opt in ('--patchpath', '-d'):
- epaths['patch_path'] = arg
- if opt in ('--gensum', '-g'):
- if check_sums:
- usage(2, "check-sums and generate-sums are mutually exclusive")
- generate_sums = True
- if opt in ('--checksum', '-c'):
- if generate_sums:
- usage(2, "check-sums and generate-sums are mutually exclusive")
- check_sums = True
- if opt in ('--sumfile', '-s'):
- epaths['md5sum_file'] = arg
- if opt in ('--fetch', '-f'):
- fetch_files = True
- if opt in ('--updatefile', '-u'):
- epaths['update_file'] = arg
- if opt in ('--dryrun', '-n'):
- dry_run = True
- fetch_files = True
- if opt in ('--fallback', '-z'):
- epaths['fallback_url'] = arg
- if opt in ('--wget', '-w'):
- use_wget = True
- if opt in ('--quiet', '-q'):
- noisy = False
- if opt == '--show-extras':
- show_extras = True
- if opt == '--delete-bad':
- delete_bad = True
- if opt == '--prefer-fallback':
- prefer_fallback = True
+ global generate_sums, check_sums, fetch_files, use_wget, dry_run
+ global noisy, show_extras, delete_bad, prefer_fallback, epaths
+ opts = fetch_opts(epaths)
+
+ if opts.quiet:
+ noisy = not opts.quiet
+ if opts.gensum:
+ generate_sums = opts.gensum
+ if opts.checksum:
+ check_sums = opts.checksum
+ if opts.delete:
+ delete_bad = opts.delete
+ if opts.fetch:
+ fetch_files = opts.fetch
+ if opts.dryrun:
+ dry_run = opts.dryrun
+ fetch_files = True
+ if opts.extras:
+ show_extras = opts.extras
+ if opts.wget:
+ use_wget = opts.wget
+ if opts.usefallback:
+ prefer_fallback = opts.usefallback
+
+ if opts.entity_file:
+ epaths['entity_file'] = opts.entity_file
+ if opts.package_path:
+ epaths['package_path'] = opts.package_path
+ if opts.patch_path:
+ epaths['patch_path'] = opts.patch_path
+ if opts.md5sum_file:
+ epaths['md5sum_file'] = opts.md5sum_file
+ if opts.update_file:
+ epaths['update_file'] = opts.update_file
+ if opts.fallback_url:
+ epaths['fallback_url'] = opts.fallback_url
# 2. Check directories are okay up front
# also saves time to make sure the checksum file is there
@@ -630,29 +712,25 @@
# 3. Parse the entity file and see if those entities exist
pkglist, patchlist = parse_entities()
- found_packages, missing_packages = check_missing(epaths['package_path'],
- pkglist)
- found_patches, missing_patches = check_missing(epaths['patch_path'], patchlist)
+ pkg_found, pkg_missing = check_missing(epaths['package_path'], pkglist)
+ pch_found, pch_missing = check_missing(epaths['patch_path'], patchlist)
# 4. Scan the package and patch directories for extra files
if epaths['package_path'] == epaths['patch_path']:
- extras = check_extra(epaths['package_path'],
- found_packages + found_patches)
+ extras = check_extra(epaths['package_path'], pkg_found + pch_found)
else: # packages and patches in separate directories
- extras = check_extra(epaths['package_path'], found_packages)
- extras += check_extra(epaths['patch_path'], found_patches)
+ extras = check_extra(epaths['package_path'], pkg_found)
+ extras += check_extra(epaths['patch_path'], pch_found)
# 5. check checksums, if requested
# Whether doing sums or not, generate a report of the work to date
# so do that work first - it makes the ordering a little more sane
-
- exitcode = report(found_packages, found_patches, missing_packages,
- missing_patches, extras)
+ exitcode = report(pkg_found, pch_found, pkg_missing, pch_missing, extras)
if check_sums:
checksums = readhash()
- bad_packages, no_packages = check_checksums(found_packages, checksums)
- bad_patches, no_patches = check_checksums(found_patches, checksums)
+ bad_packages, no_packages = check_checksums(pkg_found, checksums)
+ bad_patches, no_patches = check_checksums(pch_found, checksums)
exitcode += sum_report(bad_packages, bad_patches, no_packages, no_patches)
# 6. Go fetch missing files if requested, and do another report
@@ -660,8 +738,8 @@
fails = []
missing = []
if fetch_files:
- if missing_packages:
- fetchcount, fails, missing = retrieve_packages(missing_packages)
+ if pkg_missing:
+ fetchcount, fails, missing = retrieve_packages(pkg_missing)
# if we checked checksums, there may also be pkgs with bad cksums:
if check_sums and bad_packages:
@@ -688,7 +766,7 @@
# 9. Generate a new checksum file, if requested (only if nothing fatal
# happened above)
if generate_sums:
- writehash(found_packages + found_patches)
+ writehash(pkg_found + pch_found)
sys.exit(exitcode)
=== modified file 'extras/entitycheck_new.py'
--- a/extras/entitycheck_new.py 2017-03-06 15:14:28 +0000
+++ b/extras/entitycheck_new.py 2017-05-02 21:43:17 +0000
@@ -1,42 +1,20 @@
#!/usr/bin/env python
#
-# entitycheck.py
-#
-# Check for entitites, and optionally retrieve them if missing/out of date.
-# retrieval is done using a combination of ftp and http, depending
-# on how the files are made available by their providers.
-# If you need to use a proxy, first set 'http_proxy' and 'ftp_proxy'
-# in the environment, using the form: xxx_proxy=host:port
-# Calling an external command (wget) is also possible
-# Note: the internal fetcher may go away if wget works out
-#
"""Entity checker
-Checks a build area for required packages and patches ("entities")
-as described by an entity file, and fetching missing ones if requested.
-
-Command line options:
-
--q, --quiet -- operate silently
--e FILE, --entityfile=FILE -- use entity file FILE [{entity_file}]
--p PATH, --packagepath=PATH -- use PATH for packages [{package_path}]
--d PATH, --patchpath=PATH -- use PATH for patches [{patch_path}]
--g, --gensum -- generate crypto hashes for found entities
--c, --checksum -- check crypto hashes against file
---delete-bad -- delete files with bad checksums
--s FILE, --sumfile=FILE -- use crypto hash file FILE [{md5sum_file}]
--f, --fetch -- fetch missing pkgs
--n, --dryrun -- test what pkgs would be retrieved
---show-extras -- report on unused pkgs/patches
--u FILE, --updatefile=FILE -- use FILE for pkg locations [{update_file}]
--z URL, --fallback=URL -- use URL as fallback for pkg retrieval
- [{fallback_url}]
---prefer-fallback -- prefer the fallback URL over upstream
--w, --wget -- use wget to fetch files
--h, --help -- print this text and exit
+Validates a build area. An entity file defines "entities", which
+in this context are packages and patches declared to be necessary
+for the build to proceed. Entities are checked for presence,
+and optionally for valid cryptographic hash, and optionally if
+there are missing/invalid entities, attempt to retrieve them.
+Retrieval is done using a combination of ftp and http, depending
+on how the files are made available by their providers.
+If you need to use a proxy, first set 'http_proxy' and 'ftp_proxy'
+in the environment, using the form: xxx_proxy=host:port
+Calling an external command (wget) is also possible
+Note: the internal fetcher may go away if wget works out
"""
-import getopt
import os
import re
import sys
@@ -44,6 +22,7 @@
import traceback
import urllib
from functools import partial
+from argparse import ArgumentParser
# Handle hashing module (md5 is the old way, hashlib is the new)
try:
@@ -67,7 +46,7 @@
generate_sums = False
check_sums = False
fetch_files = False
-use_wget = False
+use_wget = True
dry_run = False
noisy = True
show_extras = False
@@ -75,11 +54,9 @@
prefer_fallback = False
-def usage(code, msg=""):
- """Print usage, possibly with an error message preceding."""
- if msg:
- print "ERROR: {}\n".format(msg)
- print __doc__.format(**epaths)
+def usage(code, msg):
+ """Print error message and quit."""
+ print "ERROR:", msg
sys.exit(code)
@@ -498,7 +475,7 @@
hashfile.write("%s %s\n" % (entity.hash, entity.fname))
-def retrieve_packages(missing_packages):
+def retrieve_packages(pkg_missing):
"""Retrieve packages identified as missing."""
locations = parse_locations()
retrieved = 0
@@ -509,7 +486,7 @@
getter = urlget_package
if os.access(epaths['package_path'], os.W_OK):
print "Retrieving..."
- for pkg in missing_packages:
+ for pkg in pkg_missing:
if getter(pkg, locations):
retrieved += 1
else:
@@ -534,61 +511,169 @@
os.unlink(entity.fullpath)
+def fetch_opts(ep):
+ longdesc="""
+ Checks a build area for required packages and patches ("entities")
+ as described by an entity file, fetching missing ones if requested."""
+
+ parser = ArgumentParser(description="Entity checker: " +longdesc)
+ parser.add_argument(
+ '-q',
+ '--quiet',
+ default=False,
+ action='store_true',
+ dest='quiet',
+ help='operate silently')
+ parser.add_argument(
+ '-g',
+ '--gensum',
+ default=False,
+ action='store_true',
+ dest='gensum',
+ help='generate crypto hashes for found entities')
+ sums = parser.add_mutually_exclusive_group()
+ sums.add_argument(
+ '-c',
+ '--checksum',
+ default=False,
+ action='store_true',
+ dest='checksum',
+ help='check crypto hashes')
+ sums.add_argument(
+ '--nochecksum',
+ action='store_false',
+ dest='checksum',
+ help='do not check crypto hashes')
+ delete = parser.add_mutually_exclusive_group()
+ delete.add_argument(
+ '-d',
+ '--delete',
+        default=False,
+ action='store_true',
+ dest='delete',
+ help='delete files with invalid sums')
+ delete.add_argument(
+ '--nodelete',
+ action='store_false',
+ dest='delete',
+ help='do not delete files with invalid sums')
+ fetch = parser.add_mutually_exclusive_group()
+ fetch.add_argument(
+ '-f',
+ '--fetch',
+ default=False,
+ action='store_true',
+ dest='fetch',
+ help='fetch missing pkgs')
+ fetch.add_argument(
+ '--nofetch',
+        default=False,
+ action='store_false',
+ dest='fetch',
+        help='do not fetch missing pkgs')
+ parser.add_argument(
+ '-n',
+ '--dryrun',
+ default=False,
+ action='store_true',
+ dest='dryrun',
+ help='show what entities would be retrieved')
+ parser.add_argument(
+ '-w',
+ '--wget',
+ default=True,
+ action='store_true',
+ dest='wget',
+ help='use wget to retrieve instead of internal fetcher')
+ parser.add_argument(
+ '-x',
+ '--extras',
+ default=False,
+ action='store_true',
+ dest='extras',
+ help='report on unused entities')
+ parser.add_argument(
+ '--prefer-fallback',
+ default=False,
+ action='store_true',
+ dest='usefallback',
+ help='prefer the fallback URL over upstream')
+ parser.add_argument(
+ '--entityfile',
+ dest='entity_file',
+ action='store',
+ default=ep['entity_file'],
+ help='set entity file (default: %(default)s)')
+ parser.add_argument(
+ '--packagepath',
+ dest='package_path',
+ action='store',
+ default=ep['package_path'],
+ help='set packages location (default: %(default)s)')
+ parser.add_argument(
+ '--patchpath',
+ dest='patch_path',
+ action='store',
+ default=ep['patch_path'],
+ help='set patches location (default: %(default)s)')
+ parser.add_argument(
+ '--sumfile',
+ dest='md5sum_file',
+ action='store',
+ default=ep['md5sum_file'],
+ help='set crypto hash file (default: %(default)s)')
+ parser.add_argument(
+ '--updatefile',
+ dest='update_file',
+ default=ep['update_file'],
+ help='set list of package fetch locations (default: %(default)s)')
+ parser.add_argument(
+ '--fallback',
+ action='store',
+ dest='fallback_url',
+ default=ep['fallback_url'],
+ help='set fallback for package retrieval (default: %(default)s)')
+ return parser.parse_args()
+
+
def main():
# 1. Process command-line arguments
- global generate_sums, check_sums, fetch_files, use_wget, dry_run, noisy, show_extras, delete_bad, prefer_fallback
-
- shortopts = 'qe:p:d:gcs:fun:z:wh'
- longopts = [
- 'quiet', 'entityfile=', 'packagepath=', 'patchpath=', 'gensum',
- 'checksum', 'sumfile=', 'fetch', 'updatefile=', 'dryrun',
- 'fallback=', 'wget', 'help', 'show-extras', 'delete-bad',
- 'prefer-fallback'
- ]
- try:
- opts, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
- except getopt.error as msg:
- usage(2, msg)
-
- if opts:
- for opt, arg in opts:
- if opt in ('--help', '-h'):
- usage(0)
- if opt in ('--entityfile', '-e'):
- epaths['entity_file'] = arg
- if opt in ('--packagepath', '-p'):
- epaths['package_path'] = arg
- if opt in ('--patchpath', '-d'):
- epaths['patch_path'] = arg
- if opt in ('--gensum', '-g'):
- if check_sums:
- usage(2, "check-sums and generate-sums are mutually exclusive")
- generate_sums = True
- if opt in ('--checksum', '-c'):
- if generate_sums:
- usage(2, "check-sums and generate-sums are mutually exclusive")
- check_sums = True
- if opt in ('--sumfile', '-s'):
- epaths['md5sum_file'] = arg
- if opt in ('--fetch', '-f'):
- fetch_files = True
- if opt in ('--updatefile', '-u'):
- epaths['update_file'] = arg
- if opt in ('--dryrun', '-n'):
- dry_run = True
- fetch_files = True
- if opt in ('--fallback', '-z'):
- epaths['fallback_url'] = arg
- if opt in ('--wget', '-w'):
- use_wget = True
- if opt in ('--quiet', '-q'):
- noisy = False
- if opt == '--show-extras':
- show_extras = True
- if opt == '--delete-bad':
- delete_bad = True
- if opt == '--prefer-fallback':
- prefer_fallback = True
+ global generate_sums, check_sums, fetch_files, use_wget, dry_run
+ global noisy, show_extras, delete_bad, prefer_fallback, epaths
+ opts = fetch_opts(epaths)
+
+ if opts.quiet:
+ noisy = not opts.quiet
+ if opts.gensum:
+ generate_sums = opts.gensum
+ if opts.checksum:
+ check_sums = opts.checksum
+ if opts.delete:
+ delete_bad = opts.delete
+ if opts.fetch:
+ fetch_files = opts.fetch
+ if opts.dryrun:
+ dry_run = opts.dryrun
+ fetch_files = True
+ if opts.extras:
+ show_extras = opts.extras
+ if opts.wget:
+ use_wget = opts.wget
+ if opts.usefallback:
+ prefer_fallback = opts.usefallback
+
+ if opts.entity_file:
+ epaths['entity_file'] = opts.entity_file
+ if opts.package_path:
+ epaths['package_path'] = opts.package_path
+ if opts.patch_path:
+ epaths['patch_path'] = opts.patch_path
+ if opts.md5sum_file:
+ epaths['md5sum_file'] = opts.md5sum_file
+ if opts.update_file:
+ epaths['update_file'] = opts.update_file
+ if opts.fallback_url:
+ epaths['fallback_url'] = opts.fallback_url
# 2. Check directories are okay up front
# also saves time to make sure the checksum file is there
@@ -601,29 +686,25 @@
# 3. Parse the entity file and see if those entities exist
pkglist, patchlist = parse_entities()
- found_packages, missing_packages = check_missing(epaths['package_path'],
- pkglist)
- found_patches, missing_patches = check_missing(epaths['patch_path'], patchlist)
+ pkg_found, pkg_missing = check_missing(epaths['package_path'], pkglist)
+ pch_found, pch_missing = check_missing(epaths['patch_path'], patchlist)
# 4. Scan the package and patch directories for extra files
if epaths['package_path'] == epaths['patch_path']:
- extras = check_extra(epaths['package_path'],
- found_packages + found_patches)
+ extras = check_extra(epaths['package_path'], pkg_found + pch_found)
else: # packages and patches in separate directories
- extras = check_extra(epaths['package_path'], found_packages)
- extras += check_extra(epaths['patch_path'], found_patches)
+ extras = check_extra(epaths['package_path'], pkg_found)
+ extras += check_extra(epaths['patch_path'], pch_found)
# 5. check checksums, if requested
# Whether doing sums or not, generate a report of the work to date
# so do that work first - it makes the ordering a little more sane
-
- exitcode = report(found_packages, found_patches, missing_packages,
- missing_patches, extras)
+ exitcode = report(pkg_found, pch_found, pkg_missing, pch_missing, extras)
if check_sums:
checksums = readhash()
- bad_packages, no_packages = check_checksums(found_packages, checksums)
- bad_patches, no_patches = check_checksums(found_patches, checksums)
+ bad_packages, no_packages = check_checksums(pkg_found, checksums)
+ bad_patches, no_patches = check_checksums(pch_found, checksums)
exitcode += sum_report(bad_packages, bad_patches, no_packages, no_patches)
# 6. Go fetch missing files if requested, and do another report
@@ -631,8 +712,8 @@
fails = []
missing = []
if fetch_files:
- if missing_packages:
- fetchcount, fails, missing = retrieve_packages(missing_packages)
+ if pkg_missing:
+ fetchcount, fails, missing = retrieve_packages(pkg_missing)
# if we checked checksums, there may also be pkgs with bad cksums:
if check_sums and bad_packages:
@@ -659,7 +740,7 @@
# 9. Generate a new checksum file, if requested (only if nothing fatal
# happened above)
if generate_sums:
- writehash(found_packages + found_patches)
+        writehash(pkg_found + pch_found)
sys.exit(exitcode)
More information about the lsb-messages
mailing list