#!/usr/bin/python2
#
# makeupdates - Generate an updates.img containing changes since the last
# tag, but only changes to the main anaconda runtime.
# initrd/stage1 updates have to be created separately.
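#
# Typical invocation (illustrative), run from the top of the anaconda
# source tree:
#   scripts/makeupdates -c -t anaconda-21.48-1 -f x86_64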
#
# Copyright (C) 2009 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: David Cantrell
import os
import shutil
import subprocess
import sys
import re
import glob
import urllib
import threading
import multiprocessing
import argparse
import tempfile
import fnmatch
from collections import namedtuple
try:
from rpmUtils import miscutils # available from the yum-utils package
except ImportError:
print("You need to install the yum-utils package to run makeupdates.")
exit(1)
RPM_FOLDER_NAME = os.path.expanduser("~/.anaconda_updates_rpm_cache")
RPM_RELEASE_DIR_TEMPLATE = "for_%s"
KOJI_BASE_URL = "http://kojipkgs.fedoraproject.org//packages/" \
"%(toplevel_name)s/%(toplevel_version)s/%(release)s/%(arch)s/%(rpm_name)s"
VERSION_EQUAL = "="
VERSION_MORE_OR_EQUAL = ">="
VERSION_LESS_OR_EQUAL = "<="
VERSION_OP_MAP = {
"=": VERSION_EQUAL,
">=": VERSION_MORE_OR_EQUAL,
"<=": VERSION_LESS_OR_EQUAL
}
def getArchiveTag(configure, spec):
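    """Build the release tag from configure.ac and the spec file.

    For example (illustrative values): AC_INIT([anaconda], [21.48], [...])
    plus a "Release: 1%{?dist}" line yields the tag "anaconda-21.48-1".
    """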
tag = ""
with open(configure, "r") as f:
for line in f:
if line.startswith('AC_INIT('):
fields = line.split('[')
tag += fields[1].split(']')[0] + '-' + fields[2].split(']')[0]
break
else:
continue
with open(spec, "r") as f:
for line in f:
if line.startswith('Release:'):
tag += '-' + line.split()[1].split('%')[0]
else:
continue
return tag
def getArchiveTagOffset(configure, spec, offset):
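    """Return the tag `offset` tags back from the current one (sorted by
    tagger date), e.g. (illustrative) offset=1 from anaconda-21.48-1 might
    resolve to anaconda-21.47-1; falls back to the current tag when the
    version cannot be parsed or the offset is out of range.
    """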
tag = getArchiveTag(configure, spec)
    if tag.count("-") < 2:
return tag
ldash = tag.rfind("-")
bldash = tag[:ldash].rfind("-")
ver = tag[bldash+1:ldash]
    if "." not in ver:
return tag
ver = ver[:ver.rfind(".")]
    if not ver:
return tag
globstr = "refs/tags/" + tag[:bldash+1] + ver + ".*"
proc = subprocess.Popen(['git', 'for-each-ref', '--sort=taggerdate',
'--format=%(tag)', globstr],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
lines = proc[0].strip("\n").split('\n')
lines.reverse()
try:
return lines[offset]
except IndexError:
return tag
def get_anaconda_version():
"""Get current anaconda version as string from the configure script"""
with open("configure.ac") as f:
match = re.search(r"AC_INIT\(\[.*\],\ \[(.*)\],\ \[.*\]\)", f.read())
return match.groups()[0]
def get_fedora_version():
"""Return integer representing current Fedora number,
based on Anaconda version"""
anaconda_version = get_anaconda_version()
return int(anaconda_version.split(".")[0])
def get_pkg_tuple(filename):
"""Split package filename to name, version, release, epoch, arch
:param filename: RPM package filename
:type filename: string
:returns: package metadata tuple
:rtype: tuple
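
    Example (illustrative): "foo-1.0-1.fc21.x86_64.rpm" yields
    ("foo", "x86_64", "", "1.0", "1.fc21")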
"""
name, version, release, epoch, arch = miscutils.splitFilename(filename)
return (name, arch, epoch, version, release)
def get_req_tuple(pkg_tuple, version_request):
"""Return package version requirements tuple
:param pkg_tuple: package metadata tuple
:type pkg_tuple: tuple
:param version_request: version request constant or None
:returns: version request tuple
:rtype: tuple
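
    Example (illustrative): (("foo", "x86_64", "", "1.0", "1.fc21"), ">=")
    yields ("foo", ">=", ("", "1.0", "1.fc21"))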
"""
name, _arch, epoch, version, release = pkg_tuple
return (name, version_request, (epoch, version, release))
def check_package_version(filename, package, check_release_id=True):
"""Check if package described by filename complies with the required
version and the version request operator
:param filename: the package filename to check
    :type filename: string
    :param package: specification of the required package
    :type package: namedtuple
    :param check_release_id: whether to also compare release ids
    :type check_release_id: bool
:returns: True if filename satisfies package version request,
False otherwise
:rtype: bool
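
    Example (illustrative): "langtable-0.0.13-1.fc19.noarch.rpm" satisfies
    a request tuple built from "langtable >= 0.0.13-1" when the release id
    is fc19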
"""
# drop all other path components than the filename
# (if present)
filename = os.path.basename(filename)
# split the name into components
pkg_tuple = get_pkg_tuple(filename)
if check_release_id:
# get release ids for request and the package
# and strip it from any build/git garbage
request_release = package.req_tuple[2][2].rsplit(".", 1).pop()
package_release = pkg_tuple[4].rsplit(".", 1).pop()
# rangeCheck actually ignores different release ids,
# so we need to do it here
if request_release != package_release:
return False
return bool(miscutils.rangeCheck(package.req_tuple, pkg_tuple))
def doGitDiff(tag, args=None):
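    """Return the output of `git diff --name-status <tag>` as a list of
    lines; each line is a status letter followed by the path(s), e.g.
    "M\tpyanaconda/install.py" or "R100\told_path\tnew_path".
    """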
if args is None:
args=[]
cmd = ['git', 'diff', '--name-status', tag] + args
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output = proc.communicate()
if proc.returncode:
raise RuntimeError("Error running %s: %s" % (" ".join(cmd), output[1]))
lines = output[0].split('\n')
return lines
def doGitContentDiff(tag, args=None):
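    """Return the full content of `git diff <tag>` as a list of lines."""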
if args is None:
args = []
cmd = ['git', 'diff', tag] + args
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output = proc.communicate()
if proc.returncode:
raise RuntimeError("Error running %s: %s" % (" ".join(cmd), output[1]))
lines = output[0].split('\n')
return lines
def download_to_file(url, path):
"""Download a file to the given path,
return the storage path if successful,
or None if the download fails for some reason
"""
try:
# try to make sure the folder for the download exists
download_folder = os.path.split(path)[0]
if not os.access(download_folder, os.W_OK):
os.makedirs(download_folder)
result = urllib.urlretrieve(url, path)
# return the storage path
return result[0]
except IOError as e:
print("download of %s to %s failed with exception: %s" % (url, path, e))
return None
def create_RPM_cache_folder():
"""Create RPM package cache folder if it does not already exist"""
if not os.path.exists(RPM_FOLDER_NAME):
os.makedirs(RPM_FOLDER_NAME)
def copyUpdatedFiles(tag, updates, cwd, builddir):
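    """Copy files changed since `tag` into the updates tree, mapping each
    source path to its runtime location, e.g. pyanaconda modules go under
    run/install/updates/pyanaconda and systemd units under
    usr/lib/systemd/system.
    """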
def install_to_dir(fname, relpath):
sys.stdout.write("Including %s\n" % fname)
outdir = os.path.join(updates, relpath)
if not os.path.isdir(outdir):
os.makedirs(outdir)
shutil.copy2(fname, outdir)
def install_gschema():
# Run make install to a temp directory and pull the compiled file out
# of it
tmpdir = tempfile.mkdtemp()
try:
            os.system('make -C %s/data/window-manager/config install DESTDIR=%s' %
                      (builddir, tmpdir))
# Find the .compiled file
for root, _dirs, files in os.walk(tmpdir):
for f in files:
if f.endswith('.compiled'):
install_to_dir(os.path.join(root, f),
'usr/share/anaconda/window-manager/glib-2.0/schemas')
finally:
shutil.rmtree(tmpdir)
# Updates get overlaid onto the runtime filesystem. Anaconda expects them
# to be in /run/install/updates, so put them in
# $updatedir/run/install/updates.
tmpupdates = updates.rstrip('/')
if not tmpupdates.endswith("/run/install/updates"):
tmpupdates = os.path.join(tmpupdates, "run/install/updates")
try:
lines = doGitDiff(tag)
except RuntimeError as e:
print("ERROR: %s" % e)
return
for line in lines:
fields = line.split()
if len(fields) < 2:
continue
status = fields[0]
gitfile = fields[1]
# R is followed by a number that doesn't matter to us.
if status == "D" or status[0] == "R":
if gitfile.startswith('pyanaconda/') and gitfile.endswith(".py"):
# deleted python module, write out a stub raising RemovedModuleError
file_path = os.path.join(tmpupdates, gitfile)
if not os.path.exists(os.path.dirname(file_path)):
os.makedirs(os.path.dirname(file_path))
with open(file_path, "w") as fobj:
fobj.write('from pyanaconda.errors import RemovedModuleError\n')
fobj.write('raise RemovedModuleError("This module no longer exists!")\n')
if status == "D":
continue
elif status[0] == "R":
gitfile = fields[2]
if gitfile.endswith('.spec.in') or (gitfile.find('Makefile') != -1) or \
gitfile.endswith('.c') or gitfile.endswith('.h') or \
gitfile.endswith('.sh') or gitfile == 'configure.ac':
continue
if gitfile.endswith('.glade'):
            # Some UI files should go under ui/<dir> where dir is the
            # directory above the file.glade
dir_parts = os.path.dirname(gitfile).split(os.path.sep)
g_idx = dir_parts.index("gui")
uidir = os.path.sep.join(dir_parts[g_idx+1:])
path_comps = [tmpupdates, "ui"]
if uidir:
path_comps.append(uidir)
install_to_dir(gitfile, os.path.join(*path_comps))
elif gitfile.startswith('pyanaconda/'):
# pyanaconda stuff goes into /tmp/updates/[path]
dirname = os.path.join(tmpupdates, os.path.dirname(gitfile))
install_to_dir(gitfile, dirname)
elif gitfile == 'anaconda':
# anaconda itself we just overwrite
install_to_dir(gitfile, "usr/sbin")
elif gitfile.endswith('.service') or gitfile.endswith(".target"):
# same for systemd services
install_to_dir(gitfile, "usr/lib/systemd/system")
elif gitfile.endswith('/anaconda-generator'):
# yeah, this should probably be more clever..
install_to_dir(gitfile, "usr/lib/systemd/system-generators")
elif gitfile == "data/tmux.conf":
install_to_dir(gitfile, "usr/share/anaconda")
elif gitfile == "data/anaconda-gtk.css":
install_to_dir(gitfile, "run/install/updates")
elif gitfile == "data/interactive-defaults.ks":
install_to_dir(gitfile, "usr/share/anaconda")
elif gitfile == "data/anaconda_options.txt":
install_to_dir(gitfile, "usr/share/anaconda")
elif gitfile == "data/liveinst/liveinst":
install_to_dir(gitfile, "usr/sbin")
elif gitfile.startswith("data/pixmaps"):
install_to_dir(gitfile, "usr/share/anaconda/pixmaps")
elif gitfile.startswith("widgets/data/pixmaps"):
install_to_dir(gitfile, "usr/share/anaconda/pixmaps")
elif gitfile.startswith("data/ui/"):
install_to_dir(gitfile, "usr/share/anaconda/ui")
elif gitfile.startswith("data/window-manager/config"):
install_gschema()
elif gitfile.startswith("data/window-manager/theme"):
install_to_dir(gitfile, "usr/share/themes/Anaconda/metacity-1")
elif gitfile.startswith("data/post-scripts/"):
install_to_dir(gitfile, "usr/share/anaconda/post-scripts")
elif any(gitfile.endswith(libexec_script) for libexec_script in \
("anaconda-yum", "zramswapon", "zramswapoff", "zram-stats")):
install_to_dir(gitfile, "usr/libexec/anaconda")
elif gitfile.endswith("AnacondaWidgets.py"):
import gi
install_to_dir(gitfile, gi._overridesdir[1:])
elif gitfile.find('/') != -1:
fields = gitfile.split('/')
subdir = fields[0]
            if subdir in ['po', 'scripts', 'command-stubs', 'tests',
                          'docs', 'fonts', 'utils',
                          'liveinst', 'dracut', 'data']:
continue
else:
sys.stdout.write("Including %s\n" % (gitfile,))
install_to_dir(gitfile, tmpupdates)
else:
sys.stdout.write("Including %s\n" % (gitfile,))
install_to_dir(gitfile, tmpupdates)
def _compilableChanged(tag, compilable):
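    """Return True if any Makefile, .c, .h or .py file under `compilable`
    changed since the given tag (deleted files are ignored).
    """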
try:
lines = doGitDiff(tag, [compilable])
except RuntimeError as e:
print("ERROR: %s" % e)
return
for line in lines:
fields = line.split()
if len(fields) < 2:
continue
status = fields[0]
gitfile = fields[1]
if status == "D":
continue
if gitfile.startswith('Makefile') or gitfile.endswith('.h') or \
gitfile.endswith('.c') or gitfile.endswith('.py'):
return True
return False
def isysChanged(tag):
return _compilableChanged(tag, 'pyanaconda/isys')
def widgetsChanged(tag):
return _compilableChanged(tag, 'widgets')
def auditdChanged(tag):
return _compilableChanged(tag, 'pyanaconda/isys/auditd.c') or \
_compilableChanged(tag, 'pyanaconda/isys/auditd.h')
def checkAutotools(srcdir, builddir):
# Assumes that cwd is srcdir
if not os.path.isfile(os.path.join(builddir, 'Makefile')):
if not os.path.isfile('configure'):
os.system('./autogen.sh')
os.chdir(builddir)
os.system(os.path.join(srcdir, 'configure') + ' --prefix=`rpm --eval %_prefix` --enable-gtk-doc --enable-introspection')
os.chdir(srcdir)
def copyUpdatedIsys(updates, srcdir, builddir):
os.chdir(srcdir)
print("copyUpdatedIsys BUILDDIR %s" % builddir)
checkAutotools(srcdir, builddir)
os.system('make -C %s -j %d' % (builddir, multiprocessing.cpu_count()))
# Updates get overlaid onto the runtime filesystem. Anaconda expects them
# to be in /run/install/updates, so put them in
# $updatedir/run/install/updates.
tmpupdates = updates.rstrip('/')
if not tmpupdates.endswith("/run/install/updates/pyanaconda"):
tmpupdates = os.path.join(tmpupdates, "run/install/updates/pyanaconda")
if not os.path.isdir(tmpupdates):
os.makedirs(tmpupdates)
isysmodule = os.path.realpath(os.path.join(builddir,'pyanaconda/isys/.libs/_isys.so'))
if os.path.isfile(isysmodule):
shutil.copy2(isysmodule, tmpupdates)
def copyUpdatedAuditd(updates, srcdir, builddir):
os.chdir(srcdir)
print("copyUpdatedIsys BUILLDIR %s" % builddir)
auditdir = updates + '/usr/sbin'
checkAutotools(srcdir, builddir)
os.system('make -C %s -j %d auditd' % (builddir + '/pyanaconda/isys', multiprocessing.cpu_count()))
# Copy the auditd binary to /usr/sbin
if not os.path.isdir(auditdir):
os.makedirs(auditdir)
auditd = builddir + '/pyanaconda/isys/auditd'
if os.path.isfile(auditd):
shutil.copy2(auditd, auditdir)
def copyUpdatedWidgets(updates, srcdir, builddir):
os.chdir(srcdir)
if os.path.isdir("/usr/lib64"):
libdir = "/usr/lib64/"
else:
libdir = "/usr/lib/"
if not os.path.isdir(updates + libdir):
os.makedirs(updates + libdir)
if not os.path.isdir(updates + libdir + "girepository-1.0"):
os.makedirs(updates + libdir + "girepository-1.0")
checkAutotools(srcdir, builddir)
os.system('make -C %s' % builddir)
libglob = os.path.normpath(builddir + "/widgets/src/.libs") + "/libAnacondaWidgets.so*"
for path in glob.glob(libglob):
if os.path.islink(path) and not os.path.exists(updates + libdir + os.path.basename(path)):
os.symlink(os.readlink(path), updates + libdir + os.path.basename(path))
elif os.path.isfile(path):
shutil.copy2(path, updates + libdir)
typeglob = os.path.realpath(builddir + "/widgets/src") + "/AnacondaWidgets-*.typelib"
for typelib in glob.glob(typeglob):
if os.path.isfile(typelib):
shutil.copy2(typelib, updates + libdir + "girepository-1.0")
def copyTranslations(updates, srcdir, builddir):
localedir = "/usr/share/locale/"
# Ensure all the message files are up to date
if os.system('make -C %s/po' % builddir) != 0:
sys.exit(1)
# From here gettext puts everything in $srcdir
    # For each language in LINGUAS, install srcdir/po/$language.gmo as
# /usr/share/locale/$language/LC_MESSAGES/anaconda.mo
with open(srcdir + '/po/LINGUAS') as linguas:
for line in linguas.readlines():
if line.startswith('#'):
continue
for lang in line.strip().split(" "):
if not os.path.isdir(updates + localedir + lang + "/LC_MESSAGES"):
os.makedirs(updates + localedir + lang + "/LC_MESSAGES")
shutil.copy2(srcdir + "/po/" + lang + ".gmo",
updates + localedir + lang + "/LC_MESSAGES/anaconda.mo")
def addRpms(updates_path, add_rpms):
"""Add content one or more RPM packages to the updates image
:param updates_path: path to the updates image folder
:type updates_path: string
:param add_rpms: list of paths to RPM files
:type add_rpms: list of strings
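
    Each RPM is unpacked in place with a pipeline equivalent to
    (illustrative): cd <updates_path> && rpm2cpio foo.rpm | cpio -dium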
"""
# convert all the RPM paths to absolute paths, so that
# relative paths can be used with -a/--add
add_rpms = map(os.path.abspath, add_rpms)
# resolve wildcards and also eliminate non-existing RPMs
resolved_rpms = []
for rpm in add_rpms:
resolved_path = glob.glob(rpm)
        if not resolved_path:
            print("warning: requested rpm %s does not exist and can't be added" % rpm)
elif len(resolved_path) > 1:
print("wildcard %s resolved to %d paths" % (rpm, len(resolved_path)))
resolved_rpms.extend(resolved_path)
for rpm in resolved_rpms:
cmd = "cd %s && rpm2cpio %s | cpio -dium" % (updates_path, rpm)
sys.stdout.write(cmd+"\n")
os.system(cmd)
def createUpdatesImage(cwd, updates):
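    """Pack the updates directory into cwd/updates.img, a pigz-compressed
    cpio archive (requires the pigz tool on PATH).
    """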
os.chdir(updates)
os.system("find . | cpio -c -o | pigz -9cv > %s/updates.img" % (cwd,))
sys.stdout.write("updates.img ready\n")
def check_for_new_packages(tag, arch, args, specfile_path):
"""Download any new packages added to Requires and Defines
since the given tag, return list of RPM paths
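
    For example (illustrative), a new spec line
    "+Requires: langtable-data >= %{langtablever}" together with an updated
    "+%define langtablever 0.0.13" triggers a lookup for a matching
    langtable-data RPM, first in the local cache and then in Koji.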
"""
new_packages = {}
version_vars = {}
all_used_version_vars = {}
fedora_number = get_fedora_version()
release_id = "fc%s" % fedora_number
Package = namedtuple("Package", "name version version_request req_tuple")
try:
diff = doGitContentDiff(tag, ["anaconda.spec.in"])
except RuntimeError as e:
print("ERROR: %s" % e)
return
new_requires = filter(lambda x: x.startswith("+Requires:"), diff)
new_defines = filter(lambda x: x.startswith("+%define"), diff)
with open(specfile_path) as f:
spec_content = f.readlines()
all_defines = filter(lambda x: x.startswith("%define"), spec_content)
all_requires = filter(lambda x: x.startswith("Requires:"), spec_content)
# parse all defines, to get the version variables
for define in all_defines:
# second word & split the "ver" suffix
package = define.split()[1][:-3]
version = define.split()[2]
version_vars[package] = version
# parse all Requires and store lines referencing
# version variables
# ex.: Requires: langtable-data >= %{langtablever}
# will be stored as:
# langtable : [(langtable-data, VERSION_MORE_OR_EQUAL)]
for require in all_requires:
parts = require.split()
        # we are interested only in Requires lines using
        # version variables
if len(parts) >= 4 and parts[3].startswith('%'):
package_name = parts[1]
version_request = VERSION_OP_MAP.get(parts[2])
# drop the %{ prefix and ver} suffix
version_var = parts[3][2:-4]
# store (package_name, version_request) tuples for the given
# version variable
            # a single version variable might be used to set the version of
            # multiple packages; see langtable for an example of such usage
if version_var in all_used_version_vars:
all_used_version_vars[version_var].append((package_name, version_request))
else:
all_used_version_vars[version_var] = [(package_name, version_request)]
# parse all new defines
for define in new_defines:
# second word & split the "ver" suffix
parts = define.split()
version_var = parts[1][:-3]
version = parts[2]
# if there are any packages in Requires using the version variable
# corresponding to the current %define, add a new package request
packages_using_this_define = all_used_version_vars.get(version_var, [])
# multiple requests might be using a single version variable
for package_name, version_request in packages_using_this_define:
if not version.count("-"):
version = "%s-1" % version
pkg_name = "%s-%s.%s.%s.rpm" % (package_name, version,
release_id, arch)
pkg_tuple = get_pkg_tuple(pkg_name)
req_tuple = get_req_tuple(pkg_tuple, version_request)
new_packages[package_name] = Package(package_name, version,
version_request, req_tuple)
# then parse requires and substitute version variables where needed
for req in new_requires:
parts = req.split()
if len(parts) < 2:
# must contain at least "+Requires:" and "some_package"
continue
package_name = parts[1]
# skip packages that were already added from new %defines
if package_name in new_packages:
continue
version_request = None
if len(parts) > 2:
# get the version request operator
version_operator = parts[2]
# at the moment only = (considered the default),
# >= and <= are supported
version_request = VERSION_OP_MAP.get(version_operator)
version = parts.pop()
else:
version = ""
# skip requires of our own packages
if version == "%{version}-%{release}":
continue
# handle version variables (%{package-namever})
if version.startswith("%"):
# drop the %{ prefix and ver} suffix
version_var = version[2:-4]
# resolve the variable to package version
try:
version = version_vars[version_var]
except KeyError:
                # if this version variable is missing in version_vars,
                # there must be a missing %define in the specfile
print("%%define missing for %s in the Anaconda specfile" % version)
exit(1)
# create metadata tuple for version range checking
if version:
# check if version contains a build number
# and add a fake one if it doesn't, as the
# newest package will be fetched from Koji anyway
if not version.count("-"):
version = "%s-1" % version
pkg_name = "%s-%s.%s.%s.rpm" % (package_name, version, release_id, arch)
pkg_tuple = get_pkg_tuple(pkg_name)
else:
pkg_tuple = (package_name, arch, '', '', '')
req_tuple = get_req_tuple(pkg_tuple, version_request)
new_packages[package_name] = Package(package_name, version,
version_request, req_tuple)
# report about new package requests
if new_packages:
print("%d new packages found in Requires or updated %%defines for Requires:" %
len(new_packages))
for p in new_packages.values():
if p.version_request:
print("%s %s %s" % (p.name, p.version_request, p.version))
else:
print(p.name)
# remove ignored packages
ignored_count = 0
for ignored_package in args.ignored_packages:
matches = fnmatch.filter(new_packages, ignored_package)
# the ignored package specifications support glob
for match in matches:
print("the new package %s matches %s and will be ignored" % (match, ignored_package))
del new_packages[match]
ignored_count += 1
if ignored_count:
print("%d new packages have been ignored" % ignored_count)
else:
print("no new Requires or updated %%defines for Requires found")
return []
# make sure the RPM cache folder exists
create_RPM_cache_folder()
# get package names for RPMs added by the -a/--add flags
added_names = {}
for path in args.add_rpms:
try:
basename = os.path.basename(path)
name = get_pkg_tuple(basename)[0]
added_names[name] = basename
except ValueError:
print("malformed RPM name ? : %s" % path)
# remove available packages from the list
new_packages, include_rpms = remove_local_packages(new_packages, arch,
release_id, added_names)
# if some packages are not locally available, download them from Koji
if new_packages:
include_rpms.extend(get_RPMs_from_koji(new_packages, fedora_number, arch))
# return absolute paths for the packages
return map(os.path.abspath, include_rpms)
def remove_local_packages(packages, arch, release_id, added_rpms):
"""Remove locally available RPMs from the list of needed packages,
return locally unavailable packages and paths to relevant locally
available RPMs for inclusion"""
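    # cached RPMs live in RPM_FOLDER_NAME; packages kept from older releases
    # sit in a for_<release_id> subfolder, e.g. for_fc21 (illustrative)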
current_release_dir = RPM_RELEASE_DIR_TEMPLATE % release_id
# list all package names and version for the RPMs already in cache
folder_glob = os.path.join(RPM_FOLDER_NAME, "*.rpm")
folder_glob = os.path.abspath(folder_glob)
release_folder_glob = os.path.join(RPM_FOLDER_NAME, current_release_dir, "*.rpm")
release_folder_glob = os.path.abspath(release_folder_glob)
include_rpms = []
skipped_packages = []
# first remove from packages any packages that were provided manually
for package_name in packages.keys():
# check if the package was added by the
# -a/--add option
if package_name in added_rpms:
# the package was added by the -a/--add option,
# remove it from the list so it is not loaded from
# RPM cache and not fetched
# NOTE: the version of the added package is not checked,
# so "added" packages are always used, even if their
# version does not comply with the one given in the specfile
del packages[package_name]
# remember which packages were skipped due to the
# -a/--add option
skipped_packages.append(added_rpms[package_name])
# only check RPMs that are either noarch or built for the
# currently specified architecture
allowed = ("noarch.rpm", "%s.rpm" % arch)
relevant_rpms = [x for x in glob.glob(folder_glob) if x.endswith(allowed)]
# also add any RPMS from the current release folder
# (has RPMs from older releases that were not yet rebuilt
# for the current release)
relevant_rpms.extend(x for x in glob.glob(release_folder_glob)
if x.endswith(allowed))
# iterate over all relevant cached RPMs and check if they are needed
for rpm_path in relevant_rpms:
proc = subprocess.Popen(['rpm', '-qp', '--queryformat',
'%{NAME} %{VERSION} %{RELEASE}', rpm_path],
stdout=subprocess.PIPE,
stderr=None)
proc_output = proc.communicate()
if proc.returncode != 0:
continue
name, version, release = proc_output[0].split()
# get the build number and release id
build_id, package_release_id = release.rsplit(".", 1)
        # If a package is stored in the for_<release_id>
        # subfolder, we don't check its release id,
        # because it is a package that has not been rebuilt
        # for a new release but is still the latest version.
        # If a package is not stored in a for_<release_id> subfolder,
        # we check the release id to filter out old cached packages.
if not os.path.split(rpm_path)[0].endswith(current_release_dir):
if package_release_id != release_id:
continue
# add the build id to the version string
version_build = "%s-%s" % (version, build_id)
# check if the package is needed
if name in packages:
package = packages[name]
package_version = package.version
# handle versions with build number and without it
if not package_version or package_version == version_build or \
package_version == version or \
check_package_version(rpm_path, package):
include_rpms.append(rpm_path)
del packages[name]
# return only those packages that are not locally available
if include_rpms and not packages and not added_rpms:
print("all %d required RPMs found locally:" % len(include_rpms))
elif include_rpms:
print("%d required RPMs found locally:" % len(include_rpms))
else:
print("no required packages found locally")
# print any locally found RPMs
for rpm in include_rpms:
print(os.path.basename(rpm))
# print skipped packages
if skipped_packages:
print('%d required packages found in the manually added RPMs:' % len(skipped_packages))
for item in skipped_packages:
print(item)
return packages, include_rpms
def get_RPMs_from_koji(packages, fedora_number, arch):
"""Get RPM download URLs for given packages and Fedora version,
return URLS and RPM filenames
"""
threads = []
rpm_paths = []
# the print lock is used to make sure only one
# thread is printing to stdout at a time
print_lock = threading.Lock()
index = 1
print("starting %d worker threads" % len(packages))
for _package_name, package in packages.items():
thread = threading.Thread(name=index, target=get_rpm_from_Koji_thread,
args=(package, fedora_number,
arch, rpm_paths, print_lock))
thread.start()
threads.append(thread)
index += 1
# wait for all threads to finish
for thread in threads:
thread.join()
print("%d RPMs have been downloaded" % len(rpm_paths))
# return the list of paths for the downloaded RPMs
return rpm_paths
def get_rpm_from_Koji_thread(package, fedora_number, arch,
rpm_paths, print_lock):
"""Download the given package from Koji and if successful,
append the path to the downloaded file to the rpm_paths list
"""
# just to be sure, create a separate session for each query,
# as the individual lookups will run in different threads
import koji
kojiclient = koji.ClientSession('http://koji.fedoraproject.org/kojihub', {})
version = package.version
if not version:
version = "*"
# check if version contains build number or not
if len(version.split("-")) == 1:
version = "%s-*" % version
# if there is a version-request, just get all package version for the given
# release and filter them afterwards
if package.version_request:
package_glob = "%s-*.fc*.*.rpm" % (package.name)
else:
package_glob = "%s-%s.fc*.*.rpm" % (package.name, version)
# get the current thread, so output can be prefixed by thread number
prefix = "thread %s:" % threading.current_thread().name
with print_lock:
if package.version_request:
print("%s searching for: %s (version %s %s) in Koji" % (
prefix, package_glob, package.version_request, package.version))
else:
print("%s searching for: %s (any version) in Koji" % (prefix, package_glob))
# call the Koji API
results = kojiclient.search(package_glob, "rpm", "glob")
# leave only results that are either noarch
# or are built for the current architecture
allowed = ("noarch.rpm", "%s.rpm" % arch)
results = [x for x in results if x['name'].endswith(allowed)]
# remove results that don't fully match the package name
# Example: searching for glade3 and getting glade3-devel instead is wrong
results = [x for x in results if get_pkg_tuple(x['name'])[0] == package.name]
# if there is a version request (=,>=,<=), remove packages that
# are outside of the specified version range
if package.version_request:
filtered_results = []
for result in results:
# check if the version complies with the version request
if check_package_version(result['name'], package,
check_release_id=False):
filtered_results.append(result)
# replace results with filtered results
results = filtered_results
# the response from Koji has multiple release ids;
# packages that were not updated in the given release might
# have an older release id, but will still be valid for the
# given Fedora release
# therefore we go back from the current release id,
# until we either find a package or run out of release ids
# Example:
# foo-0.1.fc19.x86_64.rpm could be the latest RPM for
# Fedora 19, 20 & 21, if foo was not updated since the 0.1 release
def is_in_release(result, release_number):
pkg_tuple = get_pkg_tuple(result["name"])
        # there could be stuff like 16.git20131003.fc20,
        # so we split by all dots and take the last component
release_id = pkg_tuple[4].split(".").pop()
return release_id == "fc%d" % release_number
suitable_results = []
release_number_override = None
for release_number in range(fedora_number, 0, -1):
suitable_results = [x for x in results if is_in_release(x, release_number)]
if suitable_results:
if release_number != fedora_number:
release_number_override = release_number
break
results = suitable_results
if results and release_number_override:
with print_lock:
print("%s %s not found in fc%d, getting package from fc%d" %
(prefix, package.name, fedora_number, release_number_override))
if results: # any packages left ?
# as the newest packages are on the bottom of the
# result list, just pop the last item
newest_package = results.pop()
package_metadata = {}
rpm_name = newest_package['name']
package_metadata['rpm_name'] = rpm_name
with print_lock:
print("%s RPM found: %s" % (prefix, rpm_name))
rpm_id = newest_package['id']
# get info about the RPM to
# get the arch and build_id
result = kojiclient.getRPM(rpm_id)
package_metadata['arch'] = result['arch']
package_metadata['release'] = result['release']
build_id = result['build_id']
# so we can get the toplevel package name and version
result = kojiclient.getBuild(build_id)
package_metadata['toplevel_name'] = result['package_name']
package_metadata['toplevel_version'] = result['version']
# and use the information to build the URL
url = KOJI_BASE_URL % package_metadata
# simple, isn't it ? :)
# build RPM storage path
release_dir = ""
if release_number_override:
# Using package from older release, store it in a sub-folder
# so that it is not downloaded again each time.
release_id = "fc%d" % fedora_number
release_dir = RPM_RELEASE_DIR_TEMPLATE % release_id
        # if a package from an older release is used, the release subfolder is
        # added to the storage path, otherwise the package is downloaded to the
        # main folder
download_path = os.path.join(RPM_FOLDER_NAME, release_dir, rpm_name)
# check if the download was successful
storage_path = download_to_file(url, download_path)
if storage_path is not None:
with print_lock:
print("%s download done: %s" % (prefix, rpm_name))
# add successful downloads to the RPM inclusion list
rpm_paths.append(storage_path)
# GIL should be enough for appending to the list
# from multiple threads
else:
with print_lock:
print("%s download failed: %s @ %s" % (prefix, rpm_name, url))
else:
with print_lock:
if package.version_request:
print("%s %s in version %s %s was not found in Koji" % (
prefix, package.name, package.version_request, package.version))
else:
print("%s %s in any version was not found in Koji" % (prefix, package.name))
class ExtendAction(argparse.Action):
""" A parsing action that extends a list of items instead of appending to
it. Useful where there is an option that can be used multiple times,
and each time the values yielded are a list, and a single list is
desired.
"""
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, getattr(namespace, self.dest, []) + values)
def main():
cwd = os.getcwd()
configure = os.path.realpath(os.path.join(cwd, 'configure.ac'))
spec = os.path.realpath(os.path.join(cwd, 'anaconda.spec.in'))
updates = os.path.join(cwd, 'updates')
parser = argparse.ArgumentParser(description="Make Anaconda updates image")
parser.add_argument('-k', '--keep', action='store_true',
help='do not delete updates subdirectory')
parser.add_argument('-c', '--compile', action='store_true',
help='compile code if there are isys changes')
parser.add_argument('-t', '--tag', action='store', type=str,
help='make updates image from TAG to HEAD')
parser.add_argument('-o', '--offset', action='store', type=int, default=0,
help='make image from (latest_tag - OFFSET) to HEAD')
parser.add_argument('-p', '--po', action='store_true',
help='update translations')
parser.add_argument('-a', '--add', action=ExtendAction, type=str, nargs='+',
dest='add_rpms', metavar='PATH_TO_RPM', default=[],
help='add contents of RPMs to the updates image (glob supported)')
parser.add_argument('-f', '--fetch', action='store', type=str, metavar="ARCH",
help='autofetch new dependencies from Koji for ARCH')
parser.add_argument('-i', '--ignore', action=ExtendAction, type=str, metavar="PACKAGE_NAME",
dest="ignored_packages", nargs='+', default=[],
help='ignore this package when autofetching dependencies (glob supported)')
parser.add_argument('-b', '--builddir', action='store', type=str,
metavar='BUILDDIR', help='build directory for shared objects')
args = parser.parse_args()
if not os.path.isfile(configure) and not os.path.isfile(spec):
sys.stderr.write("You must be at the top level of the anaconda source tree.\n")
sys.exit(1)
if not args.tag:
# add a fake tag to the arguments to be consistent
if args.offset < 1:
args.tag = getArchiveTag(configure, spec)
else:
args.tag = getArchiveTagOffset(configure, spec, args.offset)
sys.stdout.write("Using tag: %s\n" % args.tag)
if args.builddir:
if os.path.isabs(args.builddir):
builddir = args.builddir
else:
builddir = os.path.join(cwd, args.builddir)
else:
builddir = cwd
print("BUILDDIR %s" % builddir)
if not os.path.isdir(updates):
os.makedirs(updates)
copyUpdatedFiles(args.tag, updates, cwd, builddir)
if args.compile:
if isysChanged(args.tag):
copyUpdatedIsys(updates, cwd, builddir)
if widgetsChanged(args.tag):
copyUpdatedWidgets(updates, cwd, builddir)
if auditdChanged(args.tag):
copyUpdatedAuditd(updates, cwd, builddir)
if args.po:
copyTranslations(updates, cwd, builddir)
if args.add_rpms:
args.add_rpms = list(set(args.add_rpms))
print('%d RPMs added manually:' % len(args.add_rpms))
for item in args.add_rpms:
print(os.path.basename(item))
if args.fetch:
arch = args.fetch
rpm_paths = check_for_new_packages(args.tag, arch, args, spec)
args.add_rpms.extend(rpm_paths)
if args.add_rpms:
addRpms(updates, args.add_rpms)
createUpdatesImage(cwd, updates)
if not args.keep:
shutil.rmtree(updates)
if __name__ == "__main__":
main()