#!/usr/bin/python2
#
# makeupdates - Generate an updates.img containing changes since the last
#               tag, but only changes to the main anaconda runtime.
#               initrd/stage1 updates have to be created separately.
#
# Copyright (C) 2009 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Author: David Cantrell <dcantrell@redhat.com>

import os
import shutil
import subprocess
import sys
import re
import glob
import urllib
import threading
import multiprocessing
import argparse
import tempfile
import fnmatch
from collections import namedtuple

try:
    from rpmUtils import miscutils  # available from the yum-utils package
except ImportError:
    print("You need to install the yum-utils package to run makeupdates.")
    exit(1)

RPM_FOLDER_NAME = os.path.expanduser("~/.anaconda_updates_rpm_cache")
RPM_RELEASE_DIR_TEMPLATE = "for_%s"
KOJI_BASE_URL = "http://kojipkgs.fedoraproject.org//packages/" \
                "%(toplevel_name)s/%(toplevel_version)s/%(release)s/%(arch)s/%(rpm_name)s"
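
# A filled-in example of the template above (all values illustrative only):
#   http://kojipkgs.fedoraproject.org//packages/langtable/0.0.13/1.fc21/noarch/langtable-data-0.0.13-1.fc21.noarch.rpm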

VERSION_EQUAL = "="
VERSION_MORE_OR_EQUAL = ">="
VERSION_LESS_OR_EQUAL = "<="

VERSION_OP_MAP = {
    "=": VERSION_EQUAL,
    ">=": VERSION_MORE_OR_EQUAL,
    "<=": VERSION_LESS_OR_EQUAL
}
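
# Note: lookups below use VERSION_OP_MAP.get(op), so an operator not listed
# here (e.g. "<" or ">") silently maps to None, the same as having no version
# request at all.
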
def getArchiveTag(configure, spec):
    tag = ""

    with open(configure, "r") as f:
        for line in f:
            if line.startswith('AC_INIT('):
                fields = line.split('[')
                tag += fields[1].split(']')[0] + '-' + fields[2].split(']')[0]
                break
            else:
                continue

    with open(spec, "r") as f:
        for line in f:
            if line.startswith('Release:'):
                tag += '-' + line.split()[1].split('%')[0]
            else:
                continue

    return tag
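
# Example (values are illustrative only): for a configure.ac line such as
#   AC_INIT([anaconda], [21.48.21], [anaconda-devel-list@redhat.com])
# and a spec file line "Release: 1%{?dist}", getArchiveTag() returns
# "anaconda-21.48.21-1", which is expected to match an existing git tag.
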
def getArchiveTagOffset(configure, spec, offset):
    tag = getArchiveTag(configure, spec)

    if not tag.count("-") >= 2:
        return tag
    ldash = tag.rfind("-")
    bldash = tag[:ldash].rfind("-")
    ver = tag[bldash+1:ldash]

    if not ver.count(".") >= 1:
        return tag
    ver = ver[:ver.rfind(".")]

    if not len(ver) > 0:
        return tag
    globstr = "refs/tags/" + tag[:bldash+1] + ver + ".*"
    proc = subprocess.Popen(['git', 'for-each-ref', '--sort=taggerdate',
                             '--format=%(tag)', globstr],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE).communicate()
    lines = proc[0].strip("\n").split('\n')
    lines.reverse()

    try:
        return lines[offset]
    except IndexError:
        return tag
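
# Example (illustrative): for the tag "anaconda-21.48.21-1" the glob above
# becomes "refs/tags/anaconda-21.48.*"; matching tags are sorted by
# taggerdate, newest first, and the one OFFSET entries back is returned.
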
def get_anaconda_version():
    """Get the current anaconda version as a string from the configure script"""
    with open("configure.ac") as f:
        match = re.search(r"AC_INIT\(\[.*\],\ \[(.*)\],\ \[.*\]\)", f.read())
        return match.groups()[0]

def get_fedora_version():
    """Return an integer representing the current Fedora number,
    based on the Anaconda version"""
    anaconda_version = get_anaconda_version()
    return int(anaconda_version.split(".")[0])
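
# Example: an Anaconda version of "21.48.21" yields the Fedora number 21,
# since the major version tracks the Fedora release it targets.
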
def get_pkg_tuple(filename):
    """Split a package filename into name, version, release, epoch, arch

    :param filename: RPM package filename
    :type filename: string
    :returns: package metadata tuple
    :rtype: tuple
    """
    name, version, release, epoch, arch = miscutils.splitFilename(filename)
    return (name, arch, epoch, version, release)
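
# Example (illustrative): for "anaconda-21.48.21-1.fc21.x86_64.rpm",
# splitFilename() yields name "anaconda", version "21.48.21",
# release "1.fc21", epoch '' and arch "x86_64", so get_pkg_tuple() returns
# ("anaconda", "x86_64", '', "21.48.21", "1.fc21").
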
def get_req_tuple(pkg_tuple, version_request):
    """Return a package version requirements tuple

    :param pkg_tuple: package metadata tuple
    :type pkg_tuple: tuple
    :param version_request: version request constant or None
    :returns: version request tuple
    :rtype: tuple
    """
    name, _arch, epoch, version, release = pkg_tuple
    return (name, version_request, (epoch, version, release))
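
# Example (illustrative): get_req_tuple(("anaconda", "x86_64", '', "21.48.21",
# "1.fc21"), VERSION_MORE_OR_EQUAL) returns
# ("anaconda", ">=", ('', "21.48.21", "1.fc21")), the format expected by
# rpmUtils.miscutils.rangeCheck().
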
def check_package_version(filename, package, check_release_id=True):
    """Check if the package described by filename complies with the required
    version and the version request operator

    :param filename: the package filename to check
    :type filename: string
    :param package: specification of the required package
    :type package: named tuple
    :returns: True if filename satisfies the package version request,
              False otherwise
    :rtype: bool
    """
    # drop all path components other than the filename
    # (if present)
    filename = os.path.basename(filename)
    # split the name into components
    pkg_tuple = get_pkg_tuple(filename)
    if check_release_id:
        # get release ids for the request and the package
        # and strip them of any build/git garbage
        request_release = package.req_tuple[2][2].rsplit(".", 1).pop()
        package_release = pkg_tuple[4].rsplit(".", 1).pop()
        # rangeCheck actually ignores different release ids,
        # so we need to do it here
        if request_release != package_release:
            return False
    return bool(miscutils.rangeCheck(package.req_tuple, pkg_tuple))
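
# Example (illustrative): with a request of foo >= 1.2-1.fc21,
# check_package_version("foo-1.3-1.fc21.x86_64.rpm", package) first compares
# the bare release ids ("fc21" vs "fc21") and then lets rangeCheck() compare
# the (epoch, version, release) tuples, so it returns True here.
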
def doGitDiff(tag, args=None):
    if args is None:
        args = []
    cmd = ['git', 'diff', '--name-status', tag] + args
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    output = proc.communicate()

    if proc.returncode:
        raise RuntimeError("Error running %s: %s" % (" ".join(cmd), output[1]))

    lines = output[0].split('\n')
    return lines
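
# doGitDiff() yields "git diff --name-status" lines such as
#   M       pyanaconda/install.py
#   D       pyanaconda/vnc.py
#   R100    old/path.py    new/path.py
# (a status letter, then tab-separated paths; renames carry a similarity
# score), which copyUpdatedFiles() below splits into whitespace fields.
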
def doGitContentDiff(tag, args=None):
    if args is None:
        args = []
    cmd = ['git', 'diff', tag] + args
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    output = proc.communicate()
    if proc.returncode:
        raise RuntimeError("Error running %s: %s" % (" ".join(cmd), output[1]))

    lines = output[0].split('\n')
    return lines

def download_to_file(url, path):
    """Download a file to the given path,
    return the storage path if successful,
    or None if the download fails for some reason
    """
    try:
        # try to make sure the folder for the download exists
        download_folder = os.path.split(path)[0]
        if not os.access(download_folder, os.W_OK):
            os.makedirs(download_folder)
        result = urllib.urlretrieve(url, path)
        # return the storage path
        return result[0]
    except IOError as e:
        print("download of %s to %s failed with exception: %s" % (url, path, e))
        return None

def create_RPM_cache_folder():
    """Create the RPM package cache folder if it does not already exist"""
    if not os.path.exists(RPM_FOLDER_NAME):
        os.makedirs(RPM_FOLDER_NAME)

def copyUpdatedFiles(tag, updates, cwd, builddir):
    def install_to_dir(fname, relpath):
        sys.stdout.write("Including %s\n" % fname)
        outdir = os.path.join(updates, relpath)
        if not os.path.isdir(outdir):
            os.makedirs(outdir)
        shutil.copy2(fname, outdir)

    def install_gschema():
        # Run make install to a temp directory and pull the compiled file
        # out of it
        tmpdir = tempfile.mkdtemp()
        try:
            os.system('make -C %s/data/window-manager/config install DESTDIR=%s' %
                      (builddir, tmpdir))
            # Find the .compiled file
            for root, _dirs, files in os.walk(tmpdir):
                for f in files:
                    if f.endswith('.compiled'):
                        install_to_dir(os.path.join(root, f),
                                       'usr/share/anaconda/window-manager/glib-2.0/schemas')
        finally:
            shutil.rmtree(tmpdir)

    # Updates get overlaid onto the runtime filesystem. Anaconda expects them
    # to be in /run/install/updates, so put them in
    # $updatedir/run/install/updates.
    tmpupdates = updates.rstrip('/')
    if not tmpupdates.endswith("/run/install/updates"):
        tmpupdates = os.path.join(tmpupdates, "run/install/updates")

    try:
        lines = doGitDiff(tag)
    except RuntimeError as e:
        print("ERROR: %s" % e)
        return

    for line in lines:
        fields = line.split()

        if len(fields) < 2:
            continue

        status = fields[0]
        gitfile = fields[1]

        # R is followed by a number that doesn't matter to us.
        if status == "D" or status[0] == "R":
            if gitfile.startswith('pyanaconda/') and gitfile.endswith(".py"):
                # deleted python module, write out a stub raising RemovedModuleError
                file_path = os.path.join(tmpupdates, gitfile)
                if not os.path.exists(os.path.dirname(file_path)):
                    os.makedirs(os.path.dirname(file_path))
                with open(file_path, "w") as fobj:
                    fobj.write('from pyanaconda.errors import RemovedModuleError\n')
                    fobj.write('raise RemovedModuleError("This module no longer exists!")\n')

            if status == "D":
                continue
            elif status[0] == "R":
                gitfile = fields[2]

        if gitfile.endswith('.spec.in') or (gitfile.find('Makefile') != -1) or \
           gitfile.endswith('.c') or gitfile.endswith('.h') or \
           gitfile.endswith('.sh') or gitfile == 'configure.ac':
            continue

        if gitfile.endswith('.glade'):
            # Some UI files should go under ui/<dir> where dir is the
            # directory above the file.glade
            dir_parts = os.path.dirname(gitfile).split(os.path.sep)
            g_idx = dir_parts.index("gui")
            uidir = os.path.sep.join(dir_parts[g_idx+1:])
            path_comps = [tmpupdates, "ui"]
            if uidir:
                path_comps.append(uidir)
            install_to_dir(gitfile, os.path.join(*path_comps))
        elif gitfile.startswith('pyanaconda/'):
            # pyanaconda stuff goes into /tmp/updates/[path]
            dirname = os.path.join(tmpupdates, os.path.dirname(gitfile))
            install_to_dir(gitfile, dirname)
        elif gitfile == 'anaconda':
            # anaconda itself we just overwrite
            install_to_dir(gitfile, "usr/sbin")
        elif gitfile.endswith('.service') or gitfile.endswith(".target"):
            # same for systemd services
            install_to_dir(gitfile, "usr/lib/systemd/system")
        elif gitfile.endswith('/anaconda-generator'):
            # yeah, this should probably be more clever..
            install_to_dir(gitfile, "usr/lib/systemd/system-generators")
        elif gitfile == "data/tmux.conf":
            install_to_dir(gitfile, "usr/share/anaconda")
        elif gitfile == "data/anaconda-gtk.css":
            install_to_dir(gitfile, "run/install/updates")
        elif gitfile == "data/interactive-defaults.ks":
            install_to_dir(gitfile, "usr/share/anaconda")
        elif gitfile == "data/anaconda_options.txt":
            install_to_dir(gitfile, "usr/share/anaconda")
        elif gitfile == "data/liveinst/liveinst":
            install_to_dir(gitfile, "usr/sbin")
        elif gitfile.startswith("data/pixmaps"):
            install_to_dir(gitfile, "usr/share/anaconda/pixmaps")
        elif gitfile.startswith("widgets/data/pixmaps"):
            install_to_dir(gitfile, "usr/share/anaconda/pixmaps")
        elif gitfile.startswith("data/ui/"):
            install_to_dir(gitfile, "usr/share/anaconda/ui")
        elif gitfile.startswith("data/window-manager/config"):
            install_gschema()
        elif gitfile.startswith("data/window-manager/theme"):
            install_to_dir(gitfile, "usr/share/themes/Anaconda/metacity-1")
        elif gitfile.startswith("data/post-scripts/"):
            install_to_dir(gitfile, "usr/share/anaconda/post-scripts")
        elif any(gitfile.endswith(libexec_script) for libexec_script in
                 ("anaconda-yum", "zramswapon", "zramswapoff", "zram-stats")):
            install_to_dir(gitfile, "usr/libexec/anaconda")
        elif gitfile.endswith("AnacondaWidgets.py"):
            import gi
            install_to_dir(gitfile, gi._overridesdir[1:])
        elif gitfile.find('/') != -1:
            fields = gitfile.split('/')
            subdir = fields[0]
            if subdir in ['po', 'scripts', 'command-stubs', 'tests',
                          'docs', 'fonts', 'utils',
                          'liveinst', 'dracut', 'data']:
                continue
            else:
                sys.stdout.write("Including %s\n" % (gitfile,))
                install_to_dir(gitfile, tmpupdates)
        else:
            sys.stdout.write("Including %s\n" % (gitfile,))
            install_to_dir(gitfile, tmpupdates)
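
# For example (illustrative paths): a modified pyanaconda/ui/gui/spokes/welcome.py
# is copied to $updates/run/install/updates/pyanaconda/ui/gui/spokes/, while a
# change to the top-level "anaconda" script lands in $updates/usr/sbin/.
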
def _compilableChanged(tag, compilable):
    try:
        lines = doGitDiff(tag, [compilable])
    except RuntimeError as e:
        print("ERROR: %s" % e)
        return

    for line in lines:
        fields = line.split()

        if len(fields) < 2:
            continue

        status = fields[0]
        gitfile = fields[1]

        if status == "D":
            continue

        if gitfile.startswith('Makefile') or gitfile.endswith('.h') or \
           gitfile.endswith('.c') or gitfile.endswith('.py'):
            return True

    return False

def isysChanged(tag):
    return _compilableChanged(tag, 'pyanaconda/isys')

def widgetsChanged(tag):
    return _compilableChanged(tag, 'widgets')

def auditdChanged(tag):
    return _compilableChanged(tag, 'pyanaconda/isys/auditd.c') or \
           _compilableChanged(tag, 'pyanaconda/isys/auditd.h')

def checkAutotools(srcdir, builddir):
    # Assumes that cwd is srcdir
    if not os.path.isfile(os.path.join(builddir, 'Makefile')):
        if not os.path.isfile('configure'):
            os.system('./autogen.sh')
        os.chdir(builddir)
        os.system(os.path.join(srcdir, 'configure') + ' --prefix=`rpm --eval %_prefix` --enable-gtk-doc --enable-introspection')
        os.chdir(srcdir)

def copyUpdatedIsys(updates, srcdir, builddir):
    os.chdir(srcdir)
    print("copyUpdatedIsys BUILDDIR %s" % builddir)

    checkAutotools(srcdir, builddir)

    os.system('make -C %s -j %d' % (builddir, multiprocessing.cpu_count()))

    # Updates get overlaid onto the runtime filesystem. Anaconda expects them
    # to be in /run/install/updates, so put them in
    # $updatedir/run/install/updates.
    tmpupdates = updates.rstrip('/')
    if not tmpupdates.endswith("/run/install/updates/pyanaconda"):
        tmpupdates = os.path.join(tmpupdates, "run/install/updates/pyanaconda")

    if not os.path.isdir(tmpupdates):
        os.makedirs(tmpupdates)

    isysmodule = os.path.realpath(os.path.join(builddir, 'pyanaconda/isys/.libs/_isys.so'))

    if os.path.isfile(isysmodule):
        shutil.copy2(isysmodule, tmpupdates)

def copyUpdatedAuditd(updates, srcdir, builddir):
    os.chdir(srcdir)
    print("copyUpdatedAuditd BUILDDIR %s" % builddir)
    auditdir = updates + '/usr/sbin'

    checkAutotools(srcdir, builddir)

    os.system('make -C %s -j %d auditd' % (builddir + '/pyanaconda/isys', multiprocessing.cpu_count()))

    # Copy the auditd binary to /usr/sbin
    if not os.path.isdir(auditdir):
        os.makedirs(auditdir)

    auditd = builddir + '/pyanaconda/isys/auditd'
    if os.path.isfile(auditd):
        shutil.copy2(auditd, auditdir)

def copyUpdatedWidgets(updates, srcdir, builddir):
    os.chdir(srcdir)

    if os.path.isdir("/usr/lib64"):
        libdir = "/usr/lib64/"
    else:
        libdir = "/usr/lib/"

    if not os.path.isdir(updates + libdir):
        os.makedirs(updates + libdir)

    if not os.path.isdir(updates + libdir + "girepository-1.0"):
        os.makedirs(updates + libdir + "girepository-1.0")

    checkAutotools(srcdir, builddir)

    os.system('make -C %s' % builddir)

    libglob = os.path.normpath(builddir + "/widgets/src/.libs") + "/libAnacondaWidgets.so*"
    for path in glob.glob(libglob):
        if os.path.islink(path) and not os.path.exists(updates + libdir + os.path.basename(path)):
            os.symlink(os.readlink(path), updates + libdir + os.path.basename(path))
        elif os.path.isfile(path):
            shutil.copy2(path, updates + libdir)

    typeglob = os.path.realpath(builddir + "/widgets/src") + "/AnacondaWidgets-*.typelib"
    for typelib in glob.glob(typeglob):
        if os.path.isfile(typelib):
            shutil.copy2(typelib, updates + libdir + "girepository-1.0")

def copyTranslations(updates, srcdir, builddir):
    localedir = "/usr/share/locale/"

    # Ensure all the message files are up to date
    if os.system('make -C %s/po' % builddir) != 0:
        sys.exit(1)

    # From here gettext puts everything in $srcdir
    # For each language in LINGUAS, install srcdir/<lang>.gmo as
    # /usr/share/locale/$language/LC_MESSAGES/anaconda.mo
    with open(srcdir + '/po/LINGUAS') as linguas:
        for line in linguas.readlines():
            if line.startswith('#'):
                continue

            for lang in line.strip().split(" "):
                if not os.path.isdir(updates + localedir + lang + "/LC_MESSAGES"):
                    os.makedirs(updates + localedir + lang + "/LC_MESSAGES")

                shutil.copy2(srcdir + "/po/" + lang + ".gmo",
                             updates + localedir + lang + "/LC_MESSAGES/anaconda.mo")

def addRpms(updates_path, add_rpms):
    """Add the contents of one or more RPM packages to the updates image

    :param updates_path: path to the updates image folder
    :type updates_path: string
    :param add_rpms: list of paths to RPM files
    :type add_rpms: list of strings
    """
    # convert all the RPM paths to absolute paths, so that
    # relative paths can be used with -a/--add
    add_rpms = map(os.path.abspath, add_rpms)

    # resolve wildcards and also eliminate non-existing RPMs
    resolved_rpms = []
    for rpm in add_rpms:
        resolved_path = glob.glob(rpm)
        if not resolved_path:
            print("warning: requested rpm %s does not exist and can't be added" % rpm)
        elif len(resolved_path) > 1:
            print("wildcard %s resolved to %d paths" % (rpm, len(resolved_path)))
        resolved_rpms.extend(resolved_path)

    for rpm in resolved_rpms:
        cmd = "cd %s && rpm2cpio %s | cpio -dium" % (updates_path, rpm)
        sys.stdout.write(cmd + "\n")
        os.system(cmd)

def createUpdatesImage(cwd, updates):
    os.chdir(updates)
    os.system("find . | cpio -c -o | pigz -9cv > %s/updates.img" % (cwd,))
    sys.stdout.write("updates.img ready\n")
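
# The resulting updates.img is a pigz-compressed cpio archive. It is typically
# handed to the installer via the updates= (inst.updates=) boot option, which
# anaconda unpacks over its runtime filesystem at /run/install/updates.
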
def check_for_new_packages(tag, arch, args, specfile_path):
    """Download any new packages added to Requires and %defines
    since the given tag, return a list of RPM paths
    """
    new_packages = {}
    version_vars = {}
    all_used_version_vars = {}
    fedora_number = get_fedora_version()
    release_id = "fc%s" % fedora_number

    Package = namedtuple("Package", "name version version_request req_tuple")

    try:
        diff = doGitContentDiff(tag, ["anaconda.spec.in"])
    except RuntimeError as e:
        print("ERROR: %s" % e)
        return

    new_requires = filter(lambda x: x.startswith("+Requires:"), diff)
    new_defines = filter(lambda x: x.startswith("+%define"), diff)
    with open(specfile_path) as f:
        spec_content = f.readlines()
    all_defines = filter(lambda x: x.startswith("%define"), spec_content)
    all_requires = filter(lambda x: x.startswith("Requires:"), spec_content)

    # parse all defines, to get the version variables
    for define in all_defines:
        # second word & strip the "ver" suffix
        package = define.split()[1][:-3]
        version = define.split()[2]
        version_vars[package] = version

    # parse all Requires and store lines referencing
    # version variables
    # ex.: Requires: langtable-data >= %{langtablever}
    # will be stored as:
    # langtable : [(langtable-data, VERSION_MORE_OR_EQUAL)]

    for require in all_requires:
        parts = require.split()
        # we are interested only in Requires lines using
        # version variables
        if len(parts) >= 4 and parts[3].startswith('%'):
            package_name = parts[1]
            version_request = VERSION_OP_MAP.get(parts[2])
            # drop the %{ prefix and ver} suffix
            version_var = parts[3][2:-4]
            # store (package_name, version_request) tuples for the given
            # version variable

            # a single version variable might be used to set the version of
            # multiple packages, see langtable for an example of such usage
            if version_var in all_used_version_vars:
                all_used_version_vars[version_var].append((package_name, version_request))
            else:
                all_used_version_vars[version_var] = [(package_name, version_request)]

    # parse all new defines
    for define in new_defines:
        # second word & strip the "ver" suffix
        parts = define.split()
        version_var = parts[1][:-3]
        version = parts[2]
        # if there are any packages in Requires using the version variable
        # corresponding to the current %define, add a new package request
        packages_using_this_define = all_used_version_vars.get(version_var, [])
        # multiple requests might be using a single version variable
        for package_name, version_request in packages_using_this_define:
            if not version.count("-"):
                version = "%s-1" % version
            pkg_name = "%s-%s.%s.%s.rpm" % (package_name, version,
                                            release_id, arch)
            pkg_tuple = get_pkg_tuple(pkg_name)
            req_tuple = get_req_tuple(pkg_tuple, version_request)
            new_packages[package_name] = Package(package_name, version,
                                                 version_request, req_tuple)

    # then parse requires and substitute version variables where needed
    for req in new_requires:
        parts = req.split()
        if len(parts) < 2:
            # must contain at least "+Requires:" and "some_package"
            continue
        package_name = parts[1]

        # skip packages that were already added from new %defines
        if package_name in new_packages:
            continue

        version_request = None
        if len(parts) > 2:
            # get the version request operator
            version_operator = parts[2]
            # at the moment only = (considered the default),
            # >= and <= are supported
            version_request = VERSION_OP_MAP.get(version_operator)
            version = parts.pop()
        else:
            version = ""

        # skip requires of our own packages
        if version == "%{version}-%{release}":
            continue

        # handle version variables (%{package-namever})
        if version.startswith("%"):
            # drop the %{ prefix and ver} suffix
            version_var = version[2:-4]
            # resolve the variable to a package version
            try:
                version = version_vars[version_var]
            except KeyError:
                # if this version variable is missing in version_vars,
                # there must be a missing %define in the specfile
                print("%%define missing for %s in the Anaconda specfile" % version)
                exit(1)
        # create a metadata tuple for version range checking
        if version:
            # check if version contains a build number
            # and add a fake one if it doesn't, as the
            # newest package will be fetched from Koji anyway
            if not version.count("-"):
                version = "%s-1" % version
            pkg_name = "%s-%s.%s.%s.rpm" % (package_name, version, release_id, arch)
            pkg_tuple = get_pkg_tuple(pkg_name)
        else:
            pkg_tuple = (package_name, arch, '', '', '')
        req_tuple = get_req_tuple(pkg_tuple, version_request)
        new_packages[package_name] = Package(package_name, version,
                                             version_request, req_tuple)

    # report about new package requests
    if new_packages:
        print("%d new packages found in Requires or updated %%defines for Requires:" %
              len(new_packages))
        for p in new_packages.values():
            if p.version_request:
                print("%s %s %s" % (p.name, p.version_request, p.version))
            else:
                print(p.name)

        # remove ignored packages
        ignored_count = 0
        for ignored_package in args.ignored_packages:
            # the ignored package specifications support globs
            matches = fnmatch.filter(new_packages, ignored_package)
            for match in matches:
                print("the new package %s matches %s and will be ignored" % (match, ignored_package))
                del new_packages[match]
                ignored_count += 1
        if ignored_count:
            print("%d new packages have been ignored" % ignored_count)

    else:
        print("no new Requires or updated %%defines for Requires found")
        return []

    # make sure the RPM cache folder exists
    create_RPM_cache_folder()

    # get package names for RPMs added by the -a/--add flags
    added_names = {}
    for path in args.add_rpms:
        try:
            basename = os.path.basename(path)
            name = get_pkg_tuple(basename)[0]
            added_names[name] = basename
        except ValueError:
            print("malformed RPM name ? : %s" % path)
    # remove available packages from the list
    new_packages, include_rpms = remove_local_packages(new_packages, arch,
                                                       release_id, added_names)
    # if some packages are not locally available, download them from Koji
    if new_packages:
        include_rpms.extend(get_RPMs_from_koji(new_packages, fedora_number, arch))
    # return absolute paths for the packages
    return map(os.path.abspath, include_rpms)
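
# The spec file diff lines inspected above look like (illustrative):
#   +%define langtablever 0.0.13
#   +Requires: langtable-data >= %{langtablever}
# which together produce a request for langtable-data >= 0.0.13 built for
# the current fcNN release id.
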
def remove_local_packages(packages, arch, release_id, added_rpms):
    """Remove locally available RPMs from the list of needed packages,
    return locally unavailable packages and paths to relevant locally
    available RPMs for inclusion"""
    current_release_dir = RPM_RELEASE_DIR_TEMPLATE % release_id
    # list all package names and versions for the RPMs already in cache
    folder_glob = os.path.join(RPM_FOLDER_NAME, "*.rpm")
    folder_glob = os.path.abspath(folder_glob)
    release_folder_glob = os.path.join(RPM_FOLDER_NAME, current_release_dir, "*.rpm")
    release_folder_glob = os.path.abspath(release_folder_glob)

    include_rpms = []
    skipped_packages = []

    # first remove from packages any packages that were provided manually
    for package_name in packages.keys():
        # check if the package was added by the
        # -a/--add option
        if package_name in added_rpms:
            # the package was added by the -a/--add option,
            # remove it from the list so it is not loaded from
            # the RPM cache and not fetched
            # NOTE: the version of the added package is not checked,
            # so "added" packages are always used, even if their
            # version does not comply with the one given in the specfile
            del packages[package_name]
            # remember which packages were skipped due to the
            # -a/--add option
            skipped_packages.append(added_rpms[package_name])

    # only check RPMs that are either noarch or built for the
    # currently specified architecture
    allowed = ("noarch.rpm", "%s.rpm" % arch)
    relevant_rpms = [x for x in glob.glob(folder_glob) if x.endswith(allowed)]
    # also add any RPMs from the current release folder
    # (it has RPMs from older releases that were not yet rebuilt
    # for the current release)
    relevant_rpms.extend(x for x in glob.glob(release_folder_glob)
                         if x.endswith(allowed))
    # iterate over all relevant cached RPMs and check if they are needed
    for rpm_path in relevant_rpms:
        proc = subprocess.Popen(['rpm', '-qp', '--queryformat',
                                 '%{NAME} %{VERSION} %{RELEASE}', rpm_path],
                                stdout=subprocess.PIPE,
                                stderr=None)
        proc_output = proc.communicate()
        if proc.returncode != 0:
            continue
        name, version, release = proc_output[0].split()
        # get the build number and release id
        build_id, package_release_id = release.rsplit(".", 1)

        # If a package is stored in the for_<release id>
        # subfolder, we don't check its release id,
        # because it is a package that has not been rebuilt
        # for a new release but is still the latest version.
        # If a package is not stored in a for_<release id> subfolder,
        # we check the release id to filter out old cached packages.
        if not os.path.split(rpm_path)[0].endswith(current_release_dir):
            if package_release_id != release_id:
                continue
        # add the build id to the version string
        version_build = "%s-%s" % (version, build_id)
        # check if the package is needed
        if name in packages:
            package = packages[name]
            package_version = package.version
            # handle versions with a build number and without it
            if not package_version or package_version == version_build or \
               package_version == version or \
               check_package_version(rpm_path, package):
                include_rpms.append(rpm_path)
                del packages[name]

    # return only those packages that are not locally available
    if include_rpms and not packages and not added_rpms:
        print("all %d required RPMs found locally:" % len(include_rpms))
    elif include_rpms:
        print("%d required RPMs found locally:" % len(include_rpms))
    else:
        print("no required packages found locally")

    # print any locally found RPMs
    for rpm in include_rpms:
        print(os.path.basename(rpm))

    # print skipped packages
    if skipped_packages:
        print('%d required packages found in the manually added RPMs:' % len(skipped_packages))
        for item in skipped_packages:
            print(item)

    return packages, include_rpms

def get_RPMs_from_koji(packages, fedora_number, arch):
    """Download the given packages from Koji for the given Fedora version
    and architecture, return a list of paths to the downloaded RPMs
    """
    threads = []
    rpm_paths = []
    # the print lock is used to make sure only one
    # thread is printing to stdout at a time
    print_lock = threading.Lock()

    index = 1
    print("starting %d worker threads" % len(packages))
    for _package_name, package in packages.items():
        thread = threading.Thread(name=index, target=get_rpm_from_Koji_thread,
                                  args=(package, fedora_number,
                                        arch, rpm_paths, print_lock))
        thread.start()
        threads.append(thread)
        index += 1
    # wait for all threads to finish
    for thread in threads:
        thread.join()

    print("%d RPMs have been downloaded" % len(rpm_paths))

    # return the list of paths for the downloaded RPMs
    return rpm_paths

def get_rpm_from_Koji_thread(package, fedora_number, arch,
                             rpm_paths, print_lock):
    """Download the given package from Koji and if successful,
    append the path to the downloaded file to the rpm_paths list
    """
    # just to be sure, create a separate session for each query,
    # as the individual lookups will run in different threads
    import koji
    kojiclient = koji.ClientSession('http://koji.fedoraproject.org/kojihub', {})
    version = package.version
    if not version:
        version = "*"
    # check if version contains a build number or not
    if len(version.split("-")) == 1:
        version = "%s-*" % version
    # if there is a version request, just get all package versions for the
    # given release and filter them afterwards
    if package.version_request:
        package_glob = "%s-*.fc*.*.rpm" % (package.name)
    else:
        package_glob = "%s-%s.fc*.*.rpm" % (package.name, version)

    # get the current thread, so output can be prefixed by thread number
    prefix = "thread %s:" % threading.current_thread().name
    with print_lock:
        if package.version_request:
            print("%s searching for: %s (version %s %s) in Koji" % (
                prefix, package_glob, package.version_request, package.version))
        else:
            print("%s searching for: %s (any version) in Koji" % (prefix, package_glob))
    # call the Koji API
    results = kojiclient.search(package_glob, "rpm", "glob")
    # leave only results that are either noarch
    # or are built for the current architecture
    allowed = ("noarch.rpm", "%s.rpm" % arch)
    results = [x for x in results if x['name'].endswith(allowed)]

    # remove results that don't fully match the package name
    # Example: searching for glade3 and getting glade3-devel instead is wrong
    results = [x for x in results if get_pkg_tuple(x['name'])[0] == package.name]

    # if there is a version request (=, >=, <=), remove packages that
    # are outside of the specified version range
    if package.version_request:
        filtered_results = []
        for result in results:
            # check if the version complies with the version request
            if check_package_version(result['name'], package,
                                     check_release_id=False):
                filtered_results.append(result)
        # replace results with filtered results
        results = filtered_results

    # the response from Koji has multiple release ids;
    # packages that were not updated in the given release might
    # have an older release id, but will still be valid for the
    # given Fedora release.
    # Therefore we go back from the current release id,
    # until we either find a package or run out of release ids.
    # Example:
    # foo-0.1.fc19.x86_64.rpm could be the latest RPM for
    # Fedora 19, 20 & 21, if foo was not updated since the 0.1 release

    def is_in_release(result, release_number):
        pkg_tuple = get_pkg_tuple(result["name"])
        # there could be stuff like 16.git20131003.fc20,
        # so we split by all dots and take the last component
        release_id = pkg_tuple[4].split(".").pop()
        return release_id == "fc%d" % release_number

    suitable_results = []
    release_number_override = None
    for release_number in range(fedora_number, 0, -1):
        suitable_results = [x for x in results if is_in_release(x, release_number)]
        if suitable_results:
            if release_number != fedora_number:
                release_number_override = release_number
            break

    results = suitable_results
    if results and release_number_override:
        with print_lock:
            print("%s %s not found in fc%d, getting package from fc%d" %
                  (prefix, package.name, fedora_number, release_number_override))

    if results:  # any packages left ?
        # as the newest packages are on the bottom of the
        # result list, just pop the last item
        newest_package = results.pop()
        package_metadata = {}
        rpm_name = newest_package['name']
        package_metadata['rpm_name'] = rpm_name
        with print_lock:
            print("%s RPM found: %s" % (prefix, rpm_name))
        rpm_id = newest_package['id']

        # get info about the RPM to
        # get the arch and build_id
        result = kojiclient.getRPM(rpm_id)
        package_metadata['arch'] = result['arch']
        package_metadata['release'] = result['release']
        build_id = result['build_id']

        # so we can get the toplevel package name and version
        result = kojiclient.getBuild(build_id)
        package_metadata['toplevel_name'] = result['package_name']
        package_metadata['toplevel_version'] = result['version']

        # and use the information to build the URL
        url = KOJI_BASE_URL % package_metadata
        # simple, isn't it ? :)

        # build the RPM storage path
        release_dir = ""
        if release_number_override:
            # Using a package from an older release, store it in a sub-folder
            # so that it is not downloaded again each time.
            release_id = "fc%d" % fedora_number
            release_dir = RPM_RELEASE_DIR_TEMPLATE % release_id
        # if a package from an older release is used, the release subfolder is
        # added to the storage path, otherwise the package is downloaded to the
        # main folder
        download_path = os.path.join(RPM_FOLDER_NAME, release_dir, rpm_name)
        # check if the download was successful
        storage_path = download_to_file(url, download_path)
        if storage_path is not None:
            with print_lock:
                print("%s download done: %s" % (prefix, rpm_name))
            # add successful downloads to the RPM inclusion list
            rpm_paths.append(storage_path)
            # the GIL should be enough for appending to the list
            # from multiple threads
        else:
            with print_lock:
                print("%s download failed: %s @ %s" % (prefix, rpm_name, url))
    else:
        with print_lock:
            if package.version_request:
                print("%s %s in version %s %s was not found in Koji" % (
                    prefix, package.name, package.version_request, package.version))
            else:
                print("%s %s in any version was not found in Koji" % (prefix, package.name))

class ExtendAction(argparse.Action):
    """ A parsing action that extends a list of items instead of appending to
        it. Useful where there is an option that can be used multiple times,
        and each time the values yielded are a list, and a single list is
        desired.
    """
    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, getattr(namespace, self.dest, []) + values)
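
# Example: with "-a a.rpm b.rpm -a c.rpm", ExtendAction collects the flat list
# ['a.rpm', 'b.rpm', 'c.rpm'], whereas the stock action='append' combined with
# nargs='+' would produce the nested list [['a.rpm', 'b.rpm'], ['c.rpm']].
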
def main():
    cwd = os.getcwd()
    configure = os.path.realpath(os.path.join(cwd, 'configure.ac'))
    spec = os.path.realpath(os.path.join(cwd, 'anaconda.spec.in'))
    updates = os.path.join(cwd, 'updates')

    parser = argparse.ArgumentParser(description="Make Anaconda updates image")

    parser.add_argument('-k', '--keep', action='store_true',
                        help='do not delete updates subdirectory')

    parser.add_argument('-c', '--compile', action='store_true',
                        help='compile code if there are isys changes')

    parser.add_argument('-t', '--tag', action='store', type=str,
                        help='make updates image from TAG to HEAD')

    parser.add_argument('-o', '--offset', action='store', type=int, default=0,
                        help='make image from (latest_tag - OFFSET) to HEAD')

    parser.add_argument('-p', '--po', action='store_true',
                        help='update translations')

    parser.add_argument('-a', '--add', action=ExtendAction, type=str, nargs='+',
                        dest='add_rpms', metavar='PATH_TO_RPM', default=[],
                        help='add the contents of RPMs to the updates image (glob supported)')

    parser.add_argument('-f', '--fetch', action='store', type=str, metavar="ARCH",
                        help='autofetch new dependencies from Koji for ARCH')

    parser.add_argument('-i', '--ignore', action=ExtendAction, type=str, metavar="PACKAGE_NAME",
                        dest="ignored_packages", nargs='+', default=[],
                        help='ignore this package when autofetching dependencies (glob supported)')

    parser.add_argument('-b', '--builddir', action='store', type=str,
                        metavar='BUILDDIR', help='build directory for shared objects')

    args = parser.parse_args()
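
    # Illustrative invocations (tags and paths are examples only):
    #   makeupdates                          # changes since the latest tag
    #   makeupdates -t anaconda-21.48.21-1 -k
    #   makeupdates -c -b ../anaconda-build
    #   makeupdates -f x86_64 -i 'python-*' -a ~/rpms/foo-1.0-1.fc21.noarch.rpm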

    if not os.path.isfile(configure) and not os.path.isfile(spec):
        sys.stderr.write("You must be at the top level of the anaconda source tree.\n")
        sys.exit(1)

    if not args.tag:
        # add a fake tag to the arguments to be consistent
        if args.offset < 1:
            args.tag = getArchiveTag(configure, spec)
        else:
            args.tag = getArchiveTagOffset(configure, spec, args.offset)
        sys.stdout.write("Using tag: %s\n" % args.tag)

    if args.builddir:
        if os.path.isabs(args.builddir):
            builddir = args.builddir
        else:
            builddir = os.path.join(cwd, args.builddir)
    else:
        builddir = cwd
    print("BUILDDIR %s" % builddir)

    if not os.path.isdir(updates):
        os.makedirs(updates)

    copyUpdatedFiles(args.tag, updates, cwd, builddir)

    if args.compile:
        if isysChanged(args.tag):
            copyUpdatedIsys(updates, cwd, builddir)

        if widgetsChanged(args.tag):
            copyUpdatedWidgets(updates, cwd, builddir)

        if auditdChanged(args.tag):
            copyUpdatedAuditd(updates, cwd, builddir)

    if args.po:
        copyTranslations(updates, cwd, builddir)

    if args.add_rpms:
        args.add_rpms = list(set(args.add_rpms))
        print('%d RPMs added manually:' % len(args.add_rpms))
        for item in args.add_rpms:
            print(os.path.basename(item))

    if args.fetch:
        arch = args.fetch
        rpm_paths = check_for_new_packages(args.tag, arch, args, spec)
        args.add_rpms.extend(rpm_paths)

    if args.add_rpms:
        addRpms(updates, args.add_rpms)

    createUpdatesImage(cwd, updates)

    if not args.keep:
        shutil.rmtree(updates)

if __name__ == "__main__":
    main()