# groups_manager.py
# DNF plugin for managing comps groups metadata files
#
# Copyright (C) 2020 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import gzip
import libcomps
import os
import re
import shutil
import tempfile
from dnfpluginscore import _, logger
import dnf
import dnf.cli
RE_GROUP_ID_VALID = '-a-z0-9_.:'
RE_GROUP_ID = re.compile(r'^[{}]+$'.format(RE_GROUP_ID_VALID))
RE_LANG = re.compile(r'^[-a-zA-Z0-9_.@]+$')
COMPS_XML_OPTIONS = {
'default_explicit': True,
'uservisible_explicit': True,
'empty_groups': True}
def group_id_type(value):
'''group id validator'''
if not RE_GROUP_ID.match(value):
raise argparse.ArgumentTypeError(_('Invalid group id'))
return value
def translation_type(value):
'''translated texts validator'''
data = value.split(':', 2)
if len(data) != 2:
raise argparse.ArgumentTypeError(
_("Invalid translated data, should be in form 'lang:text'"))
lang, text = data
if not RE_LANG.match(lang):
raise argparse.ArgumentTypeError(_('Invalid/empty language for translated data'))
return lang, text
def text_to_id(text):
'''generate group id based on its name'''
group_id = text.lower()
group_id = re.sub('[^{}]'.format(RE_GROUP_ID_VALID), '', group_id)
if not group_id:
raise dnf.cli.CliError(
_("Can't generate group id from '{}'. Please specify group id using --id.").format(
text))
return group_id
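# For example (hypothetical input), text_to_id('My Group #1') returns 'mygroup1':
# the name is lowercased and every character outside [-a-z0-9_.:] is stripped.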
@dnf.plugin.register_command
class GroupsManagerCommand(dnf.cli.Command):
aliases = ('groups-manager',)
summary = _('create and edit groups metadata file')
def __init__(self, cli):
super(GroupsManagerCommand, self).__init__(cli)
self.comps = libcomps.Comps()
@staticmethod
def set_argparser(parser):
# input / output options
parser.add_argument('--load', action='append', default=[],
metavar='COMPS.XML',
help=_('load groups metadata from file'))
parser.add_argument('--save', action='append', default=[],
metavar='COMPS.XML',
help=_('save groups metadata to file'))
parser.add_argument('--merge', metavar='COMPS.XML',
help=_('load and save groups metadata to file'))
parser.add_argument('--print', action='store_true', default=False,
help=_('print the result metadata to stdout'))
# group options
parser.add_argument('--id', type=group_id_type,
help=_('group id'))
parser.add_argument('-n', '--name', help=_('group name'))
parser.add_argument('--description',
help=_('group description'))
parser.add_argument('--display-order', type=int,
help=_('group display order'))
parser.add_argument('--translated-name', action='append', default=[],
metavar='LANG:TEXT', type=translation_type,
help=_('translated name for the group'))
parser.add_argument('--translated-description', action='append', default=[],
metavar='LANG:TEXT', type=translation_type,
help=_('translated description for the group'))
visible = parser.add_mutually_exclusive_group()
visible.add_argument('--user-visible', dest='user_visible', action='store_true',
default=None,
help=_('make the group user visible (default)'))
visible.add_argument('--not-user-visible', dest='user_visible', action='store_false',
default=None,
help=_('make the group user invisible'))
# package list options
section = parser.add_mutually_exclusive_group()
section.add_argument('--mandatory', action='store_true',
help=_('add packages to the mandatory section'))
section.add_argument('--optional', action='store_true',
help=_('add packages to the optional section'))
section.add_argument('--remove', action='store_true', default=False,
help=_('remove packages from the group instead of adding them'))
parser.add_argument('--dependencies', action='store_true',
help=_('include also direct dependencies for packages'))
parser.add_argument("packages", nargs='*', metavar='PACKAGE',
help=_('package specification'))
def configure(self):
demands = self.cli.demands
if self.opts.packages:
demands.sack_activation = True
demands.available_repos = True
demands.load_system_repo = False
# handle --merge option (shortcut to --load and --save the same file)
if self.opts.merge:
self.opts.load.insert(0, self.opts.merge)
self.opts.save.append(self.opts.merge)
# check that group is specified when editing is attempted
if (self.opts.description
or self.opts.display_order
or self.opts.translated_name
or self.opts.translated_description
or self.opts.user_visible is not None
or self.opts.packages):
if not self.opts.id and not self.opts.name:
raise dnf.cli.CliError(
_("Can't edit group without specifying it (use --id or --name)"))
def load_input_files(self):
"""
        Load all input xml files given via --load and merge them into self.comps.
        Raise dnf.exceptions.Error when a file cannot be loaded or parsed.
"""
for file_name in self.opts.load:
file_comps = libcomps.Comps()
try:
if file_name.endswith('.gz'):
# libcomps does not support gzipped files - decompress to temporary
# location
with gzip.open(file_name) as gz_file:
temp_file = tempfile.NamedTemporaryFile(delete=False)
try:
shutil.copyfileobj(gz_file, temp_file)
# close temp_file to ensure the content is flushed to disk
temp_file.close()
file_comps.fromxml_f(temp_file.name)
finally:
os.unlink(temp_file.name)
else:
file_comps.fromxml_f(file_name)
except (IOError, OSError, libcomps.ParserError) as err:
# gzip module raises OSError on reading from malformed gz file
                # get_last_errors() output often contains duplicate lines; remove them
seen = set()
for error in file_comps.get_last_errors():
if error in seen:
continue
logger.error(error.strip())
seen.add(error)
raise dnf.exceptions.Error(
_("Can't load file \"{}\": {}").format(file_name, err))
else:
self.comps += file_comps
def save_output_files(self):
for file_name in self.opts.save:
try:
# xml_f returns a list of errors / log entries
errors = self.comps.xml_f(file_name, xml_options=COMPS_XML_OPTIONS)
except libcomps.XMLGenError as err:
errors = [err]
if errors:
# xml_f() method could return more than one error. In this case
# raise the latest of them and log the others.
for err in errors[:-1]:
logger.error(err.strip())
raise dnf.exceptions.Error(_("Can't save file \"{}\": {}").format(
file_name, errors[-1].strip()))
def find_group(self, group_id, name):
'''
Try to find group according to command line parameters - first by id
then by name.
'''
group = None
if group_id:
for grp in self.comps.groups:
if grp.id == group_id:
group = grp
break
if group is None and name:
for grp in self.comps.groups:
if grp.name == name:
group = grp
break
return group
def edit_group(self, group):
'''
Set attributes and package lists for selected group
'''
def langlist_to_strdict(lst):
str_dict = libcomps.StrDict()
for lang, text in lst:
str_dict[lang] = text
return str_dict
# set group attributes
if self.opts.name:
group.name = self.opts.name
if self.opts.description:
group.desc = self.opts.description
if self.opts.display_order:
group.display_order = self.opts.display_order
if self.opts.user_visible is not None:
group.uservisible = self.opts.user_visible
if self.opts.translated_name:
group.name_by_lang = langlist_to_strdict(self.opts.translated_name)
if self.opts.translated_description:
group.desc_by_lang = langlist_to_strdict(self.opts.translated_description)
# edit packages list
if self.opts.packages:
# find packages according to specifications from command line
packages = set()
for pkg_spec in self.opts.packages:
subj = dnf.subject.Subject(pkg_spec)
q = subj.get_best_query(self.base.sack, with_nevra=True,
with_provides=False, with_filenames=False).latest()
if not q:
logger.warning(_("No match for argument: {}").format(pkg_spec))
continue
packages.update(q)
if self.opts.dependencies:
# add packages that provide requirements
requirements = set()
for pkg in packages:
requirements.update(pkg.requires)
packages.update(self.base.sack.query().filterm(provides=requirements))
pkg_names = {pkg.name for pkg in packages}
if self.opts.remove:
for pkg_name in pkg_names:
for pkg in group.packages_match(name=pkg_name,
type=libcomps.PACKAGE_TYPE_UNKNOWN):
group.packages.remove(pkg)
else:
if self.opts.mandatory:
pkg_type = libcomps.PACKAGE_TYPE_MANDATORY
elif self.opts.optional:
pkg_type = libcomps.PACKAGE_TYPE_OPTIONAL
else:
pkg_type = libcomps.PACKAGE_TYPE_DEFAULT
for pkg_name in sorted(pkg_names):
if not group.packages_match(name=pkg_name, type=pkg_type):
group.packages.append(libcomps.Package(name=pkg_name, type=pkg_type))
def run(self):
self.load_input_files()
if self.opts.id or self.opts.name:
# we are adding / editing a group
group = self.find_group(group_id=self.opts.id, name=self.opts.name)
if group is None:
# create a new group
if self.opts.remove:
raise dnf.exceptions.Error(_("Can't remove packages from non-existent group"))
group = libcomps.Group()
if self.opts.id:
group.id = self.opts.id
group.name = self.opts.id
elif self.opts.name:
group_id = text_to_id(self.opts.name)
if self.find_group(group_id=group_id, name=None):
raise dnf.cli.CliError(
_("Group id '{}' generated from '{}' is duplicit. "
"Please specify group id using --id.").format(
group_id, self.opts.name))
group.id = group_id
self.comps.groups.append(group)
self.edit_group(group)
self.save_output_files()
if self.opts.print or (not self.opts.save):
print(self.comps.xml_str(xml_options=COMPS_XML_OPTIONS))
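# Example invocation (hypothetical file and group names): create or update a group
# in comps.xml and add two packages to its mandatory section:
#   dnf groups-manager --merge comps.xml --id my-tools --name 'My Tools' \
#       --mandatory package-a package-b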
# debug.py
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
from dnf.i18n import ucd
from dnfpluginscore import _, logger
import dnf
import dnf.cli
import gzip
import hawkey
import os
import rpm
import sys
import time
DEBUG_VERSION = "dnf-debug-dump version 1\n"
class Debug(dnf.Plugin):
name = 'debug'
def __init__(self, base, cli):
super(Debug, self).__init__(base, cli)
self.base = base
self.cli = cli
if self.cli is not None:
self.cli.register_command(DebugDumpCommand)
self.cli.register_command(DebugRestoreCommand)
class DebugDumpCommand(dnf.cli.Command):
aliases = ("debug-dump",)
summary = _("dump information about installed rpm packages to file")
def __init__(self, cli):
super(DebugDumpCommand, self).__init__(cli)
self.dump_file = None
def configure(self):
self.cli.demands.sack_activation = True
self.cli.demands.available_repos = True
@staticmethod
def set_argparser(parser):
parser.add_argument(
"--norepos", action="store_true", default=False,
help=_("do not attempt to dump the repository contents."))
parser.add_argument(
"filename", nargs="?",
help=_("optional name of dump file"))
def run(self):
"""create debug txt file and compress it, if no filename specified
use dnf_debug_dump-<timestamp>.txt.gz by default"""
filename = self.opts.filename
if not filename:
now = time.strftime("%Y-%m-%d_%T", time.localtime(time.time()))
filename = "dnf_debug_dump-%s-%s.txt.gz" % (os.uname()[1], now)
filename = os.path.abspath(filename)
if filename.endswith(".gz"):
self.dump_file = gzip.GzipFile(filename, "w")
else:
self.dump_file = open(filename, "w")
self.write(DEBUG_VERSION)
self.dump_system_info()
self.dump_dnf_config_info()
self.dump_rpm_problems()
self.dump_packages(not self.opts.norepos)
self.dump_rpmdb_versions()
self.dump_file.close()
print(_("Output written to: %s") % filename)
def write(self, msg):
if dnf.pycomp.PY3 and isinstance(self.dump_file, gzip.GzipFile):
msg = bytes(msg, "utf8")
dnf.pycomp.write_to_file(self.dump_file, msg)
def dump_system_info(self):
self.write("%%%%SYSTEM INFO\n")
uname = os.uname()
self.write(" uname: %s, %s\n" % (uname[2], uname[4]))
self.write(" rpm ver: %s\n" % rpm.__version__)
self.write(" python ver: %s\n" % sys.version.replace("\n", ""))
return
def dump_dnf_config_info(self):
var = self.base.conf.substitutions
plugins = ",".join([p.name for p in self.base._plugins.plugins])
self.write("%%%%DNF INFO\n")
self.write(" arch: %s\n" % var["arch"])
self.write(" basearch: %s\n" % var["basearch"])
self.write(" releasever: %s\n" % var["releasever"])
self.write(" dnf ver: %s\n" % dnf.const.VERSION)
self.write(" enabled plugins: %s\n" % plugins)
self.write(" global excludes: %s\n" % ",".join(self.base.conf.excludepkgs))
return
def dump_rpm_problems(self):
self.write("%%%%RPMDB PROBLEMS\n")
(missing, conflicts) = rpm_problems(self.base)
self.write("".join(["Package %s requires %s\n" % (ucd(pkg), ucd(req))
for (req, pkg) in missing]))
self.write("".join(["Package %s conflicts with %s\n" % (ucd(pkg),
ucd(conf))
for (conf, pkg) in conflicts]))
def dump_packages(self, load_repos):
q = self.base.sack.query()
# packages from rpmdb
self.write("%%%%RPMDB\n")
for p in sorted(q.installed()):
self.write(" %s\n" % pkgspec(p))
if not load_repos:
return
self.write("%%%%REPOS\n")
available = q.available()
for repo in sorted(self.base.repos.iter_enabled(), key=lambda x: x.id):
try:
url = None
if repo.metalink is not None:
url = repo.metalink
elif repo.mirrorlist is not None:
url = repo.mirrorlist
elif len(repo.baseurl) > 0:
url = repo.baseurl[0]
self.write("%%%s - %s\n" % (repo.id, url))
self.write(" excludes: %s\n" % ",".join(repo.excludepkgs))
for po in sorted(available.filter(reponame=repo.id)):
self.write(" %s\n" % pkgspec(po))
except dnf.exceptions.Error as e:
self.write("Error accessing repo %s: %s\n" % (repo, str(e)))
continue
return
def dump_rpmdb_versions(self):
self.write("%%%%RPMDB VERSIONS\n")
version = self.base.sack._rpmdb_version()
self.write(" all: %s\n" % version)
return
class DebugRestoreCommand(dnf.cli.Command):
aliases = ("debug-restore",)
summary = _("restore packages recorded in debug-dump file")
def configure(self):
self.cli.demands.sack_activation = True
self.cli.demands.available_repos = True
self.cli.demands.root_user = True
if not self.opts.output:
self.cli.demands.resolving = True
@staticmethod
def set_argparser(parser):
parser.add_argument(
"--output", action="store_true",
help=_("output commands that would be run to stdout."))
parser.add_argument(
"--install-latest", action="store_true",
help=_("Install the latest version of recorded packages."))
parser.add_argument(
"--ignore-arch", action="store_true",
help=_("Ignore architecture and install missing packages matching "
"the name, epoch, version and release."))
parser.add_argument(
"--filter-types", metavar="[install, remove, replace]",
default="install, remove, replace",
help=_("limit to specified type"))
parser.add_argument(
"--remove-installonly", action="store_true",
help=_('Allow removing of install-only packages. Using this option may '
'result in an attempt to remove the running kernel.'))
parser.add_argument(
"filename", nargs=1, help=_("name of dump file"))
def run(self):
"""Execute the command action here."""
if self.opts.filter_types:
self.opts.filter_types = set(
self.opts.filter_types.replace(",", " ").split())
dump_pkgs = self.read_dump_file(self.opts.filename[0])
self.process_installed(dump_pkgs, self.opts)
self.process_dump(dump_pkgs, self.opts)
def process_installed(self, dump_pkgs, opts):
installed = self.base.sack.query().installed()
installonly_pkgs = self.base._get_installonly_query(installed)
for pkg in installed:
pkg_remove = False
spec = pkgspec(pkg)
dumped_versions = dump_pkgs.get((pkg.name, pkg.arch), None)
if dumped_versions is not None:
evr = (pkg.epoch, pkg.version, pkg.release)
if evr in dumped_versions:
# the correct version is already installed
dumped_versions[evr] = 'skip'
else:
# other version is currently installed
if pkg in installonly_pkgs:
# package is install-only, should be removed
pkg_remove = True
else:
# package should be upgraded / downgraded
if "replace" in opts.filter_types:
action = 'replace'
else:
action = 'skip'
for d_evr in dumped_versions.keys():
dumped_versions[d_evr] = action
else:
# package should not be installed
pkg_remove = True
if pkg_remove and "remove" in opts.filter_types:
if pkg not in installonly_pkgs or opts.remove_installonly:
if opts.output:
print("remove %s" % spec)
else:
self.base.package_remove(pkg)
def process_dump(self, dump_pkgs, opts):
for (n, a) in sorted(dump_pkgs.keys()):
dumped_versions = dump_pkgs[(n, a)]
for (e, v, r) in sorted(dumped_versions.keys()):
action = dumped_versions[(e, v, r)]
if action == 'skip':
continue
if opts.ignore_arch:
arch = ""
else:
arch = "." + a
if opts.install_latest and action == "install":
pkg_spec = "%s%s" % (n, arch)
else:
pkg_spec = pkgtup2spec(n, arch, e, v, r)
if action in opts.filter_types:
if opts.output:
print("%s %s" % (action, pkg_spec))
else:
try:
self.base.install(pkg_spec)
except dnf.exceptions.MarkingError:
logger.error(_("Package %s is not available"), pkg_spec)
@staticmethod
def read_dump_file(filename):
if filename.endswith(".gz"):
fobj = gzip.GzipFile(filename)
else:
fobj = open(filename)
if ucd(fobj.readline()) != DEBUG_VERSION:
logger.error(_("Bad dnf debug file: %s"), filename)
raise dnf.exceptions.Error
skip = True
pkgs = {}
for line in fobj:
line = ucd(line)
if skip:
if line == "%%%%RPMDB\n":
skip = False
continue
if not line or line[0] != " ":
break
pkg_spec = line.strip()
nevra = hawkey.split_nevra(pkg_spec)
# {(name, arch): {(epoch, version, release): action}}
pkgs.setdefault((nevra.name, nevra.arch), {})[
(nevra.epoch, nevra.version, nevra.release)] = "install"
return pkgs
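# Example usage (hypothetical file name):
#   dnf debug-dump                           # writes dnf_debug_dump-<host>-<time>.txt.gz
#   dnf debug-restore --output dump.txt.gz   # print actions instead of applying them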
def rpm_problems(base):
rpmdb = dnf.sack._rpmdb_sack(base)
allpkgs = rpmdb.query().installed()
requires = set()
conflicts = set()
for pkg in allpkgs:
requires.update([(req, pkg) for req in pkg.requires
if not str(req) == "solvable:prereqmarker"
and not str(req).startswith("rpmlib(")])
conflicts.update([(conf, pkg) for conf in pkg.conflicts])
missing_requires = [(req, pkg) for (req, pkg) in requires
if not allpkgs.filter(provides=req)]
existing_conflicts = [(conf, pkg) for (conf, pkg) in conflicts
if allpkgs.filter(provides=conf)]
return missing_requires, existing_conflicts
def pkgspec(pkg):
return pkgtup2spec(pkg.name, pkg.arch, pkg.epoch, pkg.version, pkg.release)
def pkgtup2spec(name, arch, epoch, version, release):
a = "" if not arch else ".%s" % arch.lstrip('.')
e = "" if epoch in (None, "") else "%s:" % epoch
return "%s-%s%s-%s%s" % (name, e, version, release, a)
# versionlock.py
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger
import dnf
import dnf.cli
import dnf.exceptions
import fnmatch
import hawkey
import os
import tempfile
import time
import warnings
NOT_READABLE = _('Unable to read version lock configuration: %s')
NO_LOCKLIST = _('Locklist not set')
ADDING_SPEC = _('Adding versionlock on:')
EXCLUDING_SPEC = _('Adding exclude on:')
EXISTING_SPEC = _('Package already locked in equivalent form:')
ALREADY_LOCKED = _('Package {} is already locked')
ALREADY_EXCLUDED = _('Package {} is already excluded')
DELETING_SPEC = _('Deleting versionlock for:')
NOTFOUND_SPEC = _('No package found for:')
NO_VERSIONLOCK = _('Excludes from versionlock plugin were not applied')
APPLY_LOCK = _('Versionlock plugin: number of lock rules from file "{}" applied: {}')
APPLY_EXCLUDE = _('Versionlock plugin: number of exclude rules from file "{}" applied: {}')
NEVRA_ERROR = _('Versionlock plugin: could not parse pattern:')
locklist_fn = None
class VersionLock(dnf.Plugin):
name = 'versionlock'
def __init__(self, base, cli):
super(VersionLock, self).__init__(base, cli)
self.base = base
self.cli = cli
if self.cli is not None:
self.cli.register_command(VersionLockCommand)
def config(self):
global locklist_fn
cp = self.read_config(self.base.conf)
locklist_fn = (cp.has_section('main') and cp.has_option('main', 'locklist')
and cp.get('main', 'locklist'))
def locking_enabled(self):
if self.cli is None:
enabled = True # loaded via the api, not called by cli
else:
enabled = self.cli.demands.plugin_filtering_enabled
if enabled is None:
enabled = self.cli.demands.resolving
return enabled
def sack(self):
if not self.locking_enabled():
logger.debug(NO_VERSIONLOCK)
return
excludes_query = self.base.sack.query().filter(empty=True)
locked_query = self.base.sack.query().filter(empty=True)
locked_names = set()
# counter of applied rules [locked_count, excluded_count]
count = [0, 0]
for pat in _read_locklist():
excl = 0
if pat and pat[0] == '!':
pat = pat[1:]
excl = 1
possible_nevras = dnf.subject.Subject(pat).get_nevra_possibilities(
forms=[hawkey.FORM_NEVRA, hawkey.FORM_NEVR, hawkey.FORM_NEV,
hawkey.FORM_NA, hawkey.FORM_NAME])
if possible_nevras:
count[excl] += 1
else:
logger.error("%s %s", NEVRA_ERROR, pat)
continue
for nevra in possible_nevras:
pat_query = nevra.to_query(self.base.sack)
if excl:
excludes_query = excludes_query.union(pat_query)
else:
locked_names.add(nevra.name)
locked_query = locked_query.union(pat_query)
if pat_query:
break
if count[1]:
logger.debug(APPLY_EXCLUDE.format(locklist_fn, count[1]))
if count[0]:
logger.debug(APPLY_LOCK.format(locklist_fn, count[0]))
if locked_names:
all_versions = self.base.sack.query().filter(name__glob=list(locked_names))
other_versions = all_versions.difference(locked_query)
excludes_query = excludes_query.union(other_versions)
# exclude also anything that obsoletes the locked versions of packages
obsoletes_query = self.base.sack.query().filterm(obsoletes=locked_query)
# leave out obsoleters that are also part of locked versions (otherwise the obsoleter package
# would not be installable at all)
excludes_query = excludes_query.union(obsoletes_query.difference(locked_query))
excludes_query.filterm(reponame__neq=hawkey.SYSTEM_REPO_NAME)
if excludes_query:
self.base.sack.add_excludes(excludes_query)
EXC_CMDS = ['exclude', 'add-!', 'add!']
DEL_CMDS = ['delete', 'del']
DEP_EXC_CMDS = ['blacklist']
ALL_CMDS = ['add', 'clear', 'list'] + EXC_CMDS + DEL_CMDS + DEP_EXC_CMDS
class VersionLockCommand(dnf.cli.Command):
aliases = ("versionlock",)
summary = _("control package version locks")
usage = "[add|exclude|list|delete|clear] [<package-nevr-spec>]"
@staticmethod
def set_argparser(parser):
parser.add_argument("--raw", default=False, action='store_true',
help=_("Use package specifications as they are, do not "
"try to parse them"))
parser.add_argument("subcommand", nargs='?',
metavar="[add|exclude|list|delete|clear]")
parser.add_argument("package", nargs='*',
metavar="[<package-nevr-spec>]")
def configure(self):
self.cli.demands.sack_activation = True
self.cli.demands.available_repos = True
def run(self):
cmd = 'list'
if self.opts.subcommand:
if self.opts.subcommand not in ALL_CMDS:
cmd = 'add'
self.opts.package.insert(0, self.opts.subcommand)
elif self.opts.subcommand in EXC_CMDS:
cmd = 'exclude'
elif self.opts.subcommand in DEP_EXC_CMDS:
msg = _("Subcommand '{}' is deprecated. Use 'exclude' subcommand instead.").format(
self.opts.subcommand)
warnings.warn(msg, dnf.exceptions.DeprecationWarning, stacklevel=2)
cmd = 'exclude'
elif self.opts.subcommand in DEL_CMDS:
cmd = 'delete'
else:
cmd = self.opts.subcommand
if cmd == 'add':
results = _search_locklist(self.opts.package)
for entry, entry_cmd in results:
if entry_cmd == '':
_write_locklist(self.base, [entry], self.opts.raw, True,
"\n# Added lock on %s\n" % time.ctime(),
ADDING_SPEC, '')
elif cmd != entry_cmd:
raise dnf.exceptions.Error(ALREADY_EXCLUDED.format(entry))
else:
logger.info("%s %s", EXISTING_SPEC, entry)
elif cmd == 'exclude':
results = _search_locklist(self.opts.package)
for entry, entry_cmd in results:
if entry_cmd == '':
_write_locklist(self.base, [entry], self.opts.raw, False,
"\n# Added exclude on %s\n" % time.ctime(),
EXCLUDING_SPEC, '!')
elif cmd != entry_cmd:
raise dnf.exceptions.Error(ALREADY_LOCKED.format(entry))
else:
logger.info("%s %s", EXISTING_SPEC, entry)
elif cmd == 'list':
for pat in _read_locklist():
print(pat)
elif cmd == 'clear':
if not locklist_fn:
raise dnf.exceptions.Error(NO_LOCKLIST)
with open(locklist_fn, 'w') as f:
# open in write mode truncates file
pass
elif cmd == 'delete':
if not locklist_fn:
raise dnf.exceptions.Error(NO_LOCKLIST)
dirname = os.path.dirname(locklist_fn)
(out, tmpfilename) = tempfile.mkstemp(dir=dirname, suffix='.tmp')
locked_specs = _read_locklist()
count = 0
with os.fdopen(out, 'w', -1) as out:
for ent in locked_specs:
if _match(ent, self.opts.package):
print("%s %s" % (DELETING_SPEC, ent))
count += 1
continue
out.write(ent)
out.write('\n')
if not count:
os.unlink(tmpfilename)
else:
os.chmod(tmpfilename, 0o644)
os.rename(tmpfilename, locklist_fn)
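# Example usage of the subcommands handled above (hypothetical package names):
#   dnf versionlock add bash         # lock the currently installed bash version
#   dnf versionlock exclude kernel   # add a '!' exclude entry for kernel
#   dnf versionlock list
#   dnf versionlock delete 'bash*'
#   dnf versionlock clear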
def _read_locklist():
locklist = []
try:
if not locklist_fn:
raise dnf.exceptions.Error(NO_LOCKLIST)
with open(locklist_fn) as llfile:
for line in llfile.readlines():
if line.startswith('#') or line.strip() == '':
continue
locklist.append(line.strip())
except IOError as e:
raise dnf.exceptions.Error(NOT_READABLE % e)
return locklist
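# A locklist file written by _write_locklist() looks like this (hypothetical
# entries; '!' marks excludes, specs come from pkgtup2spec() below):
#   # Added lock on Mon Jan  1 00:00:00 2024
#   bash-0:5.1.8-2.fc35.*
#   !kernel-0:5.14.10-300.fc35.*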
def _search_locklist(package):
results = []
found = action = ''
locked_specs = _read_locklist()
for pkg in package:
match = False
for ent in locked_specs:
found = action = ''
if _match(ent, [pkg]):
found = ent
action = 'exclude' if ent.startswith('!') else 'add'
results.append((found, action))
match = True
if not match:
results.append((pkg, action))
return results
def _write_locklist(base, args, raw, try_installed, comment, info, prefix):
specs = set()
for pat in args:
if raw:
specs.add(pat)
continue
subj = dnf.subject.Subject(pat)
pkgs = None
if try_installed:
pkgs = subj.get_best_query(dnf.sack._rpmdb_sack(base), with_nevra=True,
with_provides=False, with_filenames=False)
if not pkgs:
pkgs = subj.get_best_query(base.sack, with_nevra=True, with_provides=False,
with_filenames=False)
if not pkgs:
print("%s %s" % (NOTFOUND_SPEC, pat))
for pkg in pkgs:
specs.add(pkgtup2spec(*pkg.pkgtup))
if specs:
try:
if not locklist_fn:
raise dnf.exceptions.Error(NO_LOCKLIST)
with open(locklist_fn, 'a') as f:
f.write(comment)
for spec in specs:
print("%s %s" % (info, spec))
f.write("%s%s\n" % (prefix, spec))
except IOError as e:
raise dnf.exceptions.Error(NOT_READABLE % e)
def _match(ent, patterns):
ent = ent.lstrip('!')
for pat in patterns:
if ent == pat:
return True
try:
n = hawkey.split_nevra(ent)
except hawkey.ValueException:
return False
for name in (
'%s' % n.name,
'%s.%s' % (n.name, n.arch),
'%s-%s' % (n.name, n.version),
'%s-%s-%s' % (n.name, n.version, n.release),
'%s-%s:%s' % (n.name, n.epoch, n.version),
'%s-%s-%s.%s' % (n.name, n.version, n.release, n.arch),
'%s-%s:%s-%s' % (n.name, n.epoch, n.version, n.release),
'%s:%s-%s-%s.%s' % (n.epoch, n.name, n.version, n.release, n.arch),
'%s-%s:%s-%s.%s' % (n.name, n.epoch, n.version, n.release, n.arch),
):
for pat in patterns:
if fnmatch.fnmatch(name, pat):
return True
return False
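# For instance, a locklist entry 'bash-0:5.1.8-2.fc35.*' (hypothetical) is matched
# by the patterns 'bash', 'bash-5.1.8' or 'bash-0:5.1.8-2.fc35', since each pattern
# is fnmatch-ed against every name form generated above.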
def pkgtup2spec(name, arch, epoch, version, release):
# we ignore arch
return "%s-%s:%s-%s.*" % (name, epoch or "0", version, release)
# etckeeper.py, support etckeeper for dnf
#
# Copyright (C) 2014 Peter Listiak
# https://github.com/plistiak/dnf-etckeeper
#
# Later modifications by Petr Spacek:
# Distutils code below was copied from etckeeper-bzr distributed with v1.15
#
import logging
import subprocess
import dnf
logger = logging.getLogger('dnf.plugin')
class Etckeeper(dnf.Plugin):
name = 'etckeeper'
def _run_command(self, command):
logger.debug('Etckeeper plugin: %s', command)
try:
with open("/dev/null", "wb") as devnull:
ret = subprocess.call(("etckeeper", command),
stdout=devnull, stderr=devnull,
close_fds=True)
if ret != 0:
raise dnf.exceptions.Error('"etckeeper %s" returned: %d' % (command, ret))
except OSError as err:
logger.warning('Failed to run "etckeeper %s": %s' % (command, err))
def resolved(self):
self._run_command("pre-install")
def transaction(self):
self._run_command("post-install")
if __name__ == "__main__":
from distutils.core import setup
setup(name="dnf-etckeeper",
packages=["dnf-plugins"],
package_dir={"dnf-plugins":"etckeeper-dnf"})
# reposync.py
# DNF plugin adding a command to download all packages from given remote repo.
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
import hawkey
import os
import shutil
import types
from dnfpluginscore import _, logger
from dnf.cli.option_parser import OptionParser
import dnf
import dnf.cli
def _pkgdir(intermediate, target):
cwd = dnf.i18n.ucd(os.getcwd())
return os.path.realpath(os.path.join(cwd, intermediate, target))
class RPMPayloadLocation(dnf.repo.RPMPayload):
def __init__(self, pkg, progress, pkg_location):
super(RPMPayloadLocation, self).__init__(pkg, progress)
self.package_dir = os.path.dirname(pkg_location)
def _target_params(self):
tp = super(RPMPayloadLocation, self)._target_params()
dnf.util.ensure_dir(self.package_dir)
tp['dest'] = self.package_dir
return tp
@dnf.plugin.register_command
class RepoSyncCommand(dnf.cli.Command):
aliases = ('reposync',)
summary = _('download all packages from remote repo')
def __init__(self, cli):
super(RepoSyncCommand, self).__init__(cli)
@staticmethod
def set_argparser(parser):
parser.add_argument('-a', '--arch', dest='arches', default=[],
action=OptionParser._SplitCallback, metavar='[arch]',
help=_('download only packages for this ARCH'))
parser.add_argument('--delete', default=False, action='store_true',
help=_('delete local packages no longer present in repository'))
parser.add_argument('--download-metadata', default=False, action='store_true',
help=_('download all the metadata.'))
parser.add_argument('-g', '--gpgcheck', default=False, action='store_true',
help=_('Remove packages that fail GPG signature checking '
'after downloading'))
parser.add_argument('-m', '--downloadcomps', default=False, action='store_true',
help=_('also download and uncompress comps.xml'))
parser.add_argument('--metadata-path',
help=_('where to store downloaded repository metadata. '
'Defaults to the value of --download-path.'))
parser.add_argument('-n', '--newest-only', default=False, action='store_true',
help=_('download only newest packages per-repo'))
parser.add_argument('--norepopath', default=False, action='store_true',
help=_("Don't add the reponame to the download path."))
parser.add_argument('-p', '--download-path', default='./',
help=_('where to store downloaded repositories'))
parser.add_argument('--remote-time', default=False, action='store_true',
                            help=_('try to set the local timestamps of downloaded files '
                                   'to match those on the server'))
parser.add_argument('--source', default=False, action='store_true',
help=_('download only source packages'))
parser.add_argument('-u', '--urls', default=False, action='store_true',
help=_("Just list urls of what would be downloaded, "
"don't download"))
def configure(self):
demands = self.cli.demands
demands.available_repos = True
demands.sack_activation = True
repos = self.base.repos
if self.opts.repo:
repos.all().disable()
for repoid in self.opts.repo:
try:
repo = repos[repoid]
except KeyError:
raise dnf.cli.CliError("Unknown repo: '%s'." % repoid)
repo.enable()
if self.opts.source:
repos.enable_source_repos()
if len(list(repos.iter_enabled())) > 1 and self.opts.norepopath:
raise dnf.cli.CliError(
_("Can't use --norepopath with multiple repositories"))
for repo in repos.iter_enabled():
repo._repo.expire()
repo.deltarpm = False
def run(self):
self.base.conf.keepcache = True
gpgcheck_ok = True
for repo in self.base.repos.iter_enabled():
if self.opts.remote_time:
repo._repo.setPreserveRemoteTime(True)
if self.opts.download_metadata:
if self.opts.urls:
for md_type, md_location in repo._repo.getMetadataLocations():
url = repo.remote_location(md_location)
if url:
print(url)
else:
msg = _("Failed to get mirror for metadata: %s") % md_type
logger.warning(msg)
else:
self.download_metadata(repo)
if self.opts.downloadcomps:
if self.opts.urls:
mdl = dict(repo._repo.getMetadataLocations())
group_locations = [mdl[md_type]
for md_type in ('group', 'group_gz', 'group_gz_zck')
if md_type in mdl]
if group_locations:
for group_location in group_locations:
url = repo.remote_location(group_location)
if url:
print(url)
break
else:
msg = _("Failed to get mirror for the group file.")
logger.warning(msg)
else:
self.getcomps(repo)
pkglist = self.get_pkglist(repo)
if self.opts.urls:
self.print_urls(pkglist)
else:
self.download_packages(pkglist)
if self.opts.gpgcheck:
for pkg in pkglist:
local_path = self.pkg_download_path(pkg)
# base.package_signature_check uses pkg.localPkg() to determine
# the location of the package rpm file on the disk.
# Set it to the correct download path.
pkg.localPkg = types.MethodType(
lambda s, local_path=local_path: local_path, pkg)
result, error = self.base.package_signature_check(pkg)
if result != 0:
logger.warning(_("Removing {}: {}").format(
os.path.basename(local_path), error))
os.unlink(local_path)
gpgcheck_ok = False
if self.opts.delete:
self.delete_old_local_packages(repo, pkglist)
if not gpgcheck_ok:
raise dnf.exceptions.Error(_("GPG signature check failed."))
def repo_target(self, repo):
return _pkgdir(self.opts.destdir or self.opts.download_path,
repo.id if not self.opts.norepopath else '')
def metadata_target(self, repo):
if self.opts.metadata_path:
return _pkgdir(self.opts.metadata_path, repo.id)
else:
return self.repo_target(repo)
def pkg_download_path(self, pkg):
repo_target = self.repo_target(pkg.repo)
pkg_download_path = os.path.realpath(
os.path.join(repo_target, pkg.location))
# join() ensures repo_target ends with a path separator (otherwise the
# check would pass if pkg_download_path was a "sibling" path component
# of repo_target that has the same prefix).
if not pkg_download_path.startswith(os.path.join(repo_target, '')):
raise dnf.exceptions.Error(
_("Download target '{}' is outside of download path '{}'.").format(
pkg_download_path, repo_target))
return pkg_download_path
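    # For example (hypothetical paths): with repo_target '/srv/mirror/repo', a
    # malicious pkg.location of '../evil.rpm' resolves to '/srv/mirror/evil.rpm'
    # and fails the startswith('/srv/mirror/repo/') check above.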
def delete_old_local_packages(self, repo, pkglist):
        # delete any *.rpm file under the target path that was not downloaded from the repository
downloaded_files = set(self.pkg_download_path(pkg) for pkg in pkglist)
for dirpath, dirnames, filenames in os.walk(self.repo_target(repo)):
for filename in filenames:
path = os.path.join(dirpath, filename)
if filename.endswith('.rpm') and os.path.isfile(path):
if path not in downloaded_files:
# Delete disappeared or relocated file
try:
os.unlink(path)
logger.info(_("[DELETED] %s"), path)
except OSError:
logger.error(_("failed to delete file %s"), path)
def getcomps(self, repo):
comps_fn = repo._repo.getCompsFn()
if comps_fn:
dest_path = self.metadata_target(repo)
dnf.util.ensure_dir(dest_path)
dest = os.path.join(dest_path, 'comps.xml')
dnf.yum.misc.decompress(comps_fn, dest=dest)
logger.info(_("comps.xml for repository %s saved"), repo.id)
def download_metadata(self, repo):
repo_target = self.metadata_target(repo)
repo._repo.downloadMetadata(repo_target)
return True
def _get_latest(self, query):
"""
return union of these queries:
- the latest NEVRAs from non-modular packages
- all packages from stream version with the latest package NEVRA
(this should not be needed but the latest package NEVRAs might be
part of an older module version)
- all packages from the latest stream version
"""
if not dnf.base.WITH_MODULES:
return query.latest()
query.apply()
module_packages = self.base._moduleContainer.getModulePackages()
all_artifacts = set()
module_dict = {} # {NameStream: {Version: [modules]}}
artifact_version = {} # {artifact: {NameStream: [Version]}}
for module_package in module_packages:
artifacts = module_package.getArtifacts()
all_artifacts.update(artifacts)
module_dict.setdefault(module_package.getNameStream(), {}).setdefault(
module_package.getVersionNum(), []).append(module_package)
for artifact in artifacts:
artifact_version.setdefault(artifact, {}).setdefault(
module_package.getNameStream(), []).append(module_package.getVersionNum())
# the latest NEVRAs from non-modular packages
latest_query = query.filter(
pkg__neq=query.filter(nevra_strict=all_artifacts)).latest()
# artifacts from the newest version and those versions that contain an artifact
# with the highest NEVRA
latest_stream_artifacts = set()
for namestream, version_dict in module_dict.items():
# versions that will be synchronized
versions = set()
# add the newest stream version
versions.add(sorted(version_dict.keys(), reverse=True)[0])
# collect all artifacts in all stream versions
stream_artifacts = set()
for modules in version_dict.values():
for module in modules:
stream_artifacts.update(module.getArtifacts())
# find versions to which the packages with the highest NEVRAs belong
for latest_pkg in query.filter(nevra_strict=stream_artifacts).latest():
                # here we depend on modules.yaml always containing full NEVRA (including epoch)
nevra = "{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch}".format(latest_pkg)
# download only highest version containing the latest artifact
versions.add(max(artifact_version[nevra][namestream]))
# add all artifacts from selected versions for synchronization
for version in versions:
for module in version_dict[version]:
latest_stream_artifacts.update(module.getArtifacts())
latest_query = latest_query.union(query.filter(nevra_strict=latest_stream_artifacts))
return latest_query
def get_pkglist(self, repo):
query = self.base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).available().filterm(
reponame=repo.id)
if self.opts.newest_only:
query = self._get_latest(query)
if self.opts.source:
query.filterm(arch='src')
elif self.opts.arches:
query.filterm(arch=self.opts.arches)
return query
def download_packages(self, pkglist):
base = self.base
progress = base.output.progress
if progress is None:
progress = dnf.callback.NullDownloadProgress()
drpm = dnf.drpm.DeltaInfo(base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).installed(),
progress, 0)
payloads = [RPMPayloadLocation(pkg, progress, self.pkg_download_path(pkg))
for pkg in pkglist]
base._download_remote_payloads(payloads, drpm, progress, None, False)
def print_urls(self, pkglist):
for pkg in pkglist:
url = pkg.remote_location()
if url:
print(url)
else:
msg = _("Failed to get mirror for package: %s") % pkg.name
logger.warning(msg)
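# Example usage (hypothetical repo ids and paths):
#   dnf reposync --repoid=fedora --download-metadata -p /srv/mirror
#   dnf reposync --repoid=updates --newest-only --delete -p /srv/mirror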
# repomanage.py
# DNF plugin adding a command to manage rpm packages from given directory.
#
# Copyright (C) 2015 Igor Gnatenko
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger
import dnf
import dnf.cli
import logging
import os
import hawkey
class RepoManage(dnf.Plugin):
name = "repomanage"
def __init__(self, base, cli):
super(RepoManage, self).__init__(base, cli)
if cli is None:
return
cli.register_command(RepoManageCommand)
class RepoManageCommand(dnf.cli.Command):
aliases = ("repomanage",)
summary = _("Manage a directory of rpm packages")
def pre_configure(self):
if not self.opts.verbose and not self.opts.quiet:
self.cli.redirect_logger(stdout=logging.WARNING, stderr=logging.INFO)
def configure(self):
if not self.opts.verbose and not self.opts.quiet:
self.cli.redirect_repo_progress()
demands = self.cli.demands
demands.sack_activation = True
def run(self):
if self.opts.new and self.opts.old:
raise dnf.exceptions.Error(_("Pass either --old or --new, not both!"))
if self.opts.new and self.opts.oldonly:
raise dnf.exceptions.Error(_("Pass either --oldonly or --new, not both!"))
if self.opts.old and self.opts.oldonly:
raise dnf.exceptions.Error(_("Pass either --old or --oldonly, not both!"))
if not self.opts.old and not self.opts.oldonly:
self.opts.new = True
verfile = {}
pkgdict = {}
module_dict = {} # {NameStream: {Version: [modules]}}
all_modular_artifacts = set()
keepnum = int(self.opts.keep) # the number of items to keep
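        # e.g. with --keep 2 and (hypothetical) versions [1.0, 1.1, 1.2] of one
        # package, --new prints the files of 1.1 and 1.2 (evrlist[-2:]) while
        # --old prints those of 1.0 (evrlist[:-2])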
try:
REPOMANAGE_REPOID = "repomanage_repo"
repo_conf = self.base.repos.add_new_repo(REPOMANAGE_REPOID, self.base.conf, baseurl=[self.opts.path])
# Always expire the repo, otherwise repomanage could use cached metadata and give identical results
# for multiple runs even if the actual repo changed in the meantime
repo_conf._repo.expire()
self.base._add_repo_to_sack(repo_conf)
if dnf.base.WITH_MODULES:
self.base._setup_modular_excludes()
# Prepare modules
module_packages = self.base._moduleContainer.getModulePackages()
for module_package in module_packages:
                    # Even though we load only REPOMANAGE_REPOID, other modules can be
                    # loaded automatically from the system fail-safe data. We don't want
                    # them affecting repomanage results, so ONLY use modules from
                    # REPOMANAGE_REPOID.
if module_package.getRepoID() == REPOMANAGE_REPOID:
all_modular_artifacts.update(module_package.getArtifacts())
module_dict.setdefault(module_package.getNameStream(), {}).setdefault(
module_package.getVersionNum(), []).append(module_package)
except dnf.exceptions.RepoError:
            rpm_list = self._get_file_list(self.opts.path, ".rpm")
if len(rpm_list) == 0:
raise dnf.exceptions.Error(_("No files to process"))
self.base.reset(sack=True, repos=True)
self.base.fill_sack(load_system_repo=False, load_available_repos=False)
try:
self.base.add_remote_rpms(rpm_list, progress=self.base.output.progress)
except IOError:
logger.warning(_("Could not open {}").format(', '.join(rpm_list)))
# Prepare regular packages
query = self.base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).available()
packages = [x for x in query.filter(pkg__neq=query.filter(nevra_strict=all_modular_artifacts)).available()]
packages.sort()
for pkg in packages:
na = (pkg.name, pkg.arch)
if na in pkgdict:
if pkg not in pkgdict[na]:
pkgdict[na].append(pkg)
else:
pkgdict[na] = [pkg]
nevra = self._package_to_nevra(pkg)
if nevra in verfile:
verfile[nevra].append(self._package_to_path(pkg))
else:
verfile[nevra] = [self._package_to_path(pkg)]
outputpackages = []
# modular packages
keepnum_latest_stream_artifacts = set()
if self.opts.new:
# regular packages
for (n, a) in pkgdict.keys():
evrlist = pkgdict[(n, a)]
newevrs = evrlist[-keepnum:]
for package in newevrs:
nevra = self._package_to_nevra(package)
for fpkg in verfile[nevra]:
outputpackages.append(fpkg)
# modular packages
for streams_by_version in module_dict.values():
sorted_stream_versions = sorted(streams_by_version.keys())
new_sorted_stream_versions = sorted_stream_versions[-keepnum:]
for i in new_sorted_stream_versions:
for stream in streams_by_version[i]:
keepnum_latest_stream_artifacts.update(set(stream.getArtifacts()))
if self.opts.old:
# regular packages
for (n, a) in pkgdict.keys():
evrlist = pkgdict[(n, a)]
oldevrs = evrlist[:-keepnum]
for package in oldevrs:
nevra = self._package_to_nevra(package)
for fpkg in verfile[nevra]:
outputpackages.append(fpkg)
# modular packages
for streams_by_version in module_dict.values():
sorted_stream_versions = sorted(streams_by_version.keys())
old_sorted_stream_versions = sorted_stream_versions[:-keepnum]
for i in old_sorted_stream_versions:
for stream in streams_by_version[i]:
keepnum_latest_stream_artifacts.update(set(stream.getArtifacts()))
if self.opts.oldonly:
# regular packages
for (n, a) in pkgdict.keys():
evrlist = pkgdict[(n, a)]
oldevrs = evrlist[:-keepnum]
for package in oldevrs:
nevra = self._package_to_nevra(package)
for fpkg in verfile[nevra]:
outputpackages.append(fpkg)
# modular packages
keepnum_newer_stream_artifacts = set()
for streams_by_version in module_dict.values():
sorted_stream_versions = sorted(streams_by_version.keys())
new_sorted_stream_versions = sorted_stream_versions[-keepnum:]
for i in new_sorted_stream_versions:
for stream in streams_by_version[i]:
keepnum_newer_stream_artifacts.update(set(stream.getArtifacts()))
for streams_by_version in module_dict.values():
sorted_stream_versions = sorted(streams_by_version.keys())
old_sorted_stream_versions = sorted_stream_versions[:-keepnum]
for i in old_sorted_stream_versions:
for stream in streams_by_version[i]:
for artifact in stream.getArtifacts():
if artifact not in keepnum_newer_stream_artifacts:
keepnum_latest_stream_artifacts.add(artifact)
modular_packages = [self._package_to_path(x) for x in query.filter(pkg__eq=query.filter(nevra_strict=keepnum_latest_stream_artifacts)).available()]
outputpackages = outputpackages + modular_packages
outputpackages.sort()
if self.opts.space:
print(" ".join(outputpackages))
else:
for pkg in outputpackages:
print(pkg)
@staticmethod
def set_argparser(parser):
parser.add_argument("-o", "--old", action="store_true",
help=_("Print the older packages"))
parser.add_argument("-O", "--oldonly", action="store_true",
help=_("Print the older packages. Exclude the newest packages."))
parser.add_argument("-n", "--new", action="store_true",
help=_("Print the newest packages"))
parser.add_argument("-s", "--space", action="store_true",
help=_("Space separated output, not newline"))
parser.add_argument("-k", "--keep", action="store", metavar="KEEP",
help=_("Newest N packages to keep - defaults to 1"),
default=1, type=int)
parser.add_argument("path", action="store",
help=_("Path to directory"))
@staticmethod
def _get_file_list(path, ext):
"""Return all files in path matching ext
return list object
"""
filelist = []
for root, dirs, files in os.walk(path):
for f in files:
if os.path.splitext(f)[1].lower() == str(ext):
filelist.append(os.path.join(root, f))
return filelist
def _package_to_path(self, pkg):
if len(self.base.repos):
return os.path.join(self.opts.path, pkg.location)
else:
return pkg.location
@staticmethod
def _package_to_nevra(pkg):
return (pkg.name, pkg.epoch, pkg.version, pkg.release, pkg.arch)
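# Example usage (hypothetical directory):
#   dnf repomanage --old --keep 2 /srv/mirror/fedora   # all but the 2 newest versions
#   dnf repomanage --new --space /srv/mirror/fedora    # newest versions, space-separated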
# changelog.py
# DNF plugin adding a command changelog.
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import collections
import dateutil.parser
from dnfpluginscore import _, P_, logger
import dnf
import dnf.cli
def validate_date(val):
try:
return dateutil.parser.parse(val, fuzzy=True)
except (ValueError, TypeError, OverflowError):
raise argparse.ArgumentTypeError(_('Not a valid date: "{0}".').format(val))
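# e.g. validate_date('2021-03-01') and validate_date('1 March 2021') both parse
# thanks to fuzzy parsing, while validate_date('soon') raises ArgumentTypeError.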
@dnf.plugin.register_command
class ChangelogCommand(dnf.cli.Command):
aliases = ('changelog',)
summary = _('Show changelog data of packages')
@staticmethod
def set_argparser(parser):
filter_group = parser.add_mutually_exclusive_group()
filter_group.add_argument(
'--since', metavar="DATE", default=None,
type=validate_date,
            help=_('show changelog entries since DATE. To avoid ambiguity, '
'YYYY-MM-DD format is recommended.'))
filter_group.add_argument(
'--count', default=None, type=int,
help=_('show given number of changelog entries per package'))
filter_group.add_argument(
'--upgrades', default=False, action='store_true',
            help=_('show only new changelog entries for packages that provide an '
                   'upgrade for some of the already installed packages.'))
parser.add_argument("package", nargs='*', metavar=_('PACKAGE'))
def configure(self):
demands = self.cli.demands
demands.available_repos = True
demands.sack_activation = True
demands.changelogs = True
def query(self):
q = self.base.sack.query()
if self.opts.package:
q.filterm(empty=True)
for pkg in self.opts.package:
pkg_q = dnf.subject.Subject(pkg, ignore_case=True).get_best_query(
self.base.sack, with_nevra=True,
with_provides=False, with_filenames=False)
if self.opts.repo:
pkg_q.filterm(reponame=self.opts.repo)
if pkg_q:
q = q.union(pkg_q.latest())
else:
logger.info(_('No match for argument: %s') % pkg)
elif self.opts.repo:
q.filterm(reponame=self.opts.repo)
if self.opts.upgrades:
q = q.upgrades()
else:
q = q.available()
return q
def by_srpm(self, packages):
by_srpm = collections.OrderedDict()
for pkg in sorted(packages):
by_srpm.setdefault((pkg.source_name or pkg.name, pkg.evr), []).append(pkg)
return by_srpm
def filter_changelogs(self, package):
if self.opts.upgrades:
return self.base.latest_changelogs(package)
elif self.opts.count:
return package.changelogs[:self.opts.count]
elif self.opts.since:
return [chlog for chlog in package.changelogs
if chlog['timestamp'] >= self.opts.since.date()]
else:
return package.changelogs
def run(self):
if self.opts.since:
logger.info(_('Listing changelogs since {}').format(self.opts.since))
elif self.opts.count:
logger.info(P_('Listing only latest changelog',
'Listing {} latest changelogs',
self.opts.count).format(self.opts.count))
elif self.opts.upgrades:
logger.info(
_('Listing only new changelogs since installed version of the package'))
else:
logger.info(_('Listing all changelogs'))
by_srpm = self.by_srpm(self.query())
for name in by_srpm:
print(_('Changelogs for {}').format(
', '.join(sorted({str(pkg) for pkg in by_srpm[name]}))))
for chlog in self.filter_changelogs(by_srpm[name][0]):
print(self.base.format_changelog(chlog))
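# Example usage (hypothetical package names):
#   dnf changelog --since 2021-01-01 kernel
#   dnf changelog --count 3 bash
#   dnf changelog --upgrades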
# __pycache__/builddep.cpython-36.pyc: compiled CPython 3.6 bytecode of the
# builddep plugin ("Install build dependencies for package or spec file");
# binary contents omitted, as they are not recoverable as source.