Current File : /home/mmdealscpanel/yummmdeals.com/dnf-plugins.tar
groups_manager.py000064400000032334150402642240010133 0ustar00# groups_manager.py
# DNF plugin for managing comps groups metadata files
#
# Copyright (C) 2020 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals

import argparse
import gzip
import libcomps
import os
import re
import shutil
import tempfile

from dnfpluginscore import _, logger
import dnf
import dnf.cli


# Characters allowed in a comps group id; used both to validate ids given
# on the command line and to strip invalid characters when deriving an id
# from a group name.
RE_GROUP_ID_VALID = '-a-z0-9_.:'
RE_GROUP_ID = re.compile('^[' + RE_GROUP_ID_VALID + ']+$')

# Language identifier for translated strings (e.g. 'en', 'en_US.UTF-8@latin').
RE_LANG = re.compile(r'^[-a-zA-Z0-9_.@]+$')

# Options handed to libcomps when serializing the comps XML.
COMPS_XML_OPTIONS = {
    'default_explicit': True,
    'uservisible_explicit': True,
    'empty_groups': True,
}


def group_id_type(value):
    '''Argparse type validator for a comps group id.

    Returns the value unchanged when it matches RE_GROUP_ID, otherwise
    raises argparse.ArgumentTypeError.
    '''
    if RE_GROUP_ID.match(value) is None:
        raise argparse.ArgumentTypeError(_('Invalid group id'))
    return value


def translation_type(value):
    '''Argparse type validator for translated texts.

    Parses a "lang:text" argument into a (lang, text) tuple.
    Raises argparse.ArgumentTypeError on malformed input.
    '''
    # Split on the first colon only, so the translated text itself may
    # contain colons (maxsplit=2 would wrongly reject e.g. 'en:name: sub').
    data = value.split(':', 1)
    if len(data) != 2:
        raise argparse.ArgumentTypeError(
            _("Invalid translated data, should be in form 'lang:text'"))
    lang, text = data
    if not RE_LANG.match(lang):
        raise argparse.ArgumentTypeError(_('Invalid/empty language for translated data'))
    return lang, text


def text_to_id(text):
    '''Derive a group id from a group name.

    Lowercases the name and drops every character not valid in a group id.
    Raises dnf.cli.CliError when nothing valid remains.
    '''
    candidate = re.sub('[^{}]'.format(RE_GROUP_ID_VALID), '', text.lower())
    if candidate:
        return candidate
    raise dnf.cli.CliError(
        _("Can't generate group id from '{}'. Please specify group id using --id.").format(
            text))


@dnf.plugin.register_command
class GroupsManagerCommand(dnf.cli.Command):
    aliases = ('groups-manager',)
    summary = _('create and edit groups metadata file')

    def __init__(self, cli):
        super(GroupsManagerCommand, self).__init__(cli)
        # In-memory comps structure: accumulates groups from all --load
        # files and receives the edits requested on the command line.
        self.comps = libcomps.Comps()

    @staticmethod
    def set_argparser(parser):
        """Define the command line arguments of the groups-manager command."""
        # input / output options
        parser.add_argument('--load', action='append', default=[],
                            metavar='COMPS.XML',
                            help=_('load groups metadata from file'))
        parser.add_argument('--save', action='append', default=[],
                            metavar='COMPS.XML',
                            help=_('save groups metadata to file'))
        parser.add_argument('--merge', metavar='COMPS.XML',
                            help=_('load and save groups metadata to file'))
        parser.add_argument('--print', action='store_true', default=False,
                            help=_('print the result metadata to stdout'))
        # group options
        parser.add_argument('--id', type=group_id_type,
                            help=_('group id'))
        parser.add_argument('-n', '--name', help=_('group name'))
        parser.add_argument('--description',
                            help=_('group description'))
        parser.add_argument('--display-order', type=int,
                            help=_('group display order'))
        parser.add_argument('--translated-name', action='append', default=[],
                            metavar='LANG:TEXT', type=translation_type,
                            help=_('translated name for the group'))
        parser.add_argument('--translated-description', action='append', default=[],
                            metavar='LANG:TEXT', type=translation_type,
                            help=_('translated description for the group'))
        visible = parser.add_mutually_exclusive_group()
        visible.add_argument('--user-visible', dest='user_visible', action='store_true',
                             default=None,
                             help=_('make the group user visible (default)'))
        visible.add_argument('--not-user-visible', dest='user_visible', action='store_false',
                             default=None,
                             help=_('make the group user invisible'))

        # package list options
        section = parser.add_mutually_exclusive_group()
        section.add_argument('--mandatory', action='store_true',
                             help=_('add packages to the mandatory section'))
        section.add_argument('--optional', action='store_true',
                             help=_('add packages to the optional section'))
        section.add_argument('--remove', action='store_true', default=False,
                             help=_('remove packages from the group instead of adding them'))
        parser.add_argument('--dependencies', action='store_true',
                            help=_('include also direct dependencies for packages'))

        parser.add_argument("packages", nargs='*', metavar='PACKAGE',
                            help=_('package specification'))

    def configure(self):
        """Set up dnf demands and validate the argument combination."""
        demands = self.cli.demands

        # repositories are only needed when package specs must be resolved
        if self.opts.packages:
            demands.sack_activation = True
            demands.available_repos = True
            demands.load_system_repo = False

        # handle --merge option (shortcut to --load and --save the same file)
        if self.opts.merge:
            self.opts.load.insert(0, self.opts.merge)
            self.opts.save.append(self.opts.merge)

        # check that group is specified when editing is attempted
        if (self.opts.description
                or self.opts.display_order
                or self.opts.translated_name
                or self.opts.translated_description
                or self.opts.user_visible is not None
                or self.opts.packages):
            if not self.opts.id and not self.opts.name:
                raise dnf.cli.CliError(
                    _("Can't edit group without specifying it (use --id or --name)"))

    def load_input_files(self):
        """
        Load every file given via --load into self.comps.

        Gzipped inputs are decompressed to a temporary file first, because
        libcomps cannot read gzip directly. Raises dnf.exceptions.Error
        when any file cannot be read or parsed.
        """
        for file_name in self.opts.load:
            file_comps = libcomps.Comps()
            try:
                if file_name.endswith('.gz'):
                    # libcomps does not support gzipped files - decompress to temporary
                    # location
                    with gzip.open(file_name) as gz_file:
                        temp_file = tempfile.NamedTemporaryFile(delete=False)
                        try:
                            shutil.copyfileobj(gz_file, temp_file)
                            # close temp_file to ensure the content is flushed to disk
                            temp_file.close()
                            file_comps.fromxml_f(temp_file.name)
                        finally:
                            os.unlink(temp_file.name)
                else:
                    file_comps.fromxml_f(file_name)
            except (IOError, OSError, libcomps.ParserError) as err:
                # gzip module raises OSError on reading from malformed gz file
                # get_last_errors() output often contains duplicit lines, remove them
                seen = set()
                for error in file_comps.get_last_errors():
                    if error in seen:
                        continue
                    logger.error(error.strip())
                    seen.add(error)
                raise dnf.exceptions.Error(
                    _("Can't load file \"{}\": {}").format(file_name, err))
            else:
                self.comps += file_comps

    def save_output_files(self):
        """Serialize self.comps to every file given via --save.

        When serialization reports errors, all but the last are logged and
        the last one is raised as dnf.exceptions.Error.
        """
        for file_name in self.opts.save:
            try:
                # xml_f returns a list of errors / log entries
                errors = self.comps.xml_f(file_name, xml_options=COMPS_XML_OPTIONS)
            except libcomps.XMLGenError as err:
                errors = [err]
            if errors:
                # xml_f() method could return more than one error. In this case
                # raise the latest of them and log the others.
                for err in errors[:-1]:
                    logger.error(err.strip())
                raise dnf.exceptions.Error(_("Can't save file \"{}\": {}").format(
                    file_name, errors[-1].strip()))


    def find_group(self, group_id, name):
        '''
        Try to find group according to command line parameters - first by id
        then by name. Returns the matching group object or None.
        '''
        group = None
        if group_id:
            for grp in self.comps.groups:
                if grp.id == group_id:
                    group = grp
                    break
        if group is None and name:
            for grp in self.comps.groups:
                if grp.name == name:
                    group = grp
                    break
        return group

    def edit_group(self, group):
        '''
        Set attributes and package lists for selected group
        '''
        def langlist_to_strdict(lst):
            # convert [(lang, text), ...] pairs into a libcomps StrDict
            str_dict = libcomps.StrDict()
            for lang, text in lst:
                str_dict[lang] = text
            return str_dict

        # set group attributes
        if self.opts.name:
            group.name = self.opts.name
        if self.opts.description:
            group.desc = self.opts.description
        if self.opts.display_order:
            group.display_order = self.opts.display_order
        if self.opts.user_visible is not None:
            group.uservisible = self.opts.user_visible
        if self.opts.translated_name:
            group.name_by_lang = langlist_to_strdict(self.opts.translated_name)
        if self.opts.translated_description:
            group.desc_by_lang = langlist_to_strdict(self.opts.translated_description)

        # edit packages list
        if self.opts.packages:
            # find packages according to specifications from command line
            packages = set()
            for pkg_spec in self.opts.packages:
                subj = dnf.subject.Subject(pkg_spec)
                q = subj.get_best_query(self.base.sack, with_nevra=True,
                                        with_provides=False, with_filenames=False).latest()
                if not q:
                    logger.warning(_("No match for argument: {}").format(pkg_spec))
                    continue
                packages.update(q)
            if self.opts.dependencies:
                # add packages that provide requirements
                requirements = set()
                for pkg in packages:
                    requirements.update(pkg.requires)
                packages.update(self.base.sack.query().filterm(provides=requirements))

            pkg_names = {pkg.name for pkg in packages}

            if self.opts.remove:
                for pkg_name in pkg_names:
                    for pkg in group.packages_match(name=pkg_name,
                                                    type=libcomps.PACKAGE_TYPE_UNKNOWN):
                        group.packages.remove(pkg)
            else:
                if self.opts.mandatory:
                    pkg_type = libcomps.PACKAGE_TYPE_MANDATORY
                elif self.opts.optional:
                    pkg_type = libcomps.PACKAGE_TYPE_OPTIONAL
                else:
                    pkg_type = libcomps.PACKAGE_TYPE_DEFAULT
                # only append when no package of the same name and type is
                # already present in the group (avoids duplicates)
                for pkg_name in sorted(pkg_names):
                    if not group.packages_match(name=pkg_name, type=pkg_type):
                        group.packages.append(libcomps.Package(name=pkg_name, type=pkg_type))

    def run(self):
        """Execute the command: load inputs, create/edit the group, save/print."""
        self.load_input_files()

        if self.opts.id or self.opts.name:
            # we are adding / editing a group
            group = self.find_group(group_id=self.opts.id, name=self.opts.name)
            if group is None:
                # create a new group
                if self.opts.remove:
                    raise dnf.exceptions.Error(_("Can't remove packages from non-existent group"))
                group = libcomps.Group()
                if self.opts.id:
                    group.id = self.opts.id
                    group.name = self.opts.id
                elif self.opts.name:
                    group_id = text_to_id(self.opts.name)
                    if self.find_group(group_id=group_id, name=None):
                        raise dnf.cli.CliError(
                            _("Group id '{}' generated from '{}' is duplicit. "
                              "Please specify group id using --id.").format(
                                  group_id, self.opts.name))
                    group.id = group_id
                self.comps.groups.append(group)
            self.edit_group(group)

        self.save_output_files()
        # without any --save target the result is printed by default
        if self.opts.print or (not self.opts.save):
            print(self.comps.xml_str(xml_options=COMPS_XML_OPTIONS))
debug.py000064400000030425150402642240006207 0ustar00#
# Copyright (C) 2015  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals

from dnf.i18n import ucd
from dnfpluginscore import _, logger

import dnf
import dnf.cli
import gzip
import hawkey
import os
import rpm
import sys
import time

# First line of every dump file; DebugRestoreCommand.read_dump_file() uses
# it to validate that a file is a dnf debug dump of a supported version.
DEBUG_VERSION = "dnf-debug-dump version 1\n"


class Debug(dnf.Plugin):
    """Plugin entry point: registers the debug-dump and debug-restore commands."""

    name = 'debug'

    def __init__(self, base, cli):
        super(Debug, self).__init__(base, cli)
        self.base = base
        self.cli = cli
        # cli is None when loaded through the API; commands are CLI-only
        if cli is None:
            return
        for command in (DebugDumpCommand, DebugRestoreCommand):
            cli.register_command(command)


class DebugDumpCommand(dnf.cli.Command):
    """Write system, dnf, rpmdb and (optionally) repository state to a dump file."""

    aliases = ("debug-dump",)
    summary = _("dump information about installed rpm packages to file")

    def __init__(self, cli):
        super(DebugDumpCommand, self).__init__(cli)
        # output file object; created in run(), plain file or GzipFile
        self.dump_file = None

    def configure(self):
        self.cli.demands.sack_activation = True
        self.cli.demands.available_repos = True

    @staticmethod
    def set_argparser(parser):
        parser.add_argument(
            "--norepos", action="store_true", default=False,
            help=_("do not attempt to dump the repository contents."))
        parser.add_argument(
            "filename", nargs="?",
            help=_("optional name of dump file"))

    def run(self):
        """create debug txt file and compress it, if no filename specified
           use dnf_debug_dump-<timestamp>.txt.gz by default"""

        filename = self.opts.filename
        if not filename:
            now = time.strftime("%Y-%m-%d_%T", time.localtime(time.time()))
            filename = "dnf_debug_dump-%s-%s.txt.gz" % (os.uname()[1], now)

        filename = os.path.abspath(filename)
        # a .gz suffix selects transparent gzip compression of the dump
        if filename.endswith(".gz"):
            self.dump_file = gzip.GzipFile(filename, "w")
        else:
            self.dump_file = open(filename, "w")

        self.write(DEBUG_VERSION)
        self.dump_system_info()
        self.dump_dnf_config_info()
        self.dump_rpm_problems()
        self.dump_packages(not self.opts.norepos)
        self.dump_rpmdb_versions()
        self.dump_file.close()

        print(_("Output written to: %s") % filename)

    def write(self, msg):
        """Write msg to the dump file, encoding to bytes for gzip on Python 3."""
        if dnf.pycomp.PY3 and isinstance(self.dump_file, gzip.GzipFile):
            msg = bytes(msg, "utf8")
        dnf.pycomp.write_to_file(self.dump_file, msg)

    def dump_system_info(self):
        """Dump kernel release, machine arch and rpm/python version strings."""
        self.write("%%%%SYSTEM INFO\n")
        uname = os.uname()
        self.write("  uname: %s, %s\n" % (uname[2], uname[4]))
        self.write("  rpm ver: %s\n" % rpm.__version__)
        self.write("  python ver: %s\n" % sys.version.replace("\n", ""))
        return

    def dump_dnf_config_info(self):
        """Dump dnf substitutions, version, enabled plugins and global excludes."""
        var = self.base.conf.substitutions
        plugins = ",".join([p.name for p in self.base._plugins.plugins])
        self.write("%%%%DNF INFO\n")
        self.write("  arch: %s\n" % var["arch"])
        self.write("  basearch: %s\n" % var["basearch"])
        self.write("  releasever: %s\n" % var["releasever"])
        self.write("  dnf ver: %s\n" % dnf.const.VERSION)
        self.write("  enabled plugins: %s\n" % plugins)
        self.write("  global excludes: %s\n" % ",".join(self.base.conf.excludepkgs))
        return

    def dump_rpm_problems(self):
        """Dump unsatisfied requires and active conflicts found in the rpmdb."""
        self.write("%%%%RPMDB PROBLEMS\n")
        (missing, conflicts) = rpm_problems(self.base)
        self.write("".join(["Package %s requires %s\n" % (ucd(pkg), ucd(req))
                            for (req, pkg) in missing]))
        self.write("".join(["Package %s conflicts with %s\n" % (ucd(pkg),
                                                                ucd(conf))
                            for (conf, pkg) in conflicts]))

    def dump_packages(self, load_repos):
        """Dump installed package specs, plus per-repo contents when load_repos."""
        q = self.base.sack.query()
        # packages from rpmdb
        self.write("%%%%RPMDB\n")
        for p in sorted(q.installed()):
            self.write("  %s\n" % pkgspec(p))

        if not load_repos:
            return

        self.write("%%%%REPOS\n")
        available = q.available()
        for repo in sorted(self.base.repos.iter_enabled(), key=lambda x: x.id):
            try:
                # prefer metalink, then mirrorlist, then first baseurl
                url = None
                if repo.metalink is not None:
                    url = repo.metalink
                elif repo.mirrorlist is not None:
                    url = repo.mirrorlist
                elif len(repo.baseurl) > 0:
                    url = repo.baseurl[0]
                self.write("%%%s - %s\n" % (repo.id, url))
                self.write("  excludes: %s\n" % ",".join(repo.excludepkgs))
                for po in sorted(available.filter(reponame=repo.id)):
                    self.write("  %s\n" % pkgspec(po))

            except dnf.exceptions.Error as e:
                # keep dumping the remaining repos even if one fails
                self.write("Error accessing repo %s: %s\n" % (repo, str(e)))
                continue
        return

    def dump_rpmdb_versions(self):
        """Dump the aggregated rpmdb version checksum."""
        self.write("%%%%RPMDB VERSIONS\n")
        version = self.base.sack._rpmdb_version()
        self.write("  all: %s\n" % version)
        return


class DebugRestoreCommand(dnf.cli.Command):
    """Bring the system back to the package set recorded in a debug-dump file."""

    aliases = ("debug-restore",)
    summary = _("restore packages recorded in debug-dump file")

    def configure(self):
        self.cli.demands.sack_activation = True
        self.cli.demands.available_repos = True
        self.cli.demands.root_user = True
        # with --output we only print actions, so no transaction is resolved
        if not self.opts.output:
            self.cli.demands.resolving = True

    @staticmethod
    def set_argparser(parser):
        parser.add_argument(
            "--output", action="store_true",
            help=_("output commands that would be run to stdout."))
        parser.add_argument(
            "--install-latest", action="store_true",
            help=_("Install the latest version of recorded packages."))
        parser.add_argument(
            "--ignore-arch", action="store_true",
            help=_("Ignore architecture and install missing packages matching "
                   "the name, epoch, version and release."))
        parser.add_argument(
            "--filter-types", metavar="[install, remove, replace]",
            default="install, remove, replace",
            help=_("limit to specified type"))
        parser.add_argument(
            "--remove-installonly", action="store_true",
            help=_('Allow removing of install-only packages. Using this option may '
                   'result in an attempt to remove the running kernel.'))
        parser.add_argument(
            "filename", nargs=1, help=_("name of dump file"))

    def run(self):
        """Execute the command action here."""
        # normalize "install, remove" / "install remove" into a set of types
        if self.opts.filter_types:
            self.opts.filter_types = set(
                self.opts.filter_types.replace(",", " ").split())

        dump_pkgs = self.read_dump_file(self.opts.filename[0])

        self.process_installed(dump_pkgs, self.opts)

        self.process_dump(dump_pkgs, self.opts)

    def process_installed(self, dump_pkgs, opts):
        """Compare installed packages against the dump.

        Marks dump entries as 'skip' (already installed) or 'replace'
        (version differs), and removes packages that are not in the dump
        (or install-only packages of a different version).
        """
        installed = self.base.sack.query().installed()
        installonly_pkgs = self.base._get_installonly_query(installed)
        for pkg in installed:
            pkg_remove = False
            spec = pkgspec(pkg)
            dumped_versions = dump_pkgs.get((pkg.name, pkg.arch), None)
            if dumped_versions is not None:
                evr = (pkg.epoch, pkg.version, pkg.release)
                if evr in dumped_versions:
                    # the correct version is already installed
                    dumped_versions[evr] = 'skip'
                else:
                    # other version is currently installed
                    if pkg in installonly_pkgs:
                        # package is install-only, should be removed
                        pkg_remove = True
                    else:
                        # package should be upgraded / downgraded
                        if "replace" in opts.filter_types:
                            action = 'replace'
                        else:
                            action = 'skip'
                        for d_evr in dumped_versions.keys():
                            dumped_versions[d_evr] = action
            else:
                # package should not be installed
                pkg_remove = True
            if pkg_remove and "remove" in opts.filter_types:
                if pkg not in installonly_pkgs or opts.remove_installonly:
                    if opts.output:
                        print("remove    %s" % spec)
                    else:
                        self.base.package_remove(pkg)

    def process_dump(self, dump_pkgs, opts):
        """Install / replace packages according to the actions recorded in dump_pkgs."""
        for (n, a) in sorted(dump_pkgs.keys()):
            dumped_versions = dump_pkgs[(n, a)]
            for (e, v, r) in sorted(dumped_versions.keys()):
                action = dumped_versions[(e, v, r)]
                if action == 'skip':
                    continue
                if opts.ignore_arch:
                    arch = ""
                else:
                    arch = "." + a
                if opts.install_latest and action == "install":
                    # name(.arch) only - the resolver picks the latest version
                    pkg_spec = "%s%s" % (n, arch)
                else:
                    pkg_spec = pkgtup2spec(n, arch, e, v, r)
                if action in opts.filter_types:
                    if opts.output:
                        print("%s   %s" % (action, pkg_spec))
                    else:
                        try:
                            self.base.install(pkg_spec)
                        except dnf.exceptions.MarkingError:
                            logger.error(_("Package %s is not available"), pkg_spec)

    @staticmethod
    def read_dump_file(filename):
        """Parse the %%%%RPMDB section of a dump file.

        Returns {(name, arch): {(epoch, version, release): 'install'}}.
        Raises dnf.exceptions.Error when the version header does not match.
        NOTE(review): fobj is never explicitly closed - relies on GC.
        """
        if filename.endswith(".gz"):
            fobj = gzip.GzipFile(filename)
        else:
            fobj = open(filename)

        if ucd(fobj.readline()) != DEBUG_VERSION:
            logger.error(_("Bad dnf debug file: %s"), filename)
            raise dnf.exceptions.Error

        skip = True
        pkgs = {}
        for line in fobj:
            line = ucd(line)
            if skip:
                if line == "%%%%RPMDB\n":
                    skip = False
                continue

            # the package list ends at the first non-indented line
            if not line or line[0] != " ":
                break

            pkg_spec = line.strip()
            nevra = hawkey.split_nevra(pkg_spec)
            # {(name, arch): {(epoch, version, release): action}}
            pkgs.setdefault((nevra.name, nevra.arch), {})[
                (nevra.epoch, nevra.version, nevra.release)] = "install"

        return pkgs


def rpm_problems(base):
    """Scan the rpmdb for dependency problems.

    Returns a pair (missing_requires, existing_conflicts); each element is
    a list of (reldep, package) tuples.
    """
    rpmdb = dnf.sack._rpmdb_sack(base)
    installed = rpmdb.query().installed()

    requires = set()
    conflicts = set()
    for pkg in installed:
        for req in pkg.requires:
            req_str = str(req)
            # internal rpm markers are not real dependencies
            if req_str == "solvable:prereqmarker" or req_str.startswith("rpmlib("):
                continue
            requires.add((req, pkg))
        conflicts.update((conf, pkg) for conf in pkg.conflicts)

    missing_requires = [(req, pkg) for (req, pkg) in requires
                        if not installed.filter(provides=req)]
    existing_conflicts = [(conf, pkg) for (conf, pkg) in conflicts
                          if installed.filter(provides=conf)]
    return missing_requires, existing_conflicts


def pkgspec(pkg):
    """Return the full NEVRA spec string for a package object."""
    fields = (pkg.name, pkg.arch, pkg.epoch, pkg.version, pkg.release)
    return pkgtup2spec(*fields)


def pkgtup2spec(name, arch, epoch, version, release):
    """Build a "name-epoch:version-release.arch" spec string.

    Empty/None arch omits the arch suffix; empty/None epoch omits the
    "epoch:" prefix.
    """
    arch_part = ".%s" % arch.lstrip('.') if arch else ""
    epoch_part = "%s:" % epoch if epoch not in (None, "") else ""
    return "%s-%s%s-%s%s" % (name, epoch_part, version, release, arch_part)
versionlock.py000064400000030035150402642240007454 0ustar00#
# Copyright (C) 2015  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger

import dnf
import dnf.cli
import dnf.exceptions
import fnmatch
import hawkey
import os
import tempfile
import time
import warnings

# User-visible message templates, translated via dnfpluginscore._.
NOT_READABLE = _('Unable to read version lock configuration: %s')
NO_LOCKLIST = _('Locklist not set')
ADDING_SPEC = _('Adding versionlock on:')
EXCLUDING_SPEC = _('Adding exclude on:')
EXISTING_SPEC = _('Package already locked in equivalent form:')
ALREADY_LOCKED = _('Package {} is already locked')
ALREADY_EXCLUDED = _('Package {} is already excluded')
DELETING_SPEC = _('Deleting versionlock for:')
NOTFOUND_SPEC = _('No package found for:')
NO_VERSIONLOCK = _('Excludes from versionlock plugin were not applied')
APPLY_LOCK = _('Versionlock plugin: number of lock rules from file "{}" applied: {}')
APPLY_EXCLUDE = _('Versionlock plugin: number of exclude rules from file "{}" applied: {}')
NEVRA_ERROR = _('Versionlock plugin: could not parse pattern:')

# Path of the lock list file; set from plugin configuration in
# VersionLock.config() (False when the option is missing there).
locklist_fn = None


class VersionLock(dnf.Plugin):
    """Plugin that restricts packages to the versions listed in a lock file."""

    name = 'versionlock'

    def __init__(self, base, cli):
        super(VersionLock, self).__init__(base, cli)
        self.base = base
        self.cli = cli
        if self.cli is not None:
            self.cli.register_command(VersionLockCommand)

    def config(self):
        """Read the plugin configuration and remember the locklist path."""
        global locklist_fn
        cp = self.read_config(self.base.conf)
        # boolean-and chain: locklist_fn becomes False when the section or
        # option is missing, otherwise the configured path string
        locklist_fn = (cp.has_section('main') and cp.has_option('main', 'locklist')
                       and cp.get('main', 'locklist'))

    def locking_enabled(self):
        """Return True when version lock excludes should be applied."""
        if self.cli is None:
            enabled = True  # loaded via the api, not called by cli
        else:
            enabled = self.cli.demands.plugin_filtering_enabled
            if enabled is None:
                # fall back: apply the locks whenever a transaction will be resolved
                enabled = self.cli.demands.resolving
        return enabled

    def sack(self):
        """Apply lock ('pattern') and exclude ('!pattern') rules to the sack.

        Locked names have all non-matching versions (and their obsoleters)
        excluded; '!'-prefixed patterns are excluded directly.
        """
        if not self.locking_enabled():
            logger.debug(NO_VERSIONLOCK)
            return

        excludes_query = self.base.sack.query().filter(empty=True)
        locked_query = self.base.sack.query().filter(empty=True)
        locked_names = set()
        # counter of applied rules [locked_count, excluded_count]
        count = [0, 0]
        for pat in _read_locklist():
            excl = 0
            # leading '!' marks an exclude rule instead of a lock rule
            if pat and pat[0] == '!':
                pat = pat[1:]
                excl = 1

            possible_nevras = dnf.subject.Subject(pat).get_nevra_possibilities(
                forms=[hawkey.FORM_NEVRA, hawkey.FORM_NEVR, hawkey.FORM_NEV,
                       hawkey.FORM_NA, hawkey.FORM_NAME])
            if possible_nevras:
                count[excl] += 1
            else:
                logger.error("%s %s", NEVRA_ERROR, pat)
                continue
            for nevra in possible_nevras:
                pat_query = nevra.to_query(self.base.sack)
                if excl:
                    excludes_query = excludes_query.union(pat_query)
                else:
                    locked_names.add(nevra.name)
                    locked_query = locked_query.union(pat_query)
                # stop at the first nevra form that matched real packages
                if pat_query:
                    break

        if count[1]:
            logger.debug(APPLY_EXCLUDE.format(locklist_fn, count[1]))
        if count[0]:
            logger.debug(APPLY_LOCK.format(locklist_fn, count[0]))

        if locked_names:
            all_versions = self.base.sack.query().filter(name__glob=list(locked_names))
            other_versions = all_versions.difference(locked_query)
            excludes_query = excludes_query.union(other_versions)
            # exclude also anything that obsoletes the locked versions of packages
            obsoletes_query = self.base.sack.query().filterm(obsoletes=locked_query)
            # leave out obsoleters that are also part of locked versions (otherwise the obsoleter package
            # would not be installable at all)
            excludes_query = excludes_query.union(obsoletes_query.difference(locked_query))

        # never exclude what is already installed (system repo)
        excludes_query.filterm(reponame__neq=hawkey.SYSTEM_REPO_NAME)
        if excludes_query:
            self.base.sack.add_excludes(excludes_query)

# Recognized subcommand spellings: exclude aliases, delete aliases, and the
# deprecated 'blacklist' alias (mapped to exclude with a deprecation warning).
EXC_CMDS = ['exclude', 'add-!', 'add!']
DEL_CMDS = ['delete', 'del']
DEP_EXC_CMDS = ['blacklist']
ALL_CMDS = ['add', 'clear', 'list'] + EXC_CMDS + DEL_CMDS + DEP_EXC_CMDS


class VersionLockCommand(dnf.cli.Command):
    """The ``dnf versionlock`` command: pin or exclude package versions.

    Entries live in the plugin locklist file (module-level ``locklist_fn``);
    lines starting with '!' are excludes, all other lines are version locks.
    """

    aliases = ("versionlock",)
    summary = _("control package version locks")
    usage = "[add|exclude|list|delete|clear] [<package-nevr-spec>]"

    @staticmethod
    def set_argparser(parser):
        """Register command-line options and positional arguments."""
        parser.add_argument("--raw", default=False, action='store_true',
                            help=_("Use package specifications as they are, do not "
                                   "try to parse them"))
        # Optional; when omitted run() defaults to 'list'.
        parser.add_argument("subcommand", nargs='?',
                            metavar="[add|exclude|list|delete|clear]")
        parser.add_argument("package", nargs='*',
                            metavar="[<package-nevr-spec>]")

    def configure(self):
        # Locking resolves specs against both installed and available packages.
        self.cli.demands.sack_activation = True
        self.cli.demands.available_repos = True

    def run(self):
        """Normalize the subcommand to a canonical action and execute it."""
        cmd = 'list'
        if self.opts.subcommand:
            if self.opts.subcommand not in ALL_CMDS:
                # An unrecognized first argument is treated as a package spec
                # for an implicit 'add'.
                cmd = 'add'
                self.opts.package.insert(0, self.opts.subcommand)
            elif self.opts.subcommand in EXC_CMDS:
                cmd = 'exclude'
            elif self.opts.subcommand in DEP_EXC_CMDS:
                msg = _("Subcommand '{}' is deprecated. Use 'exclude' subcommand instead.").format(
                    self.opts.subcommand)
                warnings.warn(msg, dnf.exceptions.DeprecationWarning, stacklevel=2)
                cmd = 'exclude'
            elif self.opts.subcommand in DEL_CMDS:
                cmd = 'delete'
            else:
                cmd = self.opts.subcommand

        if cmd == 'add':
            results = _search_locklist(self.opts.package)
            for entry, entry_cmd in results:
                if entry_cmd == '':
                    # Spec not present in the locklist yet -> append a lock.
                    _write_locklist(self.base, [entry], self.opts.raw, True,
                                    "\n# Added lock on %s\n" % time.ctime(),
                                    ADDING_SPEC, '')
                elif cmd != entry_cmd:
                    # Present, but as an exclude -> conflicting request.
                    raise dnf.exceptions.Error(ALREADY_EXCLUDED.format(entry))
                else:
                    logger.info("%s %s", EXISTING_SPEC, entry)
        elif cmd == 'exclude':
            results = _search_locklist(self.opts.package)
            for entry, entry_cmd in results:
                if entry_cmd == '':
                    # Spec not present yet -> append with the '!' exclude prefix.
                    _write_locklist(self.base, [entry], self.opts.raw, False,
                                    "\n# Added exclude on %s\n" % time.ctime(),
                                    EXCLUDING_SPEC, '!')
                elif cmd != entry_cmd:
                    # Present, but as a lock -> conflicting request.
                    raise dnf.exceptions.Error(ALREADY_LOCKED.format(entry))
                else:
                    logger.info("%s %s", EXISTING_SPEC, entry)
        elif cmd == 'list':
            for pat in _read_locklist():
                print(pat)
        elif cmd == 'clear':
            if not locklist_fn:
                raise dnf.exceptions.Error(NO_LOCKLIST)
            with open(locklist_fn, 'w') as f:
                # open in write mode truncates file
                pass
        elif cmd == 'delete':
            if not locklist_fn:
                raise dnf.exceptions.Error(NO_LOCKLIST)
            # Rewrite the locklist through a temp file in the same directory
            # so the final rename stays on one filesystem (atomic replace).
            dirname = os.path.dirname(locklist_fn)
            (out, tmpfilename) = tempfile.mkstemp(dir=dirname, suffix='.tmp')
            locked_specs = _read_locklist()
            count = 0
            with os.fdopen(out, 'w', -1) as out:
                for ent in locked_specs:
                    if _match(ent, self.opts.package):
                        print("%s %s" % (DELETING_SPEC, ent))
                        count += 1
                        continue
                    out.write(ent)
                    out.write('\n')
            if not count:
                # Nothing matched: discard the temp copy, keep the original.
                os.unlink(tmpfilename)
            else:
                os.chmod(tmpfilename, 0o644)
                os.rename(tmpfilename, locklist_fn)

def _read_locklist():
    """Return the locklist file's entries, skipping blanks and '#' comments.

    Raises dnf.exceptions.Error when no locklist is configured or the file
    cannot be read.
    """
    if not locklist_fn:
        raise dnf.exceptions.Error(NO_LOCKLIST)
    try:
        with open(locklist_fn) as llfile:
            return [line.strip() for line in llfile
                    if line.strip() and not line.startswith('#')]
    except IOError as e:
        raise dnf.exceptions.Error(NOT_READABLE % e)


def _search_locklist(package):
    """Match each requested spec against the current locklist.

    Returns a list of (entry, action) pairs: for every locklist entry that
    matches a requested spec, the entry with 'exclude' or 'add' (depending on
    the '!' prefix); for a spec with no match, the spec itself with ''.
    """
    locked_specs = _read_locklist()
    results = []
    for pkg_spec in package:
        hits = [(ent, 'exclude' if ent.startswith('!') else 'add')
                for ent in locked_specs if _match(ent, [pkg_spec])]
        if hits:
            results.extend(hits)
        else:
            results.append((pkg_spec, ''))
    return results


def _write_locklist(base, args, raw, try_installed, comment, info, prefix):
    """Resolve *args* to version specs and append them to the locklist file.

    With *raw* the patterns are written verbatim; otherwise each pattern is
    resolved against the rpmdb (when *try_installed*) and then the sack.
    *comment* is written once, *prefix* ('' or '!') before every spec.
    """

    def _resolve(pattern):
        # Prefer installed packages when requested; fall back to available.
        subject = dnf.subject.Subject(pattern)
        query = None
        if try_installed:
            query = subject.get_best_query(
                dnf.sack._rpmdb_sack(base), with_nevra=True,
                with_provides=False, with_filenames=False)
        if not query:
            query = subject.get_best_query(
                base.sack, with_nevra=True, with_provides=False,
                with_filenames=False)
        return query

    specs = set()
    for pat in args:
        if raw:
            specs.add(pat)
            continue
        pkgs = _resolve(pat)
        if not pkgs:
            print("%s %s" % (NOTFOUND_SPEC, pat))
        specs.update(pkgtup2spec(*pkg.pkgtup) for pkg in pkgs)

    if not specs:
        return
    if not locklist_fn:
        raise dnf.exceptions.Error(NO_LOCKLIST)
    try:
        with open(locklist_fn, 'a') as f:
            f.write(comment)
            for spec in specs:
                print("%s %s" % (info, spec))
                f.write("%s%s\n" % (prefix, spec))
    except IOError as e:
        raise dnf.exceptions.Error(NOT_READABLE % e)

def _match(ent, patterns):
    ent = ent.lstrip('!')
    for pat in patterns:
        if ent == pat:
            return True
    try:
        n = hawkey.split_nevra(ent)
    except hawkey.ValueException:
        return False
    for name in (
        '%s' % n.name,
        '%s.%s' % (n.name, n.arch),
        '%s-%s' % (n.name, n.version),
        '%s-%s-%s' % (n.name, n.version, n.release),
        '%s-%s:%s' % (n.name, n.epoch, n.version),
        '%s-%s-%s.%s' % (n.name, n.version, n.release, n.arch),
        '%s-%s:%s-%s' % (n.name, n.epoch, n.version, n.release),
        '%s:%s-%s-%s.%s' % (n.epoch, n.name, n.version, n.release, n.arch),
        '%s-%s:%s-%s.%s' % (n.name, n.epoch, n.version, n.release, n.arch),
    ):
        for pat in patterns:
            if fnmatch.fnmatch(name, pat):
                return True
    return False


def pkgtup2spec(name, arch, epoch, version, release):
    """Build a locklist spec string from a package tuple.

    The architecture is deliberately dropped (locks apply to every arch) and
    a missing epoch is normalized to "0".
    """
    return "{}-{}:{}-{}.*".format(name, epoch or "0", version, release)
etckeeper.py000064400000002406150402642240007066 0ustar00# etckeeper.py, support etckeeper for dnf
#
# Copyright (C) 2014 Peter Listiak
# https://github.com/plistiak/dnf-etckeeper
#
# Later modifications by Petr Spacek:
# Distutils code below was copied from etckeeper-bzr distributed with v1.15
#

import logging
import subprocess
import dnf

# Module-level logger; messages are routed through DNF's plugin logging setup.
logger = logging.getLogger('dnf.plugin')


class Etckeeper(dnf.Plugin):
    """DNF plugin that runs etckeeper before and after each transaction,
    committing /etc changes to version control."""

    name = 'etckeeper'

    def _run_command(self, command):
        """Run ``etckeeper <command>`` with its output discarded.

        Raises dnf.exceptions.Error when etckeeper exits non-zero; a missing
        etckeeper binary (OSError) is deliberately only logged as a warning
        so DNF keeps working without etckeeper installed.
        """
        logger.debug('Etckeeper plugin: %s', command)
        try:
            with open("/dev/null", "wb") as devnull:
                ret = subprocess.call(("etckeeper", command),
                                      stdout=devnull, stderr=devnull,
                                      close_fds=True)
                if ret != 0:
                    raise dnf.exceptions.Error('"etckeeper %s" returned: %d' % (command, ret))
        except OSError as err:
            # Lazy %-args: the message is formatted only if the record is
            # actually emitted (previous code formatted eagerly with %).
            logger.warning('Failed to run "etckeeper %s": %s', command, err)

    def resolved(self):
        # Hook invoked after depsolving, before the transaction runs.
        self._run_command("pre-install")

    def transaction(self):
        # Hook invoked after the transaction completes.
        self._run_command("post-install")

if __name__ == "__main__":
    # Self-install path used only when this plugin file is executed directly
    # (distutils snippet copied from etckeeper-bzr v1.15, per file header).
    # NOTE(review): distutils was removed from the stdlib in Python 3.12 —
    # confirm this install path is still needed before targeting newer Python.
    from distutils.core import setup
    setup(name="dnf-etckeeper",
          packages=["dnf-plugins"],
          package_dir={"dnf-plugins":"etckeeper-dnf"})
reposync.py000064400000034470150402642240006767 0ustar00# reposync.py
# DNF plugin adding a command to download all packages from given remote repo.
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals

import hawkey
import os
import shutil
import types

from dnfpluginscore import _, logger
from dnf.cli.option_parser import OptionParser
import dnf
import dnf.cli


def _pkgdir(intermediate, target):
    """Return the canonical absolute path ``<cwd>/<intermediate>/<target>``."""
    cwd = dnf.i18n.ucd(os.getcwd())
    combined = os.path.join(cwd, intermediate, target)
    return os.path.realpath(combined)


class RPMPayloadLocation(dnf.repo.RPMPayload):
    """RPM download payload that is forced into a caller-chosen directory."""

    def __init__(self, pkg, progress, pkg_location):
        super(RPMPayloadLocation, self).__init__(pkg, progress)
        # Keep only the directory; the downloader appends the file name.
        self.package_dir = os.path.dirname(pkg_location)

    def _target_params(self):
        params = super(RPMPayloadLocation, self)._target_params()
        # Make sure the destination directory exists before handing it over.
        dnf.util.ensure_dir(self.package_dir)
        params['dest'] = self.package_dir
        return params


@dnf.plugin.register_command
class RepoSyncCommand(dnf.cli.Command):
    """The ``reposync`` command: mirror the packages (and optionally the
    metadata and comps) of all enabled repositories into a local tree."""

    aliases = ('reposync',)
    summary = _('download all packages from remote repo')

    def __init__(self, cli):
        super(RepoSyncCommand, self).__init__(cli)

    @staticmethod
    def set_argparser(parser):
        """Register reposync's command-line options."""
        parser.add_argument('-a', '--arch', dest='arches', default=[],
                            action=OptionParser._SplitCallback, metavar='[arch]',
                            help=_('download only packages for this ARCH'))
        parser.add_argument('--delete', default=False, action='store_true',
                            help=_('delete local packages no longer present in repository'))
        parser.add_argument('--download-metadata', default=False, action='store_true',
                            help=_('download all the metadata.'))
        parser.add_argument('-g', '--gpgcheck', default=False, action='store_true',
                            help=_('Remove packages that fail GPG signature checking '
                                   'after downloading'))
        parser.add_argument('-m', '--downloadcomps', default=False, action='store_true',
                            help=_('also download and uncompress comps.xml'))
        parser.add_argument('--metadata-path',
                            help=_('where to store downloaded repository metadata. '
                                   'Defaults to the value of --download-path.'))
        parser.add_argument('-n', '--newest-only', default=False, action='store_true',
                            help=_('download only newest packages per-repo'))
        parser.add_argument('--norepopath', default=False, action='store_true',
                            help=_("Don't add the reponame to the download path."))
        parser.add_argument('-p', '--download-path', default='./',
                            help=_('where to store downloaded repositories'))
        parser.add_argument('--remote-time', default=False, action='store_true',
                            help=_('try to set local timestamps of local files by '
                                   'the one on the server'))
        parser.add_argument('--source', default=False, action='store_true',
                            help=_('download only source packages'))
        parser.add_argument('-u', '--urls', default=False, action='store_true',
                            help=_("Just list urls of what would be downloaded, "
                                   "don't download"))

    def configure(self):
        """Enable the requested repos and force fresh metadata for them."""
        demands = self.cli.demands
        demands.available_repos = True
        demands.sack_activation = True

        repos = self.base.repos

        if self.opts.repo:
            # --repo given: start from everything disabled, enable only those.
            repos.all().disable()
            for repoid in self.opts.repo:
                try:
                    repo = repos[repoid]
                except KeyError:
                    raise dnf.cli.CliError("Unknown repo: '%s'." % repoid)
                repo.enable()

        if self.opts.source:
            repos.enable_source_repos()

        # --norepopath would make several repos share one directory.
        if len(list(repos.iter_enabled())) > 1 and self.opts.norepopath:
            raise dnf.cli.CliError(
                _("Can't use --norepopath with multiple repositories"))

        for repo in repos.iter_enabled():
            # Expire cached metadata so the sync reflects the current repo;
            # delta rpms are useless for mirroring.
            repo._repo.expire()
            repo.deltarpm = False

    def run(self):
        """Download packages (and optionally metadata/comps) per enabled repo."""
        # Keep downloaded rpms instead of letting DNF clean its cache.
        self.base.conf.keepcache = True
        gpgcheck_ok = True
        for repo in self.base.repos.iter_enabled():
            if self.opts.remote_time:
                repo._repo.setPreserveRemoteTime(True)
            if self.opts.download_metadata:
                if self.opts.urls:
                    # URL-listing mode: print metadata URLs, download nothing.
                    for md_type, md_location in repo._repo.getMetadataLocations():
                        url = repo.remote_location(md_location)
                        if url:
                            print(url)
                        else:
                            msg = _("Failed to get mirror for metadata: %s") % md_type
                            logger.warning(msg)
                else:
                    self.download_metadata(repo)
            if self.opts.downloadcomps:
                if self.opts.urls:
                    mdl = dict(repo._repo.getMetadataLocations())
                    group_locations = [mdl[md_type]
                                       for md_type in ('group', 'group_gz', 'group_gz_zck')
                                       if md_type in mdl]
                    if group_locations:
                        # Print the first group-file location with a mirror.
                        for group_location in group_locations:
                            url = repo.remote_location(group_location)
                            if url:
                                print(url)
                                break
                        else:
                            msg = _("Failed to get mirror for the group file.")
                            logger.warning(msg)
                else:
                    self.getcomps(repo)
            pkglist = self.get_pkglist(repo)
            if self.opts.urls:
                self.print_urls(pkglist)
            else:
                self.download_packages(pkglist)
                if self.opts.gpgcheck:
                    for pkg in pkglist:
                        local_path = self.pkg_download_path(pkg)
                        # base.package_signature_check uses pkg.localPkg() to determine
                        # the location of the package rpm file on the disk.
                        # Set it to the correct download path.
                        pkg.localPkg  = types.MethodType(
                            lambda s, local_path=local_path: local_path, pkg)
                        result, error = self.base.package_signature_check(pkg)
                        if result != 0:
                            # Failed check: drop the file, fail at the very end
                            # so the other packages are still processed.
                            logger.warning(_("Removing {}: {}").format(
                                os.path.basename(local_path), error))
                            os.unlink(local_path)
                            gpgcheck_ok = False
            if self.opts.delete:
                self.delete_old_local_packages(repo, pkglist)
        if not gpgcheck_ok:
            raise dnf.exceptions.Error(_("GPG signature check failed."))

    def repo_target(self, repo):
        """Directory that receives *repo*'s packages."""
        return _pkgdir(self.opts.destdir or self.opts.download_path,
                       repo.id if not self.opts.norepopath else '')

    def metadata_target(self, repo):
        """Directory that receives *repo*'s metadata (--metadata-path aware)."""
        if self.opts.metadata_path:
            return _pkgdir(self.opts.metadata_path, repo.id)
        else:
            return self.repo_target(repo)

    def pkg_download_path(self, pkg):
        """Absolute local path for *pkg*, guarded against path traversal.

        Raises dnf.exceptions.Error when the package's repo-relative location
        would escape the repo's download directory.
        """
        repo_target = self.repo_target(pkg.repo)
        pkg_download_path = os.path.realpath(
            os.path.join(repo_target, pkg.location))
        # join() ensures repo_target ends with a path separator (otherwise the
        # check would pass if pkg_download_path was a "sibling" path component
        # of repo_target that has the same prefix).
        if not pkg_download_path.startswith(os.path.join(repo_target, '')):
            raise dnf.exceptions.Error(
                _("Download target '{}' is outside of download path '{}'.").format(
                    pkg_download_path, repo_target))
        return pkg_download_path

    def delete_old_local_packages(self, repo, pkglist):
        # delete any *.rpm file under target path, that was not downloaded from repository
        downloaded_files = set(self.pkg_download_path(pkg) for pkg in pkglist)
        for dirpath, dirnames, filenames in os.walk(self.repo_target(repo)):
            for filename in filenames:
                path = os.path.join(dirpath, filename)
                if filename.endswith('.rpm') and os.path.isfile(path):
                    if path not in downloaded_files:
                        # Delete disappeared or relocated file
                        try:
                            os.unlink(path)
                            logger.info(_("[DELETED] %s"), path)
                        except OSError:
                            logger.error(_("failed to delete file %s"), path)

    def getcomps(self, repo):
        """Download and decompress *repo*'s comps (group) file as comps.xml."""
        comps_fn = repo._repo.getCompsFn()
        if comps_fn:
            dest_path = self.metadata_target(repo)
            dnf.util.ensure_dir(dest_path)
            dest = os.path.join(dest_path, 'comps.xml')
            dnf.yum.misc.decompress(comps_fn, dest=dest)
            logger.info(_("comps.xml for repository %s saved"), repo.id)

    def download_metadata(self, repo):
        """Download all of *repo*'s repodata into the metadata target dir."""
        repo_target = self.metadata_target(repo)
        repo._repo.downloadMetadata(repo_target)
        return True

    def _get_latest(self, query):
        """
        return union of these queries:
        - the latest NEVRAs from non-modular packages
        - all packages from stream version with the latest package NEVRA
          (this should not be needed but the latest package NEVRAs might be
          part of an older module version)
        - all packages from the latest stream version
        """
        if not dnf.base.WITH_MODULES:
            return query.latest()

        query.apply()
        module_packages = self.base._moduleContainer.getModulePackages()
        all_artifacts = set()
        module_dict = {}  # {NameStream: {Version: [modules]}}
        artifact_version = {} # {artifact: {NameStream: [Version]}}
        for module_package in module_packages:
            artifacts = module_package.getArtifacts()
            all_artifacts.update(artifacts)
            module_dict.setdefault(module_package.getNameStream(), {}).setdefault(
                module_package.getVersionNum(), []).append(module_package)
            for artifact in artifacts:
                artifact_version.setdefault(artifact, {}).setdefault(
                    module_package.getNameStream(), []).append(module_package.getVersionNum())

        # the latest NEVRAs from non-modular packages
        latest_query = query.filter(
            pkg__neq=query.filter(nevra_strict=all_artifacts)).latest()

        # artifacts from the newest version and those versions that contain an artifact
        # with the highest NEVRA
        latest_stream_artifacts = set()
        for namestream, version_dict in module_dict.items():
            # versions that will be synchronized
            versions = set()
            # add the newest stream version
            versions.add(sorted(version_dict.keys(), reverse=True)[0])
            # collect all artifacts in all stream versions
            stream_artifacts = set()
            for modules in version_dict.values():
                for module in modules:
                    stream_artifacts.update(module.getArtifacts())
            # find versions to which the packages with the highest NEVRAs belong
            for latest_pkg in query.filter(nevra_strict=stream_artifacts).latest():
                # here we depend on modules.yaml allways containing full NEVRA (including epoch)
                nevra = "{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch}".format(latest_pkg)
                # download only highest version containing the latest artifact
                versions.add(max(artifact_version[nevra][namestream]))
            # add all artifacts from selected versions for synchronization
            for version in versions:
                for module in version_dict[version]:
                    latest_stream_artifacts.update(module.getArtifacts())
        latest_query = latest_query.union(query.filter(nevra_strict=latest_stream_artifacts))

        return latest_query

    def get_pkglist(self, repo):
        """Return the query of packages to download for *repo*, honouring
        --newest-only, --source and --arch filters."""
        query = self.base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).available().filterm(
            reponame=repo.id)
        if self.opts.newest_only:
            query = self._get_latest(query)
        if self.opts.source:
            query.filterm(arch='src')
        elif self.opts.arches:
            query.filterm(arch=self.opts.arches)
        return query

    def download_packages(self, pkglist):
        """Download every package in *pkglist* to its computed local path."""
        base = self.base
        progress = base.output.progress
        if progress is None:
            progress = dnf.callback.NullDownloadProgress()
        # Delta support is set up but unused (max 0): reposync mirrors full rpms.
        drpm = dnf.drpm.DeltaInfo(base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).installed(),
                                  progress, 0)
        payloads = [RPMPayloadLocation(pkg, progress, self.pkg_download_path(pkg))
                    for pkg in pkglist]
        base._download_remote_payloads(payloads, drpm, progress, None, False)

    def print_urls(self, pkglist):
        """Print the remote URL of every package in *pkglist* (for --urls)."""
        for pkg in pkglist:
            url = pkg.remote_location()
            if url:
                print(url)
            else:
                msg = _("Failed to get mirror for package: %s") % pkg.name
                logger.warning(msg)
repomanage.py000064400000024512150402642240007237 0ustar00# repomanage.py
# DNF plugin adding a command to manage rpm packages from given directory.
#
# Copyright (C) 2015 Igor Gnatenko
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger

import dnf
import dnf.cli
import logging
import os
import hawkey


class RepoManage(dnf.Plugin):
    """Plugin shell whose only job is registering the repomanage command."""

    name = "repomanage"

    def __init__(self, base, cli):
        super(RepoManage, self).__init__(base, cli)
        # cli is None when DNF runs without a command line (API use).
        if cli is not None:
            cli.register_command(RepoManageCommand)


class RepoManageCommand(dnf.cli.Command):
    """The ``repomanage`` command: list the newest or oldest packages of a
    directory of rpms (or a repo at --path), module-stream aware."""

    aliases = ("repomanage",)
    summary = _("Manage a directory of rpm packages")

    def pre_configure(self):
        # Quiet down ordinary output so the result list is script-friendly.
        if not self.opts.verbose and not self.opts.quiet:
            self.cli.redirect_logger(stdout=logging.WARNING, stderr=logging.INFO)

    def configure(self):
        if not self.opts.verbose and not self.opts.quiet:
            self.cli.redirect_repo_progress()
        demands = self.cli.demands
        demands.sack_activation = True

    def run(self):
        """Collect packages from --path and print the kept/old subset."""
        if self.opts.new and self.opts.old:
            raise dnf.exceptions.Error(_("Pass either --old or --new, not both!"))
        if self.opts.new and self.opts.oldonly:
            raise dnf.exceptions.Error(_("Pass either --oldonly or --new, not both!"))
        if self.opts.old and self.opts.oldonly:
            raise dnf.exceptions.Error(_("Pass either --old or --oldonly, not both!"))
        # Default action when neither --old nor --oldonly is given.
        if not self.opts.old and not self.opts.oldonly:
            self.opts.new = True

        verfile = {}      # {nevra tuple: [file paths]}
        pkgdict = {}      # {(name, arch): [packages, sorted ascending]}
        module_dict = {}  # {NameStream: {Version: [modules]}}
        all_modular_artifacts = set()

        keepnum = int(self.opts.keep) # the number of items to keep

        try:
            # Try to treat --path as a repository first (uses its repodata).
            REPOMANAGE_REPOID = "repomanage_repo"
            repo_conf = self.base.repos.add_new_repo(REPOMANAGE_REPOID, self.base.conf, baseurl=[self.opts.path])
            # Always expire the repo, otherwise repomanage could use cached metadata and give identical results
            # for multiple runs even if the actual repo changed in the meantime
            repo_conf._repo.expire()
            self.base._add_repo_to_sack(repo_conf)
            if dnf.base.WITH_MODULES:
                self.base._setup_modular_excludes()

                # Prepare modules
                module_packages = self.base._moduleContainer.getModulePackages()

                for module_package in module_packages:
                    # Even though we load only REPOMANAGE_REPOID other modules can be loaded from system
                    # failsafe data automatically, we don't want them affecting repomanage results so ONLY
                    # use modules from REPOMANAGE_REPOID.
                    if module_package.getRepoID() == REPOMANAGE_REPOID:
                        all_modular_artifacts.update(module_package.getArtifacts())
                        module_dict.setdefault(module_package.getNameStream(), {}).setdefault(
                            module_package.getVersionNum(), []).append(module_package)

        except dnf.exceptions.RepoError:
            # Not a repo: fall back to scanning the directory for *.rpm files.
            rpm_list = []
            rpm_list = self._get_file_list(self.opts.path, ".rpm")
            if len(rpm_list) == 0:
                raise dnf.exceptions.Error(_("No files to process"))

            self.base.reset(sack=True, repos=True)
            self.base.fill_sack(load_system_repo=False, load_available_repos=False)
            try:
                self.base.add_remote_rpms(rpm_list, progress=self.base.output.progress)
            except IOError:
                logger.warning(_("Could not open {}").format(', '.join(rpm_list)))

        # Prepare regular packages
        query = self.base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).available()
        packages = [x for x in query.filter(pkg__neq=query.filter(nevra_strict=all_modular_artifacts)).available()]
        packages.sort()

        for pkg in packages:
            na = (pkg.name, pkg.arch)
            if na in pkgdict:
                if pkg not in pkgdict[na]:
                    pkgdict[na].append(pkg)
            else:
                pkgdict[na] = [pkg]

            nevra = self._package_to_nevra(pkg)
            if nevra in verfile:
                verfile[nevra].append(self._package_to_path(pkg))
            else:
                verfile[nevra] = [self._package_to_path(pkg)]

        outputpackages = []
        # modular packages
        keepnum_latest_stream_artifacts = set()

        if self.opts.new:
            # regular packages: keep the newest `keepnum` EVRs per (name, arch)
            for (n, a) in pkgdict.keys():
                evrlist = pkgdict[(n, a)]

                newevrs = evrlist[-keepnum:]

                for package in newevrs:
                    nevra = self._package_to_nevra(package)
                    for fpkg in verfile[nevra]:
                        outputpackages.append(fpkg)

            # modular packages: keep the newest `keepnum` stream versions
            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())

                new_sorted_stream_versions = sorted_stream_versions[-keepnum:]

                for i in new_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        keepnum_latest_stream_artifacts.update(set(stream.getArtifacts()))

        if self.opts.old:
            # regular packages: everything but the newest `keepnum` EVRs
            for (n, a) in pkgdict.keys():
                evrlist = pkgdict[(n, a)]

                oldevrs = evrlist[:-keepnum]

                for package in oldevrs:
                    nevra = self._package_to_nevra(package)
                    for fpkg in verfile[nevra]:
                        outputpackages.append(fpkg)

            # modular packages
            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())

                old_sorted_stream_versions = sorted_stream_versions[:-keepnum]

                for i in old_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        keepnum_latest_stream_artifacts.update(set(stream.getArtifacts()))

        if self.opts.oldonly:
            # regular packages: old EVRs only
            for (n, a) in pkgdict.keys():
                evrlist = pkgdict[(n, a)]

                oldevrs = evrlist[:-keepnum]

                for package in oldevrs:
                    nevra = self._package_to_nevra(package)
                    for fpkg in verfile[nevra]:
                        outputpackages.append(fpkg)

            # modular packages: report artifacts of old stream versions only
            # when they are not also shipped by a kept (newer) stream version.
            keepnum_newer_stream_artifacts = set()

            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())

                new_sorted_stream_versions = sorted_stream_versions[-keepnum:]

                for i in new_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        keepnum_newer_stream_artifacts.update(set(stream.getArtifacts()))

            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())

                old_sorted_stream_versions = sorted_stream_versions[:-keepnum]

                for i in old_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        for artifact in stream.getArtifacts():
                            if artifact not in keepnum_newer_stream_artifacts:
                                keepnum_latest_stream_artifacts.add(artifact)

        modular_packages = [self._package_to_path(x) for x in query.filter(pkg__eq=query.filter(nevra_strict=keepnum_latest_stream_artifacts)).available()]
        outputpackages = outputpackages + modular_packages
        outputpackages.sort()
        if self.opts.space:
            print(" ".join(outputpackages))
        else:
            for pkg in outputpackages:
                print(pkg)

    @staticmethod
    def set_argparser(parser):
        """Register repomanage's command-line options."""
        parser.add_argument("-o", "--old", action="store_true",
                            help=_("Print the older packages"))
        parser.add_argument("-O", "--oldonly", action="store_true",
                            help=_("Print the older packages. Exclude the newest packages."))
        parser.add_argument("-n", "--new", action="store_true",
                            help=_("Print the newest packages"))
        parser.add_argument("-s", "--space", action="store_true",
                            help=_("Space separated output, not newline"))
        parser.add_argument("-k", "--keep", action="store", metavar="KEEP",
                            help=_("Newest N packages to keep - defaults to 1"),
                            default=1, type=int)
        parser.add_argument("path", action="store",
                            help=_("Path to directory"))

    @staticmethod
    def _get_file_list(path, ext):
        """Return all files in path matching ext

        return list object
        """
        filelist = []
        for root, dirs, files in os.walk(path):
            for f in files:
                if os.path.splitext(f)[1].lower() == str(ext):
                    filelist.append(os.path.join(root, f))

        return filelist

    def _package_to_path(self, pkg):
        # With a loaded repo pkg.location is repo-relative; join it with the
        # user-supplied path. Without repos (rpm-file fallback) it is usable as-is.
        if len(self.base.repos):
            return os.path.join(self.opts.path, pkg.location)
        else:
            return pkg.location

    @staticmethod
    def _package_to_nevra(pkg):
        """Return the (name, epoch, version, release, arch) key for *pkg*."""
        return (pkg.name, pkg.epoch, pkg.version, pkg.release, pkg.arch)
changelog.py000064400000011547150402642240007054 0ustar00# changelog.py
# DNF plugin adding a command changelog.
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals

import argparse
import collections
import dateutil.parser

from dnfpluginscore import _, P_, logger
import dnf
import dnf.cli


def validate_date(val):
    """argparse ``type=`` callback: parse *val* into a datetime.

    Parsing is fuzzy, so extra words around the date are tolerated.
    Raises argparse.ArgumentTypeError when *val* is not a recognizable date.
    """
    try:
        parsed = dateutil.parser.parse(val, fuzzy=True)
    except (ValueError, TypeError, OverflowError):
        raise argparse.ArgumentTypeError(_('Not a valid date: "{0}".').format(val))
    return parsed


@dnf.plugin.register_command
class ChangelogCommand(dnf.cli.Command):
    """Implement the ``dnf changelog`` command.

    Prints changelog entries of available packages, optionally narrowed
    by date (--since), by number of entries (--count) or to entries newer
    than the installed version (--upgrades).
    """
    aliases = ('changelog',)
    summary = _('Show changelog data of packages')

    @staticmethod
    def set_argparser(parser):
        """Register the mutually exclusive filter options and package args."""
        filter_group = parser.add_mutually_exclusive_group()
        filter_group.add_argument(
            '--since', metavar="DATE", default=None,
            type=validate_date,
            # fixed typo: "ambiguosity" -> "ambiguity"
            help=_('show changelog entries since DATE. To avoid ambiguity, '
                   'YYYY-MM-DD format is recommended.'))
        filter_group.add_argument(
            '--count', default=None, type=int,
            help=_('show given number of changelog entries per package'))
        filter_group.add_argument(
            '--upgrades', default=False, action='store_true',
            help=_('show only new changelog entries for packages, that provide an '
                   'upgrade for some of already installed packages.'))
        parser.add_argument("package", nargs='*', metavar=_('PACKAGE'))

    def configure(self):
        """Request repo metadata, sack activation and changelog data.

        Changelog metadata is not downloaded by default, hence the
        explicit demand.
        """
        demands = self.cli.demands
        demands.available_repos = True
        demands.sack_activation = True
        demands.changelogs = True

    def query(self):
        """Build the query of packages whose changelogs will be printed.

        Honors the package specs on the command line, the --repo option
        and the --upgrades switch.  Returns a hawkey query.
        """
        q = self.base.sack.query()
        if self.opts.package:
            # Start from an empty query and union in the best match for
            # each command-line package spec.
            q.filterm(empty=True)
            for pkg in self.opts.package:
                pkg_q = dnf.subject.Subject(pkg, ignore_case=True).get_best_query(
                    self.base.sack, with_nevra=True,
                    with_provides=False, with_filenames=False)
                if self.opts.repo:
                    pkg_q.filterm(reponame=self.opts.repo)
                if pkg_q:
                    q = q.union(pkg_q.latest())
                else:
                    logger.info(_('No match for argument: %s') % pkg)
        elif self.opts.repo:
            q.filterm(reponame=self.opts.repo)
        if self.opts.upgrades:
            q = q.upgrades()
        else:
            q = q.available()
        return q

    def by_srpm(self, packages):
        """Group *packages* by their (source name, evr) pair.

        Binary packages built from the same source rpm share identical
        changelogs, so they are printed together.  Returns an OrderedDict
        mapping (source_name, evr) to the list of matching packages.
        """
        by_srpm = collections.OrderedDict()
        for pkg in sorted(packages):
            by_srpm.setdefault((pkg.source_name or pkg.name, pkg.evr), []).append(pkg)
        return by_srpm

    def filter_changelogs(self, package):
        """Return *package*'s changelog entries narrowed per the options.

        --upgrades: only entries newer than the installed version;
        --count N: the N most recent entries;
        --since DATE: entries with timestamp on or after DATE;
        otherwise all entries.
        """
        if self.opts.upgrades:
            return self.base.latest_changelogs(package)
        elif self.opts.count:
            return package.changelogs[:self.opts.count]
        elif self.opts.since:
            return [chlog for chlog in package.changelogs
                    if chlog['timestamp'] >= self.opts.since.date()]
        else:
            return package.changelogs

    def run(self):
        """Print a heading describing the active filter, then the
        changelogs grouped per source rpm."""
        if self.opts.since:
            logger.info(_('Listing changelogs since {}').format(self.opts.since))
        elif self.opts.count:
            logger.info(P_('Listing only latest changelog',
                           'Listing {} latest changelogs',
                           self.opts.count).format(self.opts.count))
        elif self.opts.upgrades:
            logger.info(
                _('Listing only new changelogs since installed version of the package'))
        else:
            logger.info(_('Listing all changelogs'))

        by_srpm = self.by_srpm(self.query())
        for name in by_srpm:
            # All packages in the group share one changelog; print the
            # heading with every nevra, then the first package's entries.
            print(_('Changelogs for {}').format(
                ', '.join(sorted({str(pkg) for pkg in by_srpm[name]}))))
            for chlog in self.filter_changelogs(by_srpm[name][0]):
                print(self.base.format_changelog(chlog))
__pycache__/builddep.cpython-36.pyc000064400000016426150402642240013202 0ustar003

�f�$�@s�ddlmZddlmZddlmZmZddlZddlZddlZddl	Zddl
ZddlZddlZ
ddlZddlZddlZddlZejjGdd�dejj��ZdS)�)�absolute_import)�unicode_literals)�_�loggerNcs�eZdZdZdZee�Zed�Z�fdd�Zdd�Z	d	d
�Z
edd��Zd
d�Z
dd�Zdd�Zedd��Zdd�Zdd�Zdd�Zdd�Z�ZS)�BuildDepCommand�builddep�	build-depz3Install build dependencies for package or spec filez[PACKAGE|PACKAGE.spec]cs(tt|�j|�tjjj�|_g|_dS)N)	�superr�__init__�dnf�rpmZtransactionZinitReadOnlyTransaction�_rpm_ts�tempdirs)�self�cli)�	__class__��/usr/lib/python3.6/builddep.pyr
/szBuildDepCommand.__init__cCsx|jD]}tj|�qWdS)N)r�shutilZrmtree)r�temp_dirrrr�__del__4szBuildDepCommand.__del__cCs�tjjj|�}|ddkr |jStjj�}tjdd�}t	jj
|t	jj|��}|jj
|�t|d�}zFy|j|jjj||j��Wn$tk
r�}z�WYdd}~XnXWd|j�X|S)	z�
        In case pkgspec is a remote URL, download it to a temporary location
        and use the temporary file instead.
        r�file�Z
dnf_builddep_)�prefixzwb+N)rr)rZpycompZurlparse�path�libdnfZrepoZ
Downloader�tempfileZmkdtemp�os�join�basenamer�append�openZdownloadURL�baseZconfZ_config�fileno�RuntimeError�close)r�pkgspec�locationZ
downloaderrZ	temp_fileZtemp_fo�exrrr�_download_remote_file8s


z%BuildDepCommand._download_remote_filec	Cs�dd�}|jdddtd�d�|jdd	d
gd|td�d
�|jdddtd�d�|j�}|jddtd�d�|jddtd�d�dS)NcSs:|r|jdd�ng}t|�dkr6td�|}tj|��|S)N��z&'%s' is not of the format 'MACRO EXPR')�split�lenr�argparseZArgumentTypeError)�argZarglist�msgrrr�	macro_defRs

z0BuildDepCommand.set_argparser.<locals>.macro_def�packages�+�packagez"packages with builddeps to install)�nargs�metavar�helpz-Dz--definer z'MACRO EXPR'z$define a macro for spec file parsing)�action�defaultr6�typer7z--skip-unavailable�
store_trueFz5skip build dependencies not available in repositories)r8r9r7z--specz)treat commandline arguments as spec files)r8r7z--srpmz)treat commandline arguments as source rpm)�add_argumentrZadd_mutually_exclusive_group)�parserr1Zptyperrr�
set_argparserPs

zBuildDepCommand.set_argparsercCs|jjsd|j_dS)N�error)�optsZrpmverbosity)rrrr�
pre_configurefszBuildDepCommand.pre_configurecCsr|jj}d|_d|_d|_d|_|jjp.|jjsnx<|jj	D]0}|j
d�pZ|j
d�pZ|j
d�s:|jjj
�Pq:WdS)NTz.src.rpmz
.nosrc.rpmz.spec)r�demandsZavailable_reposZ	resolvingZ	root_userZsack_activationr@�spec�srpmr2�endswithr"ZreposZenable_source_repos)rrBr&rrr�	configurejs


zBuildDepCommand.configurecCs\tjjj|j�}x$|jjD]}tj|d|d�qWd}x�|jj	D]�}|j
|�}yl|jjrh|j|�nT|jj
r||j|�n@|jd�s�|jd�r�|j|�n |jd�r�|j|�n
|j|�WqDtjjk
�r}z:x$|j�D]}tjtd�j|��q�Wtj|�d}WYdd}~XqDXqDWx |jjD]}tj|d��q*W|�rXtjjtd	���dS)
Nrr*Fz.src.rpmz	nosrc.rpmz.speczRPM: {}Tz!Some packages could not be found.)rZyumZrpmtransZRPMTransactionr"r@�definerZaddMacror2r)rD�	_src_depsrC�
_spec_depsrE�_remote_deps�
exceptions�ErrorZmessagesrr?r�formatZdelMacro)rZrpmlogZmacroZ
pkg_errorsr&�e�linerrr�runzs2


zBuildDepCommand.runcCs|j�dd�S)Nr+)ZDNEVR)Zrpm_deprrr�_rpm_dep2reldep_str�sz#BuildDepCommand._rpm_dep2reldep_strcCs�tjj|jj�}|j|d�|j�}|rX|jd�rXtjj|jj�}|j|d�|j�}|r�|jd�r�td�}t	j
||�|jjdkS|r�|jj
|�}|r�x|D]}tjj|�q�W|jjj|dd�dS)	N)Zprovides�/)r�(z$No matching package to install: '%s'TF)ZselectZoptional)rZselectorZSelectorr"�sack�setZmatches�
startswithrr�warningr@Zskip_unavailableZ_sltr_matches_installedZ_msg_installedZ_goalZinstall)r�
reldep_strZsltr�foundr0Zalready_instr4rrr�_install�s$
zBuildDepCommand._installc
Cs�tj|tj�}y|jj|�}WnRtjk
rp}z4t|�dkrJtd�|}tj	|�t
jj|��WYdd}~XnXtj	|�|j
d�}d}x0|D](}|j|�}|jd�r�q�||j|�M}q�W|s�td�}	t
jj|	��|jjr�tjtd��dS)Nzerror reading package headerz2Failed to open: '%s', not a valid source rpm file.ZrequirenameTzrpmlib(zNot all dependencies satisfiedzJWarning: -D or --define arguments have no meaning for source rpm packages.)rr!�O_RDONLYr
ZhdrFromFdnorr?�strrr%rrKrLZdsFromHeaderrQrVrZr@rGrrW)
rZsrc_fn�fd�hrN�ds�done�deprX�errrrrrH�s*





zBuildDepCommand._src_depsc	Cs�ytj|�}Wn>tk
rL}z"td�||f}tjj|��WYdd}~XnXd}x.tj|jd�D]}|j	|�}||j
|�M}qbW|s�td�}tjj|��dS)Nz/Failed to open: '%s', not a valid spec file: %sT�requireszNot all dependencies satisfied)rrC�
ValueErrorrrrKrLr_ZsourceHeaderrQrZ)	rZspec_fnrCr(r0r`rarXrbrrrrI�s

zBuildDepCommand._spec_depsc	Cs�tjj|�j|jj�jdd�}tdd�|D��}|jjj�j	�j||gdd�j
�j�}|sptjj
td�|��d}x.|D]&}x |jD]}||jt|��M}q�WqzW|s�td�}tjj
|��dS)	N�src)Z	arch__neqcSsh|]
}|j�qSr)Zsource_name)�.0�pkgrrr�	<setcomp>�sz/BuildDepCommand._remote_deps.<locals>.<setcomp>)�nameZarchzno package matched: %sTzNot all dependencies satisfied)rZsubjectZSubjectZget_best_queryr"rT�filter�listZquery�	availableZlatestrPrKrLrrcrZr\)	rr4rlZsourcenamesZpkgsr`rgZreqrbrrrrJ�s
zBuildDepCommand._remote_deps)rr)�__name__�
__module__�__qualname__�aliasesr0rZsummaryZusager
rr)�staticmethodr>rArFrPrQrZrHrIrJ�
__classcell__rr)rrr's !r)Z
__future__rrZdnfpluginscorerrr.rZdnf.cliZdnf.exceptionsZdnf.rpm.transactionZdnf.yum.rpmtransZlibdnf.reporrrrrZpluginZregister_commandrZCommandrrrrr�<module>s__pycache__/groups_manager.cpython-36.pyc000064400000020721150402642240014414 0ustar003

�f�4�@s�ddlmZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
mZmZddl
Z
ddlZ
dZejdje��Zejd�Zdddd	�Zd
d�Zdd
�Zdd�Ze
jjGdd�de
jj��ZdS)�)�absolute_import)�unicode_literalsN)�_�loggerz
-a-z0-9_.:z^[{}]+$z^[-a-zA-Z0-9_.@]+$T)Zdefault_explicitZuservisible_explicitZempty_groupscCstj|�stjtd���|S)zgroup id validatorzInvalid group id)�RE_GROUP_ID�match�argparse�ArgumentTypeErrorr)�value�r�$/usr/lib/python3.6/groups_manager.py�
group_id_type.s
r
cCsN|jdd�}t|�dkr&tjtd���|\}}tj|�sFtjtd���||fS)ztranslated texts validator�:�z6Invalid translated data, should be in form 'lang:text'z*Invalid/empty language for translated data)�split�lenrr	r�RE_LANGr)r
�data�lang�textrrr�translation_type5s

rcCs:|j�}tjdjt�d|�}|s6tjjtd�j|���|S)z#generate group id based on its namez[^{}]�zFCan't generate group id from '{}'. Please specify group id using --id.)	�lower�re�sub�format�RE_GROUP_ID_VALID�dnf�cli�CliErrorr)r�group_idrrr�
text_to_idAsr!csdeZdZdZed�Z�fdd�Zedd��Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Z�ZS)�GroupsManagerCommand�groups-managerz$create and edit groups metadata filecstt|�j|�tj�|_dS)N)�superr"�__init__�libcomps�Comps�comps)�selfr)�	__class__rrr%QszGroupsManagerCommand.__init__cCs�|jddgdtd�d�|jddgdtd�d�|jddtd	�d
�|jddd
td�d�|jdttd�d�|jddtd�d�|jdtd�d�|jdttd�d�|jddgdttd�d�|jddgdttd �d�|j�}|jd!d"ddtd#�d$�|jd%d"d&dtd'�d$�|j�}|jd(dtd)�d*�|jd+dtd,�d*�|jd-dd
td.�d�|jd/dtd0�d*�|jd1d2d3td4�d5�dS)6Nz--load�appendz	COMPS.XMLzload groups metadata from file)�action�default�metavar�helpz--savezsave groups metadata to filez--mergez%load and save groups metadata to file)r.r/z--print�
store_trueFz#print the result metadata to stdout)r,r-r/z--idzgroup id)�typer/z-nz--namez
group name)r/z
--descriptionzgroup descriptionz--display-orderzgroup display orderz--translated-namez	LANG:TEXTztranslated name for the group)r,r-r.r1r/z--translated-descriptionz$translated description for the groupz--user-visible�user_visiblez%make the group user visible (default))�destr,r-r/z--not-user-visibleZstore_falsezmake the group user invisiblez--mandatoryz%add packages to the mandatory section)r,r/z
--optionalz$add packages to the optional sectionz--removez5remove packages from the group instead of adding themz--dependenciesz-include also direct dependencies for packages�packages�*ZPACKAGEzpackage specification)�nargsr.r/)�add_argumentrr
�intrZadd_mutually_exclusive_group)�parserZvisibleZsectionrrr�
set_argparserUsR








z"GroupsManagerCommand.set_argparsercCs�|jj}|jjr"d|_d|_d|_|jjrP|jjj	d|jj�|jj
j|jj�|jjs�|jj
s�|jjs�|jjs�|jjdk	s�|jjr�|jjr�|jjr�tjjtd���dS)NTFrz;Can't edit group without specifying it (use --id or --name))r�demands�optsr4Zsack_activationZavailable_reposZload_system_repo�merge�load�insert�saver+�description�
display_order�translated_name�translated_descriptionr2�id�namerrr)r)r;rrr�	configure�s"zGroupsManagerCommand.configurecCs �x|jjD�]
}tj�}yp|jd�r~tj|��F}tjdd�}z$t	j
||�|j�|j|j
�Wdtj|j
�XWdQRXn
|j|�Wn~tttjfk
�r}zXt�}x2|j�D]&}||kr�q�tj|j��|j|�q�Wtjjtd�j||���WYdd}~XqX|j|7_qWdS)zm
        Loads all input xml files.
        Returns True if at least one file was successfuly loaded
        z.gzF)�deleteNzCan't load file "{}": {})r<r>r&r'�endswith�gzip�open�tempfileZNamedTemporaryFile�shutilZcopyfileobj�closeZ	fromxml_frF�os�unlink�IOError�OSErrorZParserError�setZget_last_errorsr�error�strip�addr�
exceptions�Errorrrr()r)�	file_nameZ
file_compsZgz_fileZ	temp_file�err�seenrTrrr�load_input_files�s,
$z%GroupsManagerCommand.load_input_filescCs�x�|jjD]�}y|jj|td�}Wn*tjk
rL}z|g}WYdd}~XnX|r
x"|dd�D]}tj|j	��q`Wt
jjt
d�j||dj	����q
WdS)N)�xml_options�zCan't save file "{}": {}���r_)r<r@r(Zxml_f�COMPS_XML_OPTIONSr&ZXMLGenErrorrrTrUrrWrXrr)r)rY�errorsrZrrr�save_output_files�sz&GroupsManagerCommand.save_output_filescCs\d}|r*x |jjD]}|j|kr|}PqW|dkrX|rXx |jjD]}|j|kr@|}Pq@W|S)zl
        Try to find group according to command line parameters - first by id
        then by name.
        N)r(�groupsrErF)r)r rF�groupZgrprrr�
find_group�s

zGroupsManagerCommand.find_groupcCs�dd�}|jjr|jj|_|jjr,|jj|_|jjr>|jj|_|jjdk	rT|jj|_|jjrj||jj�|_|jj	r�||jj	�|_
|jj�r�t�}xZ|jjD]N}t
jj|�}|j|jjdddd�j�}|s�tjtd�j|��q�|j|�q�W|jj�r2t�}x|D]}|j|j��qW|j|jjj�j|d��d	d
�|D�}	|jj�r�x�|	D].}
x&|j|
tj d�D]}|jj|��qfW�qPWnd|jj!�r�tj"}n|jj#�r�tj$}ntj%}x8t&|	�D],}
|j|
|d��s�|jj'tj(|
|d���q�WdS)zE
        Set attributes and package lists for selected group
        cSs&tj�}x|D]\}}|||<qW|S)N)r&ZStrDict)ZlstZstr_dictrrrrr�langlist_to_strdict�sz<GroupsManagerCommand.edit_group.<locals>.langlist_to_strdictNTF)Z
with_nevraZ
with_providesZwith_filenameszNo match for argument: {})ZprovidescSsh|]
}|j�qSr)rF)�.0�pkgrrr�	<setcomp>sz2GroupsManagerCommand.edit_group.<locals>.<setcomp>)rFr1))r<rFrAZdescrBr2ZuservisiblerCZname_by_langrDZdesc_by_langr4rSrZsubjectZSubjectZget_best_query�baseZsackZlatestrZwarningrr�updateZdependenciesZrequiresZqueryZfilterm�removeZpackages_matchr&ZPACKAGE_TYPE_UNKNOWNZ	mandatoryZPACKAGE_TYPE_MANDATORYZoptionalZPACKAGE_TYPE_OPTIONALZPACKAGE_TYPE_DEFAULT�sortedr+ZPackage)r)rdrfr4Zpkg_specZsubj�qZrequirementsrhZ	pkg_namesZpkg_nameZpkg_typerrr�
edit_group�sT










zGroupsManagerCommand.edit_groupcCs|j�|jjs|jjr�|j|jj|jjd�}|dkr�|jjrNtjjt	d���t
j�}|jjrt|jj|_|jj|_nD|jjr�t|jj�}|j|dd�r�tj
jt	d�j||jj���||_|jjj|�|j|�|j�|jjs�|jjr�t|jjtd��dS)N)r rFz-Can't remove packages from non-existent groupzRGroup id '{}' generated from '{}' is duplicit. Please specify group id using --id.)r])r\r<rErFrerlrrWrXrr&ZGroupr!rrrr(rcr+rorb�printr@Zxml_strr`)r)rdr rrr�run!s,

zGroupsManagerCommand.run)r#)�__name__�
__module__�__qualname__�aliasesrZsummaryr%�staticmethodr:rGr\rbrerorq�
__classcell__rr)r*rr"Ls1$=r")Z
__future__rrrrJr&rOrrMrLZdnfpluginscorerrrZdnf.clir�compilerrrr`r
rr!ZpluginZregister_commandrZCommandr"rrrr�<module>s,
__pycache__/changelog.cpython-36.pyc000064400000010117150402642240013330 0ustar003

�gt`g�@s|ddlmZddlmZddlZddlZddlZddlmZm	Z	m
Z
ddlZddlZdd�Z
ejjGdd�dejj��ZdS)	�)�absolute_import)�unicode_literalsN)�_�P_�loggerc
CsDytjj|dd�Stttfk
r>tjtd�j	|���YnXdS)NT)ZfuzzyzNot a valid date: "{0}".)
�dateutil�parser�parse�
ValueError�	TypeError�
OverflowError�argparseZArgumentTypeErrorr�format)�val�r�/usr/lib/python3.6/changelog.py�
validate_date!src@sLeZdZdZed�Zedd��Zdd�Zdd�Z	d	d
�Z
dd�Zd
d�ZdS)�ChangelogCommand�	changelogzShow changelog data of packagescCsd|j�}|jdddttd�d�|jddttd�d�|jdd	d
td�d�|jd
dtd�d�dS)Nz--sinceZDATEzZshow changelog entries since DATE. To avoid ambiguosity, YYYY-MM-DD format is recommended.)�metavar�default�type�helpz--countz2show given number of changelog entries per package)rrrz
--upgradesF�
store_truezmshow only new changelog entries for packages, that provide an upgrade for some of already installed packages.)r�actionr�package�*ZPACKAGE)�nargsr)Zadd_mutually_exclusive_group�add_argumentrr�int)rZfilter_grouprrr�
set_argparser-szChangelogCommand.set_argparsercCs|jj}d|_d|_d|_dS)NT)�cli�demandsZavailable_reposZsack_activation�
changelogs)�selfr"rrr�	configure>szChangelogCommand.configurecCs�|jjj�}|jjr�|jdd�x�|jjD]d}tjj|dd�j	|jjdddd�}|jj
rh|j|jj
d�|r||j|j��}q*t
jtd�|�q*Wn|jj
r�|j|jj
d�|jjr�|j�}n|j�}|S)NT)�empty)Zignore_caseF)Z
with_nevraZ
with_providesZwith_filenames)ZreponamezNo match for argument: %s)�baseZsack�query�optsrZfilterm�dnfZsubjectZSubjectZget_best_queryZrepo�unionZlatestr�infor�upgradesZ	available)r$�q�pkgZpkg_qrrrr(Ds$

zChangelogCommand.querycCs>tj�}x0t|�D]$}|j|jp$|j|jfg�j|�qW|S)N)�collections�OrderedDict�sorted�
setdefaultZsource_name�nameZevr�append)r$Zpackages�by_srpmr/rrrr6Zs$zChangelogCommand.by_srpmcsT�jjr�jj|�S�jjr.|jd�jj�S�jjrJ�fdd�|jD�S|jSdS)Ncs$g|]}|d�jjj�kr|�qS)Z	timestamp)r)�sinceZdate)�.0�chlog)r$rr�
<listcomp>fsz6ChangelogCommand.filter_changelogs.<locals>.<listcomp>)r)r-r'Zlatest_changelogs�countr#r7)r$rr)r$r�filter_changelogs`sz"ChangelogCommand.filter_changelogscCs�|jjr"tjtd�j|jj��nP|jjrLtjtdd|jj�j|jj��n&|jjrdtjtd��ntjtd��|j	|j
��}xb|D]Z}ttd�jdjt
dd	�||D�����x*|j||d
�D]}t|jj|��q�Wq�WdS)NzListing changelogs since {}zListing only latest changelogzListing {} latest changelogszBListing only new changelogs since installed version of the packagezListing all changelogszChangelogs for {}z, cSsh|]}t|��qSr)�str)r8r/rrr�	<setcomp>{sz'ChangelogCommand.run.<locals>.<setcomp>r)r)r7rr,rrr;rr-r6r(�print�joinr2r<r'Zformat_changelog)r$r6r4r9rrr�runks 

 zChangelogCommand.runN)r)
�__name__�
__module__�__qualname__�aliasesrZsummary�staticmethodr r%r(r6r<rArrrrr(sr)Z
__future__rrr
r0Zdateutil.parserrZdnfpluginscorerrrr*Zdnf.clirZpluginZregister_commandr!ZCommandrrrrr�<module>s__pycache__/repoclosure.cpython-36.pyc000064400000010500150402642240013737 0ustar003

�gt`��@sVddlmZddlmZddlmZddlZGdd�dej�ZGdd�dej	j
�ZdS)	�)�absolute_import)�unicode_literals)�_Ncs eZdZdZ�fdd�Z�ZS)�RepoClosure�repoclosurecs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoClosureCommand)�self�base�cli)�	__class__��!/usr/lib/python3.6/repoclosure.pyr!szRepoClosure.__init__)�__name__�
__module__�__qualname__�namer�
__classcell__rr)r
rrsrc@s>eZdZdZed�Zdd�Zdd�Zd
dd	�Ze	d
d��Z
dS)r	rz:Display a list of unresolved dependencies for repositoriescCsd|jj}d|_d|_|jjr`xB|jjj�D]2}|j	|jjkrT|j	|jj
krT|j�q*|j�q*WdS)NT)
r�demandsZsack_activationZavailable_repos�opts�repor�repos�all�id�check�disable�enable)r
rrrrr�	configure,s
zRepoClosureCommand.configurecCs�|jjr|j|jj�}n|j�}xRt|j��D]B}tdjt|�|j��td�x||D]}tdj|��qZWq.Wt	|�dkr�t
d�}tjj
|��dS)Nzpackage: {} from {}z  unresolved deps:z    {}rz/Repoclosure ended with unresolved dependencies.)r�arches�_get_unresolved�sorted�keys�print�format�str�reponame�lenr�dnf�
exceptions�Error)r
�
unresolved�pkgZdep�msgrrr�run7szRepoClosureCommand.runNcsLi}t�}|jjr�|jjj�jdd��|jjj�jdd�}xv|jjj�D]D}�j	|jjj�j|j
d�j���|j	|jjj�j|j
d�j��}qHWn |jjj�j��|jjj�j�}|jj
�rN|jjj�jdd�}g}xT|jj
D]H}tjj|�}	|j|	j|jjdddd��}
|
�r|j	|
�}q�|j|�q�W|�rJtjjtd�dj|���|}|jj�rh|j|jjd�|dk	�r~|j|d�|jjj�r��jdd	��j�|j�xf|D]^}t�||<xL|jD]B}t|�}|jd
��s�|jd��r�q�|j |�||j |��q�W�q�Wt�fdd
�|D����fdd�|j!�D�}
dd�|
j!�D�S)NT)�empty)r&F)Z
with_nevraZ
with_providesZwith_filenameszno package matched: %sz, )�arch)Zlatest_per_archz	solvable:zrpmlib(c3s|]}�j|d�s|VqdS))ZprovidesN)�filter)�.0�x)�	availablerr�	<genexpr>�sz5RepoClosureCommand._get_unresolved.<locals>.<genexpr>cs(i|] \}}t�fdd�|D��|�qS)c3s|]}|�kr|VqdS)Nr)r2r3)�unresolved_depsrrr5�sz@RepoClosureCommand._get_unresolved.<locals>.<dictcomp>.<genexpr>)�set)r2�k�v)r6rr�
<dictcomp>�sz6RepoClosureCommand._get_unresolved.<locals>.<dictcomp>cSsi|]\}}|r||�qSrr)r2r8r9rrrr:�s)"r7rZnewestrZsackZqueryr1rZiter_enabled�unionrZlatestr4�pkglistr(ZsubjectZSubject�intersectionZget_best_query�appendr)r*r�joinrZfiltermZconfZbestZapplyZrequiresr%�
startswith�add�items)r
r0r+ZdepsZto_checkrZ	pkglist_q�errorsr,ZsubjZpkg_qZreqZreqnameZunresolved_transitionr)r4r6rr Es\ &






z"RepoClosureCommand._get_unresolvedcCs`|jdgddtd�d�|jdgdtd�d�|jd	d
dtd�d
�|jdgdtd�dd�dS)Nz--archr>rzBcheck packages of the given archs, can be specified multiple times)�default�action�dest�helpz--checkzSpecify repositories to check)rDrErGz-nz--newest�
store_truez+Check only the newest packages in the repos)rErGz--pkgz#Check closure for this package onlyr<)rDrErGrF)�add_argumentr)�parserrrr�
set_argparser�s


z RepoClosureCommand.set_argparser)r)N)rrr�aliasesrZsummaryrr.r �staticmethodrKrrrrr	(s
Qr	)Z
__future__rrZdnfpluginscorerZdnf.clir(ZPluginrrZCommandr	rrrr�<module>s
__pycache__/debug.cpython-36.opt-1.pyc000064400000025101150402642240013425 0ustar003

�gt`1�@s�ddlmZddlmZddlmZddlmZmZddlZddl	Zddl
Z
ddlZddlZddl
Z
ddlZddlZdZGdd�dej�ZGd	d
�d
ejj�ZGdd�dejj�Zd
d�Zdd�Zdd�ZdS)�)�absolute_import)�unicode_literals)�ucd)�_�loggerNzdnf-debug-dump version 1
cs eZdZdZ�fdd�Z�ZS)�Debug�debugcsDtt|�j||�||_||_|jdk	r@|jjt�|jjt�dS)N)�superr�__init__�base�cliZregister_command�DebugDumpCommand�DebugRestoreCommand)�selfrr)�	__class__��/usr/lib/python3.6/debug.pyr
)s
zDebug.__init__)�__name__�
__module__�__qualname__�namer
�
__classcell__rr)rrr%srcsteZdZdZed�Z�fdd�Zdd�Zedd��Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Z�ZS)r
�
debug-dumpz5dump information about installed rpm packages to filecstt|�j|�d|_dS)N)r	r
r
�	dump_file)rr)rrrr
7szDebugDumpCommand.__init__cCsd|jj_d|jj_dS)NT)r�demands�sack_activation�available_repos)rrrr�	configure;s
zDebugDumpCommand.configurecCs.|jdddtd�d�|jddtd�d	�dS)
Nz	--norepos�
store_trueFz/do not attempt to dump the repository contents.)�action�default�help�filename�?zoptional name of dump file)�nargsr!)�add_argumentr)�parserrrr�
set_argparser?szDebugDumpCommand.set_argparsercCs�|jj}|s6tjdtjtj���}dtj�d|f}tjj|�}|j	d�r\t
j|d�|_nt
|d�|_|jt�|j�|j�|j�|j|jj�|j�|jj�ttd�|�dS)z{create debug txt file and compress it, if no filename specified
           use dnf_debug_dump-<timestamp>.txt.gz by defaultz%Y-%m-%d_%Tzdnf_debug_dump-%s-%s.txt.gz�z.gz�wzOutput written to: %sN)�optsr"�timeZstrftimeZ	localtime�os�uname�path�abspath�endswith�gzip�GzipFiler�open�write�
DEBUG_VERSION�dump_system_info�dump_dnf_config_info�dump_rpm_problems�
dump_packagesZnorepos�dump_rpmdb_versions�close�printr)rr"Znowrrr�runHs 


zDebugDumpCommand.runcCs4tjjr t|jtj�r t|d�}tjj|j|�dS)N�utf8)	�dnfZpycompZPY3�
isinstancerr1r2�bytesZ
write_to_file)r�msgrrrr4as
zDebugDumpCommand.writecCsX|jd�tj�}|jd|d|df�|jdtj�|jdtjjdd��dS)	Nz%%%%SYSTEM INFO
z  uname: %s, %s
��z  rpm ver: %s
z  python ver: %s
�
�)r4r,r-�rpm�__version__�sys�version�replace)rr-rrrr6fs
z!DebugDumpCommand.dump_system_infocCs�|jjj}djdd�|jjjD��}|jd�|jd|d�|jd|d�|jd	|d
�|jdtjj	�|jd|�|jd
dj|jjj
��dS)N�,cSsg|]
}|j�qSr)r)�.0�prrr�
<listcomp>psz9DebugDumpCommand.dump_dnf_config_info.<locals>.<listcomp>z
%%%%DNF INFO
z  arch: %s
�archz  basearch: %s
Zbasearchz  releasever: %s
Z
releaseverz  dnf ver: %s
z  enabled plugins: %s
z  global excludes: %s
)r�confZ
substitutions�joinZ_plugins�pluginsr4r?�const�VERSION�excludepkgs)r�varrSrrrr7ns

z%DebugDumpCommand.dump_dnf_config_infocCsP|jd�t|j�\}}|jdjdd�|D���|jdjdd�|D���dS)Nz%%%%RPMDB PROBLEMS
rFcSs$g|]\}}dt|�t|�f�qS)zPackage %s requires %s
)r)rM�req�pkgrrrrO}sz6DebugDumpCommand.dump_rpm_problems.<locals>.<listcomp>cSs$g|]\}}dt|�t|�f�qS)zPackage %s conflicts with %s
)r)rMrQrYrrrrOs)r4�rpm_problemsrrR)rZmissing�	conflictsrrrr8zs
z"DebugDumpCommand.dump_rpm_problemsc	Cs\|jjj�}|jd�x&t|j��D]}|jdt|��q$W|sFdS|jd�|j�}x�t|jjj	�dd�d�D]�}y�d}|j
dk	r�|j
}n*|jdk	r�|j}nt|j
�dkr�|j
d}|jd|j|f�|jd	d
j|j��x,t|j|jd��D]}|jdt|��q�WWqrtjjk
�rR}z|jd|t|�f�wrWYdd}~XqrXqrWdS)
Nz
%%%%RPMDB
z  %s
z
%%%%REPOS
cSs|jS)N)�id)�xrrr�<lambda>�sz0DebugDumpCommand.dump_packages.<locals>.<lambda>)�keyrz
%%%s - %s
z  excludes: %s
rL)ZreponamezError accessing repo %s: %s
)r�sack�queryr4�sorted�	installed�pkgspec�	availableZreposZiter_enabledZmetalinkZ
mirrorlist�lenZbaseurlr\rRrV�filterr?�
exceptions�Error�str)	rZ
load_repos�qrNreZrepoZurlZpo�errrr9�s2




zDebugDumpCommand.dump_packagescCs(|jd�|jjj�}|jd|�dS)Nz%%%%RPMDB VERSIONS
z
  all: %s
)r4rr`Z_rpmdb_version)rrJrrrr:�s
z$DebugDumpCommand.dump_rpmdb_versions)r)rrr�aliasesr�summaryr
r�staticmethodr'r=r4r6r7r8r9r:rrr)rrr
2s		r
c@sPeZdZdZed�Zdd�Zedd��Zdd�Z	d	d
�Z
dd�Zed
d��ZdS)r�
debug-restorez,restore packages recorded in debug-dump filecCs4d|jj_d|jj_d|jj_|jjs0d|jj_dS)NT)rrrrZ	root_userr*�outputZ	resolving)rrrrr�s



zDebugRestoreCommand.configurecCs~|jddtd�d�|jddtd�d�|jddtd�d�|jd	d
dtd�d
�|jddtd�d�|jddtd�d�dS)Nz--outputrz,output commands that would be run to stdout.)rr!z--install-latestz0Install the latest version of recorded packages.z
--ignore-archz_Ignore architecture and install missing packages matching the name, epoch, version and release.z--filter-typesz[install, remove, replace]zinstall, remove, replacezlimit to specified type)�metavarr r!z--remove-installonlyzqAllow removing of install-only packages. Using this option may result in an attempt to remove the running kernel.r"r(zname of dump file)r$r!)r%r)r&rrrr'�s$z!DebugRestoreCommand.set_argparsercCsV|jjr$t|jjjdd�j��|j_|j|jjd�}|j||j�|j||j�dS)z Execute the command action here.rL� rN)	r*�filter_types�setrK�split�read_dump_filer"�process_installed�process_dump)r�	dump_pkgsrrrr=�szDebugRestoreCommand.runcCs�|jjj�j�}|jj|�}x�|D]�}d}t|�}|j|j|jfd�}|dk	r�|j	|j
|jf}	|	|krpd||	<q�||kr~d}q�d|jkr�d}
nd}
x|j
�D]}|
||<q�Wnd}|r"d|jkr"||ks�|jr"|jr�td|�q"|jj|�q"WdS)NF�skipTrK�removezremove    %s)rr`rarcZ_get_installonly_queryrd�getrrP�epochrJ�releasert�keysZremove_installonlyrqr<Zpackage_remove)rrzr*rcZinstallonly_pkgsrYZ
pkg_remove�spec�dumped_versionsZevrrZd_evrrrrrx�s.


z%DebugRestoreCommand.process_installedc
Cs�x�t|j��D]�\}}|||f}x�t|j��D]�\}}}||||f}	|	dkrRq0|jr^d}
nd|}
|jr�|	dkr�d||
f}nt||
|||�}|	|jkr0|jr�td|	|f�q0y|jj	|�Wq0t
jjk
r�t
jtd�|�Yq0Xq0WqWdS)Nr{rF�.�installz%s%sz%s   %szPackage %s is not available)rbr�Zignore_archZinstall_latest�pkgtup2specrtrqr<rr�r?rhZMarkingErrorr�errorr)rrzr*�n�ar�rl�v�rrrP�pkg_specrrrry�s&
z DebugRestoreCommand.process_dumpcCs�|jd�rtj|�}nt|�}t|j��tkrFtjt	d�|�t
jj�d}i}xp|D]h}t|�}|rr|dkrTd}qT|s�|ddkr�P|j
�}tj|�}d|j|j|jfi�|j|j|jf<qTW|S)	Nz.gzzBad dnf debug file: %sTz
%%%%RPMDB
Frrsr�)r0r1r2r3r�readliner5rr�rr?rhri�strip�hawkeyZsplit_nevra�
setdefaultrrPr~rJr)r"Zfobjr{Zpkgs�liner�Znevrarrrrws(


(z"DebugRestoreCommand.read_dump_fileN)rp)
rrrrmrrnrror'r=rxryrwrrrrr�s#rcs�tjj|�}|j�j��t�}t�}x@�D]8�|j�fdd��jD��|j�fdd��jD��q*W�fdd�|D�}�fdd�|D�}||fS)Ncs2g|]*}t|�dkrt|�jd�r|�f�qS)zsolvable:prereqmarkerzrpmlib()rj�
startswith)rMrX)rYrrrO:sz rpm_problems.<locals>.<listcomp>csg|]}|�f�qSrr)rMrQ)rYrrrO=scs$g|]\}}�j|d�s||f�qS))�provides)rg)rMrXrY)�allpkgsrrrO?scs$g|]\}}�j|d�r||f�qS))r�)rg)rMrQrY)r�rrrOAs)	r?r`Z_rpmdb_sackrarcru�update�requiresr[)rZrpmdbr�r[Zmissing_requiresZexisting_conflictsr)r�rYrrZ3s
rZcCst|j|j|j|j|j�S)N)r�rrPr~rJr)rYrrrrdFsrdcCs<|sdnd|jd�}|dkr"dnd|}d|||||fS)NrFz.%sr�z%s:z%s-%s%s-%s%s)NrF)�lstrip)rrPr~rJrr�rlrrrr�Jsr�)Z
__future__rrZdnf.i18nrZdnfpluginscorerrr?Zdnf.clir1r�r,rGrIr+r5ZPluginrrZCommandr
rrZrdr�rrrr�<module>s&
w__pycache__/repodiff.cpython-36.opt-1.pyc000064400000017063150402642240014145 0ustar003

�gt`�,�@sjddlmZddlmZddlZddlmZddlZddlm	Z	Gdd�dej
�ZGdd	�d	ejj
�ZdS)
�)�absolute_import)�unicode_literalsN)�OptionParser)�_cs eZdZdZ�fdd�Z�ZS)�RepoDiff�repodiffcs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoDiffCommand)�self�base�cli)�	__class__��/usr/lib/python3.6/repodiff.pyr	$szRepoDiff.__init__)�__name__�
__module__�__qualname__�namer	�
__classcell__rr)rrr src@sLeZdZdZed�Zedd��Zdd�Zdd�Z	d	d
�Z
dd�Zd
d�ZdS)r
rz1List differences between two sets of repositoriesc	Cs�|jddgddtd�d�|jddgdd	td
�d�|jddd
gtjdtd�d�|jdddtd�d�|jddtd�d�|jddtd�d�|jddtd�d�dS)Nz
--repo-oldz-o�append�oldz2Specify old repository, can be used multiple times)�default�action�dest�helpz
--repo-newz-n�newz2Specify new repository, can be used multiple timesz--archz
--archlistz-a�archeszhSpecify architectures to compare, can be used multiple times. By default, only source rpms are compared.z--sizez-s�
store_truez5Output additional data about the size of the changes.)rrz--compare-archzMCompare packages also by arch. By default packages are compared just by name.z--simplez7Output a simple one line message for modified packages.z--downgradezNSplit the data for modified packages between upgraded and downgraded packages.)�add_argumentrrZ_SplitCallback)�parserrrr�
set_argparser/s

zRepoDiffCommand.set_argparsercCs�|jj}d|_d|_d|_dg|jj_|jj	s:|jj
rNtd�}tj
j|��x<|jjj�D],}|j|jj	|jj
kr�|j�q\|j�q\W|jjs�dg|j_dS)NT�allz*Both old and new repositories must be set.�src)r
�demandsZsack_activationZavailable_repos�
changelogsrZconfZdisable_excludes�optsrrr�dnf�
exceptions�ErrorZreposr"�id�enable�disabler)rr$�msgZreporrr�	configureMs
zRepoDiffCommand.configurecCs|jjr|j|jfS|jS)N)r&�compare_archr�arch)r�pkgrrr�_pkgkey`szRepoDiffCommand._pkgkeyc
s6t�fdd�|D���t�j��}t�fdd�|D���t�j��}t�}x:|j|d�D]*}x$|j|jd�D]}||�j|�<qlWqXW�jjj}t�fdd�||D��fdd�||D�|ggd�}	xj|j	|�D]\}
�|
}�|
}|j
|j
kr�q�||j
|j
�d	k�r|	d
j||f�q�|	dj||f�q�W|	S)aNcompares packagesets old and new, returns dictionary with packages:
        added: only in new set
        removed: only in old set
        upgraded: in both old and new, new has bigger evr
        downgraded: in both old and new, new has lower evr
        obsoletes: dictionary of which old package is obsoleted by which new
        csg|]}�j|�|f�qSr)r2)�.0�p)rrr�
<listcomp>msz-RepoDiffCommand._repodiff.<locals>.<listcomp>csg|]}�j|�|f�qSr)r2)r3r4)rrrr5os)�	obsoletes)Zprovidescsg|]}�|�qSrr)r3�k)�new_drrr5zscsg|]}�|�qSrr)r3r7)�old_drrr5{s)�added�removedr6�upgraded�
downgradedrr=r<)�dict�set�keys�filterr6r2r�sack�evr_cmp�intersection�evrr)
rrrZold_keysZnew_keysr6Z	obsoleterZ	obsoletedrCrr7�pkg_old�pkg_newr)r8r9rr�	_repodiffes0
zRepoDiffCommand._repodiffc
sh�fdd��dd�}��fdd�}tddddd�}x<t|d	�D],}ttd
�j�|���|d	|j7<q@Wxjt|d�D]Z}ttd�j�|���|d
j�j|��}|r�ttd�j�|���|d|j7<q~W�jj	�r�|d�r:ttd��x<t|d�D],\}}|d|j|j7<|||��q
W|d�r�ttd��x�t|d�D],\}}|d|j|j7<|||��q^Wn\|d|d}	|	�r�ttd��x8t|	�D],\}}|d|j|j7<|||��q�Wttd��ttd�jt
|d	���ttd�jt
|d����jj	�rlttd�jt
|d���ttd�jt
|d���n&ttd�jt
|d�t
|d����jj�rdttd�j||d	���ttd�j||d����jj	�s�ttd�j||d|d���n4ttd�j||d���ttd�j||d���ttd�j||d	|d|d|d���dS) Ncs �jjrt|�Sd|j|jfS)Nz%s-%s)r&r/�strrrE)r1)rrr�pkgstr�sz'RepoDiffCommand._report.<locals>.pkgstrcSsXt|�}|dkr.|djtjjj|�j��7}n&|dkrT|djtjjj|�j��7}|S)Nrz ({})z (-{}))rI�formatr'r
Z
format_number�strip)Znumr-rrr�sizestr�sz(RepoDiffCommand._report.<locals>.sizestrcsBg}�jjr*|jd�|��|�f��n|jd�|jd�|��|�f�|jdt|d
��|jrv|jd}nd}x�|jD]�}|r�|d|dkr�Pn2|d|dkr�|d|dkr�|d|dkr�P|jd	|djd
�tjj|d�tjj|d�f�q�W�jj	�r0|jt
d�j|j	|j	��tdj
|��dS)Nz%s -> %s��-�rZ	timestampZauthor�textz
* %s %s
%sz%a %b %d %YzSize change: {} bytes�
���)r&Zsimpler�lenr%Zstrftimer'Zi18nZucd�sizerrK�print�join)rFrGZmsgsZ	old_chlogZchlog)rJrrr�report_modified�s2

z0RepoDiffCommand._report.<locals>.report_modifiedr)r:r;r<r=r:zAdded package  : {}r;zRemoved package: {}r6zObsoleted by   : {}r<z
Upgraded packagesr=z
Downgraded packagesz
Modified packagesz
SummaryzAdded packages: {}zRemoved packages: {}zUpgraded packages: {}zDowngraded packages: {}zModified packages: {}zSize of added packages: {}zSize of removed packages: {}zSize of modified packages: {}zSize of upgraded packages: {}zSize of downgraded packages: {}zSize change: {})r>�sortedrVrrKrU�getr2r&Z	downgraderT)
rrrMrXZsizesr1ZobsoletedbyrFrGZmodifiedr)rJrr�_report�sf










zRepoDiffCommand._reportcCs�|jjjtj�j|jjd�}|jjjtj�j|jjd�}|jj	rld|jj	krl|j
|jj	d�|j
|jj	d�|jjr�|j
dd�|j
dd�n|j
dd�|j
dd�|j�|j�|j
|j||��dS)N)Zreponame�*)r0rP)Zlatest_per_arch)Zlatest)rrBZquery�hawkeyZIGNORE_EXCLUDESrAr&rrrZfiltermr/Zapplyr[rH)rZq_newZq_oldrrr�run�szRepoDiffCommand.runN)r)
rrr�aliasesrZsummary�staticmethodr!r.r2rHr[r^rrrrr
+s&ar
)Z
__future__rrZdnf.clir'Zdnf.cli.option_parserrr]ZdnfpluginscorerZPluginrr
ZCommandr
rrrr�<module>s__pycache__/generate_completion_cache.cpython-36.pyc000064400000006000150402642240016543 0ustar003

�gt`l�@s^ddlmZddlmZddlmZddlmZddlZddlZ	ddl
Z
Gdd�dej�ZdS)�)�absolute_import)�unicode_literals)�ucd)�loggerNcs<eZdZdZ�fdd�Zedd��Zdd�Zdd	�Z�Z	S)
�BashCompletionCacheZgenerate_completion_cachecs"tt|�j||�||_d|_dS)Nz/var/cache/dnf/packages.db)�superr�__init__�base�
cache_file)�selfr	Zcli)�	__class__��//usr/lib/python3.6/generate_completion_cache.pyrszBashCompletionCache.__init__cCstjd|�dS)NzCompletion plugin: %s)r�debug)�msgr
r
r�_out$szBashCompletionCache._outcCsd}x,|jjj�D]}|jdk	r|jjrd}PqWtjj|j�sF|r�y~t	j
|j��h}|jd�|j�}|j
d�|j
d�|j
d�|jjj�j�}dd	�|D�}|jd
|�|j�WdQRXWn6t	jk
r�}z|jdt|��WYdd}~XnXdS)z& Generate cache of available packages FNTzGenerating completion cache...z/create table if not exists available (pkg TEXT)zAcreate unique index if not exists pkg_available ON available(pkg)zdelete from availablecSs g|]}|jdkrt|�g�qS)�src)�arch�str)�.0�xr
r
r�
<listcomp>@sz,BashCompletionCache.sack.<locals>.<listcomp>z*insert or ignore into available values (?)z Can't write completion cache: %s)r	ZreposZiter_enabledZmetadata�fresh�os�path�existsr
�sqlite3�connectr�cursor�execute�sack�queryZ	available�executemany�commit�OperationalErrorr)rrZrepo�conn�curZ
avail_pkgsZavail_pkgs_insert�er
r
rr (s,

zBashCompletionCache.sackcCs�|js
dSy�tj|j��n}|jd�|j�}|jd�|jd�|jd�tjj	|j
�j�j�}dd�|D�}|j
d|�|j�WdQRXWn6tjk
r�}z|jd	t|��WYdd}~XnXdS)
z& Generate cache of installed packages NzGenerating completion cache...z/create table if not exists installed (pkg TEXT)zAcreate unique index if not exists pkg_installed ON installed(pkg)zdelete from installedcSs g|]}|jdkrt|�g�qS)r)rr)rrr
r
rrVsz3BashCompletionCache.transaction.<locals>.<listcomp>z*insert or ignore into installed values (?)z Can't write completion cache: %s)�transactionrrr
rrr�dnfr Z_rpmdb_sackr	r!Z	installedr"r#r$r)rr%r&Z	inst_pkgsZinst_pkgs_insertr'r
r
rr(Gs"


zBashCompletionCache.transaction)
�__name__�
__module__�__qualname__�namer�staticmethodrr r(�
__classcell__r
r
)rrrs
r)
Z
__future__rrZdnf.i18nrZdnfpluginscorerr)Zos.pathrrZPluginrr
r
r
r�<module>s__pycache__/config_manager.cpython-36.pyc000064400000016133150402642250014345 0ustar003

�gt`�*�@s�ddlmZddlmZddlmZmZmZddlZddlZddl	Zddl
ZddlZddlZddl
Z
ddlZddlZejjGdd�dejj��Zdd�Zejd	�Zejd
�Zejd�Zejd�Zd
d�ZdS)�)�absolute_import)�unicode_literals)�_�logger�P_Nc@sReZdZdgZed�jejjd�Z	e
dd��Zdd�Zdd	�Z
d
d�Zdd
�ZdS)�ConfigManagerCommandzconfig-managerz4manage {prog} configuration options and repositories)�progcCs�|jdddtd�d�|jdddtd	�d
�|jdgdd
td�d�|jdddtd�d
�|jdddtd�d
�|j�}|jddddtd�d�|jddddtd�d�dS)N�crepo�*�repozrepo to modify)�nargs�metavar�helpz--saveF�
store_truez/save the current options (useful with --setopt))�default�actionrz
--add-repo�appendZURLz8add (and enable) the repo from the specified file or url)rrr
rz--dumpz,print current configuration values to stdoutz--dump-variableszprint variable values to stdoutz
--set-enabled�set_enabledz"enable repos (automatically saves))r�destrrz--set-disabled�set_disabledz#disable repos (automatically saves))�add_argumentrZadd_mutually_exclusive_group)�parserZenable_group�r�$/usr/lib/python3.6/config_manager.py�
set_argparser)s,z"ConfigManagerCommand.set_argparsercCs�|jj}d|_|jjgkp@|jjp@|jjp@|jjp@|jjp@|jj	sp|jj
jtd�j
djdddddd	d
dg���|jjgkr�tjtd��|jjs�|jj	s�|jjs�|jjr�d|_d
d�|jjD�}dd�|D�|j_dS)NTz.one of the following arguments is required: {}� z--savez
--add-repoz--dumpz--dump-variablesz
--set-enabledz--enablez--set-disabledz	--disablez{Warning: --enablerepo/--disablerepo arguments have no meaningwith config manager. Use --set-enabled/--set-disabled instead.cSsg|]}|dkr|jd��qS)�,)�split)�.0�xrrr�
<listcomp>_sz2ConfigManagerCommand.configure.<locals>.<listcomp>cSs"g|]}|D]}|dkr|�qqS)�r)rZsublist�itemrrrr as)�cli�demandsZavailable_repos�opts�add_repo�save�dump�dump_variablesrrZ	optparser�errorr�format�joinZrepos_edrZwarningZ	root_userr	)�selfr$Z	temp_listrrr�	configureBs*zConfigManagerCommand.configurecCs|jjr|j�n|j�dS)zExecute the util action here.N)r%r&�modify_repo)r-rrr�runds
zConfigManagerCommand.runc	s�g�t������fdd�}�jjrnx�jjD]�|�d�q.Wt�jd�r�xL�jjj�D]�|�d�qZWn,t�jd�r�x�jjj�D]�|�d�q�W�r�tjjt	d�dj
�����jj}i}t�jd�r�jj
r�jj
}�jj�rx*�jjjj�D]\�}td	�|f�q�W�jj�s0d
�jjk�r��jj�r\|�r\�jjj�jjjd
|j|��jj�r�t�jjjd
��t�jjj����s�dS�jj�s��jj�r�d�j_x�t��D]�}i}�jj�r�d|d
<n�jj�r�d|d
<t�jd��r*x4�jjj�D]$\}}tj|j|��r|j|��qW�jj�rT|�rT�jjj|j|j|j|��jj�r�t�jjjd|j��t|j���q�WdS)z< process --set-enabled, --set-disabled and --setopt options cs0�jjj|�}|s�j��n|r,�j|�dS)N)�baseZreposZget_matching�add�extend)�keyZadd_matching_reposZmatching)�matching_repos�name�not_matching_repos_idr-rr�match_reposqs
z5ConfigManagerCommand.modify_repo.<locals>.match_reposT�repo_setoptsFzNo matching repo to modify: %s.z, �main_setoptsz%s = %s�mainN�1Zenabled�0zrepo: )�setr%r	�hasattrr9�keys�dnf�
exceptions�Errorrr,r1�confr:r)Z
substitutions�items�printr'Zwrite_raw_configfileZconfig_file_pathr(�outputZ
fmtSectionrr�sorted�fnmatch�id�updateZrepofile)	r-r8ZsbcZmodify�valrZrepo_modify�repoidZsetoptsr)r5r6r7r-rr/ks`






z ConfigManagerCommand.modify_repoc
CsN|jjj}d}�x|jjD�]}tjjj|�jdkrDdt	j
j|�}tj
td�|�|jd�r�t	j
j|�}t	j
j||�}y6|jj|dd�}tj|j|�t	j|d�|j�Wn6tk
r�}z|d	7}tj|�wWYd
d
}~XnXqt|�}djtjj|�}t	j
j|d|�}d
|||f}	t||	�sqqW|�rJtjj t!dd|���d
S)z process --add-repo option rr!zfile://zAdding repo from: %sz.repozw+)�modei��Nz$created by {} config-manager from {}z%s.repoz"[%s]
name=%s
baseurl=%s
enabled=1
zConfiguration of repo failedzConfiguration of repos failed)"r1rDZget_reposdirr%r&rA�pycompZurlparse�scheme�os�path�abspathr�infor�endswith�basenamer,Zurlopen�shutilZcopy2r6�chmod�close�IOErrorr*�sanitize_url_to_fsr+�util�	MAIN_PROG�save_to_filerBrCr)
r-Z	myrepodirZerrors_count�urlZdestname�f�erMZreponame�contentrrrr&�s8




zConfigManagerCommand.add_repoN)�__name__�
__module__�__qualname__�aliasesrr+rAr]r^Zsummary�staticmethodrr.r0r/r&rrrrr"s"BrcCspy4t|d�� }tjj||�tj|d�WdQRXWn6ttfk
rj}ztj	t
d�||�dSd}~XnXdS)Nzw+i�z&Could not save repo to repofile %s: %sFT)�openrArPZ
write_to_filerRrYr[�OSErrorrr*r)�filenamerc�fdrbrrrr_�s
r_z^\w+:/*(\w+:|www\.)?z[?/:&#|~\*\[\]\(\)\'\\]+z^[,.]*z[,.]*$cCs*ybtj|�r`tjjr&|jd�jd�}n:t|t�rB|jd�jd�}n
|jd�}t|t	�r`|jd�}Wnt
ttt
fk
r~YnXtjd|�}tjd|�}tjd|�}tjd|�}t|�dk�r|dd�jd�}dt|d
�}tj�}|j||d�jd��|d|�d|j�}d	}tj|d|�S)z�Return a filename suitable for the filesystem and for repo id

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    Zidnazutf-8r!r�N�rOzE[^abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.:-]���)�	RE_SCHEME�matchrArPZPY3�encode�decode�
isinstance�strZunicode�UnicodeDecodeError�UnicodeEncodeError�UnicodeError�	TypeError�sub�RE_SLASH�RE_BEGIN�RE_FINAL�lenr�hashlibZsha256rKZ	hexdigest�re)r`�parts�	lastindexZcsumZ
allowed_regexrrrr\�s.



r\)Z
__future__rrZdnfpluginscorerrrrAZdnf.cliZ
dnf.pycompZdnf.utilrIrrRr�rXZpluginZregister_commandr#ZCommandrr_�compilerpr{r|r}r\rrrr�<module>s(1



__pycache__/builddep.cpython-36.opt-1.pyc000064400000016426150402642250014142 0ustar003

�f�$�@s�ddlmZddlmZddlmZmZddlZddlZddlZddl	Zddl
ZddlZddlZ
ddlZddlZddlZddlZejjGdd�dejj��ZdS)�)�absolute_import)�unicode_literals)�_�loggerNcs�eZdZdZdZee�Zed�Z�fdd�Zdd�Z	d	d
�Z
edd��Zd
d�Z
dd�Zdd�Zedd��Zdd�Zdd�Zdd�Zdd�Z�ZS)�BuildDepCommand�builddep�	build-depz3Install build dependencies for package or spec filez[PACKAGE|PACKAGE.spec]cs(tt|�j|�tjjj�|_g|_dS)N)	�superr�__init__�dnf�rpmZtransactionZinitReadOnlyTransaction�_rpm_ts�tempdirs)�self�cli)�	__class__��/usr/lib/python3.6/builddep.pyr
/szBuildDepCommand.__init__cCsx|jD]}tj|�qWdS)N)r�shutilZrmtree)r�temp_dirrrr�__del__4szBuildDepCommand.__del__cCs�tjjj|�}|ddkr |jStjj�}tjdd�}t	jj
|t	jj|��}|jj
|�t|d�}zFy|j|jjj||j��Wn$tk
r�}z�WYdd}~XnXWd|j�X|S)	z�
        In case pkgspec is a remote URL, download it to a temporary location
        and use the temporary file instead.
        r�file�Z
dnf_builddep_)�prefixzwb+N)rr)rZpycompZurlparse�path�libdnfZrepoZ
Downloader�tempfileZmkdtemp�os�join�basenamer�append�openZdownloadURL�baseZconfZ_config�fileno�RuntimeError�close)r�pkgspec�locationZ
downloaderrZ	temp_fileZtemp_fo�exrrr�_download_remote_file8s


z%BuildDepCommand._download_remote_filec	Cs�dd�}|jdddtd�d�|jdd	d
gd|td�d
�|jdddtd�d�|j�}|jddtd�d�|jddtd�d�dS)NcSs:|r|jdd�ng}t|�dkr6td�|}tj|��|S)N��z&'%s' is not of the format 'MACRO EXPR')�split�lenr�argparseZArgumentTypeError)�argZarglist�msgrrr�	macro_defRs

z0BuildDepCommand.set_argparser.<locals>.macro_def�packages�+�packagez"packages with builddeps to install)�nargs�metavar�helpz-Dz--definer z'MACRO EXPR'z$define a macro for spec file parsing)�action�defaultr6�typer7z--skip-unavailable�
store_trueFz5skip build dependencies not available in repositories)r8r9r7z--specz)treat commandline arguments as spec files)r8r7z--srpmz)treat commandline arguments as source rpm)�add_argumentrZadd_mutually_exclusive_group)�parserr1Zptyperrr�
set_argparserPs

zBuildDepCommand.set_argparsercCs|jjsd|j_dS)N�error)�optsZrpmverbosity)rrrr�
pre_configurefszBuildDepCommand.pre_configurecCsr|jj}d|_d|_d|_d|_|jjp.|jjsnx<|jj	D]0}|j
d�pZ|j
d�pZ|j
d�s:|jjj
�Pq:WdS)NTz.src.rpmz
.nosrc.rpmz.spec)r�demandsZavailable_reposZ	resolvingZ	root_userZsack_activationr@�spec�srpmr2�endswithr"ZreposZenable_source_repos)rrBr&rrr�	configurejs


zBuildDepCommand.configurecCs\tjjj|j�}x$|jjD]}tj|d|d�qWd}x�|jj	D]�}|j
|�}yl|jjrh|j|�nT|jj
r||j|�n@|jd�s�|jd�r�|j|�n |jd�r�|j|�n
|j|�WqDtjjk
�r}z:x$|j�D]}tjtd�j|��q�Wtj|�d}WYdd}~XqDXqDWx |jjD]}tj|d��q*W|�rXtjjtd	���dS)
Nrr*Fz.src.rpmz	nosrc.rpmz.speczRPM: {}Tz!Some packages could not be found.)rZyumZrpmtransZRPMTransactionr"r@�definerZaddMacror2r)rD�	_src_depsrC�
_spec_depsrE�_remote_deps�
exceptions�ErrorZmessagesrr?r�formatZdelMacro)rZrpmlogZmacroZ
pkg_errorsr&�e�linerrr�runzs2


zBuildDepCommand.runcCs|j�dd�S)Nr+)ZDNEVR)Zrpm_deprrr�_rpm_dep2reldep_str�sz#BuildDepCommand._rpm_dep2reldep_strcCs�tjj|jj�}|j|d�|j�}|rX|jd�rXtjj|jj�}|j|d�|j�}|r�|jd�r�td�}t	j
||�|jjdkS|r�|jj
|�}|r�x|D]}tjj|�q�W|jjj|dd�dS)	N)Zprovides�/)r�(z$No matching package to install: '%s'TF)ZselectZoptional)rZselectorZSelectorr"�sack�setZmatches�
startswithrr�warningr@Zskip_unavailableZ_sltr_matches_installedZ_msg_installedZ_goalZinstall)r�
reldep_strZsltr�foundr0Zalready_instr4rrr�_install�s$
zBuildDepCommand._installc
Cs�tj|tj�}y|jj|�}WnRtjk
rp}z4t|�dkrJtd�|}tj	|�t
jj|��WYdd}~XnXtj	|�|j
d�}d}x0|D](}|j|�}|jd�r�q�||j|�M}q�W|s�td�}	t
jj|	��|jjr�tjtd��dS)Nzerror reading package headerz2Failed to open: '%s', not a valid source rpm file.ZrequirenameTzrpmlib(zNot all dependencies satisfiedzJWarning: -D or --define arguments have no meaning for source rpm packages.)rr!�O_RDONLYr
ZhdrFromFdnorr?�strrr%rrKrLZdsFromHeaderrQrVrZr@rGrrW)
rZsrc_fn�fd�hrN�ds�done�deprX�errrrrrH�s*





zBuildDepCommand._src_depsc	Cs�ytj|�}Wn>tk
rL}z"td�||f}tjj|��WYdd}~XnXd}x.tj|jd�D]}|j	|�}||j
|�M}qbW|s�td�}tjj|��dS)Nz/Failed to open: '%s', not a valid spec file: %sT�requireszNot all dependencies satisfied)rrC�
ValueErrorrrrKrLr_ZsourceHeaderrQrZ)	rZspec_fnrCr(r0r`rarXrbrrrrI�s

zBuildDepCommand._spec_depsc	Cs�tjj|�j|jj�jdd�}tdd�|D��}|jjj�j	�j||gdd�j
�j�}|sptjj
td�|��d}x.|D]&}x |jD]}||jt|��M}q�WqzW|s�td�}tjj
|��dS)	N�src)Z	arch__neqcSsh|]
}|j�qSr)Zsource_name)�.0�pkgrrr�	<setcomp>�sz/BuildDepCommand._remote_deps.<locals>.<setcomp>)�nameZarchzno package matched: %sTzNot all dependencies satisfied)rZsubjectZSubjectZget_best_queryr"rT�filter�listZquery�	availableZlatestrPrKrLrrcrZr\)	rr4rlZsourcenamesZpkgsr`rgZreqrbrrrrJ�s
zBuildDepCommand._remote_deps)rr)�__name__�
__module__�__qualname__�aliasesr0rZsummaryZusager
rr)�staticmethodr>rArFrPrQrZrHrIrJ�
__classcell__rr)rrr's !r)Z
__future__rrZdnfpluginscorerrr.rZdnf.cliZdnf.exceptionsZdnf.rpm.transactionZdnf.yum.rpmtransZlibdnf.reporrrrrZpluginZregister_commandrZCommandrrrrr�<module>s__pycache__/needs_restarting.cpython-36.opt-1.pyc000064400000023602150402642250015704 0ustar003

�f`.�	@s$ddlmZddlmZddlmZddlmZddlmZmZddlZddl	Zddl
Z
ddlZddlZddl
Z
ddlZddlZddd	d
ddd
ddg	ZdgZdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�ZGd#d$�d$e�ZGd%d&�d&e�ZejjGd'd(�d(ej j!��Z"dS))�)�absolute_import)�division)�print_function)�unicode_literals)�logger�_NZkernelz	kernel-rtZglibczlinux-firmwareZsystemd�dbuszdbus-brokerzdbus-daemonZ
microcode_ctl�zlibcs�tjj|�st�St�}xjtj|�D]\}tjj|�s$|jd�rBq$ttjj||���&}x|D]}|j	|j
�|f�q\WWdQRXq$Wt��x4|jj�j
�jdd�|D�d�D]}�j	|j�q�Wx6�fdd�|D�D] \}}tjtdj||d���q�W�S)	z�
    Provide filepath as string if single dir or list of strings
    Return set of package names contained in files under filepath
    z.confNcSsh|]}|d�qS)r�)�.0�xr
r
�&/usr/lib/python3.6/needs_restarting.py�	<setcomp>Bsz'get_options_from_dir.<locals>.<setcomp>)�namecsh|]}|d�kr|�qS)rr
)rr)�packagesr
r
rDsz`No installed package found for package name "{pkg}" specified in needs-restarting file "{file}".)�pkg�file)�os�path�exists�set�listdir�isdir�endswith�open�join�add�rstrip�sack�query�	installed�filterrr�warningr�format)�filepath�baseZoptionsr�fp�linerrr
)rr
�get_options_from_dir0s"
$&r(ccs�x�t�D]�\}}y<|dk	r(|t|�kr(wt|ddd��}|j�}WdQRXWn"tk
rntjd|�wYnXx$|D]}t||�}|dk	rv|VqvWqWdS)N�r�replace)�errorszFailed to read PID %d's smaps.)�
list_smaps�	owner_uidr�	readlines�EnvironmentErrorrr"�smap2opened_file)�uid�pid�smapsZ
smaps_file�linesr'�ofiler
r
r
�list_opened_filesKs

r6ccsNxHtjd�D]:}yt|�}Wntk
r2wYnXd|}||fVqWdS)Nz/procz/proc/%d/smaps)rr�int�
ValueError)Zdir_r2r3r
r
r
r,\sr,cst��i����fdd�}|S)Ncs,�j|��}|�k	r|S�|�}|�|<|S)N)�get)Zparam�val)�cache�func�sentinelr
r
�wrapperiszmemoize.<locals>.wrapper)�object)r<r>r
)r;r<r=r
�memoizefsr@cCstj|�tjS)N)r�stat�ST_UID)�fnamer
r
r
r-ssr-cCs$|j�j|d�j�}|r |dSdS)N)rr)rr!�run)rrCZmatchesr
r
r
�owning_packagewsrEcCsPd|}t|��}tjj|j��}WdQRXdj|jd��}td||f�dS)Nz/proc/%d/cmdline� �z%d : %s)r�dnfZi18nZucd�readr�split�print)r2ZcmdlineZcmdline_fileZcommandr
r
r
�	print_cmd~s

rLc	Cs�tj�}|jdd�}tj|d�}d}y|jd|j|��}Wn<tjk
rv}zt|�}tjdj	||��dSd}~XnXtj|dd�}|j
dd�}|jd	�r�|SdS)
Nzorg.freedesktop.systemd1z/org/freedesktop/systemd1z org.freedesktop.systemd1.Managerz)Failed to get systemd unit for PID {}: {}zorg.freedesktop.DBus.Properties)Zdbus_interfacezorg.freedesktop.systemd1.UnitZIdz.service)rZ	SystemBusZ
get_objectZ	InterfaceZGetUnitByPIDZ
DBusException�strrr"r#ZGetr)	r2ZbusZsystemd_manager_objectZsystemd_manager_interfaceZ
service_proxy�e�msgZservice_propertiesrr
r
r
�get_service_dbus�s0

rPcCsn|jd�}|dkrdS|jd�dkr(dS||d�j�}|jd�}|dkrVt||d�St||d|�d�SdS)N�/rz00:z
 (deleted)FT)�find�strip�rfind�
OpenedFile)r2r'Zslash�fnZsuffix_indexr
r
r
r0�s

r0c@s*eZdZejd�Zdd�Zedd��ZdS)rUz^(.+);[0-9A-Fa-f]{8,}$cCs||_||_||_dS)N)�deletedrr2)�selfr2rrWr
r
r
�__init__�szOpenedFile.__init__cCs(|jr"|jj|j�}|r"|jd�S|jS)a;Calculate the name of the file pre-transaction.

        In case of a file that got deleted during the transactionm, possibly
        just because of an upgrade to a newer version of the same file, RPM
        renames the old file to the same name with a hexadecimal suffix just
        before delting it.

        �)rW�RE_TRANSACTION_FILE�matchr�group)rXr\r
r
r
�
presumed_name�s

zOpenedFile.presumed_nameN)	�__name__�
__module__�__qualname__�re�compiler[rY�propertyr^r
r
r
r
rU�s
rUc@s4eZdZdd�Zedd��Zedd��Zdd�Zd	S)
�ProcessStartcCs|j�|_|j�|_dS)N)�
get_boot_time�	boot_time�get_sc_clk_tck�
sc_clk_tck)rXr
r
r
rY�s
zProcessStart.__init__cCshttjd�j�}tjjd�rdtdd��8}|j�j�j	�dj�}tt
j
�t|��}t||�SQRX|S)a	
        We have two sources from which to derive the boot time. These values vary
        depending on containerization, existence of a Real Time Clock, etc.
        For our purposes we want the latest derived value.
        - st_mtime of /proc/1
             Reflects the time the first process was run after booting
             This works for all known cases except machines without
             a RTC - they awake at the start of the epoch.
        - /proc/uptime
             Seconds field of /proc/uptime subtracted from the current time
             Works for machines without RTC iff the current time is reasonably correct.
             Does not work on containers which share their kernel with the
             host - there the host kernel uptime is returned
        z/proc/1z/proc/uptime�rbrN)
r7rrA�st_mtimer�isfiler�readlinerSrJ�time�float�max)Zproc_1_boot_time�fZuptimeZproc_uptime_boot_timer
r
r
rf�szProcessStart.get_boot_timecCstjtjd�S)N�
SC_CLK_TCK)r�sysconf�
sysconf_namesr
r
r
r
rh�szProcessStart.get_sc_clk_tckc
CsLd|}t|��}|j�j�j�}WdQRXt|d�}||j}|j|S)Nz
/proc/%d/stat�)rrIrSrJr7rirg)rXr2Zstat_fnZ	stat_fileZstatsZticks_after_bootZsecs_after_bootr
r
r
�__call__�s

zProcessStart.__call__N)r_r`rarY�staticmethodrfrhrvr
r
r
r
re�srec@s4eZdZd
Zed�Zedd��Zdd�Zdd�Z	d	S)�NeedsRestartingCommand�needs-restartingz/determine updated binaries that need restartingcCsF|jdddtd�d�|jdddtd�d�|jd	d
dtd�d�dS)Nz-uz
--useronly�
store_truez#only consider this user's processes)�action�helpz-rz--reboothintzKonly report whether a reboot is required (exit code 1) or not (exit code 0)z-sz
--servicesz%only report affected systemd services)�add_argumentr)�parserr
r
r
�
set_argparsers


z$NeedsRestartingCommand.set_argparsercCs|jj}d|_dS)NT)�cli�demandsZsack_activation)rXr�r
r
r
�	configuresz NeedsRestartingCommand.configurecCsNt�}tjt|jj�}t|�}ttj	j
|jjjd�|j�}t
j|�|jj�r�t�}t�}|jjj�j�}x,|jt
d�D]}|j|jkrx|j|j�qxW|jdddgd�}t|�dkr�x,|jtd�D]}|j|jkr�|j|j�q�W|s�|�rfttd��xt|�D]}	td|	��qWxt|�D]}	td	|	��q$Wt�ttd
��ttd�d�tjj ��nttd
��ttd��dSt�}
|jj!�r�tj"�nd}xHt#|�D]<}||j$�}|dk�rĐq�|j||j%�k�r�|
j|j%��q�W|jj&�r.tdd�t|
�D��}
x |
D]}	|	dk	�rt|	��qWdSxt|
�D]}t'|��q8WdS)Nz#etc/dnf/plugins/needs-restarting.d/)rrzdbus-daemonzdbus-brokerrz;Core libraries or services have been updated since boot-up:z  * %sz8  * %s (dependency of dbus. Recommending reboot of dbus)z2Reboot is required to fully utilize these updates.zMore information:z)https://access.redhat.com/solutions/27943z>No core libraries or services have been updated since boot-up.zReboot should not be necessary.cSsg|]}t|��qSr
)rP)rr2r
r
r
�
<listcomp>Bsz.NeedsRestartingCommand.run.<locals>.<listcomp>)(re�	functools�partialrEr%rr@r(rrrZconfZinstallroot�NEED_REBOOT�extendZoptsZ
reboothintrrr r!Zinstalltimergrr�len�NEED_REBOOT_DEPENDS_ON_DBUSrKr�sortedrH�
exceptions�ErrorZuseronly�geteuidr6r^r2ZservicesrL)rXZ
process_startZ
owning_pkg_fn�optZneed_rebootZneed_reboot_depends_on_dbusr rZdbus_installedrZ
stale_pidsr1r5�namesr2r
r
r
rDsd







zNeedsRestartingCommand.runN)ry)
r_r`ra�aliasesrZsummaryrwrr�rDr
r
r
r
rx�s

rx)#Z
__future__rrrrZdnfpluginscorerrrHZdnf.clirr�rrbrArnr�r�r(r6r,r@r-rErLrPr0r?rUreZpluginZregister_commandr�ZCommandrxr
r
r
r
�<module>s:

"+__pycache__/repograph.cpython-36.pyc000064400000005342150402642250013375 0ustar003

�gt`��@s^ddlmZddlmZddlmZmZddlZdZGdd�dej	�Z
Gdd	�d	ejj�Z
dS)
�)�absolute_import)�unicode_literals)�_�loggerNzY
size="20.69,25.52";
ratio="fill";
rankdir="TB";
orientation=port;
node[style="filled"];
cs eZdZdZ�fdd�Z�ZS)�	RepoGraph�	repographcs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoGraphCommand)�self�base�cli)�	__class__��/usr/lib/python3.6/repograph.pyr	)szRepoGraph.__init__)�__name__�
__module__�__qualname__�namer	�
__classcell__rr)rrr%src@s<eZdZd
Zed�Zdd�Zdd�Zdd	�Ze	d
d��Z
dS)r
r�
repo-graphz4Output a full package dependency graph in dot formatcCsV|jj}d|_d|_|jjrRx4|jjj�D]$}|j	|jjkrF|j
�q*|j�q*WdS)NT)r
�demandsZsack_activationZavailable_reposZopts�reporZrepos�all�id�disable�enable)rrrrrr�	configure4s
zRepoGraphCommand.configurecCs|jt�dS)N)�do_dot�
DOT_HEADER)rrrr�run?szRepoGraphCommand.runc	Cs�d}|j|jj�}td�tdj|��x�|j�D]�}t||�|krRt||�}ddt||�}|d}d}td	j||||��td
j|��x||D]}tdj|��q�Wtdj|||��q2Wtd
�dS)Nrzdigraph packages {z{}g�?g333333�?�g�������?g�?z""{}" [color="{:.12g} {:.12g} {}"];z
"{}" -> {{z"{}"z!}} [color="{:.12g} {:.12g} {}"];
�}g��s���?)�	_get_depsr�sack�print�format�keys�len)	r�headerZmaxdepsZdeps�pkg�h�s�b�reqrrrrBs zRepoGraphCommand.do_dotc
Cs�i}i}g}|j�j�}x�|D]�}i}x�|jD]�}t|�}||krDq.|jd�rPq.||krb||}	n@|j|d�}	|	s�tjtd�|�|j	|�q.n
|	dj
}	|	||<|	|j
kr�d||	<|	|ks.|	|kr�q.nd||	<|j�||j
<q.WqW|S)Nz	solvable:)ZprovideszNothing provides: '%s'r)Zquery�	available�requires�str�
startswith�filterr�debugr�appendrr')
r$r0Zprov�skipr/r*Zxxr.ZreqnameZproviderrrrr#Ys8





zRepoGraphCommand._get_depsN)rr)rrr�aliasesrZsummaryrr r�staticmethodr#rrrrr
0sr
)Z
__future__rrZdnfpluginscorerrZdnf.cliZdnfrZPluginrr
ZCommandr
rrrr�<module>s__pycache__/reposync.cpython-36.opt-1.pyc000064400000024276150402642250014216 0ustar003

�f89�@s�ddlmZddlmZddlZddlZddlZddlZddlmZm	Z	ddl
mZddlZddl
Zdd�ZGdd	�d	ejj�ZejjGd
d�dejj��ZdS)�)�absolute_import)�unicode_literalsN)�_�logger)�OptionParsercCs(tjjtj��}tjjtjj|||��S)N)�dnfZi18nZucd�os�getcwd�path�realpath�join)Zintermediate�target�cwd�r�/usr/lib/python3.6/reposync.py�_pkgdir#srcs(eZdZ�fdd�Z�fdd�Z�ZS)�RPMPayloadLocationcs$tt|�j||�tjj|�|_dS)N)�superr�__init__rr
�dirname�package_dir)�self�pkg�progressZpkg_location)�	__class__rrr)szRPMPayloadLocation.__init__cs*tt|�j�}tjj|j�|j|d<|S)N�dest)rr�_target_paramsr�util�
ensure_dirr)r�tp)rrrr-s
z!RPMPayloadLocation._target_params)�__name__�
__module__�__qualname__rr�
__classcell__rr)rrr(srcs�eZdZdZed�Z�fdd�Zedd��Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Z�ZS) �RepoSyncCommand�reposyncz&download all packages from remote repocstt|�j|�dS)N)rr$r)r�cli)rrrr9szRepoSyncCommand.__init__c	Cs|jdddgtjdtd�d�|jddd	td
�d�|jddd	td
�d�|jdddd	td�d�|jdddd	td�d�|jdtd�d�|jdddd	td�d�|jddd	td�d�|jdddtd�d �|jd!dd	td"�d�|jd#dd	td$�d�|jd%d&dd	td'�d�dS)(Nz-az--arch�archesz[arch]z$download only packages for this ARCH)r�default�action�metavar�helpz--deleteF�
store_truez5delete local packages no longer present in repository)r(r)r+z--download-metadatazdownload all the metadata.z-gz
--gpgcheckzBRemove packages that fail GPG signature checking after downloadingz-mz--downloadcompsz&also download and uncompress comps.xmlz--metadata-pathzXwhere to store downloaded repository metadata. Defaults to the value of --download-path.)r+z-nz
--newest-onlyz&download only newest packages per-repoz--norepopathz,Don't add the reponame to the download path.z-pz--download-pathz./z&where to store downloaded repositories)r(r+z
--remote-timezCtry to set local timestamps of local files by the one on the serverz--sourcezdownload only source packagesz-uz--urlsz:Just list urls of what would be downloaded, don't download)�add_argumentrZ_SplitCallbackr)�parserrrr�
set_argparser<s2





zRepoSyncCommand.set_argparsercCs�|jj}d|_d|_|jj}|jjr||j�j	�xJ|jjD]>}y||}Wn$t
k
rntjjd|��YnX|j
�q:W|jjr�|j�tt|j���dkr�|jjr�tjjtd���x |j�D]}|jj�d|_q�WdS)NTzUnknown repo: '%s'.�z1Can't use --norepopath with multiple repositoriesF)r&�demandsZavailable_reposZsack_activation�base�repos�opts�repo�all�disable�KeyErrorrZCliError�enable�sourceZenable_source_repos�len�list�iter_enabled�
norepopathr�_repoZexpireZdeltarpm)rr1r3Zrepoidr5rrr�	configure\s(

zRepoSyncCommand.configurecs�d|jj_d}�x�|jjj�D�]�}|jjr8|jjd�|jj	r�|jj
r�xP|jj�D]6\}}|j|�}|rtt
|�qTtd�|}tj|�qTWn
|j	|�|jj�r|jj
�rt|jj����fdd�dD�}|�rxB|D]}|j|�}|r�t
|�Pq�Wtd�}tj|�n
|j|�|j|�}	|jj
�r8|j|	�n�|j|	�|jj�r�xt|	D]l}
|j|
�}tj|fd	d
�|
�|
_|jj|
�\}}
|dk�rRtjtd�jtjj |�|
��tj!|�d
}�qRW|jj"r|j#||	�qW|�s�t$j%j&td���dS)NTz%Failed to get mirror for metadata: %scsg|]}|�kr�|�qSrr)�.0�md_type)�mdlrr�
<listcomp>�sz'RepoSyncCommand.run.<locals>.<listcomp>�group�group_gz�group_gz_zckz(Failed to get mirror for the group file.cSs|S)Nr)�s�
local_pathrrr�<lambda>�sz%RepoSyncCommand.run.<locals>.<lambda>rzRemoving {}: {}FzGPG signature check failed.)rErFrG)'r2ZconfZ	keepcacher3r=r4Zremote_timer?ZsetPreserveRemoteTime�download_metadataZurlsZgetMetadataLocations�remote_location�printrr�warningZ
downloadcomps�dict�getcomps�get_pkglist�
print_urls�download_packagesZgpgcheck�pkg_download_path�types�
MethodTypeZlocalPkgZpackage_signature_check�formatrr
�basename�unlink�delete�delete_old_local_packagesr�
exceptions�Error)rZgpgcheck_okr5rBZmd_location�url�msgZgroup_locationsZgroup_location�pkglistrrI�result�errorr)rCr�runws^


















zRepoSyncCommand.runcCs$t|jjp|jj|jjs|jnd�S)N�)rr4ZdestdirZ
download_pathr>�id)rr5rrr�repo_target�szRepoSyncCommand.repo_targetcCs&|jjrt|jj|j�S|j|�SdS)N)r4Z
metadata_pathrrerf)rr5rrr�metadata_target�szRepoSyncCommand.metadata_targetcCsT|j|j�}tjjtjj||j��}|jtjj|d��sPtj	j
td�j||���|S)Nrdz6Download target '{}' is outside of download path '{}'.)
rfr5rr
rr�location�
startswithrr\r]rrW)rrrfrTrrrrT�s
z!RepoSyncCommand.pkg_download_pathc	
s�t�fdd�|D��}x�tj�j|��D]�\}}}x||D]t}tjj||�}|jd�r8tjj|�r8||kr8ytj|�t	j
td�|�Wq8tk
r�t	j
td�|�Yq8Xq8Wq(WdS)Nc3s|]}�j|�VqdS)N)rT)rAr)rrr�	<genexpr>�sz<RepoSyncCommand.delete_old_local_packages.<locals>.<genexpr>z.rpmz[DELETED] %szfailed to delete file %s)�setr�walkrfr
r�endswith�isfilerYr�infor�OSErrorrb)	rr5r`Zdownloaded_files�dirpathZdirnames�	filenames�filenamer
r)rrr[�s

z)RepoSyncCommand.delete_old_local_packagescCsZ|jj�}|rV|j|�}tjj|�tjj|d�}tj	j
j||d�tj
td�|j�dS)Nz	comps.xml)rz!comps.xml for repository %s saved)r?Z
getCompsFnrgrrrrr
rZyumZmiscZ
decompressrrorre)rr5Zcomps_fnZ	dest_pathrrrrrP�s

zRepoSyncCommand.getcompscCs|j|�}|jj|�dS)NT)rgr?ZdownloadMetadata)rr5rfrrrrK�s
z!RepoSyncCommand.download_metadatacCs�tjjs|j�S|j�|jjj�}t�}i}i}xp|D]h}|j�}|j	|�|j
|j�i�j
|j�g�j
|�x.|D]&}|j
|i�j
|j�g�j
|j��qvWq8W|j|j|d�d�j�}	t�}
x�|j�D]�\}}t�}
|
jt|j�dd�d�t�}x0|j�D]$}x|D]}|j	|j���qW�qWx:|j|d�j�D]&}dj|�}|
jt|||���q>Wx0|
D](}x ||D]}|
j	|j���q|W�qnWq�W|	j|j|
d��}	|	S)a\
        return union of these queries:
        - the latest NEVRAs from non-modular packages
        - all packages from stream version with the latest package NEVRA
          (this should not be needed but the latest package NEVRAs might be
          part of an older module version)
        - all packages from the latest stream version
        )Znevra_strict)Zpkg__neqT)�reverserz3{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch})rr2ZWITH_MODULESZlatestZapplyZ_moduleContainerZgetModulePackagesrkZgetArtifacts�update�
setdefaultZ
getNameStreamZ
getVersionNum�append�filter�items�add�sorted�keys�valuesrW�max�union)r�queryZmodule_packagesZ
all_artifactsZmodule_dictZartifact_versionZmodule_packageZ	artifactsZartifactZlatest_queryZlatest_stream_artifactsZ
namestreamZversion_dictZversionsZstream_artifacts�modules�moduleZ
latest_pkgZnevra�versionrrr�_get_latest�sB	





zRepoSyncCommand._get_latestcCsd|jjjtjd�j�j|jd�}|jj	r2|j
|�}|jjrH|jdd�n|jjr`|j|jjd�|S)N)�flags)Zreponame�src)Zarch)
r2�sackr��hawkey�IGNORE_MODULAR_EXCLUDESZ	availableZfiltermrer4Znewest_onlyr�r:r')rr5r�rrrrQs

zRepoSyncCommand.get_pkglistcsj�j}|jj��dkr tjj��tjj|jj	t
jd�j��d�}��fdd�|D�}|j
||�dd�dS)N)r�rcsg|]}t|��j|���qSr)rrT)rAr)rrrrrD0sz5RepoSyncCommand.download_packages.<locals>.<listcomp>F)r2�outputrr�callbackZNullDownloadProgress�drpmZ	DeltaInfor�r�r�r�Z	installedZ_download_remote_payloads)rr`r2r�Zpayloadsr)rrrrS)s
z!RepoSyncCommand.download_packagescCs@x:|D]2}|j�}|r t|�qtd�|j}tj|�qWdS)Nz$Failed to get mirror for package: %s)rLrMr�namerrN)rr`rr^r_rrrrR4s

zRepoSyncCommand.print_urls)r%)r r!r"�aliasesrZsummaryr�staticmethodr/r@rcrfrgrTr[rPrKr�rQrSrRr#rr)rrr$4s  :
	9r$)Z
__future__rrr�rZshutilrUZdnfpluginscorerrZdnf.cli.option_parserrrZdnf.clirr5Z
RPMPayloadrZpluginZregister_commandr&ZCommandr$rrrr�<module>s__pycache__/system_upgrade.cpython-36.pyc000064400000054753150402642250014453 0ustar003

�f�h�@s�dZddlmZmZmZmZddlZddlZddlZddl	Z	ddl
Z
ddlZddlm
Z
ddlmZmZddlZddlZddlmZddlmZddlZddlmZmZddlZed	�ejd
�Zejd�Zejd�Zejd
�Z eZ!dZ"ed�Z#ed�Z$ed�Z%dZ&dd�Z'dd�Z(gfdd�Z)d7dd�Z*dd�Z+Gdd�de,�Z-Gdd �d e,�Z.e.�Z/Gd!d"�d"ej0j1�Z2d#d$�Z3d%d&�Z4d'd(�Z5d)d*�Z6d+d,dd-d.gZ7Gd/d0�d0ej8�Z9Gd1d2�d2ej:j;�Z<Gd3d4�d4e<�Z=Gd5d6�d6e<�Z>dS)8zGsystem_upgrade.py - DNF plugin to handle major-version system upgrades.�)�call�Popen�check_output�CalledProcessErrorN)�journal)�_�logger)�CliError)�ucd)�serialize_transaction�TransactionReplayzthe color of the skyZ 9348174c5cc74001a71ef26bd79d302eZ fef1cc509d5047268b83a3a553f54b43Z 3e0a5636d16b4ca4bbe5321d06c6aa62Z 8cec00a1566f4d3594f116450395f06cz/usr/bin/plymouthz<Need a --releasever greater than the current system version.z�Download complete! Use 'dnf {command} reboot' to start the upgrade.
To remove cached metadata and transaction use 'dnf {command} clean'zESorry, you need to use 'download --releasever' instead of '--network'�cCs.tjddd�rtjtd��ntddg�dS)NZDNF_SYSTEM_UPGRADE_NO_REBOOTF)�defaultz!Reboot turned off, not rebooting.Z	systemctl�reboot)�os�getenvr�inforr�rr�$/usr/lib/python3.6/system_upgrade.pyrEsrcCs|d}xrdD]j}yNt|��<}x4|D],}|j�}|j|�r |t|�d�jd�Sq WWdQRXWq
tk
rrw
Yq
Xq
WdS)NzUPGRADE_GUIDE_URL=�/etc/os-release�/usr/lib/os-release�")rr)�open�strip�
startswith�len�IOError)�key�pathZrelease_file�linerrr�get_url_from_os_releaseLs



(r cCs~tjj|�sdSxhtj|�D]Z}tjj||�}||kr8qy(tjj|�rTtjj|�n
tj|�Wqt	k
rtYqXqWdS)N)
rr�isdir�listdir�join�dnf�utilZrm_rf�unlink�OSError)r�ignore�entryZfullpathrrr�	clear_dir[sr*cCs6tjj|j�|jkrtt��|r2||jkr2tt��dS)N)r$�rpm�detect_releasever�installroot�
releaseverr	�RELEASEVER_MSG�CANT_RESET_RELEASEVER)�conf�targetrrr�check_release_verlsr3cCsPytdd�}|jd�Wn2tk
rJ}zttd�|�WYdd}~XnXdS)Nz	/dev/tty0�wbs[9;0]z%Screen blanking can't be disabled: %s)r�write�	Exception�printr)Ztty�errr�disable_blankingus

r9c@s�eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	e	d�Z
e	d�Ze	d�Ze	d�Z
e	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�ZdS)�StatecCs||_i|_|j�dS)N)�	statefile�_data�_read)�selfr;rrr�__init__�szState.__init__cCspy&t|j��}tj|�|_WdQRXWnDtk
r@i|_Yn,tk
rji|_tjt	d�|j�YnXdS)Nz;Failed loading state file: %s, continuing with empty state.)
rr;�json�loadr<r�
ValueErrorr�warningr)r>�fprrrr=�s

zState._readc
CsFtjjtjj|j��t|jd��}tj	|j
|ddd�WdQRXdS)N�w�T)�indent�	sort_keys)r$r%�
ensure_dirrr�dirnamer;rr@�dumpr<)r>Zoutfrrrr5�szState.writecCs&tjj|j�rtj|j�|j�dS)N)rr�existsr;r&r=)r>rrr�clear�szState.clearcCs|S)Nr)r>rrr�	__enter__�szState.__enter__cCs|dkr|j�dS)N)r5)r>�exc_type�	exc_value�	tracebackrrr�__exit__�szState.__exit__cs"�fdd�}�fdd�}t||�S)Ncs||j�<dS)N)r<)r>�value)�optionrr�setprop�szState._prop.<locals>.setpropcs|jj��S)N)r<�get)r>)rTrr�getprop�szState._prop.<locals>.getprop)�property)rTrUrWr)rTr�_prop�szState._prop�
state_version�download_status�destdir�target_releasever�system_releasever�gpgcheck�gpgcheck_repos�repo_gpgcheck_repos�upgrade_status�upgrade_command�distro_sync�enable_disable_repos�module_platform_idN)�__name__�
__module__�__qualname__r?r=r5rMrNrRrYrZr[r\r]r^r_r`rarbrcrdrerfrrrrr:�s(
r:c@s@eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dS)�PlymouthOutputz�A plymouth output helper class.

    Filters duplicate calls, and stops calling the plymouth binary if we
    fail to contact it.
    cCsd|_t�|_d|_dS)NT)�alive�dict�
_last_args�	_last_msg)r>rrrr?�szPlymouthOutput.__init__cGsj||jj|�k}|jr|s$|dkrdytt|f|�dk|_Wntk
rXd|_YnX||j|<|jS)Nz--pingrF)rmrVrkr�PLYMOUTHr')r>�cmd�argsZdupe_cmdrrr�	_plymouth�s
zPlymouthOutput._plymouthcCs
|jd�S)Nz--ping)rr)r>rrr�ping�szPlymouthOutput.pingcCs4|jr |j|kr |jdd|j�||_|jdd|�S)Nzhide-messagez--textzdisplay-message)rnrr)r>�msgrrr�message�szPlymouthOutput.messagecCsRd}y$ttdg�}tjdt|��r&d}Wnttfk
r@YnX|jdd|�S)NZupdatesz--helpz--system-upgradezsystem-upgradezchange-modez--)rro�re�searchr
rr'rr)r>�mode�srrr�set_mode�szPlymouthOutput.set_modecCs|jddt|��S)Nz
system-updatez
--progress)rr�str)r>Zpercentrrr�progress�szPlymouthOutput.progressN)
rgrhri�__doc__r?rrrsrurzr|rrrrrj�s

rjc@s$eZdZdd�Zdd�Zdd�ZdS)�PlymouthTransactionProgresscCs|j||||�dS)N)�_update_plymouth)r>�package�actionZti_doneZti_totalZts_doneZts_totalrrrr|�sz$PlymouthTransactionProgress.progresscCsd|dkrdS|tjjkr0tjtd||��ntjdtd||��tj|j||||��dS)N�g�V@�Zg$@)r$�callbackZ
PKG_VERIFY�Plymouthr|�intru�
_fmt_event)r>r�r��current�totalrrrr�sz,PlymouthTransactionProgress._update_plymouthcCs tjjj||�}d||||fS)Nz[%d/%d] %s %s...)r$�transactionZACTIONSrV)r>r�r�r�r�rrrr�sz&PlymouthTransactionProgress._fmt_eventN)rgrhrir|rr�rrrrr~�sr~ccsJtj�}|j|jdd�d}x(|D] }|d}||kr8q"|}|Vq"WdS)zVFind all boots with this message id.

    Returns the entries of all found boots.
    r)�
MESSAGE_IDZ_UIDN�_BOOT_ID)r�ReaderZ	add_match�hex)�
message_id�jZoldbootr)Zbootrrr�
find_bootss
r�c
Cstttd��d
}xJttt��D]:\}}tdj|d|d|d|jdd�|jdd���qW|dkrpttd	��dS)Nz3The following boots appear to contain upgrade logs:r�u){} / {.hex}: {:%Y-%m-%d %H:%M:%S} {}→{}r�Z__REALTIME_TIMESTAMP�SYSTEM_RELEASEVERz??�TARGET_RELEASEVERz-- no logs were found --���r�)r7r�	enumerater��ID_TO_IDENTIFY_BOOTS�formatrV)�nr)rrr�	list_logs s
r�cCsZtt|��}y(|dkrt�|dkr*|d8}||dStk
rTttd���YnXdS)Nrr�r�z!Cannot find logs with this index.)�listr��
IndexErrorr	r)r�r�Zbootsrrr�	pick_boot.sr�cCsDtt|�}tdd|jg�}|j�|j}|dkr@tjjt	d���dS)NZ
journalctlz--bootr�z%Unable to match systemd journal entry)
r�r�rr��wait�
returncoder$�
exceptions�Errorr)r�Zboot_idZprocessZrcrrr�show_log=s
r�ZdownloadZclean�upgrade�logcs eZdZdZ�fdd�Z�ZS)�SystemUpgradePluginzsystem-upgradecs8tt|�j||�|r4|jt�|jt�|jt�dS)N)�superr�r?Zregister_command�SystemUpgradeCommand�OfflineUpgradeCommand�OfflineDistrosyncCommand)r>�base�cli)�	__class__rrr?Ns


zSystemUpgradePlugin.__init__)rgrhri�namer?�
__classcell__rr)r�rr�Ksr�cs(eZdZdEZed�ZdZ�fdd�Zedd��Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�Zd3d4�Zd5d6�Z d7d8�Z!d9d:�Z"d;d<�Z#d=d>�Z$d?d@�Z%dAdB�Z&dCdD�Z'�Z(S)Fr��system-upgrade�fedupz+Prepare system for upgrade to a new releasezvar/lib/dnf/system-upgradecsjtt|�j|�tjj|jjj|j	�|_
tjj|j
d�|_tjj|jjjd�|_t
tjj|j
d��|_dS)Nzsystem-upgrade-transaction.jsonz
system-updatezsystem-upgrade-state.json)r�r�r?rrr#r�r1r-�DATADIR�datadir�transaction_file�
magic_symlinkr:�state)r>r�)r�rrr?\s
zSystemUpgradeCommand.__init__cCsJ|jdddtd�d�|jddtdd	jt�d
�|jdttd�d
�dS)Nz--no-downgraderdZstore_falsez=keep installed packages if the new release's version is older)�destr��help�tidr�z[%s]�|)�nargs�choices�metavarz--numberzwhich logs to show)�typer�)�add_argumentr�CMDSr#r�)�parserrrr�
set_argparserds
z"SystemUpgradeCommand.set_argparsercCs(tj||tj|jj|jjtjjd�dS)zLog directly to the journal.)r�ZPRIORITYr�r�ZDNF_VERSIONN)	r�sendZ
LOG_NOTICEr�r^r]r$�const�VERSION)r>rur�rrr�
log_statusnszSystemUpgradeCommand.log_statuscCs|jd�|jd�dS)NZcheck�
pre_configure)�	_call_sub)r>rrrr�ws
z"SystemUpgradeCommand.pre_configurecCs|jd�dS)N�	configure)r�)r>rrrr�{szSystemUpgradeCommand.configurecCs|jd�dS)N�run)r�)r>rrrr�~szSystemUpgradeCommand.runcCs|jd�dS)Nr�)r�)r>rrr�run_transaction�sz$SystemUpgradeCommand.run_transactioncCs|jd�dS)NZresolved)r�)r>rrr�run_resolved�sz!SystemUpgradeCommand.run_resolvedcCs.t||d|jjdd�}t|�r*|�dS)Nrr)�getattr�optsr��callable)r>r�Zsubfuncrrrr��szSystemUpgradeCommand._call_subcCs(|jjtkr$td�j|d�}t|��dS)NzFIncompatible version of data. Rerun 'dnf {command} download [OPTIONS]')�command)r�rZ�
STATE_VERSIONrr�r	)r>r�rtrrr�_check_state_version�sz)SystemUpgradeCommand._check_state_versioncCs*|j|jj_|jjr|jjnd|jj_dS)N)r�r�r1�cachedirr�r\)r>rrr�
_set_cachedir�sz"SystemUpgradeCommand._set_cachedircCs�ttjjtjjg�}ttjj�}i}i}xl|jjjD]^}|j	|krp|j
}|j|j|j
ji�jt|�i�|j	<q6|j	|kr6|j|jt|j
�i�|j	<q6W||fS)z�
        forward = {repoid:{pkg_nevra: {tsi.action: tsi.reason}}
        reverse = {pkg_nevra: {tsi.action: tsi.reason}}
        :return: forward, reverse
        )�setr$r�ZBACKWARD_ACTIONS�libdnfZ!TransactionItemAction_REINSTALLEDZFORWARD_ACTIONSr�r�r��pkg�reason�
setdefault�repo�idr{)r>Zbackward_actionZforward_actionsZforward�reverseZtsir�rrr�%_get_forward_reverse_pkg_reason_pairs�s
&
z:SystemUpgradeCommand._get_forward_reverse_pkg_reason_pairscCsb|j|jj_|jjr|jjnd|jj_d|jjkrJ|jjrJtt	d���nd|jjkr^d|j_dS)Nzoffline-distrosynczFCommand 'offline-distrosync' cannot be used with --no-downgrade optionzoffline-upgradeF)
r�r�r1r�r�r\r�rdr	r)r>rrr�pre_configure_download�sz+SystemUpgradeCommand.pre_configure_downloadcCs|j�dS)N)r�)r>rrr�pre_configure_reboot�sz)SystemUpgradeCommand.pre_configure_rebootcCs.|j�|jjr|jj|j_|jj|jj_dS)N)	r�r�rer��repos_edr]r�r1r.)r>rrr�pre_configure_upgrade�sz*SystemUpgradeCommand.pre_configure_upgradecCs|j�dS)N)r�)r>rrr�pre_configure_clean�sz(SystemUpgradeCommand.pre_configure_cleancCsd|jjksd|jjkr�tjtd��t�}|rLtd�}tj|jt|���|j	j
�r�td�}|j	jjs�|j	j
jdj|�dj|�d�r�tjtd	��tjd
�t|j	j|jjd�nd|jjkr�|jj|j�d
|jj_d
|jj_d
|jj_d
|jj_d
|jj_|j	jjdg7_dS)Nzsystem-upgrader�z\WARNING: this operation is not supported on the RHEL distribution. Proceed at your own risk.z-Additional information for System Upgrade: {}zyBefore you continue ensure that your system is fully upgraded by running "dnf --refresh upgrade". Do you want to continuez
{} [y/N]: z
{} [Y/n]: )rtZdefaultyes_msgzOperation aborted.r�)r2zoffline-upgradeTZtest)r�r�rrCrr rr�r
r�Z
_promptWantedr1Zassumeno�outputZuserconfirm�error�sys�exitr3r.r�Z _populate_update_security_filter�demands�	root_user�	resolving�available_repos�sack_activationZfreshest_metadataZtsflags)r>Zhelp_urlrtrrr�configure_download�s*






z'SystemUpgradeCommand.configure_downloadcCsd|jj_dS)NT)r�r�r�)r>rrr�configure_reboot�sz%SystemUpgradeCommand.configure_rebootcCs�d|jj_d|jj_d|jj_d|jj_|jj|j_|jj	dk	rN|jj	|j
j_	|jjdk	r�x$|j
j
j�D]}|j|jjk|_	qhW|jjdk	r�x$|j
j
j�D]}|j|jjk|_q�W|jj|j
j_d|jj_d|j
j_t�|jj_d|j
j_d|j
j_dS)NTF)r�r�r�r�r�r�r�rdr�r_r�r1r`�repos�valuesr�ra�
repo_gpgcheckrfZ	cacheonlyZ	assumeyesr~Ztransaction_displayZclean_requirements_on_removeZinstall_weak_deps)r>r�rrr�configure_upgrade�s&






z&SystemUpgradeCommand.configure_upgradecCsd|jj_dS)NT)r�r�r�)r>rrr�configure_cleansz$SystemUpgradeCommand.configure_cleancCsdS)Nr)r>rrr�
configure_logsz"SystemUpgradeCommand.configure_logcCs~|jjdksttd���|j|jj�|jj|jjkrRtd�j|jjd�}t|��t	j
j|j�rlttd���t
jj|j�dS)N�completezsystem is not ready for upgradezZthe transaction was not prepared for '{command}'. Rerun 'dnf {command} download [OPTIONS]')r�zupgrade is already scheduled)r�r[r	rr�r�r�rcr�rr�lexistsr�r$r%rIr�)r>rtrrr�check_rebootsz!SystemUpgradeCommand.check_rebootcCs�tjj|j�s$tjtd��td��tj|j�|j	krLtjtd��td��t
jjj
|j�|jj}|sp|jj}|j|�|jjdks�td�j|d�}t|��dS)Nz-trigger file does not exist. exiting quietly.rz1another upgrade tool is running. exiting quietly.�readyz/use 'dnf {command} reboot' to begin the upgrade)r�)rrr�r�rrr�
SystemExit�readlinkr�r$ZyumZmiscZunlink_fr�rcr�r�r�rbr�r	)r>r�rtrrr�
check_upgrades
z"SystemUpgradeCommand.check_upgradec	Cs,tj|j|j�|j�}d|_WdQRXdS)Nr�)r�symlinkr�r�r�rb)r>r�rrr�run_prepare,sz SystemUpgradeCommand.run_preparecCs6|j�|jjddksdS|jtd�t�t�dS)NrrzRebooting to perform upgrade.)r�r�r�r�r�REBOOT_REQUESTED_IDr)r>rrr�
run_reboot3s
zSystemUpgradeCommand.run_rebootc	s��jjr�jj�n
�jj��jjdkr��jj��fdd��jjjD�}|r\�jj|��fdd��jjj	D�}|r��jj|��j
�$}d|_�jjj
|_�jjj|_WdQRXdS)N�offline-upgrade�offline-distrosynccs$g|]}�jjjj|j�r|j�qSr)r��history�grouprVr�)�.0�g)r>rr�
<listcomp>Gsz5SystemUpgradeCommand.run_download.<locals>.<listcomp>cs$g|]}�jjjj|j�r|j�qSr)r�r��envrVr�)rr)r>rrrJsZdownloading)r�r�)r�rdr�Zupgrade_allr�Z
read_comps�comps�groupsZenv_group_upgradeZenvironmentsr�r[r1r.r]r\)r>Zinstalled_groupsZinstalled_environmentsr�r)r>r�run_download=s

z!SystemUpgradeCommand.run_downloadc
Cs�d}|j�}d|_|j}WdQRX|dkr4td�}n|dkrFtd�}ntd�}|j|t�tj�tjd�tj	|�t
�t|j|j
�|_|jj�dS)	N�Z
incompletezoffline-upgradez1Starting offline upgrade. This will take a while.zoffline-distrosyncz4Starting offline distrosync. This will take a while.z0Starting system upgrade. This will take a while.r)r�rbrcrr��UPGRADE_STARTED_IDr�rzr|rur9rr�r��replayr�)r>r�r�rtrrr�run_upgradeSs 



z SystemUpgradeCommand.run_upgradec	Csdtjtd��t|jjjtjj	|jjj�j
g�|j�$}d|_d|_
d|_d|_d|_WdQRXdS)NzCleaning up downloaded data...)rrrr*r�r1r�r$Z	persistorZTempfilePersistorZdb_pathr�r[rZrbrcr\)r>r�rrr�	run_cleanms
zSystemUpgradeCommand.run_cleancCs |jjrt|jj�nt�dS)N)r�Znumberr�r�)r>rrr�run_logzszSystemUpgradeCommand.run_logcCs|jj�dS)z5Adjust transaction reasons according to stored valuesN)r
Zpost_transaction)r>rrr�resolved_upgrade�sz%SystemUpgradeCommand.resolved_upgradecCs�|jjj�}|j�s&tjtd��dSt|�}yLt|j	d��"}t
j||ddd�|jd�WdQRXt
td�j|j	��Wn<tk
r�}z tjjtd�jt|����WYdd}~XnXtjj|jjj�}|j��}d	|_t|_|jj|_|jjj|_d
d�|jjj �D�|_!dd�|jjj �D�|_"||_#|jjj$|_%|jjj&|_&|jj'|_(|jjj)|_)|jj*|_+WdQRXt,j|jj*d
�}tj|�|j-td�t.�dS)NzKThe system-upgrade transaction is empty, your system is already up-to-date.rErFT)rGrH�
zTransaction saved to {}.zError storing transaction: {}r�cSsg|]}|jr|j�qSr)r_r�)rr�rrrr�sz=SystemUpgradeCommand.transaction_download.<locals>.<listcomp>cSsg|]}|jr|j�qSr)r�r�)rr�rrrr�s)r�zDownload finished.)/r�r�Zget_currentZpackagesrrrrrr�r@rKr5r7r�r'r$r�r	r{r+r,r1r-r�r[r�rZr�rdr_r�r�r`rar^r.r]rfr�rer\r�rc�DOWNLOAD_FINISHED_MSGr��DOWNLOAD_FINISHED_ID)r>r��data�fr8Z
system_verr�rtrrr�transaction_download�s:,


z)SystemUpgradeCommand.transaction_downloadcCs@tjtd��|jtd�t�|j�|jjddkr<t�dS)Nz.Upgrade complete! Cleaning up and rebooting...rr�)	r�rurr��UPGRADE_FINISHED_IDrr�r�r)r>rrr�transaction_upgrade�s
z(SystemUpgradeCommand.transaction_upgrade)r�r�))rgrhri�aliasesr�summaryr�r?�staticmethodr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrr
rrrr�rr)r�rr�VsF
	
 


(r�c@seZdZdZed�ZdS)r��offline-upgradez%Prepare offline upgrade of the systemN)r)rgrhrirrrrrrrr��sr�c@seZdZdZed�ZdS)r��offline-distrosyncz(Prepare offline distrosync of the systemN)r)rgrhrirrrrrrrr��sr�)N)?r}�
subprocessrrrrr@rZos.pathrvr�ZuuidZsystemdrZdnfpluginscorerrr$Zdnf.clir	Zdnf.i18nr
Zdnf.transactionZdnf.transaction_srrrZlibdnf.confr�ZUUIDrr�r	rr�ror/rr0r�rr r*r3r9�objectr:rjr�r�ZTransactionProgressr~r�r�r�r�r�ZPluginr�r�ZCommandr�r�r�rrrr�<module>sd




	@.	e__pycache__/versionlock.cpython-36.pyc000064400000020202150402642250013734 0ustar003

�f0�@s\ddlmZddlmZddlmZmZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZed�Zed�Zed�Zed�Zed	�Zed
�Zed�Zed�Zed
�Zed�Zed�Zed�Zed�ZdaGdd�dej�ZdddgZddgZ dgZ!dddgee e!Z"Gdd�dej#j$�Z%dd �Z&d!d"�Z'd#d$�Z(d%d&�Z)d'd(�Z*dS))�)�absolute_import)�unicode_literals)�_�loggerNz-Unable to read version lock configuration: %szLocklist not setzAdding versionlock on:zAdding exclude on:z*Package already locked in equivalent form:zPackage {} is already lockedzPackage {} is already excludedzDeleting versionlock for:zNo package found for:z1Excludes from versionlock plugin were not appliedzCVersionlock plugin: number of lock rules from file "{}" applied: {}zFVersionlock plugin: number of exclude rules from file "{}" applied: {}z,Versionlock plugin: could not parse pattern:cs8eZdZdZ�fdd�Zdd�Zdd�Zdd	�Z�ZS)
�VersionLock�versionlockcs8tt|�j||�||_||_|jdk	r4|jjt�dS)N)�superr�__init__�base�cliZregister_command�VersionLockCommand)�selfr
r)�	__class__��!/usr/lib/python3.6/versionlock.pyr	6s

zVersionLock.__init__cCs4|j|jj�}|jd�o.|jdd�o.|jdd�adS)N�main�locklist)Zread_configr
ZconfZhas_sectionZ
has_option�get�locklist_fn)r
Zcprrr�config=szVersionLock.configcCs0|jdkrd}n|jjj}|dkr,|jjj}|S)NT)r�demandsZplugin_filtering_enabledZ	resolving)r
Zenabledrrr�locking_enabledCs


zVersionLock.locking_enabledc
Cs�|j�stjt�dS|jjj�jdd�}|jjj�jdd�}t�}ddg}x�t	�D]�}d}|r||ddkr||dd�}d}t
jj|�j
tjtjtjtjtjgd�}|r�||d7<ntjdt|�qTxF|D]>}|j|jj�}	|r�|j|	�}n|j|j�|j|	�}|	r�Pq�WqTW|d�r6tjtjt|d��|d�rVtjtjt|d��|�r�|jjj�jt|�d�}
|
j|�}|j|�}|jjj�j |d	�}|j|j|��}|j tj!d
�|�r�|jjj"|�dS)NT)�emptyr�!�)Zformsz%s %s)Z
name__glob)Z	obsoletes)Z
reponame__neq)#rr�debug�NO_VERSIONLOCKr
�sackZquery�filter�set�_read_locklist�dnf�subject�SubjectZget_nevra_possibilities�hawkeyZ
FORM_NEVRAZ	FORM_NEVRZFORM_NEVZFORM_NAZ	FORM_NAME�error�NEVRA_ERRORZto_query�union�add�name�
APPLY_EXCLUDE�formatr�
APPLY_LOCK�list�
differenceZfiltermZSYSTEM_REPO_NAMEZadd_excludes)
r
Zexcludes_queryZlocked_queryZlocked_names�count�patZexclZpossible_nevrasZnevraZ	pat_queryZall_versionsZother_versionsZobsoletes_queryrrrrLsP







zVersionLock.sack)	�__name__�
__module__�__qualname__r)r	rrr�
__classcell__rr)rrr2s
	r�excludezadd-!zadd!�delete�delZ	blacklistr(�clearr-c@s8eZdZdZed�ZdZedd��Zdd�Z	dd	�Z
d
S)rrzcontrol package version locksz5[add|exclude|list|delete|clear] [<package-nevr-spec>]cCs:|jdddtd�d�|jdddd	�|jd
ddd	�dS)
Nz--rawF�
store_truez@Use package specifications as they are, do not try to parse them)�default�action�help�
subcommand�?z[add|exclude|list|delete|clear])�nargs�metavar�package�*z[<package-nevr-spec>])�add_argumentr)�parserrrr�
set_argparser�s
z VersionLockCommand.set_argparsercCsd|jj_d|jj_dS)NT)rrZsack_activationZavailable_repos)r
rrr�	configure�s
zVersionLockCommand.configurecCs�d}|jjr�|jjtkr2d}|jjjd|jj�nd|jjtkrDd}nR|jjtkr|td�j|jj�}t	j
|tjj
dd�d}n|jjtkr�d}n|jj}|dk�rt|jj�}xj|D]b\}}|d	kr�t|j|g|jjd
dtj�td	�q�||k�rtjjtj|���q�tjdt|�q�W�n�|dk�r�t|jj�}xn|D]f\}}|d	k�rpt|j|g|jjd
dtj�td�n,||k�r�tjjtj|���ntjdt|��q8W�n4|dk�r�xt�D]}t|��q�W�n|dk�rt�s�tjjt ��t!td��}WdQRXn�|dk�r�t�stjjt ��t"j#j$t�}t%j&|dd�\}	}
t�}d}t"j'|	dd��V}	xN|D]F}
t(|
|jj��r�tdt)|
f�|d7}�q\|	j*|
�|	j*d��q\WWdQRX|�s�t"j+|
�nt"j,|
d�t"j-|
t�dS)Nr-r(rr5z@Subcommand '{}' is deprecated. Use 'exclude' subcommand instead.�)�
stacklevelr6�Tz
# Added lock on %s
z%s %sFz
# Added exclude on %s
rr8�wz.tmp)�dir�suffixr�
i����).Zoptsr=�ALL_CMDSrA�insert�EXC_CMDS�DEP_EXC_CMDSrr+�warnings�warnr!�
exceptions�DeprecationWarning�DEL_CMDS�_search_locklist�_write_locklistr
�raw�timeZctime�ADDING_SPEC�Error�ALREADY_EXCLUDEDr�info�
EXISTING_SPEC�EXCLUDING_SPEC�ALREADY_LOCKEDr �printr�NO_LOCKLIST�open�os�path�dirname�tempfileZmkstemp�fdopen�_match�
DELETING_SPEC�write�unlink�chmod�rename)r
�cmd�msg�results�entryZ	entry_cmdr0�frh�outZtmpfilename�locked_specsr/�entrrr�run�s|














zVersionLockCommand.runN)r)r1r2r3�aliasesrZsummaryZusage�staticmethodrErFryrrrrr�s
rcCs�g}y`tstjjt��tt��>}x6|j�D]*}|jd�s*|j�dkrFq*|j	|j��q*WWdQRXWn2t
k
r�}ztjjt|��WYdd}~XnX|S)N�#rI)rr!rUr]rdre�	readlines�
startswith�strip�append�IOError�NOT_READABLE)rZllfile�line�errrr �s
 "r cCs�g}d}}t�}xl|D]d}d}xH|D]@}d}}t||g�r&|}|jd�rPdnd}|j||f�d}q&W|s|j||f�qW|S)NrIFrr5r(T)r rkr~r�)rArs�foundr;rw�pkg�matchrxrrrrX�s

rXcCs@t�}x�|D]�}|r |j|�qtjj|�}	d}
|rN|	jtjj|�dddd�}
|
sf|	j|jdddd�}
|
sztdt	|f�x|
D]}|jt
|j��q�WqW|�r<ydts�tj
jt��ttd��@}|j|�x.|D]&}
td||
f�|jd||
f�q�WWdQRXWn4tk
�r:}ztj
jt|��WYdd}~XnXdS)NTF)Z
with_nevraZ
with_providesZwith_filenamesz%s %s�az%s%s
)rr(r!r"r#Zget_best_queryrZ_rpmdb_sackrc�
NOTFOUND_SPEC�pkgtup2specZpkgtuprrUr]rdrermr�r�)r
�argsrZZ
try_installedZcommentr_�prefixZspecsr0ZsubjZpkgsr�ru�specr�rrrrYs8





$rYcCs&|jd�}x|D]}||krdSqWytj|�}Wntjk
rHdSXx�d|jd|j|jfd|j|jfd|j|j|jfd|j|j|jfd	|j|j|j|jfd
|j|j|j|jfd|j|j|j|j|jfd|j|j|j|j|jff	D]&}x |D]}t	j	||��rdS�qWq�WdS)
NrTFz%sz%s.%sz%s-%sz%s-%s-%sz%s-%s:%sz%s-%s-%s.%sz%s-%s:%s-%sz%s:%s-%s-%s.%sz%s-%s:%s-%s.%s)
�lstripr$Zsplit_nevraZValueExceptionr)�arch�version�release�epoch�fnmatch)rxZpatternsr0�nr)rrrrk#s,

"
rkcCsd||p
d||fS)Nz
%s-%s:%s-%s.*�0r)r)r�r�r�r�rrrr�=sr�)+Z
__future__rrZdnfpluginscorerrr!Zdnf.cliZdnf.exceptionsr�r$rfrir[rSr�rdr\rar`rbr^rlr�rr,r*r&rZPluginrrQrWrRrOrZCommandrr rXrYrkr�rrrr�<module>sHO
[ __pycache__/debuginfo-install.cpython-36.pyc000064400000013665150402642250015023 0ustar003

�gt`L+�@sNddlmZmZddlZddlmZGdd�dej�ZGdd�dejj	�Z
dS)�)�_�loggerN)�Packagecs,eZdZdZdZ�fdd�Zdd�Z�ZS)�DebuginfoInstallz5DNF plugin supplying the 'debuginfo-install' command.zdebuginfo-installcs4tt|�j||�||_||_|dk	r0|jt�dS)zInitialize the plugin instance.N)�superr�__init__�base�cliZregister_command�DebuginfoInstallCommand)�selfrr	)�	__class__��'/usr/lib/python3.6/debuginfo-install.pyr s
zDebuginfoInstall.__init__cCsf|j|jj�}|jd�o.|jdd�o.|jdd�}|rbtjj|j�j	�j
dd�}t|�rb|jjj
�dS)N�main�
autoupdatez*-debuginfo)Z
name__glob)Zread_configr�confZhas_sectionZ
has_optionZ
getboolean�dnf�sackZ_rpmdb_sack�query�filterm�len�repos�enable_debug_repos)rZcprZdbginfor
r
r�config(s
zDebuginfoInstall.config)�__name__�
__module__�__qualname__�__doc__�namerr�
__classcell__r
r
)rrrsrcsheZdZdZdZed�Z�fdd�Zedd��Z	dd	�Z
d
d�Zdd
�Zdd�Z
dd�Zdd�Z�ZS)r
z! DebuginfoInstall plugin for DNF �debuginfo-installzinstall debuginfo packagescs4tt|�j|�t�|_t�|_t�|_t�|_dS)N)rr
r�set�available_debuginfo_missing�available_debugsource_missing�installed_debuginfo_missing�installed_debugsource_missing)rr	)rr
rr:s
z DebuginfoInstallCommand.__init__cCs|jddd�dS)N�package�+)�nargs)�add_argument)�parserr
r
r�
set_argparserBsz%DebuginfoInstallCommand.set_argparsercCs0|jj}d|_d|_d|_d|_|jjj�dS)NT)	r	�demandsZ	resolvingZ	root_userZsack_activationZavailable_reposrrr)rr,r
r
r�	configureFsz!DebuginfoInstallCommand.configurecCs�g}ttj�}ttj�}�x�|jjD�]�}tjj|�j	|j
jdd�}|d}|sxtj
td�|j
jjj|��|j|�q$|j�j�}|j|j�j��xdt|j��D]T}|jtj�r�|d|�|kr�|j|�|jtj�r�|d|�|kr�|j|�q�W�x�|j�D�]�}	|	d}
|
j�r�i}x"|	D]}|j|jg�j|��q(Wxj|j�D]^}
|
d}|j|j |��s�|j|j!|��s�|j"j#t$|��|j|j%|��sP|j&j#t$|���qPW�q|
j'jtj��s�|
j'jtj��r�|j(|	��q|ddk	�rb|j)|
j |d��s2|j)|
j!|d��s2|j*j#dj+|
j'|
j,��|j)|
j%|d��s|j-j#dj+|
j'|
j,���q|j.|
j |	��s�|j.|
j!|	��s�|j*j#dj+|
j'|
j,��|j.|
j%|	��s|j-j#dj+|
j'|
j,���qWq$W|j*�r�tj
td�d	j/t0|j*���|j-�rtj
td
�d	j/t0|j-���|j"�r8tj
td�d	j/t0|j"���|j&�r\tj
td�d	j/t0|j&���|�r�|j
j1j2�r�tj3j4td
�dj/|�d��dS)NF)Zwith_srcrzNo match for argument: %srZnevraz{}-{}zICould not find debuginfo package for the following available packages: %sz, zKCould not find debugsource package for the following available packages: %szICould not find debuginfo package for the following installed packages: %szKCould not find debugsource package for the following installed packages: %szUnable to find a match� )Zpkg_spec)5rrZDEBUGINFO_SUFFIXZDEBUGSOURCE_SUFFIXZoptsr&rZsubjectZSubjectZget_best_solutionrrr�infor�outputZtermZbold�appendZ	availableZ
_name_dict�updateZ	installed�list�keys�endswith�pop�valuesZ_from_system�
setdefault�arch�_install_debug_from_system�
debug_nameZsource_debug_namer$�add�strZdebugsource_namer%r�_install�_install_debugr"�format�evrr#�_install_debug_no_nevra�join�sortedr�strict�
exceptionsZPackagesNotAvailableError)rZerrors_specZdebuginfo_suffix_lenZdebugsource_suffix_lenZpkgspecZsolutionrZpackage_dictr�pkgsZ	first_pkgZ	arch_dict�pkgZpackage_arch_listr
r
r�runNs�





zDebuginfoInstallCommand.runcCs:|jjj�j||j|j|j|jd�}|r6|j|�dSdS)N)r�epoch�version�releaser9TF)	rrr�filterrJrKrLr9r>)rr;rHrr
r
rr:�s

z2DebuginfoInstallCommand._install_debug_from_systemcCs�i}|jdk	r|j|d<|jdk	r,|j|d<|jdk	r@|j|d<|jdk	rT|j|d<|jjj�jfd|i|��}|r�|j|�dSdS)NZepoch__globZ
version__globZ
release__globZ
arch__globrTF)	rJrKrLr9rrrrMr>)rr;Z
base_nevra�kwargsrr
r
rr?�s








z&DebuginfoInstallCommand._install_debugcs8|jjj�j�fdd�|D�d�}|r4|j|�dSdS)Ncsg|]}dj�|j|j��qS)z{}-{}.{})r@rAr9)�.0�p)r;r
r�
<listcomp>�szCDebuginfoInstallCommand._install_debug_no_nevra.<locals>.<listcomp>)Znevra_strictTF)rrrrr>)rr;rGrr
)r;rrB�s
z/DebuginfoInstallCommand._install_debug_no_nevracCs:tjj|jj�}|j|d�|jjj||jjj	d�dS)N)rH)ZselectZoptional)
r�selectorZSelectorrrr!ZgoalZinstallrrE)rrGrRr
r
rr>�sz DebuginfoInstallCommand._install)r )rrrr�aliasesrZsummaryr�staticmethodr+r-rIr:r?rBr>rr
r
)rrr
4s|
	r
)ZdnfpluginscorerrrZdnf.packagerZPluginrr	ZCommandr
r
r
r
r�<module>s__pycache__/etckeeper.cpython-36.opt-1.pyc000064400000002540150402642250014311 0ustar003

�:vh�@s`ddlZddlZddlZejd�ZGdd�dej�Zedkr\ddlm	Z	e	ddgdd	id
�dS)�Nz
dnf.pluginc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�	Etckeeper�	etckeepercCs�tjd|�yLtdd��8}tjd|f||dd�}|dkrLtjjd||f��WdQRXWn4tk
r�}ztj	d	||f�WYdd}~XnXdS)
NzEtckeeper plugin: %sz	/dev/null�wbrT)�stdout�stderrZ	close_fdsrz"etckeeper %s" returned: %dz Failed to run "etckeeper %s": %s)
�logger�debug�open�
subprocessZcall�dnf�
exceptions�Error�OSErrorZwarning)�selfZcommand�devnull�ret�err�r�/usr/lib/python3.6/etckeeper.py�_run_commands
"zEtckeeper._run_commandcCs|jd�dS)Nzpre-install)r)rrrr�resolved!szEtckeeper.resolvedcCs|jd�dS)Nzpost-install)r)rrrr�transaction$szEtckeeper.transactionN)�__name__�
__module__�__qualname__�namerrrrrrrrsr�__main__)�setupz
dnf-etckeeperzdnf-pluginsz
etckeeper-dnf)rZpackagesZpackage_dir)
Zloggingr
rZ	getLoggerrZPluginrrZdistutils.corerrrrr�<module>
s
__pycache__/config_manager.cpython-36.opt-1.pyc000064400000016133150402642250015304 0ustar003

�gt`�*�@s�ddlmZddlmZddlmZmZmZddlZddlZddl	Zddl
ZddlZddlZddl
Z
ddlZddlZejjGdd�dejj��Zdd�Zejd	�Zejd
�Zejd�Zejd�Zd
d�ZdS)�)�absolute_import)�unicode_literals)�_�logger�P_Nc@sReZdZdgZed�jejjd�Z	e
dd��Zdd�Zdd	�Z
d
d�Zdd
�ZdS)�ConfigManagerCommandzconfig-managerz4manage {prog} configuration options and repositories)�progcCs�|jdddtd�d�|jdddtd	�d
�|jdgdd
td�d�|jdddtd�d
�|jdddtd�d
�|j�}|jddddtd�d�|jddddtd�d�dS)N�crepo�*�repozrepo to modify)�nargs�metavar�helpz--saveF�
store_truez/save the current options (useful with --setopt))�default�actionrz
--add-repo�appendZURLz8add (and enable) the repo from the specified file or url)rrr
rz--dumpz,print current configuration values to stdoutz--dump-variableszprint variable values to stdoutz
--set-enabled�set_enabledz"enable repos (automatically saves))r�destrrz--set-disabled�set_disabledz#disable repos (automatically saves))�add_argumentrZadd_mutually_exclusive_group)�parserZenable_group�r�$/usr/lib/python3.6/config_manager.py�
set_argparser)s,z"ConfigManagerCommand.set_argparsercCs�|jj}d|_|jjgkp@|jjp@|jjp@|jjp@|jjp@|jj	sp|jj
jtd�j
djdddddd	d
dg���|jjgkr�tjtd��|jjs�|jj	s�|jjs�|jjr�d|_d
d�|jjD�}dd�|D�|j_dS)NTz.one of the following arguments is required: {}� z--savez
--add-repoz--dumpz--dump-variablesz
--set-enabledz--enablez--set-disabledz	--disablez{Warning: --enablerepo/--disablerepo arguments have no meaningwith config manager. Use --set-enabled/--set-disabled instead.cSsg|]}|dkr|jd��qS)�,)�split)�.0�xrrr�
<listcomp>_sz2ConfigManagerCommand.configure.<locals>.<listcomp>cSs"g|]}|D]}|dkr|�qqS)�r)rZsublist�itemrrrr as)�cli�demandsZavailable_repos�opts�add_repo�save�dump�dump_variablesrrZ	optparser�errorr�format�joinZrepos_edrZwarningZ	root_userr	)�selfr$Z	temp_listrrr�	configureBs*zConfigManagerCommand.configurecCs|jjr|j�n|j�dS)zExecute the util action here.N)r%r&�modify_repo)r-rrr�runds
zConfigManagerCommand.runc	s�g�t������fdd�}�jjrnx�jjD]�|�d�q.Wt�jd�r�xL�jjj�D]�|�d�qZWn,t�jd�r�x�jjj�D]�|�d�q�W�r�tjjt	d�dj
�����jj}i}t�jd�r�jj
r�jj
}�jj�rx*�jjjj�D]\�}td	�|f�q�W�jj�s0d
�jjk�r��jj�r\|�r\�jjj�jjjd
|j|��jj�r�t�jjjd
��t�jjj����s�dS�jj�s��jj�r�d�j_x�t��D]�}i}�jj�r�d|d
<n�jj�r�d|d
<t�jd��r*x4�jjj�D]$\}}tj|j|��r|j|��qW�jj�rT|�rT�jjj|j|j|j|��jj�r�t�jjjd|j��t|j���q�WdS)z< process --set-enabled, --set-disabled and --setopt options cs0�jjj|�}|s�j��n|r,�j|�dS)N)�baseZreposZget_matching�add�extend)�keyZadd_matching_reposZmatching)�matching_repos�name�not_matching_repos_idr-rr�match_reposqs
z5ConfigManagerCommand.modify_repo.<locals>.match_reposT�repo_setoptsFzNo matching repo to modify: %s.z, �main_setoptsz%s = %s�mainN�1Zenabled�0zrepo: )�setr%r	�hasattrr9�keys�dnf�
exceptions�Errorrr,r1�confr:r)Z
substitutions�items�printr'Zwrite_raw_configfileZconfig_file_pathr(�outputZ
fmtSectionrr�sorted�fnmatch�id�updateZrepofile)	r-r8ZsbcZmodify�valrZrepo_modify�repoidZsetoptsr)r5r6r7r-rr/ks`






z ConfigManagerCommand.modify_repoc
CsN|jjj}d}�x|jjD�]}tjjj|�jdkrDdt	j
j|�}tj
td�|�|jd�r�t	j
j|�}t	j
j||�}y6|jj|dd�}tj|j|�t	j|d�|j�Wn6tk
r�}z|d	7}tj|�wWYd
d
}~XnXqt|�}djtjj|�}t	j
j|d|�}d
|||f}	t||	�sqqW|�rJtjj t!dd|���d
S)z process --add-repo option rr!zfile://zAdding repo from: %sz.repozw+)�modei��Nz$created by {} config-manager from {}z%s.repoz"[%s]
name=%s
baseurl=%s
enabled=1
zConfiguration of repo failedzConfiguration of repos failed)"r1rDZget_reposdirr%r&rA�pycompZurlparse�scheme�os�path�abspathr�infor�endswith�basenamer,Zurlopen�shutilZcopy2r6�chmod�close�IOErrorr*�sanitize_url_to_fsr+�util�	MAIN_PROG�save_to_filerBrCr)
r-Z	myrepodirZerrors_count�urlZdestname�f�erMZreponame�contentrrrr&�s8




zConfigManagerCommand.add_repoN)�__name__�
__module__�__qualname__�aliasesrr+rAr]r^Zsummary�staticmethodrr.r0r/r&rrrrr"s"BrcCspy4t|d�� }tjj||�tj|d�WdQRXWn6ttfk
rj}ztj	t
d�||�dSd}~XnXdS)Nzw+i�z&Could not save repo to repofile %s: %sFT)�openrArPZ
write_to_filerRrYr[�OSErrorrr*r)�filenamerc�fdrbrrrr_�s
r_z^\w+:/*(\w+:|www\.)?z[?/:&#|~\*\[\]\(\)\'\\]+z^[,.]*z[,.]*$cCs*ybtj|�r`tjjr&|jd�jd�}n:t|t�rB|jd�jd�}n
|jd�}t|t	�r`|jd�}Wnt
ttt
fk
r~YnXtjd|�}tjd|�}tjd|�}tjd|�}t|�dk�r|dd�jd�}dt|d
�}tj�}|j||d�jd��|d|�d|j�}d	}tj|d|�S)z�Return a filename suitable for the filesystem and for repo id

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    Zidnazutf-8r!r�N�rOzE[^abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.:-]���)�	RE_SCHEME�matchrArPZPY3�encode�decode�
isinstance�strZunicode�UnicodeDecodeError�UnicodeEncodeError�UnicodeError�	TypeError�sub�RE_SLASH�RE_BEGIN�RE_FINAL�lenr�hashlibZsha256rKZ	hexdigest�re)r`�parts�	lastindexZcsumZ
allowed_regexrrrr\�s.



r\)Z
__future__rrZdnfpluginscorerrrrAZdnf.cliZ
dnf.pycompZdnf.utilrIrrRr�rXZpluginZregister_commandr#ZCommandrr_�compilerpr{r|r}r\rrrr�<module>s(1



__pycache__/repodiff.cpython-36.pyc000064400000017063150402642250013207 0ustar003

�gt`�,�@sjddlmZddlmZddlZddlmZddlZddlm	Z	Gdd�dej
�ZGdd	�d	ejj
�ZdS)
�)�absolute_import)�unicode_literalsN)�OptionParser)�_cs eZdZdZ�fdd�Z�ZS)�RepoDiff�repodiffcs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoDiffCommand)�self�base�cli)�	__class__��/usr/lib/python3.6/repodiff.pyr	$szRepoDiff.__init__)�__name__�
__module__�__qualname__�namer	�
__classcell__rr)rrr src@sLeZdZdZed�Zedd��Zdd�Zdd�Z	d	d
�Z
dd�Zd
d�ZdS)r
rz1List differences between two sets of repositoriesc	Cs�|jddgddtd�d�|jddgdd	td
�d�|jddd
gtjdtd�d�|jdddtd�d�|jddtd�d�|jddtd�d�|jddtd�d�dS)Nz
--repo-oldz-o�append�oldz2Specify old repository, can be used multiple times)�default�action�dest�helpz
--repo-newz-n�newz2Specify new repository, can be used multiple timesz--archz
--archlistz-a�archeszhSpecify architectures to compare, can be used multiple times. By default, only source rpms are compared.z--sizez-s�
store_truez5Output additional data about the size of the changes.)rrz--compare-archzMCompare packages also by arch. By default packages are compared just by name.z--simplez7Output a simple one line message for modified packages.z--downgradezNSplit the data for modified packages between upgraded and downgraded packages.)�add_argumentrrZ_SplitCallback)�parserrrr�
set_argparser/s

zRepoDiffCommand.set_argparsercCs�|jj}d|_d|_d|_dg|jj_|jj	s:|jj
rNtd�}tj
j|��x<|jjj�D],}|j|jj	|jj
kr�|j�q\|j�q\W|jjs�dg|j_dS)NT�allz*Both old and new repositories must be set.�src)r
�demandsZsack_activationZavailable_repos�
changelogsrZconfZdisable_excludes�optsrrr�dnf�
exceptions�ErrorZreposr"�id�enable�disabler)rr$�msgZreporrr�	configureMs
zRepoDiffCommand.configurecCs|jjr|j|jfS|jS)N)r&�compare_archr�arch)r�pkgrrr�_pkgkey`szRepoDiffCommand._pkgkeyc
s6t�fdd�|D���t�j��}t�fdd�|D���t�j��}t�}x:|j|d�D]*}x$|j|jd�D]}||�j|�<qlWqXW�jjj}t�fdd�||D��fdd�||D�|ggd�}	xj|j	|�D]\}
�|
}�|
}|j
|j
kr�q�||j
|j
�d	k�r|	d
j||f�q�|	dj||f�q�W|	S)aNcompares packagesets old and new, returns dictionary with packages:
        added: only in new set
        removed: only in old set
        upgraded: in both old and new, new has bigger evr
        downgraded: in both old and new, new has lower evr
        obsoletes: dictionary of which old package is obsoleted by which new
        csg|]}�j|�|f�qSr)r2)�.0�p)rrr�
<listcomp>msz-RepoDiffCommand._repodiff.<locals>.<listcomp>csg|]}�j|�|f�qSr)r2)r3r4)rrrr5os)�	obsoletes)Zprovidescsg|]}�|�qSrr)r3�k)�new_drrr5zscsg|]}�|�qSrr)r3r7)�old_drrr5{s)�added�removedr6�upgraded�
downgradedrr=r<)�dict�set�keys�filterr6r2r�sack�evr_cmp�intersection�evrr)
rrrZold_keysZnew_keysr6Z	obsoleterZ	obsoletedrCrr7�pkg_old�pkg_newr)r8r9rr�	_repodiffes0
zRepoDiffCommand._repodiffc
sh�fdd��dd�}��fdd�}tddddd�}x<t|d	�D],}ttd
�j�|���|d	|j7<q@Wxjt|d�D]Z}ttd�j�|���|d
j�j|��}|r�ttd�j�|���|d|j7<q~W�jj	�r�|d�r:ttd��x<t|d�D],\}}|d|j|j7<|||��q
W|d�r�ttd��x�t|d�D],\}}|d|j|j7<|||��q^Wn\|d|d}	|	�r�ttd��x8t|	�D],\}}|d|j|j7<|||��q�Wttd��ttd�jt
|d	���ttd�jt
|d����jj	�rlttd�jt
|d���ttd�jt
|d���n&ttd�jt
|d�t
|d����jj�rdttd�j||d	���ttd�j||d����jj	�s�ttd�j||d|d���n4ttd�j||d���ttd�j||d���ttd�j||d	|d|d|d���dS) Ncs �jjrt|�Sd|j|jfS)Nz%s-%s)r&r/�strrrE)r1)rrr�pkgstr�sz'RepoDiffCommand._report.<locals>.pkgstrcSsXt|�}|dkr.|djtjjj|�j��7}n&|dkrT|djtjjj|�j��7}|S)Nrz ({})z (-{}))rI�formatr'r
Z
format_number�strip)Znumr-rrr�sizestr�sz(RepoDiffCommand._report.<locals>.sizestrcsBg}�jjr*|jd�|��|�f��n|jd�|jd�|��|�f�|jdt|d
��|jrv|jd}nd}x�|jD]�}|r�|d|dkr�Pn2|d|dkr�|d|dkr�|d|dkr�P|jd	|djd
�tjj|d�tjj|d�f�q�W�jj	�r0|jt
d�j|j	|j	��tdj
|��dS)Nz%s -> %s��-�rZ	timestampZauthor�textz
* %s %s
%sz%a %b %d %YzSize change: {} bytes�
���)r&Zsimpler�lenr%Zstrftimer'Zi18nZucd�sizerrK�print�join)rFrGZmsgsZ	old_chlogZchlog)rJrrr�report_modified�s2

z0RepoDiffCommand._report.<locals>.report_modifiedr)r:r;r<r=r:zAdded package  : {}r;zRemoved package: {}r6zObsoleted by   : {}r<z
Upgraded packagesr=z
Downgraded packagesz
Modified packagesz
SummaryzAdded packages: {}zRemoved packages: {}zUpgraded packages: {}zDowngraded packages: {}zModified packages: {}zSize of added packages: {}zSize of removed packages: {}zSize of modified packages: {}zSize of upgraded packages: {}zSize of downgraded packages: {}zSize change: {})r>�sortedrVrrKrU�getr2r&Z	downgraderT)
rrrMrXZsizesr1ZobsoletedbyrFrGZmodifiedr)rJrr�_report�sf










zRepoDiffCommand._reportcCs�|jjjtj�j|jjd�}|jjjtj�j|jjd�}|jj	rld|jj	krl|j
|jj	d�|j
|jj	d�|jjr�|j
dd�|j
dd�n|j
dd�|j
dd�|j�|j�|j
|j||��dS)N)Zreponame�*)r0rP)Zlatest_per_arch)Zlatest)rrBZquery�hawkeyZIGNORE_EXCLUDESrAr&rrrZfiltermr/Zapplyr[rH)rZq_newZq_oldrrr�run�szRepoDiffCommand.runN)r)
rrr�aliasesrZsummary�staticmethodr!r.r2rHr[r^rrrrr
+s&ar
)Z
__future__rrZdnf.clir'Zdnf.cli.option_parserrr]ZdnfpluginscorerZPluginrr
ZCommandr
rrrr�<module>s__pycache__/spacewalk.cpython-36.pyc000064400000023462150402642250013363 0ustar003

.޾g�7�@sfddlmZddlmZddlmZmZddlZddlZddlZddl	Z	ddl
Z
ddlZddlmZddl
mZddlZddlZddlZddlZddlmZddlmZd	Zed
�Zed�Zed�Zed
�Zed�Zed�Zed�Zed�Zed�Z ed�Z!ed�Z"ed�Z#ed�Z$Gdd�dej%�Z&Gdd�dej'j(�Z)dd�Z*Gdd�de+�Z,dd �Z-d!d"�Z.dS)#�)�absolute_import)�unicode_literals)�_�loggerN)�copy)�PRIO_PLUGINCONFIG)�ustr)�
up2dateErrorsz_spacewalk.jsonz7CloudLinux Network based repositories will be disabled.z4CloudLinux Network channel support will be disabled.z@There was an error communicating with CloudLinux Network server.z=This system is not registered with CloudLinux Network server.z.This system is not subscribed to any channels.zSystemId could not be acquired.z%You can use rhn_register to register.z@This system is receiving updates from CloudLinux Network server.z�For security reasons packages from CloudLinux Network based repositories can be verified only with locally installed gpg keys. GPG key '%s' has been rejected.z.Package profile information could not be sent.z=Missing required login information for CloudLinux Network: %sz'Leapp upgrade is running - using cache.z>Spacewalk plugin has to be run under with the root privileges.csZeZdZdZ�fdd�Zdd�Zdd�Zdd	d
�Zdd�Zd
d�Z	dd�Z
dd�Z�ZS)�	SpacewalkZ	spacewalkcs�tt|�j||�||_||_tjj|jjj	t
�|_d|_i|_
t|jj�|_|j|j�|_d|jj�kr�|jjd�}x |D]\}}|jj||t�q|Wtjj�s�tjt�d|j_|jjs�dStjd�|j�dS)NF�mainzinitialized Spacewalk plugin)�superr
�__init__�base�cli�os�path�join�confZ
persistdir�STORED_CHANNELS_NAME�stored_channels_path�connected_to_spacewalk�up2date_cfgrZread_config�parser�sections�items�
_set_valuer�dnf�utilZ	am_i_rootr�warning�MUST_BE_ROOT�enabled�debug�activate_channels)�selfrr�options�key�value)�	__class__��/usr/lib/python3.6/spacewalk.pyr
;s(


zSpacewalk.__init__cCs$|jjsdS|jsdSd|jj_dS)NT)rr rZdemandsZ	root_user)r#r(r(r)�configRs
zSpacewalk.configcCstjd�dS)Nz$/usr/sbin/clnreg_ks --strict-edition)r�system)r#r(r(r)�clnreg^szSpacewalk.clnregTcCs�i}d}d}d}d}|j�}|s(|}�n�tjjd�rFtjt�|}�n�tjj	�|_
t|j
�}|j
df}d}xl|s�ytjj
|jjd�}d}Wqntjk
r�}	z*|dkr�|j�d}wntjdtt|	�dSd}	~	XqnXqnW|�s�tjdtt�|ji�dSytjj|jjd�}
Wn�tjk
�rF}	ztjdtt|	�dSd}	~	XnXtjk
�rttjdtt�|ji�dStjk
�r�tjd	ttt t�dSXd|_!tj"t#�x,|
D]$}|d
�r�t$|j%��||d<�q�W|j|�|j&j'}x�|j%�D]�\}
}|j(|
�}d}|�r|j(d
�}t)|j�}|
|j*j+�k�rf|j*j%|
�}x |D]\}}|j,||t-��qJWt.||j&j||j|||||j/|j0d�	�}|j1|��q�Wtj2|�dS)
Nrz/etc/cln_leapp_in_progressZuseNoSSLForPackagesF)�timeoutTz%s
%s
%sz%s
%sz%s %s
%s
%s�version�label)	r�proxyr-�	sslcacert�
force_http�cached_version�
login_info�gpgcheckr )3�_read_channels_filerr�isfilerr�LEAPP_IN_PROGRESS�up2date_clientr*ZinitUp2dateConfigr�get_ssl_ca_certZup2dateAuthZgetLoginInforr-r	�RhnServerExceptionr,�error�COMMUNICATION_ERROR�RHN_DISABLED�NOT_REGISTERED_ERROR�_write_channels_fileZ
rhnChannelZgetChannelDetailsZCommunicationErrorZNoChannelsError�NOT_SUBSCRIBED_ERROR�CHANNELS_DISABLEDZNoSystemIdError�NO_SYSTEM_ID_ERROR�USE_RHNREGISTERr�info�UPDATES_FROM_SPACEWALK�dictrr�repos�getrrrrr�
SpacewalkRepor5r �addr!)r#Z
networkingZenabled_channelsr1r2Z	proxy_urlr4Zcached_channelsZclnreg_tried�eZsvrChannels�channelrHZ
channel_idZchannel_dictZcached_channelr3rr$r%r&�repor(r(r)r"as�











zSpacewalk.activate_channelscCs�|jjsdS|jsdS|jddkr8|j�}tjj|�ytjj|jj	d�Wn4t
jk
r�}ztj
dtt|�WYdd}~XnXdS)z, Update system's profile after transaction. NZwriteChangesToLog�)r-z%s
%s
%s)rr rr�_make_package_deltar9ZrhnPackageInfoZlogDeltaPackagesZupdatePackageProfiler-r	r;rr<r=�PROFILE_NOT_SENT)r#�deltarLr(r(r)�transaction�szSpacewalk.transactioncCs�y.t|jd��}|j�}tj|�}|SQRXWnXttfk
rb}z|jtjkrR�WYdd}~Xn&tj	j
k
r�}zWYdd}~XnXiS)N�r)�openr�read�json�loads�FileNotFoundError�IOError�errno�ENOENT�decoderZJSONDecodeError)r#�
channels_fileZcontentZchannelsrLr(r(r)r6�s
zSpacewalk._read_channels_filecCsfy,t|jd��}tj||dd�WdQRXWn4ttfk
r`}z|jtjkrP�WYdd}~XnXdS)N�w�)�indent)rUrrW�dumprYrZr[r\)r#�varr^rLr(r(r)r@�szSpacewalk._write_channels_filecCs.dd�|jjjD�dd�|jjjD�d�}|S)NcSs$g|]}|j|j|j|j|jf�qSr()�namer.�release�epoch�arch)�.0�pr(r(r)�
<listcomp>�sz1Spacewalk._make_package_delta.<locals>.<listcomp>cSs$g|]}|j|j|j|j|jf�qSr()rdr.rerfrg)rhrir(r(r)rj�s)ZaddedZremoved)rrSZinstall_setZ
remove_set)r#rRr(r(r)rP�s
zSpacewalk._make_package_delta)T)
�__name__�
__module__�__qualname__rdr
r*r,r"rSr6r@rP�
__classcell__r(r()r'r)r
7s
Yr
csDeZdZdZdddddgZ�fdd�Zd	d
�Zd�fdd
�	Z�ZS)rJzB
    Repository object for Spacewalk. Uses up2date libraries.
    zX-RHN-Server-IdzX-RHN-Auth-User-Idz
X-RHN-AuthzX-RHN-Auth-Server-TimezX-RHN-Auth-Expire-OffsetcsTtt��jt|d�|jd��t|d��_�fdd�|dD��_|jd��_|jd��_yt	|d	��_
Wn<tk
r�}z tj
ttjj|��g�_
WYdd}~XnX|d
|jd�kr�d�_|jd
��_d�_d�_d�_d�_|jd��_|jd��_|jd��_|jd��r$�j�n�j�t�d��rP�j�}|�rP�j|�dS)Nr/rrdcsg|]}|d�j�qS)z	/GET-REQ/)�id)rh�url)r#r(r)rj�sz*SpacewalkRepo.__init__.<locals>.<listcomp>rpr1r0Zgpg_key_urlr.r3rOr4rr-r5r2r �set_http_headers) rrJr
rrIrdZbaseurlr1r0�get_gpg_key_urlsZgpgkey�InvalidGpgKeyLocationrr�GPG_KEY_REJECTEDrZi18nZucdZmetadata_expirer4Z	keepaliveZ	bandwidthZretriesZthrottler-r5r2�enable�disable�hasattr�create_http_headersrq)r#rMZoptsrL�http_headers)r')r#r)r
�s8
zSpacewalkRepo.__init__cCs�g}|js|Sxb|jD]X}||jkr8t|}tjj|��|j|dkrV|jd|�q|jd||j|f�qW|js�|jd�|S)N�z*%s: 
X-libcurl-Empty-Header-Workaround: *z%s: %sz.X-RHN-Transport-Capability: follow-redirects=3)Nrz)r4�needed_headers�MISSING_HEADERr�ErrorZ	RepoError�appendr2)r#ry�headerr<r(r(r)rxs

z!SpacewalkRepo.create_http_headersTcs0tt|�j||�}|j�}|r,|jtj|�|S)N)rrJ�_handle_new_remoterxZsetopt�librepoZLRO_HTTPHEADER)r#ZdestdirZmirror_setupZhandlery)r'r(r)r�-s
z SpacewalkRepo._handle_new_remote)T)	rkrlrm�__doc__r{r
rxr�rnr(r()r'r)rJ�s%rJcCs*|j�}x|D]}t|�st|��qW|S)a
    Parse the key urls and validate them.

    key_url_string is a space seperated list of gpg key urls that must be
    located in /etc/pkg/rpm-gpg/.
    Return a list of strings containing the key urls.
    Raises InvalidGpgKeyLocation if any of the key urls are invalid.
    )�split�is_valid_gpg_key_urlrs)Zkey_url_stringZkey_urls�key_urlr(r(r)rr9s
	
rrc@seZdZdS)rsN)rkrlrmr(r(r(r)rsHsrscCsP|jd�}t|�dkrdS|\}}|j�dkr2dStjj|�}|jd�sLdSdS)Nz://�F�filez/etc/pki/rpm-gpg/T)r��len�lowerrr�normpath�
startswith)r�Zproto_split�protorr(r(r)r�Ks

r�cCs4d|ko|dst�|d}t|�tkr0|dS|S)NZ	sslCACertr)ZBadSslCaCertConfig�type�list)rZca_certsr(r(r)r:Ysr:)/Z
__future__rrZdnfpluginscorerrrZdnf.exceptionsr[rWr�rrZdnf.conf.configrZup2date_client.up2dateAuthr9Zup2date_client.configZup2date_client.rhnChannelZup2date_client.rhnPackageInfoZrhn.i18nrr	rr>rBr=r?rArCrDrFrtrQr|r8rZPluginr
rNZReporJrr�	Exceptionrsr�r:r(r(r(r)�<module>sJ4O__pycache__/debuginfo-install.cpython-36.opt-1.pyc000064400000013665150402642250015762 0ustar003

�gt`L+�@sNddlmZmZddlZddlmZGdd�dej�ZGdd�dejj	�Z
dS)�)�_�loggerN)�Packagecs,eZdZdZdZ�fdd�Zdd�Z�ZS)�DebuginfoInstallz5DNF plugin supplying the 'debuginfo-install' command.zdebuginfo-installcs4tt|�j||�||_||_|dk	r0|jt�dS)zInitialize the plugin instance.N)�superr�__init__�base�cliZregister_command�DebuginfoInstallCommand)�selfrr	)�	__class__��'/usr/lib/python3.6/debuginfo-install.pyr s
zDebuginfoInstall.__init__cCsf|j|jj�}|jd�o.|jdd�o.|jdd�}|rbtjj|j�j	�j
dd�}t|�rb|jjj
�dS)N�main�
autoupdatez*-debuginfo)Z
name__glob)Zread_configr�confZhas_sectionZ
has_optionZ
getboolean�dnf�sackZ_rpmdb_sack�query�filterm�len�repos�enable_debug_repos)rZcprZdbginfor
r
r�config(s
zDebuginfoInstall.config)�__name__�
__module__�__qualname__�__doc__�namerr�
__classcell__r
r
)rrrsrcsheZdZdZdZed�Z�fdd�Zedd��Z	dd	�Z
d
d�Zdd
�Zdd�Z
dd�Zdd�Z�ZS)r
z! DebuginfoInstall plugin for DNF �debuginfo-installzinstall debuginfo packagescs4tt|�j|�t�|_t�|_t�|_t�|_dS)N)rr
r�set�available_debuginfo_missing�available_debugsource_missing�installed_debuginfo_missing�installed_debugsource_missing)rr	)rr
rr:s
z DebuginfoInstallCommand.__init__cCs|jddd�dS)N�package�+)�nargs)�add_argument)�parserr
r
r�
set_argparserBsz%DebuginfoInstallCommand.set_argparsercCs0|jj}d|_d|_d|_d|_|jjj�dS)NT)	r	�demandsZ	resolvingZ	root_userZsack_activationZavailable_reposrrr)rr,r
r
r�	configureFsz!DebuginfoInstallCommand.configurecCs�g}ttj�}ttj�}�x�|jjD�]�}tjj|�j	|j
jdd�}|d}|sxtj
td�|j
jjj|��|j|�q$|j�j�}|j|j�j��xdt|j��D]T}|jtj�r�|d|�|kr�|j|�|jtj�r�|d|�|kr�|j|�q�W�x�|j�D�]�}	|	d}
|
j�r�i}x"|	D]}|j|jg�j|��q(Wxj|j�D]^}
|
d}|j|j |��s�|j|j!|��s�|j"j#t$|��|j|j%|��sP|j&j#t$|���qPW�q|
j'jtj��s�|
j'jtj��r�|j(|	��q|ddk	�rb|j)|
j |d��s2|j)|
j!|d��s2|j*j#dj+|
j'|
j,��|j)|
j%|d��s|j-j#dj+|
j'|
j,���q|j.|
j |	��s�|j.|
j!|	��s�|j*j#dj+|
j'|
j,��|j.|
j%|	��s|j-j#dj+|
j'|
j,���qWq$W|j*�r�tj
td�d	j/t0|j*���|j-�rtj
td
�d	j/t0|j-���|j"�r8tj
td�d	j/t0|j"���|j&�r\tj
td�d	j/t0|j&���|�r�|j
j1j2�r�tj3j4td
�dj/|�d��dS)NF)Zwith_srcrzNo match for argument: %srZnevraz{}-{}zICould not find debuginfo package for the following available packages: %sz, zKCould not find debugsource package for the following available packages: %szICould not find debuginfo package for the following installed packages: %szKCould not find debugsource package for the following installed packages: %szUnable to find a match� )Zpkg_spec)5rrZDEBUGINFO_SUFFIXZDEBUGSOURCE_SUFFIXZoptsr&rZsubjectZSubjectZget_best_solutionrrr�infor�outputZtermZbold�appendZ	availableZ
_name_dict�updateZ	installed�list�keys�endswith�pop�valuesZ_from_system�
setdefault�arch�_install_debug_from_system�
debug_nameZsource_debug_namer$�add�strZdebugsource_namer%r�_install�_install_debugr"�format�evrr#�_install_debug_no_nevra�join�sortedr�strict�
exceptionsZPackagesNotAvailableError)rZerrors_specZdebuginfo_suffix_lenZdebugsource_suffix_lenZpkgspecZsolutionrZpackage_dictr�pkgsZ	first_pkgZ	arch_dict�pkgZpackage_arch_listr
r
r�runNs�





zDebuginfoInstallCommand.runcCs:|jjj�j||j|j|j|jd�}|r6|j|�dSdS)N)r�epoch�version�releaser9TF)	rrr�filterrJrKrLr9r>)rr;rHrr
r
rr:�s

z2DebuginfoInstallCommand._install_debug_from_systemcCs�i}|jdk	r|j|d<|jdk	r,|j|d<|jdk	r@|j|d<|jdk	rT|j|d<|jjj�jfd|i|��}|r�|j|�dSdS)NZepoch__globZ
version__globZ
release__globZ
arch__globrTF)	rJrKrLr9rrrrMr>)rr;Z
base_nevra�kwargsrr
r
rr?�s








z&DebuginfoInstallCommand._install_debugcs8|jjj�j�fdd�|D�d�}|r4|j|�dSdS)Ncsg|]}dj�|j|j��qS)z{}-{}.{})r@rAr9)�.0�p)r;r
r�
<listcomp>�szCDebuginfoInstallCommand._install_debug_no_nevra.<locals>.<listcomp>)Znevra_strictTF)rrrrr>)rr;rGrr
)r;rrB�s
z/DebuginfoInstallCommand._install_debug_no_nevracCs:tjj|jj�}|j|d�|jjj||jjj	d�dS)N)rH)ZselectZoptional)
r�selectorZSelectorrrr!ZgoalZinstallrrE)rrGrRr
r
rr>�sz DebuginfoInstallCommand._install)r )rrrr�aliasesrZsummaryr�staticmethodr+r-rIr:r?rBr>rr
r
)rrr
4s|
	r
)ZdnfpluginscorerrrZdnf.packagerZPluginrr	ZCommandr
r
r
r
r�<module>s__pycache__/changelog.cpython-36.opt-1.pyc000064400000010117150402642250014270 0ustar003

�gt`g�@s|ddlmZddlmZddlZddlZddlZddlmZm	Z	m
Z
ddlZddlZdd�Z
ejjGdd�dejj��ZdS)	�)�absolute_import)�unicode_literalsN)�_�P_�loggerc
CsDytjj|dd�Stttfk
r>tjtd�j	|���YnXdS)NT)ZfuzzyzNot a valid date: "{0}".)
�dateutil�parser�parse�
ValueError�	TypeError�
OverflowError�argparseZArgumentTypeErrorr�format)�val�r�/usr/lib/python3.6/changelog.py�
validate_date!src@sLeZdZdZed�Zedd��Zdd�Zdd�Z	d	d
�Z
dd�Zd
d�ZdS)�ChangelogCommand�	changelogzShow changelog data of packagescCsd|j�}|jdddttd�d�|jddttd�d�|jdd	d
td�d�|jd
dtd�d�dS)Nz--sinceZDATEzZshow changelog entries since DATE. To avoid ambiguosity, YYYY-MM-DD format is recommended.)�metavar�default�type�helpz--countz2show given number of changelog entries per package)rrrz
--upgradesF�
store_truezmshow only new changelog entries for packages, that provide an upgrade for some of already installed packages.)r�actionr�package�*ZPACKAGE)�nargsr)Zadd_mutually_exclusive_group�add_argumentrr�int)rZfilter_grouprrr�
set_argparser-szChangelogCommand.set_argparsercCs|jj}d|_d|_d|_dS)NT)�cli�demandsZavailable_reposZsack_activation�
changelogs)�selfr"rrr�	configure>szChangelogCommand.configurecCs�|jjj�}|jjr�|jdd�x�|jjD]d}tjj|dd�j	|jjdddd�}|jj
rh|j|jj
d�|r||j|j��}q*t
jtd�|�q*Wn|jj
r�|j|jj
d�|jjr�|j�}n|j�}|S)NT)�empty)Zignore_caseF)Z
with_nevraZ
with_providesZwith_filenames)ZreponamezNo match for argument: %s)�baseZsack�query�optsrZfilterm�dnfZsubjectZSubjectZget_best_queryZrepo�unionZlatestr�infor�upgradesZ	available)r$�q�pkgZpkg_qrrrr(Ds$

zChangelogCommand.querycCs>tj�}x0t|�D]$}|j|jp$|j|jfg�j|�qW|S)N)�collections�OrderedDict�sorted�
setdefaultZsource_name�nameZevr�append)r$Zpackages�by_srpmr/rrrr6Zs$zChangelogCommand.by_srpmcsT�jjr�jj|�S�jjr.|jd�jj�S�jjrJ�fdd�|jD�S|jSdS)Ncs$g|]}|d�jjj�kr|�qS)Z	timestamp)r)�sinceZdate)�.0�chlog)r$rr�
<listcomp>fsz6ChangelogCommand.filter_changelogs.<locals>.<listcomp>)r)r-r'Zlatest_changelogs�countr#r7)r$rr)r$r�filter_changelogs`sz"ChangelogCommand.filter_changelogscCs�|jjr"tjtd�j|jj��nP|jjrLtjtdd|jj�j|jj��n&|jjrdtjtd��ntjtd��|j	|j
��}xb|D]Z}ttd�jdjt
dd	�||D�����x*|j||d
�D]}t|jj|��q�Wq�WdS)NzListing changelogs since {}zListing only latest changelogzListing {} latest changelogszBListing only new changelogs since installed version of the packagezListing all changelogszChangelogs for {}z, cSsh|]}t|��qSr)�str)r8r/rrr�	<setcomp>{sz'ChangelogCommand.run.<locals>.<setcomp>r)r)r7rr,rrr;rr-r6r(�print�joinr2r<r'Zformat_changelog)r$r6r4r9rrr�runks 

 zChangelogCommand.runN)r)
�__name__�
__module__�__qualname__�aliasesrZsummary�staticmethodr r%r(r6r<rArrrrr(sr)Z
__future__rrr
r0Zdateutil.parserrZdnfpluginscorerrrr*Zdnf.clirZpluginZregister_commandr!ZCommandrrrrr�<module>s__pycache__/repomanage.cpython-36.pyc000064400000014573150402642250013532 0ustar003

�fJ)�@szddlmZddlmZddlmZmZddlZddlZddlZddl	Z	ddl
Z
Gdd�dej�ZGdd�dej
j�ZdS)	�)�absolute_import)�unicode_literals)�_�loggerNcs eZdZdZ�fdd�Z�ZS)�
RepoManage�
repomanagecs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoManageCommand)�self�base�cli)�	__class__�� /usr/lib/python3.6/repomanage.pyr	$szRepoManage.__init__)�__name__�
__module__�__qualname__�namer	�
__classcell__rr)rrr src@s\eZdZdZed�Zdd�Zdd�Zdd�Ze	d	d
��Z
e	dd��Zd
d�Ze	dd��Z
dS)r
rz"Manage a directory of rpm packagescCs,|jjr(|jjr(|jjtjtjd�dS)N)�stdout�stderr)�opts�verbose�quietr
Zredirect_logger�loggingZWARNING�INFO)rrrr�
pre_configure/szRepoManageCommand.pre_configurecCs0|jjr|jjr|jj�|jj}d|_dS)NT)rrrr
Zredirect_repo_progress�demandsZsack_activation)rrrrr�	configure3s
zRepoManageCommand.configurec"s@�jjr �jjr tjjtd����jjr@�jjr@tjjtd����jjr`�jjr`tjjtd����jjr|�jjr|d�j_i}i}i}t�}t	�jj
�}y�d}�jjj
|�jj�jjgd�}|jj��jj|�tjj�r>�jj��jjj�}xH|D]@}	|	j�|kr�|j|	j��|j|	j�i�j|	j�g�j|	�q�WWn�tjjk
�r�g}
�j�jjd�}
t |
�dk�r�tjjtd	����jj!ddd
��jj"ddd�y�jj#|
�jj$j%d
�Wn0t&k
�r�t'j(td�j)dj*|
���YnXYnX�jj+j,t-j.d�j/�}dd�|j0|j0|d�d�j/�D�}|j1�x�|D]~}
|
j2|
j3f}||k�rx|
||k�r�||j|
�n
|
g||<�j4|
�}||k�r�||j�j5|
��n�j5|
�g||<�q@Wg}t�}�jj�r�xh|j6�D]\\}}|||f}||d�}x6|D].}�j4|�}x||D]}|j|��q W�qW�q�Wxb|j7�D]V}t8|j6��}||d�}x4|D],}x$||D]}|jt|j����q|W�qnW�qJW�jj�r|xh|j6�D]\\}}|||f}|d|�}x6|D].}�j4|�}x||D]}|j|��q�W�q�W�q�Wxb|j7�D]V}t8|j6��}|d|�}x4|D],}x$||D]}|jt|j����qTW�qFW�q"W�jj�r�xh|j6�D]\\}}|||f}|d|�}x6|D].}�j4|�}x||D]}|j|��q�W�q�W�q�Wt�}xb|j7�D]V}t8|j6��}||d�}x4|D],}x$||D]}|jt|j����q2W�q$W�qWxx|j7�D]l}t8|j6��}|d|�}xJ|D]B}x:||D].}x&|j�D]} | |k�r�|j9| ��q�W�q�W�q�W�qdW�fdd�|j0|j0|d�d�j/�D�}!||!}|j1��jj:�r$t;dj*|��nx|D]}
t;|
��q*WdS)Nz%Pass either --old or --new, not both!z)Pass either --oldonly or --new, not both!z)Pass either --old or --oldonly, not both!TZrepomanage_repo)Zbaseurlz.rpmrzNo files to process)�sack�reposF)Zload_system_repoZload_available_repos)�progresszCould not open {}z, )�flagscSsg|]}|�qSrr)�.0�xrrr�
<listcomp>osz)RepoManageCommand.run.<locals>.<listcomp>)Znevra_strict)Zpkg__neqcsg|]}�j|��qSr)�_package_to_path)r$r%)rrrr&�s)Zpkg__eq� )<r�new�old�dnf�
exceptions�ErrorrZoldonly�set�intZkeeprr!Zadd_new_repoZconf�pathZ_repoZexpireZ_add_repo_to_sackZWITH_MODULESZ_setup_modular_excludesZ_moduleContainerZgetModulePackagesZ	getRepoID�updateZgetArtifacts�
setdefaultZ
getNameStreamZ
getVersionNum�appendZ	RepoError�_get_file_list�len�resetZ	fill_sackZadd_remote_rpms�outputr"�IOErrorrZwarning�format�joinr �query�hawkeyZIGNORE_MODULAR_EXCLUDESZ	available�filter�sortr�arch�_package_to_nevrar'�keys�values�sorted�addZspace�print)"rZverfileZpkgdictZmodule_dictZall_modular_artifactsZkeepnumZREPOMANAGE_REPOIDZ	repo_confZmodule_packagesZmodule_packageZrpm_listr;Zpackages�pkgZnaZnevraZoutputpackagesZkeepnum_latest_stream_artifacts�n�aZevrlistZnewevrs�packageZfpkgZstreams_by_versionZsorted_stream_versionsZnew_sorted_stream_versions�i�streamZoldevrsZold_sorted_stream_versionsZkeepnum_newer_stream_artifactsZartifactZmodular_packagesr)rr�run9s�



&"








$



$



$

"&

zRepoManageCommand.runc	Cs�|jdddtd�d�|jdddtd�d�|jd	d
dtd�d�|jdd
dtd�d�|jddddtd�dtd�|jddtd�d�dS)Nz-oz--old�
store_truezPrint the older packages)�action�helpz-Oz	--oldonlyz6Print the older packages. Exclude the newest packages.z-nz--newzPrint the newest packagesz-sz--spacez#Space separated output, not newlinez-kz--keepZstoreZKEEPz)Newest N packages to keep - defaults to 1�)rN�metavarrO�default�typer0zPath to directory)�add_argumentrr/)�parserrrr�
set_argparser�s




zRepoManageCommand.set_argparsercCs`g}xVtj|�D]H\}}}x<|D]4}tjj|�dj�t|�kr |jtjj||��q WqW|S)zJReturn all files in path matching ext

        return list object
        rP)�os�walkr0�splitext�lower�strr3r:)r0ZextZfilelist�root�dirs�files�frrrr4�s
z RepoManageCommand._get_file_listcCs*t|jj�r tjj|jj|j�S|jSdS)N)r5rr!rWr0r:r�location)rrFrrrr'�sz"RepoManageCommand._package_to_pathcCs|j|j|j|j|jfS)N)rZepoch�version�releaser?)rFrrrr@sz#RepoManageCommand._package_to_nevraN)r)rrr�aliasesrZsummaryrrrL�staticmethodrVr4r'r@rrrrr
+s$r
)Z
__future__rrZdnfpluginscorerrr+Zdnf.clirrWr<ZPluginrr
ZCommandr
rrrr�<module>s__pycache__/reposync.cpython-36.pyc000064400000024276150402642250013257 0ustar003

�f89�@s�ddlmZddlmZddlZddlZddlZddlZddlmZm	Z	ddl
mZddlZddl
Zdd�ZGdd	�d	ejj�ZejjGd
d�dejj��ZdS)�)�absolute_import)�unicode_literalsN)�_�logger)�OptionParsercCs(tjjtj��}tjjtjj|||��S)N)�dnfZi18nZucd�os�getcwd�path�realpath�join)Zintermediate�target�cwd�r�/usr/lib/python3.6/reposync.py�_pkgdir#srcs(eZdZ�fdd�Z�fdd�Z�ZS)�RPMPayloadLocationcs$tt|�j||�tjj|�|_dS)N)�superr�__init__rr
�dirname�package_dir)�self�pkg�progressZpkg_location)�	__class__rrr)szRPMPayloadLocation.__init__cs*tt|�j�}tjj|j�|j|d<|S)N�dest)rr�_target_paramsr�util�
ensure_dirr)r�tp)rrrr-s
z!RPMPayloadLocation._target_params)�__name__�
__module__�__qualname__rr�
__classcell__rr)rrr(srcs�eZdZdZed�Z�fdd�Zedd��Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Z�ZS) �RepoSyncCommand�reposyncz&download all packages from remote repocstt|�j|�dS)N)rr$r)r�cli)rrrr9szRepoSyncCommand.__init__c	Cs|jdddgtjdtd�d�|jddd	td
�d�|jddd	td
�d�|jdddd	td�d�|jdddd	td�d�|jdtd�d�|jdddd	td�d�|jddd	td�d�|jdddtd�d �|jd!dd	td"�d�|jd#dd	td$�d�|jd%d&dd	td'�d�dS)(Nz-az--arch�archesz[arch]z$download only packages for this ARCH)r�default�action�metavar�helpz--deleteF�
store_truez5delete local packages no longer present in repository)r(r)r+z--download-metadatazdownload all the metadata.z-gz
--gpgcheckzBRemove packages that fail GPG signature checking after downloadingz-mz--downloadcompsz&also download and uncompress comps.xmlz--metadata-pathzXwhere to store downloaded repository metadata. Defaults to the value of --download-path.)r+z-nz
--newest-onlyz&download only newest packages per-repoz--norepopathz,Don't add the reponame to the download path.z-pz--download-pathz./z&where to store downloaded repositories)r(r+z
--remote-timezCtry to set local timestamps of local files by the one on the serverz--sourcezdownload only source packagesz-uz--urlsz:Just list urls of what would be downloaded, don't download)�add_argumentrZ_SplitCallbackr)�parserrrr�
set_argparser<s2





zRepoSyncCommand.set_argparsercCs�|jj}d|_d|_|jj}|jjr||j�j	�xJ|jjD]>}y||}Wn$t
k
rntjjd|��YnX|j
�q:W|jjr�|j�tt|j���dkr�|jjr�tjjtd���x |j�D]}|jj�d|_q�WdS)NTzUnknown repo: '%s'.�z1Can't use --norepopath with multiple repositoriesF)r&�demandsZavailable_reposZsack_activation�base�repos�opts�repo�all�disable�KeyErrorrZCliError�enable�sourceZenable_source_repos�len�list�iter_enabled�
norepopathr�_repoZexpireZdeltarpm)rr1r3Zrepoidr5rrr�	configure\s(

zRepoSyncCommand.configurecs�d|jj_d}�x�|jjj�D�]�}|jjr8|jjd�|jj	r�|jj
r�xP|jj�D]6\}}|j|�}|rtt
|�qTtd�|}tj|�qTWn
|j	|�|jj�r|jj
�rt|jj����fdd�dD�}|�rxB|D]}|j|�}|r�t
|�Pq�Wtd�}tj|�n
|j|�|j|�}	|jj
�r8|j|	�n�|j|	�|jj�r�xt|	D]l}
|j|
�}tj|fd	d
�|
�|
_|jj|
�\}}
|dk�rRtjtd�jtjj |�|
��tj!|�d
}�qRW|jj"r|j#||	�qW|�s�t$j%j&td���dS)NTz%Failed to get mirror for metadata: %scsg|]}|�kr�|�qSrr)�.0�md_type)�mdlrr�
<listcomp>�sz'RepoSyncCommand.run.<locals>.<listcomp>�group�group_gz�group_gz_zckz(Failed to get mirror for the group file.cSs|S)Nr)�s�
local_pathrrr�<lambda>�sz%RepoSyncCommand.run.<locals>.<lambda>rzRemoving {}: {}FzGPG signature check failed.)rErFrG)'r2ZconfZ	keepcacher3r=r4Zremote_timer?ZsetPreserveRemoteTime�download_metadataZurlsZgetMetadataLocations�remote_location�printrr�warningZ
downloadcomps�dict�getcomps�get_pkglist�
print_urls�download_packagesZgpgcheck�pkg_download_path�types�
MethodTypeZlocalPkgZpackage_signature_check�formatrr
�basename�unlink�delete�delete_old_local_packagesr�
exceptions�Error)rZgpgcheck_okr5rBZmd_location�url�msgZgroup_locationsZgroup_location�pkglistrrI�result�errorr)rCr�runws^


















zRepoSyncCommand.runcCs$t|jjp|jj|jjs|jnd�S)N�)rr4ZdestdirZ
download_pathr>�id)rr5rrr�repo_target�szRepoSyncCommand.repo_targetcCs&|jjrt|jj|j�S|j|�SdS)N)r4Z
metadata_pathrrerf)rr5rrr�metadata_target�szRepoSyncCommand.metadata_targetcCsT|j|j�}tjjtjj||j��}|jtjj|d��sPtj	j
td�j||���|S)Nrdz6Download target '{}' is outside of download path '{}'.)
rfr5rr
rr�location�
startswithrr\r]rrW)rrrfrTrrrrT�s
z!RepoSyncCommand.pkg_download_pathc	
s�t�fdd�|D��}x�tj�j|��D]�\}}}x||D]t}tjj||�}|jd�r8tjj|�r8||kr8ytj|�t	j
td�|�Wq8tk
r�t	j
td�|�Yq8Xq8Wq(WdS)Nc3s|]}�j|�VqdS)N)rT)rAr)rrr�	<genexpr>�sz<RepoSyncCommand.delete_old_local_packages.<locals>.<genexpr>z.rpmz[DELETED] %szfailed to delete file %s)�setr�walkrfr
r�endswith�isfilerYr�infor�OSErrorrb)	rr5r`Zdownloaded_files�dirpathZdirnames�	filenames�filenamer
r)rrr[�s

z)RepoSyncCommand.delete_old_local_packagescCsZ|jj�}|rV|j|�}tjj|�tjj|d�}tj	j
j||d�tj
td�|j�dS)Nz	comps.xml)rz!comps.xml for repository %s saved)r?Z
getCompsFnrgrrrrr
rZyumZmiscZ
decompressrrorre)rr5Zcomps_fnZ	dest_pathrrrrrP�s

zRepoSyncCommand.getcompscCs|j|�}|jj|�dS)NT)rgr?ZdownloadMetadata)rr5rfrrrrK�s
z!RepoSyncCommand.download_metadatacCs�tjjs|j�S|j�|jjj�}t�}i}i}xp|D]h}|j�}|j	|�|j
|j�i�j
|j�g�j
|�x.|D]&}|j
|i�j
|j�g�j
|j��qvWq8W|j|j|d�d�j�}	t�}
x�|j�D]�\}}t�}
|
jt|j�dd�d�t�}x0|j�D]$}x|D]}|j	|j���qW�qWx:|j|d�j�D]&}dj|�}|
jt|||���q>Wx0|
D](}x ||D]}|
j	|j���q|W�qnWq�W|	j|j|
d��}	|	S)a\
        return union of these queries:
        - the latest NEVRAs from non-modular packages
        - all packages from stream version with the latest package NEVRA
          (this should not be needed but the latest package NEVRAs might be
          part of an older module version)
        - all packages from the latest stream version
        )Znevra_strict)Zpkg__neqT)�reverserz3{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch})rr2ZWITH_MODULESZlatestZapplyZ_moduleContainerZgetModulePackagesrkZgetArtifacts�update�
setdefaultZ
getNameStreamZ
getVersionNum�append�filter�items�add�sorted�keys�valuesrW�max�union)r�queryZmodule_packagesZ
all_artifactsZmodule_dictZartifact_versionZmodule_packageZ	artifactsZartifactZlatest_queryZlatest_stream_artifactsZ
namestreamZversion_dictZversionsZstream_artifacts�modules�moduleZ
latest_pkgZnevra�versionrrr�_get_latest�sB	





zRepoSyncCommand._get_latestcCsd|jjjtjd�j�j|jd�}|jj	r2|j
|�}|jjrH|jdd�n|jjr`|j|jjd�|S)N)�flags)Zreponame�src)Zarch)
r2�sackr��hawkey�IGNORE_MODULAR_EXCLUDESZ	availableZfiltermrer4Znewest_onlyr�r:r')rr5r�rrrrQs

zRepoSyncCommand.get_pkglistcsj�j}|jj��dkr tjj��tjj|jj	t
jd�j��d�}��fdd�|D�}|j
||�dd�dS)N)r�rcsg|]}t|��j|���qSr)rrT)rAr)rrrrrD0sz5RepoSyncCommand.download_packages.<locals>.<listcomp>F)r2�outputrr�callbackZNullDownloadProgress�drpmZ	DeltaInfor�r�r�r�Z	installedZ_download_remote_payloads)rr`r2r�Zpayloadsr)rrrrS)s
z!RepoSyncCommand.download_packagescCs@x:|D]2}|j�}|r t|�qtd�|j}tj|�qWdS)Nz$Failed to get mirror for package: %s)rLrMr�namerrN)rr`rr^r_rrrrR4s

zRepoSyncCommand.print_urls)r%)r r!r"�aliasesrZsummaryr�staticmethodr/r@rcrfrgrTr[rPrKr�rQrSrRr#rr)rrr$4s  :
	9r$)Z
__future__rrr�rZshutilrUZdnfpluginscorerrZdnf.cli.option_parserrrZdnf.clirr5Z
RPMPayloadrZpluginZregister_commandr&ZCommandr$rrrr�<module>s__pycache__/repograph.cpython-36.opt-1.pyc000064400000005342150402642250014334 0ustar003

�gt`��@s^ddlmZddlmZddlmZmZddlZdZGdd�dej	�Z
Gdd	�d	ejj�Z
dS)
�)�absolute_import)�unicode_literals)�_�loggerNzY
size="20.69,25.52";
ratio="fill";
rankdir="TB";
orientation=port;
node[style="filled"];
cs eZdZdZ�fdd�Z�ZS)�	RepoGraph�	repographcs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoGraphCommand)�self�base�cli)�	__class__��/usr/lib/python3.6/repograph.pyr	)szRepoGraph.__init__)�__name__�
__module__�__qualname__�namer	�
__classcell__rr)rrr%src@s<eZdZd
Zed�Zdd�Zdd�Zdd	�Ze	d
d��Z
dS)r
r�
repo-graphz4Output a full package dependency graph in dot formatcCsV|jj}d|_d|_|jjrRx4|jjj�D]$}|j	|jjkrF|j
�q*|j�q*WdS)NT)r
�demandsZsack_activationZavailable_reposZopts�reporZrepos�all�id�disable�enable)rrrrrr�	configure4s
zRepoGraphCommand.configurecCs|jt�dS)N)�do_dot�
DOT_HEADER)rrrr�run?szRepoGraphCommand.runc	Cs�d}|j|jj�}td�tdj|��x�|j�D]�}t||�|krRt||�}ddt||�}|d}d}td	j||||��td
j|��x||D]}tdj|��q�Wtdj|||��q2Wtd
�dS)Nrzdigraph packages {z{}g�?g333333�?�g�������?g�?z""{}" [color="{:.12g} {:.12g} {}"];z
"{}" -> {{z"{}"z!}} [color="{:.12g} {:.12g} {}"];
�}g��s���?)�	_get_depsr�sack�print�format�keys�len)	r�headerZmaxdepsZdeps�pkg�h�s�b�reqrrrrBs zRepoGraphCommand.do_dotc
Cs�i}i}g}|j�j�}x�|D]�}i}x�|jD]�}t|�}||krDq.|jd�rPq.||krb||}	n@|j|d�}	|	s�tjtd�|�|j	|�q.n
|	dj
}	|	||<|	|j
kr�d||	<|	|ks.|	|kr�q.nd||	<|j�||j
<q.WqW|S)Nz	solvable:)ZprovideszNothing provides: '%s'r)Zquery�	available�requires�str�
startswith�filterr�debugr�appendrr')
r$r0Zprov�skipr/r*Zxxr.ZreqnameZproviderrrrr#Ys8





zRepoGraphCommand._get_depsN)rr)rrr�aliasesrZsummaryrr r�staticmethodr#rrrrr
0sr
)Z
__future__rrZdnfpluginscorerrZdnf.cliZdnfrZPluginrr
ZCommandr
rrrr�<module>s__pycache__/universal_hooks.cpython-36.pyc000064400000013564150402642250014626 0ustar003

���_�@sddlZddlZddlZddlZddlmZddlZddlZddlZddlZddl	m
Z
eje�Z
e
jej�e
jejej��Gdd�de
�ZGdd�dejd�ZGd	d
�d
e�Ze�Zddd
�ZGdd�dejd�ZGdd�de�Zdd�Zdd�Zdd�ZdS)�N)�path)�PlugincsPeZdZdZ�fdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
�ZS)�UniversalHooksPluginzuniversal-hookscst�j||�d|_dS)Nz/etc/dnf/universal-hooks)�super�__init__�	hook_root)�self�baseZcli)�	__class__��%/usr/lib/python3.6/universal_hooks.pyr,szUniversalHooksPlugin.__init__cCsttj|j|jj�t�dS)N)�_run_dirr�joinr�
pre_config�__name__�LOG)rrrrr0szUniversalHooksPlugin.pre_configcCsttj|j|jj�t�dS)N)r
rrr�configrr)rrrrr3szUniversalHooksPlugin.configcCsttj|j|jj�t�dS)N)r
rrr�resolvedrr)rrrrr6szUniversalHooksPlugin.resolvedcCsttj|j|jj�t�dS)N)r
rrr�sackrr)rrrrr9szUniversalHooksPlugin.sackcCs8|jj}t|jt|t|jj��tt	j
|j|�t�dS)N)�pre_transactionr�
_run_pkg_dirsrr�DnfTransactionInfor	�transactionr
rr)r�namerrrr<sz$UniversalHooksPlugin.pre_transactioncCs8|jj}t|jt|t|jj��ttj	|j|�t�dS)N)
rrrrrrr	r
rr)rrrrrrAsz UniversalHooksPlugin.transaction)r�
__module__�__qualname__rrrrrrrr�
__classcell__rr)r
rr)src@sDeZdZejdd��Zejdd��Zejdd��Zejdd��Zd	S)
�
FileSystemcCsdS)Nr)r�pathnamerrr�globHszFileSystem.globcCsdS)Nr)rrrrr�isdirLszFileSystem.isdircCsdS)Nr)rr�moderrr�accessPszFileSystem.accesscCsdS)Nr)rr!�encodingrrr�NamedTemporaryFileTszFileSystem.NamedTemporaryFileN)	rrr�abc�abstractmethodrr r"r$rrrrrGsr)�	metaclassc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�RealFileSystemcCs
tj|�S)N)r)rrrrrrZszRealFileSystem.globcCs
tj|�S)N)rr )rrrrrr ]szRealFileSystem.isdircCstj||�S)N)�osr")rrr!rrrr"`szRealFileSystem.accesscCstj||d�S)N)r!r#)�tempfiler$)rr!r#rrrr$csz!RealFileSystem.NamedTemporaryFileN)rrrrr r"r$rrrrr(Ysr(�cCs�tj|�sdSxxttj|d��D]b}tj|�r2q"tj|tj�rx|�d|��}tj|dd�}d|j	kr�|j
d||j	�q"|j
d|�q"WdS)Nz/*� T)�shellrz!!! %s did not exit cleanly: %dz!!! %s is not executable)�fsr �sortedrr"r)�X_OK�
subprocessZrun�
returncode�error)Zhook_dir�log�argsZscriptZcmdlineZ	completedrrrr
js


r
c@seZdZejdd��ZdS)�TransactionInfocCsdS)Nr)rrrr�
getMembers|szTransactionInfo.getMembersN)rrrr%r&r7rrrrr6{sr6c@s"eZdZdd�dd�Zdd�ZdS)rN)�returncCs
||_dS)N)r)rrrrrr�szDnfTransactionInfo.__init__cCs|jS)N)r)rrrrr7�szDnfTransactionInfo.getMembers)rrrrr7rrrrr�srcCs�tj|d|�}t|�}i}tjddd���}i}|j�}	x�tt|	�dd�d�D]h}
|
j}||kr`qLd||<|j	|d	�t
tj|d
||�|�x&|j�D]\}}
|
j|�r�d||<q�WqLW|j
�x&|D]}t
tj||�|d|j�q�WWdQRXdS)
zu

    :param str base_dir:
    :param logging.Logger log:
    :param str slot:
    :param TransactionInfo tinfo:
    Z
multi_pkgs�wzutf-8)r!r#cSs|jS)N)r)�mrrr�<lambda>�sz_run_pkg_dirs.<locals>.<lambda>)�key��
Zpkgsz--pkg_list=N)rr�_make_dir_matchersr.r$r7r/�setr�writer
�items�search�flush)Zbase_dirr4ZslotZtinfoZ
wildcard_path�dir_matchersZwildcard_to_runZ
temp_pkg_fileZmembers_seen�members�memberZpkgZwildcard_dirZmatcherrrrr�s&	

rcCsFi}x<tj|d�D]*}tj|�rtjtj|��}t|�||<qW|S)Nz/*)r.rr r�basename�normpath�_regex_from_dir)Zwc_slot_dirrEZpthrrrr?�s
r?cCs|jdd�}tjd|d�S)NZ__WILDCARD__z.*�^�$)�replace�re�compile)r�exprrrrrJ�srJ)r+)r%rZloggingr)rrNr1�sysr*ZdnfrZ	getLoggerrrZsetLevelZERRORZ
addHandlerZ
StreamHandler�stderrr�ABCMetarr(r.r
r6rrr?rJrrrr�<module>s,

&	__pycache__/groups_manager.cpython-36.opt-1.pyc000064400000020721150402642250015354 0ustar003

�f�4�@s�ddlmZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
mZmZddl
Z
ddlZ
dZejdje��Zejd�Zdddd	�Zd
d�Zdd
�Zdd�Ze
jjGdd�de
jj��ZdS)�)�absolute_import)�unicode_literalsN)�_�loggerz
-a-z0-9_.:z^[{}]+$z^[-a-zA-Z0-9_.@]+$T)Zdefault_explicitZuservisible_explicitZempty_groupscCstj|�stjtd���|S)zgroup id validatorzInvalid group id)�RE_GROUP_ID�match�argparse�ArgumentTypeErrorr)�value�r�$/usr/lib/python3.6/groups_manager.py�
group_id_type.s
r
cCsN|jdd�}t|�dkr&tjtd���|\}}tj|�sFtjtd���||fS)ztranslated texts validator�:�z6Invalid translated data, should be in form 'lang:text'z*Invalid/empty language for translated data)�split�lenrr	r�RE_LANGr)r
�data�lang�textrrr�translation_type5s

rcCs:|j�}tjdjt�d|�}|s6tjjtd�j|���|S)z#generate group id based on its namez[^{}]�zFCan't generate group id from '{}'. Please specify group id using --id.)	�lower�re�sub�format�RE_GROUP_ID_VALID�dnf�cli�CliErrorr)r�group_idrrr�
text_to_idAsr!csdeZdZdZed�Z�fdd�Zedd��Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Z�ZS)�GroupsManagerCommand�groups-managerz$create and edit groups metadata filecstt|�j|�tj�|_dS)N)�superr"�__init__�libcomps�Comps�comps)�selfr)�	__class__rrr%QszGroupsManagerCommand.__init__cCs�|jddgdtd�d�|jddgdtd�d�|jddtd	�d
�|jddd
td�d�|jdttd�d�|jddtd�d�|jdtd�d�|jdttd�d�|jddgdttd�d�|jddgdttd �d�|j�}|jd!d"ddtd#�d$�|jd%d"d&dtd'�d$�|j�}|jd(dtd)�d*�|jd+dtd,�d*�|jd-dd
td.�d�|jd/dtd0�d*�|jd1d2d3td4�d5�dS)6Nz--load�appendz	COMPS.XMLzload groups metadata from file)�action�default�metavar�helpz--savezsave groups metadata to filez--mergez%load and save groups metadata to file)r.r/z--print�
store_trueFz#print the result metadata to stdout)r,r-r/z--idzgroup id)�typer/z-nz--namez
group name)r/z
--descriptionzgroup descriptionz--display-orderzgroup display orderz--translated-namez	LANG:TEXTztranslated name for the group)r,r-r.r1r/z--translated-descriptionz$translated description for the groupz--user-visible�user_visiblez%make the group user visible (default))�destr,r-r/z--not-user-visibleZstore_falsezmake the group user invisiblez--mandatoryz%add packages to the mandatory section)r,r/z
--optionalz$add packages to the optional sectionz--removez5remove packages from the group instead of adding themz--dependenciesz-include also direct dependencies for packages�packages�*ZPACKAGEzpackage specification)�nargsr.r/)�add_argumentrr
�intrZadd_mutually_exclusive_group)�parserZvisibleZsectionrrr�
set_argparserUsR








z"GroupsManagerCommand.set_argparsercCs�|jj}|jjr"d|_d|_d|_|jjrP|jjj	d|jj�|jj
j|jj�|jjs�|jj
s�|jjs�|jjs�|jjdk	s�|jjr�|jjr�|jjr�tjjtd���dS)NTFrz;Can't edit group without specifying it (use --id or --name))r�demands�optsr4Zsack_activationZavailable_reposZload_system_repo�merge�load�insert�saver+�description�
display_order�translated_name�translated_descriptionr2�id�namerrr)r)r;rrr�	configure�s"zGroupsManagerCommand.configurecCs �x|jjD�]
}tj�}yp|jd�r~tj|��F}tjdd�}z$t	j
||�|j�|j|j
�Wdtj|j
�XWdQRXn
|j|�Wn~tttjfk
�r}zXt�}x2|j�D]&}||kr�q�tj|j��|j|�q�Wtjjtd�j||���WYdd}~XqX|j|7_qWdS)zm
        Loads all input xml files.
        Returns True if at least one file was successfuly loaded
        z.gzF)�deleteNzCan't load file "{}": {})r<r>r&r'�endswith�gzip�open�tempfileZNamedTemporaryFile�shutilZcopyfileobj�closeZ	fromxml_frF�os�unlink�IOError�OSErrorZParserError�setZget_last_errorsr�error�strip�addr�
exceptions�Errorrrr()r)�	file_nameZ
file_compsZgz_fileZ	temp_file�err�seenrTrrr�load_input_files�s,
$z%GroupsManagerCommand.load_input_filescCs�x�|jjD]�}y|jj|td�}Wn*tjk
rL}z|g}WYdd}~XnX|r
x"|dd�D]}tj|j	��q`Wt
jjt
d�j||dj	����q
WdS)N)�xml_options�zCan't save file "{}": {}���r_)r<r@r(Zxml_f�COMPS_XML_OPTIONSr&ZXMLGenErrorrrTrUrrWrXrr)r)rY�errorsrZrrr�save_output_files�sz&GroupsManagerCommand.save_output_filescCs\d}|r*x |jjD]}|j|kr|}PqW|dkrX|rXx |jjD]}|j|kr@|}Pq@W|S)zl
        Try to find group according to command line parameters - first by id
        then by name.
        N)r(�groupsrErF)r)r rF�groupZgrprrr�
find_group�s

zGroupsManagerCommand.find_groupcCs�dd�}|jjr|jj|_|jjr,|jj|_|jjr>|jj|_|jjdk	rT|jj|_|jjrj||jj�|_|jj	r�||jj	�|_
|jj�r�t�}xZ|jjD]N}t
jj|�}|j|jjdddd�j�}|s�tjtd�j|��q�|j|�q�W|jj�r2t�}x|D]}|j|j��qW|j|jjj�j|d��d	d
�|D�}	|jj�r�x�|	D].}
x&|j|
tj d�D]}|jj|��qfW�qPWnd|jj!�r�tj"}n|jj#�r�tj$}ntj%}x8t&|	�D],}
|j|
|d��s�|jj'tj(|
|d���q�WdS)zE
        Set attributes and package lists for selected group
        cSs&tj�}x|D]\}}|||<qW|S)N)r&ZStrDict)ZlstZstr_dictrrrrr�langlist_to_strdict�sz<GroupsManagerCommand.edit_group.<locals>.langlist_to_strdictNTF)Z
with_nevraZ
with_providesZwith_filenameszNo match for argument: {})ZprovidescSsh|]
}|j�qSr)rF)�.0�pkgrrr�	<setcomp>sz2GroupsManagerCommand.edit_group.<locals>.<setcomp>)rFr1))r<rFrAZdescrBr2ZuservisiblerCZname_by_langrDZdesc_by_langr4rSrZsubjectZSubjectZget_best_query�baseZsackZlatestrZwarningrr�updateZdependenciesZrequiresZqueryZfilterm�removeZpackages_matchr&ZPACKAGE_TYPE_UNKNOWNZ	mandatoryZPACKAGE_TYPE_MANDATORYZoptionalZPACKAGE_TYPE_OPTIONALZPACKAGE_TYPE_DEFAULT�sortedr+ZPackage)r)rdrfr4Zpkg_specZsubj�qZrequirementsrhZ	pkg_namesZpkg_nameZpkg_typerrr�
edit_group�sT










zGroupsManagerCommand.edit_groupcCs|j�|jjs|jjr�|j|jj|jjd�}|dkr�|jjrNtjjt	d���t
j�}|jjrt|jj|_|jj|_nD|jjr�t|jj�}|j|dd�r�tj
jt	d�j||jj���||_|jjj|�|j|�|j�|jjs�|jjr�t|jjtd��dS)N)r rFz-Can't remove packages from non-existent groupzRGroup id '{}' generated from '{}' is duplicit. Please specify group id using --id.)r])r\r<rErFrerlrrWrXrr&ZGroupr!rrrr(rcr+rorb�printr@Zxml_strr`)r)rdr rrr�run!s,

zGroupsManagerCommand.run)r#)�__name__�
__module__�__qualname__�aliasesrZsummaryr%�staticmethodr:rGr\rbrerorq�
__classcell__rr)r*rr"Ls1$=r")Z
__future__rrrrJr&rOrrMrLZdnfpluginscorerrrZdnf.clir�compilerrrr`r
rr!ZpluginZregister_commandrZCommandr"rrrr�<module>s,
__pycache__/copr.cpython-36.opt-1.pyc000064400000050311150402642250013304 0ustar003

�fZv�@s�ddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlmZm
Z
ddlZddlmZddlmZddlZy$ddlmZmZmZmZdd�ZWnLek
�rd	d
�ZyddlmZWnek
r�dd�ZYnXYnXd
Zeed�ed�g�Zeed�ed�dg�Ze�rdddl m!Z!m"Z"m#Z#ddl$m%Z%m&Z&m'Z'n(ddl!m!Z!m"Z"m#Z#ddl(m%Z%m&Z&m'Z'ej)j*Gdd�dej+j,��Z-ej)j*Gdd�de-��Z.dS)�)�print_functionN)�_�logger)�PY3)�ucd)�name�version�codename�os_release_attrcCst�t�t�fS)N)rrr	�rr�/usr/lib/python3.6/copr.py�linux_distribution.sr
cCsdS)N�r)rrrrr
1sr
)r
cCsrtd��`}i}xF|D]>}y$|j�jd�\}}|jd�||<Wqtk
rPYqXqW|d|ddfSQRXdS)Nz/etc/os-release�=�"�NAMEZ
VERSION_ID)�open�rstrip�split�strip�
ValueError)Zos_release_fileZos_release_data�lineZos_release_keyZos_release_valuerrrr
7s


�copr�yes�y�no�nr)�ConfigParser�
NoOptionError�NoSectionError)�urlopen�	HTTPError�URLErrorc@seZdZdZdZdZdZdZdZedeZ	d8Z
ed	�Zd
Z
ed�Zedd
��Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zd d!�Zd"d#�Zed$d%��Zd&d'�Zd(d)�Zd*d+�Z d,d-�Z!d.d/�Z"d0d1�Z#ed2d3��Z$ed4d5��Z%ed6d7��Z&dS)9�CoprCommandz Copr plugin for DNF Nzcopr.fedorainfracloud.orgZfedoraZhttpsi�z://rz Interact with Copr repositories.Ta�
  enable name/project [chroot]
  disable name/project
  remove name/project
  list --installed/enabled/disabled
  list --available-by-user=NAME
  search project

  Examples:
  copr enable rhscl/perl516 epel-6-x86_64
  copr enable ignatenkobrain/ocltoys
  copr disable rhscl/perl516
  copr remove rhscl/perl516
  copr list --enabled
  copr list --available-by-user=ignatenkobrain
  copr search tests
    c	Cs�|jddddddddgd	�|j�}|jd
dtd�d
�|jddtd�d
�|jddtd�d
�|jddtd�d�|jdtd�d�|jddd�dS)N�
subcommand��help�enable�disable�remove�list�search)�nargs�choicesz--installed�
store_truez.List all installed Copr repositories (default))�actionr&z	--enabledzList enabled Copr repositoriesz
--disabledzList disabled Copr repositoriesz--available-by-userrz-List available Copr repositories by user NAME)�metavarr&z--hubz(Specify an instance of Copr to work with)r&�arg�*)r,)�add_argumentZadd_mutually_exclusive_groupr)�parserZlist_optionrrr�
set_argparserpszCoprCommand.set_argparsercCs�|jjjjdkrdSd}t�}g}|jjjd}tjj	|t
d�}tjj|�r�|j|�|j
|�|jdd�r�|jdd�r�|jdd�}|jdd�}||g|_n
ddg|_xHtjtjj	|t
d��D],}|jd�r�tjj	|t
d|�}	|j|	�q�Wg}
t|jj��r|jjdjd	�}
t|
�d
k�rV|jj�rVtjtd�td��tjjtd
���nL|jj�r�t|
�d
k�r�|j|_|j|_n t|
�d
k�r�|
d}n|jj}|�rH|�rHd|_|j
t |dd��|j!||dd�}|�rH|j!||d|j"�}|j!||d|j#�}
||_|d||_t$|
�|j#k�rH|jd|
7_|jd|
7_|j�s�d|k�rr||_|j"d||_n|jdd�d|_||_dS)Nrrz.conf�main�distribution�
releaseverFz.d�/�zError: z^specify Copr hub either with `--hub` or using `copr_hub/copr_username/copr_projectname` formatzmultiple hubs specifiedT)�reverse�hostname�protocol�portz://�:r%)%�cliZcommand�optsr�base�confZpluginconfpath�os�path�join�PLUGIN_CONF�isfile�append�readZ
has_option�get�
chroot_config�listdir�endswith�lenr1r�hubr�criticalr�dnf�CliError�default_hostname�
copr_hostname�default_url�copr_url�sorted�_read_config_item�default_protocol�default_port�int)�selfZcopr_hubZcopr_plugin_configZconfig_filesZconfig_pathZdefault_config_filer7r8�filenameZconfig_file�projectr<r=r>rrr�	configure�sl








zCoprCommand.configurecCs*y|j||�Sttfk
r$|SXdS)N)rKrr)r]�configrPZsection�defaultrrrrY�szCoprCommand._read_config_itemcCstjjdj|j���dS)Nz{0}
)�sys�stderr�write�formatr)r]�textrrr�_user_warning_before_prompt�sz'CoprCommand._user_warning_before_promptc
Cs�|jjd}|dkr&|jjj|�dS|dkrl|jjrH|j|jj�dS|j|jj	j
d|jj|jj�dSy|jj
d}WnLttfk
r�tjtd�td��|jjj|�tjjtd���YnXy\|jj
d}t|jj
�dkr�tjjtd���|jd	�|_t|j�d
k�r$tjjtd���Wn*tk
�rP|j�}|jd	�|_YnX|dk�rj|j|�dS|jd
�}t|�dk�r�tjtd�td��tjjtd���n<t|�dk�r�|d}|d}n|d}|d}|d
|}dj|jj	j|j|j|�|�}|dk�rn|j �td�}d
j!|j||g�}dj|�}	|j"||	�|j#||�tj$td��|j%||�nr|dk�r�|j �|j&||�tj$td��nD|dk�r�|j �|j'||�tj$td��ntjjtd�j|���dS)Nrr&r*zError: z>exactly two additional parameters to copr command are requiredr%�zToo many arguments.�-r:zOBad format of optional chroot. The format is distribution-version-architecture.r+r9zEuse format `copr_username/copr_projectname` to reference copr projectzbad copr project formatz{0}/_copr:{1}:{2}:{3}.repor'a
Enabling a Copr repository. Please note that this repository is not part
of the main distribution, and quality may vary.

The Fedora Project does not exercise any power over the contents of
this repository beyond the rules outlined in the Copr FAQ at
<https://docs.pagure.org/copr.copr/user_documentation.html#what-i-can-build-in-copr>,
and packages are not held to any quality or security level.

Please do not file bug reports about these packages in Fedora
Bugzilla. In case of problems, contact the owner of this repository.
z!Do you really want to enable {0}?z Repository successfully enabled.r(z!Repository successfully disabled.r)z Repository successfully removed.zUnknown subcommand {}.)rir:)(rAr$r@Z	optparserZ
print_helpZavailable_by_user�_list_user_projects�_list_installed_repositoriesrBrCZreposdir�enabledZdisabledr1r�
IndexErrorrrQrrRrSrO�
exceptions�Errorr�chroot_parts�
_guess_chroot�_searchrf�get_reposdirrU�_sanitize_username�
_need_rootrF�	_ask_user�_download_repo�info�_runtime_deps_warning�
_disable_repo�_remove_repo)
r]r$�project_name�chrootr_�
copr_username�copr_projectname�
repo_filenamery�msgrrr�run�s�








zCoprCommand.runcCs�|jjd�d}tjd|j|�}|j|jko8tjd|�}tjd|�}|jjr`|rh|rhdSn|shdStjd|�rxdStjd|�r�dS|j	}	|	r�|s�|	r�|r�dSd}
tjd	|�r�|jd
d�\}}}
}|d|
d|}n�tjd|��r2|jdd�}|j
d
d
�djd
d�d}|d|dd|d}n.|jdd�}|jd|dd|d}d}
|	�sn|d7}|
�r||d7}t|�|
S)Nr9r%z_copr:Z_copr_z_copr:|^_copr_zcopr:.*:.*:.*:mlz
coprdep:.*Fzcopr:r?r:rjrirTz (disabled)z *���)
�repofiler�re�matchrUrWrVrArPrm�rsplitrT�print)r]�repo_id�repo�enabled_only�
disabled_only�	file_nameZ	match_newZ	match_oldZ	match_anyrm�old_reporrUZ
copr_ownerZcopr_dirr�Z	copr_namerrr�_list_repo_file8sBzCoprCommand._list_repo_filecCsFd}x,|jjj�D]\}}|j||||�rd}qW|rBttd��dS)NFTz�* These coprs have repo file with an old format that contains no information about Copr hub - the default one was assumed. Re-enable the project to fix this.)rB�repos�itemsr�r�r)r]Z	directoryr�r�r�r�r�rrrrlisz(CoprCommand._list_installed_repositoriesc

Cs�dj|�}|j|}|jj|dd�}ytj|j��}Wn*tk
r`tj	j
td�j|���YnX|j|�td�j|�}|j
|�xL|dD]@}dj||d�}|d	p�td
�}	|jjjt|�|	�}t|�q�WdS)Nz!/api_3/project/list?ownername={0}zw+)�modez+Can't parse repositories for username '{}'.zList of {} coprsr�z
{0}/{1} : r�descriptionzNo description given)rfrWrBr �json�loadsrJrrRrorpr�_check_json_output�_print_match_section�output�
fmtKeyValFillrr�)
r]Z	user_name�api_path�url�res�
json_parse�section_text�itemr��descrrrrkss"



zCoprCommand._list_user_projectsc
Cs�dj|�}|j|}|jj|dd�}ytj|j��}Wn*tk
r`tj	j
td�j|���YnX|j|�td�j|�}|j
|�xJ|dD]>}dj|d�}|d	p�td
�}	|jjjt|�|	�}t|�q�WdS)Nz/api_3/project/search?query={}zw+)r�zCan't parse search for '{}'.zMatched: {}r�z{0} : Z	full_namer�zNo description given.)rfrWrBr r�r�rJrrRrorprr�r�r�r�rr�)
r]Zqueryr�r�r�r�r�r�r�r�rrrrs�s 



zCoprCommand._searchcCs|jjj|�}t|�dS)N)rBr�Z
fmtSectionr�)r]rgZ	formattedrrrr��sz CoprCommand._print_match_sectioncCsj|jstjjd�d|_tjjdj|j���|jj�rf|jjj	sb|jj
jdj|�dj|�d�rfdSdS)N�
Fz{0}
z
{} [y/N]: z
{} [Y/n]: )r�Zdefaultyes_msgT)�
first_warningrcrdrerfrrBZ
_promptWantedrCZassumenor�Zuserconfirm)r]ryr�rrr�_ask_user_no_raise�s
zCoprCommand._ask_user_no_raisecCs |j||�stjjtd���dS)NzSafe and good answer. Exiting.)r�rRrorpr)r]ryr�rrrrw�szCoprCommand._ask_usercCs tj�dkrtjjtd���dS)Nrz/This command has to be run under the root user.)rD�geteuidrRrorpr)�clsrrrrv�szCoprCommand._need_rootcs|j��dks&�ddks&�ddkr,t��|jjjd}t�fdd�dD��r�d
�krbd|}n&dtd
�krxd|}ndj�d|�}n�d�kr�tj	d�}d�kr�dj|�}ndj�d|�}nPd�kr�tj	d�}d�kr�dj|�}ndj�d|�}nd�dj
dd�d}|S)z2 Guess which chroot is equivalent to this machine NrFr%Zbasearchcsg|]}|�k�qSrr)�.0r)�distrr�
<listcomp>�sz-CoprCommand._guess_chroot.<locals>.<listcomp>�Fedora�Fedora LinuxZRawhidezfedora-rawhide-ZrawhideZredhat_support_product_versionzfedora-{0}-{1}ZMageiaz%{distro_arch}ZCauldronzmageia-cauldron-{}zmageia-{0}-{1}ZopenSUSEz%{_target_cpu}Z
Tumbleweedzopensuse-tumbleweed-{}zopensuse-leap-{0}-{1}zepel-%s-x86_64�.)r�r�)rLr
rBrC�
substitutions�anyr
rf�rpmZexpandMacror)r]Zdistarchr~r)r�rrr�s, 



zCoprCommand._guess_chrootcCs�dj|jdd��}|jd}dj|||�}y*t|j|�}tjj|�rRtj|�W�n^t	k
�rl}z�|j
dkr�td�j|j||j
t|��}t
jj|��td�}|jjd�}	|	�r>tj|	�jd�}
tj|
�}
|td	�jdj|j�|�7}|
jd
��r0|td�djd
d�|
d
D��7}|td�j|�7}t
jj|��n|td�j|�7}t
jj|��WYdd}~XnJtk
�r�}z,td�j|j||jj�}t
jj|��WYdd}~XnX|j�}|jd�}tjd|��r�tjj|jjjd|dd�d�}|j|j k�rR|j!ddd�j!|j"d�j!ddd�j!dd�j!dd�}
tjj|
��rRtj|
�t#|d��.}|j$|�x|j%�D]}|j$|��qrWWdQRXtj&|t'j(t'j)Bt'j*Bt'j+B�dS) Nrjr%z%/coprs/{0}/repo/{1}/dnf.repo?arch={2}i�z Request to {0} failed: {1} - {2}z+It wasn't possible to enable this project.
zCopr-Error-Datazutf-8z1Repository '{0}' does not exist in project '{1}'.zavailable chrootsz
Available repositories: z, css|]}dj|�VqdS)z'{}'N)rf)r��xrrr�	<genexpr>�sz-CoprCommand._download_repo.<locals>.<genexpr>z�

If you want to enable a non-default repository, use the following command:
  'dnf copr enable {0} <repository>'
But note that the installed repo file will likely need a manual modification.zProject {0} does not exist.zFailed to connect to {0}: {1}z\[copr:rriz.repoz_copr:�_coprrr?Zgroup_�@�wbr�r����),rFrqrfr rWrDrE�existsr)r!�coder�strrRrorpZheadersrK�base64Z	b64decode�decoder�r�r"�reason�strerror�readliner�r�rBrCrtrV�replacerUrre�	readlines�chmod�stat�S_IRUSR�S_IWUSR�S_IRGRP�S_IROTH)r]r}r�Zshort_chrootZarchr�Zresponse�eZ	error_msgZ
error_dataZerror_data_decodedZ
first_linerZold_repo_filename�frrrrx�sX





$

zCoprCommand._download_repocs�|jjdd�|jj�|j|j|�|��g}x(�jj�D]}|jd�rJq:|j|�q:W|s`dSt	d�}t
jd��|jdj
��fdd	�|D���}|j|t	d
��s�x,|D]$}|jjj�j||jjjddi�q�WdS)
a,
        In addition to the main copr repo (that has repo ID prefixed with
        `copr:`), the repofile might contain additional repositories that
        serve as runtime dependencies. This method informs the user about
        the additional repos and provides an option to disable them.
        T)r�zcopr:Na�Maintainer of the enabled Copr repository decided to make
it dependent on other repositories. Such repositories are
usually necessary for successful installation of RPMs from
the main Copr repository (they provide runtime dependencies).

Be aware that the note about quality and bug-reporting
above applies here too, Fedora Project doesn't control the
content. Please review the list:

{0}

These repositories have been enabled automatically.r%z

cs*g|]"}djt��|�jj|d�d��qS)z){num:2}. [{repoid}]
    baseurl={baseurl}�baseurl)Znum�repoidr�)rf�next�cfgZgetValue)r�r�)�counterr�rrr�9sz5CoprCommand._runtime_deps_warning.<locals>.<listcomp>z!Do you want to keep them enabled?rm�0)rB�resetZread_all_repos�_get_copr_reporur��sections�
startswithrIr�	itertools�countrfrFr�rC�write_raw_configfiler�r�)r]rr�Zruntime_depsr�ryZdepr)r�r�rrzs*



z!CoprCommand._runtime_deps_warningcCs�dj|jjdd�d|j|�|�}||jjkr�dj|j|�|�}}||jjkr�d|jj|jkr�|jj|jjd�d	}y.|jdd�djdd�d}||jkr�dSWq�tk
r�Yq�XndS|jj|S)
Nzcopr:{0}:{1}:{2}r?r%rz{0}-{1}r�r9rir�)	rfrUr�rurBr�r�rrn)r]rr�r�r�rUrrrr�Fs 

zCoprCommand._get_copr_repocCst|j||�}|s,tjjtdj|j||����ytj|j	�Wn2t
k
rn}ztjjt|���WYdd}~XnXdS)Nz&Failed to remove copr repo {0}/{1}/{2})r�rRrorprrfrUrDr)r��OSErrorr�)r]rr�r�r�rrrr|\szCoprCommand._remove_repocCsd|j||�}|dkr,tjjtdj||����x2|jj�D]$}|jj	j
|j||jj	jddi�q8WdS)Nz!Failed to disable copr repo {}/{}rmr�)
r�rRrorprrfr�r�rBrCr�r�r�)r]rr�r�r�rrrr{hszCoprCommand._disable_repocCs<ytj|j��}Wn$tk
r6tjjtd��dSX|S)z� Wrapper around response from server

        check data and print nice error in case of some error (and return None)
        otherwise return json object.
        zUnknown response from server.N)r�r�rJrrRr@rSr)r�r�r�rrr�	_get_datatszCoprCommand._get_datacCs"d|krtjjdj|d���dS)N�errorz{})rRrorprf)r�Zjson_objrrrr��szCoprCommand._check_json_outputcCs&|ddkrdj|dd��S|SdS)Nrr�zgroup_{}r%)rf)r�rrrrru�szCoprCommand._sanitize_username)r)'�__name__�
__module__�__qualname__�__doc__rLrTZdefault_hubrZr[rV�aliasesr�summaryr��usage�staticmethodr5r`rYrhr�r�rlrkrsr�r�rw�classmethodrvrrrxrzr�r|r{r�r�rurrrrr#PsDL_1
%82r#c@sDeZdZdZdZed�ZdZdd�Zdd�Z	e
d	d
��Zdd�Zd
S)�PlaygroundCommandz Playground plugin for DNF �
playgroundz$Interact with Playground repository.z [enable|disable|upgrade]c	Cs0|j�|jtd�td��dj|j�}|jj|dd�}|j|�}|j�|ddkrft	j
jtd���x�|d	D]�}d
j|d|d�}d
j|jjj
|jdd��}yj||dkr�wpdj|j||�}|jj|dd�}|j|�}|j�|o�d|ko�|ddk�r
|j||�Wqpt	jjk
�r&YqpXqpWdS)Nz!Enabling a Playground repository.zDo you want to continue?z{0}/api/playground/list/zw+)r�r��okzUnknown response from server.r�z{0}/{1}ZusernameZcoprnamez{}/_playground_{}.repor9rjZchrootsz{0}/api/coprs/{1}/detail/{2}/)rvrwrrfrWrBr r��closerRr@rSrCrtr�rxrorp)	r]r~Zapi_urlr�r�r�r}r�Zoutput2rrr�_cmd_enable�s8




zPlaygroundCommand._cmd_enablecCs6|j�x(tjdj|jjj��D]}|j|�q WdS)Nz{}/_playground_*.repo)rv�globrfrBrCrtr|)r]r�rrr�_cmd_disable�szPlaygroundCommand._cmd_disablecCs|jdddddgd�dS)Nr$r%r'r(�upgrade)r,r-)r3)r4rrrr5�szPlaygroundCommand.set_argparsercCs�tjjd��|jjd}|j�}|dkrB|j|�tjt	d��n`|dkrb|j
�tjt	d��n@|dkr�|j
�|j|�tjt	d��ntjjt	d	�j|���dS)
Nz%Playground is temporarily unsupportedrr'z-Playground repositories successfully enabled.r(z.Playground repositories successfully disabled.r�z-Playground repositories successfully updated.zUnknown subcommand {}.)rRrorprAr$rrr�rryrr�rf)r]r$r~rrrr��s

zPlaygroundCommand.runN)r�)
r�r�r�r�r�rr�r�r�r�r�r5r�rrrrr��s r�)/Z
__future__rr�r�r�rDr�Zshutilr�rcr�ZdnfpluginscorerrrRZ
dnf.pycomprZdnf.i18nrr�Zdistrorrr	r
r
�ImportError�platformrG�setZYESZNOZconfigparserrrrZurllib.requestr r!r"Zurllib2ZpluginZregister_commandr@ZCommandr#r�rrrr�<module>sP
B__pycache__/copr.cpython-36.pyc000064400000050311150402642250012345 0ustar003

�fZv�@s�ddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlmZm
Z
ddlZddlmZddlmZddlZy$ddlmZmZmZmZdd�ZWnLek
�rd	d
�ZyddlmZWnek
r�dd�ZYnXYnXd
Zeed�ed�g�Zeed�ed�dg�Ze�rdddl m!Z!m"Z"m#Z#ddl$m%Z%m&Z&m'Z'n(ddl!m!Z!m"Z"m#Z#ddl(m%Z%m&Z&m'Z'ej)j*Gdd�dej+j,��Z-ej)j*Gdd�de-��Z.dS)�)�print_functionN)�_�logger)�PY3)�ucd)�name�version�codename�os_release_attrcCst�t�t�fS)N)rrr	�rr�/usr/lib/python3.6/copr.py�linux_distribution.sr
cCsdS)N�r)rrrrr
1sr
)r
cCsrtd��`}i}xF|D]>}y$|j�jd�\}}|jd�||<Wqtk
rPYqXqW|d|ddfSQRXdS)Nz/etc/os-release�=�"�NAMEZ
VERSION_ID)�open�rstrip�split�strip�
ValueError)Zos_release_fileZos_release_data�lineZos_release_keyZos_release_valuerrrr
7s


�copr�yes�y�no�nr)�ConfigParser�
NoOptionError�NoSectionError)�urlopen�	HTTPError�URLErrorc@seZdZdZdZdZdZdZdZedeZ	d8Z
ed	�Zd
Z
ed�Zedd
��Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zd d!�Zd"d#�Zed$d%��Zd&d'�Zd(d)�Zd*d+�Z d,d-�Z!d.d/�Z"d0d1�Z#ed2d3��Z$ed4d5��Z%ed6d7��Z&dS)9�CoprCommandz Copr plugin for DNF Nzcopr.fedorainfracloud.orgZfedoraZhttpsi�z://rz Interact with Copr repositories.Ta�
  enable name/project [chroot]
  disable name/project
  remove name/project
  list --installed/enabled/disabled
  list --available-by-user=NAME
  search project

  Examples:
  copr enable rhscl/perl516 epel-6-x86_64
  copr enable ignatenkobrain/ocltoys
  copr disable rhscl/perl516
  copr remove rhscl/perl516
  copr list --enabled
  copr list --available-by-user=ignatenkobrain
  copr search tests
    c	Cs�|jddddddddgd	�|j�}|jd
dtd�d
�|jddtd�d
�|jddtd�d
�|jddtd�d�|jdtd�d�|jddd�dS)N�
subcommand��help�enable�disable�remove�list�search)�nargs�choicesz--installed�
store_truez.List all installed Copr repositories (default))�actionr&z	--enabledzList enabled Copr repositoriesz
--disabledzList disabled Copr repositoriesz--available-by-userrz-List available Copr repositories by user NAME)�metavarr&z--hubz(Specify an instance of Copr to work with)r&�arg�*)r,)�add_argumentZadd_mutually_exclusive_groupr)�parserZlist_optionrrr�
set_argparserpszCoprCommand.set_argparsercCs�|jjjjdkrdSd}t�}g}|jjjd}tjj	|t
d�}tjj|�r�|j|�|j
|�|jdd�r�|jdd�r�|jdd�}|jdd�}||g|_n
ddg|_xHtjtjj	|t
d��D],}|jd�r�tjj	|t
d|�}	|j|	�q�Wg}
t|jj��r|jjdjd	�}
t|
�d
k�rV|jj�rVtjtd�td��tjjtd
���nL|jj�r�t|
�d
k�r�|j|_|j|_n t|
�d
k�r�|
d}n|jj}|�rH|�rHd|_|j
t |dd��|j!||dd�}|�rH|j!||d|j"�}|j!||d|j#�}
||_|d||_t$|
�|j#k�rH|jd|
7_|jd|
7_|j�s�d|k�rr||_|j"d||_n|jdd�d|_||_dS)Nrrz.conf�main�distribution�
releaseverFz.d�/�zError: z^specify Copr hub either with `--hub` or using `copr_hub/copr_username/copr_projectname` formatzmultiple hubs specifiedT)�reverse�hostname�protocol�portz://�:r%)%�cliZcommand�optsr�base�confZpluginconfpath�os�path�join�PLUGIN_CONF�isfile�append�readZ
has_option�get�
chroot_config�listdir�endswith�lenr1r�hubr�criticalr�dnf�CliError�default_hostname�
copr_hostname�default_url�copr_url�sorted�_read_config_item�default_protocol�default_port�int)�selfZcopr_hubZcopr_plugin_configZconfig_filesZconfig_pathZdefault_config_filer7r8�filenameZconfig_file�projectr<r=r>rrr�	configure�sl








zCoprCommand.configurecCs*y|j||�Sttfk
r$|SXdS)N)rKrr)r]�configrPZsection�defaultrrrrY�szCoprCommand._read_config_itemcCstjjdj|j���dS)Nz{0}
)�sys�stderr�write�formatr)r]�textrrr�_user_warning_before_prompt�sz'CoprCommand._user_warning_before_promptc
Cs�|jjd}|dkr&|jjj|�dS|dkrl|jjrH|j|jj�dS|j|jj	j
d|jj|jj�dSy|jj
d}WnLttfk
r�tjtd�td��|jjj|�tjjtd���YnXy\|jj
d}t|jj
�dkr�tjjtd���|jd	�|_t|j�d
k�r$tjjtd���Wn*tk
�rP|j�}|jd	�|_YnX|dk�rj|j|�dS|jd
�}t|�dk�r�tjtd�td��tjjtd���n<t|�dk�r�|d}|d}n|d}|d}|d
|}dj|jj	j|j|j|�|�}|dk�rn|j �td�}d
j!|j||g�}dj|�}	|j"||	�|j#||�tj$td��|j%||�nr|dk�r�|j �|j&||�tj$td��nD|dk�r�|j �|j'||�tj$td��ntjjtd�j|���dS)Nrr&r*zError: z>exactly two additional parameters to copr command are requiredr%�zToo many arguments.�-r:zOBad format of optional chroot. The format is distribution-version-architecture.r+r9zEuse format `copr_username/copr_projectname` to reference copr projectzbad copr project formatz{0}/_copr:{1}:{2}:{3}.repor'a
Enabling a Copr repository. Please note that this repository is not part
of the main distribution, and quality may vary.

The Fedora Project does not exercise any power over the contents of
this repository beyond the rules outlined in the Copr FAQ at
<https://docs.pagure.org/copr.copr/user_documentation.html#what-i-can-build-in-copr>,
and packages are not held to any quality or security level.

Please do not file bug reports about these packages in Fedora
Bugzilla. In case of problems, contact the owner of this repository.
z!Do you really want to enable {0}?z Repository successfully enabled.r(z!Repository successfully disabled.r)z Repository successfully removed.zUnknown subcommand {}.)rir:)(rAr$r@Z	optparserZ
print_helpZavailable_by_user�_list_user_projects�_list_installed_repositoriesrBrCZreposdir�enabledZdisabledr1r�
IndexErrorrrQrrRrSrO�
exceptions�Errorr�chroot_parts�
_guess_chroot�_searchrf�get_reposdirrU�_sanitize_username�
_need_rootrF�	_ask_user�_download_repo�info�_runtime_deps_warning�
_disable_repo�_remove_repo)
r]r$�project_name�chrootr_�
copr_username�copr_projectname�
repo_filenamery�msgrrr�run�s�








zCoprCommand.runcCs�|jjd�d}tjd|j|�}|j|jko8tjd|�}tjd|�}|jjr`|rh|rhdSn|shdStjd|�rxdStjd|�r�dS|j	}	|	r�|s�|	r�|r�dSd}
tjd	|�r�|jd
d�\}}}
}|d|
d|}n�tjd|��r2|jdd�}|j
d
d
�djd
d�d}|d|dd|d}n.|jdd�}|jd|dd|d}d}
|	�sn|d7}|
�r||d7}t|�|
S)Nr9r%z_copr:Z_copr_z_copr:|^_copr_zcopr:.*:.*:.*:mlz
coprdep:.*Fzcopr:r?r:rjrirTz (disabled)z *���)
�repofiler�re�matchrUrWrVrArPrm�rsplitrT�print)r]�repo_id�repo�enabled_only�
disabled_only�	file_nameZ	match_newZ	match_oldZ	match_anyrm�old_reporrUZ
copr_ownerZcopr_dirr�Z	copr_namerrr�_list_repo_file8sBzCoprCommand._list_repo_filecCsFd}x,|jjj�D]\}}|j||||�rd}qW|rBttd��dS)NFTz�* These coprs have repo file with an old format that contains no information about Copr hub - the default one was assumed. Re-enable the project to fix this.)rB�repos�itemsr�r�r)r]Z	directoryr�r�r�r�r�rrrrlisz(CoprCommand._list_installed_repositoriesc

Cs�dj|�}|j|}|jj|dd�}ytj|j��}Wn*tk
r`tj	j
td�j|���YnX|j|�td�j|�}|j
|�xL|dD]@}dj||d�}|d	p�td
�}	|jjjt|�|	�}t|�q�WdS)Nz!/api_3/project/list?ownername={0}zw+)�modez+Can't parse repositories for username '{}'.zList of {} coprsr�z
{0}/{1} : r�descriptionzNo description given)rfrWrBr �json�loadsrJrrRrorpr�_check_json_output�_print_match_section�output�
fmtKeyValFillrr�)
r]Z	user_name�api_path�url�res�
json_parse�section_text�itemr��descrrrrkss"



zCoprCommand._list_user_projectsc
Cs�dj|�}|j|}|jj|dd�}ytj|j��}Wn*tk
r`tj	j
td�j|���YnX|j|�td�j|�}|j
|�xJ|dD]>}dj|d�}|d	p�td
�}	|jjjt|�|	�}t|�q�WdS)Nz/api_3/project/search?query={}zw+)r�zCan't parse search for '{}'.zMatched: {}r�z{0} : Z	full_namer�zNo description given.)rfrWrBr r�r�rJrrRrorprr�r�r�r�rr�)
r]Zqueryr�r�r�r�r�r�r�r�rrrrs�s 



zCoprCommand._searchcCs|jjj|�}t|�dS)N)rBr�Z
fmtSectionr�)r]rgZ	formattedrrrr��sz CoprCommand._print_match_sectioncCsj|jstjjd�d|_tjjdj|j���|jj�rf|jjj	sb|jj
jdj|�dj|�d�rfdSdS)N�
Fz{0}
z
{} [y/N]: z
{} [Y/n]: )r�Zdefaultyes_msgT)�
first_warningrcrdrerfrrBZ
_promptWantedrCZassumenor�Zuserconfirm)r]ryr�rrr�_ask_user_no_raise�s
zCoprCommand._ask_user_no_raisecCs |j||�stjjtd���dS)NzSafe and good answer. Exiting.)r�rRrorpr)r]ryr�rrrrw�szCoprCommand._ask_usercCs tj�dkrtjjtd���dS)Nrz/This command has to be run under the root user.)rD�geteuidrRrorpr)�clsrrrrv�szCoprCommand._need_rootcs|j��dks&�ddks&�ddkr,t��|jjjd}t�fdd�dD��r�d
�krbd|}n&dtd
�krxd|}ndj�d|�}n�d�kr�tj	d�}d�kr�dj|�}ndj�d|�}nPd�kr�tj	d�}d�kr�dj|�}ndj�d|�}nd�dj
dd�d}|S)z2 Guess which chroot is equivalent to this machine NrFr%Zbasearchcsg|]}|�k�qSrr)�.0r)�distrr�
<listcomp>�sz-CoprCommand._guess_chroot.<locals>.<listcomp>�Fedora�Fedora LinuxZRawhidezfedora-rawhide-ZrawhideZredhat_support_product_versionzfedora-{0}-{1}ZMageiaz%{distro_arch}ZCauldronzmageia-cauldron-{}zmageia-{0}-{1}ZopenSUSEz%{_target_cpu}Z
Tumbleweedzopensuse-tumbleweed-{}zopensuse-leap-{0}-{1}zepel-%s-x86_64�.)r�r�)rLr
rBrC�
substitutions�anyr
rf�rpmZexpandMacror)r]Zdistarchr~r)r�rrr�s, 



zCoprCommand._guess_chrootcCs�dj|jdd��}|jd}dj|||�}y*t|j|�}tjj|�rRtj|�W�n^t	k
�rl}z�|j
dkr�td�j|j||j
t|��}t
jj|��td�}|jjd�}	|	�r>tj|	�jd�}
tj|
�}
|td	�jdj|j�|�7}|
jd
��r0|td�djd
d�|
d
D��7}|td�j|�7}t
jj|��n|td�j|�7}t
jj|��WYdd}~XnJtk
�r�}z,td�j|j||jj�}t
jj|��WYdd}~XnX|j�}|jd�}tjd|��r�tjj|jjjd|dd�d�}|j|j k�rR|j!ddd�j!|j"d�j!ddd�j!dd�j!dd�}
tjj|
��rRtj|
�t#|d��.}|j$|�x|j%�D]}|j$|��qrWWdQRXtj&|t'j(t'j)Bt'j*Bt'j+B�dS) Nrjr%z%/coprs/{0}/repo/{1}/dnf.repo?arch={2}i�z Request to {0} failed: {1} - {2}z+It wasn't possible to enable this project.
zCopr-Error-Datazutf-8z1Repository '{0}' does not exist in project '{1}'.zavailable chrootsz
Available repositories: z, css|]}dj|�VqdS)z'{}'N)rf)r��xrrr�	<genexpr>�sz-CoprCommand._download_repo.<locals>.<genexpr>z�

If you want to enable a non-default repository, use the following command:
  'dnf copr enable {0} <repository>'
But note that the installed repo file will likely need a manual modification.zProject {0} does not exist.zFailed to connect to {0}: {1}z\[copr:rriz.repoz_copr:�_coprrr?Zgroup_�@�wbr�r����),rFrqrfr rWrDrE�existsr)r!�coder�strrRrorpZheadersrK�base64Z	b64decode�decoder�r�r"�reason�strerror�readliner�r�rBrCrtrV�replacerUrre�	readlines�chmod�stat�S_IRUSR�S_IWUSR�S_IRGRP�S_IROTH)r]r}r�Zshort_chrootZarchr�Zresponse�eZ	error_msgZ
error_dataZerror_data_decodedZ
first_linerZold_repo_filename�frrrrx�sX





$

zCoprCommand._download_repocs�|jjdd�|jj�|j|j|�|��g}x(�jj�D]}|jd�rJq:|j|�q:W|s`dSt	d�}t
jd��|jdj
��fdd	�|D���}|j|t	d
��s�x,|D]$}|jjj�j||jjjddi�q�WdS)
a,
        In addition to the main copr repo (that has repo ID prefixed with
        `copr:`), the repofile might contain additional repositories that
        serve as runtime dependencies. This method informs the user about
        the additional repos and provides an option to disable them.
        T)r�zcopr:Na�Maintainer of the enabled Copr repository decided to make
it dependent on other repositories. Such repositories are
usually necessary for successful installation of RPMs from
the main Copr repository (they provide runtime dependencies).

Be aware that the note about quality and bug-reporting
above applies here too, Fedora Project doesn't control the
content. Please review the list:

{0}

These repositories have been enabled automatically.r%z

cs*g|]"}djt��|�jj|d�d��qS)z){num:2}. [{repoid}]
    baseurl={baseurl}�baseurl)Znum�repoidr�)rf�next�cfgZgetValue)r�r�)�counterr�rrr�9sz5CoprCommand._runtime_deps_warning.<locals>.<listcomp>z!Do you want to keep them enabled?rm�0)rB�resetZread_all_repos�_get_copr_reporur��sections�
startswithrIr�	itertools�countrfrFr�rC�write_raw_configfiler�r�)r]rr�Zruntime_depsr�ryZdepr)r�r�rrzs*



z!CoprCommand._runtime_deps_warningcCs�dj|jjdd�d|j|�|�}||jjkr�dj|j|�|�}}||jjkr�d|jj|jkr�|jj|jjd�d	}y.|jdd�djdd�d}||jkr�dSWq�tk
r�Yq�XndS|jj|S)
Nzcopr:{0}:{1}:{2}r?r%rz{0}-{1}r�r9rir�)	rfrUr�rurBr�r�rrn)r]rr�r�r�rUrrrr�Fs 

zCoprCommand._get_copr_repocCst|j||�}|s,tjjtdj|j||����ytj|j	�Wn2t
k
rn}ztjjt|���WYdd}~XnXdS)Nz&Failed to remove copr repo {0}/{1}/{2})r�rRrorprrfrUrDr)r��OSErrorr�)r]rr�r�r�rrrr|\szCoprCommand._remove_repocCsd|j||�}|dkr,tjjtdj||����x2|jj�D]$}|jj	j
|j||jj	jddi�q8WdS)Nz!Failed to disable copr repo {}/{}rmr�)
r�rRrorprrfr�r�rBrCr�r�r�)r]rr�r�r�rrrr{hszCoprCommand._disable_repocCs<ytj|j��}Wn$tk
r6tjjtd��dSX|S)z� Wrapper around response from server

        check data and print nice error in case of some error (and return None)
        otherwise return json object.
        zUnknown response from server.N)r�r�rJrrRr@rSr)r�r�r�rrr�	_get_datatszCoprCommand._get_datacCs"d|krtjjdj|d���dS)N�errorz{})rRrorprf)r�Zjson_objrrrr��szCoprCommand._check_json_outputcCs&|ddkrdj|dd��S|SdS)Nrr�zgroup_{}r%)rf)r�rrrrru�szCoprCommand._sanitize_username)r)'�__name__�
__module__�__qualname__�__doc__rLrTZdefault_hubrZr[rV�aliasesr�summaryr��usage�staticmethodr5r`rYrhr�r�rlrkrsr�r�rw�classmethodrvrrrxrzr�r|r{r�r�rurrrrr#PsDL_1
%82r#c@sDeZdZdZdZed�ZdZdd�Zdd�Z	e
d	d
��Zdd�Zd
S)�PlaygroundCommandz Playground plugin for DNF �
playgroundz$Interact with Playground repository.z [enable|disable|upgrade]c	Cs0|j�|jtd�td��dj|j�}|jj|dd�}|j|�}|j�|ddkrft	j
jtd���x�|d	D]�}d
j|d|d�}d
j|jjj
|jdd��}yj||dkr�wpdj|j||�}|jj|dd�}|j|�}|j�|o�d|ko�|ddk�r
|j||�Wqpt	jjk
�r&YqpXqpWdS)Nz!Enabling a Playground repository.zDo you want to continue?z{0}/api/playground/list/zw+)r�r��okzUnknown response from server.r�z{0}/{1}ZusernameZcoprnamez{}/_playground_{}.repor9rjZchrootsz{0}/api/coprs/{1}/detail/{2}/)rvrwrrfrWrBr r��closerRr@rSrCrtr�rxrorp)	r]r~Zapi_urlr�r�r�r}r�Zoutput2rrr�_cmd_enable�s8




zPlaygroundCommand._cmd_enablecCs6|j�x(tjdj|jjj��D]}|j|�q WdS)Nz{}/_playground_*.repo)rv�globrfrBrCrtr|)r]r�rrr�_cmd_disable�szPlaygroundCommand._cmd_disablecCs|jdddddgd�dS)Nr$r%r'r(�upgrade)r,r-)r3)r4rrrr5�szPlaygroundCommand.set_argparsercCs�tjjd��|jjd}|j�}|dkrB|j|�tjt	d��n`|dkrb|j
�tjt	d��n@|dkr�|j
�|j|�tjt	d��ntjjt	d	�j|���dS)
Nz%Playground is temporarily unsupportedrr'z-Playground repositories successfully enabled.r(z.Playground repositories successfully disabled.r�z-Playground repositories successfully updated.zUnknown subcommand {}.)rRrorprAr$rrr�rryrr�rf)r]r$r~rrrr��s

zPlaygroundCommand.runN)r�)
r�r�r�r�r�rr�r�r�r�r�r5r�rrrrr��s r�)/Z
__future__rr�r�r�rDr�Zshutilr�rcr�ZdnfpluginscorerrrRZ
dnf.pycomprZdnf.i18nrr�Zdistrorrr	r
r
�ImportError�platformrG�setZYESZNOZconfigparserrrrZurllib.requestr r!r"Zurllib2ZpluginZregister_commandr@ZCommandr#r�rrrr�<module>sP
B__pycache__/repomanage.cpython-36.opt-1.pyc000064400000014573150402642250014471 0ustar003

�fJ)�@szddlmZddlmZddlmZmZddlZddlZddlZddl	Z	ddl
Z
Gdd�dej�ZGdd�dej
j�ZdS)	�)�absolute_import)�unicode_literals)�_�loggerNcs eZdZdZ�fdd�Z�ZS)�
RepoManage�
repomanagecs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoManageCommand)�self�base�cli)�	__class__�� /usr/lib/python3.6/repomanage.pyr	$szRepoManage.__init__)�__name__�
__module__�__qualname__�namer	�
__classcell__rr)rrr src@s\eZdZdZed�Zdd�Zdd�Zdd�Ze	d	d
��Z
e	dd��Zd
d�Ze	dd��Z
dS)r
rz"Manage a directory of rpm packagescCs,|jjr(|jjr(|jjtjtjd�dS)N)�stdout�stderr)�opts�verbose�quietr
Zredirect_logger�loggingZWARNING�INFO)rrrr�
pre_configure/szRepoManageCommand.pre_configurecCs0|jjr|jjr|jj�|jj}d|_dS)NT)rrrr
Zredirect_repo_progress�demandsZsack_activation)rrrrr�	configure3s
zRepoManageCommand.configurec"s@�jjr �jjr tjjtd����jjr@�jjr@tjjtd����jjr`�jjr`tjjtd����jjr|�jjr|d�j_i}i}i}t�}t	�jj
�}y�d}�jjj
|�jj�jjgd�}|jj��jj|�tjj�r>�jj��jjj�}xH|D]@}	|	j�|kr�|j|	j��|j|	j�i�j|	j�g�j|	�q�WWn�tjjk
�r�g}
�j�jjd�}
t |
�dk�r�tjjtd	����jj!ddd
��jj"ddd�y�jj#|
�jj$j%d
�Wn0t&k
�r�t'j(td�j)dj*|
���YnXYnX�jj+j,t-j.d�j/�}dd�|j0|j0|d�d�j/�D�}|j1�x�|D]~}
|
j2|
j3f}||k�rx|
||k�r�||j|
�n
|
g||<�j4|
�}||k�r�||j�j5|
��n�j5|
�g||<�q@Wg}t�}�jj�r�xh|j6�D]\\}}|||f}||d�}x6|D].}�j4|�}x||D]}|j|��q W�qW�q�Wxb|j7�D]V}t8|j6��}||d�}x4|D],}x$||D]}|jt|j����q|W�qnW�qJW�jj�r|xh|j6�D]\\}}|||f}|d|�}x6|D].}�j4|�}x||D]}|j|��q�W�q�W�q�Wxb|j7�D]V}t8|j6��}|d|�}x4|D],}x$||D]}|jt|j����qTW�qFW�q"W�jj�r�xh|j6�D]\\}}|||f}|d|�}x6|D].}�j4|�}x||D]}|j|��q�W�q�W�q�Wt�}xb|j7�D]V}t8|j6��}||d�}x4|D],}x$||D]}|jt|j����q2W�q$W�qWxx|j7�D]l}t8|j6��}|d|�}xJ|D]B}x:||D].}x&|j�D]} | |k�r�|j9| ��q�W�q�W�q�W�qdW�fdd�|j0|j0|d�d�j/�D�}!||!}|j1��jj:�r$t;dj*|��nx|D]}
t;|
��q*WdS)Nz%Pass either --old or --new, not both!z)Pass either --oldonly or --new, not both!z)Pass either --old or --oldonly, not both!TZrepomanage_repo)Zbaseurlz.rpmrzNo files to process)�sack�reposF)Zload_system_repoZload_available_repos)�progresszCould not open {}z, )�flagscSsg|]}|�qSrr)�.0�xrrr�
<listcomp>osz)RepoManageCommand.run.<locals>.<listcomp>)Znevra_strict)Zpkg__neqcsg|]}�j|��qSr)�_package_to_path)r$r%)rrrr&�s)Zpkg__eq� )<r�new�old�dnf�
exceptions�ErrorrZoldonly�set�intZkeeprr!Zadd_new_repoZconf�pathZ_repoZexpireZ_add_repo_to_sackZWITH_MODULESZ_setup_modular_excludesZ_moduleContainerZgetModulePackagesZ	getRepoID�updateZgetArtifacts�
setdefaultZ
getNameStreamZ
getVersionNum�appendZ	RepoError�_get_file_list�len�resetZ	fill_sackZadd_remote_rpms�outputr"�IOErrorrZwarning�format�joinr �query�hawkeyZIGNORE_MODULAR_EXCLUDESZ	available�filter�sortr�arch�_package_to_nevrar'�keys�values�sorted�addZspace�print)"rZverfileZpkgdictZmodule_dictZall_modular_artifactsZkeepnumZREPOMANAGE_REPOIDZ	repo_confZmodule_packagesZmodule_packageZrpm_listr;Zpackages�pkgZnaZnevraZoutputpackagesZkeepnum_latest_stream_artifacts�n�aZevrlistZnewevrs�packageZfpkgZstreams_by_versionZsorted_stream_versionsZnew_sorted_stream_versions�i�streamZoldevrsZold_sorted_stream_versionsZkeepnum_newer_stream_artifactsZartifactZmodular_packagesr)rr�run9s�



&"








$



$



$

"&

zRepoManageCommand.runc	Cs�|jdddtd�d�|jdddtd�d�|jd	d
dtd�d�|jdd
dtd�d�|jddddtd�dtd�|jddtd�d�dS)Nz-oz--old�
store_truezPrint the older packages)�action�helpz-Oz	--oldonlyz6Print the older packages. Exclude the newest packages.z-nz--newzPrint the newest packagesz-sz--spacez#Space separated output, not newlinez-kz--keepZstoreZKEEPz)Newest N packages to keep - defaults to 1�)rN�metavarrO�default�typer0zPath to directory)�add_argumentrr/)�parserrrr�
set_argparser�s




zRepoManageCommand.set_argparsercCs`g}xVtj|�D]H\}}}x<|D]4}tjj|�dj�t|�kr |jtjj||��q WqW|S)zJReturn all files in path matching ext

        return list object
        rP)�os�walkr0�splitext�lower�strr3r:)r0ZextZfilelist�root�dirs�files�frrrr4�s
z RepoManageCommand._get_file_listcCs*t|jj�r tjj|jj|j�S|jSdS)N)r5rr!rWr0r:r�location)rrFrrrr'�sz"RepoManageCommand._package_to_pathcCs|j|j|j|j|jfS)N)rZepoch�version�releaser?)rFrrrr@sz#RepoManageCommand._package_to_nevraN)r)rrr�aliasesrZsummaryrrrL�staticmethodrVr4r'r@rrrrr
+s$r
)Z
__future__rrZdnfpluginscorerrr+Zdnf.clirrWr<ZPluginrr
ZCommandr
rrrr�<module>s__pycache__/generate_completion_cache.cpython-36.opt-1.pyc000064400000006000150402642250017503 0ustar003

�gt`l�@s^ddlmZddlmZddlmZddlmZddlZddlZ	ddl
Z
Gdd�dej�ZdS)�)�absolute_import)�unicode_literals)�ucd)�loggerNcs<eZdZdZ�fdd�Zedd��Zdd�Zdd	�Z�Z	S)
�BashCompletionCacheZgenerate_completion_cachecs"tt|�j||�||_d|_dS)Nz/var/cache/dnf/packages.db)�superr�__init__�base�
cache_file)�selfr	Zcli)�	__class__��//usr/lib/python3.6/generate_completion_cache.pyrszBashCompletionCache.__init__cCstjd|�dS)NzCompletion plugin: %s)r�debug)�msgr
r
r�_out$szBashCompletionCache._outcCsd}x,|jjj�D]}|jdk	r|jjrd}PqWtjj|j�sF|r�y~t	j
|j��h}|jd�|j�}|j
d�|j
d�|j
d�|jjj�j�}dd	�|D�}|jd
|�|j�WdQRXWn6t	jk
r�}z|jdt|��WYdd}~XnXdS)z& Generate cache of available packages FNTzGenerating completion cache...z/create table if not exists available (pkg TEXT)zAcreate unique index if not exists pkg_available ON available(pkg)zdelete from availablecSs g|]}|jdkrt|�g�qS)�src)�arch�str)�.0�xr
r
r�
<listcomp>@sz,BashCompletionCache.sack.<locals>.<listcomp>z*insert or ignore into available values (?)z Can't write completion cache: %s)r	ZreposZiter_enabledZmetadata�fresh�os�path�existsr
�sqlite3�connectr�cursor�execute�sack�queryZ	available�executemany�commit�OperationalErrorr)rrZrepo�conn�curZ
avail_pkgsZavail_pkgs_insert�er
r
rr (s,

zBashCompletionCache.sackcCs�|js
dSy�tj|j��n}|jd�|j�}|jd�|jd�|jd�tjj	|j
�j�j�}dd�|D�}|j
d|�|j�WdQRXWn6tjk
r�}z|jd	t|��WYdd}~XnXdS)
z& Generate cache of installed packages NzGenerating completion cache...z/create table if not exists installed (pkg TEXT)zAcreate unique index if not exists pkg_installed ON installed(pkg)zdelete from installedcSs g|]}|jdkrt|�g�qS)r)rr)rrr
r
rrVsz3BashCompletionCache.transaction.<locals>.<listcomp>z*insert or ignore into installed values (?)z Can't write completion cache: %s)�transactionrrr
rrr�dnfr Z_rpmdb_sackr	r!Z	installedr"r#r$r)rr%r&Z	inst_pkgsZinst_pkgs_insertr'r
r
rr(Gs"


zBashCompletionCache.transaction)
�__name__�
__module__�__qualname__�namer�staticmethodrr r(�
__classcell__r
r
)rrrs
r)
Z
__future__rrZdnf.i18nrZdnfpluginscorerr)Zos.pathrrZPluginrr
r
r
r�<module>s__pycache__/versionlock.cpython-36.opt-1.pyc000064400000020202150402642250014673 0ustar003

�f0�@s\ddlmZddlmZddlmZmZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZed�Zed�Zed�Zed�Zed	�Zed
�Zed�Zed�Zed
�Zed�Zed�Zed�Zed�ZdaGdd�dej�ZdddgZddgZ dgZ!dddgee e!Z"Gdd�dej#j$�Z%dd �Z&d!d"�Z'd#d$�Z(d%d&�Z)d'd(�Z*dS))�)�absolute_import)�unicode_literals)�_�loggerNz-Unable to read version lock configuration: %szLocklist not setzAdding versionlock on:zAdding exclude on:z*Package already locked in equivalent form:zPackage {} is already lockedzPackage {} is already excludedzDeleting versionlock for:zNo package found for:z1Excludes from versionlock plugin were not appliedzCVersionlock plugin: number of lock rules from file "{}" applied: {}zFVersionlock plugin: number of exclude rules from file "{}" applied: {}z,Versionlock plugin: could not parse pattern:cs8eZdZdZ�fdd�Zdd�Zdd�Zdd	�Z�ZS)
�VersionLock�versionlockcs8tt|�j||�||_||_|jdk	r4|jjt�dS)N)�superr�__init__�base�cliZregister_command�VersionLockCommand)�selfr
r)�	__class__��!/usr/lib/python3.6/versionlock.pyr	6s

zVersionLock.__init__cCs4|j|jj�}|jd�o.|jdd�o.|jdd�adS)N�main�locklist)Zread_configr
ZconfZhas_sectionZ
has_option�get�locklist_fn)r
Zcprrr�config=szVersionLock.configcCs0|jdkrd}n|jjj}|dkr,|jjj}|S)NT)r�demandsZplugin_filtering_enabledZ	resolving)r
Zenabledrrr�locking_enabledCs


zVersionLock.locking_enabledc
Cs�|j�stjt�dS|jjj�jdd�}|jjj�jdd�}t�}ddg}x�t	�D]�}d}|r||ddkr||dd�}d}t
jj|�j
tjtjtjtjtjgd�}|r�||d7<ntjdt|�qTxF|D]>}|j|jj�}	|r�|j|	�}n|j|j�|j|	�}|	r�Pq�WqTW|d�r6tjtjt|d��|d�rVtjtjt|d��|�r�|jjj�jt|�d�}
|
j|�}|j|�}|jjj�j |d	�}|j|j|��}|j tj!d
�|�r�|jjj"|�dS)NT)�emptyr�!�)Zformsz%s %s)Z
name__glob)Z	obsoletes)Z
reponame__neq)#rr�debug�NO_VERSIONLOCKr
�sackZquery�filter�set�_read_locklist�dnf�subject�SubjectZget_nevra_possibilities�hawkeyZ
FORM_NEVRAZ	FORM_NEVRZFORM_NEVZFORM_NAZ	FORM_NAME�error�NEVRA_ERRORZto_query�union�add�name�
APPLY_EXCLUDE�formatr�
APPLY_LOCK�list�
differenceZfiltermZSYSTEM_REPO_NAMEZadd_excludes)
r
Zexcludes_queryZlocked_queryZlocked_names�count�patZexclZpossible_nevrasZnevraZ	pat_queryZall_versionsZother_versionsZobsoletes_queryrrrrLsP







zVersionLock.sack)	�__name__�
__module__�__qualname__r)r	rrr�
__classcell__rr)rrr2s
	r�excludezadd-!zadd!�delete�delZ	blacklistr(�clearr-c@s8eZdZdZed�ZdZedd��Zdd�Z	dd	�Z
d
S)rrzcontrol package version locksz5[add|exclude|list|delete|clear] [<package-nevr-spec>]cCs:|jdddtd�d�|jdddd	�|jd
ddd	�dS)
Nz--rawF�
store_truez@Use package specifications as they are, do not try to parse them)�default�action�help�
subcommand�?z[add|exclude|list|delete|clear])�nargs�metavar�package�*z[<package-nevr-spec>])�add_argumentr)�parserrrr�
set_argparser�s
z VersionLockCommand.set_argparsercCsd|jj_d|jj_dS)NT)rrZsack_activationZavailable_repos)r
rrr�	configure�s
zVersionLockCommand.configurecCs�d}|jjr�|jjtkr2d}|jjjd|jj�nd|jjtkrDd}nR|jjtkr|td�j|jj�}t	j
|tjj
dd�d}n|jjtkr�d}n|jj}|dk�rt|jj�}xj|D]b\}}|d	kr�t|j|g|jjd
dtj�td	�q�||k�rtjjtj|���q�tjdt|�q�W�n�|dk�r�t|jj�}xn|D]f\}}|d	k�rpt|j|g|jjd
dtj�td�n,||k�r�tjjtj|���ntjdt|��q8W�n4|dk�r�xt�D]}t|��q�W�n|dk�rt�s�tjjt ��t!td��}WdQRXn�|dk�r�t�stjjt ��t"j#j$t�}t%j&|dd�\}	}
t�}d}t"j'|	dd��V}	xN|D]F}
t(|
|jj��r�tdt)|
f�|d7}�q\|	j*|
�|	j*d��q\WWdQRX|�s�t"j+|
�nt"j,|
d�t"j-|
t�dS)Nr-r(rr5z@Subcommand '{}' is deprecated. Use 'exclude' subcommand instead.�)�
stacklevelr6�Tz
# Added lock on %s
z%s %sFz
# Added exclude on %s
rr8�wz.tmp)�dir�suffixr�
i����).Zoptsr=�ALL_CMDSrA�insert�EXC_CMDS�DEP_EXC_CMDSrr+�warnings�warnr!�
exceptions�DeprecationWarning�DEL_CMDS�_search_locklist�_write_locklistr
�raw�timeZctime�ADDING_SPEC�Error�ALREADY_EXCLUDEDr�info�
EXISTING_SPEC�EXCLUDING_SPEC�ALREADY_LOCKEDr �printr�NO_LOCKLIST�open�os�path�dirname�tempfileZmkstemp�fdopen�_match�
DELETING_SPEC�write�unlink�chmod�rename)r
�cmd�msg�results�entryZ	entry_cmdr0�frh�outZtmpfilename�locked_specsr/�entrrr�run�s|














zVersionLockCommand.runN)r)r1r2r3�aliasesrZsummaryZusage�staticmethodrErFryrrrrr�s
rcCs�g}y`tstjjt��tt��>}x6|j�D]*}|jd�s*|j�dkrFq*|j	|j��q*WWdQRXWn2t
k
r�}ztjjt|��WYdd}~XnX|S)N�#rI)rr!rUr]rdre�	readlines�
startswith�strip�append�IOError�NOT_READABLE)rZllfile�line�errrr �s
 "r cCs�g}d}}t�}xl|D]d}d}xH|D]@}d}}t||g�r&|}|jd�rPdnd}|j||f�d}q&W|s|j||f�qW|S)NrIFrr5r(T)r rkr~r�)rArs�foundr;rw�pkg�matchrxrrrrX�s

rXcCs@t�}x�|D]�}|r |j|�qtjj|�}	d}
|rN|	jtjj|�dddd�}
|
sf|	j|jdddd�}
|
sztdt	|f�x|
D]}|jt
|j��q�WqW|�r<ydts�tj
jt��ttd��@}|j|�x.|D]&}
td||
f�|jd||
f�q�WWdQRXWn4tk
�r:}ztj
jt|��WYdd}~XnXdS)NTF)Z
with_nevraZ
with_providesZwith_filenamesz%s %s�az%s%s
)rr(r!r"r#Zget_best_queryrZ_rpmdb_sackrc�
NOTFOUND_SPEC�pkgtup2specZpkgtuprrUr]rdrermr�r�)r
�argsrZZ
try_installedZcommentr_�prefixZspecsr0ZsubjZpkgsr�ru�specr�rrrrYs8





$rYcCs&|jd�}x|D]}||krdSqWytj|�}Wntjk
rHdSXx�d|jd|j|jfd|j|jfd|j|j|jfd|j|j|jfd	|j|j|j|jfd
|j|j|j|jfd|j|j|j|j|jfd|j|j|j|j|jff	D]&}x |D]}t	j	||��rdS�qWq�WdS)
NrTFz%sz%s.%sz%s-%sz%s-%s-%sz%s-%s:%sz%s-%s-%s.%sz%s-%s:%s-%sz%s:%s-%s-%s.%sz%s-%s:%s-%s.%s)
�lstripr$Zsplit_nevraZValueExceptionr)�arch�version�release�epoch�fnmatch)rxZpatternsr0�nr)rrrrk#s,

"
rkcCsd||p
d||fS)Nz
%s-%s:%s-%s.*�0r)r)r�r�r�r�rrrr�=sr�)+Z
__future__rrZdnfpluginscorerrr!Zdnf.cliZdnf.exceptionsr�r$rfrir[rSr�rdr\rar`rbr^rlr�rr,r*r&rZPluginrrQrWrRrOrZCommandrr rXrYrkr�rrrr�<module>sHO
[ __pycache__/system_upgrade.cpython-36.opt-1.pyc000064400000054753150402642250015412 0ustar003

�f�h�@s�dZddlmZmZmZmZddlZddlZddlZddl	Z	ddl
Z
ddlZddlm
Z
ddlmZmZddlZddlZddlmZddlmZddlZddlmZmZddlZed	�ejd
�Zejd�Zejd�Zejd
�Z eZ!dZ"ed�Z#ed�Z$ed�Z%dZ&dd�Z'dd�Z(gfdd�Z)d7dd�Z*dd�Z+Gdd�de,�Z-Gdd �d e,�Z.e.�Z/Gd!d"�d"ej0j1�Z2d#d$�Z3d%d&�Z4d'd(�Z5d)d*�Z6d+d,dd-d.gZ7Gd/d0�d0ej8�Z9Gd1d2�d2ej:j;�Z<Gd3d4�d4e<�Z=Gd5d6�d6e<�Z>dS)8zGsystem_upgrade.py - DNF plugin to handle major-version system upgrades.�)�call�Popen�check_output�CalledProcessErrorN)�journal)�_�logger)�CliError)�ucd)�serialize_transaction�TransactionReplayzthe color of the skyZ 9348174c5cc74001a71ef26bd79d302eZ fef1cc509d5047268b83a3a553f54b43Z 3e0a5636d16b4ca4bbe5321d06c6aa62Z 8cec00a1566f4d3594f116450395f06cz/usr/bin/plymouthz<Need a --releasever greater than the current system version.z�Download complete! Use 'dnf {command} reboot' to start the upgrade.
To remove cached metadata and transaction use 'dnf {command} clean'zESorry, you need to use 'download --releasever' instead of '--network'�cCs.tjddd�rtjtd��ntddg�dS)NZDNF_SYSTEM_UPGRADE_NO_REBOOTF)�defaultz!Reboot turned off, not rebooting.Z	systemctl�reboot)�os�getenvr�inforr�rr�$/usr/lib/python3.6/system_upgrade.pyrEsrcCs|d}xrdD]j}yNt|��<}x4|D],}|j�}|j|�r |t|�d�jd�Sq WWdQRXWq
tk
rrw
Yq
Xq
WdS)NzUPGRADE_GUIDE_URL=�/etc/os-release�/usr/lib/os-release�")rr)�open�strip�
startswith�len�IOError)�key�pathZrelease_file�linerrr�get_url_from_os_releaseLs



(r cCs~tjj|�sdSxhtj|�D]Z}tjj||�}||kr8qy(tjj|�rTtjj|�n
tj|�Wqt	k
rtYqXqWdS)N)
rr�isdir�listdir�join�dnf�utilZrm_rf�unlink�OSError)r�ignore�entryZfullpathrrr�	clear_dir[sr*cCs6tjj|j�|jkrtt��|r2||jkr2tt��dS)N)r$�rpm�detect_releasever�installroot�
releaseverr	�RELEASEVER_MSG�CANT_RESET_RELEASEVER)�conf�targetrrr�check_release_verlsr3cCsPytdd�}|jd�Wn2tk
rJ}zttd�|�WYdd}~XnXdS)Nz	/dev/tty0�wbs[9;0]z%Screen blanking can't be disabled: %s)r�write�	Exception�printr)Ztty�errr�disable_blankingus

r9c@s�eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	e	d�Z
e	d�Ze	d�Ze	d�Z
e	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�Ze	d�ZdS)�StatecCs||_i|_|j�dS)N)�	statefile�_data�_read)�selfr;rrr�__init__�szState.__init__cCspy&t|j��}tj|�|_WdQRXWnDtk
r@i|_Yn,tk
rji|_tjt	d�|j�YnXdS)Nz;Failed loading state file: %s, continuing with empty state.)
rr;�json�loadr<r�
ValueErrorr�warningr)r>�fprrrr=�s

zState._readc
CsFtjjtjj|j��t|jd��}tj	|j
|ddd�WdQRXdS)N�w�T)�indent�	sort_keys)r$r%�
ensure_dirrr�dirnamer;rr@�dumpr<)r>Zoutfrrrr5�szState.writecCs&tjj|j�rtj|j�|j�dS)N)rr�existsr;r&r=)r>rrr�clear�szState.clearcCs|S)Nr)r>rrr�	__enter__�szState.__enter__cCs|dkr|j�dS)N)r5)r>�exc_type�	exc_value�	tracebackrrr�__exit__�szState.__exit__cs"�fdd�}�fdd�}t||�S)Ncs||j�<dS)N)r<)r>�value)�optionrr�setprop�szState._prop.<locals>.setpropcs|jj��S)N)r<�get)r>)rTrr�getprop�szState._prop.<locals>.getprop)�property)rTrUrWr)rTr�_prop�szState._prop�
state_version�download_status�destdir�target_releasever�system_releasever�gpgcheck�gpgcheck_repos�repo_gpgcheck_repos�upgrade_status�upgrade_command�distro_sync�enable_disable_repos�module_platform_idN)�__name__�
__module__�__qualname__r?r=r5rMrNrRrYrZr[r\r]r^r_r`rarbrcrdrerfrrrrr:�s(
r:c@s@eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dS)�PlymouthOutputz�A plymouth output helper class.

    Filters duplicate calls, and stops calling the plymouth binary if we
    fail to contact it.
    cCsd|_t�|_d|_dS)NT)�alive�dict�
_last_args�	_last_msg)r>rrrr?�szPlymouthOutput.__init__cGsj||jj|�k}|jr|s$|dkrdytt|f|�dk|_Wntk
rXd|_YnX||j|<|jS)Nz--pingrF)rmrVrkr�PLYMOUTHr')r>�cmd�argsZdupe_cmdrrr�	_plymouth�s
zPlymouthOutput._plymouthcCs
|jd�S)Nz--ping)rr)r>rrr�ping�szPlymouthOutput.pingcCs4|jr |j|kr |jdd|j�||_|jdd|�S)Nzhide-messagez--textzdisplay-message)rnrr)r>�msgrrr�message�szPlymouthOutput.messagecCsRd}y$ttdg�}tjdt|��r&d}Wnttfk
r@YnX|jdd|�S)NZupdatesz--helpz--system-upgradezsystem-upgradezchange-modez--)rro�re�searchr
rr'rr)r>�mode�srrr�set_mode�szPlymouthOutput.set_modecCs|jddt|��S)Nz
system-updatez
--progress)rr�str)r>Zpercentrrr�progress�szPlymouthOutput.progressN)
rgrhri�__doc__r?rrrsrurzr|rrrrrj�s

rjc@s$eZdZdd�Zdd�Zdd�ZdS)�PlymouthTransactionProgresscCs|j||||�dS)N)�_update_plymouth)r>�package�actionZti_doneZti_totalZts_doneZts_totalrrrr|�sz$PlymouthTransactionProgress.progresscCsd|dkrdS|tjjkr0tjtd||��ntjdtd||��tj|j||||��dS)N�g�V@�Zg$@)r$�callbackZ
PKG_VERIFY�Plymouthr|�intru�
_fmt_event)r>r�r��current�totalrrrr�sz,PlymouthTransactionProgress._update_plymouthcCs tjjj||�}d||||fS)Nz[%d/%d] %s %s...)r$�transactionZACTIONSrV)r>r�r�r�r�rrrr�sz&PlymouthTransactionProgress._fmt_eventN)rgrhrir|rr�rrrrr~�sr~ccsJtj�}|j|jdd�d}x(|D] }|d}||kr8q"|}|Vq"WdS)zVFind all boots with this message id.

    Returns the entries of all found boots.
    r)�
MESSAGE_IDZ_UIDN�_BOOT_ID)r�ReaderZ	add_match�hex)�
message_id�jZoldbootr)Zbootrrr�
find_bootss
r�c
Cstttd��d
}xJttt��D]:\}}tdj|d|d|d|jdd�|jdd���qW|dkrpttd	��dS)Nz3The following boots appear to contain upgrade logs:r�u){} / {.hex}: {:%Y-%m-%d %H:%M:%S} {}→{}r�Z__REALTIME_TIMESTAMP�SYSTEM_RELEASEVERz??�TARGET_RELEASEVERz-- no logs were found --���r�)r7r�	enumerater��ID_TO_IDENTIFY_BOOTS�formatrV)�nr)rrr�	list_logs s
r�cCsZtt|��}y(|dkrt�|dkr*|d8}||dStk
rTttd���YnXdS)Nrr�r�z!Cannot find logs with this index.)�listr��
IndexErrorr	r)r�r�Zbootsrrr�	pick_boot.sr�cCsDtt|�}tdd|jg�}|j�|j}|dkr@tjjt	d���dS)NZ
journalctlz--bootr�z%Unable to match systemd journal entry)
r�r�rr��wait�
returncoder$�
exceptions�Errorr)r�Zboot_idZprocessZrcrrr�show_log=s
r�ZdownloadZclean�upgrade�logcs eZdZdZ�fdd�Z�ZS)�SystemUpgradePluginzsystem-upgradecs8tt|�j||�|r4|jt�|jt�|jt�dS)N)�superr�r?Zregister_command�SystemUpgradeCommand�OfflineUpgradeCommand�OfflineDistrosyncCommand)r>�base�cli)�	__class__rrr?Ns


zSystemUpgradePlugin.__init__)rgrhri�namer?�
__classcell__rr)r�rr�Ksr�cs(eZdZdEZed�ZdZ�fdd�Zedd��Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�Zd3d4�Zd5d6�Z d7d8�Z!d9d:�Z"d;d<�Z#d=d>�Z$d?d@�Z%dAdB�Z&dCdD�Z'�Z(S)Fr��system-upgrade�fedupz+Prepare system for upgrade to a new releasezvar/lib/dnf/system-upgradecsjtt|�j|�tjj|jjj|j	�|_
tjj|j
d�|_tjj|jjjd�|_t
tjj|j
d��|_dS)Nzsystem-upgrade-transaction.jsonz
system-updatezsystem-upgrade-state.json)r�r�r?rrr#r�r1r-�DATADIR�datadir�transaction_file�
magic_symlinkr:�state)r>r�)r�rrr?\s
zSystemUpgradeCommand.__init__cCsJ|jdddtd�d�|jddtdd	jt�d
�|jdttd�d
�dS)Nz--no-downgraderdZstore_falsez=keep installed packages if the new release's version is older)�destr��help�tidr�z[%s]�|)�nargs�choices�metavarz--numberzwhich logs to show)�typer�)�add_argumentr�CMDSr#r�)�parserrrr�
set_argparserds
z"SystemUpgradeCommand.set_argparsercCs(tj||tj|jj|jjtjjd�dS)zLog directly to the journal.)r�ZPRIORITYr�r�ZDNF_VERSIONN)	r�sendZ
LOG_NOTICEr�r^r]r$�const�VERSION)r>rur�rrr�
log_statusnszSystemUpgradeCommand.log_statuscCs|jd�|jd�dS)NZcheck�
pre_configure)�	_call_sub)r>rrrr�ws
z"SystemUpgradeCommand.pre_configurecCs|jd�dS)N�	configure)r�)r>rrrr�{szSystemUpgradeCommand.configurecCs|jd�dS)N�run)r�)r>rrrr�~szSystemUpgradeCommand.runcCs|jd�dS)Nr�)r�)r>rrr�run_transaction�sz$SystemUpgradeCommand.run_transactioncCs|jd�dS)NZresolved)r�)r>rrr�run_resolved�sz!SystemUpgradeCommand.run_resolvedcCs.t||d|jjdd�}t|�r*|�dS)Nrr)�getattr�optsr��callable)r>r�Zsubfuncrrrr��szSystemUpgradeCommand._call_subcCs(|jjtkr$td�j|d�}t|��dS)NzFIncompatible version of data. Rerun 'dnf {command} download [OPTIONS]')�command)r�rZ�
STATE_VERSIONrr�r	)r>r�rtrrr�_check_state_version�sz)SystemUpgradeCommand._check_state_versioncCs*|j|jj_|jjr|jjnd|jj_dS)N)r�r�r1�cachedirr�r\)r>rrr�
_set_cachedir�sz"SystemUpgradeCommand._set_cachedircCs�ttjjtjjg�}ttjj�}i}i}xl|jjjD]^}|j	|krp|j
}|j|j|j
ji�jt|�i�|j	<q6|j	|kr6|j|jt|j
�i�|j	<q6W||fS)z�
        forward = {repoid:{pkg_nevra: {tsi.action: tsi.reason}}
        reverse = {pkg_nevra: {tsi.action: tsi.reason}}
        :return: forward, reverse
        )�setr$r�ZBACKWARD_ACTIONS�libdnfZ!TransactionItemAction_REINSTALLEDZFORWARD_ACTIONSr�r�r��pkg�reason�
setdefault�repo�idr{)r>Zbackward_actionZforward_actionsZforward�reverseZtsir�rrr�%_get_forward_reverse_pkg_reason_pairs�s
&
z:SystemUpgradeCommand._get_forward_reverse_pkg_reason_pairscCsb|j|jj_|jjr|jjnd|jj_d|jjkrJ|jjrJtt	d���nd|jjkr^d|j_dS)Nzoffline-distrosynczFCommand 'offline-distrosync' cannot be used with --no-downgrade optionzoffline-upgradeF)
r�r�r1r�r�r\r�rdr	r)r>rrr�pre_configure_download�sz+SystemUpgradeCommand.pre_configure_downloadcCs|j�dS)N)r�)r>rrr�pre_configure_reboot�sz)SystemUpgradeCommand.pre_configure_rebootcCs.|j�|jjr|jj|j_|jj|jj_dS)N)	r�r�rer��repos_edr]r�r1r.)r>rrr�pre_configure_upgrade�sz*SystemUpgradeCommand.pre_configure_upgradecCs|j�dS)N)r�)r>rrr�pre_configure_clean�sz(SystemUpgradeCommand.pre_configure_cleancCsd|jjksd|jjkr�tjtd��t�}|rLtd�}tj|jt|���|j	j
�r�td�}|j	jjs�|j	j
jdj|�dj|�d�r�tjtd	��tjd
�t|j	j|jjd�nd|jjkr�|jj|j�d
|jj_d
|jj_d
|jj_d
|jj_d
|jj_|j	jjdg7_dS)Nzsystem-upgrader�z\WARNING: this operation is not supported on the RHEL distribution. Proceed at your own risk.z-Additional information for System Upgrade: {}zyBefore you continue ensure that your system is fully upgraded by running "dnf --refresh upgrade". Do you want to continuez
{} [y/N]: z
{} [Y/n]: )rtZdefaultyes_msgzOperation aborted.r�)r2zoffline-upgradeTZtest)r�r�rrCrr rr�r
r�Z
_promptWantedr1Zassumeno�outputZuserconfirm�error�sys�exitr3r.r�Z _populate_update_security_filter�demands�	root_user�	resolving�available_repos�sack_activationZfreshest_metadataZtsflags)r>Zhelp_urlrtrrr�configure_download�s*






z'SystemUpgradeCommand.configure_downloadcCsd|jj_dS)NT)r�r�r�)r>rrr�configure_reboot�sz%SystemUpgradeCommand.configure_rebootcCs�d|jj_d|jj_d|jj_d|jj_|jj|j_|jj	dk	rN|jj	|j
j_	|jjdk	r�x$|j
j
j�D]}|j|jjk|_	qhW|jjdk	r�x$|j
j
j�D]}|j|jjk|_q�W|jj|j
j_d|jj_d|j
j_t�|jj_d|j
j_d|j
j_dS)NTF)r�r�r�r�r�r�r�rdr�r_r�r1r`�repos�valuesr�ra�
repo_gpgcheckrfZ	cacheonlyZ	assumeyesr~Ztransaction_displayZclean_requirements_on_removeZinstall_weak_deps)r>r�rrr�configure_upgrade�s&






z&SystemUpgradeCommand.configure_upgradecCsd|jj_dS)NT)r�r�r�)r>rrr�configure_cleansz$SystemUpgradeCommand.configure_cleancCsdS)Nr)r>rrr�
configure_logsz"SystemUpgradeCommand.configure_logcCs~|jjdksttd���|j|jj�|jj|jjkrRtd�j|jjd�}t|��t	j
j|j�rlttd���t
jj|j�dS)N�completezsystem is not ready for upgradezZthe transaction was not prepared for '{command}'. Rerun 'dnf {command} download [OPTIONS]')r�zupgrade is already scheduled)r�r[r	rr�r�r�rcr�rr�lexistsr�r$r%rIr�)r>rtrrr�check_rebootsz!SystemUpgradeCommand.check_rebootcCs�tjj|j�s$tjtd��td��tj|j�|j	krLtjtd��td��t
jjj
|j�|jj}|sp|jj}|j|�|jjdks�td�j|d�}t|��dS)Nz-trigger file does not exist. exiting quietly.rz1another upgrade tool is running. exiting quietly.�readyz/use 'dnf {command} reboot' to begin the upgrade)r�)rrr�r�rrr�
SystemExit�readlinkr�r$ZyumZmiscZunlink_fr�rcr�r�r�rbr�r	)r>r�rtrrr�
check_upgrades
z"SystemUpgradeCommand.check_upgradec	Cs,tj|j|j�|j�}d|_WdQRXdS)Nr�)r�symlinkr�r�r�rb)r>r�rrr�run_prepare,sz SystemUpgradeCommand.run_preparecCs6|j�|jjddksdS|jtd�t�t�dS)NrrzRebooting to perform upgrade.)r�r�r�r�r�REBOOT_REQUESTED_IDr)r>rrr�
run_reboot3s
zSystemUpgradeCommand.run_rebootc	s��jjr�jj�n
�jj��jjdkr��jj��fdd��jjjD�}|r\�jj|��fdd��jjj	D�}|r��jj|��j
�$}d|_�jjj
|_�jjj|_WdQRXdS)N�offline-upgrade�offline-distrosynccs$g|]}�jjjj|j�r|j�qSr)r��history�grouprVr�)�.0�g)r>rr�
<listcomp>Gsz5SystemUpgradeCommand.run_download.<locals>.<listcomp>cs$g|]}�jjjj|j�r|j�qSr)r�r��envrVr�)rr)r>rrrJsZdownloading)r�r�)r�rdr�Zupgrade_allr�Z
read_comps�comps�groupsZenv_group_upgradeZenvironmentsr�r[r1r.r]r\)r>Zinstalled_groupsZinstalled_environmentsr�r)r>r�run_download=s

z!SystemUpgradeCommand.run_downloadc
Cs�d}|j�}d|_|j}WdQRX|dkr4td�}n|dkrFtd�}ntd�}|j|t�tj�tjd�tj	|�t
�t|j|j
�|_|jj�dS)	N�Z
incompletezoffline-upgradez1Starting offline upgrade. This will take a while.zoffline-distrosyncz4Starting offline distrosync. This will take a while.z0Starting system upgrade. This will take a while.r)r�rbrcrr��UPGRADE_STARTED_IDr�rzr|rur9rr�r��replayr�)r>r�r�rtrrr�run_upgradeSs 



z SystemUpgradeCommand.run_upgradec	Csdtjtd��t|jjjtjj	|jjj�j
g�|j�$}d|_d|_
d|_d|_d|_WdQRXdS)NzCleaning up downloaded data...)rrrr*r�r1r�r$Z	persistorZTempfilePersistorZdb_pathr�r[rZrbrcr\)r>r�rrr�	run_cleanms
zSystemUpgradeCommand.run_cleancCs |jjrt|jj�nt�dS)N)r�Znumberr�r�)r>rrr�run_logzszSystemUpgradeCommand.run_logcCs|jj�dS)z5Adjust transaction reasons according to stored valuesN)r
Zpost_transaction)r>rrr�resolved_upgrade�sz%SystemUpgradeCommand.resolved_upgradecCs�|jjj�}|j�s&tjtd��dSt|�}yLt|j	d��"}t
j||ddd�|jd�WdQRXt
td�j|j	��Wn<tk
r�}z tjjtd�jt|����WYdd}~XnXtjj|jjj�}|j��}d	|_t|_|jj|_|jjj|_d
d�|jjj �D�|_!dd�|jjj �D�|_"||_#|jjj$|_%|jjj&|_&|jj'|_(|jjj)|_)|jj*|_+WdQRXt,j|jj*d
�}tj|�|j-td�t.�dS)NzKThe system-upgrade transaction is empty, your system is already up-to-date.rErFT)rGrH�
zTransaction saved to {}.zError storing transaction: {}r�cSsg|]}|jr|j�qSr)r_r�)rr�rrrr�sz=SystemUpgradeCommand.transaction_download.<locals>.<listcomp>cSsg|]}|jr|j�qSr)r�r�)rr�rrrr�s)r�zDownload finished.)/r�r�Zget_currentZpackagesrrrrrr�r@rKr5r7r�r'r$r�r	r{r+r,r1r-r�r[r�rZr�rdr_r�r�r`rar^r.r]rfr�rer\r�rc�DOWNLOAD_FINISHED_MSGr��DOWNLOAD_FINISHED_ID)r>r��data�fr8Z
system_verr�rtrrr�transaction_download�s:,


z)SystemUpgradeCommand.transaction_downloadcCs@tjtd��|jtd�t�|j�|jjddkr<t�dS)Nz.Upgrade complete! Cleaning up and rebooting...rr�)	r�rurr��UPGRADE_FINISHED_IDrr�r�r)r>rrr�transaction_upgrade�s
z(SystemUpgradeCommand.transaction_upgrade)r�r�))rgrhri�aliasesr�summaryr�r?�staticmethodr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrr
rrrr�rr)r�rr�VsF
	
 


(r�c@seZdZdZed�ZdS)r��offline-upgradez%Prepare offline upgrade of the systemN)r)rgrhrirrrrrrrr��sr�c@seZdZdZed�ZdS)r��offline-distrosyncz(Prepare offline distrosync of the systemN)r)rgrhrirrrrrrrr��sr�)N)?r}�
subprocessrrrrr@rZos.pathrvr�ZuuidZsystemdrZdnfpluginscorerrr$Zdnf.clir	Zdnf.i18nr
Zdnf.transactionZdnf.transaction_srrrZlibdnf.confr�ZUUIDrr�r	rr�ror/rr0r�rr r*r3r9�objectr:rjr�r�ZTransactionProgressr~r�r�r�r�r�ZPluginr�r�ZCommandr�r�r�rrrr�<module>sd




	@.	e__pycache__/universal_hooks.cpython-36.opt-1.pyc000064400000013564150402642250015565 0ustar003

���_�@sddlZddlZddlZddlZddlmZddlZddlZddlZddlZddl	m
Z
eje�Z
e
jej�e
jejej��Gdd�de
�ZGdd�dejd�ZGd	d
�d
e�Ze�Zddd
�ZGdd�dejd�ZGdd�de�Zdd�Zdd�Zdd�ZdS)�N)�path)�PlugincsPeZdZdZ�fdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
�ZS)�UniversalHooksPluginzuniversal-hookscst�j||�d|_dS)Nz/etc/dnf/universal-hooks)�super�__init__�	hook_root)�self�baseZcli)�	__class__��%/usr/lib/python3.6/universal_hooks.pyr,szUniversalHooksPlugin.__init__cCsttj|j|jj�t�dS)N)�_run_dirr�joinr�
pre_config�__name__�LOG)rrrrr0szUniversalHooksPlugin.pre_configcCsttj|j|jj�t�dS)N)r
rrr�configrr)rrrrr3szUniversalHooksPlugin.configcCsttj|j|jj�t�dS)N)r
rrr�resolvedrr)rrrrr6szUniversalHooksPlugin.resolvedcCsttj|j|jj�t�dS)N)r
rrr�sackrr)rrrrr9szUniversalHooksPlugin.sackcCs8|jj}t|jt|t|jj��tt	j
|j|�t�dS)N)�pre_transactionr�
_run_pkg_dirsrr�DnfTransactionInfor	�transactionr
rr)r�namerrrr<sz$UniversalHooksPlugin.pre_transactioncCs8|jj}t|jt|t|jj��ttj	|j|�t�dS)N)
rrrrrrr	r
rr)rrrrrrAsz UniversalHooksPlugin.transaction)r�
__module__�__qualname__rrrrrrrr�
__classcell__rr)r
rr)src@sDeZdZejdd��Zejdd��Zejdd��Zejdd��Zd	S)
�
FileSystemcCsdS)Nr)r�pathnamerrr�globHszFileSystem.globcCsdS)Nr)rrrrr�isdirLszFileSystem.isdircCsdS)Nr)rr�moderrr�accessPszFileSystem.accesscCsdS)Nr)rr!�encodingrrr�NamedTemporaryFileTszFileSystem.NamedTemporaryFileN)	rrr�abc�abstractmethodrr r"r$rrrrrGsr)�	metaclassc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�RealFileSystemcCs
tj|�S)N)r)rrrrrrZszRealFileSystem.globcCs
tj|�S)N)rr )rrrrrr ]szRealFileSystem.isdircCstj||�S)N)�osr")rrr!rrrr"`szRealFileSystem.accesscCstj||d�S)N)r!r#)�tempfiler$)rr!r#rrrr$csz!RealFileSystem.NamedTemporaryFileN)rrrrr r"r$rrrrr(Ysr(�cCs�tj|�sdSxxttj|d��D]b}tj|�r2q"tj|tj�rx|�d|��}tj|dd�}d|j	kr�|j
d||j	�q"|j
d|�q"WdS)Nz/*� T)�shellrz!!! %s did not exit cleanly: %dz!!! %s is not executable)�fsr �sortedrr"r)�X_OK�
subprocessZrun�
returncode�error)Zhook_dir�log�argsZscriptZcmdlineZ	completedrrrr
js


r
c@seZdZejdd��ZdS)�TransactionInfocCsdS)Nr)rrrr�
getMembers|szTransactionInfo.getMembersN)rrrr%r&r7rrrrr6{sr6c@s"eZdZdd�dd�Zdd�ZdS)rN)�returncCs
||_dS)N)r)rrrrrr�szDnfTransactionInfo.__init__cCs|jS)N)r)rrrrr7�szDnfTransactionInfo.getMembers)rrrrr7rrrrr�srcCs�tj|d|�}t|�}i}tjddd���}i}|j�}	x�tt|	�dd�d�D]h}
|
j}||kr`qLd||<|j	|d	�t
tj|d
||�|�x&|j�D]\}}
|
j|�r�d||<q�WqLW|j
�x&|D]}t
tj||�|d|j�q�WWdQRXdS)
zu

    :param str base_dir:
    :param logging.Logger log:
    :param str slot:
    :param TransactionInfo tinfo:
    Z
multi_pkgs�wzutf-8)r!r#cSs|jS)N)r)�mrrr�<lambda>�sz_run_pkg_dirs.<locals>.<lambda>)�key��
Zpkgsz--pkg_list=N)rr�_make_dir_matchersr.r$r7r/�setr�writer
�items�search�flush)Zbase_dirr4ZslotZtinfoZ
wildcard_path�dir_matchersZwildcard_to_runZ
temp_pkg_fileZmembers_seen�members�memberZpkgZwildcard_dirZmatcherrrrr�s&	

rcCsFi}x<tj|d�D]*}tj|�rtjtj|��}t|�||<qW|S)Nz/*)r.rr r�basename�normpath�_regex_from_dir)Zwc_slot_dirrEZpthrrrr?�s
r?cCs|jdd�}tjd|d�S)NZ__WILDCARD__z.*�^�$)�replace�re�compile)r�exprrrrrJ�srJ)r+)r%rZloggingr)rrNr1�sysr*ZdnfrZ	getLoggerrrZsetLevelZERRORZ
addHandlerZ
StreamHandler�stderrr�ABCMetarr(r.r
r6rrr?rJrrrr�<module>s,

&	__pycache__/repoclosure.cpython-36.opt-1.pyc000064400000010500150402642250014677 0ustar003

�gt`��@sVddlmZddlmZddlmZddlZGdd�dej�ZGdd�dej	j
�ZdS)	�)�absolute_import)�unicode_literals)�_Ncs eZdZdZ�fdd�Z�ZS)�RepoClosure�repoclosurecs,tt|�j||�|dkrdS|jt�dS)N)�superr�__init__Zregister_command�RepoClosureCommand)�self�base�cli)�	__class__��!/usr/lib/python3.6/repoclosure.pyr!szRepoClosure.__init__)�__name__�
__module__�__qualname__�namer�
__classcell__rr)r
rrsrc@s>eZdZdZed�Zdd�Zdd�Zd
dd	�Ze	d
d��Z
dS)r	rz:Display a list of unresolved dependencies for repositoriescCsd|jj}d|_d|_|jjr`xB|jjj�D]2}|j	|jjkrT|j	|jj
krT|j�q*|j�q*WdS)NT)
r�demandsZsack_activationZavailable_repos�opts�repor�repos�all�id�check�disable�enable)r
rrrrr�	configure,s
zRepoClosureCommand.configurecCs�|jjr|j|jj�}n|j�}xRt|j��D]B}tdjt|�|j��td�x||D]}tdj|��qZWq.Wt	|�dkr�t
d�}tjj
|��dS)Nzpackage: {} from {}z  unresolved deps:z    {}rz/Repoclosure ended with unresolved dependencies.)r�arches�_get_unresolved�sorted�keys�print�format�str�reponame�lenr�dnf�
exceptions�Error)r
�
unresolved�pkgZdep�msgrrr�run7szRepoClosureCommand.runNcsLi}t�}|jjr�|jjj�jdd��|jjj�jdd�}xv|jjj�D]D}�j	|jjj�j|j
d�j���|j	|jjj�j|j
d�j��}qHWn |jjj�j��|jjj�j�}|jj
�rN|jjj�jdd�}g}xT|jj
D]H}tjj|�}	|j|	j|jjdddd��}
|
�r|j	|
�}q�|j|�q�W|�rJtjjtd�dj|���|}|jj�rh|j|jjd�|dk	�r~|j|d�|jjj�r��jdd	��j�|j�xf|D]^}t�||<xL|jD]B}t|�}|jd
��s�|jd��r�q�|j |�||j |��q�W�q�Wt�fdd
�|D����fdd�|j!�D�}
dd�|
j!�D�S)NT)�empty)r&F)Z
with_nevraZ
with_providesZwith_filenameszno package matched: %sz, )�arch)Zlatest_per_archz	solvable:zrpmlib(c3s|]}�j|d�s|VqdS))ZprovidesN)�filter)�.0�x)�	availablerr�	<genexpr>�sz5RepoClosureCommand._get_unresolved.<locals>.<genexpr>cs(i|] \}}t�fdd�|D��|�qS)c3s|]}|�kr|VqdS)Nr)r2r3)�unresolved_depsrrr5�sz@RepoClosureCommand._get_unresolved.<locals>.<dictcomp>.<genexpr>)�set)r2�k�v)r6rr�
<dictcomp>�sz6RepoClosureCommand._get_unresolved.<locals>.<dictcomp>cSsi|]\}}|r||�qSrr)r2r8r9rrrr:�s)"r7rZnewestrZsackZqueryr1rZiter_enabled�unionrZlatestr4�pkglistr(ZsubjectZSubject�intersectionZget_best_query�appendr)r*r�joinrZfiltermZconfZbestZapplyZrequiresr%�
startswith�add�items)r
r0r+ZdepsZto_checkrZ	pkglist_q�errorsr,ZsubjZpkg_qZreqZreqnameZunresolved_transitionr)r4r6rr Es\ &






z"RepoClosureCommand._get_unresolvedcCs`|jdgddtd�d�|jdgdtd�d�|jd	d
dtd�d
�|jdgdtd�dd�dS)Nz--archr>rzBcheck packages of the given archs, can be specified multiple times)�default�action�dest�helpz--checkzSpecify repositories to check)rDrErGz-nz--newest�
store_truez+Check only the newest packages in the repos)rErGz--pkgz#Check closure for this package onlyr<)rDrErGrF)�add_argumentr)�parserrrr�
set_argparser�s


z RepoClosureCommand.set_argparser)r)N)rrr�aliasesrZsummaryrr.r �staticmethodrKrrrrr	(s
Qr	)Z
__future__rrZdnfpluginscorerZdnf.clir(ZPluginrrZCommandr	rrrr�<module>s
__pycache__/needs_restarting.cpython-36.pyc000064400000023602150402642250014745 0ustar003

�f`.�	@s$ddlmZddlmZddlmZddlmZddlmZmZddlZddl	Zddl
Z
ddlZddlZddl
Z
ddlZddlZddd	d
ddd
ddg	ZdgZdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�ZGd#d$�d$e�ZGd%d&�d&e�ZejjGd'd(�d(ej j!��Z"dS))�)�absolute_import)�division)�print_function)�unicode_literals)�logger�_NZkernelz	kernel-rtZglibczlinux-firmwareZsystemd�dbuszdbus-brokerzdbus-daemonZ
microcode_ctl�zlibcs�tjj|�st�St�}xjtj|�D]\}tjj|�s$|jd�rBq$ttjj||���&}x|D]}|j	|j
�|f�q\WWdQRXq$Wt��x4|jj�j
�jdd�|D�d�D]}�j	|j�q�Wx6�fdd�|D�D] \}}tjtdj||d���q�W�S)	z�
    Provide filepath as string if single dir or list of strings
    Return set of package names contained in files under filepath
    z.confNcSsh|]}|d�qS)r�)�.0�xr
r
�&/usr/lib/python3.6/needs_restarting.py�	<setcomp>Bsz'get_options_from_dir.<locals>.<setcomp>)�namecsh|]}|d�kr|�qS)rr
)rr)�packagesr
r
rDsz`No installed package found for package name "{pkg}" specified in needs-restarting file "{file}".)�pkg�file)�os�path�exists�set�listdir�isdir�endswith�open�join�add�rstrip�sack�query�	installed�filterrr�warningr�format)�filepath�baseZoptionsr�fp�linerrr
)rr
�get_options_from_dir0s"
$&r(ccs�x�t�D]�\}}y<|dk	r(|t|�kr(wt|ddd��}|j�}WdQRXWn"tk
rntjd|�wYnXx$|D]}t||�}|dk	rv|VqvWqWdS)N�r�replace)�errorszFailed to read PID %d's smaps.)�
list_smaps�	owner_uidr�	readlines�EnvironmentErrorrr"�smap2opened_file)�uid�pid�smapsZ
smaps_file�linesr'�ofiler
r
r
�list_opened_filesKs

r6ccsNxHtjd�D]:}yt|�}Wntk
r2wYnXd|}||fVqWdS)Nz/procz/proc/%d/smaps)rr�int�
ValueError)Zdir_r2r3r
r
r
r,\sr,cst��i����fdd�}|S)Ncs,�j|��}|�k	r|S�|�}|�|<|S)N)�get)Zparam�val)�cache�func�sentinelr
r
�wrapperiszmemoize.<locals>.wrapper)�object)r<r>r
)r;r<r=r
�memoizefsr@cCstj|�tjS)N)r�stat�ST_UID)�fnamer
r
r
r-ssr-cCs$|j�j|d�j�}|r |dSdS)N)rr)rr!�run)rrCZmatchesr
r
r
�owning_packagewsrEcCsPd|}t|��}tjj|j��}WdQRXdj|jd��}td||f�dS)Nz/proc/%d/cmdline� �z%d : %s)r�dnfZi18nZucd�readr�split�print)r2ZcmdlineZcmdline_fileZcommandr
r
r
�	print_cmd~s

rLc	Cs�tj�}|jdd�}tj|d�}d}y|jd|j|��}Wn<tjk
rv}zt|�}tjdj	||��dSd}~XnXtj|dd�}|j
dd�}|jd	�r�|SdS)
Nzorg.freedesktop.systemd1z/org/freedesktop/systemd1z org.freedesktop.systemd1.Managerz)Failed to get systemd unit for PID {}: {}zorg.freedesktop.DBus.Properties)Zdbus_interfacezorg.freedesktop.systemd1.UnitZIdz.service)rZ	SystemBusZ
get_objectZ	InterfaceZGetUnitByPIDZ
DBusException�strrr"r#ZGetr)	r2ZbusZsystemd_manager_objectZsystemd_manager_interfaceZ
service_proxy�e�msgZservice_propertiesrr
r
r
�get_service_dbus�s0

rPcCsn|jd�}|dkrdS|jd�dkr(dS||d�j�}|jd�}|dkrVt||d�St||d|�d�SdS)N�/rz00:z
 (deleted)FT)�find�strip�rfind�
OpenedFile)r2r'Zslash�fnZsuffix_indexr
r
r
r0�s

r0c@s*eZdZejd�Zdd�Zedd��ZdS)rUz^(.+);[0-9A-Fa-f]{8,}$cCs||_||_||_dS)N)�deletedrr2)�selfr2rrWr
r
r
�__init__�szOpenedFile.__init__cCs(|jr"|jj|j�}|r"|jd�S|jS)a;Calculate the name of the file pre-transaction.

        In case of a file that got deleted during the transactionm, possibly
        just because of an upgrade to a newer version of the same file, RPM
        renames the old file to the same name with a hexadecimal suffix just
        before delting it.

        �)rW�RE_TRANSACTION_FILE�matchr�group)rXr\r
r
r
�
presumed_name�s

zOpenedFile.presumed_nameN)	�__name__�
__module__�__qualname__�re�compiler[rY�propertyr^r
r
r
r
rU�s
rUc@s4eZdZdd�Zedd��Zedd��Zdd�Zd	S)
�ProcessStartcCs|j�|_|j�|_dS)N)�
get_boot_time�	boot_time�get_sc_clk_tck�
sc_clk_tck)rXr
r
r
rY�s
zProcessStart.__init__cCshttjd�j�}tjjd�rdtdd��8}|j�j�j	�dj�}tt
j
�t|��}t||�SQRX|S)a	
        We have two sources from which to derive the boot time. These values vary
        depending on containerization, existence of a Real Time Clock, etc.
        For our purposes we want the latest derived value.
        - st_mtime of /proc/1
             Reflects the time the first process was run after booting
             This works for all known cases except machines without
             a RTC - they awake at the start of the epoch.
        - /proc/uptime
             Seconds field of /proc/uptime subtracted from the current time
             Works for machines without RTC iff the current time is reasonably correct.
             Does not work on containers which share their kernel with the
             host - there the host kernel uptime is returned
        z/proc/1z/proc/uptime�rbrN)
r7rrA�st_mtimer�isfiler�readlinerSrJ�time�float�max)Zproc_1_boot_time�fZuptimeZproc_uptime_boot_timer
r
r
rf�szProcessStart.get_boot_timecCstjtjd�S)N�
SC_CLK_TCK)r�sysconf�
sysconf_namesr
r
r
r
rh�szProcessStart.get_sc_clk_tckc
CsLd|}t|��}|j�j�j�}WdQRXt|d�}||j}|j|S)Nz
/proc/%d/stat�)rrIrSrJr7rirg)rXr2Zstat_fnZ	stat_fileZstatsZticks_after_bootZsecs_after_bootr
r
r
�__call__�s

zProcessStart.__call__N)r_r`rarY�staticmethodrfrhrvr
r
r
r
re�srec@s4eZdZd
Zed�Zedd��Zdd�Zdd�Z	d	S)�NeedsRestartingCommand�needs-restartingz/determine updated binaries that need restartingcCsF|jdddtd�d�|jdddtd�d�|jd	d
dtd�d�dS)Nz-uz
--useronly�
store_truez#only consider this user's processes)�action�helpz-rz--reboothintzKonly report whether a reboot is required (exit code 1) or not (exit code 0)z-sz
--servicesz%only report affected systemd services)�add_argumentr)�parserr
r
r
�
set_argparsers


z$NeedsRestartingCommand.set_argparsercCs|jj}d|_dS)NT)�cli�demandsZsack_activation)rXr�r
r
r
�	configuresz NeedsRestartingCommand.configurecCsNt�}tjt|jj�}t|�}ttj	j
|jjjd�|j�}t
j|�|jj�r�t�}t�}|jjj�j�}x,|jt
d�D]}|j|jkrx|j|j�qxW|jdddgd�}t|�dkr�x,|jtd�D]}|j|jkr�|j|j�q�W|s�|�rfttd��xt|�D]}	td|	��qWxt|�D]}	td	|	��q$Wt�ttd
��ttd�d�tjj ��nttd
��ttd��dSt�}
|jj!�r�tj"�nd}xHt#|�D]<}||j$�}|dk�rĐq�|j||j%�k�r�|
j|j%��q�W|jj&�r.tdd�t|
�D��}
x |
D]}	|	dk	�rt|	��qWdSxt|
�D]}t'|��q8WdS)Nz#etc/dnf/plugins/needs-restarting.d/)rrzdbus-daemonzdbus-brokerrz;Core libraries or services have been updated since boot-up:z  * %sz8  * %s (dependency of dbus. Recommending reboot of dbus)z2Reboot is required to fully utilize these updates.zMore information:z)https://access.redhat.com/solutions/27943z>No core libraries or services have been updated since boot-up.zReboot should not be necessary.cSsg|]}t|��qSr
)rP)rr2r
r
r
�
<listcomp>Bsz.NeedsRestartingCommand.run.<locals>.<listcomp>)(re�	functools�partialrEr%rr@r(rrrZconfZinstallroot�NEED_REBOOT�extendZoptsZ
reboothintrrr r!Zinstalltimergrr�len�NEED_REBOOT_DEPENDS_ON_DBUSrKr�sortedrH�
exceptions�ErrorZuseronly�geteuidr6r^r2ZservicesrL)rXZ
process_startZ
owning_pkg_fn�optZneed_rebootZneed_reboot_depends_on_dbusr rZdbus_installedrZ
stale_pidsr1r5�namesr2r
r
r
rDsd







zNeedsRestartingCommand.runN)ry)
r_r`ra�aliasesrZsummaryrwrr�rDr
r
r
r
rx�s

rx)#Z
__future__rrrrZdnfpluginscorerrrHZdnf.clirr�rrbrArnr�r�r(r6r,r@r-rErLrPr0r?rUreZpluginZregister_commandr�ZCommandrxr
r
r
r
�<module>s:

"+__pycache__/download.cpython-36.opt-1.pyc000064400000022456150402642250014161 0ustar003

�gt`*0�@s�ddlmZddlmZddlmZmZddlmZddlZddl	Zddl
ZddlZddlZddl
ZddlZddlZddlZddlZejjGdd�dejj��ZdS)�)�absolute_import)�unicode_literals)�_�logger)�OptionParserNcs�eZdZdgZed�Z�fdd�Zedd��Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zd dd�Zd!dd�Zedd��Zdd�Zdd�Z�ZS)"�DownloadCommandZdownloadz%Download package to current directorycs tt|�j|�d|_d|_dS)N)�superr�__init__�opts�parser)�self�cli)�	__class__��/usr/lib/python3.6/download.pyr	+szDownloadCommand.__init__c	Cs�|jddtd�d�|jddtd�d�|jd	dtd
�d�|jddtd�d�|jd
ddgtjdtd�d�|jddtd�d�|jddtd�d�|jddddtd�d�|jddddd d!ggtd"�d#�dS)$N�packages�+zpackages to download)�nargs�helpz--source�
store_truezdownload the src.rpm instead)�actionrz--debuginfoz'download the -debuginfo package insteadz
--debugsourcez)download the -debugsource package insteadz--archz
--archlist�archesz[arch]z5limit  the  query to packages of given architectures.)�dest�defaultr�metavarrz	--resolvez(resolve and download needed dependenciesz	--alldepsz^when running with --resolve, download all dependencies (do not exclude already installed ones)z--urlz--urls�urlzJprint list of urls where the rpms can be downloaded instead of downloading)rrrz--urlprotocols�append�http�httpsZrsync�ftpz4when running with --url, limit to specific protocols)r�choicesrr)�add_argumentrrZ_SplitCallback)rrrr�
set_argparser0s*
zDownloadCommand.set_argparsercCs�|jj}d|_d|_|jjr*|jjr*d|_|jjr>|j	j
j�|jjsN|jj
rZ|j	j
j�|jjrr|jj|j	j_ntjjtj��|j	j_dS)NTF)r
�demandsZsack_activationZavailable_reposr
�resolveZalldepsZload_system_repo�source�baseZreposZenable_source_repos�	debuginfo�debugsourceZenable_debug_repos�destdir�conf�dnf�i18n�ucd�os�getcwd)rr#rrr�	configureKszDownloadCommand.configurecCs|jjr.|jjr.|jjr.|j|jj�}nXg}|jjrN|j|j|jj��|jjrj|j|j|jj��|jjr�|j|j	|jj��|jj
r�xd|D]\}|jtj
kr�|j|jjd�}|r�t|�q�td�|j}|jjjr�tjj|��tj|�q�WdS|j|�dS)zExecute the util action here.)�schemesz$Failed to get mirror for package: %sN)r
r%r'r(�_get_pkg_objs_rpmsr�extend�_get_pkg_objs_source�_get_pkg_objs_debuginfo�_get_pkg_objs_debugsourcer�repoid�hawkey�CMDLINE_REPO_NAMEZremote_locationZurlprotocols�printr�namer&r*�strictr+�
exceptions�ErrorrZwarning�
_do_downloads)r�pkgs�pkgr�msgrrr�run^s.





zDownloadCommand.runcCsi}x"|D]}|jt|�g�j|�q
Wg}g}xP|j�D]D}dd�|D�}|r`|j|d�q:|jdd�d�|j|d�q:W|r�|jj||jjj�|r�x^|D]V}|j	�}t
jj|jj
jt
jj|��}	t
jj|	�r�t
jj||	�r�q�tj||jj
j�q�Wtdd�||D��}
|
S)z=
        Perform the download for a list of packages
        cSsg|]}|jtjkr|�qSr)r7r8r9)�.0rArrr�
<listcomp>�sz1DownloadCommand._do_downloads.<locals>.<listcomp>rcSs|jj|jjfS)N)ZrepoZpriorityZcost)�xrrr�<lambda>�sz/DownloadCommand._do_downloads.<locals>.<lambda>)�keycSsg|]}|j��qSr)�localPkg)rDrArrrrE�s)�
setdefault�strr�values�sortr&Zdownload_packages�output�progressrIr.�path�joinr*r)�basename�exists�samefile�shutil�copy�sorted)rr@Zpkg_dictrAZto_downloadZcmdlineZpkg_listZpkgs_cmdline�src�dstZ	locationsrrrr?�s.

zDownloadCommand._do_downloadscCs"|jjr|j|�}n
|j|�}|S)zc
        Return a list of dnf.Package objects that represent the rpms
        to download.
        )r
r$�_get_packages_with_deps�
_get_packages)r�	pkg_specsr@rrrr2�s
z"DownloadCommand._get_pkg_objs_rpmscCs*|j|�}|j|�}t|j|dd��}|S)zj
        Return a list of dnf.Package objects that represent the source
        rpms to download.
        T)r%)r2�_get_source_packages�setr[)rr\r@�source_pkgsrrrr4�s

z$DownloadCommand._get_pkg_objs_sourcec	Cs�t�}|jjj�j�}xh|j|�D]Z}xT|j|jgD]D}|j|t	|j
�|j|j|j
d�}|s^q4x|D]}|j|�qdWPq4Wq"W|S)zm
        Return a list of dnf.Package objects that represent the debuginfo
        rpms to download.
        )r;�epoch�version�release�arch)r^r&�sack�query�	availabler[Z
debug_nameZsource_debug_name�filter�intr`rarbrc�add)rr\�dbg_pkgs�qrAZdbg_name�
dbg_available�prrrr5�s 


z'DownloadCommand._get_pkg_objs_debuginfocCsht�}|jjj�j�}xL|j|�D]>}|j|jt|j	�|j
|j|jd�}x|D]}|j
|�qNWq"W|S)zo
        Return a list of dnf.Package objects that represent the debugsource
        rpms to download.
        )r;r`rarbrc)r^r&rdrerfr[rgZdebugsource_namerhr`rarbrcri)rr\rjrkrArlrmrrrr6�s

z)DownloadCommand._get_pkg_objs_debugsourceFcCs�|r
|jn|j}g}x||D]t}y|j||��Wqtjjk
r�}z<tjtjj	|��|j
jjr|tjt
d��tjj|��WYdd}~XqXqWttj|��}|S)z Get packages matching pkg_specs.zExiting due to strict setting.N)�_get_query_source�
_get_queryrr+r=�PackageNotFoundErrorr�errorr,r-r&r*r<rr>�list�	itertools�chain)rr\r%�funcZqueries�pkg_spec�er@rrrr[�s

"zDownloadCommand._get_packagesc	Cs�|j|�}t|�}x�|D]�}tj|jj�}|j|�|j�}|r^|j|j	��|j|j
��qtd�g}tj
dj|dd�|D���tj
tjj|j���tjj��qW|S)z-Get packages matching pkg_specs and the deps.zError in resolve of packages:z
    cSsg|]}t|��qSr)rK)rDrArrrrEsz;DownloadCommand._get_packages_with_deps.<locals>.<listcomp>)r[r^r8ZGoalr&rdZinstallrC�updateZ
list_installsZ
list_upgradesrrrqrQr+�utilZ_format_resolve_problemsZ
problem_rulesr=r>)	rr\r%r@Zpkg_setrAZgoalZrcrBrrrrZ�s



z'DownloadCommand._get_packages_with_depscCszt�}xj|D]b}|jr8|j|j�tjdt|�|j�q|jdkrZ|jd|j|jf�qtj	t
d�t|��qWt|�S)z4Get list of source rpm names for a list of packages.z  --> Package : %s Source : %srXz
%s-%s.src.rpmzNo source rpm defined for %s)r^Z	sourcerpmrir�debugrKrcr;Zevr�inforrr)r@r_rArrrr]s

z$DownloadCommand._get_source_packagescCs�tjjj|�d}|o|dk}|s8|jd�rdtjj|�rd|jj|g|jj	j
d�}|jjj�j
|d�Stjj|�}|j|jj|jjd	�}|j�}|j
d
d�}|jjr�|j|jjd�}t|j��dkr�td
�|}tjj|��|S)z#Return a query to match a pkg_spec.rrr�filerz.rpm)rO)rA)Zwith_srcT)Zlatest_per_arch_by_priority)rczNo package %s available.)rrr|r)r+ZpycompZurlparse�endswithr.rP�isfiler&Zadd_remote_rpmsrNrOrdreZfilterm�subject�SubjectZget_best_queryr
r%rfrrg�lenrCrr=rp)rrvr1Zis_urlr@�subjrkrBrrrroszDownloadCommand._get_querycCsd|dd�}tjj|�}x.|j�D]"}|j|jj�j�}|r"|j�Sq"Wt	d�|}tj
j|��dS)z/Return a query to match a source rpm file name.N�zNo package %s available.���)r+rr�Zget_nevra_possibilitiesZto_queryr&rdrfZlatestrr=rp)rrvr�Z	nevra_objZ	tmp_queryrBrrrrn,sz!DownloadCommand._get_query_source)F)F)�__name__�
__module__�__qualname__�aliasesrZsummaryr	�staticmethodr"r0rCr?r2r4r5r6r[rZr]rorn�
__classcell__rr)rrr%s #!


r)Z
__future__rrZdnfpluginscorerrZdnf.cli.option_parserrr+Zdnf.cliZdnf.exceptionsZdnf.i18nZdnf.subjectZdnf.utilr8rsr.rUZpluginZregister_commandr
ZCommandrrrrr�<module>s__pycache__/etckeeper.cpython-36.pyc000064400000002540150402642250013352 0ustar003

�:vh�@s`ddlZddlZddlZejd�ZGdd�dej�Zedkr\ddlm	Z	e	ddgdd	id
�dS)�Nz
dnf.pluginc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�	Etckeeper�	etckeepercCs�tjd|�yLtdd��8}tjd|f||dd�}|dkrLtjjd||f��WdQRXWn4tk
r�}ztj	d	||f�WYdd}~XnXdS)
NzEtckeeper plugin: %sz	/dev/null�wbrT)�stdout�stderrZ	close_fdsrz"etckeeper %s" returned: %dz Failed to run "etckeeper %s": %s)
�logger�debug�open�
subprocessZcall�dnf�
exceptions�Error�OSErrorZwarning)�selfZcommand�devnull�ret�err�r�/usr/lib/python3.6/etckeeper.py�_run_commands
"zEtckeeper._run_commandcCs|jd�dS)Nzpre-install)r)rrrr�resolved!szEtckeeper.resolvedcCs|jd�dS)Nzpost-install)r)rrrr�transaction$szEtckeeper.transactionN)�__name__�
__module__�__qualname__�namerrrrrrrrsr�__main__)�setupz
dnf-etckeeperzdnf-pluginsz
etckeeper-dnf)rZpackagesZpackage_dir)
Zloggingr
rZ	getLoggerrZPluginrrZdistutils.corerrrrr�<module>
s
__pycache__/debug.cpython-36.pyc000064400000025101150402642250012467 0ustar003

�gt`1�@s�ddlmZddlmZddlmZddlmZmZddlZddl	Zddl
Z
ddlZddlZddl
Z
ddlZddlZdZGdd�dej�ZGd	d
�d
ejj�ZGdd�dejj�Zd
d�Zdd�Zdd�ZdS)�)�absolute_import)�unicode_literals)�ucd)�_�loggerNzdnf-debug-dump version 1
cs eZdZdZ�fdd�Z�ZS)�Debug�debugcsDtt|�j||�||_||_|jdk	r@|jjt�|jjt�dS)N)�superr�__init__�base�cliZregister_command�DebugDumpCommand�DebugRestoreCommand)�selfrr)�	__class__��/usr/lib/python3.6/debug.pyr
)s
zDebug.__init__)�__name__�
__module__�__qualname__�namer
�
__classcell__rr)rrr%srcsteZdZdZed�Z�fdd�Zdd�Zedd��Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Z�ZS)r
�
debug-dumpz5dump information about installed rpm packages to filecstt|�j|�d|_dS)N)r	r
r
�	dump_file)rr)rrrr
7szDebugDumpCommand.__init__cCsd|jj_d|jj_dS)NT)r�demands�sack_activation�available_repos)rrrr�	configure;s
zDebugDumpCommand.configurecCs.|jdddtd�d�|jddtd�d	�dS)
Nz	--norepos�
store_trueFz/do not attempt to dump the repository contents.)�action�default�help�filename�?zoptional name of dump file)�nargsr!)�add_argumentr)�parserrrr�
set_argparser?szDebugDumpCommand.set_argparsercCs�|jj}|s6tjdtjtj���}dtj�d|f}tjj|�}|j	d�r\t
j|d�|_nt
|d�|_|jt�|j�|j�|j�|j|jj�|j�|jj�ttd�|�dS)z{create debug txt file and compress it, if no filename specified
           use dnf_debug_dump-<timestamp>.txt.gz by defaultz%Y-%m-%d_%Tzdnf_debug_dump-%s-%s.txt.gz�z.gz�wzOutput written to: %sN)�optsr"�timeZstrftimeZ	localtime�os�uname�path�abspath�endswith�gzip�GzipFiler�open�write�
DEBUG_VERSION�dump_system_info�dump_dnf_config_info�dump_rpm_problems�
dump_packagesZnorepos�dump_rpmdb_versions�close�printr)rr"Znowrrr�runHs 


zDebugDumpCommand.runcCs4tjjr t|jtj�r t|d�}tjj|j|�dS)N�utf8)	�dnfZpycompZPY3�
isinstancerr1r2�bytesZ
write_to_file)r�msgrrrr4as
zDebugDumpCommand.writecCsX|jd�tj�}|jd|d|df�|jdtj�|jdtjjdd��dS)	Nz%%%%SYSTEM INFO
z  uname: %s, %s
��z  rpm ver: %s
z  python ver: %s
�
�)r4r,r-�rpm�__version__�sys�version�replace)rr-rrrr6fs
z!DebugDumpCommand.dump_system_infocCs�|jjj}djdd�|jjjD��}|jd�|jd|d�|jd|d�|jd	|d
�|jdtjj	�|jd|�|jd
dj|jjj
��dS)N�,cSsg|]
}|j�qSr)r)�.0�prrr�
<listcomp>psz9DebugDumpCommand.dump_dnf_config_info.<locals>.<listcomp>z
%%%%DNF INFO
z  arch: %s
�archz  basearch: %s
Zbasearchz  releasever: %s
Z
releaseverz  dnf ver: %s
z  enabled plugins: %s
z  global excludes: %s
)r�confZ
substitutions�joinZ_plugins�pluginsr4r?�const�VERSION�excludepkgs)r�varrSrrrr7ns

z%DebugDumpCommand.dump_dnf_config_infocCsP|jd�t|j�\}}|jdjdd�|D���|jdjdd�|D���dS)Nz%%%%RPMDB PROBLEMS
rFcSs$g|]\}}dt|�t|�f�qS)zPackage %s requires %s
)r)rM�req�pkgrrrrO}sz6DebugDumpCommand.dump_rpm_problems.<locals>.<listcomp>cSs$g|]\}}dt|�t|�f�qS)zPackage %s conflicts with %s
)r)rMrQrYrrrrOs)r4�rpm_problemsrrR)rZmissing�	conflictsrrrr8zs
z"DebugDumpCommand.dump_rpm_problemsc	Cs\|jjj�}|jd�x&t|j��D]}|jdt|��q$W|sFdS|jd�|j�}x�t|jjj	�dd�d�D]�}y�d}|j
dk	r�|j
}n*|jdk	r�|j}nt|j
�dkr�|j
d}|jd|j|f�|jd	d
j|j��x,t|j|jd��D]}|jdt|��q�WWqrtjjk
�rR}z|jd|t|�f�wrWYdd}~XqrXqrWdS)
Nz
%%%%RPMDB
z  %s
z
%%%%REPOS
cSs|jS)N)�id)�xrrr�<lambda>�sz0DebugDumpCommand.dump_packages.<locals>.<lambda>)�keyrz
%%%s - %s
z  excludes: %s
rL)ZreponamezError accessing repo %s: %s
)r�sack�queryr4�sorted�	installed�pkgspec�	availableZreposZiter_enabledZmetalinkZ
mirrorlist�lenZbaseurlr\rRrV�filterr?�
exceptions�Error�str)	rZ
load_repos�qrNreZrepoZurlZpo�errrr9�s2




zDebugDumpCommand.dump_packagescCs(|jd�|jjj�}|jd|�dS)Nz%%%%RPMDB VERSIONS
z
  all: %s
)r4rr`Z_rpmdb_version)rrJrrrr:�s
z$DebugDumpCommand.dump_rpmdb_versions)r)rrr�aliasesr�summaryr
r�staticmethodr'r=r4r6r7r8r9r:rrr)rrr
2s		r
c@sPeZdZdZed�Zdd�Zedd��Zdd�Z	d	d
�Z
dd�Zed
d��ZdS)r�
debug-restorez,restore packages recorded in debug-dump filecCs4d|jj_d|jj_d|jj_|jjs0d|jj_dS)NT)rrrrZ	root_userr*�outputZ	resolving)rrrrr�s



zDebugRestoreCommand.configurecCs~|jddtd�d�|jddtd�d�|jddtd�d�|jd	d
dtd�d
�|jddtd�d�|jddtd�d�dS)Nz--outputrz,output commands that would be run to stdout.)rr!z--install-latestz0Install the latest version of recorded packages.z
--ignore-archz_Ignore architecture and install missing packages matching the name, epoch, version and release.z--filter-typesz[install, remove, replace]zinstall, remove, replacezlimit to specified type)�metavarr r!z--remove-installonlyzqAllow removing of install-only packages. Using this option may result in an attempt to remove the running kernel.r"r(zname of dump file)r$r!)r%r)r&rrrr'�s$z!DebugRestoreCommand.set_argparsercCsV|jjr$t|jjjdd�j��|j_|j|jjd�}|j||j�|j||j�dS)z Execute the command action here.rL� rN)	r*�filter_types�setrK�split�read_dump_filer"�process_installed�process_dump)r�	dump_pkgsrrrr=�szDebugRestoreCommand.runcCs�|jjj�j�}|jj|�}x�|D]�}d}t|�}|j|j|jfd�}|dk	r�|j	|j
|jf}	|	|krpd||	<q�||kr~d}q�d|jkr�d}
nd}
x|j
�D]}|
||<q�Wnd}|r"d|jkr"||ks�|jr"|jr�td|�q"|jj|�q"WdS)NF�skipTrK�removezremove    %s)rr`rarcZ_get_installonly_queryrd�getrrP�epochrJ�releasert�keysZremove_installonlyrqr<Zpackage_remove)rrzr*rcZinstallonly_pkgsrYZ
pkg_remove�spec�dumped_versionsZevrrZd_evrrrrrx�s.


z%DebugRestoreCommand.process_installedc
Cs�x�t|j��D]�\}}|||f}x�t|j��D]�\}}}||||f}	|	dkrRq0|jr^d}
nd|}
|jr�|	dkr�d||
f}nt||
|||�}|	|jkr0|jr�td|	|f�q0y|jj	|�Wq0t
jjk
r�t
jtd�|�Yq0Xq0WqWdS)Nr{rF�.�installz%s%sz%s   %szPackage %s is not available)rbr�Zignore_archZinstall_latest�pkgtup2specrtrqr<rr�r?rhZMarkingErrorr�errorr)rrzr*�n�ar�rl�v�rrrP�pkg_specrrrry�s&
z DebugRestoreCommand.process_dumpcCs�|jd�rtj|�}nt|�}t|j��tkrFtjt	d�|�t
jj�d}i}xp|D]h}t|�}|rr|dkrTd}qT|s�|ddkr�P|j
�}tj|�}d|j|j|jfi�|j|j|jf<qTW|S)	Nz.gzzBad dnf debug file: %sTz
%%%%RPMDB
Frrsr�)r0r1r2r3r�readliner5rr�rr?rhri�strip�hawkeyZsplit_nevra�
setdefaultrrPr~rJr)r"Zfobjr{Zpkgs�liner�Znevrarrrrws(


(z"DebugRestoreCommand.read_dump_fileN)rp)
rrrrmrrnrror'r=rxryrwrrrrr�s#rcs�tjj|�}|j�j��t�}t�}x@�D]8�|j�fdd��jD��|j�fdd��jD��q*W�fdd�|D�}�fdd�|D�}||fS)Ncs2g|]*}t|�dkrt|�jd�r|�f�qS)zsolvable:prereqmarkerzrpmlib()rj�
startswith)rMrX)rYrrrO:sz rpm_problems.<locals>.<listcomp>csg|]}|�f�qSrr)rMrQ)rYrrrO=scs$g|]\}}�j|d�s||f�qS))�provides)rg)rMrXrY)�allpkgsrrrO?scs$g|]\}}�j|d�r||f�qS))r�)rg)rMrQrY)r�rrrOAs)	r?r`Z_rpmdb_sackrarcru�update�requiresr[)rZrpmdbr�r[Zmissing_requiresZexisting_conflictsr)r�rYrrZ3s
rZcCst|j|j|j|j|j�S)N)r�rrPr~rJr)rYrrrrdFsrdcCs<|sdnd|jd�}|dkr"dnd|}d|||||fS)NrFz.%sr�z%s:z%s-%s%s-%s%s)NrF)�lstrip)rrPr~rJrr�rlrrrr�Jsr�)Z
__future__rrZdnf.i18nrZdnfpluginscorerrr?Zdnf.clir1r�r,rGrIr+r5ZPluginrrZCommandr
rrZrdr�rrrr�<module>s&
w__pycache__/download.cpython-36.pyc000064400000022456150402642250013222 0ustar003

�gt`*0�@s�ddlmZddlmZddlmZmZddlmZddlZddl	Zddl
ZddlZddlZddl
ZddlZddlZddlZddlZejjGdd�dejj��ZdS)�)�absolute_import)�unicode_literals)�_�logger)�OptionParserNcs�eZdZdgZed�Z�fdd�Zedd��Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zd dd�Zd!dd�Zedd��Zdd�Zdd�Z�ZS)"�DownloadCommandZdownloadz%Download package to current directorycs tt|�j|�d|_d|_dS)N)�superr�__init__�opts�parser)�self�cli)�	__class__��/usr/lib/python3.6/download.pyr	+szDownloadCommand.__init__c	Cs�|jddtd�d�|jddtd�d�|jd	dtd
�d�|jddtd�d�|jd
ddgtjdtd�d�|jddtd�d�|jddtd�d�|jddddtd�d�|jddddd d!ggtd"�d#�dS)$N�packages�+zpackages to download)�nargs�helpz--source�
store_truezdownload the src.rpm instead)�actionrz--debuginfoz'download the -debuginfo package insteadz
--debugsourcez)download the -debugsource package insteadz--archz
--archlist�archesz[arch]z5limit  the  query to packages of given architectures.)�dest�defaultr�metavarrz	--resolvez(resolve and download needed dependenciesz	--alldepsz^when running with --resolve, download all dependencies (do not exclude already installed ones)z--urlz--urls�urlzJprint list of urls where the rpms can be downloaded instead of downloading)rrrz--urlprotocols�append�http�httpsZrsync�ftpz4when running with --url, limit to specific protocols)r�choicesrr)�add_argumentrrZ_SplitCallback)rrrr�
set_argparser0s*
zDownloadCommand.set_argparsercCs�|jj}d|_d|_|jjr*|jjr*d|_|jjr>|j	j
j�|jjsN|jj
rZ|j	j
j�|jjrr|jj|j	j_ntjjtj��|j	j_dS)NTF)r
�demandsZsack_activationZavailable_reposr
�resolveZalldepsZload_system_repo�source�baseZreposZenable_source_repos�	debuginfo�debugsourceZenable_debug_repos�destdir�conf�dnf�i18n�ucd�os�getcwd)rr#rrr�	configureKszDownloadCommand.configurecCs|jjr.|jjr.|jjr.|j|jj�}nXg}|jjrN|j|j|jj��|jjrj|j|j|jj��|jjr�|j|j	|jj��|jj
r�xd|D]\}|jtj
kr�|j|jjd�}|r�t|�q�td�|j}|jjjr�tjj|��tj|�q�WdS|j|�dS)zExecute the util action here.)�schemesz$Failed to get mirror for package: %sN)r
r%r'r(�_get_pkg_objs_rpmsr�extend�_get_pkg_objs_source�_get_pkg_objs_debuginfo�_get_pkg_objs_debugsourcer�repoid�hawkey�CMDLINE_REPO_NAMEZremote_locationZurlprotocols�printr�namer&r*�strictr+�
exceptions�ErrorrZwarning�
_do_downloads)r�pkgs�pkgr�msgrrr�run^s.





zDownloadCommand.runcCsi}x"|D]}|jt|�g�j|�q
Wg}g}xP|j�D]D}dd�|D�}|r`|j|d�q:|jdd�d�|j|d�q:W|r�|jj||jjj�|r�x^|D]V}|j	�}t
jj|jj
jt
jj|��}	t
jj|	�r�t
jj||	�r�q�tj||jj
j�q�Wtdd�||D��}
|
S)z=
        Perform the download for a list of packages
        cSsg|]}|jtjkr|�qSr)r7r8r9)�.0rArrr�
<listcomp>�sz1DownloadCommand._do_downloads.<locals>.<listcomp>rcSs|jj|jjfS)N)ZrepoZpriorityZcost)�xrrr�<lambda>�sz/DownloadCommand._do_downloads.<locals>.<lambda>)�keycSsg|]}|j��qSr)�localPkg)rDrArrrrE�s)�
setdefault�strr�values�sortr&Zdownload_packages�output�progressrIr.�path�joinr*r)�basename�exists�samefile�shutil�copy�sorted)rr@Zpkg_dictrAZto_downloadZcmdlineZpkg_listZpkgs_cmdline�src�dstZ	locationsrrrr?�s.

zDownloadCommand._do_downloadscCs"|jjr|j|�}n
|j|�}|S)zc
        Return a list of dnf.Package objects that represent the rpms
        to download.
        )r
r$�_get_packages_with_deps�
_get_packages)r�	pkg_specsr@rrrr2�s
z"DownloadCommand._get_pkg_objs_rpmscCs*|j|�}|j|�}t|j|dd��}|S)zj
        Return a list of dnf.Package objects that represent the source
        rpms to download.
        T)r%)r2�_get_source_packages�setr[)rr\r@�source_pkgsrrrr4�s

z$DownloadCommand._get_pkg_objs_sourcec	Cs�t�}|jjj�j�}xh|j|�D]Z}xT|j|jgD]D}|j|t	|j
�|j|j|j
d�}|s^q4x|D]}|j|�qdWPq4Wq"W|S)zm
        Return a list of dnf.Package objects that represent the debuginfo
        rpms to download.
        )r;�epoch�version�release�arch)r^r&�sack�query�	availabler[Z
debug_nameZsource_debug_name�filter�intr`rarbrc�add)rr\�dbg_pkgs�qrAZdbg_name�
dbg_available�prrrr5�s 


z'DownloadCommand._get_pkg_objs_debuginfocCsht�}|jjj�j�}xL|j|�D]>}|j|jt|j	�|j
|j|jd�}x|D]}|j
|�qNWq"W|S)zo
        Return a list of dnf.Package objects that represent the debugsource
        rpms to download.
        )r;r`rarbrc)r^r&rdrerfr[rgZdebugsource_namerhr`rarbrcri)rr\rjrkrArlrmrrrr6�s

z)DownloadCommand._get_pkg_objs_debugsourceFcCs�|r
|jn|j}g}x||D]t}y|j||��Wqtjjk
r�}z<tjtjj	|��|j
jjr|tjt
d��tjj|��WYdd}~XqXqWttj|��}|S)z Get packages matching pkg_specs.zExiting due to strict setting.N)�_get_query_source�
_get_queryrr+r=�PackageNotFoundErrorr�errorr,r-r&r*r<rr>�list�	itertools�chain)rr\r%�funcZqueries�pkg_spec�er@rrrr[�s

"zDownloadCommand._get_packagesc	Cs�|j|�}t|�}x�|D]�}tj|jj�}|j|�|j�}|r^|j|j	��|j|j
��qtd�g}tj
dj|dd�|D���tj
tjj|j���tjj��qW|S)z-Get packages matching pkg_specs and the deps.zError in resolve of packages:z
    cSsg|]}t|��qSr)rK)rDrArrrrEsz;DownloadCommand._get_packages_with_deps.<locals>.<listcomp>)r[r^r8ZGoalr&rdZinstallrC�updateZ
list_installsZ
list_upgradesrrrqrQr+�utilZ_format_resolve_problemsZ
problem_rulesr=r>)	rr\r%r@Zpkg_setrAZgoalZrcrBrrrrZ�s



z'DownloadCommand._get_packages_with_depscCszt�}xj|D]b}|jr8|j|j�tjdt|�|j�q|jdkrZ|jd|j|jf�qtj	t
d�t|��qWt|�S)z4Get list of source rpm names for a list of packages.z  --> Package : %s Source : %srXz
%s-%s.src.rpmzNo source rpm defined for %s)r^Z	sourcerpmrir�debugrKrcr;Zevr�inforrr)r@r_rArrrr]s

z$DownloadCommand._get_source_packagescCs�tjjj|�d}|o|dk}|s8|jd�rdtjj|�rd|jj|g|jj	j
d�}|jjj�j
|d�Stjj|�}|j|jj|jjd	�}|j�}|j
d
d�}|jjr�|j|jjd�}t|j��dkr�td
�|}tjj|��|S)z#Return a query to match a pkg_spec.rrr�filerz.rpm)rO)rA)Zwith_srcT)Zlatest_per_arch_by_priority)rczNo package %s available.)rrr|r)r+ZpycompZurlparse�endswithr.rP�isfiler&Zadd_remote_rpmsrNrOrdreZfilterm�subject�SubjectZget_best_queryr
r%rfrrg�lenrCrr=rp)rrvr1Zis_urlr@�subjrkrBrrrroszDownloadCommand._get_querycCsd|dd�}tjj|�}x.|j�D]"}|j|jj�j�}|r"|j�Sq"Wt	d�|}tj
j|��dS)z/Return a query to match a source rpm file name.N�zNo package %s available.���)r+rr�Zget_nevra_possibilitiesZto_queryr&rdrfZlatestrr=rp)rrvr�Z	nevra_objZ	tmp_queryrBrrrrn,sz!DownloadCommand._get_query_source)F)F)�__name__�
__module__�__qualname__�aliasesrZsummaryr	�staticmethodr"r0rCr?r2r4r5r6r[rZr]rorn�
__classcell__rr)rrr%s #!


r)Z
__future__rrZdnfpluginscorerrZdnf.cli.option_parserrr+Zdnf.cliZdnf.exceptionsZdnf.i18nZdnf.subjectZdnf.utilr8rsr.rUZpluginZregister_commandr
ZCommandrrrrr�<module>s__pycache__/spacewalk.cpython-36.opt-1.pyc000064400000023462150402642250014322 0ustar003

.޾g�7�@sfddlmZddlmZddlmZmZddlZddlZddlZddl	Z	ddl
Z
ddlZddlmZddl
mZddlZddlZddlZddlZddlmZddlmZd	Zed
�Zed�Zed�Zed
�Zed�Zed�Zed�Zed�Zed�Z ed�Z!ed�Z"ed�Z#ed�Z$Gdd�dej%�Z&Gdd�dej'j(�Z)dd�Z*Gdd�de+�Z,dd �Z-d!d"�Z.dS)#�)�absolute_import)�unicode_literals)�_�loggerN)�copy)�PRIO_PLUGINCONFIG)�ustr)�
up2dateErrorsz_spacewalk.jsonz7CloudLinux Network based repositories will be disabled.z4CloudLinux Network channel support will be disabled.z@There was an error communicating with CloudLinux Network server.z=This system is not registered with CloudLinux Network server.z.This system is not subscribed to any channels.zSystemId could not be acquired.z%You can use rhn_register to register.z@This system is receiving updates from CloudLinux Network server.z�For security reasons packages from CloudLinux Network based repositories can be verified only with locally installed gpg keys. GPG key '%s' has been rejected.z.Package profile information could not be sent.z=Missing required login information for CloudLinux Network: %sz'Leapp upgrade is running - using cache.z>Spacewalk plugin has to be run under with the root privileges.csZeZdZdZ�fdd�Zdd�Zdd�Zdd	d
�Zdd�Zd
d�Z	dd�Z
dd�Z�ZS)�	SpacewalkZ	spacewalkcs�tt|�j||�||_||_tjj|jjj	t
�|_d|_i|_
t|jj�|_|j|j�|_d|jj�kr�|jjd�}x |D]\}}|jj||t�q|Wtjj�s�tjt�d|j_|jjs�dStjd�|j�dS)NF�mainzinitialized Spacewalk plugin)�superr
�__init__�base�cli�os�path�join�confZ
persistdir�STORED_CHANNELS_NAME�stored_channels_path�connected_to_spacewalk�up2date_cfgrZread_config�parser�sections�items�
_set_valuer�dnf�utilZ	am_i_rootr�warning�MUST_BE_ROOT�enabled�debug�activate_channels)�selfrr�options�key�value)�	__class__��/usr/lib/python3.6/spacewalk.pyr
;s(


zSpacewalk.__init__cCs$|jjsdS|jsdSd|jj_dS)NT)rr rZdemandsZ	root_user)r#r(r(r)�configRs
zSpacewalk.configcCstjd�dS)Nz$/usr/sbin/clnreg_ks --strict-edition)r�system)r#r(r(r)�clnreg^szSpacewalk.clnregTcCs�i}d}d}d}d}|j�}|s(|}�n�tjjd�rFtjt�|}�n�tjj	�|_
t|j
�}|j
df}d}xl|s�ytjj
|jjd�}d}Wqntjk
r�}	z*|dkr�|j�d}wntjdtt|	�dSd}	~	XqnXqnW|�s�tjdtt�|ji�dSytjj|jjd�}
Wn�tjk
�rF}	ztjdtt|	�dSd}	~	XnXtjk
�rttjdtt�|ji�dStjk
�r�tjd	ttt t�dSXd|_!tj"t#�x,|
D]$}|d
�r�t$|j%��||d<�q�W|j|�|j&j'}x�|j%�D]�\}
}|j(|
�}d}|�r|j(d
�}t)|j�}|
|j*j+�k�rf|j*j%|
�}x |D]\}}|j,||t-��qJWt.||j&j||j|||||j/|j0d�	�}|j1|��q�Wtj2|�dS)
Nrz/etc/cln_leapp_in_progressZuseNoSSLForPackagesF)�timeoutTz%s
%s
%sz%s
%sz%s %s
%s
%s�version�label)	r�proxyr-�	sslcacert�
force_http�cached_version�
login_info�gpgcheckr )3�_read_channels_filerr�isfilerr�LEAPP_IN_PROGRESS�up2date_clientr*ZinitUp2dateConfigr�get_ssl_ca_certZup2dateAuthZgetLoginInforr-r	�RhnServerExceptionr,�error�COMMUNICATION_ERROR�RHN_DISABLED�NOT_REGISTERED_ERROR�_write_channels_fileZ
rhnChannelZgetChannelDetailsZCommunicationErrorZNoChannelsError�NOT_SUBSCRIBED_ERROR�CHANNELS_DISABLEDZNoSystemIdError�NO_SYSTEM_ID_ERROR�USE_RHNREGISTERr�info�UPDATES_FROM_SPACEWALK�dictrr�repos�getrrrrr�
SpacewalkRepor5r �addr!)r#Z
networkingZenabled_channelsr1r2Z	proxy_urlr4Zcached_channelsZclnreg_tried�eZsvrChannels�channelrHZ
channel_idZchannel_dictZcached_channelr3rr$r%r&�repor(r(r)r"as�











zSpacewalk.activate_channelscCs�|jjsdS|jsdS|jddkr8|j�}tjj|�ytjj|jj	d�Wn4t
jk
r�}ztj
dtt|�WYdd}~XnXdS)z, Update system's profile after transaction. NZwriteChangesToLog�)r-z%s
%s
%s)rr rr�_make_package_deltar9ZrhnPackageInfoZlogDeltaPackagesZupdatePackageProfiler-r	r;rr<r=�PROFILE_NOT_SENT)r#�deltarLr(r(r)�transaction�szSpacewalk.transactioncCs�y.t|jd��}|j�}tj|�}|SQRXWnXttfk
rb}z|jtjkrR�WYdd}~Xn&tj	j
k
r�}zWYdd}~XnXiS)N�r)�openr�read�json�loads�FileNotFoundError�IOError�errno�ENOENT�decoderZJSONDecodeError)r#�
channels_fileZcontentZchannelsrLr(r(r)r6�s
zSpacewalk._read_channels_filecCsfy,t|jd��}tj||dd�WdQRXWn4ttfk
r`}z|jtjkrP�WYdd}~XnXdS)N�w�)�indent)rUrrW�dumprYrZr[r\)r#�varr^rLr(r(r)r@�szSpacewalk._write_channels_filecCs.dd�|jjjD�dd�|jjjD�d�}|S)NcSs$g|]}|j|j|j|j|jf�qSr()�namer.�release�epoch�arch)�.0�pr(r(r)�
<listcomp>�sz1Spacewalk._make_package_delta.<locals>.<listcomp>cSs$g|]}|j|j|j|j|jf�qSr()rdr.rerfrg)rhrir(r(r)rj�s)ZaddedZremoved)rrSZinstall_setZ
remove_set)r#rRr(r(r)rP�s
zSpacewalk._make_package_delta)T)
�__name__�
__module__�__qualname__rdr
r*r,r"rSr6r@rP�
__classcell__r(r()r'r)r
7s
Yr
csDeZdZdZdddddgZ�fdd�Zd	d
�Zd�fdd
�	Z�ZS)rJzB
    Repository object for Spacewalk. Uses up2date libraries.
    zX-RHN-Server-IdzX-RHN-Auth-User-Idz
X-RHN-AuthzX-RHN-Auth-Server-TimezX-RHN-Auth-Expire-OffsetcsTtt��jt|d�|jd��t|d��_�fdd�|dD��_|jd��_|jd��_yt	|d	��_
Wn<tk
r�}z tj
ttjj|��g�_
WYdd}~XnX|d
|jd�kr�d�_|jd
��_d�_d�_d�_d�_|jd��_|jd��_|jd��_|jd��r$�j�n�j�t�d��rP�j�}|�rP�j|�dS)Nr/rrdcsg|]}|d�j�qS)z	/GET-REQ/)�id)rh�url)r#r(r)rj�sz*SpacewalkRepo.__init__.<locals>.<listcomp>rpr1r0Zgpg_key_urlr.r3rOr4rr-r5r2r �set_http_headers) rrJr
rrIrdZbaseurlr1r0�get_gpg_key_urlsZgpgkey�InvalidGpgKeyLocationrr�GPG_KEY_REJECTEDrZi18nZucdZmetadata_expirer4Z	keepaliveZ	bandwidthZretriesZthrottler-r5r2�enable�disable�hasattr�create_http_headersrq)r#rMZoptsrL�http_headers)r')r#r)r
�s8
zSpacewalkRepo.__init__cCs�g}|js|Sxb|jD]X}||jkr8t|}tjj|��|j|dkrV|jd|�q|jd||j|f�qW|js�|jd�|S)N�z*%s: 
X-libcurl-Empty-Header-Workaround: *z%s: %sz.X-RHN-Transport-Capability: follow-redirects=3)Nrz)r4�needed_headers�MISSING_HEADERr�ErrorZ	RepoError�appendr2)r#ry�headerr<r(r(r)rxs

z!SpacewalkRepo.create_http_headersTcs0tt|�j||�}|j�}|r,|jtj|�|S)N)rrJ�_handle_new_remoterxZsetopt�librepoZLRO_HTTPHEADER)r#ZdestdirZmirror_setupZhandlery)r'r(r)r�-s
z SpacewalkRepo._handle_new_remote)T)	rkrlrm�__doc__r{r
rxr�rnr(r()r'r)rJ�s%rJcCs*|j�}x|D]}t|�st|��qW|S)a
    Parse the key urls and validate them.

    key_url_string is a space seperated list of gpg key urls that must be
    located in /etc/pkg/rpm-gpg/.
    Return a list of strings containing the key urls.
    Raises InvalidGpgKeyLocation if any of the key urls are invalid.
    )�split�is_valid_gpg_key_urlrs)Zkey_url_stringZkey_urls�key_urlr(r(r)rr9s
	
rrc@seZdZdS)rsN)rkrlrmr(r(r(r)rsHsrscCsP|jd�}t|�dkrdS|\}}|j�dkr2dStjj|�}|jd�sLdSdS)Nz://�F�filez/etc/pki/rpm-gpg/T)r��len�lowerrr�normpath�
startswith)r�Zproto_split�protorr(r(r)r�Ks

r�cCs4d|ko|dst�|d}t|�tkr0|dS|S)NZ	sslCACertr)ZBadSslCaCertConfig�type�list)rZca_certsr(r(r)r:Ysr:)/Z
__future__rrZdnfpluginscorerrrZdnf.exceptionsr[rWr�rrZdnf.conf.configrZup2date_client.up2dateAuthr9Zup2date_client.configZup2date_client.rhnChannelZup2date_client.rhnPackageInfoZrhn.i18nrr	rr>rBr=r?rArCrDrFrtrQr|r8rZPluginr
rNZReporJrr�	Exceptionrsr�r:r(r(r(r)�<module>sJ4Oconfig_manager.py000064400000025205150402642250010061 0ustar00#
# Copyright (C) 2015  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger, P_

import dnf
import dnf.cli
import dnf.pycomp
import dnf.util
import fnmatch
import hashlib
import os
import re
import shutil


@dnf.plugin.register_command
class ConfigManagerCommand(dnf.cli.Command):
    """The ``config-manager`` subcommand.

    Dumps or edits [main] and per-repository configuration, enables or
    disables repositories, and adds new repositories either from a
    .repo file/url or from a plain baseurl.
    """

    aliases = ['config-manager']
    summary = _('manage {prog} configuration options and repositories').format(
        prog=dnf.util.MAIN_PROG)

    @staticmethod
    def set_argparser(parser):
        """Register the command's CLI arguments."""
        parser.add_argument(
            'crepo', nargs='*', metavar='repo',
            help=_('repo to modify'))
        parser.add_argument(
            '--save', default=False, action='store_true',
            help=_('save the current options (useful with --setopt)'))
        parser.add_argument(
            '--add-repo', default=[], action='append', metavar='URL',
            help=_('add (and enable) the repo from the specified file or url'))
        parser.add_argument(
            '--dump', default=False, action='store_true',
            help=_('print current configuration values to stdout'))
        parser.add_argument(
            '--dump-variables', default=False, action='store_true',
            help=_('print variable values to stdout'))
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument("--set-enabled", default=False,
                                  dest="set_enabled", action="store_true",
                                  help=_("enable repos (automatically saves)"))
        enable_group.add_argument("--set-disabled", default=False,
                                  dest="set_disabled", action="store_true",
                                  help=_("disable repos (automatically saves)"))

    def configure(self):
        """Validate the option combination and set up cli demands."""
        # setup sack and populate it with enabled repos
        demands = self.cli.demands
        demands.available_repos = True

        # at least one action option is mandatory
        if (not (self.opts.add_repo != [] or
                 self.opts.save or
                 self.opts.dump or
                 self.opts.dump_variables or
                 self.opts.set_disabled or
                 self.opts.set_enabled)):
            self.cli.optparser.error(_("one of the following arguments is required: {}")
                                     .format(' '.join([
                                         "--save", "--add-repo",
                                         "--dump", "--dump-variables",
                                         "--set-enabled", "--enable",
                                         "--set-disabled", "--disable"])))

        # warn with hint if --enablerepo or --disablerepo argument was passed
        if self.opts.repos_ed != []:
            logger.warning(_("Warning: --enablerepo/--disablerepo arguments have no meaning"
                             "with config manager. Use --set-enabled/--set-disabled instead."))

        # any change to configuration files requires root privileges
        if (self.opts.save or self.opts.set_enabled or
                self.opts.set_disabled or self.opts.add_repo):
            demands.root_user = True

        # sanitize commas https://bugzilla.redhat.com/show_bug.cgi?id=1830530
        # "repo1,repo2" and "repo1, repo2" both end up as ['repo1', 'repo2']
        comma_split = [x.split(',') for x in self.opts.crepo if x != ',']
        # flatten sublists
        self.opts.crepo = [item for sublist in comma_split
                           for item in sublist if item != '']

    def run(self):
        """Execute the util action here."""
        if self.opts.add_repo:
            self.add_repo()
        else:
            self.modify_repo()

    def modify_repo(self):
        """ process --set-enabled, --set-disabled and --setopt options """

        matching_repos = []         # list of matched repositories
        not_matching_repos_id = set()  # IDs of not matched repositories

        def match_repos(key, add_matching_repos):
            # Resolve a repo id or glob.  BUGFIX: record the unmatched
            # *key* parameter, not the enclosing loop variable `name`
            # (they happened to coincide at every call site, but relying
            # on that accident was fragile).
            matching = self.base.repos.get_matching(key)
            if not matching:
                not_matching_repos_id.add(key)
            elif add_matching_repos:
                matching_repos.extend(matching)

        if self.opts.crepo:
            for name in self.opts.crepo:
                match_repos(name, True)
            # --setopt=<repo>.<option>=<value> keys must match some repo too
            if hasattr(self.opts, 'repo_setopts'):
                for name in self.opts.repo_setopts.keys():
                    match_repos(name, False)
        else:
            if hasattr(self.opts, 'repo_setopts'):
                for name in self.opts.repo_setopts.keys():
                    match_repos(name, True)

        if not_matching_repos_id:
            raise dnf.exceptions.Error(_("No matching repo to modify: %s.")
                                       % ', '.join(not_matching_repos_id))

        sbc = self.base.conf
        modify = {}
        if hasattr(self.opts, 'main_setopts') and self.opts.main_setopts:
            modify = self.opts.main_setopts
        if self.opts.dump_variables:
            for name, val in self.base.conf.substitutions.items():
                print("%s = %s" % (name, val))
        if not self.opts.crepo or 'main' in self.opts.crepo:
            if self.opts.save and modify:
                # modify [main] in global configuration file
                self.base.conf.write_raw_configfile(self.base.conf.config_file_path, 'main',
                                                    sbc.substitutions, modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('main'))
                print(self.base.conf.dump())

        if not matching_repos:
            return

        # --set-enabled/--set-disabled imply persisting the change
        if self.opts.set_enabled or self.opts.set_disabled:
            self.opts.save = True

        for repo in sorted(matching_repos):
            repo_modify = {}
            if self.opts.set_enabled:
                repo_modify['enabled'] = "1"
            elif self.opts.set_disabled:
                repo_modify['enabled'] = "0"
            if hasattr(self.opts, 'repo_setopts'):
                for repoid, setopts in self.opts.repo_setopts.items():
                    if fnmatch.fnmatch(repo.id, repoid):
                        repo_modify.update(setopts)
            if self.opts.save and repo_modify:
                self.base.conf.write_raw_configfile(repo.repofile, repo.id,
                                                    sbc.substitutions, repo_modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('repo: ' + repo.id))
                print(repo.dump())

    def add_repo(self):
        """ process --add-repo option """

        # Get the reposdir location
        myrepodir = self.base.conf.get_reposdir
        errors_count = 0

        for url in self.opts.add_repo:
            # bare filesystem paths are converted to file:// urls
            if dnf.pycomp.urlparse.urlparse(url).scheme == '':
                url = 'file://' + os.path.abspath(url)
            logger.info(_('Adding repo from: %s'), url)
            if url.endswith('.repo'):
                # .repo file - download, put into reposdir and enable it
                destname = os.path.basename(url)
                destname = os.path.join(myrepodir, destname)
                try:
                    f = self.base.urlopen(url, mode='w+')
                    shutil.copy2(f.name, destname)
                    os.chmod(destname, 0o644)
                    f.close()
                except IOError as e:
                    errors_count += 1
                    logger.error(e)
                    continue
            else:
                # just url to repo, create .repo file on our own
                repoid = sanitize_url_to_fs(url)
                reponame = 'created by {} config-manager from {}'.format(dnf.util.MAIN_PROG, url)
                destname = os.path.join(myrepodir, "%s.repo" % repoid)
                content = "[%s]\nname=%s\nbaseurl=%s\nenabled=1\n" % \
                                                (repoid, reponame, url)
                if not save_to_file(destname, content):
                    # BUGFIX: count write failures too so the command ends
                    # with an error, exactly like failed downloads above
                    errors_count += 1
                    continue
        if errors_count:
            raise dnf.exceptions.Error(P_("Configuration of repo failed",
                                          "Configuration of repos failed", errors_count))


def save_to_file(filename, content):
    """Write *content* into *filename* and chmod it to 0644.

    Returns True on success; on IOError/OSError logs the failure and
    returns False.
    """
    success = True
    try:
        with open(filename, 'w+') as repo_file:
            dnf.pycomp.write_to_file(repo_file, content)
            os.chmod(filename, 0o644)
    except (IOError, OSError) as err:
        logger.error(_('Could not save repo to repofile %s: %s'),
                     filename, err)
        success = False
    return success

# Regular expressions used to turn a repository URL into a name that is
# safe both as a filesystem entry and as a repo id.
RE_SCHEME = re.compile(r'^\w+:/*(\w+:|www\.)?')
RE_SLASH = re.compile(r'[?/:&#|~\*\[\]\(\)\'\\]+')
RE_BEGIN = re.compile(r'^[,.]*')
RE_FINAL = re.compile(r'[,.]*$')

def sanitize_url_to_fs(url):
    """Return a filename suitable for the filesystem and for repo id

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """
    try:
        if RE_SCHEME.match(url):
            # best-effort IDNA transcoding; any failure is ignored below
            if dnf.pycomp.PY3:
                url = url.encode('idna').decode('utf-8')
            else:
                if isinstance(url, str):
                    url = url.decode('utf-8').encode('idna')
                else:
                    url = url.encode('idna')
                if isinstance(url, unicode):
                    url = url.encode('utf-8')
    except (UnicodeDecodeError, UnicodeEncodeError, UnicodeError, TypeError):
        pass

    # strip the scheme, squash separators into '_', trim leading/trailing
    # commas and dots - applied in this exact order
    for pattern, replacement in ((RE_SCHEME, ""), (RE_SLASH, "_"),
                                 (RE_BEGIN, ""), (RE_FINAL, "")):
        url = pattern.sub(replacement, url)

    # keep the name under 250 characters: retain a prefix (cut at the last
    # '_' before position 185) and append a sha256 digest of the dropped tail
    if len(url) > 250:
        keep = 185 - len(url[:185].split('_')[-1])
        digest = hashlib.sha256()
        digest.update(url[keep:].encode('utf-8'))
        url = url[:keep] + '_' + digest.hexdigest()
    # finally drop every character that is not explicitly allowed
    allowed_regex = "[^abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.:-]"
    return re.sub(allowed_regex, '', url)
repoclosure.py000064400000015233150402642250007464 0ustar00# repoclosure.py
# DNF plugin adding a command to display a list of unresolved dependencies
# for repositories.
#
# Copyright (C) 2015 Igor Gnatenko
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _

import dnf.cli


class RepoClosure(dnf.Plugin):
    """Plugin shim that registers the ``repoclosure`` subcommand."""

    name = "repoclosure"

    def __init__(self, base, cli):
        super(RepoClosure, self).__init__(base, cli)
        # cli is None when dnf runs without a command line (API use)
        if cli is not None:
            cli.register_command(RepoClosureCommand)


class RepoClosureCommand(dnf.cli.Command):
    """Check that every dependency in the checked repositories can be
    satisfied from the set of available packages, and report the ones
    that cannot."""

    aliases = ("repoclosure",)
    summary = _("Display a list of unresolved dependencies for repositories")

    def configure(self):
        """Activate the sack; with --repo, keep only the repos named by
        --repo/--check enabled."""
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True
        if self.opts.repo:
            for repo in self.base.repos.all():
                if repo.id not in self.opts.repo and repo.id not in self.opts.check:
                    repo.disable()
                else:
                    repo.enable()

    def run(self):
        """Print every package with unresolved deps; raise an Error (and
        thus exit non-zero) when any were found."""
        if self.opts.arches:
            unresolved = self._get_unresolved(self.opts.arches)
        else:
            unresolved = self._get_unresolved()
        for pkg in sorted(unresolved.keys()):
            print("package: {} from {}".format(str(pkg), pkg.reponame))
            print("  unresolved deps:")
            for dep in unresolved[pkg]:
                print("    {}".format(dep))
        if len(unresolved) > 0:
            msg = _("Repoclosure ended with unresolved dependencies.")
            raise dnf.exceptions.Error(msg)

    def _get_unresolved(self, arch=None):
        """Return a dict {package: set(unresolved requirements)}.

        arch: optional list of architectures (from --arch); when given,
        only packages of those arches are checked.
        """
        unresolved = {}
        deps = set()

        # We have two sets of packages, available and to_check:
        # * available is the set of packages used to satisfy dependencies
        # * to_check is the set of packages we are checking the dependencies of
        #
        # to_check can be a subset of available if the --arch, --best, --check,
        # --newest, or --pkg options are used
        #
        # --arch:   only packages matching arch are checked
        # --best:   available only contains the latest packages per arch across all repos
        # --check:  only check packages in the specified repo(s)
        # --newest: only consider the latest versions of a package from each repo
        # --pkg:    only check the specified packages
        #
        # Relationship of --best and --newest:
        #
        # Pkg Set   | Neither |  --best             | --newest        | --best and --newest |
        # available | all     | latest in all repos | latest per repo | latest in all repos |
        # to_check  | all     | all                 | latest per repo | latest per repo     |

        if self.opts.newest:
            available = self.base.sack.query().filter(empty=True)
            to_check = self.base.sack.query().filter(empty=True)
            for repo in self.base.repos.iter_enabled():
                available = \
                    available.union(self.base.sack.query().filter(reponame=repo.id).latest())
                to_check = \
                    to_check.union(self.base.sack.query().filter(reponame=repo.id).latest())
        else:
            available = self.base.sack.query().available()
            to_check = self.base.sack.query().available()

        if self.opts.pkglist:
            # restrict to_check to the --pkg arguments; every --pkg that
            # matches nothing is collected and reported as one error
            pkglist_q = self.base.sack.query().filter(empty=True)
            errors = []
            for pkg in self.opts.pkglist:
                subj = dnf.subject.Subject(pkg)
                pkg_q = to_check.intersection(
                    subj.get_best_query(self.base.sack, with_nevra=True,
                                        with_provides=False, with_filenames=False))
                if pkg_q:
                    pkglist_q = pkglist_q.union(pkg_q)
                else:
                    errors.append(pkg)
            if errors:
                raise dnf.exceptions.Error(
                    _('no package matched: %s') % ', '.join(errors))
            to_check = pkglist_q

        if self.opts.check:
            to_check.filterm(reponame=self.opts.check)

        if arch is not None:
            to_check.filterm(arch=arch)

        if self.base.conf.best:
            available.filterm(latest_per_arch=True)

        available.apply()
        to_check.apply()

        # collect every requirement of every checked package
        for pkg in to_check:
            unresolved[pkg] = set()
            for req in pkg.requires:
                reqname = str(req)
                # XXX: https://bugzilla.redhat.com/show_bug.cgi?id=1186721
                if reqname.startswith("solvable:") or \
                        reqname.startswith("rpmlib("):
                    continue
                deps.add(req)
                unresolved[pkg].add(req)

        # keep only the requirements no available package provides
        unresolved_deps = set(x for x in deps if not available.filter(provides=x))

        # per-package view restricted to the globally unresolved deps;
        # packages with nothing unresolved are dropped from the result
        unresolved_transition = {k: set(x for x in v if x in unresolved_deps)
                                 for k, v in unresolved.items()}
        return {k: v for k, v in unresolved_transition.items() if v}

    @staticmethod
    def set_argparser(parser):
        """Register repoclosure-specific options."""
        parser.add_argument("--arch", default=[], action="append", dest='arches',
                            help=_("check packages of the given archs, can be "
                                   "specified multiple times"))
        parser.add_argument("--check", default=[], action="append",
                            help=_("Specify repositories to check"))
        parser.add_argument("-n", "--newest", action="store_true",
                            help=_("Check only the newest packages in the "
                                   "repos"))
        parser.add_argument("--pkg", default=[], action="append",
                            help=_("Check closure for this package only"),
                            dest="pkglist")
copr.py000064400000073132150402642250006067 0ustar00# supplies the 'copr' command.
#
# Copyright (C) 2014-2015  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import print_function

import glob
import itertools
import json
import os
import re
import shutil
import stat
import sys
import base64
import json

from dnfpluginscore import _, logger
import dnf
from dnf.pycomp import PY3
from dnf.i18n import ucd
import rpm

# Attempt importing the linux_distribution function from distro
# If that fails, attempt to import the deprecated implementation
# from the platform module, and finally fall back to parsing
# /etc/os-release ourselves.
try:
    from distro import name, version, codename, os_release_attr

    # Re-implement distro.linux_distribution() to avoid a deprecation warning
    def linux_distribution():
        return (name(), version(), codename())
except ImportError:
    # without the distro module there is no os-release attribute lookup
    def os_release_attr(_):
        return ""
    try:
        from platform import linux_distribution
    except ImportError:
        # Simple fallback for distributions that lack an implementation
        def linux_distribution():
            """Return (NAME, VERSION_ID, None) parsed from /etc/os-release."""
            with open('/etc/os-release') as os_release_file:
                os_release_data = {}
                for line in os_release_file:
                    try:
                        # BUGFIX: split on the first '=' only, so values
                        # containing '=' no longer raise and get dropped
                        os_release_key, os_release_value = line.rstrip().split('=', 1)
                        os_release_data[os_release_key] = os_release_value.strip('"')
                    except ValueError:
                        # Skip empty lines and everything that is not a simple
                        # variable assignment
                        pass
                return (os_release_data['NAME'], os_release_data['VERSION_ID'], None)

# base name of this plugin's configuration: <pluginconfpath>/copr.conf and
# the <pluginconfpath>/copr.d/ drop-in directory (see CoprCommand.configure)
PLUGIN_CONF = 'copr'

# interactive yes/no answer sets; note that '' is grouped with NO
YES = set([_('yes'), _('y')])
NO = set([_('no'), _('n'), ''])

# Python 2/3 compatibility imports for config parsing and HTTP access
if PY3:
    from configparser import ConfigParser, NoOptionError, NoSectionError
    from urllib.request import urlopen, HTTPError, URLError
else:
    from ConfigParser import ConfigParser, NoOptionError, NoSectionError
    from urllib2 import urlopen, HTTPError, URLError

@dnf.plugin.register_command
class CoprCommand(dnf.cli.Command):
    """ Copr plugin for DNF """

    # [distribution, releasever] read from copr.conf in configure();
    # stays None when no default config file is present
    chroot_config = None

    # built-in fallback hub (Fedora Copr) used when no --hub option,
    # hub/user/project argument, or config-file hub applies
    default_hostname = "copr.fedorainfracloud.org"
    default_hub = "fedora"
    default_protocol = "https"
    default_port = 443
    default_url = default_protocol + "://" + default_hostname
    aliases = ("copr",)
    summary = _("Interact with Copr repositories.")
    # _ask_user_no_raise() prints a separating blank line before every
    # warning except the first one
    first_warning = True
    usage = _("""
  enable name/project [chroot]
  disable name/project
  remove name/project
  list --installed/enabled/disabled
  list --available-by-user=NAME
  search project

  Examples:
  copr enable rhscl/perl516 epel-6-x86_64
  copr enable ignatenkobrain/ocltoys
  copr disable rhscl/perl516
  copr remove rhscl/perl516
  copr list --enabled
  copr list --available-by-user=ignatenkobrain
  copr search tests
    """)

    @staticmethod
    def set_argparser(parser):
        """Declare the copr subcommand, listing filters and hub option."""
        parser.add_argument(
            'subcommand', nargs=1,
            choices=['help', 'enable', 'disable', 'remove', 'list', 'search'])

        # the four `list` filters are mutually exclusive
        listing = parser.add_mutually_exclusive_group()
        listing.add_argument(
            '--installed', action='store_true',
            help=_('List all installed Copr repositories (default)'))
        listing.add_argument(
            '--enabled', action='store_true',
            help=_('List enabled Copr repositories'))
        listing.add_argument(
            '--disabled', action='store_true',
            help=_('List disabled Copr repositories'))
        listing.add_argument(
            '--available-by-user', metavar='NAME',
            help=_('List available Copr repositories by user NAME'))

        parser.add_argument(
            '--hub', help=_('Specify an instance of Copr to work with'))
        parser.add_argument('arg', nargs='*')

    def configure(self):
        """Resolve which Copr hub this invocation talks to.

        Sets self.copr_hostname and self.copr_url from, in priority
        order: the hub/user/project argument format, the --hub option,
        hub sections found in the plugin config files, or the built-in
        defaults.  Also loads the chroot override from copr.conf.
        """
        if self.cli.command.opts.command != "copr":
            return
        copr_hub = None
        copr_plugin_config = ConfigParser()
        config_files = []
        config_path = self.base.conf.pluginconfpath[0]

        default_config_file = os.path.join(config_path, PLUGIN_CONF + ".conf")
        if os.path.isfile(default_config_file):
            config_files.append(default_config_file)

            # 'distribution' + 'releasever' from [main] override chroot
            # guessing; both must be present, otherwise [False, False]
            copr_plugin_config.read(default_config_file)
            if copr_plugin_config.has_option('main', 'distribution') and\
                    copr_plugin_config.has_option('main', 'releasever'):
                distribution = copr_plugin_config.get('main', 'distribution')
                releasever = copr_plugin_config.get('main', 'releasever')
                self.chroot_config = [distribution, releasever]
            else:
                self.chroot_config = [False, False]

        # collect drop-in config files: <pluginconfpath>/copr.d/*.conf
        # NOTE(review): assumes the copr.d directory exists; os.listdir
        # raises FileNotFoundError otherwise - confirm with packaging
        for filename in os.listdir(os.path.join(config_path, PLUGIN_CONF + ".d")):
            if filename.endswith('.conf'):
                config_file = os.path.join(config_path, PLUGIN_CONF + ".d", filename)
                config_files.append(config_file)

        project = []
        if len(self.opts.arg):
            project = self.opts.arg[0].split("/")

        # hub may be given either via --hub or as hub/user/project, not both
        if len(project) == 3 and self.opts.hub:
            logger.critical(
                _('Error: ') +
                _('specify Copr hub either with `--hub` or using '
                  '`copr_hub/copr_username/copr_projectname` format')
            )
            raise dnf.cli.CliError(_('multiple hubs specified'))

        # Copr hub was not specified, using default hub `fedora`
        elif not self.opts.hub and len(project) != 3:
            self.copr_hostname = self.default_hostname
            self.copr_url = self.default_url

        # Copr hub specified with hub/user/project format
        elif len(project) == 3:
            copr_hub = project[0]

        else:
            copr_hub = self.opts.hub

        # Try to find hub in a config file; later (alphabetically larger)
        # files take precedence because of the reverse-sorted read order
        if config_files and copr_hub:
            self.copr_url = None
            copr_plugin_config.read(sorted(config_files, reverse=True))
            hostname = self._read_config_item(copr_plugin_config, copr_hub, 'hostname', None)

            if hostname:
                protocol = self._read_config_item(copr_plugin_config, copr_hub, 'protocol',
                                                  self.default_protocol)
                port = self._read_config_item(copr_plugin_config, copr_hub, 'port',
                                              self.default_port)

                self.copr_hostname = hostname
                self.copr_url = protocol + "://" + hostname
                # non-default port becomes part of both url and hostname
                if int(port) != self.default_port:
                    self.copr_url += ":" + port
                    self.copr_hostname += ":" + port

        # no config match: treat the hub argument itself as hostname or url
        # NOTE(review): if config_files is empty while copr_hub is set,
        # self.copr_url was never assigned before this test - possible
        # AttributeError; confirm a config file is always present
        if not self.copr_url:
            if '://' not in copr_hub:
                self.copr_hostname = copr_hub
                self.copr_url = self.default_protocol + "://" + copr_hub
            else:
                self.copr_hostname = copr_hub.split('://', 1)[1]
                self.copr_url = copr_hub

    def _read_config_item(self, config, hub, section, default):
        """Return option *section* of hub *hub* from *config*, or *default*
        when the section or option is missing."""
        try:
            value = config.get(hub, section)
        except (NoOptionError, NoSectionError):
            value = default
        return value

    def _user_warning_before_prompt(self, text):
        """Write *text* (whitespace-stripped, newline-terminated) to stderr."""
        print(text.strip(), file=sys.stderr)

    def run(self):
        """Dispatch the copr subcommand selected on the command line.

        Raises dnf.cli.CliError on malformed arguments and
        dnf.exceptions.Error on unknown subcommands or bad chroots.
        """
        subcommand = self.opts.subcommand[0]

        if subcommand == "help":
            self.cli.optparser.print_help(self)
            return 0
        if subcommand == "list":
            if self.opts.available_by_user:
                self._list_user_projects(self.opts.available_by_user)
                return
            else:
                self._list_installed_repositories(self.base.conf.reposdir[0],
                                                  self.opts.enabled, self.opts.disabled)
                return

        # every remaining subcommand needs a user/project argument
        try:
            project_name = self.opts.arg[0]
        except (ValueError, IndexError):
            logger.critical(
                _('Error: ') +
                _('exactly two additional parameters to '
                  'copr command are required'))
            self.cli.optparser.print_help(self)
            raise dnf.cli.CliError(
                _('exactly two additional parameters to '
                  'copr command are required'))
        # optional second argument: an explicit chroot (dist-version-arch)
        try:
            chroot = self.opts.arg[1]
            if len(self.opts.arg) > 2:
                raise dnf.exceptions.Error(_('Too many arguments.'))
            self.chroot_parts = chroot.split("-")
            if len(self.chroot_parts) < 3:
                raise dnf.exceptions.Error(_('Bad format of optional chroot. The format is '
                                             'distribution-version-architecture.'))
        except IndexError:
            # no chroot given - guess one for the running system
            chroot = self._guess_chroot()
            self.chroot_parts = chroot.split("-")

        # commands without defined copr_username/copr_projectname
        if subcommand == "search":
            self._search(project_name)
            return

        project = project_name.split("/")
        if len(project) not in [2, 3]:
            logger.critical(
                _('Error: ') +
                _('use format `copr_username/copr_projectname` '
                  'to reference copr project'))
            raise dnf.cli.CliError(_('bad copr project format'))
        elif len(project) == 2:
            copr_username = project[0]
            copr_projectname = project[1]
        else:
            # hub/user/project - the hub part was already consumed in
            # configure(), keep only user/project here
            copr_username = project[1]
            copr_projectname = project[2]
            project_name = copr_username + "/" + copr_projectname

        repo_filename = "{0}/_copr:{1}:{2}:{3}.repo".format(
            self.base.conf.get_reposdir, self.copr_hostname,
            self._sanitize_username(copr_username), copr_projectname)
        if subcommand == "enable":
            self._need_root()
            info = _("""
Enabling a Copr repository. Please note that this repository is not part
of the main distribution, and quality may vary.

The Fedora Project does not exercise any power over the contents of
this repository beyond the rules outlined in the Copr FAQ at
<https://docs.pagure.org/copr.copr/user_documentation.html#what-i-can-build-in-copr>,
and packages are not held to any quality or security level.

Please do not file bug reports about these packages in Fedora
Bugzilla. In case of problems, contact the owner of this repository.
""")
            project = '/'.join([self.copr_hostname, copr_username,
                                copr_projectname])
            msg = "Do you really want to enable {0}?".format(project)
            self._ask_user(info, msg)
            self._download_repo(project_name, repo_filename)
            logger.info(_("Repository successfully enabled."))
            self._runtime_deps_warning(copr_username, copr_projectname)
        elif subcommand == "disable":
            self._need_root()
            self._disable_repo(copr_username, copr_projectname)
            logger.info(_("Repository successfully disabled."))
        elif subcommand == "remove":
            self._need_root()
            self._remove_repo(copr_username, copr_projectname)
            logger.info(_("Repository successfully removed."))

        else:
            raise dnf.exceptions.Error(
                _('Unknown subcommand {}.').format(subcommand))

    def _list_repo_file(self, repo_id, repo, enabled_only, disabled_only):
        """Print one line describing a copr-managed repository.

        Skips repos whose repo file does not look copr-managed (or does
        not belong to the selected hub), multilib (:ml) repos, runtime
        dependency (coprdep:) repos, and repos filtered out by
        *enabled_only*/*disabled_only*.  Returns True when the printed
        repo uses the old repo-file format without hub information,
        None when nothing was printed.
        """
        file_name = repo.repofile.split('/')[-1]

        match_new = re.match("_copr:" + self.copr_hostname, file_name)
        match_old = self.copr_url == self.default_url and re.match("_copr_", file_name)
        match_any = re.match("_copr:|^_copr_", file_name)

        if self.opts.hub:
            if not match_new and not match_old:
                return
        elif not match_any:
            return

        if re.match('copr:.*:.*:.*:ml', repo_id):
            # We skip multilib repositories
            return

        if re.match('coprdep:.*', repo_id):
            # Runtime dependencies are not listed.
            return

        enabled = repo.enabled
        if (enabled and disabled_only) or (not enabled and enabled_only):
            return

        old_repo = False
        # repo ID has copr:<hostname>:<user>:<copr_dir> format, while <copr_dir>
        # can contain more colons
        if re.match("copr:", repo_id):
            # BUGFIX: bind the throwaway prefix field to `_unused` instead of
            # `_`, which shadowed the gettext alias for this whole method
            _unused, copr_hostname, copr_owner, copr_dir = repo_id.split(':', 3)
            msg = copr_hostname + '/' + copr_owner + "/" + copr_dir
        # repo ID has <user>-<project> format, try to get hub from file name
        elif re.match("_copr:", file_name):
            copr_name = repo_id.split('-', 1)
            copr_hostname = file_name.rsplit(':', 2)[0].split(':', 1)[1]
            msg = copr_hostname + '/' + copr_name[0] + '/' + copr_name[1]
        # no information about hub, assume the default one
        else:
            copr_name = repo_id.split('-', 1)
            msg = self.default_hostname + '/' + copr_name[0] + '/' + copr_name[1]
            old_repo = True
        if not enabled:
            msg += " (disabled)"
        if old_repo:
            msg += " *"

        print(msg)
        return old_repo

    def _list_installed_repositories(self, directory, enabled_only, disabled_only):
        """Print every installed copr repository; append a legend when any
        of them still uses the old repo-file format."""
        # the list comprehension (not a generator) guarantees that every
        # repo is printed even after the first old-format hit
        results = [self._list_repo_file(repo_id, repo, enabled_only, disabled_only)
                   for repo_id, repo in self.base.repos.items()]
        if any(results):
            print(_("* These coprs have repo file with an old format that contains "
                    "no information about Copr hub - the default one was assumed. "
                    "Re-enable the project to fix this."))

    def _list_user_projects(self, user_name):
        """Print all Copr projects owned by *user_name* via the hub API."""
        # https://copr.fedorainfracloud.org/api_3/project/list?ownername=ignatenkobrain
        url = self.copr_url + "/api_3/project/list?ownername={0}".format(user_name)
        response = self.base.urlopen(url, mode='w+')
        try:
            payload = json.loads(response.read())
        except ValueError:
            raise dnf.exceptions.Error(
                _("Can't parse repositories for username '{}'.")
                .format(user_name))
        self._check_json_output(payload)
        self._print_match_section(_("List of {} coprs").format(user_name))

        for item in payload["items"]:
            heading = "{0}/{1} : ".format(user_name, item["name"])
            description = item["description"] or _("No description given")
            print(self.base.output.fmtKeyValFill(ucd(heading), description))

    def _search(self, query):
        """Full-text search for Copr projects matching *query*; print matches."""
        # e.g. https://copr.fedorainfracloud.org/api_3/project/search?query=tests
        url = self.copr_url + "/api_3/project/search?query={}".format(query)
        res = self.base.urlopen(url, mode='w+')
        try:
            json_parse = json.loads(res.read())
        except ValueError:
            raise dnf.exceptions.Error(_("Can't parse search for '{}'."
                                         ).format(query))
        self._check_json_output(json_parse)
        self._print_match_section(_("Matched: {}").format(query))

        for project in json_parse["items"]:
            header = "{0} : ".format(project["full_name"])
            description = project["description"] or _("No description given.")
            print(self.base.output.fmtKeyValFill(ucd(header), description))

    def _print_match_section(self, text):
        """Print *text* formatted as a dnf output section header."""
        print(self.base.output.fmtSection(text))

    def _ask_user_no_raise(self, info, msg):
        """Show *info* on stderr, then ask the yes/no question *msg*.

        Returns False when prompting is enabled and either ``assumeno`` is
        set or the user declines; True otherwise.  Warnings after the first
        one are preceded by a blank line.
        """
        if not self.first_warning:
            sys.stderr.write("\n")
        self.first_warning = False
        sys.stderr.write("{0}\n".format(info.strip()))

        if not self.base._promptWanted():
            return True
        if self.base.conf.assumeno:
            return False
        if self.base.output.userconfirm(
                msg='\n{} [y/N]: '.format(msg),
                defaultyes_msg='\n{} [Y/n]: '.format(msg)):
            return True
        return False

    def _ask_user(self, info, msg):
        """Like :meth:`_ask_user_no_raise`, but abort on a negative answer."""
        if self._ask_user_no_raise(info, msg):
            return
        raise dnf.exceptions.Error(_('Safe and good answer. Exiting.'))

    @classmethod
    def _need_root(cls):
        """Abort with an error unless the effective user is root."""
        # FIXME this should do dnf itself (BZ#1062889)
        if os.geteuid() == 0:
            return
        raise dnf.exceptions.Error(
            _('This command has to be run under the root user.'))

    def _guess_chroot(self):
        """ Guess which chroot is equivalent to this machine

        Uses ``self.chroot_config`` (a (name, version) pair) when valid,
        otherwise falls back to detecting the running distribution, and
        maps it onto a Copr chroot string like ``fedora-35-x86_64``.
        """
        # FIXME Copr should generate non-specific arch repo
        dist = self.chroot_config
        # Fall back to autodetection when the configured pair is unset or
        # either element is the literal False placeholder.
        if dist is None or (dist[0] is False) or (dist[1] is False):
            dist = linux_distribution()
        # Get distribution architecture
        distarch = self.base.conf.substitutions['basearch']
        # NOTE: `name in dist` tests tuple membership (exact element match),
        # not substring search.
        if any([name in dist for name in ["Fedora", "Fedora Linux"]]):
            if "Rawhide" in dist:
                chroot = ("fedora-rawhide-" + distarch)
            # workaround for enabling repos in Rawhide when VERSION in os-release
            # contains a name other than Rawhide
            elif "rawhide" in os_release_attr("redhat_support_product_version"):
                chroot = ("fedora-rawhide-" + distarch)
            else:
                chroot = ("fedora-{0}-{1}".format(dist[1], distarch))
        elif "Mageia" in dist:
            # Get distribution architecture (Mageia does not use $basearch)
            distarch = rpm.expandMacro("%{distro_arch}")
            # Set the chroot
            if "Cauldron" in dist:
                chroot = ("mageia-cauldron-{}".format(distarch))
            else:
                chroot = ("mageia-{0}-{1}".format(dist[1], distarch))
        elif "openSUSE" in dist:
            # Get distribution architecture (openSUSE does not use $basearch)
            distarch = rpm.expandMacro("%{_target_cpu}")
            # Set the chroot
            if "Tumbleweed" in dist:
                chroot = ("opensuse-tumbleweed-{}".format(distarch))
            else:
                chroot = ("opensuse-leap-{0}-{1}".format(dist[1], distarch))
        else:
            # Anything else is assumed to be EPEL; use the major version only.
            # NOTE(review): arch is hard-coded to x86_64 here -- see FIXME above.
            chroot = ("epel-%s-x86_64" % dist[1].split(".", 1)[0])
        return chroot

    def _download_repo(self, project_name, repo_filename):
        """Download the project's dnf.repo file from the hub to *repo_filename*.

        On HTTP 404 a detailed error (including available chroots, when the
        server provides them via the Copr-Error-Data header) is raised; any
        other HTTP or connection failure also raises dnf.exceptions.Error.
        The server may dictate a different repo file name via the first line
        of the response, in which case that name overrides *repo_filename*.
        """
        short_chroot = '-'.join(self.chroot_parts[:-1])
        arch = self.chroot_parts[-1]
        api_path = "/coprs/{0}/repo/{1}/dnf.repo?arch={2}".format(project_name, short_chroot, arch)

        try:
            response = urlopen(self.copr_url + api_path)
            # Only remove a pre-existing file once the request succeeded.
            if os.path.exists(repo_filename):
                os.remove(repo_filename)
        except HTTPError as e:
            if e.code != 404:
                error_msg = _("Request to {0} failed: {1} - {2}").format(self.copr_url + api_path, e.code, str(e))
                raise dnf.exceptions.Error(error_msg)
            error_msg = _("It wasn't possible to enable this project.\n")
            # The hub encodes structured error details as base64-encoded JSON.
            error_data = e.headers.get("Copr-Error-Data")
            if error_data:
                error_data_decoded = base64.b64decode(error_data).decode('utf-8')
                error_data_decoded = json.loads(error_data_decoded)
                error_msg += _("Repository '{0}' does not exist in project '{1}'.").format(
                    '-'.join(self.chroot_parts), project_name)
                if error_data_decoded.get("available chroots"):
                    error_msg += _("\nAvailable repositories: ") + ', '.join(
                        "'{}'".format(x) for x in error_data_decoded["available chroots"])
                    error_msg += _("\n\nIf you want to enable a non-default repository, use the following command:\n"
                                   "  'dnf copr enable {0} <repository>'\n"
                                   "But note that the installed repo file will likely need a manual "
                                   "modification.").format(project_name)
                raise dnf.exceptions.Error(error_msg)
            else:
                error_msg += _("Project {0} does not exist.").format(project_name)
                raise dnf.exceptions.Error(error_msg)
        except URLError as e:
            # NOTE(review): e.reason may be a plain string for some errors,
            # in which case .strerror would fail -- confirm upstream.
            error_msg = _("Failed to connect to {0}: {1}").format(self.copr_url + api_path, e.reason.strerror)
            raise dnf.exceptions.Error(error_msg)

        # Try to read the first line, and detect the repo_filename from that (override the repo_filename value).
        first_line = response.readline()
        line = first_line.decode("utf-8")
        if re.match(r"\[copr:", line):
            # "[copr:hub:user:project]\n" -> "_copr:hub:user:project.repo"
            repo_filename = os.path.join(self.base.conf.get_reposdir, "_" + line[1:-2] + ".repo")

        # if using default hub, remove possible old repofile
        if self.copr_url == self.default_url:
            # copr:hub:user:project.repo => _copr_user_project.repo
            old_repo_filename = repo_filename.replace("_copr:", "_copr", 1)\
                .replace(self.copr_hostname, "").replace(":", "_", 1).replace(":", "-")\
                .replace("group_", "@")
            if os.path.exists(old_repo_filename):
                os.remove(old_repo_filename)

        # Write the already-consumed first line, then the rest of the body.
        with open(repo_filename, 'wb') as f:
            f.write(first_line)
            for line in response.readlines():
                f.write(line)
        # 0644: world-readable repo file.
        os.chmod(repo_filename, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)

    def _runtime_deps_warning(self, copr_username, copr_projectname):
        """
        In addition to the main copr repo (that has repo ID prefixed with
        `copr:`), the repofile might contain additional repositories that
        serve as runtime dependencies. This method informs the user about
        the additional repos and provides an option to disable them.
        """

        # Re-read repository configuration so the freshly-written repo file
        # is visible to this dnf instance.
        self.base.reset(repos=True)
        self.base.read_all_repos()

        # NOTE(review): assumes the project repo exists at this point;
        # repo would be None otherwise -- confirm with callers.
        repo = self._get_copr_repo(self._sanitize_username(copr_username), copr_projectname)

        # Every section of the repo file that is not the main `copr:`-prefixed
        # repository counts as a runtime dependency repo.
        runtime_deps = []
        for repo_id in repo.cfg.sections():
            if repo_id.startswith("copr:"):
                continue
            runtime_deps.append(repo_id)

        if not runtime_deps:
            return

        info = _(
            "Maintainer of the enabled Copr repository decided to make\n"
            "it dependent on other repositories. Such repositories are\n"
            "usually necessary for successful installation of RPMs from\n"
            "the main Copr repository (they provide runtime dependencies).\n\n"

            "Be aware that the note about quality and bug-reporting\n"
            "above applies here too, Fedora Project doesn't control the\n"
            "content. Please review the list:\n\n"
            "{0}\n\n"
            "These repositories have been enabled automatically."
        )

        # Number the dependency repos 1..N in the displayed list.
        counter = itertools.count(1)
        info = info.format("\n\n".join([
            "{num:2}. [{repoid}]\n    baseurl={baseurl}".format(
                num=next(counter),
                repoid=repoid,
                baseurl=repo.cfg.getValue(repoid, "baseurl"))
            for repoid in runtime_deps
        ]))

        # On decline, persist enabled=0 for each dependency repo section.
        if not self._ask_user_no_raise(info, _("Do you want to keep them enabled?")):
            for dep in runtime_deps:
                self.base.conf.write_raw_configfile(repo.repofile, dep,
                                                    self.base.conf.substitutions,
                                                    {"enabled": "0"})

    def _get_copr_repo(self, copr_username, copr_projectname):
        """Find the dnf repo object for the given Copr project.

        The repo is looked up first under the new ID format
        ``copr:<hostname>:<user>:<project>`` and then under the legacy
        ``<user>-<project>`` format.  For legacy repos the hub hostname is
        extracted from the repo file name (when present) and must match the
        currently configured hub.

        :returns: the repo object, or None when no matching repo exists
        """
        repo_id = "copr:{0}:{1}:{2}".format(self.copr_hostname.rsplit(':', 1)[0],
                                            self._sanitize_username(copr_username),
                                            copr_projectname)
        if repo_id not in self.base.repos:
            # check if there is a repo with old ID format
            # (fixed: the original had a redundant `repo_id = repo_id = ...`)
            repo_id = "{0}-{1}".format(self._sanitize_username(copr_username),
                                       copr_projectname)
            if repo_id in self.base.repos and "_copr" in self.base.repos[repo_id].repofile:
                # file name looks like "_copr:<hostname>:<user>:<project>.repo"
                file_name = self.base.repos[repo_id].repofile.split('/')[-1]
                try:
                    copr_hostname = file_name.rsplit(':', 2)[0].split(':', 1)[1]
                    if copr_hostname != self.copr_hostname:
                        return None
                except IndexError:
                    # old filename format without hostname
                    pass
            else:
                return None

        return self.base.repos[repo_id]

    def _remove_repo(self, copr_username, copr_projectname):
        """Delete the repo file that provides the given Copr project.

        :raises dnf.exceptions.Error: when the repo cannot be found, or the
            repo file cannot be removed (OSError is wrapped).
        """
        # FIXME is it Copr repo ?
        repo = self._get_copr_repo(copr_username, copr_projectname)
        if not repo:
            raise dnf.exceptions.Error(
                _("Failed to remove copr repo {0}/{1}/{2}"
                  .format(self.copr_hostname, copr_username, copr_projectname)))
        try:
            os.remove(repo.repofile)
        except OSError as e:
            raise dnf.exceptions.Error(str(e))

    def _disable_repo(self, copr_username, copr_projectname):
        """Persist enabled=0 for every section of the project's repo file."""
        repo = self._get_copr_repo(copr_username, copr_projectname)
        if repo is None:
            raise dnf.exceptions.Error(
                _("Failed to disable copr repo {}/{}"
                  .format(copr_username, copr_projectname)))

        # disable all repos provided by the repo file
        substitutions = self.base.conf.substitutions
        for section in repo.cfg.sections():
            self.base.conf.write_raw_configfile(
                repo.repofile, section, substitutions, {"enabled": "0"})

    @classmethod
    def _get_data(cls, f):
        """ Wrapper around response from server

        Parse the response body as JSON and return the decoded object.

        :param f: file-like object returned by ``urlopen``
        :raises dnf.cli.CliError: when the body is not valid JSON
        """
        try:
            output = json.loads(f.read())
        except ValueError:
            # BUG FIX: the original instantiated CliError without raising it
            # and silently returned None, making callers crash later with a
            # TypeError when subscripting the None result.
            raise dnf.cli.CliError(_("Unknown response from server."))
        return output

    @classmethod
    def _check_json_output(cls, json_obj):
        """Raise dnf.exceptions.Error when the API reply reports an error."""
        if "error" not in json_obj:
            return
        raise dnf.exceptions.Error("{}".format(json_obj["error"]))

    @classmethod
    def _sanitize_username(cls, copr_username):
        """Translate a leading '@' (Copr group) into the 'group_' prefix."""
        if copr_username[0] != "@":
            return copr_username
        return "group_{}".format(copr_username[1:])


@dnf.plugin.register_command
class PlaygroundCommand(CoprCommand):
    """ Playground plugin for DNF """

    aliases = ("playground",)
    summary = _("Interact with Playground repository.")
    usage = " [enable|disable|upgrade]"

    def _cmd_enable(self, chroot):
        """Install repo files for every Playground project providing *chroot*."""
        self._need_root()
        self._ask_user(
            _("Enabling a Playground repository."),
            _("Do you want to continue?"),
        )
        api_url = "{0}/api/playground/list/".format(
            self.copr_url)
        f = self.base.urlopen(api_url, mode="w+")
        output = self._get_data(f)
        f.close()
        if output["output"] != "ok":
            raise dnf.cli.CliError(_("Unknown response from server."))
        for repo in output["repos"]:
            project_name = "{0}/{1}".format(repo["username"],
                                            repo["coprname"])
            repo_filename = "{}/_playground_{}.repo".format(self.base.conf.get_reposdir, project_name.replace("/", "-"))
            try:
                if chroot not in repo["chroots"]:
                    continue
                # Only install the repo when the project detail confirms it.
                api_url = "{0}/api/coprs/{1}/detail/{2}/".format(
                    self.copr_url, project_name, chroot)
                f = self.base.urlopen(api_url, mode='w+')
                output2 = self._get_data(f)
                f.close()
                if (output2 and ("output" in output2)
                        and (output2["output"] == "ok")):
                    self._download_repo(project_name, repo_filename)
            except dnf.exceptions.Error:
                # likely 404 and that repo does not exist
                pass

    def _cmd_disable(self):
        """Remove all previously installed Playground repo files."""
        self._need_root()
        for repo_filename in glob.glob("{}/_playground_*.repo".format(self.base.conf.get_reposdir)):
            # BUG FIX: CoprCommand._remove_repo() expects
            # (copr_username, copr_projectname), not a file path, so the old
            # call `self._remove_repo(repo_filename)` could never work.
            # Remove the repo file directly instead.
            try:
                os.remove(repo_filename)
            except OSError as e:
                raise dnf.exceptions.Error(str(e))

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('subcommand', nargs=1,
                            choices=['enable', 'disable', 'upgrade'])

    def run(self):
        # Deliberate: the playground feature is switched off upstream, so
        # everything below this raise is currently unreachable.
        raise dnf.exceptions.Error("Playground is temporarily unsupported")
        subcommand = self.opts.subcommand[0]
        chroot = self._guess_chroot()
        if subcommand == "enable":
            self._cmd_enable(chroot)
            logger.info(_("Playground repositories successfully enabled."))
        elif subcommand == "disable":
            self._cmd_disable()
            logger.info(_("Playground repositories successfully disabled."))
        elif subcommand == "upgrade":
            self._cmd_disable()
            self._cmd_enable(chroot)
            logger.info(_("Playground repositories successfully updated."))
        else:
            raise dnf.exceptions.Error(
                _('Unknown subcommand {}.').format(subcommand))
needs_restarting.py000064400000027140150402642250010462 0ustar00# needs_restarting.py
# DNF plugin to check for running binaries in a need of restarting.
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# the mechanism of scanning smaps for opened files and matching them back to
# packages is heavily inspired by the original needs-restarting.py:
# http://yum.baseurl.org/gitweb?p=yum-utils.git;a=blob;f=needs-restarting.py

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from dnfpluginscore import logger, _

import dnf
import dnf.cli
import dbus
import functools
import os
import re
import stat
import time


# For which package updates we should recommend a reboot
# Mostly taken from https://access.redhat.com/solutions/27943
NEED_REBOOT = ['kernel', 'kernel-rt', 'glibc', 'linux-firmware',
               'systemd', 'dbus', 'dbus-broker', 'dbus-daemon',
               'microcode_ctl']

NEED_REBOOT_DEPENDS_ON_DBUS = ['zlib']

def get_options_from_dir(filepath, base):
    """
    Read package names from ``*.conf`` files in the directory *filepath*.

    Every non-empty line of each ``.conf`` file is treated as a package
    name.  Names that match no installed package are reported with a
    warning and dropped from the result.

    :param filepath: directory to scan (may not exist)
    :param base: dnf Base object used to query installed packages
    :returns: set of installed package names listed in the files
    """

    if not os.path.exists(filepath):
        return set()
    options = set()
    for file in os.listdir(filepath):
        # BUG FIX: test the full path; the original checked the bare file
        # name, which os.path.isdir resolved against the current working
        # directory rather than against *filepath*.
        if os.path.isdir(os.path.join(filepath, file)) or not file.endswith('.conf'):
            continue

        with open(os.path.join(filepath, file)) as fp:
            for line in fp:
                # remember the source file for the warning message below
                options.add((line.rstrip(), file))

    packages = set()
    for pkg in base.sack.query().installed().filter(name={x[0] for x in options}):
        packages.add(pkg.name)
    for name, file in {x for x in options if x[0] not in packages}:
        logger.warning(
            _('No installed package found for package name "{pkg}" '
                'specified in needs-restarting file "{file}".'.format(pkg=name, file=file)))
    return packages


def list_opened_files(uid):
    """Yield an OpenedFile for every file mapped by every scanned process.

    When *uid* is not None, only processes owned by that uid are scanned.
    Processes whose smaps cannot be read (vanished, or permission denied)
    are skipped with a warning.
    """
    for pid, smaps_path in list_smaps():
        try:
            if uid is not None and uid != owner_uid(smaps_path):
                continue
            with open(smaps_path, 'r', errors='replace') as smaps_file:
                smaps_lines = smaps_file.readlines()
        except EnvironmentError:
            logger.warning("Failed to read PID %d's smaps.", pid)
            continue

        for entry in smaps_lines:
            opened = smap2opened_file(pid, entry)
            if opened is None:
                continue
            yield opened


def list_smaps():
    """Yield (pid, smaps_path) for every numeric entry under /proc."""
    for entry in os.listdir('/proc'):
        try:
            pid = int(entry)
        except ValueError:
            # not a process directory
            continue
        yield (pid, '/proc/%d/smaps' % pid)


def memoize(func):
    """Wrap single-argument *func* with an unbounded result cache.

    The cache keys on the argument, so callers must pass hashable values.
    """
    missing = object()
    results = {}

    def wrapper(param):
        cached = results.get(param, missing)
        if cached is missing:
            cached = func(param)
            results[param] = cached
        return cached
    return wrapper


def owner_uid(fname):
    """Return the uid that owns the file *fname*."""
    return os.stat(fname).st_uid


def owning_package(sack, fname):
    """Return the first package in *sack* that owns file *fname*, or None."""
    matches = sack.query().filter(file=fname).run()
    return matches[0] if matches else None


def print_cmd(pid):
    """Print '<pid> : <command line>' for the given process."""
    with open('/proc/%d/cmdline' % pid) as cmdline_file:
        raw = dnf.i18n.ucd(cmdline_file.read())
    # argv entries in /proc/<pid>/cmdline are NUL-separated
    print('%d : %s' % (pid, ' '.join(raw.split('\000'))))


def get_service_dbus(pid):
    """Return the name of the systemd .service unit that owns *pid*, or None.

    Queries the systemd manager over the D-Bus system bus; non-service
    units (scopes, slices, ...) and processes without a unit yield None.
    """
    bus = dbus.SystemBus()
    systemd_manager_object = bus.get_object(
        'org.freedesktop.systemd1',
        '/org/freedesktop/systemd1'
    )
    systemd_manager_interface = dbus.Interface(
        systemd_manager_object,
        'org.freedesktop.systemd1.Manager'
    )
    service_proxy = None
    try:
        # GetUnitByPID returns the object path of the unit containing pid.
        service_proxy = bus.get_object(
            'org.freedesktop.systemd1',
            systemd_manager_interface.GetUnitByPID(pid)
        )
    except dbus.DBusException as e:
        # There is no unit for the pid. Usually error is 'NoUnitForPid'.
        # Considering what we do at the bottom (just return if not service)
        # Then there's really no reason to exit here on that exception.
        # Log what's happened then move on.
        msg = str(e)
        logger.warning("Failed to get systemd unit for PID {}: {}".format(pid, msg))
        return
    service_properties = dbus.Interface(
        service_proxy, dbus_interface="org.freedesktop.DBus.Properties")
    # 'Id' is the unit's primary name, e.g. "sshd.service".
    name = service_properties.Get(
        "org.freedesktop.systemd1.Unit",
        'Id'
    )
    if name.endswith(".service"):
        return name
    return

def smap2opened_file(pid, line):
    """Parse one smaps line into an OpenedFile, or None.

    Lines that carry no path, and mappings on device ``00:`` (non-regular
    files), are ignored.  A ``' (deleted)'`` suffix marks the file as
    deleted.
    """
    slash = line.find('/')
    if slash < 0 or line.find('00:') >= 0:
        # no path in this line, or not a regular file
        return None
    path = line[slash:].strip()
    marker = path.rfind(' (deleted)')
    if marker >= 0:
        return OpenedFile(pid, path[:marker], True)
    return OpenedFile(pid, path, False)


class OpenedFile(object):
    """A file mapped by a running process, as parsed from /proc smaps."""

    # RPM renames a file it is about to delete to "<name>;<hex id>" where
    # the suffix is at least 8 hexadecimal characters.
    RE_TRANSACTION_FILE = re.compile('^(.+);[0-9A-Fa-f]{8,}$')

    def __init__(self, pid, name, deleted):
        # True when the mapping carried the ' (deleted)' marker
        self.deleted = deleted
        self.name = name
        self.pid = pid

    @property
    def presumed_name(self):
        """Calculate the name of the file pre-transaction.

        A file deleted during an RPM transaction (typically because an
        upgrade replaced it) is first renamed to the same path with a
        hexadecimal suffix; strip that suffix to recover the original name.
        """
        if not self.deleted:
            return self.name
        match = self.RE_TRANSACTION_FILE.match(self.name)
        return match.group(1) if match else self.name


class ProcessStart(object):
    """Compute the wall-clock start time of processes from /proc data."""

    def __init__(self):
        self.boot_time = self.get_boot_time()
        self.sc_clk_tck = self.get_sc_clk_tck()

    @staticmethod
    def get_boot_time():
        """
        We have two sources from which to derive the boot time. These values vary
        depending on containerization, existence of a Real Time Clock, etc.
        For our purposes we want the latest derived value.
        - st_mtime of /proc/1
             Reflects the time the first process was run after booting
             This works for all known cases except machines without
             a RTC - they awake at the start of the epoch.
        - /proc/uptime
             Seconds field of /proc/uptime subtracted from the current time
             Works for machines without RTC iff the current time is reasonably correct.
             Does not work on containers which share their kernel with the
             host - there the host kernel uptime is returned
        """
        mtime_based = int(os.stat('/proc/1').st_mtime)
        if not os.path.isfile('/proc/uptime'):
            return mtime_based
        with open('/proc/uptime', 'rb') as f:
            uptime_secs = f.readline().strip().split()[0].strip()
            uptime_based = int(time.time() - float(uptime_secs))
            return max(mtime_based, uptime_based)

    @staticmethod
    def get_sc_clk_tck():
        """Clock ticks per second, the unit used by /proc/<pid>/stat."""
        return os.sysconf(os.sysconf_names['SC_CLK_TCK'])

    def __call__(self, pid):
        """Return the approximate start time (epoch seconds) of *pid*."""
        with open('/proc/%d/stat' % pid) as stat_file:
            fields = stat_file.read().strip().split()
        # field 22 (index 21) is starttime: clock ticks after boot
        return self.boot_time + int(fields[21]) // self.sc_clk_tck


@dnf.plugin.register_command
class NeedsRestartingCommand(dnf.cli.Command):
    """Report processes using files updated since they started, and whether
    a reboot is advisable after core package updates."""

    aliases = ('needs-restarting',)
    summary = _('determine updated binaries that need restarting')

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('-u', '--useronly', action='store_true',
                            help=_("only consider this user's processes"))
        parser.add_argument('-r', '--reboothint', action='store_true',
                            help=_("only report whether a reboot is required "
                                   "(exit code 1) or not (exit code 0)"))
        parser.add_argument('-s', '--services', action='store_true',
                            help=_("only report affected systemd services"))

    def configure(self):
        # installed-package queries below need the sack loaded
        demands = self.cli.demands
        demands.sack_activation = True

    def run(self):
        process_start = ProcessStart()
        owning_pkg_fn = functools.partial(owning_package, self.base.sack)
        # cache per-file package lookups; many processes map the same files
        owning_pkg_fn = memoize(owning_pkg_fn)

        # merge admin-supplied package names from needs-restarting.d
        opt = get_options_from_dir(os.path.join(
            self.base.conf.installroot,
            "etc/dnf/plugins/needs-restarting.d/"),
            self.base)
        # NOTE(review): this mutates the module-level NEED_REBOOT list, so
        # repeated runs in one process would accumulate entries.
        NEED_REBOOT.extend(opt)
        if self.opts.reboothint:
            need_reboot = set()
            need_reboot_depends_on_dbus = set()
            installed = self.base.sack.query().installed()
            # core packages installed after boot -> reboot recommended
            for pkg in installed.filter(name=NEED_REBOOT):
                if pkg.installtime > process_start.boot_time:
                    need_reboot.add(pkg.name)

            # packages that only matter when a dbus implementation is present
            dbus_installed = installed.filter(name=['dbus', 'dbus-daemon', 'dbus-broker'])
            if len(dbus_installed) != 0:
                for pkg in installed.filter(name=NEED_REBOOT_DEPENDS_ON_DBUS):
                    if pkg.installtime > process_start.boot_time:
                        need_reboot_depends_on_dbus.add(pkg.name)
            if need_reboot or need_reboot_depends_on_dbus:
                print(_('Core libraries or services have been updated '
                        'since boot-up:'))
                for name in sorted(need_reboot):
                    print('  * %s' % name)
                for name in sorted(need_reboot_depends_on_dbus):
                    print('  * %s (dependency of dbus. Recommending reboot of dbus)' % name)
                print()
                print(_('Reboot is required to fully utilize these updates.'))
                print(_('More information:'),
                      'https://access.redhat.com/solutions/27943')
                raise dnf.exceptions.Error()  # Sets exit code 1
            else:
                print(_('No core libraries or services have been updated '
                        'since boot-up.'))
                print(_('Reboot should not be necessary.'))
                return None

        # default mode: find processes mapping files newer than their start
        stale_pids = set()
        uid = os.geteuid() if self.opts.useronly else None
        for ofile in list_opened_files(uid):
            pkg = owning_pkg_fn(ofile.presumed_name)
            if pkg is None:
                continue
            if pkg.installtime > process_start(ofile.pid):
                stale_pids.add(ofile.pid)

        if self.opts.services:
            # map stale pids to their systemd services; drop non-services
            names = set([get_service_dbus(pid) for pid in sorted(stale_pids)])
            for name in names:
                if name is not None:
                    print(name)
            return 0
        for pid in sorted(stale_pids):
            print_cmd(pid)
builddep.py000064400000022202150402642250006704 0ustar00# builddep.py
# Install all the deps needed to build this package.
#
# Copyright (C) 2013-2015  Red Hat, Inc.
# Copyright (C) 2015 Igor Gnatenko
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger

import argparse
import dnf
import dnf.cli
import dnf.exceptions
import dnf.rpm.transaction
import dnf.yum.rpmtrans
import libdnf.repo
import os
import rpm
import shutil
import tempfile


@dnf.plugin.register_command
class BuildDepCommand(dnf.cli.Command):
    """Install the build dependencies of packages.

    Arguments may be spec files, source rpms, or plain package names; for
    plain names the build requires of the matching source package from the
    enabled source repositories are used.
    """

    aliases = ('builddep', 'build-dep')
    msg = "Install build dependencies for package or spec file"
    summary = _(msg)
    usage = _("[PACKAGE|PACKAGE.spec]")

    def __init__(self, cli):
        super(BuildDepCommand, self).__init__(cli)
        # Read-only rpm transaction, used only to read source rpm headers.
        self._rpm_ts = dnf.rpm.transaction.initReadOnlyTransaction()
        # Temporary directories holding downloaded remote arguments;
        # removed when the command object is garbage collected.
        self.tempdirs = []

    def __del__(self):
        for temp_dir in self.tempdirs:
            # ignore_errors: an exception escaping __del__ can never
            # propagate, it would only produce an unraisable warning.
            shutil.rmtree(temp_dir, ignore_errors=True)

    def _download_remote_file(self, pkgspec):
        """
        In case pkgspec is a remote URL, download it to a temporary location
        and use the temporary file instead.

        Returns the local path of the (possibly downloaded) file.
        """
        location = dnf.pycomp.urlparse.urlparse(pkgspec)
        if location[0] in ('file', ''):
            # local argument: just strip any file:// prefix
            return location.path

        downloader = libdnf.repo.Downloader()
        temp_dir = tempfile.mkdtemp(prefix="dnf_builddep_")
        temp_file = os.path.join(temp_dir, os.path.basename(pkgspec))
        self.tempdirs.append(temp_dir)

        # The context manager closes the descriptor even when the download
        # raises; the previous 'except RuntimeError: raise' was a no-op.
        with open(temp_file, "wb+") as temp_fo:
            downloader.downloadURL(self.base.conf._config, pkgspec, temp_fo.fileno())
        return temp_file

    @staticmethod
    def set_argparser(parser):
        def macro_def(arg):
            """Validate a -D/--define argument of the form 'MACRO EXPR'."""
            arglist = arg.split(None, 1) if arg else []
            if len(arglist) < 2:
                msg = _("'%s' is not of the format 'MACRO EXPR'") % arg
                raise argparse.ArgumentTypeError(msg)
            return arglist

        parser.add_argument('packages', nargs='+', metavar='package',
                            help=_('packages with builddeps to install'))
        parser.add_argument('-D', '--define', action='append', default=[],
                            metavar="'MACRO EXPR'", type=macro_def,
                            help=_('define a macro for spec file parsing'))
        parser.add_argument('--skip-unavailable', action='store_true', default=False,
                            help=_('skip build dependencies not available in repositories'))
        ptype = parser.add_mutually_exclusive_group()
        ptype.add_argument('--spec', action='store_true',
                            help=_('treat commandline arguments as spec files'))
        ptype.add_argument('--srpm', action='store_true',
                            help=_('treat commandline arguments as source rpm'))

    def pre_configure(self):
        # keep rpm quiet unless the user explicitly asked for verbosity
        if not self.opts.rpmverbosity:
            self.opts.rpmverbosity = 'error'

    def configure(self):
        demands = self.cli.demands
        demands.available_repos = True
        demands.resolving = True
        demands.root_user = True
        demands.sack_activation = True

        # enable source repos only if needed, i.e. some argument is a plain
        # package name rather than a spec file or a source rpm
        if not (self.opts.spec or self.opts.srpm):
            for pkgspec in self.opts.packages:
                if not pkgspec.endswith(('.src.rpm', '.nosrc.rpm', '.spec')):
                    self.base.repos.enable_source_repos()
                    break

    def run(self):
        rpmlog = dnf.yum.rpmtrans.RPMTransaction(self.base)
        # Push user-supplied macro definitions for spec parsing
        for macro in self.opts.define:
            rpm.addMacro(macro[0], macro[1])

        pkg_errors = False
        for pkgspec in self.opts.packages:
            pkgspec = self._download_remote_file(pkgspec)
            try:
                if self.opts.srpm:
                    self._src_deps(pkgspec)
                elif self.opts.spec:
                    self._spec_deps(pkgspec)
                # '.nosrc.rpm' (with the dot) keeps this consistent with
                # the detection done in configure()
                elif pkgspec.endswith(('.src.rpm', '.nosrc.rpm')):
                    self._src_deps(pkgspec)
                elif pkgspec.endswith('.spec'):
                    self._spec_deps(pkgspec)
                else:
                    self._remote_deps(pkgspec)
            except dnf.exceptions.Error as e:
                for line in rpmlog.messages():
                    logger.error(_("RPM: {}").format(line))
                logger.error(e)
                pkg_errors = True

        # Pop user macros so they don't affect future rpm calls
        for macro in self.opts.define:
            rpm.delMacro(macro[0])

        if pkg_errors:
            raise dnf.exceptions.Error(_("Some packages could not be found."))

    @staticmethod
    def _rpm_dep2reldep_str(rpm_dep):
        """Convert an rpm dependency to a reldep string ('name [op evr]')."""
        return rpm_dep.DNEVR()[2:]

    def _install(self, reldep_str):
        """Mark the best provider of reldep_str for installation.

        Returns True on success (or when a miss is tolerated because of
        --skip-unavailable or a rich dependency), False otherwise.
        """
        # Try to find something by provides
        sltr = dnf.selector.Selector(self.base.sack)
        sltr.set(provides=reldep_str)
        found = sltr.matches()
        if not found and reldep_str.startswith("/"):
            # Nothing matches by provides and since it's file, try by files
            sltr = dnf.selector.Selector(self.base.sack)
            sltr.set(file=reldep_str)
            found = sltr.matches()

        if not found and not reldep_str.startswith("("):
            # No provides, no files
            # Richdeps can have no matches but it could be correct (solver must decide later)
            msg = _("No matching package to install: '%s'")
            logger.warning(msg, reldep_str)
            return self.opts.skip_unavailable is True

        if found:
            already_inst = self.base._sltr_matches_installed(sltr)
            if already_inst:
                for package in already_inst:
                    dnf.base._msg_installed(package)
        self.base._goal.install(select=sltr, optional=False)
        return True

    def _src_deps(self, src_fn):
        """Install the build requires read from the source rpm at src_fn."""
        fd = os.open(src_fn, os.O_RDONLY)
        try:
            h = self._rpm_ts.hdrFromFdno(fd)
        except rpm.error as e:
            if str(e) == 'error reading package header':
                e = _("Failed to open: '%s', not a valid source rpm file.") % src_fn
            raise dnf.exceptions.Error(e)
        finally:
            # 'finally' also covers exceptions other than rpm.error, which
            # previously leaked the descriptor
            os.close(fd)
        ds = h.dsFromHeader('requirename')
        done = True
        for dep in ds:
            reldep_str = self._rpm_dep2reldep_str(dep)
            if reldep_str.startswith('rpmlib('):
                # rpmlib() capabilities are provided by rpm itself
                continue
            done &= self._install(reldep_str)

        if not done:
            err = _("Not all dependencies satisfied")
            raise dnf.exceptions.Error(err)

        if self.opts.define:
            logger.warning(_("Warning: -D or --define arguments have no meaning "
                             "for source rpm packages."))

    def _spec_deps(self, spec_fn):
        """Install the build requires parsed from the spec file at spec_fn."""
        try:
            spec = rpm.spec(spec_fn)
        except ValueError as ex:
            msg = _("Failed to open: '%s', not a valid spec file: %s") % (
                    spec_fn, ex)
            raise dnf.exceptions.Error(msg)
        done = True
        for dep in rpm.ds(spec.sourceHeader, 'requires'):
            reldep_str = self._rpm_dep2reldep_str(dep)
            done &= self._install(reldep_str)

        if not done:
            err = _("Not all dependencies satisfied")
            raise dnf.exceptions.Error(err)

    def _remote_deps(self, package):
        """Install the build requires of the source package matching 'package'."""
        available = dnf.subject.Subject(package).get_best_query(
                        self.base.sack).filter(arch__neq="src")
        sourcenames = list({pkg.source_name for pkg in available})
        pkgs = self.base.sack.query().available().filter(
                name=(sourcenames + [package]), arch="src").latest().run()
        if not pkgs:
            raise dnf.exceptions.Error(_('no package matched: %s') % package)
        done = True
        for pkg in pkgs:
            for req in pkg.requires:
                done &= self._install(str(req))

        if not done:
            err = _("Not all dependencies satisfied")
            raise dnf.exceptions.Error(err)
debuginfo-install.py000064400000025514150402642250010533 0ustar00# debuginfo-install.py
# Install the debuginfo of packages and their dependencies to debug this package.
#
# Copyright (C) 2014 Igor Gnatenko
# Copyright (C) 2014-2019 Red Hat
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from dnfpluginscore import _, logger

import dnf
from dnf.package import Package

class DebuginfoInstall(dnf.Plugin):
    """DNF plugin supplying the 'debuginfo-install' command."""

    name = 'debuginfo-install'

    def __init__(self, base, cli):
        """Initialize the plugin instance."""
        super(DebuginfoInstall, self).__init__(base, cli)
        self.base = base
        self.cli = cli
        # Only register the command when running under the CLI.
        if cli is not None:
            cli.register_command(DebuginfoInstallCommand)

    def config(self):
        """Enable debug repos when 'autoupdate' is configured and some
        debuginfo packages are already installed."""
        parser = self.read_config(self.base.conf)
        wants_autoupdate = (parser.has_section('main')
                            and parser.has_option('main', 'autoupdate')
                            and parser.getboolean('main', 'autoupdate'))
        if not wants_autoupdate:
            return
        # allow update of already installed debuginfo packages
        installed_dbg = dnf.sack._rpmdb_sack(self.base).query().filterm(
            name__glob="*-debuginfo")
        if len(installed_dbg):
            self.base.repos.enable_debug_repos()

class DebuginfoInstallCommand(dnf.cli.Command):
    """ DebuginfoInstall plugin for DNF """

    aliases = ("debuginfo-install",)
    summary = _('install debuginfo packages')

    def __init__(self, cli):
        super(DebuginfoInstallCommand, self).__init__(cli)

        # Package strings for which no matching debuginfo/debugsource
        # package was found; collected during run() and reported at its end.
        self.available_debuginfo_missing = set()
        self.available_debugsource_missing = set()
        self.installed_debuginfo_missing = set()
        self.installed_debugsource_missing = set()

    @staticmethod
    def set_argparser(parser):
        """Register the positional package spec arguments."""
        parser.add_argument('package', nargs='+')

    def configure(self):
        """Set CLI demands and enable the debug repositories."""
        demands = self.cli.demands
        demands.resolving = True
        demands.root_user = True
        demands.sack_activation = True
        demands.available_repos = True
        self.base.repos.enable_debug_repos()

    def run(self):
        """Resolve each package spec and queue the corresponding debuginfo
        and debugsource packages for installation.

        Raises PackagesNotAvailableError when some spec did not match and
        strict mode is enabled.
        """
        errors_spec = []

        debuginfo_suffix_len = len(Package.DEBUGINFO_SUFFIX)
        debugsource_suffix_len = len(Package.DEBUGSOURCE_SUFFIX)

        for pkgspec in self.opts.package:
            # with_src=False: debug packages are looked up for binary
            # packages, never for source packages
            solution = dnf.subject.Subject(pkgspec).get_best_solution(self.base.sack,
                                                                      with_src=False)

            query = solution["query"]
            if not query:
                logger.info(_('No match for argument: %s'), self.base.output.term.bold(pkgspec))
                errors_spec.append(pkgspec)
                continue

            # maps package name -> list of packages with that name
            package_dict = query.available()._name_dict()
            # installed versions of packages have priority, replace / add them to the dict
            package_dict.update(query.installed()._name_dict())

            # Remove debuginfo packages if their base packages are in the query.
            # They can get there through globs and they break the installation
            # of debug packages with the same version as the installed base
            # packages. If the base package of a debuginfo package is not in
            # the query, the user specified a debug package on the command
            # line. We don't want to ignore those, so we will install them.
            # But, in this case the version will not be matched to the
            # installed version of the base package, as that would require
            # another query and is further complicated if the user specifies a
            # version themselves etc.
            for name in list(package_dict.keys()):
                if name.endswith(Package.DEBUGINFO_SUFFIX):
                    if name[:-debuginfo_suffix_len] in package_dict:
                        package_dict.pop(name)
                if name.endswith(Package.DEBUGSOURCE_SUFFIX):
                    if name[:-debugsource_suffix_len] in package_dict:
                        package_dict.pop(name)

            # attempt to install debuginfo and debugsource for the highest
            # listed version of the package (in case the package is installed,
            # only the installed version is listed)
            for pkgs in package_dict.values():
                first_pkg = pkgs[0]

                # for packages from system (installed) there can be more
                # packages with different architectures listed and we want to
                # install debuginfo for all of them
                if first_pkg._from_system:
                    # we need to split them by architectures and install the
                    # latest version for each architecture
                    arch_dict = {}

                    for pkg in pkgs:
                        arch_dict.setdefault(pkg.arch, []).append(pkg)

                    for package_arch_list in arch_dict.values():
                        pkg = package_arch_list[0]

                        # try -debuginfo first, then the source debug name;
                        # record a miss only when both fail
                        if not self._install_debug_from_system(pkg.debug_name, pkg):
                            if not self._install_debug_from_system(pkg.source_debug_name, pkg):
                                self.installed_debuginfo_missing.add(str(pkg))

                        if not self._install_debug_from_system(pkg.debugsource_name, pkg):
                            self.installed_debugsource_missing.add(str(pkg))

                    continue

                # if the package in question is -debuginfo or -debugsource, install it directly
                if first_pkg.name.endswith(Package.DEBUGINFO_SUFFIX) \
                        or first_pkg.name.endswith(Package.DEBUGSOURCE_SUFFIX):

                    self._install(pkgs)  # pass all pkgs to the solver, it will pick the best one
                    continue

                # if we have NEVRA parsed from the pkgspec, use it to install the package
                if solution["nevra"] is not None:
                    if not self._install_debug(first_pkg.debug_name, solution["nevra"]):
                        if not self._install_debug(first_pkg.source_debug_name, solution["nevra"]):
                            self.available_debuginfo_missing.add(
                                "{}-{}".format(first_pkg.name, first_pkg.evr))

                    if not self._install_debug(first_pkg.debugsource_name, solution["nevra"]):
                        self.available_debugsource_missing.add(
                            "{}-{}".format(first_pkg.name, first_pkg.evr))

                    continue

                # if we don't have NEVRA from the pkgspec, pass nevras from
                # all packages that were found (while replacing the name with
                # the -debuginfo and -debugsource variant) to the solver, which
                # will pick the correct version and architecture
                if not self._install_debug_no_nevra(first_pkg.debug_name, pkgs):
                    if not self._install_debug_no_nevra(first_pkg.source_debug_name, pkgs):
                        self.available_debuginfo_missing.add(
                            "{}-{}".format(first_pkg.name, first_pkg.evr))

                if not self._install_debug_no_nevra(first_pkg.debugsource_name, pkgs):
                    self.available_debugsource_missing.add(
                        "{}-{}".format(first_pkg.name, first_pkg.evr))

        # report every category of misses collected above
        if self.available_debuginfo_missing:
            logger.info(
                _("Could not find debuginfo package for the following available packages: %s"),
                ", ".join(sorted(self.available_debuginfo_missing)))

        if self.available_debugsource_missing:
            logger.info(
                _("Could not find debugsource package for the following available packages: %s"),
                ", ".join(sorted(self.available_debugsource_missing)))

        if self.installed_debuginfo_missing:
            logger.info(
                _("Could not find debuginfo package for the following installed packages: %s"),
                ", ".join(sorted(self.installed_debuginfo_missing)))

        if self.installed_debugsource_missing:
            logger.info(
                _("Could not find debugsource package for the following installed packages: %s"),
                ", ".join(sorted(self.installed_debugsource_missing)))

        if errors_spec and self.base.conf.strict:
            raise dnf.exceptions.PackagesNotAvailableError(_("Unable to find a match"),
                                                           pkg_spec=' '.join(errors_spec))

    def _install_debug_from_system(self, debug_name, pkg):
        """Queue debug_name matching pkg's exact EVRA for installation.

        Returns True when a matching package was found, False otherwise.
        """
        query = self.base.sack.query().filter(name=debug_name,
                                              epoch=pkg.epoch,
                                              version=pkg.version,
                                              release=pkg.release,
                                              arch=pkg.arch)

        if query:
            self._install(query)
            return True

        return False

    def _install_debug(self, debug_name, base_nevra):
        """Queue debug_name filtered by the EVRA parts present in the
        user-supplied NEVRA (glob matching).

        Returns True when a matching package was found, False otherwise.
        """
        kwargs = {}

        # if some part of EVRA was specified in the argument, add it as a filter
        if base_nevra.epoch is not None:
            kwargs["epoch__glob"] = base_nevra.epoch
        if base_nevra.version is not None:
            kwargs["version__glob"] = base_nevra.version
        if base_nevra.release is not None:
            kwargs["release__glob"] = base_nevra.release
        if base_nevra.arch is not None:
            kwargs["arch__glob"] = base_nevra.arch

        query = self.base.sack.query().filter(name=debug_name, **kwargs)

        if query:
            self._install(query)
            return True

        return False

    def _install_debug_no_nevra(self, debug_name, pkgs):
        """Queue debug_name with the exact EVR.arch of any of pkgs.

        Returns True when a matching package was found, False otherwise.
        """
        query = self.base.sack.query().filterm(
            nevra_strict=["{}-{}.{}".format(debug_name, p.evr, p.arch) for p in pkgs])
        if query:
            self._install(query)
            return True

        return False

    def _install(self, pkgs):
        """Add pkgs to the goal; optional unless strict mode is on."""
        selector = dnf.selector.Selector(self.base.sack)
        selector.set(pkg=pkgs)
        self.base.goal.install(select=selector, optional=not self.base.conf.strict)
repograph.py000064400000007774150402642250007124 0ustar00# repograph.py
# DNF plugin adding a command to Output a full package dependency graph in dot
# format.
#
# Copyright (C) 2015 Igor Gnatenko
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger

import dnf.cli

DOT_HEADER = """
size="20.69,25.52";
ratio="fill";
rankdir="TB";
orientation=port;
node[style="filled"];
"""


class RepoGraph(dnf.Plugin):
    """Plugin wiring the 'repograph' command into the DNF CLI."""

    name = "repograph"

    def __init__(self, base, cli):
        super(RepoGraph, self).__init__(base, cli)
        # Register the command only when running under the CLI.
        if cli is not None:
            cli.register_command(RepoGraphCommand)


class RepoGraphCommand(dnf.cli.Command):
    """Command printing the full package dependency graph in dot format."""

    aliases = ("repograph", "repo-graph",)
    summary = _("Output a full package dependency graph in dot format")

    def configure(self):
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True
        if self.opts.repo:
            # restrict the graph to the explicitly requested repositories
            for repo in self.base.repos.all():
                if repo.id not in self.opts.repo:
                    repo.disable()
                else:
                    repo.enable()

    def run(self):
        self.do_dot(DOT_HEADER)

    def do_dot(self, header):
        """Print the dependency graph of all available packages to stdout
        in dot format, prefixed by the given graph header."""
        deps = self._get_deps(self.base.sack)

        print("digraph packages {")
        print("{}".format(header))

        for pkg in deps.keys():
            # color calculations lifted from rpmgraph: hue grows with the
            # number of dependencies
            h = 0.5 + (0.6 / 23 * len(deps[pkg]))
            s = h + 0.1
            b = 1.0

            print('"{}" [color="{:.12g} {:.12g} {}"];'.format(pkg, h, s, b))
            print('"{}" -> {{'.format(pkg))
            for req in deps[pkg]:
                print('"{}"'.format(req))
            print('}} [color="{:.12g} {:.12g} {}"];\n'.format(h, s, b))
        print("}")

    @staticmethod
    def _get_deps(sack):
        """Map each available package name to the names of the packages
        providing its requirements.

        Returns a dict {package name: list of provider names}; a package
        appears only when at least one provider other than itself exists.
        """
        requires = {}
        # cache: requirement string -> name of its first provider
        prov = {}
        # requirement strings known to have no provider; a set gives O(1)
        # membership tests (was a list with O(n) scans per requirement)
        skip = set()

        available = sack.query().available()
        for pkg in available:
            providers = {}  # dict used as an insertion-ordered set
            has_external = False  # seen a provider other than pkg itself?
            for req in pkg.requires:
                reqname = str(req)
                if reqname in skip:
                    continue
                # XXX: https://bugzilla.redhat.com/show_bug.cgi?id=1186721
                if reqname.startswith("solvable:"):
                    continue
                provider = prov.get(reqname)
                if provider is None:
                    matches = available.filter(provides=reqname)
                    if not matches:
                        logger.debug(_("Nothing provides: '%s'"), reqname)
                        skip.add(reqname)
                        continue
                    provider = matches[0].name
                    prov[reqname] = provider
                if provider == pkg.name:
                    providers[provider] = None
                if provider in providers or provider in skip:
                    continue
                providers[provider] = None
                has_external = True
            # original behavior preserved: packages whose only provider is
            # themselves (or that have no resolvable requires) are omitted
            if has_external:
                requires[pkg.name] = list(providers)
        return requires
repodiff.py000064400000026323150402642250006722 0ustar00# repodiff.py
# DNF plugin adding a command to show differencies between two sets
# of repositories.
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals

import dnf.cli
from dnf.cli.option_parser import OptionParser
import hawkey

from dnfpluginscore import _


class RepoDiff(dnf.Plugin):
    """Plugin wiring the 'repodiff' command into the DNF CLI."""

    name = "repodiff"

    def __init__(self, base, cli):
        super(RepoDiff, self).__init__(base, cli)
        # Register the command only when running under the CLI.
        if cli is not None:
            cli.register_command(RepoDiffCommand)


class RepoDiffCommand(dnf.cli.Command):
    aliases = ("repodiff",)
    summary = _("List differences between two sets of repositories")

    @staticmethod
    def set_argparser(parser):
        """Register command line arguments."""
        # I'd like to use --old and --new options like Yum did.
        # But ability to disable abbreviated long options is added
        # only in Python >= 3.5
        # So in command arguments we are not able to use arguments,
        # which are prefixes of main arguments (i.w. --new would be
        # treated as --newpackage). This is because we run .parse_args
        # two times - for main and then for command arguments.
        # https://stackoverflow.com/questions/33900846
        parser.add_argument("--repo-old", "-o", default=[], action="append", dest="old",
                            help=_("Specify old repository, can be used multiple times"))
        parser.add_argument("--repo-new", "-n", default=[], action="append", dest="new",
                            help=_("Specify new repository, can be used multiple times"))
        parser.add_argument("--arch", "--archlist", "-a", default=[],
                            action=OptionParser._SplitCallback, dest="arches",
                            help=_("Specify architectures to compare, can be used "
                                   "multiple times. By default, only source rpms are "
                                   "compared."))
        parser.add_argument("--size", "-s", action="store_true",
                            help=_("Output additional data about the size of the changes."))
        parser.add_argument("--compare-arch", action="store_true",
                            help=_("Compare packages also by arch. By default "
                                   "packages are compared just by name."))
        parser.add_argument("--simple", action="store_true",
                            help=_("Output a simple one line message for modified packages."))
        parser.add_argument("--downgrade", action="store_true",
                            help=_("Split the data for modified packages between "
                                   "upgraded and downgraded packages."))

    def configure(self):
        """Validate options and enable exactly the repos being compared."""
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True
        demands.changelogs = True
        self.base.conf.disable_excludes = ["all"]
        # TODO yum was able to handle mirrorlist in --new/--old arguments
        # Can be resolved by improving --repofrompath option
        if not self.opts.new or not self.opts.old:
            msg = _("Both old and new repositories must be set.")
            raise dnf.exceptions.Error(msg)
        for repo in self.base.repos.all():
            if repo.id in self.opts.new + self.opts.old:
                repo.enable()
            else:
                repo.disable()
        if not self.opts.arches:
            # by default only source rpms are compared
            self.opts.arches = ['src']

    def _pkgkey(self, pkg):
        """Key identifying a package across the two sets: (name, arch)
        with --compare-arch, otherwise just the name."""
        if self.opts.compare_arch:
            return (pkg.name, pkg.arch)
        return pkg.name

    def _repodiff(self, old, new):
        '''compares packagesets old and new, returns dictionary with packages:
        added: only in new set
        removed: only in old set
        upgraded: in both old and new, new has bigger evr
        downgraded: in both old and new, new has lower evr
        obsoletes: dictionary of which old package is obsoleted by which new
        '''
        old_d = dict([(self._pkgkey(p), p) for p in old])
        old_keys = set(old_d.keys())
        new_d = dict([(self._pkgkey(p), p) for p in new])
        new_keys = set(new_d.keys())

        # mapping obsoleted_package_from_old: obsoleted_by_package_from_new
        obsoletes = dict()
        for obsoleter in new.filter(obsoletes=old):
            for obsoleted in old.filter(provides=obsoleter.obsoletes):
                obsoletes[self._pkgkey(obsoleted)] = obsoleter

        evr_cmp = self.base.sack.evr_cmp
        repodiff = dict(
            added=[new_d[k] for k in new_keys - old_keys],
            removed=[old_d[k] for k in old_keys - new_keys],
            obsoletes=obsoletes,
            upgraded=[],
            downgraded=[])
        # packages present in both sets: classify by evr comparison
        for k in old_keys.intersection(new_keys):
            pkg_old = old_d[k]
            pkg_new = new_d[k]
            if pkg_old.evr == pkg_new.evr:
                continue
            if evr_cmp(pkg_old.evr, pkg_new.evr) > 0:
                repodiff['downgraded'].append((pkg_old, pkg_new))
            else:
                repodiff['upgraded'].append((pkg_old, pkg_new))

        return repodiff

    def _report(self, repodiff):
        """Print the human-readable report for a _repodiff() result."""
        def pkgstr(pkg):
            # full NEVRA with --compare-arch, name-evr otherwise
            if self.opts.compare_arch:
                return str(pkg)
            return "%s-%s" % (pkg.name, pkg.evr)

        def sizestr(num):
            # raw byte count followed by a human-readable form
            msg = str(num)
            if num > 0:
                msg += " ({})".format(dnf.cli.format.format_number(num).strip())
            elif num < 0:
                msg += " (-{})".format(dnf.cli.format.format_number(-num).strip())
            return msg

        def report_modified(pkg_old, pkg_new):
            # one line with --simple; otherwise an underlined header plus
            # the changelog entries newer than the old package's first one
            msgs = []
            if self.opts.simple:
                msgs.append("%s -> %s" % (pkgstr(pkg_old), pkgstr(pkg_new)))
            else:
                msgs.append('')
                msgs.append("%s -> %s" % (pkgstr(pkg_old), pkgstr(pkg_new)))
                msgs.append('-' * len(msgs[-1]))
                if pkg_old.changelogs:
                    old_chlog = pkg_old.changelogs[0]
                else:
                    old_chlog = None
                for chlog in pkg_new.changelogs:
                    # stop at the first entry already present in the old package
                    if old_chlog:
                        if chlog['timestamp'] < old_chlog['timestamp']:
                            break
                        elif (chlog['timestamp'] == old_chlog['timestamp'] and
                              chlog['author'] == old_chlog['author'] and
                              chlog['text'] == old_chlog['text']):
                            break
                    msgs.append('* %s %s\n%s' % (
                        chlog['timestamp'].strftime("%a %b %d %Y"),
                        dnf.i18n.ucd(chlog['author']),
                        dnf.i18n.ucd(chlog['text'])))
                if self.opts.size:
                    msgs.append(_("Size change: {} bytes").format(
                        pkg_new.size - pkg_old.size))
            print('\n'.join(msgs))

        # accumulate byte totals per category while printing
        sizes = dict(added=0, removed=0, upgraded=0, downgraded=0)
        for pkg in sorted(repodiff['added']):
            print(_("Added package  : {}").format(pkgstr(pkg)))
            sizes['added'] += pkg.size
        for pkg in sorted(repodiff['removed']):
            print(_("Removed package: {}").format(pkgstr(pkg)))
            obsoletedby = repodiff['obsoletes'].get(self._pkgkey(pkg))
            if obsoletedby:
                print(_("Obsoleted by   : {}").format(pkgstr(obsoletedby)))
            sizes['removed'] += pkg.size

        if self.opts.downgrade:
            if repodiff['upgraded']:
                print(_("\nUpgraded packages"))
                for (pkg_old, pkg_new) in sorted(repodiff['upgraded']):
                    sizes['upgraded'] += (pkg_new.size - pkg_old.size)
                    report_modified(pkg_old, pkg_new)
            if repodiff['downgraded']:
                print(_("\nDowngraded packages"))
                for (pkg_old, pkg_new) in sorted(repodiff['downgraded']):
                    sizes['downgraded'] += (pkg_new.size - pkg_old.size)
                    report_modified(pkg_old, pkg_new)
        else:
            # without --downgrade both directions are reported together
            modified = repodiff['upgraded'] + repodiff['downgraded']
            if modified:
                print(_("\nModified packages"))
                for (pkg_old, pkg_new) in sorted(modified):
                    sizes['upgraded'] += (pkg_new.size - pkg_old.size)
                    report_modified(pkg_old, pkg_new)

        print(_("\nSummary"))
        print(_("Added packages: {}").format(len(repodiff['added'])))
        print(_("Removed packages: {}").format(len(repodiff['removed'])))
        if self.opts.downgrade:
            print(_("Upgraded packages: {}").format(len(repodiff['upgraded'])))
            print(_("Downgraded packages: {}").format(len(repodiff['downgraded'])))
        else:
            print(_("Modified packages: {}").format(
                len(repodiff['upgraded']) + len(repodiff['downgraded'])))
        if self.opts.size:
            print(_("Size of added packages: {}").format(sizestr(sizes['added'])))
            print(_("Size of removed packages: {}").format(sizestr(sizes['removed'])))
            if not self.opts.downgrade:
                print(_("Size of modified packages: {}").format(
                    sizestr(sizes['upgraded'] + sizes['downgraded'])))
            else:
                print(_("Size of upgraded packages: {}").format(
                    sizestr(sizes['upgraded'])))
                print(_("Size of downgraded packages: {}").format(
                    sizestr(sizes['downgraded'])))
            print(_("Size change: {}").format(
                sizestr(sizes['added'] + sizes['upgraded'] + sizes['downgraded'] -
                        sizes['removed'])))

    def run(self):
        """Build the old/new package sets from the selected repositories
        and print their diff."""
        # prepare old and new packagesets based by given arguments
        q_new = self.base.sack.query(hawkey.IGNORE_EXCLUDES).filter(
            reponame=self.opts.new)
        q_old = self.base.sack.query(hawkey.IGNORE_EXCLUDES).filter(
            reponame=self.opts.old)
        if self.opts.arches and '*' not in self.opts.arches:
            q_new.filterm(arch=self.opts.arches)
            q_old.filterm(arch=self.opts.arches)
        # keep only the latest version of each package (per arch with
        # --compare-arch)
        if self.opts.compare_arch:
            q_new.filterm(latest_per_arch=1)
            q_old.filterm(latest_per_arch=1)
        else:
            q_new.filterm(latest=1)
            q_old.filterm(latest=1)
        q_new.apply()
        q_old.apply()

        self._report(self._repodiff(q_old, q_new))
spacewalk.py000064400000033601150402642250007073 0ustar00#
# Copyright (C) 2015  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger

import dnf
import dnf.exceptions
import errno
import json
import librepo
import os
from copy import copy
from dnf.conf.config import PRIO_PLUGINCONFIG

import up2date_client.up2dateAuth
import up2date_client.config
import up2date_client.rhnChannel
import up2date_client.rhnPackageInfo
from rhn.i18n import ustr
from up2date_client import up2dateErrors

# name of the per-system channel cache file kept in dnf's persistdir
STORED_CHANNELS_NAME = '_spacewalk.json'

# User-facing message fragments (translated via dnfpluginscore._); they are
# combined by the plugin when logging warnings and errors below.
RHN_DISABLED    = _("CloudLinux Network based repositories will be disabled.")
CHANNELS_DISABLED = _("CloudLinux Network channel support will be disabled.")
COMMUNICATION_ERROR  = _("There was an error communicating with CloudLinux Network server.")
NOT_REGISTERED_ERROR = _("This system is not registered with CloudLinux Network server.")
NOT_SUBSCRIBED_ERROR = _("This system is not subscribed to any channels.")
NO_SYSTEM_ID_ERROR   = _("SystemId could not be acquired.")
USE_RHNREGISTER      = _("You can use rhn_register to register.")
UPDATES_FROM_SPACEWALK = _("This system is receiving updates from CloudLinux Network server.")
GPG_KEY_REJECTED     = _("For security reasons packages from CloudLinux Network based repositories can be verified only with locally installed gpg keys. GPG key '%s' has been rejected.")
PROFILE_NOT_SENT     = _("Package profile information could not be sent.")
MISSING_HEADER       = _("Missing required login information for CloudLinux Network: %s")
LEAPP_IN_PROGRESS    = _("Leapp upgrade is running - using cache.")
MUST_BE_ROOT         = _('Spacewalk plugin has to be run under with the root privileges.')

class Spacewalk(dnf.Plugin):
    """DNF plugin exposing CloudLinux Network (Spacewalk) channels as repos.

    On initialization it reads the plugin configuration, contacts the CLN
    server (unless networking is unavailable or a Leapp upgrade is in
    progress, in which case a cached channel list from persistdir is used)
    and registers one SpacewalkRepo per subscribed channel.  After a
    successful transaction the package profile is sent back to the server.
    """

    name = 'spacewalk'

    def __init__(self, base, cli):
        super(Spacewalk, self).__init__(base, cli)
        self.base = base
        self.cli = cli
        # channel list cached from the last successful server contact
        self.stored_channels_path = os.path.join(self.base.conf.persistdir,
                                                 STORED_CHANNELS_NAME)
        self.connected_to_spacewalk = False
        self.up2date_cfg = {}
        # plugin-private copy of the dnf config, overlaid with the [main]
        # section of the plugin configuration file
        self.conf = copy(self.base.conf)
        self.parser = self.read_config(self.conf)
        if "main" in self.parser.sections():
            for (key, value) in self.parser.items("main"):
                self.conf._set_value(key, value, PRIO_PLUGINCONFIG)
        if not dnf.util.am_i_root():
            logger.warning(MUST_BE_ROOT)
            self.conf.enabled = False
        if not self.conf.enabled:
            return
        logger.debug('initialized Spacewalk plugin')

        self.activate_channels()

    def config(self):
        if not self.conf.enabled:
            return

        # cli is None when the plugin is executed through automatic actions
        # or other plugins, like in case of leapp which uses a custom dnf
        # plugin
        if not self.cli:
            return

        self.cli.demands.root_user = True

    def clnreg(self):
        """(Re)register the system with CLN via clnreg_ks."""
        os.system('/usr/sbin/clnreg_ks --strict-edition')

    def activate_channels(self, networking=True):
        """Determine subscribed channels and add a SpacewalkRepo for each.

        :param networking: when False, skip all server communication and use
            the channel list cached in persistdir instead
        """
        enabled_channels = {}
        sslcacert = None
        force_http = 0
        proxy_url = None
        login_info = None
        cached_channels = self._read_channels_file()
        if not networking:
            # no network communication, use list of channels from persistdir
            enabled_channels = cached_channels
        elif os.path.isfile("/etc/cln_leapp_in_progress"):
            # networking is true, but CLN urls won't be accessible, use cache
            logger.warning(LEAPP_IN_PROGRESS)
            enabled_channels = cached_channels
        else:
            # setup proxy according to up2date
            self.up2date_cfg = up2date_client.config.initUp2dateConfig()
            sslcacert = get_ssl_ca_cert(self.up2date_cfg)
            # note: a stray trailing comma here used to make force_http a
            # one-element tuple, which is always truthy
            force_http = self.up2date_cfg['useNoSSLForPackages']

            # try to log in; on a server error, register the system once via
            # clnreg and then retry the login a single time
            for attempt in (0, 1):
                try:
                    login_info = up2date_client.up2dateAuth.getLoginInfo(
                        timeout=self.conf.timeout)
                    break
                except up2dateErrors.RhnServerException as e:
                    if attempt == 0:
                        self.clnreg()
                        continue
                    logger.error("%s\n%s\n%s", COMMUNICATION_ERROR, RHN_DISABLED, e)
                    return

            if not login_info:
                logger.error("%s\n%s", NOT_REGISTERED_ERROR, RHN_DISABLED)
                self._write_channels_file({})
                return

            try:
                svrChannels = up2date_client.rhnChannel.getChannelDetails(
                                                              timeout=self.conf.timeout)
            except up2dateErrors.CommunicationError as e:
                logger.error("%s\n%s\n%s", COMMUNICATION_ERROR, RHN_DISABLED, e)
                return
            except up2dateErrors.NoChannelsError:
                logger.error("%s\n%s", NOT_SUBSCRIBED_ERROR, CHANNELS_DISABLED)
                self._write_channels_file({})
                return
            except up2dateErrors.NoSystemIdError:
                logger.error("%s %s\n%s\n%s", NOT_SUBSCRIBED_ERROR,
                             NO_SYSTEM_ID_ERROR, USE_RHNREGISTER, RHN_DISABLED)
                return
            self.connected_to_spacewalk = True
            logger.info(UPDATES_FROM_SPACEWALK)

            for channel in svrChannels:
                if channel['version']:
                    enabled_channels[channel['label']] = dict(channel.items())
            self._write_channels_file(enabled_channels)

        repos = self.base.repos

        for (channel_id, channel_dict) in enabled_channels.items():
            # a changed channel version triggers a metadata refresh (see
            # SpacewalkRepo.__init__)
            cached_channel = cached_channels.get(channel_id)
            cached_version = None
            if cached_channel:
                cached_version = cached_channel.get('version')
            # per-channel sections of the plugin config override [main]
            conf = copy(self.conf)
            if channel_id in self.parser.sections():
                for (key, value) in self.parser.items(channel_id):
                    conf._set_value(key, value, PRIO_PLUGINCONFIG)
            repo = SpacewalkRepo(channel_dict, {
                                    'conf'      : self.base.conf,
                                    'proxy'     : proxy_url,
                                    'timeout'   : conf.timeout,
                                    'sslcacert' : sslcacert,
                                    'force_http': force_http,
                                    'cached_version' : cached_version,
                                    'login_info': login_info,
                                    'gpgcheck': conf.gpgcheck,
                                    'enabled': conf.enabled,
                                })
            repos.add(repo)

        # DEBUG
        logger.debug(enabled_channels)

    def transaction(self):
        """ Update system's profile after transaction. """
        if not self.conf.enabled:
            return
        if not self.connected_to_spacewalk:
            # not connected so nothing to do here
            return
        if self.up2date_cfg['writeChangesToLog'] == 1:
            delta = self._make_package_delta()
            up2date_client.rhnPackageInfo.logDeltaPackages(delta)
        try:
            up2date_client.rhnPackageInfo.updatePackageProfile(
                                                        timeout=self.conf.timeout)
        except up2dateErrors.RhnServerException as e:
            logger.error("%s\n%s\n%s", COMMUNICATION_ERROR, PROFILE_NOT_SENT, e)

    def _read_channels_file(self):
        """Return the cached channel dict, or {} when missing or corrupted."""
        try:
            with open(self.stored_channels_path, "r") as channels_file:
                return json.loads(channels_file.read())
        except IOError as e:
            # a missing cache file is expected on first run
            if e.errno != errno.ENOENT:
                raise
        except json.decoder.JSONDecodeError:
            pass        # ignore broken json and recreate it later

        return {}

    def _write_channels_file(self, var):
        """Persist the channel dict *var* to the cache file in persistdir."""
        try:
            with open(self.stored_channels_path, "w") as channels_file:
                json.dump(var, channels_file, indent=4)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise

    def _make_package_delta(self):
        """Return added/removed (n, v, r, e, a) tuples of the last transaction."""
        return {
            'added': [(p.name, p.version, p.release, p.epoch, p.arch)
                      for p in self.base.transaction.install_set],
            'removed': [(p.name, p.version, p.release, p.epoch, p.arch)
                        for p in self.base.transaction.remove_set],
        }


class SpacewalkRepo(dnf.repo.Repo):
    """
    Repository object for Spacewalk. Uses up2date libraries.
    """
    # authentication headers that must be present in login_info
    needed_headers = ['X-RHN-Server-Id',
                      'X-RHN-Auth-User-Id',
                      'X-RHN-Auth',
                      'X-RHN-Auth-Server-Time',
                      'X-RHN-Auth-Expire-Offset']

    def __init__(self, channel, opts):
        """Build a dnf repo from a CLN *channel* dict and plugin options *opts*."""
        super(SpacewalkRepo, self).__init__(ustr(channel['label']),
                                            opts.get('conf'))
        # dnf stuff
        self.name = ustr(channel['name'])
        self.baseurl = [url + '/GET-REQ/' + self.id for url in channel['url']]
        self.sslcacert = opts.get('sslcacert')
        self.proxy = opts.get('proxy')
        try:
            self.gpgkey = get_gpg_key_urls(channel['gpg_key_url'])
        except InvalidGpgKeyLocation as e:
            # only locally installed keys are trusted; drop the rejected one
            logger.warning(GPG_KEY_REJECTED, dnf.i18n.ucd(e))
            self.gpgkey = []
        # channel version changed since last run -> expire metadata right away
        if channel['version'] != opts.get('cached_version'):
            self.metadata_expire = 1

        # spacewalk stuff
        self.login_info = opts.get('login_info')
        self.keepalive = 0
        self.bandwidth = 0
        self.retries = 1
        self.throttle = 0
        self.timeout = opts.get('timeout')
        self.gpgcheck = opts.get('gpgcheck')
        self.force_http = opts.get('force_http')

        if opts.get('enabled'):
            self.enable()
        else:
            self.disable()

        if hasattr(self, 'set_http_headers'):
            # dnf > 4.0.9 on RHEL 8, Fedora 29/30
            http_headers = self.create_http_headers()
            if http_headers:
                self.set_http_headers(http_headers)

    def create_http_headers(self):
        """Return the RHN authentication http headers for this repository.

        Raises dnf.exceptions.RepoError if a required header is missing
        from login_info.
        """
        http_headers = []
        if not self.login_info:
            return http_headers
        for header in self.needed_headers:
            if header not in self.login_info:
                error = MISSING_HEADER % header
                # fixed: dnf.Error.RepoError does not exist; the exception
                # class lives in dnf.exceptions
                raise dnf.exceptions.RepoError(error)
            if self.login_info[header] in (None, ''):
                # This doesn't work due to bug in librepo (or even deeper in libcurl)
                # the workaround below can be removed once BZ#1211662 is fixed
                #http_headers.append("%s;" % header)
                http_headers.append("%s: \r\nX-libcurl-Empty-Header-Workaround: *" % header)
            else:
                http_headers.append("%s: %s" % (header, self.login_info[header]))
        if not self.force_http:
            http_headers.append("X-RHN-Transport-Capability: follow-redirects=3")

        return http_headers

    def _handle_new_remote(self, destdir, mirror_setup=True):
        # this function is called only on dnf < 3.6.0 (up to Fedora 29)
        handle = super(SpacewalkRepo, self)._handle_new_remote(destdir, mirror_setup)
        http_headers = self.create_http_headers()
        if http_headers:
            handle.setopt(librepo.LRO_HTTPHEADER, http_headers)
        return handle


# FIXME
# all routines below should go to rhn-client-tools because they are shared
# between yum-rhn-plugin and dnf-plugin-spacewalk
def get_gpg_key_urls(key_url_string):
    """
    Parse the key urls and validate them.

    key_url_string is a space separated list of gpg key urls that must be
    located in /etc/pki/rpm-gpg/.
    Return a list of strings containing the key urls.
    Raises InvalidGpgKeyLocation if any of the key urls are invalid.
    """
    key_urls = key_url_string.split()
    for key_url in key_urls:
        if not is_valid_gpg_key_url(key_url):
            raise InvalidGpgKeyLocation(key_url)
    return key_urls

class InvalidGpgKeyLocation(Exception):
    """Raised when a gpg key url is not a file:// path under /etc/pki/rpm-gpg/."""

def is_valid_gpg_key_url(key_url):
    """Return True when *key_url* is a file:// url under /etc/pki/rpm-gpg/."""
    pieces = key_url.split('://')
    if len(pieces) != 2:
        return False

    scheme, location = pieces
    # normpath collapses '..' components, defeating path traversal attempts
    return (scheme.lower() == 'file'
            and os.path.normpath(location).startswith('/etc/pki/rpm-gpg/'))

def get_ssl_ca_cert(up2date_cfg):
    """Return the configured SSL CA cert path (first one if a list is set).

    Raises BadSslCaCertConfig when 'sslCACert' is missing or empty.
    NOTE(review): BadSslCaCertConfig is not defined in this module — verify
    it is provided by an importing module before the error path is hit.
    """
    ca_certs = up2date_cfg.get('sslCACert')
    if not ca_certs:
        raise BadSslCaCertConfig

    if type(ca_certs) == list:
        return ca_certs[0]
    return ca_certs
generate_completion_cache.py000064400000007554150402642250012277 0ustar00# coding=utf-8
# generate_completion_cache.py - generate cache for dnf bash completion
# Copyright © 2013 Elad Alfassa <elad@fedoraproject.org>
# Copyright (C) 2014-2015 Igor Gnatenko <i.gnatenko.brain@gmail.com>
# Copyright (C) 2015  Red Hat, Inc.

# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.

from __future__ import absolute_import
from __future__ import unicode_literals
from dnf.i18n import ucd
from dnfpluginscore import logger

import dnf
import os.path
import sqlite3


class BashCompletionCache(dnf.Plugin):
    """DNF plugin that maintains the sqlite package-name cache used by the
    dnf bash completion script (/var/cache/dnf/packages.db)."""

    name = 'generate_completion_cache'

    def __init__(self, base, cli):
        super(BashCompletionCache, self).__init__(base, cli)
        self.base = base
        # sqlite db with 'available' and 'installed' single-column tables
        self.cache_file = "/var/cache/dnf/packages.db"

    @staticmethod
    def _out(msg):
        logger.debug('Completion plugin: %s', msg)

    def sack(self):
        ''' Generate cache of available packages '''
        # We generate this cache only if the repos were just freshed or if the
        # cache file doesn't exist

        fresh = False
        for repo in self.base.repos.iter_enabled():
            if repo.metadata is not None and repo.metadata.fresh:
                # One fresh repo is enough to cause a regen of the cache
                fresh = True
                break

        if not os.path.exists(self.cache_file) or fresh:
            try:
                with sqlite3.connect(self.cache_file) as conn:
                    self._out('Generating completion cache...')
                    cur = conn.cursor()
                    cur.execute(
                        "create table if not exists available (pkg TEXT)")
                    cur.execute(
                        "create unique index if not exists "
                        "pkg_available ON available(pkg)")
                    cur.execute("delete from available")
                    avail_pkgs = self.base.sack.query().available()
                    # source rpms are not installable, keep them out
                    avail_pkgs_insert = [[str(x)] for x in avail_pkgs if x.arch != "src"]
                    cur.executemany("insert or ignore into available values (?)",
                                    avail_pkgs_insert)
                    conn.commit()
            except sqlite3.OperationalError as e:
                # cache is best-effort; never break the transaction over it
                self._out("Can't write completion cache: %s" % ucd(e))

    def transaction(self):
        ''' Generate cache of installed packages '''
        # fixed: the guard used to test `self.transaction` (the bound method,
        # always truthy); the intent is to skip when there was no transaction
        if not self.base.transaction:
            return

        try:
            with sqlite3.connect(self.cache_file) as conn:
                self._out('Generating completion cache...')
                cur = conn.cursor()
                cur.execute("create table if not exists installed (pkg TEXT)")
                cur.execute(
                    "create unique index if not exists "
                    "pkg_installed ON installed(pkg)")
                cur.execute("delete from installed")
                inst_pkgs = dnf.sack._rpmdb_sack(self.base).query().installed()
                inst_pkgs_insert = [[str(x)] for x in inst_pkgs if x.arch != "src"]
                cur.executemany("insert or ignore into installed values (?)",
                                inst_pkgs_insert)
                conn.commit()
        except sqlite3.OperationalError as e:
            self._out("Can't write completion cache: %s" % ucd(e))
universal_hooks.py000075500000013437150402642250010344 0ustar00#!/usr/bin/python3.6

# Copyright (c) 2020, cPanel, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import abc
import glob
import logging
import os
from os import path
import re
import subprocess
import sys
import tempfile

from dnf import Plugin

# this logger is configured by the dnf CLI, but error() is not shown by default (but is with -v)
# LOG = logging.getLogger("dnf")

# plugin-local logger: errors are written directly to stderr so hook failures
# are visible even when the dnf CLI logger suppresses them
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.ERROR)
LOG.addHandler(logging.StreamHandler(sys.stderr))


class UniversalHooksPlugin(Plugin):
    """Runs executables under /etc/dnf/universal-hooks at each dnf plugin slot."""

    name = 'universal-hooks'

    def __init__(self, base, cli):
        super().__init__(base, cli)
        self.hook_root = '/etc/dnf/universal-hooks'

    def _run_slot(self, slot):
        # simple slots only execute the scripts in <root>/<slot>
        _run_dir(path.join(self.hook_root, slot), LOG)

    def _run_transaction_slot(self, slot):
        # transaction slots also run per-package and wildcard hook dirs
        _run_pkg_dirs(self.hook_root, LOG, slot,
                      DnfTransactionInfo(self.base.transaction))
        _run_dir(path.join(self.hook_root, slot), LOG)

    def pre_config(self):
        self._run_slot('pre_config')

    def config(self):
        self._run_slot('config')

    def resolved(self):
        self._run_slot('resolved')

    def sack(self):
        self._run_slot('sack')

    def pre_transaction(self):
        self._run_transaction_slot('pre_transaction')

    def transaction(self):
        self._run_transaction_slot('transaction')


class FileSystem(metaclass=abc.ABCMeta):
    """Abstract filesystem facade so hook discovery can be faked in tests."""

    @abc.abstractmethod
    def glob(self, pathname):
        """Return paths matching the shell-style pattern *pathname*."""

    @abc.abstractmethod
    def isdir(self, pathname):
        """Return True if *pathname* is an existing directory."""

    @abc.abstractmethod
    def access(self, path, mode):
        """Return True if the process may access *path* with *mode*."""

    @abc.abstractmethod
    def NamedTemporaryFile(self, mode, encoding):
        """Return an open named temporary file object."""


class RealFileSystem(FileSystem):
    """FileSystem implementation backed by the real OS (thin stdlib wrappers)."""

    def glob(self, pathname):
        return glob.glob(pathname)

    def isdir(self, pathname):
        return path.isdir(pathname)

    def access(self, path, mode):
        return os.access(path, mode)

    def NamedTemporaryFile(self, mode, encoding):
        return tempfile.NamedTemporaryFile(mode=mode, encoding=encoding)


# module-level singleton used by all the hook helpers below
fs = RealFileSystem()


def _run_dir(hook_dir, log, args=''):
    """Execute every executable file in *hook_dir* (sorted), passing *args*.

    Subdirectories are skipped; non-executable files and non-zero exit codes
    are reported through *log*.
    """
    if not fs.isdir(hook_dir):
        return None

    for script in sorted(fs.glob(hook_dir + "/*")):
        if fs.isdir(script):
            continue

        if not fs.access(script, os.X_OK):
            log.error("!!! %s is not executable", script)
            continue

        cmdline = f'{script} {args}'
        completed = subprocess.run(cmdline, shell=True)  # todo change args to a list, shell=False
        if completed.returncode != 0:
            log.error("!!! %s did not exit cleanly: %d", cmdline, completed.returncode)


class TransactionInfo(metaclass=abc.ABCMeta):
    """Abstract view of a transaction, yielding its package members."""

    @abc.abstractmethod
    def getMembers(self):
        """Return an iterable of transaction members (objects with .name)."""


class DnfTransactionInfo(TransactionInfo):
    """TransactionInfo adapter over a dnf transaction object."""

    def __init__(self, transaction) -> None:
        self.transaction = transaction

    def getMembers(self):
        # the dnf transaction is itself iterable over its members
        return self.transaction


def _run_pkg_dirs(base_dir, log, slot, tinfo):
    """Run per-package and wildcard hook directories for a plugin *slot*.

    :param str base_dir:
    :param logging.Logger log:
    :param str slot:
    :param TransactionInfo tinfo:
    """

    multi_path = path.join(base_dir, 'multi_pkgs', slot)
    matchers = _make_dir_matchers(multi_path)
    matched_wildcards = {}

    with fs.NamedTemporaryFile(mode='w', encoding='utf-8') as pkg_list_file:
        seen = {}
        for member in sorted(set(tinfo.getMembers()), key=lambda m: m.name):
            pkg_name = member.name
            if pkg_name in seen:
                continue

            seen[pkg_name] = 1

            pkg_list_file.write(pkg_name + "\n")

            # exact-name hooks: <base>/pkgs/<pkg>/<slot>
            _run_dir(path.join(base_dir, 'pkgs', pkg_name, slot), log)

            # remember which wildcard dirs matched at least one package
            for wildcard_dir, matcher in matchers.items():
                if matcher.search(pkg_name):
                    matched_wildcards[wildcard_dir] = 1

        # the file may be used by a subprocess, so make sure it is flushed to kernel
        pkg_list_file.flush()

        for wildcard_dir in matched_wildcards:
            _run_dir(path.join(multi_path, wildcard_dir), log,
                     "--pkg_list=" + pkg_list_file.name)


def _make_dir_matchers(wc_slot_dir):
    """Map each subdirectory name under *wc_slot_dir* to its wildcard regex."""
    return {
        name: _regex_from_dir(name)
        for name in (path.basename(path.normpath(p))
                     for p in fs.glob(wc_slot_dir + "/*")
                     if fs.isdir(p))
    }


def _regex_from_dir(path):
    expr = path.replace("__WILDCARD__", ".*")
    return re.compile("^" + expr + "$")
download.py000064400000030052150402642250006725 0ustar00# download.py, supplies the 'download' command.
#
# Copyright (C) 2013-2015  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger
from dnf.cli.option_parser import OptionParser

import dnf
import dnf.cli
import dnf.exceptions
import dnf.i18n
import dnf.subject
import dnf.util
import hawkey
import itertools
import os
import shutil


@dnf.plugin.register_command
class DownloadCommand(dnf.cli.Command):

    aliases = ['download']
    summary = _('Download package to current directory')

    def __init__(self, cli):
        """Create the command; opts/parser are filled in later by the cli."""
        super(DownloadCommand, self).__init__(cli)
        self.opts = None
        self.parser = None

    @staticmethod
    def set_argparser(parser):
        """Register the download command's options on *parser*."""
        add = parser.add_argument
        add('packages', nargs='+',
            help=_('packages to download'))
        add("--source", action='store_true',
            help=_('download the src.rpm instead'))
        add("--debuginfo", action='store_true',
            help=_('download the -debuginfo package instead'))
        add("--debugsource", action='store_true',
            help=_('download the -debugsource package instead'))
        add("--arch", '--archlist', dest='arches', default=[],
            action=OptionParser._SplitCallback, metavar='[arch]',
            help=_("limit  the  query to packages of given architectures."))
        add('--resolve', action='store_true',
            help=_('resolve and download needed dependencies'))
        add('--alldeps', action='store_true',
            help=_('when running with --resolve, download all dependencies '
                   '(do not exclude already installed ones)'))
        add('--url', '--urls', action='store_true', dest='url',
            help=_('print list of urls where the rpms '
                   'can be downloaded instead of downloading'))
        add('--urlprotocols', action='append',
            choices=['http', 'https', 'rsync', 'ftp'],
            default=[],
            help=_('when running with --url, '
                   'limit to specific protocols'))

    def configure(self):
        """Set cli demands and enable any extra repos the request needs."""
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True
        # with --resolve --alldeps, already-installed deps must stay visible
        if self.opts.resolve and self.opts.alldeps:
            demands.load_system_repo = False

        if self.opts.source:
            self.base.repos.enable_source_repos()
        if self.opts.debuginfo or self.opts.debugsource:
            self.base.repos.enable_debug_repos()

        # download into --destdir when given, otherwise the current directory
        if self.opts.destdir:
            self.base.conf.destdir = self.opts.destdir
        else:
            self.base.conf.destdir = dnf.i18n.ucd(os.getcwd())

    def run(self):
        """Execute the util action here."""

        wants_special = (self.opts.source or self.opts.debuginfo
                         or self.opts.debugsource)
        if not wants_special:
            pkgs = self._get_pkg_objs_rpms(self.opts.packages)
        else:
            pkgs = []
            if self.opts.source:
                pkgs.extend(self._get_pkg_objs_source(self.opts.packages))
            if self.opts.debuginfo:
                pkgs.extend(self._get_pkg_objs_debuginfo(self.opts.packages))
            if self.opts.debugsource:
                pkgs.extend(self._get_pkg_objs_debugsource(self.opts.packages))

        if not self.opts.url:
            self._do_downloads(pkgs)  # download rpms
            return

        # user asked for just urls: print them and we're done
        for pkg in pkgs:
            # command line repo packages do not have .remote_location
            if pkg.repoid == hawkey.CMDLINE_REPO_NAME:
                continue
            url = pkg.remote_location(schemes=self.opts.urlprotocols)
            if url:
                print(url)
                continue
            msg = _("Failed to get mirror for package: %s") % pkg.name
            if self.base.conf.strict:
                raise dnf.exceptions.Error(msg)
            logger.warning(msg)

    def _do_downloads(self, pkgs):
        """
        Perform the download for a list of packages
        """
        # group candidate packages by full NEVRA string
        by_nevra = {}
        for pkg in pkgs:
            by_nevra.setdefault(str(pkg), []).append(pkg)

        to_download = []
        cmdline = []
        for candidates in by_nevra.values():
            local = [p for p in candidates
                     if p.repoid == hawkey.CMDLINE_REPO_NAME]
            if local:
                cmdline.append(local[0])
                continue
            # pick the best repo for each NEVRA: lowest priority, then cost
            candidates.sort(key=lambda x: (x.repo.priority, x.repo.cost))
            to_download.append(candidates[0])
        if to_download:
            self.base.download_packages(to_download, self.base.output.progress)
        # command line repo packages are either local files or already
        # downloaded urls - just copy them to the destination
        for pkg in cmdline:
            src = pkg.localPkg()
            dst = os.path.join(self.base.conf.destdir, os.path.basename(src))
            # python<3.4 shutil module does not raise SameFileError, check manually
            if os.path.exists(dst) and os.path.samefile(src, dst):
                continue
            shutil.copy(src, self.base.conf.destdir)
        return sorted(pkg.localPkg() for pkg in to_download + cmdline)

    def _get_pkg_objs_rpms(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the rpms
        to download.
        """
        fetch = (self._get_packages_with_deps if self.opts.resolve
                 else self._get_packages)
        return fetch(pkg_specs)

    def _get_pkg_objs_source(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the source
        rpms to download.
        """
        binary_pkgs = self._get_pkg_objs_rpms(pkg_specs)
        srpm_names = self._get_source_packages(binary_pkgs)
        return set(self._get_packages(srpm_names, source=True))

    def _get_pkg_objs_debuginfo(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the debuginfo
        rpms to download.
        """
        result = set()
        avail_q = self.base.sack.query().available()

        for pkg in self._get_packages(pkg_specs):
            # try the -debuginfo name first, then the source debug name;
            # take the first that has any matching packages
            for candidate_name in (pkg.debug_name, pkg.source_debug_name):
                matches = avail_q.filter(
                    name=candidate_name,
                    epoch=int(pkg.epoch),
                    version=pkg.version,
                    release=pkg.release,
                    arch=pkg.arch
                )

                if not matches:
                    continue

                result.update(matches)
                break

        return result

    def _get_pkg_objs_debugsource(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the debugsource
        rpms to download.
        """
        result = set()
        avail_q = self.base.sack.query().available()

        for pkg in self._get_packages(pkg_specs):
            result.update(avail_q.filter(
                name=pkg.debugsource_name,
                epoch=int(pkg.epoch),
                version=pkg.version,
                release=pkg.release,
                arch=pkg.arch
            ))

        return result

    def _get_packages(self, pkg_specs, source=False):
        """Return packages matching any of pkg_specs.

        Specs that match nothing are logged; with conf.strict the first
        miss aborts with dnf.exceptions.Error.
        """
        query_func = self._get_query_source if source else self._get_query
        matched = []
        for spec in pkg_specs:
            try:
                matched.extend(query_func(spec))
            except dnf.exceptions.PackageNotFoundError as err:
                logger.error(dnf.i18n.ucd(err))
                if self.base.conf.strict:
                    logger.error(_("Exiting due to strict setting."))
                    raise dnf.exceptions.Error(err)
        return matched

    def _get_packages_with_deps(self, pkg_specs, source=False):
        """Get packages matching pkg_specs and the deps."""
        # NOTE(review): the `source` parameter is accepted but never used;
        # the initial match is always against binary packages — confirm
        # whether it should be forwarded to _get_packages().
        pkgs = self._get_packages(pkg_specs)
        pkg_set = set(pkgs)
        for pkg in pkgs:
            # Resolve each package's dependency closure with a throw-away
            # hawkey goal; installs and upgrades both count as deps.
            goal = hawkey.Goal(self.base.sack)
            goal.install(pkg)
            rc = goal.run()
            if rc:
                pkg_set.update(goal.list_installs())
                pkg_set.update(goal.list_upgrades())
            else:
                msg = [_('Error in resolve of packages:')]
                logger.error("\n    ".join(msg + [str(pkg) for pkg in pkgs]))
                logger.error(dnf.util._format_resolve_problems(goal.problem_rules()))
                raise dnf.exceptions.Error()
        return pkg_set

    @staticmethod
    def _get_source_packages(pkgs):
        """Get list of source rpm names for a list of packages."""
        source_pkgs = set()
        for pkg in pkgs:
            if pkg.sourcerpm:
                source_pkgs.add(pkg.sourcerpm)
                logger.debug('  --> Package : %s Source : %s',
                             str(pkg), pkg.sourcerpm)
            elif pkg.arch == 'src':
                source_pkgs.add("%s-%s.src.rpm" % (pkg.name, pkg.evr))
            else:
                logger.info(_("No source rpm defined for %s"), str(pkg))
        return list(source_pkgs)

    def _get_query(self, pkg_spec):
        """Return a query matching pkg_spec (a spec string, URL or local rpm)."""
        scheme = dnf.pycomp.urlparse.urlparse(pkg_spec)[0]
        if scheme in ('http', 'ftp', 'file', 'https') or (
                pkg_spec.endswith('.rpm') and os.path.isfile(pkg_spec)):
            # Remote URL or local rpm file: register it and query by object.
            pkgs = self.base.add_remote_rpms([pkg_spec], progress=self.base.output.progress)
            return self.base.sack.query().filterm(pkg=pkgs)
        query = dnf.subject.Subject(pkg_spec).get_best_query(
            self.base.sack, with_src=self.opts.source)
        query = query.available()
        query = query.filterm(latest_per_arch_by_priority=True)
        if self.opts.arches:
            query = query.filter(arch=self.opts.arches)
        if not query.run():
            raise dnf.exceptions.PackageNotFoundError(
                _("No package %s available.") % (pkg_spec))
        return query

    def _get_query_source(self, pkg_spec):
        """Return a query matching a source rpm file name."""
        spec = pkg_spec[:-4]  # drop the trailing '.rpm'
        subject = dnf.subject.Subject(spec)
        for nevra in subject.get_nevra_possibilities():
            candidates = nevra.to_query(self.base.sack).available()
            if candidates:
                return candidates.latest()

        raise dnf.exceptions.PackageNotFoundError(
            _("No package %s available.") % (spec))
system_upgrade.py000064400000064251150402642250010161 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (c) 2015-2020 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Will Woods <wwoods@redhat.com>

"""system_upgrade.py - DNF plugin to handle major-version system upgrades."""

from subprocess import call, Popen, check_output, CalledProcessError
import json
import os
import os.path
import re
import sys
import uuid

from systemd import journal

from dnfpluginscore import _, logger

import dnf
import dnf.cli
from dnf.cli import CliError
from dnf.i18n import ucd
import dnf.transaction
from dnf.transaction_sr import serialize_transaction, TransactionReplay

import libdnf.conf


# Translators: This string is only used in unit tests.
_("the color of the sky")

# Journal MESSAGE_IDs used to tag each phase of the upgrade so that
# previous upgrade attempts can be located in the journal later.
DOWNLOAD_FINISHED_ID = uuid.UUID('9348174c5cc74001a71ef26bd79d302e')
REBOOT_REQUESTED_ID = uuid.UUID('fef1cc509d5047268b83a3a553f54b43')
UPGRADE_STARTED_ID = uuid.UUID('3e0a5636d16b4ca4bbe5321d06c6aa62')
UPGRADE_FINISHED_ID = uuid.UUID('8cec00a1566f4d3594f116450395f06c')

# Marker used by find_boots()/list_logs() to recognize upgrade boots.
ID_TO_IDENTIFY_BOOTS = UPGRADE_STARTED_ID

PLYMOUTH = '/usr/bin/plymouth'

RELEASEVER_MSG = _(
    "Need a --releasever greater than the current system version.")
DOWNLOAD_FINISHED_MSG = _(  # Translators: do not change "reboot" here
    "Download complete! Use 'dnf {command} reboot' to start the upgrade.\n"
    "To remove cached metadata and transaction use 'dnf {command} clean'")
CANT_RESET_RELEASEVER = _(
    "Sorry, you need to use 'download --releasever' instead of '--network'")

# Bumped whenever the State JSON layout changes; mismatches force a
# fresh 'download' run.
STATE_VERSION = 2

# --- Miscellaneous helper functions ------------------------------------------


def reboot():
    """Reboot via systemd, unless DNF_SYSTEM_UPGRADE_NO_REBOOT is set."""
    # Any non-empty value of the environment variable suppresses the
    # reboot (useful for tests and debugging).
    if not os.getenv("DNF_SYSTEM_UPGRADE_NO_REBOOT", default=False):
        Popen(["systemctl", "reboot"])
    else:
        logger.info(_("Reboot turned off, not rebooting."))


def get_url_from_os_release():
    """Return the UPGRADE_GUIDE_URL value from os-release, or None.

    Checks /etc/os-release first, then /usr/lib/os-release; unreadable
    files are skipped silently.
    """
    key = "UPGRADE_GUIDE_URL="
    for path in ("/etc/os-release", "/usr/lib/os-release"):
        try:
            with open(path) as release_file:
                for raw_line in release_file:
                    entry = raw_line.strip()
                    if entry.startswith(key):
                        return entry[len(key):].strip('"')
        except IOError:
            continue
    return None


# DNF-FIXME: dnf.util.clear_dir() doesn't delete regular files :/
def clear_dir(path, ignore=None):
    """Remove every entry inside *path*, files and directories alike.

    :param path: directory to empty; a non-directory path is a no-op
    :param ignore: optional collection of full paths to preserve

    Fix: the original used a mutable default argument (``ignore=[]``);
    replaced with the ``None`` sentinel idiom. Behavior is unchanged.
    """
    if ignore is None:
        ignore = []
    if not os.path.isdir(path):
        return

    for entry in os.listdir(path):
        fullpath = os.path.join(path, entry)
        if fullpath in ignore:
            continue
        try:
            if os.path.isdir(fullpath):
                dnf.util.rm_rf(fullpath)
            else:
                os.unlink(fullpath)
        except OSError:
            # Best-effort cleanup: skip entries we cannot remove.
            pass


def check_release_ver(conf, target=None):
    """Raise CliError unless conf.releasever is a genuine upgrade target."""
    current = dnf.rpm.detect_releasever(conf.installroot)
    if current == conf.releasever:
        raise CliError(RELEASEVER_MSG)
    if target and target != conf.releasever:
        # it's too late to set releasever here, so this can't work.
        # (see https://bugzilla.redhat.com/show_bug.cgi?id=1212341)
        raise CliError(CANT_RESET_RELEASEVER)


def disable_blanking():
    """Disable console screen blanking so upgrade progress stays visible.

    Fix: the original opened /dev/tty0 without ever closing it, leaking
    the file object; a ``with`` block now guarantees the close. The
    escape sequence ``\\33[9;0]`` sets the console blank interval to 0.
    """
    try:
        with open('/dev/tty0', 'wb') as tty:
            tty.write(b'\33[9;0]')
    except Exception as e:
        print(_("Screen blanking can't be disabled: %s") % e)

# --- State object - for tracking upgrade state between runs ------------------


# DNF-INTEGRATION-NOTE: basically the same thing as dnf.persistor.JSONDB
class State(object):
    """Persist upgrade state as a JSON file between plugin invocations.

    Acts as a context manager: leaving the ``with`` block without an
    exception writes the accumulated changes back to disk.
    """

    def __init__(self, statefile):
        self.statefile = statefile
        self._data = {}
        self._read()

    def _read(self):
        try:
            with open(self.statefile) as handle:
                self._data = json.load(handle)
        except IOError:
            # No state file yet: start with empty state.
            self._data = {}
        except ValueError:
            self._data = {}
            logger.warning(_("Failed loading state file: %s, continuing with "
                             "empty state."), self.statefile)

    def write(self):
        dnf.util.ensure_dir(os.path.dirname(self.statefile))
        with open(self.statefile, 'w') as handle:
            json.dump(self._data, handle, indent=4, sort_keys=True)

    def clear(self):
        if os.path.exists(self.statefile):
            os.unlink(self.statefile)
        self._read()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Persist only on a clean exit from the with-block.
        if exc_type is None:
            self.write()

    # helper function for creating properties. pylint: disable=protected-access
    def _prop(option):  # pylint: disable=no-self-argument
        def _set(self, value):
            self._data[option] = value

        def _get(self):
            return self._data.get(option)
        return property(_get, _set)

    #  !!! Increase STATE_VERSION for any changes in data structure like a new property or a new
    #  data structure !!!
    state_version = _prop("state_version")
    download_status = _prop("download_status")
    destdir = _prop("destdir")
    target_releasever = _prop("target_releasever")
    system_releasever = _prop("system_releasever")
    gpgcheck = _prop("gpgcheck")
    # list of repos with gpgcheck=True
    gpgcheck_repos = _prop("gpgcheck_repos")
    # list of repos with repo_gpgcheck=True
    repo_gpgcheck_repos = _prop("repo_gpgcheck_repos")
    upgrade_status = _prop("upgrade_status")
    upgrade_command = _prop("upgrade_command")
    distro_sync = _prop("distro_sync")
    enable_disable_repos = _prop("enable_disable_repos")
    module_platform_id = _prop("module_platform_id")

# --- Plymouth output helpers -------------------------------------------------


class PlymouthOutput(object):
    """Helper that talks to plymouth via its command-line binary.

    Repeated identical calls are filtered out, and once the binary cannot
    be reached no further calls are attempted (except --ping).
    """

    def __init__(self):
        self.alive = True
        self._last_args = dict()
        self._last_msg = None

    def _plymouth(self, cmd, *args):
        is_repeat = (self._last_args.get(cmd) == args)
        if cmd == '--ping' or (self.alive and not is_repeat):
            try:
                self.alive = (call((PLYMOUTH, cmd) + args) == 0)
            except OSError:
                self.alive = False
            self._last_args[cmd] = args
        return self.alive

    def ping(self):
        return self._plymouth("--ping")

    def message(self, msg):
        if self._last_msg and self._last_msg != msg:
            # Take down the previously displayed text first.
            self._plymouth("hide-message", "--text", self._last_msg)
        self._last_msg = msg
        return self._plymouth("display-message", "--text", msg)

    def set_mode(self):
        mode = 'updates'
        try:
            help_text = check_output([PLYMOUTH, '--help'])
            if re.search('--system-upgrade', ucd(help_text)):
                mode = 'system-upgrade'
        except (CalledProcessError, OSError):
            pass
        return self._plymouth("change-mode", "--" + mode)

    def progress(self, percent):
        return self._plymouth("system-update", "--progress", str(percent))


# Module-level singleton: all plymouth interaction funnels through this
# one PlymouthOutput so its duplicate-call filtering works globally.
Plymouth = PlymouthOutput()


# A TransactionProgress class that updates plymouth for us.
class PlymouthTransactionProgress(dnf.callback.TransactionProgress):
    """TransactionProgress callback that mirrors rpm progress to plymouth."""

    # pylint: disable=too-many-arguments
    def progress(self, package, action, ti_done, ti_total, ts_done, ts_total):
        self._update_plymouth(package, action, ts_done, ts_total)

    def _update_plymouth(self, package, action, current, total):
        # Prevents quick jumps of progressbar when pretrans scriptlets
        # and TRANS_PREPARATION are reported as 1/1
        if total == 1:
            return
        # Verification goes through all the packages again,
        # which resets the "current" param value, this prevents
        # resetting of the progress bar as well. (Rhbug:1809096)
        if action == dnf.callback.PKG_VERIFY:
            Plymouth.progress(90 + int(10.0 * current / total))
        else:
            Plymouth.progress(int(90.0 * current / total))

        Plymouth.message(self._fmt_event(package, action, current, total))

    def _fmt_event(self, package, action, current, total):
        label = dnf.transaction.ACTIONS.get(action, action)
        return "[%d/%d] %s %s..." % (current, total, label, package)

# --- journal helpers -------------------------------------------------


def find_boots(message_id):
    """Yield one journal entry per boot containing this message id."""
    reader = journal.Reader()
    reader.add_match(MESSAGE_ID=message_id.hex,  # identify the message
                     _UID=0)                     # prevent spoofing of logs

    previous_boot = None
    for entry in reader:
        current_boot = entry['_BOOT_ID']
        if current_boot != previous_boot:
            # First entry of a new boot: report it, skip the rest.
            previous_boot = current_boot
            yield entry


def list_logs():
    """Print a numbered list of boots that contain upgrade logs."""
    print(_('The following boots appear to contain upgrade logs:'))
    # n stays -1 if find_boots() yields nothing, triggering the
    # "no logs" message below.
    n = -1
    for n, entry in enumerate(find_boots(ID_TO_IDENTIFY_BOOTS)):
        print('{} / {.hex}: {:%Y-%m-%d %H:%M:%S} {}→{}'.format(
            n + 1,
            entry['_BOOT_ID'],
            entry['__REALTIME_TIMESTAMP'],
            entry.get('SYSTEM_RELEASEVER', '??'),
            entry.get('TARGET_RELEASEVER', '??')))
    if n == -1:
        print(_('-- no logs were found --'))


def pick_boot(message_id, n):
    """Return the _BOOT_ID of the n-th boot matching message_id."""
    boots = list(find_boots(message_id))
    # Positive indices index all found boots starting with 1 and going forward,
    # zero is the current boot, and -1, -2, -3 are previous going backwards.
    # This is the same as journalctl.
    try:
        if n == 0:
            raise IndexError
        index = n - 1 if n > 0 else n
        return boots[index]['_BOOT_ID']
    except IndexError:
        raise CliError(_("Cannot find logs with this index."))


def show_log(n):
    """Show the journal of the n-th upgrade boot via journalctl."""
    boot_id = pick_boot(ID_TO_IDENTIFY_BOOTS, n)
    proc = Popen(['journalctl', '--boot', boot_id.hex])
    proc.wait()
    # journalctl exits 1 when the boot id matches nothing.
    if proc.returncode == 1:
        raise dnf.exceptions.Error(_("Unable to match systemd journal entry"))


CMDS = ['download', 'clean', 'reboot', 'upgrade', 'log']

# --- The actual Plugin and Command objects! ----------------------------------


class SystemUpgradePlugin(dnf.Plugin):
    """Registers the system-upgrade family of CLI commands."""

    name = 'system-upgrade'

    def __init__(self, base, cli):
        super(SystemUpgradePlugin, self).__init__(base, cli)
        if cli:
            for command in (SystemUpgradeCommand,
                            OfflineUpgradeCommand,
                            OfflineDistrosyncCommand):
                cli.register_command(command)


class SystemUpgradeCommand(dnf.cli.Command):
    """`dnf system-upgrade` - prepare and run a major-release upgrade.

    Work is split across subcommands: 'download' runs online and stores a
    serialized transaction plus state under DATADIR; 'reboot' schedules
    the offline phase via the /system-update magic symlink; 'upgrade'
    replays the transaction during the system-update boot; 'clean'
    removes cached data; 'log' inspects journal entries from previous
    upgrade attempts.
    """

    aliases = ('system-upgrade', 'fedup',)
    summary = _("Prepare system for upgrade to a new release")

    DATADIR = 'var/lib/dnf/system-upgrade'

    def __init__(self, cli):
        super(SystemUpgradeCommand, self).__init__(cli)
        # All paths are rooted in installroot so chroot installs work.
        self.datadir = os.path.join(cli.base.conf.installroot, self.DATADIR)
        self.transaction_file = os.path.join(self.datadir, 'system-upgrade-transaction.json')
        self.magic_symlink = os.path.join(cli.base.conf.installroot, 'system-update')

        self.state = State(os.path.join(self.datadir, 'system-upgrade-state.json'))

    @staticmethod
    def set_argparser(parser):
        parser.add_argument("--no-downgrade", dest='distro_sync',
                            action='store_false',
                            help=_("keep installed packages if the new "
                                   "release's version is older"))
        parser.add_argument('tid', nargs=1, choices=CMDS,
                            metavar="[%s]" % "|".join(CMDS))
        parser.add_argument('--number', type=int, help=_('which logs to show'))

    def log_status(self, message, message_id):
        """Log directly to the journal."""
        journal.send(message,
                     MESSAGE_ID=message_id,
                     PRIORITY=journal.LOG_NOTICE,
                     SYSTEM_RELEASEVER=self.state.system_releasever,
                     TARGET_RELEASEVER=self.state.target_releasever,
                     DNF_VERSION=dnf.const.VERSION)

    def pre_configure(self):
        self._call_sub("check")
        self._call_sub("pre_configure")

    def configure(self):
        self._call_sub("configure")

    def run(self):
        self._call_sub("run")

    def run_transaction(self):
        self._call_sub("transaction")

    def run_resolved(self):
        self._call_sub("resolved")

    def _call_sub(self, name):
        """Dispatch to the '<name>_<subcommand>' method if it exists."""
        subfunc = getattr(self, name + '_' + self.opts.tid[0], None)
        if callable(subfunc):
            subfunc()

    def _check_state_version(self, command):
        # Refuse to continue with state written by an incompatible plugin
        # version (STATE_VERSION is bumped on data-structure changes).
        if self.state.state_version != STATE_VERSION:
            msg = _("Incompatible version of data. Rerun 'dnf {command} download [OPTIONS]'"
                    "").format(command=command)
            raise CliError(msg)

    def _set_cachedir(self):
        # set download directories from json state file
        self.base.conf.cachedir = self.datadir
        self.base.conf.destdir = self.state.destdir if self.state.destdir else None

    def _get_forward_reverse_pkg_reason_pairs(self):
        """
        forward = {repoid:{pkg_nevra: {tsi.action: tsi.reason}}
        reverse = {pkg_nevra: {tsi.action: tsi.reason}}
        :return: forward, reverse
        """
        # NOTE(review): relies on libdnf.transaction being reachable even
        # though only libdnf.conf is imported at module scope — confirm.
        backward_action = set(dnf.transaction.BACKWARD_ACTIONS + [libdnf.transaction.TransactionItemAction_REINSTALLED])
        forward_actions = set(dnf.transaction.FORWARD_ACTIONS)

        forward = {}
        reverse = {}
        for tsi in self.cli.base.transaction:
            if tsi.action in forward_actions:
                pkg = tsi.pkg
                forward.setdefault(pkg.repo.id, {}).setdefault(
                    str(pkg), {})[tsi.action] = tsi.reason
            elif tsi.action in backward_action:
                reverse.setdefault(str(tsi.pkg), {})[tsi.action] = tsi.reason
        return forward, reverse

    # == pre_configure_*: set up action-specific demands ==========================
    def pre_configure_download(self):
        # only download subcommand accepts --destdir command line option
        self.base.conf.cachedir = self.datadir
        self.base.conf.destdir = self.opts.destdir if self.opts.destdir else None
        if 'offline-distrosync' == self.opts.command and not self.opts.distro_sync:
            raise CliError(
                _("Command 'offline-distrosync' cannot be used with --no-downgrade option"))
        elif 'offline-upgrade' == self.opts.command:
            self.opts.distro_sync = False

    def pre_configure_reboot(self):
        self._set_cachedir()

    def pre_configure_upgrade(self):
        self._set_cachedir()
        if self.state.enable_disable_repos:
            self.opts.repos_ed = self.state.enable_disable_repos
        self.base.conf.releasever = self.state.target_releasever

    def pre_configure_clean(self):
        self._set_cachedir()

    # == configure_*: set up action-specific demands ==========================

    def configure_download(self):
        if 'system-upgrade' == self.opts.command or 'fedup' == self.opts.command:
            logger.warning(_('WARNING: this operation is not supported on the RHEL distribution. '
                             'Proceed at your own risk.'))
            help_url = get_url_from_os_release()
            if help_url:
                msg = _('Additional information for System Upgrade: {}')
                logger.info(msg.format(ucd(help_url)))
            if self.base._promptWanted():
                msg = _('Before you continue ensure that your system is fully upgraded by running '
                        '"dnf --refresh upgrade". Do you want to continue')
                if self.base.conf.assumeno or not self.base.output.userconfirm(
                        msg='{} [y/N]: '.format(msg), defaultyes_msg='{} [Y/n]: '.format(msg)):
                    logger.error(_("Operation aborted."))
                    sys.exit(1)
            check_release_ver(self.base.conf, target=self.opts.releasever)
        elif 'offline-upgrade' == self.opts.command:
            self.cli._populate_update_security_filter(self.opts)

        self.cli.demands.root_user = True
        self.cli.demands.resolving = True
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True
        self.cli.demands.freshest_metadata = True
        # We want to do the depsolve / download / transaction-test, but *not*
        # run the actual RPM transaction to install the downloaded packages.
        # Setting the "test" flag makes the RPM transaction a test transaction,
        # so nothing actually gets installed.
        # (It also means that we run two test transactions in a row, which is
        # kind of silly, but that's something for DNF to fix...)
        self.base.conf.tsflags += ["test"]

    def configure_reboot(self):
        # FUTURE: add a --debug-shell option to enable debug shell:
        # systemctl add-wants system-update.target debug-shell.service
        self.cli.demands.root_user = True

    def configure_upgrade(self):
        # same as the download, but offline and non-interactive. so...
        self.cli.demands.root_user = True
        self.cli.demands.resolving = True
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True
        # use the saved value for --allowerasing, etc.
        self.opts.distro_sync = self.state.distro_sync
        if self.state.gpgcheck is not None:
            self.base.conf.gpgcheck = self.state.gpgcheck
        if self.state.gpgcheck_repos is not None:
            for repo in self.base.repos.values():
                repo.gpgcheck = repo.id in self.state.gpgcheck_repos
        if self.state.repo_gpgcheck_repos is not None:
            for repo in self.base.repos.values():
                repo.repo_gpgcheck = repo.id in self.state.repo_gpgcheck_repos
        self.base.conf.module_platform_id = self.state.module_platform_id
        # don't try to get new metadata, 'cuz we're offline
        self.cli.demands.cacheonly = True
        # and don't ask any questions (we confirmed all this beforehand)
        self.base.conf.assumeyes = True
        self.cli.demands.transaction_display = PlymouthTransactionProgress()
        # upgrade operation already removes all element that must be removed. Additional removal
        # could trigger unwanted changes in transaction.
        self.base.conf.clean_requirements_on_remove = False
        self.base.conf.install_weak_deps = False

    def configure_clean(self):
        self.cli.demands.root_user = True

    def configure_log(self):
        pass

    # == check_*: do any action-specific checks ===============================

    def check_reboot(self):
        """Verify that a complete, matching download exists before reboot."""
        if not self.state.download_status == 'complete':
            raise CliError(_("system is not ready for upgrade"))
        self._check_state_version(self.opts.command)
        if self.state.upgrade_command != self.opts.command:
            msg = _("the transaction was not prepared for '{command}'. "
                    "Rerun 'dnf {command} download [OPTIONS]'").format(command=self.opts.command)
            raise CliError(msg)
        if os.path.lexists(self.magic_symlink):
            raise CliError(_("upgrade is already scheduled"))
        dnf.util.ensure_dir(self.datadir)
        # FUTURE: checkRPMDBStatus(self.state.download_transaction_id)

    def check_upgrade(self):
        """Verify we were triggered by our own magic symlink; exit quietly otherwise."""
        if not os.path.lexists(self.magic_symlink):
            logger.info(_("trigger file does not exist. exiting quietly."))
            raise SystemExit(0)
        if os.readlink(self.magic_symlink) != self.datadir:
            logger.info(_("another upgrade tool is running. exiting quietly."))
            raise SystemExit(0)
        # Delete symlink ASAP to avoid reboot loops
        dnf.yum.misc.unlink_f(self.magic_symlink)
        command = self.state.upgrade_command
        if not command:
            command = self.opts.command
        self._check_state_version(command)
        if not self.state.upgrade_status == 'ready':
            msg = _("use 'dnf {command} reboot' to begin the upgrade").format(command=command)
            raise CliError(msg)

    # == run_*: run the action/prep the transaction ===========================

    def run_prepare(self):
        # make the magic symlink
        os.symlink(self.datadir, self.magic_symlink)
        # set upgrade_status so that the upgrade can run
        with self.state as state:
            state.upgrade_status = 'ready'

    def run_reboot(self):
        self.run_prepare()

        # only the explicit 'reboot' subcommand actually reboots
        if not self.opts.tid[0] == "reboot":
            return

        self.log_status(_("Rebooting to perform upgrade."),
                        REBOOT_REQUESTED_ID)
        reboot()

    def run_download(self):
        # Mark everything in the world for upgrade/sync
        if self.opts.distro_sync:
            self.base.distro_sync()
        else:
            self.base.upgrade_all()

        if self.opts.command not in ['offline-upgrade', 'offline-distrosync']:
            # Mark all installed groups and environments for upgrade
            self.base.read_comps()
            installed_groups = [g.id for g in self.base.comps.groups if self.base.history.group.get(g.id)]
            if installed_groups:
                self.base.env_group_upgrade(installed_groups)
            installed_environments = [g.id for g in self.base.comps.environments if self.base.history.env.get(g.id)]
            if installed_environments:
                self.base.env_group_upgrade(installed_environments)

        with self.state as state:
            state.download_status = 'downloading'
            state.target_releasever = self.base.conf.releasever
            state.destdir = self.base.conf.destdir

    def run_upgrade(self):
        # change the upgrade status (so we can detect crashed upgrades later)
        command = ''
        with self.state as state:
            state.upgrade_status = 'incomplete'
            command = state.upgrade_command
        if command == 'offline-upgrade':
            msg = _("Starting offline upgrade. This will take a while.")
        elif command == 'offline-distrosync':
            msg = _("Starting offline distrosync. This will take a while.")
        else:
            msg = _("Starting system upgrade. This will take a while.")

        self.log_status(msg, UPGRADE_STARTED_ID)

        # reset the splash mode and let the user know we're running
        Plymouth.set_mode()
        Plymouth.progress(0)
        Plymouth.message(msg)

        # disable screen blanking
        disable_blanking()

        # replay the transaction that 'download' serialized earlier
        self.replay = TransactionReplay(self.base, self.transaction_file)
        self.replay.run()

    def run_clean(self):
        logger.info(_("Cleaning up downloaded data..."))
        # Don't delete persistor, it contains paths for downloaded packages
        # that are used by dnf during finalizing base to clean them up
        clear_dir(self.base.conf.cachedir,
                  [dnf.persistor.TempfilePersistor(self.base.conf.cachedir).db_path])
        with self.state as state:
            state.download_status = None
            state.state_version = None
            state.upgrade_status = None
            state.upgrade_command = None
            state.destdir = None

    def run_log(self):
        if self.opts.number:
            show_log(self.opts.number)
        else:
            list_logs()

    # == resolved_*: do staff after succesful resolvement =====================

    def resolved_upgrade(self):
        """Adjust transaction reasons according to stored values"""
        self.replay.post_transaction()

    # == transaction_*: do stuff after a successful transaction ===============

    def transaction_download(self):
        """Serialize the depsolved transaction and persist upgrade state."""
        transaction = self.base.history.get_current()

        if not transaction.packages():
            logger.info(_("The system-upgrade transaction is empty, your system is already up-to-date."))
            return

        data = serialize_transaction(transaction)
        try:
            with open(self.transaction_file, "w") as f:
                json.dump(data, f, indent=4, sort_keys=True)
                f.write("\n")

            print(_("Transaction saved to {}.").format(self.transaction_file))

        except OSError as e:
            raise dnf.cli.CliError(_('Error storing transaction: {}').format(str(e)))

        # Okay! Write out the state so the upgrade can use it.
        system_ver = dnf.rpm.detect_releasever(self.base.conf.installroot)
        with self.state as state:
            state.download_status = 'complete'
            state.state_version = STATE_VERSION
            state.distro_sync = self.opts.distro_sync
            state.gpgcheck = self.base.conf.gpgcheck
            state.gpgcheck_repos = [
                repo.id for repo in self.base.repos.values() if repo.gpgcheck]
            state.repo_gpgcheck_repos = [
                repo.id for repo in self.base.repos.values() if repo.repo_gpgcheck]
            state.system_releasever = system_ver
            state.target_releasever = self.base.conf.releasever
            state.module_platform_id = self.base.conf.module_platform_id
            state.enable_disable_repos = self.opts.repos_ed
            state.destdir = self.base.conf.destdir
            state.upgrade_command = self.opts.command

        msg = DOWNLOAD_FINISHED_MSG.format(command=self.opts.command)
        logger.info(msg)
        self.log_status(_("Download finished."), DOWNLOAD_FINISHED_ID)

    def transaction_upgrade(self):
        """Clean up after a finished offline upgrade and reboot into the new system."""
        Plymouth.message(_("Upgrade complete! Cleaning up and rebooting..."))
        self.log_status(_("Upgrade complete! Cleaning up and rebooting..."),
                        UPGRADE_FINISHED_ID)
        self.run_clean()
        if self.opts.tid[0] == "upgrade":
            reboot()


class OfflineUpgradeCommand(SystemUpgradeCommand):
    """`dnf offline-upgrade` - same machinery, upgrade within the release."""
    aliases = ('offline-upgrade',)
    summary = _("Prepare offline upgrade of the system")


class OfflineDistrosyncCommand(SystemUpgradeCommand):
    """`dnf offline-distrosync` - same machinery, distro-sync variant."""
    aliases = ('offline-distrosync',)
    summary = _("Prepare offline distrosync of the system")