author     Brian Harring <ferringb@gmail.com>     2022-12-25 11:26:31 -0800
committer  Arthur Zamarin <arthurzam@gentoo.org>  2022-12-25 21:32:26 +0200
commit     b17af120f9081aa30d6a368fd75a69c298cb70e0 (patch)
tree       3de8bc1fba4fbd004b9b1703e67b1a9feb5ab6fc
parent     Fix secondary exception from when an EBD fails to start. (diff)
download   pkgcore-b17af120f9081aa30d6a368fd75a69c298cb70e0.tar.gz
           pkgcore-b17af120f9081aa30d6a368fd75a69c298cb70e0.tar.bz2
           pkgcore-b17af120f9081aa30d6a368fd75a69c298cb70e0.zip
Reformat w/ black-22.12.0
Signed-off-by: Brian Harring <ferringb@gmail.com>
Signed-off-by: Arthur Zamarin <arthurzam@gentoo.org>
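A sweep of this size is normally produced by running black over the whole tree rather than by hand-editing. As a minimal sketch only — assuming black==22.12.0 (the version named in the commit title) and the top-level paths listed in the diffstat below; the exact invocation used for this commit is not recorded here — the reformat could be reproduced and verified like this:

    # hypothetical reproduction helper, not part of the commit
    import subprocess, sys

    PATHS = ["src/", "tests/", "examples/", "doc/", "py_build.py"]  # paths touched below

    def run_black(check_only: bool = False) -> int:
        cmd = [sys.executable, "-m", "black"]
        if check_only:
            # report what would change without rewriting anything (CI-style check)
            cmd += ["--check", "--diff"]
        return subprocess.call(cmd + PATHS)

    if __name__ == "__main__":
        sys.exit(run_black(check_only="--check" in sys.argv))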
-rw-r--r--  doc/conf.py | 156
-rwxr-xr-x  examples/changed_use.py | 45
-rwxr-xr-x  examples/identify-installed-non-split-debug-pkgs.py | 2
-rwxr-xr-x  examples/pkg_info.py | 35
-rwxr-xr-x  examples/repo_list.py | 20
-rwxr-xr-x  examples/report_pkg_changes.py | 40
-rw-r--r--  py_build.py | 28
-rw-r--r--  src/pkgcore/__init__.py | 4
-rw-r--r--  src/pkgcore/binpkg/remote.py | 129
-rw-r--r--  src/pkgcore/binpkg/repo_ops.py | 23
-rw-r--r--  src/pkgcore/binpkg/repository.py | 98
-rw-r--r--  src/pkgcore/binpkg/xpak.py | 70
-rw-r--r--  src/pkgcore/cache/__init__.py | 56
-rw-r--r--  src/pkgcore/cache/errors.py | 18
-rw-r--r--  src/pkgcore/cache/flat_hash.py | 31
-rw-r--r--  src/pkgcore/cache/fs_template.py | 14
-rw-r--r--  src/pkgcore/config/__init__.py | 25
-rw-r--r--  src/pkgcore/config/basics.py | 274
-rw-r--r--  src/pkgcore/config/central.py | 167
-rw-r--r--  src/pkgcore/config/cparser.py | 4
-rw-r--r--  src/pkgcore/config/domain.py | 17
-rw-r--r--  src/pkgcore/config/errors.py | 46
-rw-r--r--  src/pkgcore/config/hint.py | 33
-rw-r--r--  src/pkgcore/const.py | 38
-rw-r--r--  src/pkgcore/ebuild/atom.py | 476
-rw-r--r--  src/pkgcore/ebuild/conditionals.py | 76
-rw-r--r--  src/pkgcore/ebuild/const.py | 42
-rw-r--r--  src/pkgcore/ebuild/cpv.py | 133
-rw-r--r--  src/pkgcore/ebuild/digest.py | 48
-rw-r--r--  src/pkgcore/ebuild/domain.py | 354
-rw-r--r--  src/pkgcore/ebuild/eapi.py | 716
-rw-r--r--  src/pkgcore/ebuild/ebd.py | 567
-rw-r--r--  src/pkgcore/ebuild/ebd_ipc.py | 428
-rw-r--r--  src/pkgcore/ebuild/ebuild_built.py | 59
-rw-r--r--  src/pkgcore/ebuild/ebuild_src.py | 216
-rw-r--r--  src/pkgcore/ebuild/eclass.py | 373
-rw-r--r--  src/pkgcore/ebuild/eclass_cache.py | 24
-rw-r--r--  src/pkgcore/ebuild/errors.py | 50
-rw-r--r--  src/pkgcore/ebuild/filter_env.py | 234
-rw-r--r--  src/pkgcore/ebuild/formatter.py | 279
-rw-r--r--  src/pkgcore/ebuild/inspect_profile.py | 91
-rw-r--r--  src/pkgcore/ebuild/misc.py | 178
-rw-r--r--  src/pkgcore/ebuild/pkg_updates.py | 52
-rw-r--r--  src/pkgcore/ebuild/portage_conf.py | 464
-rw-r--r--  src/pkgcore/ebuild/portageq.py | 120
-rw-r--r--  src/pkgcore/ebuild/processor.py | 247
-rw-r--r--  src/pkgcore/ebuild/profiles.py | 289
-rw-r--r--  src/pkgcore/ebuild/repo_objs.py | 524
-rw-r--r--  src/pkgcore/ebuild/repository.py | 384
-rw-r--r--  src/pkgcore/ebuild/resolver.py | 63
-rw-r--r--  src/pkgcore/ebuild/restricts.py | 76
-rw-r--r--  src/pkgcore/ebuild/triggers.py | 320
-rw-r--r--  src/pkgcore/exceptions.py | 9
-rw-r--r--  src/pkgcore/fetch/__init__.py | 28
-rw-r--r--  src/pkgcore/fetch/base.py | 22
-rw-r--r--  src/pkgcore/fetch/custom.py | 48
-rw-r--r--  src/pkgcore/fetch/errors.py | 8
-rw-r--r--  src/pkgcore/fs/contents.py | 97
-rw-r--r--  src/pkgcore/fs/fs.py | 116
-rw-r--r--  src/pkgcore/fs/livefs.py | 82
-rw-r--r--  src/pkgcore/fs/ops.py | 43
-rw-r--r--  src/pkgcore/fs/tar.py | 62
-rw-r--r--  src/pkgcore/gpg.py | 15
-rw-r--r--  src/pkgcore/log.py | 2
-rw-r--r--  src/pkgcore/merge/engine.py | 156
-rw-r--r--  src/pkgcore/merge/errors.py | 8
-rw-r--r--  src/pkgcore/merge/triggers.py | 311
-rw-r--r--  src/pkgcore/operations/__init__.py | 23
-rw-r--r--  src/pkgcore/operations/domain.py | 69
-rw-r--r--  src/pkgcore/operations/format.py | 71
-rw-r--r--  src/pkgcore/operations/observer.py | 41
-rw-r--r--  src/pkgcore/operations/regen.py | 6
-rw-r--r--  src/pkgcore/operations/repo.py | 96
-rw-r--r--  src/pkgcore/os_data.py | 15
-rw-r--r--  src/pkgcore/package/base.py | 27
-rw-r--r--  src/pkgcore/package/conditionals.py | 88
-rw-r--r--  src/pkgcore/package/errors.py | 23
-rw-r--r--  src/pkgcore/package/metadata.py | 28
-rw-r--r--  src/pkgcore/package/mutated.py | 16
-rw-r--r--  src/pkgcore/package/virtual.py | 4
-rw-r--r--  src/pkgcore/pkgsets/filelist.py | 19
-rw-r--r--  src/pkgcore/pkgsets/glsa.py | 99
-rw-r--r--  src/pkgcore/pkgsets/installed.py | 13
-rw-r--r--  src/pkgcore/pkgsets/live_rebuild_set.py | 11
-rw-r--r--  src/pkgcore/pkgsets/system.py | 3
-rw-r--r--  src/pkgcore/plugin.py | 105
-rw-r--r--  src/pkgcore/pytest/plugin.py | 164
-rw-r--r--  src/pkgcore/repository/configured.py | 22
-rw-r--r--  src/pkgcore/repository/errors.py | 4
-rw-r--r--  src/pkgcore/repository/filtered.py | 22
-rw-r--r--  src/pkgcore/repository/misc.py | 34
-rw-r--r--  src/pkgcore/repository/multiplex.py | 65
-rw-r--r--  src/pkgcore/repository/prototype.py | 117
-rw-r--r--  src/pkgcore/repository/syncable.py | 2
-rw-r--r--  src/pkgcore/repository/util.py | 22
-rw-r--r--  src/pkgcore/repository/virtual.py | 30
-rw-r--r--  src/pkgcore/repository/wrapper.py | 3
-rw-r--r--  src/pkgcore/resolver/choice_point.py | 25
-rw-r--r--  src/pkgcore/resolver/pigeonholes.py | 3
-rw-r--r--  src/pkgcore/resolver/plan.py | 428
-rw-r--r--  src/pkgcore/resolver/state.py | 89
-rw-r--r--  src/pkgcore/resolver/util.py | 14
-rw-r--r--  src/pkgcore/restrictions/boolean.py | 211
-rw-r--r--  src/pkgcore/restrictions/delegated.py | 2
-rw-r--r--  src/pkgcore/restrictions/packages.py | 85
-rw-r--r--  src/pkgcore/restrictions/required_use.py | 90
-rw-r--r--  src/pkgcore/restrictions/restriction.py | 15
-rw-r--r--  src/pkgcore/restrictions/util.py | 14
-rw-r--r--  src/pkgcore/restrictions/values.py | 232
-rwxr-xr-x  src/pkgcore/scripts/__init__.py | 25
-rw-r--r--  src/pkgcore/scripts/patom.py | 29
-rw-r--r--  src/pkgcore/scripts/pclean.py | 402
-rw-r--r--  src/pkgcore/scripts/pclonecache.py | 24
-rw-r--r--  src/pkgcore/scripts/pconfig.py | 328
-rw-r--r--  src/pkgcore/scripts/pebuild.py | 49
-rw-r--r--  src/pkgcore/scripts/pinspect.py | 233
-rw-r--r--  src/pkgcore/scripts/pmaint.py | 322
-rw-r--r--  src/pkgcore/scripts/pmerge.py | 626
-rw-r--r--  src/pkgcore/scripts/pplugincache.py | 18
-rw-r--r--  src/pkgcore/scripts/pquery.py | 866
-rw-r--r--  src/pkgcore/sync/base.py | 75
-rw-r--r--  src/pkgcore/sync/bzr.py | 8
-rw-r--r--  src/pkgcore/sync/cvs.py | 23
-rw-r--r--  src/pkgcore/sync/darcs.py | 4
-rw-r--r--  src/pkgcore/sync/git.py | 22
-rw-r--r--  src/pkgcore/sync/git_svn.py | 12
-rw-r--r--  src/pkgcore/sync/hg.py | 13
-rw-r--r--  src/pkgcore/sync/http.py | 35
-rw-r--r--  src/pkgcore/sync/rsync.py | 121
-rw-r--r--  src/pkgcore/sync/sqfs.py | 4
-rw-r--r--  src/pkgcore/sync/svn.py | 23
-rw-r--r--  src/pkgcore/sync/tar.py | 44
-rw-r--r--  src/pkgcore/system/libtool.py | 57
-rw-r--r--  src/pkgcore/test/misc.py | 77
-rw-r--r--  src/pkgcore/test/scripts/helpers.py | 17
-rw-r--r--  src/pkgcore/util/commandline.py | 254
-rw-r--r--  src/pkgcore/util/file_type.py | 9
-rw-r--r--  src/pkgcore/util/packages.py | 2
-rw-r--r--  src/pkgcore/util/parserestrict.py | 80
-rw-r--r--  src/pkgcore/util/thread_pool.py | 2
-rw-r--r--  src/pkgcore/vdb/contents.py | 53
-rw-r--r--  src/pkgcore/vdb/ondisk.py | 120
-rw-r--r--  src/pkgcore/vdb/repo_ops.py | 37
-rw-r--r--  tests/cache/test_base.py | 96
-rw-r--r--  tests/cache/test_flat_hash.py | 82
-rw-r--r--  tests/config/test_basics.py | 396
-rw-r--r--  tests/config/test_central.py | 1160
-rw-r--r--  tests/config/test_cparser.py | 70
-rw-r--r--  tests/config/test_init.py | 39
-rw-r--r--  tests/conftest.py | 12
-rw-r--r--  tests/ebuild/test_atom.py | 572
-rw-r--r--  tests/ebuild/test_conditionals.py | 230
-rw-r--r--  tests/ebuild/test_cpv.py | 153
-rw-r--r--  tests/ebuild/test_digest.py | 24
-rw-r--r--  tests/ebuild/test_eapi.py | 36
-rw-r--r--  tests/ebuild/test_ebuild_src.py | 647
-rw-r--r--  tests/ebuild/test_eclass.py | 236
-rw-r--r--  tests/ebuild/test_eclass_cache.py | 38
-rw-r--r--  tests/ebuild/test_filter_env.py | 203
-rw-r--r--  tests/ebuild/test_formatter.py | 1860
-rw-r--r--  tests/ebuild/test_misc.py | 50
-rw-r--r--  tests/ebuild/test_portage_conf.py | 118
-rw-r--r--  tests/ebuild/test_profiles.py | 1724
-rw-r--r--  tests/ebuild/test_repo_objs.py | 568
-rw-r--r--  tests/ebuild/test_repository.py | 262
-rw-r--r--  tests/fetch/test_base.py | 32
-rw-r--r--  tests/fetch/test_init.py | 31
-rw-r--r--  tests/fs/fs_util.py | 4
-rw-r--r--  tests/fs/test_contents.py | 266
-rw-r--r--  tests/fs/test_fs.py | 66
-rw-r--r--  tests/fs/test_livefs.py | 33
-rw-r--r--  tests/fs/test_ops.py | 65
-rw-r--r--  tests/merge/test_engine.py | 54
-rw-r--r--  tests/merge/test_triggers.py | 313
-rw-r--r--  tests/merge/util.py | 8
-rw-r--r--  tests/package/test_base.py | 23
-rw-r--r--  tests/package/test_metadata.py | 14
-rw-r--r--  tests/package/test_mutated.py | 6
-rw-r--r--  tests/pkgsets/test_filelist.py | 52
-rw-r--r--  tests/pkgsets/test_glsa.py | 80
-rw-r--r--  tests/pkgsets/test_installed.py | 33
-rw-r--r--  tests/repository/test_filtered.py | 46
-rw-r--r--  tests/repository/test_multiplex.py | 35
-rw-r--r--  tests/repository/test_prototype.py | 252
-rw-r--r--  tests/resolver/test_choice_point.py | 63
-rw-r--r--  tests/resolver/test_pigeonholes.py | 3
-rw-r--r--  tests/resolver/test_plan.py | 23
-rw-r--r--  tests/restrictions/test_boolean.py | 143
-rw-r--r--  tests/restrictions/test_delegated.py | 35
-rw-r--r--  tests/restrictions/test_packages.py | 77
-rw-r--r--  tests/restrictions/test_required_use.py | 192
-rw-r--r--  tests/restrictions/test_restriction.py | 17
-rw-r--r--  tests/restrictions/test_util.py | 19
-rw-r--r--  tests/restrictions/test_values.py | 330
-rw-r--r--  tests/restrictions/utils.py | 57
-rw-r--r--  tests/scripts/test_patom.py | 88
-rw-r--r--  tests/scripts/test_pclean.py | 2
-rw-r--r--  tests/scripts/test_pclonecache.py | 32
-rw-r--r--  tests/scripts/test_pconfig.py | 492
-rw-r--r--  tests/scripts/test_pebuild.py | 38
-rw-r--r--  tests/scripts/test_pmaint.py | 207
-rw-r--r--  tests/scripts/test_pmerge.py | 76
-rw-r--r--  tests/scripts/test_pquery.py | 64
-rw-r--r--  tests/sync/test_base.py | 76
-rw-r--r--  tests/sync/test_bzr.py | 11
-rw-r--r--  tests/sync/test_cvs.py | 26
-rw-r--r--  tests/sync/test_darcs.py | 11
-rw-r--r--  tests/sync/test_git.py | 33
-rw-r--r--  tests/sync/test_git_svn.py | 15
-rw-r--r--  tests/sync/test_hg.py | 28
-rw-r--r--  tests/sync/test_rsync.py | 56
-rw-r--r--  tests/sync/test_sqfs.py | 13
-rw-r--r--  tests/sync/test_svn.py | 11
-rw-r--r--  tests/sync/test_tar.py | 11
-rw-r--r--  tests/test_gpg.py | 16
-rw-r--r--  tests/test_plugin.py | 190
-rw-r--r--  tests/test_source_hygene.py | 2
-rw-r--r--  tests/util/test_parserestrict.py | 127
218 files changed, 17529 insertions, 10771 deletions
diff --git a/doc/conf.py b/doc/conf.py
index f18382148..a972eb5d4 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -14,76 +14,76 @@
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
- 'sphinx.ext.extlinks',
- 'sphinx.ext.autosummary',
- 'sphinx.ext.autodoc',
- 'sphinx.ext.doctest',
- 'sphinx.ext.intersphinx',
- 'sphinx.ext.todo',
- 'sphinx.ext.coverage',
- 'sphinx.ext.ifconfig',
- 'sphinx.ext.viewcode',
- 'snakeoil.dist.sphinxext',
+ "sphinx.ext.extlinks",
+ "sphinx.ext.autosummary",
+ "sphinx.ext.autodoc",
+ "sphinx.ext.doctest",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.todo",
+ "sphinx.ext.coverage",
+ "sphinx.ext.ifconfig",
+ "sphinx.ext.viewcode",
+ "snakeoil.dist.sphinxext",
]
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
# The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
# The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
# The master toctree document.
-master_doc = 'index'
+master_doc = "index"
# General information about the project.
-project = 'pkgcore'
-authors = ''
-copyright = '2006-2022, pkgcore contributors'
+project = "pkgcore"
+authors = ""
+copyright = "2006-2022, pkgcore contributors"
# version is set by snakeoil extension
-release = 'master'
+release = "master"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
-#language = None
+# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
-#today = ''
+# today = ''
# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ['_build', 'generated']
+exclude_patterns = ["_build", "generated"]
# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
+# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
# auto-generate required files for RTD build environment
@@ -91,128 +91,125 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'default'
+html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
-#html_theme_options = {}
+# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
-#html_title = None
+# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-#html_logo = None
+# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-#html_favicon = None
+# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
+# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
# If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
# If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
# If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
# Output file base name for HTML help builder.
-htmlhelp_basename = 'pkgcoredoc'
+htmlhelp_basename = "pkgcoredoc"
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
-#latex_paper_size = 'letter'
+# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
-#latex_font_size = '10pt'
+# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'pkgcore.tex', 'pkgcore Documentation',
- authors, 'manual'),
+ ("index", "pkgcore.tex", "pkgcore Documentation", authors, "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
-#latex_logo = None
+# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
# If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False
# If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
-#latex_preamble = ''
+# latex_preamble = ''
# Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
# If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [
- ('man/pkgcore', 'pkgcore', 'a framework for package management', None, 5)
-]
+man_pages = [("man/pkgcore", "pkgcore", "a framework for package management", None, 5)]
# -- Options for Epub output ---------------------------------------------------
@@ -224,45 +221,52 @@ epub_copyright = copyright
# The language of the text. It defaults to the language option
# or en if the language is not set.
-#epub_language = ''
+# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
-#epub_scheme = ''
+# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
-#epub_identifier = ''
+# epub_identifier = ''
# A unique identification for the text.
-#epub_uid = ''
+# epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
-#epub_pre_files = []
+# epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
-#epub_post_files = []
+# epub_post_files = []
# A list of files that should not be packed into the epub file.
-#epub_exclude_files = []
+# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
-#epub_tocdepth = 3
+# epub_tocdepth = 3
# Allow duplicate toc entries.
-#epub_tocdup = True
+# epub_tocdup = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- 'python': ('http://docs.python.org/', None),
- 'snakeoil': ('https://github.com/pkgcore/snakeoil', None),
+ "python": ("http://docs.python.org/", None),
+ "snakeoil": ("https://github.com/pkgcore/snakeoil", None),
}
autodoc_default_flags = [
- "members", "show-inheritance", "inherited-members", "undoc-members"]
+ "members",
+ "show-inheritance",
+ "inherited-members",
+ "undoc-members",
+]
extlinks = {
- 'git_tag': ('https://github.com/pkgcore/pkgcore/releases/tag/%s', 'git log '),
- 'git_release': ('https://github.com/pkgcore/pkgcore/archive/%s.tar.gz', 'release download ')
+ "git_tag": ("https://github.com/pkgcore/pkgcore/releases/tag/%s", "git log "),
+ "git_release": (
+ "https://github.com/pkgcore/pkgcore/archive/%s.tar.gz",
+ "release download ",
+ ),
}
diff --git a/examples/changed_use.py b/examples/changed_use.py
index 4575de343..4623dde10 100755
--- a/examples/changed_use.py
+++ b/examples/changed_use.py
@@ -9,27 +9,34 @@ try:
from pkgcore.restrictions.boolean import OrRestriction
from pkgcore.util import commandline
except ImportError:
- print('Cannot import pkgcore!', file=sys.stderr)
- print('Verify it is properly installed and/or PYTHONPATH is set correctly.', file=sys.stderr)
- if '--debug' not in sys.argv:
- print('Add --debug to the commandline for a traceback.', file=sys.stderr)
+ print("Cannot import pkgcore!", file=sys.stderr)
+ print(
+ "Verify it is properly installed and/or PYTHONPATH is set correctly.",
+ file=sys.stderr,
+ )
+ if "--debug" not in sys.argv:
+ print("Add --debug to the commandline for a traceback.", file=sys.stderr)
else:
raise
sys.exit(1)
argparser = commandline.ArgumentParser(color=False, version=False)
+argparser.add_argument("target", nargs="+", help="target package atoms")
argparser.add_argument(
- 'target', nargs='+', help='target package atoms')
+ "--repo",
+ action=commandline.StoreRepoObject,
+ help="repo to use (default from domain if omitted).",
+)
argparser.add_argument(
- '--repo', action=commandline.StoreRepoObject,
- help='repo to use (default from domain if omitted).')
-argparser.add_argument(
- '--print_type', '-t', default="cpvstr",
+ "--print_type",
+ "-t",
+ default="cpvstr",
choices=("slotted_atom", "versioned_atom", "cpvstr"),
- help='''type of atom to output:
+ help="""type of atom to output:
'versioned_atom' : a valid versioned atom,
'slotted_atom' : a valid slotted atom,
- 'cpvstr' : the cpv of the package''')
+ 'cpvstr' : the cpv of the package""",
+)
@argparser.bind_final_check
@@ -38,10 +45,11 @@ def check_args(parser, namespace):
namespace.vdb = domain.all_installed_repos
if not namespace.repo:
# fallback to default repo
- namespace.repo = namespace.config.get_default('repo')
+ namespace.repo = namespace.config.get_default("repo")
namespace.restrict = OrRestriction(
- *commandline.convert_to_restrict(namespace.target))
+ *commandline.convert_to_restrict(namespace.target)
+ )
namespace.outputter = attrgetter(namespace.print_type)
@@ -58,8 +66,13 @@ def main(options, out, err):
changed_flags = (oldflags ^ newflags) | (current.iuse ^ built.iuse)
if options.verbosity > 0:
out.write(
- "package %s, %d flags have changed:\n\t%s" %
- (current.unversioned_atom, len(changed_flags), ' '.join(changed_flags)))
+ "package %s, %d flags have changed:\n\t%s"
+ % (
+ current.unversioned_atom,
+ len(changed_flags),
+ " ".join(changed_flags),
+ )
+ )
else:
out.write(options.outputter(current))
else:
@@ -67,6 +80,6 @@ def main(options, out, err):
out.write("%s is the same as it was before" % current.cpvstr)
-if __name__ == '__main__':
+if __name__ == "__main__":
tool = commandline.Tool(argparser)
sys.exit(tool())
diff --git a/examples/identify-installed-non-split-debug-pkgs.py b/examples/identify-installed-non-split-debug-pkgs.py
index 00e1ccacf..0c5912d8a 100755
--- a/examples/identify-installed-non-split-debug-pkgs.py
+++ b/examples/identify-installed-non-split-debug-pkgs.py
@@ -8,7 +8,7 @@ debug_paths = ["/usr/lib/debug"]
fi = file_identifier()
vdbs = load_config().get_default("domain").all_installed_repos
for pkg in sorted(vdbs):
- contents = getattr(pkg, 'contents', ())
+ contents = getattr(pkg, "contents", ())
if not contents:
continue
files = contents.iterfiles()
diff --git a/examples/pkg_info.py b/examples/pkg_info.py
index 51ff4347e..aa13de2d5 100755
--- a/examples/pkg_info.py
+++ b/examples/pkg_info.py
@@ -7,25 +7,28 @@ try:
from pkgcore.restrictions.boolean import OrRestriction
from pkgcore.util import commandline
except ImportError:
- print('Cannot import pkgcore!', file=sys.stderr)
- print('Verify it is properly installed and/or PYTHONPATH is set correctly.', file=sys.stderr)
- if '--debug' not in sys.argv:
- print('Add --debug to the commandline for a traceback.', file=sys.stderr)
+ print("Cannot import pkgcore!", file=sys.stderr)
+ print(
+ "Verify it is properly installed and/or PYTHONPATH is set correctly.",
+ file=sys.stderr,
+ )
+ if "--debug" not in sys.argv:
+ print("Add --debug to the commandline for a traceback.", file=sys.stderr)
else:
raise
sys.exit(1)
argparser = commandline.ArgumentParser(color=False, version=False)
-argparser.add_argument(
- 'target', nargs='+', help='target package atoms')
+argparser.add_argument("target", nargs="+", help="target package atoms")
@argparser.bind_final_check
def check_args(parser, namespace):
namespace.repo = namespace.domain.ebuild_repos
namespace.restrict = OrRestriction(
- *commandline.convert_to_restrict(namespace.target))
+ *commandline.convert_to_restrict(namespace.target)
+ )
def getter(pkg):
@@ -35,21 +38,27 @@ def getter(pkg):
@argparser.bind_main_func
def main(options, out, err):
for t, pkgs in itertools.groupby(
- options.repo.itermatch(options.restrict, sorter=sorted), getter):
+ options.repo.itermatch(options.restrict, sorter=sorted), getter
+ ):
out.write(t[0])
out.first_prefix = " "
for pkg in pkgs:
- out.write('%s::%s' % (pkg.cpvstr, pkg.repo.repo_id))
+ out.write("%s::%s" % (pkg.cpvstr, pkg.repo.repo_id))
out.first_prefix = ""
- item = 'maintainer'
+ item = "maintainer"
values = t[1]
if values:
out.write(
- "%s%s: %s" %
- (item.title(), 's'[len(values) == 1:], ', '.join(str(x) for x in values)))
+ "%s%s: %s"
+ % (
+ item.title(),
+ "s"[len(values) == 1 :],
+ ", ".join(str(x) for x in values),
+ )
+ )
out.write()
-if __name__ == '__main__':
+if __name__ == "__main__":
tool = commandline.Tool(argparser)
sys.exit(tool())
diff --git a/examples/repo_list.py b/examples/repo_list.py
index 2b3cb2c51..3c5114baa 100755
--- a/examples/repo_list.py
+++ b/examples/repo_list.py
@@ -7,10 +7,13 @@ try:
from pkgcore.repository.util import get_raw_repos, get_virtual_repos
from pkgcore.util import commandline
except ImportError:
- print('Cannot import pkgcore!', file=sys.stderr)
- print('Verify it is properly installed and/or PYTHONPATH is set correctly.', file=sys.stderr)
- if '--debug' not in sys.argv:
- print('Add --debug to the commandline for a traceback.', file=sys.stderr)
+ print("Cannot import pkgcore!", file=sys.stderr)
+ print(
+ "Verify it is properly installed and/or PYTHONPATH is set correctly.",
+ file=sys.stderr,
+ )
+ if "--debug" not in sys.argv:
+ print("Add --debug to the commandline for a traceback.", file=sys.stderr)
else:
raise
sys.exit(1)
@@ -18,8 +21,11 @@ except ImportError:
argparser = commandline.ArgumentParser(color=False, version=False)
argparser.add_argument(
- '-r', '--repo', action=commandline.StoreRepoObject,
- help='repo to give info about (default from domain if omitted)')
+ "-r",
+ "--repo",
+ action=commandline.StoreRepoObject,
+ help="repo to give info about (default from domain if omitted)",
+)
@argparser.bind_final_check
@@ -46,6 +52,6 @@ def main(options, out, err):
out.write()
-if __name__ == '__main__':
+if __name__ == "__main__":
tool = commandline.Tool(argparser)
sys.exit(tool())
diff --git a/examples/report_pkg_changes.py b/examples/report_pkg_changes.py
index 23282c731..ace270629 100755
--- a/examples/report_pkg_changes.py
+++ b/examples/report_pkg_changes.py
@@ -8,6 +8,7 @@ from snakeoil.osutils import listdir_files
from pkgcore.config import load_config
from pkgcore.ebuild.atom import atom
+
# we use a WorldFile since it *currently* forces unversioned atoms.
from pkgcore.pkgsets.filelist import WorldFile
@@ -36,17 +37,20 @@ def main(target_repo, seen, moves):
for l, prefix in ((new_pkgs, "added pkgs"), (removed, "removed pkgs")):
if l:
- sys.stdout.write("%s:\n %s\n\n" %
- (prefix, "\n ".join(str(x) for x in sorted(l))))
+ sys.stdout.write(
+ "%s:\n %s\n\n" % (prefix, "\n ".join(str(x) for x in sorted(l)))
+ )
if finished_moves:
- sys.stdout.write("moved pkgs:\n %s\n\n" %
- "\n ".join("%s -> %s" % (k, moves[k])
- for k in sorted(finished_moves)))
+ sys.stdout.write(
+ "moved pkgs:\n %s\n\n"
+ % "\n ".join("%s -> %s" % (k, moves[k]) for k in sorted(finished_moves))
+ )
if in_transit:
- sys.stdout.write("pkg moves in transit:\n %s\n\n" %
- "\n ".join("%s -> %s" % (k, in_transit[k])
- for k in sorted(in_transit)))
+ sys.stdout.write(
+ "pkg moves in transit:\n %s\n\n"
+ % "\n ".join("%s -> %s" % (k, in_transit[k]) for k in sorted(in_transit))
+ )
# just flush the seen fully, simplest.
for x in seen_set:
@@ -71,32 +75,36 @@ def parse_moves(location):
# schwartzian comparison, convert it into YYYY-QQ
def get_key(fname):
- return tuple(reversed(fname.split('-')))
+ return tuple(reversed(fname.split("-")))
moves = {}
for update_file in sorted(listdir_files(location), key=get_key):
for line in iter_read_bash(pjoin(location, update_file)):
line = line.split()
- if line[0] != 'move':
+ if line[0] != "move":
continue
moves[atom(line[1])] = atom(line[2])
return moves
-if __name__ == '__main__':
+if __name__ == "__main__":
args = sys.argv[1:]
- if len(args) not in (2,3) or "--help" in args or "-h" in args:
- sys.stderr.write("need two args; repository to scan, and "
+ if len(args) not in (2, 3) or "--help" in args or "-h" in args:
+ sys.stderr.write(
+ "need two args; repository to scan, and "
"file to store the state info in.\nOptional third arg is "
- "a profiles update directory to scan for moves.\n")
+ "a profiles update directory to scan for moves.\n"
+ )
sys.exit(-1)
conf = load_config()
try:
repo = conf.repo[args[0]]
except KeyError:
- sys.stderr.write("repository %r wasn't found- known repos\n%r\n" %
- (args[0], list(conf.repo.keys())))
+ sys.stderr.write(
+ "repository %r wasn't found- known repos\n%r\n"
+ % (args[0], list(conf.repo.keys()))
+ )
sys.exit(-2)
if not os.path.exists(args[1]):
diff --git a/py_build.py b/py_build.py
index fe1adb1d9..a639ed28e 100644
--- a/py_build.py
+++ b/py_build.py
@@ -12,7 +12,7 @@ from flit_core import buildapi
@contextmanager
def sys_path():
orig_path = sys.path[:]
- sys.path.insert(0, str(Path.cwd() / 'src'))
+ sys.path.insert(0, str(Path.cwd() / "src"))
try:
yield
finally:
@@ -27,7 +27,9 @@ def write_pkgcore_lookup_configs(cleanup_files):
with open(path, "w") as f:
os.chmod(path, 0o644)
- f.write(textwrap.dedent("""\
+ f.write(
+ textwrap.dedent(
+ """\
from os.path import abspath, exists, join
import sys
@@ -39,7 +41,9 @@ def write_pkgcore_lookup_configs(cleanup_files):
LIBDIR_PATH = join(INSTALL_PREFIX, 'lib/pkgcore')
EBD_PATH = join(LIBDIR_PATH, 'ebd')
INJECTED_BIN_PATH = ()
- """))
+ """
+ )
+ )
def write_verinfo(cleanup_files):
@@ -47,6 +51,7 @@ def write_verinfo(cleanup_files):
path.parent.mkdir(parents=True, exist_ok=True)
print(f"generating version info: {path}")
from snakeoil.version import get_git_version
+
path.write_text(f"version_info={get_git_version(Path.cwd())!r}")
@@ -62,7 +67,14 @@ def prepare_pkgcore(callback, consts: bool):
write_pkgcore_lookup_configs(cleanup_files)
# generate function lists so they don't need to be created on install
- if subprocess.call(['make', f'PYTHON={sys.executable}', 'PYTHONPATH=' + ':'.join(sys.path)], cwd=Path.cwd() / 'data/lib/pkgcore/ebd'):
+ if subprocess.call(
+ [
+ "make",
+ f"PYTHON={sys.executable}",
+ "PYTHONPATH=" + ":".join(sys.path),
+ ],
+ cwd=Path.cwd() / "data/lib/pkgcore/ebd",
+ ):
raise Exception("Running makefile failed")
return callback()
@@ -76,13 +88,17 @@ def prepare_pkgcore(callback, consts: bool):
def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
"""Builds a wheel, places it in wheel_directory"""
- callback = partial(buildapi.build_wheel, wheel_directory, config_settings, metadata_directory)
+ callback = partial(
+ buildapi.build_wheel, wheel_directory, config_settings, metadata_directory
+ )
return prepare_pkgcore(callback, consts=True)
def build_editable(wheel_directory, config_settings=None, metadata_directory=None):
"""Builds an "editable" wheel, places it in wheel_directory"""
- callback = partial(buildapi.build_editable, wheel_directory, config_settings, metadata_directory)
+ callback = partial(
+ buildapi.build_editable, wheel_directory, config_settings, metadata_directory
+ )
return prepare_pkgcore(callback, consts=False)
diff --git a/src/pkgcore/__init__.py b/src/pkgcore/__init__.py
index 2dfa59cd6..a9d006afd 100644
--- a/src/pkgcore/__init__.py
+++ b/src/pkgcore/__init__.py
@@ -1,2 +1,2 @@
-__title__ = 'pkgcore'
-__version__ = '0.12.18'
+__title__ = "pkgcore"
+__version__ = "0.12.18"
diff --git a/src/pkgcore/binpkg/remote.py b/src/pkgcore/binpkg/remote.py
index 0ef145d33..ee8989f67 100644
--- a/src/pkgcore/binpkg/remote.py
+++ b/src/pkgcore/binpkg/remote.py
@@ -25,7 +25,7 @@ def _iter_till_empty_newline(data):
for x in data:
if not x:
return
- k, v = x.split(':', 1)
+ k, v = x.split(":", 1)
yield k, v.strip()
@@ -35,6 +35,7 @@ class CacheEntry(StackedDict):
Note that this pop doesn't through KeyError if something is missing- just
returns None instead. This is likely to be changed.
"""
+
def pop(self, key, default=None):
try:
return self[key]
@@ -56,44 +57,60 @@ class PackagesCacheV0(cache.bulk):
doesn't bundle certain useful keys like RESTRICT
"""
- _header_mangling_map = ImmutableDict({
- 'FEATURES': 'UPSTREAM_FEATURES',
- 'ACCEPT_KEYWORDS': 'KEYWORDS',
- })
+ _header_mangling_map = ImmutableDict(
+ {
+ "FEATURES": "UPSTREAM_FEATURES",
+ "ACCEPT_KEYWORDS": "KEYWORDS",
+ }
+ )
# this maps from literal keys in the cache to .data[key] expected forms
_deserialize_map = {
- 'DESC': 'DESCRIPTION',
- 'MTIME': 'mtime',
- 'repo': 'REPO',
+ "DESC": "DESCRIPTION",
+ "MTIME": "mtime",
+ "repo": "REPO",
}
# this maps from .attr to data items.
_serialize_map = {
- 'DESCRIPTION': 'DESC',
- 'mtime': 'MTIME',
- 'source_repository': 'REPO',
+ "DESCRIPTION": "DESC",
+ "mtime": "MTIME",
+ "source_repository": "REPO",
}
- deserialized_inheritable = frozenset(('CBUILD', 'CHOST', 'source_repository'))
- _pkg_attr_sequences = ('use', 'keywords', 'iuse')
+ deserialized_inheritable = frozenset(("CBUILD", "CHOST", "source_repository"))
+ _pkg_attr_sequences = ("use", "keywords", "iuse")
_deserialized_defaults = dict.fromkeys(
(
- 'BDEPEND', 'DEPEND', 'RDEPEND', 'PDEPEND', 'IDEPEND',
- 'BUILD_TIME', 'IUSE', 'KEYWORDS', 'LICENSE', 'PATH', 'PROPERTIES',
- 'USE', 'DEFINED_PHASES', 'CHOST', 'CBUILD', 'DESC', 'REPO',
- 'DESCRIPTION',
+ "BDEPEND",
+ "DEPEND",
+ "RDEPEND",
+ "PDEPEND",
+ "IDEPEND",
+ "BUILD_TIME",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "PATH",
+ "PROPERTIES",
+ "USE",
+ "DEFINED_PHASES",
+ "CHOST",
+ "CBUILD",
+ "DESC",
+ "REPO",
+ "DESCRIPTION",
),
- ''
+ "",
)
- _deserialized_defaults.update({'EAPI': '0', 'SLOT': '0'})
+ _deserialized_defaults.update({"EAPI": "0", "SLOT": "0"})
_deserialized_defaults = ImmutableDict(_deserialized_defaults)
- _stored_chfs = ('size', 'sha1', 'md5', 'mtime')
+ _stored_chfs = ("size", "sha1", "md5", "mtime")
version = 0
def __init__(self, location, *args, **kwds):
self._location = location
- vkeys = {'CPV'}
+ vkeys = {"CPV"}
vkeys.update(self._deserialized_defaults)
vkeys.update(x.upper() for x in self._stored_chfs)
kwds["auxdbkeys"] = vkeys
@@ -105,7 +122,8 @@ class PackagesCacheV0(cache.bulk):
def read_preamble(self, handle):
return ImmutableDict(
(self._header_mangling_map.get(k, k), v)
- for k, v in _iter_till_empty_newline(handle))
+ for k, v in _iter_till_empty_newline(handle)
+ )
def _read_data(self):
try:
@@ -115,8 +133,11 @@ class PackagesCacheV0(cache.bulk):
self.preamble = self.read_preamble(handle)
defaults = dict(self._deserialized_defaults.items())
- defaults.update((k, v) for k, v in self.preamble.items()
- if k in self.deserialized_inheritable)
+ defaults.update(
+ (k, v)
+ for k, v in self.preamble.items()
+ if k in self.deserialized_inheritable
+ )
defaults = ImmutableDict(defaults)
pkgs = {}
@@ -133,27 +154,28 @@ class PackagesCacheV0(cache.bulk):
if cpv is None:
cpv = f"{d.pop('CATEGORY')}/{d.pop('PF')}"
- if 'USE' in d:
- d.setdefault('IUSE', d.get('USE', ''))
+ if "USE" in d:
+ d.setdefault("IUSE", d.get("USE", ""))
for src, dst in self._deserialize_map.items():
if src in d:
d.setdefault(dst, d.pop(src))
pkgs[cpv] = CacheEntry(d, defaults)
- assert count == int(self.preamble.get('PACKAGES', count))
+ assert count == int(self.preamble.get("PACKAGES", count))
return pkgs
@classmethod
def _assemble_preamble_dict(cls, target_dicts):
preamble = {
- 'VERSION': cls.version,
- 'PACKAGES': len(target_dicts),
- 'TIMESTAMP': str(int(time())),
+ "VERSION": cls.version,
+ "PACKAGES": len(target_dicts),
+ "TIMESTAMP": str(int(time())),
}
for key in cls.deserialized_inheritable:
try:
preamble[key] = find_best_savings(
- (d[1].get(key, '') for d in target_dicts), key)
+ (d[1].get(key, "") for d in target_dicts), key
+ )
except ValueError:
# empty iterable handed to max
pass
@@ -167,15 +189,16 @@ class PackagesCacheV0(cache.bulk):
value = getattr(pkg, key)
if key in sequences:
- value = ' '.join(sorted(value))
+ value = " ".join(sorted(value))
else:
value = str(getattr(pkg, key)).strip()
key = key.upper()
d[cls._serialize_map.get(key, key)] = value
- for key, value in zip(cls._stored_chfs,
- get_chksums(pkg.path, *cls._stored_chfs)):
- if key != 'size':
+ for key, value in zip(
+ cls._stored_chfs, get_chksums(pkg.path, *cls._stored_chfs)
+ ):
+ if key != "size":
value = "%x" % (value,)
d[key.upper()] = value
d["MTIME"] = str(os.stat(pkg.path).st_mtime)
@@ -189,8 +212,7 @@ class PackagesCacheV0(cache.bulk):
self._serialize_to_handle(list(self.data.items()), handler)
handler.close()
except PermissionError as e:
- logger.error(
- f'failed writing binpkg cache to {self._location!r}: {e}')
+ logger.error(f"failed writing binpkg cache to {self._location!r}: {e}")
finally:
if handler is not None:
handler.discard()
@@ -202,17 +224,16 @@ class PackagesCacheV0(cache.bulk):
for key in sorted(preamble):
handler.write(f"{convert_key(key, key)}: {preamble[key]}\n")
- handler.write('\n')
+ handler.write("\n")
- spacer = ' '
+ spacer = " "
if self.version != 0:
- spacer = ''
+ spacer = ""
vkeys = self._known_keys
for cpv, pkg_data in sorted(data, key=itemgetter(0)):
handler.write(f"CPV:{spacer}{cpv}\n")
- data = [(convert_key(key, key), value)
- for key, value in pkg_data.items()]
+ data = [(convert_key(key, key), value) for key, value in pkg_data.items()]
for write_key, value in sorted(data):
if write_key not in vkeys:
continue
@@ -225,16 +246,16 @@ class PackagesCacheV0(cache.bulk):
handler.write(f"{write_key}:\n")
elif value:
handler.write(f"{write_key}:{spacer}{value}\n")
- handler.write('\n')
+ handler.write("\n")
def update_from_xpak(self, pkg, xpak):
# invert the lookups here; if you do .items() on an xpak,
# it'll load up the contents in full.
new_dict = {k: xpak[k] for k in self._known_keys if k in xpak}
- new_dict['_chf_'] = xpak._chf_
- chfs = [x for x in self._stored_chfs if x != 'mtime']
+ new_dict["_chf_"] = xpak._chf_
+ chfs = [x for x in self._stored_chfs if x != "mtime"]
for key, value in zip(chfs, get_chksums(pkg.path, *chfs)):
- if key != 'size':
+ if key != "size":
value = "%x" % (value,)
new_dict[key.upper()] = value
self[pkg.cpvstr] = new_dict
@@ -246,7 +267,7 @@ class PackagesCacheV0(cache.bulk):
if not targets:
# just open/trunc the target instead, and bail
- open(self._location, 'wb').close()
+ open(self._location, "wb").close()
return
@@ -258,10 +279,12 @@ class PackagesCacheV1(PackagesCacheV0):
"""
deserialized_inheritable = PackagesCacheV0.deserialized_inheritable.union(
- ('SLOT', 'EAPI', 'LICENSE', 'KEYWORDS', 'USE', 'RESTRICT'))
+ ("SLOT", "EAPI", "LICENSE", "KEYWORDS", "USE", "RESTRICT")
+ )
_deserialized_defaults = ImmutableDict(
- list(PackagesCacheV0._deserialized_defaults.items()) + [('RESTRICT', '')])
+ list(PackagesCacheV0._deserialized_defaults.items()) + [("RESTRICT", "")]
+ )
@classmethod
def _assemble_pkg_dict(cls, pkg):
@@ -269,9 +292,9 @@ class PackagesCacheV1(PackagesCacheV0):
d = PackagesCacheV0._assemble_pkg_dict(pkg)
use = set(pkg.use).intersection(pkg.iuse_stripped)
d.pop("IUSE", None)
- iuse_bits = [f'-{x}' for x in pkg.iuse_stripped if x not in use]
+ iuse_bits = [f"-{x}" for x in pkg.iuse_stripped if x not in use]
use.update(iuse_bits)
- d["USE"] = ' '.join(sorted(use))
+ d["USE"] = " ".join(sorted(use))
return d
version = 1
@@ -279,8 +302,8 @@ class PackagesCacheV1(PackagesCacheV0):
def get_cache_kls(version):
version = str(version)
- if version == '0':
+ if version == "0":
return PackagesCacheV0
- elif version in ('1', '-1'):
+ elif version in ("1", "-1"):
return PackagesCacheV1
raise KeyError(f"cache version {version} unsupported")
diff --git a/src/pkgcore/binpkg/repo_ops.py b/src/pkgcore/binpkg/repo_ops.py
index ac85c6856..f04e336a7 100644
--- a/src/pkgcore/binpkg/repo_ops.py
+++ b/src/pkgcore/binpkg/repo_ops.py
@@ -23,7 +23,7 @@ from ..operations import repo as repo_interfaces
from . import xpak
-def discern_loc(base, pkg, extension='.tbz2'):
+def discern_loc(base, pkg, extension=".tbz2"):
return pjoin(base, pkg.category, f"{pkg.package}-{pkg.fullver}{extension}")
@@ -40,13 +40,12 @@ def generate_attr_dict(pkg, portage_compatible=True):
if k == "contents":
continue
v = getattr(pkg, k)
- if k == 'environment':
- d['environment.bz2'] = compress_data(
- 'bzip2', v.bytes_fileobj().read())
+ if k == "environment":
+ d["environment.bz2"] = compress_data("bzip2", v.bytes_fileobj().read())
continue
elif not isinstance(v, str):
try:
- s = ' '.join(v)
+ s = " ".join(v)
except TypeError:
s = str(v)
else:
@@ -64,7 +63,6 @@ def generate_attr_dict(pkg, portage_compatible=True):
class install(repo_interfaces.install):
-
@steal_docs(repo_interfaces.install)
def add_data(self):
if self.observer is None:
@@ -76,18 +74,18 @@ class install(repo_interfaces.install):
final_path = discern_loc(self.repo.base, pkg, self.repo.extension)
tmp_path = pjoin(
os.path.dirname(final_path),
- ".tmp.%i.%s" % (os.getpid(), os.path.basename(final_path)))
+ ".tmp.%i.%s" % (os.getpid(), os.path.basename(final_path)),
+ )
self.tmp_path, self.final_path = tmp_path, final_path
if not ensure_dirs(os.path.dirname(tmp_path), mode=0o755):
raise repo_interfaces.Failure(
- f"failed creating directory: {os.path.dirname(tmp_path)!r}")
+ f"failed creating directory: {os.path.dirname(tmp_path)!r}"
+ )
try:
start(f"generating tarball: {tmp_path}")
- tar.write_set(
- pkg.contents, tmp_path, compressor='bzip2',
- parallelize=True)
+ tar.write_set(pkg.contents, tmp_path, compressor="bzip2", parallelize=True)
end("tarball created", True)
start("writing Xpak")
# ok... got a tarball. now add xpak.
@@ -109,7 +107,6 @@ class install(repo_interfaces.install):
class uninstall(repo_interfaces.uninstall):
-
@steal_docs(repo_interfaces.uninstall)
def remove_data(self):
return True
@@ -121,7 +118,6 @@ class uninstall(repo_interfaces.uninstall):
class replace(install, uninstall, repo_interfaces.replace):
-
@steal_docs(repo_interfaces.replace)
def finalize_data(self):
# we just invoke install finalize_data, since it atomically
@@ -131,7 +127,6 @@ class replace(install, uninstall, repo_interfaces.replace):
class operations(repo_interfaces.operations):
-
def _cmd_implementation_install(self, *args):
return install(self.repo, *args)
diff --git a/src/pkgcore/binpkg/repository.py b/src/pkgcore/binpkg/repository.py
index fba61df3a..9a6e7870a 100644
--- a/src/pkgcore/binpkg/repository.py
+++ b/src/pkgcore/binpkg/repository.py
@@ -29,10 +29,10 @@ from .xpak import Xpak
class force_unpacking(triggers.base):
- required_csets = ('new_cset',)
+ required_csets = ("new_cset",)
priority = 5
- _hooks = ('sanity_check',)
- _label = 'forced decompression'
+ _hooks = ("sanity_check",)
+ _label = "forced decompression"
_engine_type = triggers.INSTALLING_MODES
def __init__(self, format_op):
@@ -40,11 +40,11 @@ class force_unpacking(triggers.base):
def trigger(self, engine, cset):
op = self.format_op
- op = getattr(op, 'install_op', op)
+ op = getattr(op, "install_op", op)
op.setup_workdir()
merge_cset = cset
- if engine.offset != '/':
- merge_cset = cset.change_offset(engine.offset, '/')
+ if engine.offset != "/":
+ merge_cset = cset.change_offset(engine.offset, "/")
merge_contents(merge_cset, offset=op.env["D"])
# ok. they're on disk.
@@ -54,9 +54,10 @@ class force_unpacking(triggers.base):
# this rewrites the data_source to the ${D} loc.
d = op.env["D"]
- fi = (x.change_attributes(data=local_source(
- pjoin(d, x.location.lstrip('/'))))
- for x in merge_cset.iterfiles())
+ fi = (
+ x.change_attributes(data=local_source(pjoin(d, x.location.lstrip("/"))))
+ for x in merge_cset.iterfiles()
+ )
if engine.offset:
# we're using merge_cset above, which has the final offset loc
@@ -69,20 +70,20 @@ class force_unpacking(triggers.base):
# we *probably* should change the csets class at some point
# since it no longer needs to be tar, but that's for another day.
- engine.replace_cset('new_cset', cset)
+ engine.replace_cset("new_cset", cset)
class BinPkg(ebuild_built.generate_new_factory):
-
- def _add_format_triggers(self, pkg, op_inst, format_op_inst,
- engine_inst):
- if (engine.UNINSTALL_MODE != engine_inst.mode and
- pkg == engine_inst.new and pkg.repo is engine_inst.new.repo):
+ def _add_format_triggers(self, pkg, op_inst, format_op_inst, engine_inst):
+ if (
+ engine.UNINSTALL_MODE != engine_inst.mode
+ and pkg == engine_inst.new
+ and pkg.repo is engine_inst.new.repo
+ ):
t = force_unpacking(op_inst.format_op)
t.register(engine_inst)
- klass._add_format_triggers(
- self, pkg, op_inst, format_op_inst, engine_inst)
+ klass._add_format_triggers(self, pkg, op_inst, format_op_inst, engine_inst)
def scan_contents(self, location):
return scan(location, offset=location)
@@ -112,9 +113,9 @@ class StackedXpakDict(DictMixin):
def xpak(self):
return Xpak(self._parent._get_path(self._pkg))
- mtime = alias_attr('_chf_.mtime')
+ mtime = alias_attr("_chf_.mtime")
- @jit_attr_named('_chf_obj')
+ @jit_attr_named("_chf_obj")
def _chf_(self):
return chksum.LazilyHashedPath(self._parent._get_path(self._pkg))
@@ -132,10 +133,12 @@ class StackedXpakDict(DictMixin):
if data is None:
raise KeyError(
"environment.bz2 not found in xpak segment, "
- "malformed binpkg?")
+ "malformed binpkg?"
+ )
else:
data = data_source(
- compression.decompress_data('bzip2', data), mutable=True)
+ compression.decompress_data("bzip2", data), mutable=True
+ )
elif key == "ebuild":
data = self.xpak.get(f"{self._pkg.package}-{self._pkg.fullver}.ebuild", "")
data = data_source(data)
@@ -143,11 +146,11 @@ class StackedXpakDict(DictMixin):
try:
data = self.xpak[key]
except KeyError:
- if key == '_eclasses_':
+ if key == "_eclasses_":
# hack...
data = {}
else:
- data = ''
+ data = ""
return data
def __delitem__(self, key):
@@ -177,7 +180,7 @@ class StackedXpakDict(DictMixin):
translated_key = self._metadata_rewrites.get(key, key)
if translated_key in self._wipes:
return False
- elif key in ('ebuild', 'environment', 'contents'):
+ elif key in ("ebuild", "environment", "contents"):
return True
return translated_key in self.xpak
@@ -201,12 +204,11 @@ class tree(prototype.tree):
operations_kls = repo_ops.operations
cache_name = "Packages"
- pkgcore_config_type = ConfigHint({
- 'location': 'str',
- 'repo_id': 'str'},
- typename='repo')
+ pkgcore_config_type = ConfigHint(
+ {"location": "str", "repo_id": "str"}, typename="repo"
+ )
- def __init__(self, location, repo_id=None, cache_version='0'):
+ def __init__(self, location, repo_id=None, cache_version="0"):
"""
:param location: root of the tbz2 repository
:keyword repo_id: unique repository id to use; else defaults to
@@ -226,14 +228,16 @@ class tree(prototype.tree):
raise errors.InitializationError(f"base {self.base!r} doesn't exist")
raise errors.InitializationError(
"base directory %r with mode 0%03o isn't readable/executable"
- " by this user" %
- (self.base, os.stat(self.base).st_mode & 0o4777))
+ " by this user" % (self.base, os.stat(self.base).st_mode & 0o4777)
+ )
- self.cache = remote.get_cache_kls(cache_version)(pjoin(self.base, self.cache_name))
+ self.cache = remote.get_cache_kls(cache_version)(
+ pjoin(self.base, self.cache_name)
+ )
self.package_class = BinPkg(self)
def configure(self, *args):
- return(ConfiguredTree(self, *args))
+ return ConfiguredTree(self, *args)
def __str__(self):
return self.repo_id
@@ -243,9 +247,7 @@ class tree(prototype.tree):
if optional_category:
return {}
try:
- return tuple(
- x for x in listdir_dirs(self.base)
- if x.lower() != "all")
+ return tuple(x for x in listdir_dirs(self.base) if x.lower() != "all")
except EnvironmentError as e:
raise KeyError(f"failed fetching categories: {e}") from e
@@ -258,18 +260,21 @@ class tree(prototype.tree):
try:
for x in listdir_files(cpath):
# don't use lstat; symlinks may exist
- if (x.endswith(".lockfile") or
- not x[-lext:].lower() == self.extension or
- x.startswith(".tmp.")):
+ if (
+ x.endswith(".lockfile")
+ or not x[-lext:].lower() == self.extension
+ or x.startswith(".tmp.")
+ ):
continue
pv = x[:-lext]
- pkg = VersionedCPV(f'{category}/{pv}')
+ pkg = VersionedCPV(f"{category}/{pv}")
l.add(pkg.package)
d.setdefault((category, pkg.package), []).append(pkg.fullver)
except EnvironmentError as e:
raise KeyError(
- "failed fetching packages for category %s: %s" %
- (pjoin(self.base, category.lstrip(os.path.sep)), str(e))) from e
+ "failed fetching packages for category %s: %s"
+ % (pjoin(self.base, category.lstrip(os.path.sep)), str(e))
+ ) from e
self._versions_tmp_cache.update(d)
return tuple(l)
@@ -288,7 +293,7 @@ class tree(prototype.tree):
if force:
raise KeyError
cache_data = self.cache[pkg.cpvstr]
- if int(cache_data['mtime']) != int(xpak.mtime):
+ if int(cache_data["mtime"]) != int(xpak.mtime):
raise KeyError
except KeyError:
cache_data = self.cache.update_from_xpak(pkg, xpak)
@@ -326,8 +331,8 @@ class _WrappedBinpkg(pkg_base.wrapper):
def __str__(self):
return (
- f'ebuild binary pkg: {self.cpvstr}::{self.repo.repo_id}, '
- f'source repo {self.source_repository!r}'
+ f"ebuild binary pkg: {self.cpvstr}::{self.repo.repo_id}, "
+ f"source repo {self.source_repository!r}"
)
@@ -344,4 +349,5 @@ class ConfiguredTree(wrapper.tree):
def _generate_operations(self, domain, pkg, **kwargs):
return ebd.built_operations(
- domain, pkg._raw_pkg, initial_env=self.domain_settings, **kwargs)
+ domain, pkg._raw_pkg, initial_env=self.domain_settings, **kwargs
+ )
diff --git a/src/pkgcore/binpkg/xpak.py b/src/pkgcore/binpkg/xpak.py
index d7fd8d02f..13af26300 100644
--- a/src/pkgcore/binpkg/xpak.py
+++ b/src/pkgcore/binpkg/xpak.py
@@ -28,7 +28,6 @@ from ..exceptions import PkgcoreException
class MalformedXpak(PkgcoreException):
-
def __init__(self, msg):
super().__init__(f"xpak as malformed: {msg}")
self.msg = msg
@@ -37,12 +36,13 @@ class MalformedXpak(PkgcoreException):
class Xpak:
__slots__ = ("_source", "_source_is_path", "xpak_start", "_keys_dict")
- _reading_key_rewrites = {'repo': 'REPO'}
+ _reading_key_rewrites = {"repo": "REPO"}
trailer_pre_magic = "XPAKSTOP"
trailer_post_magic = "STOP"
- trailer = struct.Struct(">%isL%is" % (
- len(trailer_pre_magic), len(trailer_post_magic)))
+ trailer = struct.Struct(
+ ">%isL%is" % (len(trailer_pre_magic), len(trailer_post_magic))
+ )
header_pre_magic = "XPAKPACK"
header = struct.Struct(">%isLL" % (len(header_pre_magic),))
@@ -98,12 +98,12 @@ class Xpak:
cur_pos = 0
for key, val in data.items():
if isinstance(val, str):
- val = val.encode('utf8')
+ val = val.encode("utf8")
if isinstance(key, str):
key = key.encode()
- new_index.append(struct.pack(
- ">L%isLL" % len(key),
- len(key), key, cur_pos, len(val)))
+ new_index.append(
+ struct.pack(">L%isLL" % len(key), len(key), key, cur_pos, len(val))
+ )
new_data.append(val)
cur_pos += len(val)
@@ -113,22 +113,26 @@ class Xpak:
else:
handle = target_source.bytes_fileobj(writable=True)
- joiner = b''
+ joiner = b""
new_index = joiner.join(new_index)
new_data = joiner.join(new_data)
handle.seek(start, 0)
- cls.header.write(
- handle, cls.header_pre_magic, len(new_index), len(new_data))
+ cls.header.write(handle, cls.header_pre_magic, len(new_index), len(new_data))
- handle.write(struct.pack(
- ">%is%is" % (len(new_index), len(new_data)), new_index, new_data))
+ handle.write(
+ struct.pack(
+ ">%is%is" % (len(new_index), len(new_data)), new_index, new_data
+ )
+ )
# the +8 is for the longs for new_index/new_data
cls.trailer.write(
- handle, cls.trailer_pre_magic,
+ handle,
+ cls.trailer_pre_magic,
len(new_index) + len(new_data) + cls.trailer.size + 8,
- cls.trailer_post_magic)
+ cls.trailer_post_magic,
+ )
handle.truncate()
handle.close()
return Xpak(target_source)
@@ -143,22 +147,26 @@ class Xpak:
while index_len:
key_len = struct.unpack(">L", fd.read(4))[0]
key = fd.read(key_len)
- key = key.decode('ascii')
+ key = key.decode("ascii")
if len(key) != key_len:
raise MalformedXpak(
- "tried reading key %i of len %i, but hit EOF" % (
- len(keys_dict) + 1, key_len))
+ "tried reading key %i of len %i, but hit EOF"
+ % (len(keys_dict) + 1, key_len)
+ )
try:
offset, data_len = struct.unpack(">LL", fd.read(8))
except struct.error as e:
raise MalformedXpak(
- "key %i, tried reading data offset/len but hit EOF" % (
- len(keys_dict) + 1)) from e
+ "key %i, tried reading data offset/len but hit EOF"
+ % (len(keys_dict) + 1)
+ ) from e
key = key_rewrite(key, key)
keys_dict[key] = (
- data_start + offset, data_len,
- not key.startswith("environment"))
- index_len -= (key_len + 12) # 12 for key_len, offset, data_len longs
+ data_start + offset,
+ data_len,
+ not key.startswith("environment"),
+ )
+ index_len -= key_len + 12 # 12 for key_len, offset, data_len longs
return keys_dict
@@ -168,10 +176,12 @@ class Xpak:
pre, size, post = self.trailer.read(fd)
if pre != self.trailer_pre_magic or post != self.trailer_post_magic:
raise MalformedXpak(
- "not an xpak segment, trailer didn't match: %r" % fd)
+ "not an xpak segment, trailer didn't match: %r" % fd
+ )
except struct.error as e:
raise MalformedXpak(
- "not an xpak segment, failed parsing trailer: %r" % fd) from e
+ "not an xpak segment, failed parsing trailer: %r" % fd
+ ) from e
# this is a bit daft, but the format seems to intentionally
# have an off by 8 in the offset address. presumably cause the
@@ -182,11 +192,11 @@ class Xpak:
try:
pre, index_len, data_len = self.header.read(fd)
if pre != self.header_pre_magic:
- raise MalformedXpak(
- "not an xpak segment, header didn't match: %r" % fd)
+ raise MalformedXpak("not an xpak segment, header didn't match: %r" % fd)
except struct.error as e:
raise MalformedXpak(
- "not an xpak segment, failed parsing header: %r" % fd) from e
+ "not an xpak segment, failed parsing header: %r" % fd
+ ) from e
return self.xpak_start + self.header.size, index_len, data_len
@@ -200,9 +210,7 @@ class Xpak:
def items(self):
# note that it's an OrderedDict, so this works.
fd = self._fd
- return (
- (k, self._get_data(fd, *v))
- for k, v in self.keys_dict.items())
+ return ((k, self._get_data(fd, *v)) for k, v in self.keys_dict.items())
def __len__(self):
return len(self.keys_dict)
diff --git a/src/pkgcore/cache/__init__.py b/src/pkgcore/cache/__init__.py
index 36ecb184f..6e7ee4b5d 100644
--- a/src/pkgcore/cache/__init__.py
+++ b/src/pkgcore/cache/__init__.py
@@ -32,13 +32,13 @@ class base:
autocommits = False
cleanse_keys = False
default_sync_rate = 1
- chf_type = 'mtime'
- eclass_chf_types = ('mtime',)
- eclass_splitter = '\t'
+ chf_type = "mtime"
+ eclass_chf_types = ("mtime",)
+ eclass_splitter = "\t"
default_keys = metadata_keys
- frozen = klass.alias_attr('readonly')
+ frozen = klass.alias_attr("readonly")
def __init__(self, auxdbkeys=None, readonly=False):
"""
@@ -51,7 +51,7 @@ class base:
if auxdbkeys is None:
auxdbkeys = self.default_keys
self._known_keys = frozenset(auxdbkeys)
- self._chf_key = '_%s_' % self.chf_type
+ self._chf_key = "_%s_" % self.chf_type
self._chf_serializer = self._get_chf_serializer(self.chf_type)
self._chf_deserializer = self._get_chf_deserializer(self.chf_type)
self._known_keys |= frozenset([self._chf_key])
@@ -66,7 +66,7 @@ class base:
@staticmethod
def _mtime_serializer(data):
- return '%.0f' % math.floor(data.mtime)
+ return "%.0f" % math.floor(data.mtime)
@staticmethod
def _default_serializer(chf, data):
@@ -75,9 +75,9 @@ class base:
return get_handler(chf).long2str(getter(data))
def _get_chf_serializer(self, chf):
- if chf == 'eclassdir':
+ if chf == "eclassdir":
return self._eclassdir_serializer
- if chf == 'mtime':
+ if chf == "mtime":
return self._mtime_serializer
return partial(self._default_serializer, chf)
@@ -90,16 +90,15 @@ class base:
return int(data, 16)
def _get_chf_deserializer(self, chf):
- if chf == 'eclassdir':
+ if chf == "eclassdir":
return str
- elif chf == 'mtime':
+ elif chf == "mtime":
return self._mtime_deserializer
return self._default_deserializer
@klass.jit_attr
def eclass_chf_serializers(self):
- return tuple(self._get_chf_serializer(chf) for chf in
- self.eclass_chf_types)
+ return tuple(self._get_chf_serializer(chf) for chf in self.eclass_chf_types)
@klass.jit_attr
def eclass_chf_deserializers(self):
@@ -155,7 +154,7 @@ class base:
elif "_eclasses_" in values:
d["_eclasses_"] = self.deconstruct_eclasses(d["_eclasses_"])
- d[self._chf_key] = self._chf_serializer(d.pop('_chf_'))
+ d[self._chf_key] = self._chf_serializer(d.pop("_chf_"))
self._setitem(cpv, d)
self._sync_if_needed(True)
@@ -231,8 +230,7 @@ class base:
def reconstruct_eclasses(self, cpv, eclass_string):
"""Turn a string from :obj:`serialize_eclasses` into a dict."""
if not isinstance(eclass_string, str):
- raise TypeError("eclass_string must be basestring, got %r" %
- eclass_string)
+ raise TypeError("eclass_string must be basestring, got %r" % eclass_string)
eclass_data = eclass_string.strip().split(self.eclass_splitter)
if eclass_data == [""]:
# occasionally this occurs in the fs backends. they suck.
@@ -243,8 +241,9 @@ class base:
tuple_len = len(chf_funcs) + 1
if len(eclass_data) % tuple_len:
raise errors.CacheCorruption(
- cpv, f'_eclasses_ was of invalid len {len(eclass_data)}'
- f'(must be mod {tuple_len})'
+ cpv,
+ f"_eclasses_ was of invalid len {len(eclass_data)}"
+ f"(must be mod {tuple_len})",
)
i = iter(eclass_data)
@@ -256,28 +255,29 @@ class base:
# a dict; in effect, if 2 chfs, this results in a stream of-
# (eclass_name, ((chf1,chf1_val), (chf2, chf2_val))).
try:
- return [(eclass, tuple(self._deserialize_eclass_chfs(i)))
- for eclass in i]
+ return [(eclass, tuple(self._deserialize_eclass_chfs(i))) for eclass in i]
except ValueError as e:
raise errors.CacheCorruption(
- cpv, f'ValueError reading {eclass_string!r}') from e
+ cpv, f"ValueError reading {eclass_string!r}"
+ ) from e
def validate_entry(self, cache_item, ebuild_hash_item, eclass_db):
chf_hash = cache_item.get(self._chf_key)
- if (chf_hash is None or
- chf_hash != getattr(ebuild_hash_item, self.chf_type, None)):
+ if chf_hash is None or chf_hash != getattr(
+ ebuild_hash_item, self.chf_type, None
+ ):
return False
- eclass_data = cache_item.get('_eclasses_')
+ eclass_data = cache_item.get("_eclasses_")
if eclass_data is None:
return True
# if the INHERIT key is missing yet we did inherit some eclasses,
# trigger a refresh to upgrade metadata cache
- if cache_item.get('INHERIT') is None:
+ if cache_item.get("INHERIT") is None:
return False
update = eclass_db.rebuild_cache_entry(eclass_data)
if update is None:
return False
- cache_item['_eclasses_'] = update
+ cache_item["_eclasses_"] = update
return True
@@ -294,10 +294,10 @@ class bulk(base):
return self._read_data()
def _read_data(self):
- raise NotImplementedError(self, '_read_data')
+ raise NotImplementedError(self, "_read_data")
def _write_data(self):
- raise NotImplementedError(self, '_write_data')
+ raise NotImplementedError(self, "_write_data")
def __contains__(self, key):
return key in self.data
@@ -307,7 +307,7 @@ class bulk(base):
def _setitem(self, key, val):
known = self._known_keys
- val = self._cdict_kls((k, v) for k,v in val.items() if k in known)
+ val = self._cdict_kls((k, v) for k, v in val.items() if k in known)
self._pending_updates.append((key, val))
self.data[key] = val
diff --git a/src/pkgcore/cache/errors.py b/src/pkgcore/cache/errors.py
index 0c8f964dd..e0c7d171c 100644
--- a/src/pkgcore/cache/errors.py
+++ b/src/pkgcore/cache/errors.py
@@ -3,8 +3,11 @@ cache subsystem exceptions
"""
__all__ = (
- "CacheError", "InitializationError", "CacheCorruption",
- "GeneralCacheCorruption", "ReadOnly",
+ "CacheError",
+ "InitializationError",
+ "CacheCorruption",
+ "GeneralCacheCorruption",
+ "ReadOnly",
)
from ..exceptions import PkgcoreException
@@ -16,24 +19,23 @@ class CacheError(PkgcoreException):
class InitializationError(CacheError):
def __init__(self, class_name, error):
- super().__init__(
- f'creation of instance {class_name} failed due to {error}')
+ super().__init__(f"creation of instance {class_name} failed due to {error}")
self.error, self.class_name = error, class_name
class CacheCorruption(CacheError):
def __init__(self, key, ex):
- super().__init__(f'{key} is corrupt: {ex}')
+ super().__init__(f"{key} is corrupt: {ex}")
self.key, self.ex = key, ex
class GeneralCacheCorruption(CacheError):
def __init__(self, ex):
- super().__init__(f'corruption detected: {ex}')
+ super().__init__(f"corruption detected: {ex}")
self.ex = ex
class ReadOnly(CacheError):
- def __init__(self, info=''):
- super().__init__(f'cache is non-modifiable {info}')
+ def __init__(self, info=""):
+ super().__init__(f"cache is non-modifiable {info}")
self.info = info
diff --git a/src/pkgcore/cache/flat_hash.py b/src/pkgcore/cache/flat_hash.py
index 808e71deb..78c49c112 100644
--- a/src/pkgcore/cache/flat_hash.py
+++ b/src/pkgcore/cache/flat_hash.py
@@ -19,15 +19,15 @@ class database(fs_template.FsBased):
# TODO: different way of passing in default auxdbkeys and location
pkgcore_config_type = ConfigHint(
- {'readonly': 'bool', 'location': 'str', 'label': 'str',
- 'auxdbkeys': 'list'},
- required=['location'],
- positional=['location'],
- typename='cache')
+ {"readonly": "bool", "location": "str", "label": "str", "auxdbkeys": "list"},
+ required=["location"],
+ positional=["location"],
+ typename="cache",
+ )
autocommits = True
mtime_in_entry = True
- eclass_chf_types = ('eclassdir', 'mtime')
+ eclass_chf_types = ("eclassdir", "mtime")
def _getitem(self, cpv):
path = pjoin(self.location, cpv)
@@ -59,14 +59,15 @@ class database(fs_template.FsBased):
def _setitem(self, cpv, values):
# might seem weird, but we rely on the trailing +1; this
# makes it behave properly for any cache depth (including no depth)
- s = cpv.rfind('/') + 1
- fp = pjoin(self.location, cpv[:s], f'.update.{os.getpid()}.{cpv[s:]}')
+ s = cpv.rfind("/") + 1
+ fp = pjoin(self.location, cpv[:s], f".update.{os.getpid()}.{cpv[s:]}")
try:
myf = open(fp, "w", 32768)
except FileNotFoundError:
if not self._ensure_dirs(cpv):
raise errors.CacheCorruption(
- cpv, f'error creating directory for {fp!r}')
+ cpv, f"error creating directory for {fp!r}"
+ )
try:
myf = open(fp, "w", 32768)
except EnvironmentError as e:
@@ -76,9 +77,9 @@ class database(fs_template.FsBased):
if self._mtime_used:
if not self.mtime_in_entry:
- mtime = values['_mtime_']
+ mtime = values["_mtime_"]
for k, v in sorted(values.items()):
- myf.writelines(f'{k}={v}\n')
+ myf.writelines(f"{k}={v}\n")
myf.close()
if self._mtime_used and not self.mtime_in_entry:
@@ -132,15 +133,15 @@ class database(fs_template.FsBased):
if stat.S_ISDIR(st.st_mode):
dirs.append(p)
continue
- yield p[len_base+1:]
+ yield p[len_base + 1 :]
class md5_cache(database):
- chf_type = 'md5'
- eclass_chf_types = ('md5',)
+ chf_type = "md5"
+ eclass_chf_types = ("md5",)
chf_base = 16
def __init__(self, location, **config):
- location = pjoin(location, 'metadata', 'md5-cache')
+ location = pjoin(location, "metadata", "md5-cache")
super().__init__(location, **config)
diff --git a/src/pkgcore/cache/fs_template.py b/src/pkgcore/cache/fs_template.py
index f129f43d1..1652b6db3 100644
--- a/src/pkgcore/cache/fs_template.py
+++ b/src/pkgcore/cache/fs_template.py
@@ -29,10 +29,10 @@ class FsBased(base):
for x, y in (("gid", portage_gid), ("perms", 0o664)):
if x in config:
- setattr(self, f'_{x}', config[x])
+ setattr(self, f"_{x}", config[x])
del config[x]
else:
- setattr(self, f'_{x}', y)
+ setattr(self, f"_{x}", y)
super().__init__(**config)
if label is not None:
@@ -40,13 +40,17 @@ class FsBased(base):
self.location = location
- self._mtime_used = 'mtime' == self.chf_type
+ self._mtime_used = "mtime" == self.chf_type
__init__.__doc__ = "\n".join(
- x.lstrip() for x in __init__.__doc__.split("\n") + [
+ x.lstrip()
+ for x in __init__.__doc__.split("\n")
+ + [
y.lstrip().replace("@param", "@keyword")
for y in base.__init__.__doc__.split("\n")
- if "@param" in y])
+ if "@param" in y
+ ]
+ )
def _ensure_access(self, path, mtime=None):
"""Ensure access to a path.
diff --git a/src/pkgcore/config/__init__.py b/src/pkgcore/config/__init__.py
index af4c3761f..50ac571b9 100644
--- a/src/pkgcore/config/__init__.py
+++ b/src/pkgcore/config/__init__.py
@@ -1,6 +1,6 @@
"""configuration subsystem"""
-__all__ = ('load_config',)
+__all__ = ("load_config",)
# keep these imports as minimal as possible; access to
# pkgcore.config isn't uncommon, thus don't trigger till
@@ -12,11 +12,16 @@ from .. import const
from . import central, cparser
-def load_config(user_conf_file=const.USER_CONF_FILE,
- system_conf_file=const.SYSTEM_CONF_FILE,
- debug=False, prepend_sources=(),
- skip_config_files=False, profile_override=None,
- location=None, **kwargs):
+def load_config(
+ user_conf_file=const.USER_CONF_FILE,
+ system_conf_file=const.SYSTEM_CONF_FILE,
+ debug=False,
+ prepend_sources=(),
+ skip_config_files=False,
+ profile_override=None,
+ location=None,
+ **kwargs
+):
"""The main entry point for any code looking to use pkgcore.
Args:
@@ -41,6 +46,10 @@ def load_config(user_conf_file=const.USER_CONF_FILE,
else:
# delay importing to avoid circular imports
from pkgcore.ebuild.portage_conf import PortageConfig
- configs.append(PortageConfig(
- location=location, profile_override=profile_override, **kwargs))
+
+ configs.append(
+ PortageConfig(
+ location=location, profile_override=profile_override, **kwargs
+ )
+ )
return central.CompatConfigManager(central.ConfigManager(configs, debug=debug))
diff --git a/src/pkgcore/config/basics.py b/src/pkgcore/config/basics.py
index 06cd0b979..2bc3e1c9b 100644
--- a/src/pkgcore/config/basics.py
+++ b/src/pkgcore/config/basics.py
@@ -5,10 +5,21 @@ all callables can/may throw a :class:`pkgcore.config.errors.ConfigurationError`
"""
__all__ = (
- "ConfigType", "LazySectionRef", "LazyNamedSectionRef", "ConfigSection",
- "DictConfigSection", "FakeIncrementalDictConfigSection", "convert_string",
- "convert_asis", "convert_hybrid", "section_alias", "str_to_list",
- "str_to_str", "str_to_bool", "str_to_int", "parse_config_file",
+ "ConfigType",
+ "LazySectionRef",
+ "LazyNamedSectionRef",
+ "ConfigSection",
+ "DictConfigSection",
+ "FakeIncrementalDictConfigSection",
+ "convert_string",
+ "convert_asis",
+ "convert_hybrid",
+ "section_alias",
+ "str_to_list",
+ "str_to_str",
+ "str_to_bool",
+ "str_to_int",
+ "parse_config_file",
)
from functools import partial
@@ -57,8 +68,8 @@ class ConfigType:
original_func_obj = func_obj
self.name = func_obj.__name__
self.callable = func_obj
- self.doc = getattr(func_obj, '__doc__', None)
- if not hasattr(func_obj, '__code__'):
+ self.doc = getattr(func_obj, "__doc__", None)
+ if not hasattr(func_obj, "__code__"):
# No function or method, should be a class so grab __init__.
func_obj = func_obj.__init__
# We do not use the inspect module because that is a heavy
@@ -72,20 +83,21 @@ class ConfigType:
hint_overrides = getattr(self.callable, "pkgcore_config_type", None)
# if it's not authorative, do introspection; the getattr is to protect
# against the case where there is no Hint
- if not getattr(hint_overrides, 'authorative', None):
+ if not getattr(hint_overrides, "authorative", None):
try:
- code = getattr(func_obj, '__code__')
+ code = getattr(func_obj, "__code__")
except AttributeError:
if func_obj != object.__init__:
raise TypeError(
"func %s has no %r attribute; likely a "
"builtin object which can't be introspected without hints"
- % (original_func_obj, '__code__'))
+ % (original_func_obj, "__code__")
+ )
else:
- if code.co_argcount and code.co_varnames[0] == 'self':
- args = code.co_varnames[1:code.co_argcount]
+ if code.co_argcount and code.co_varnames[0] == "self":
+ args = code.co_varnames[1 : code.co_argcount]
else:
- args = code.co_varnames[:code.co_argcount]
+ args = code.co_varnames[: code.co_argcount]
varargs = bool(code.co_flags & CO_VARARGS)
varkw = bool(code.co_flags & CO_VARKEYWORDS)
defaults = func_obj.__defaults__
@@ -95,18 +107,19 @@ class ConfigType:
for i, default in enumerate(reversed(defaults)):
argname = args[-1 - i]
for typeobj, typename in [
- (bool, 'bool'),
- (tuple, 'list'),
- (str, 'str'),
- (int, 'int')]:
+ (bool, "bool"),
+ (tuple, "list"),
+ (str, "str"),
+ (int, "int"),
+ ]:
if isinstance(default, typeobj):
self.types[argname] = typename
break
# just [:-len(defaults)] doesn't work if there are no defaults
- self.positional = args[:len(args)-len(defaults)]
+ self.positional = args[: len(args) - len(defaults)]
# no defaults to determine the type from -> default to str.
for arg in self.positional:
- self.types[arg] = 'str'
+ self.types[arg] = "str"
self.required = tuple(self.positional)
self.allow_unknowns = False
self.requires_config = False
@@ -128,22 +141,26 @@ class ConfigType:
self.raw_class = hint_overrides.raw_class
if self.requires_config:
if self.requires_config in self.required:
- self.required = tuple(x for x in self.required if x != self.requires_config)
+ self.required = tuple(
+ x for x in self.required if x != self.requires_config
+ )
elif varargs or varkw:
raise TypeError(
- f'func {self.callable} accepts *args or **kwargs, '
- 'and no ConfigHint is provided'
+ f"func {self.callable} accepts *args or **kwargs, "
+ "and no ConfigHint is provided"
)
- for var in ('class', 'inherit', 'default'):
+ for var in ("class", "inherit", "default"):
if var in self.types:
raise errors.TypeDefinitionError(
- f'{self.callable}: you cannot change the type of {var!r}')
+ f"{self.callable}: you cannot change the type of {var!r}"
+ )
for var in self.positional:
if var not in self.required and var != self.requires_config:
raise errors.TypeDefinitionError(
- f'{self.callable}: {var!r} is in positionals but not in required')
+ f"{self.callable}: {var!r} is in positionals but not in required"
+ )
class LazySectionRef:
@@ -151,7 +168,7 @@ class LazySectionRef:
def __init__(self, central, typename):
self.central = central
- self.typename = typename.split(':', 1)[1]
+ self.typename = typename.split(":", 1)[1]
self.cached_config = None
def _collapse(self):
@@ -164,8 +181,8 @@ class LazySectionRef:
config = self.cached_config = self._collapse()
if self.typename is not None and config.type.name != self.typename:
raise errors.ConfigurationError(
- f'reference {self.name!r} should be of type '
- f'{self.typename!r}, got {config.type.name!r}'
+ f"reference {self.name!r} should be of type "
+ f"{self.typename!r}, got {config.type.name!r}"
)
return self.cached_config
@@ -175,7 +192,6 @@ class LazySectionRef:
class LazyNamedSectionRef(LazySectionRef):
-
def __init__(self, central, typename, name):
super().__init__(central, typename)
self.name = name
@@ -185,7 +201,6 @@ class LazyNamedSectionRef(LazySectionRef):
class LazyUnnamedSectionRef(LazySectionRef):
-
def __init__(self, central, typename, section):
super().__init__(central, typename)
self.section = section
@@ -211,7 +226,7 @@ class ConfigSection:
def render_value(self, central, name, arg_type):
"""Return a setting, converted to the requested type."""
- raise NotImplementedError(self, 'render_value')
+ raise NotImplementedError(self, "render_value")
class DictConfigSection(ConfigSection):
@@ -242,7 +257,8 @@ class DictConfigSection(ConfigSection):
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Failed converting argument {name!r} to {arg_type}') from e
+ f"Failed converting argument {name!r} to {arg_type}"
+ ) from e
class FakeIncrementalDictConfigSection(ConfigSection):
@@ -267,26 +283,29 @@ class FakeIncrementalDictConfigSection(ConfigSection):
self.dict = source_dict
def __contains__(self, name):
- return name in self.dict or name + '.append' in self.dict or \
- name + '.prepend' in self.dict
+ return (
+ name in self.dict
+ or name + ".append" in self.dict
+ or name + ".prepend" in self.dict
+ )
def keys(self):
keys = set()
for key in self.dict:
- if key.endswith('.append'):
+ if key.endswith(".append"):
key = key[:-7]
- elif key.endswith('.prepend'):
+ elif key.endswith(".prepend"):
key = key[:-8]
keys.add(key)
return list(keys)
def render_value(self, central, name, arg_type):
# Check if we need our special incremental magic.
- if arg_type in ('list', 'str', 'repr') or arg_type.startswith('refs:'):
+ if arg_type in ("list", "str", "repr") or arg_type.startswith("refs:"):
result = []
# Careful: None is a valid dict value, so use something else here.
missing = object()
- for subname in (name + '.prepend', name, name + '.append'):
+ for subname in (name + ".prepend", name, name + ".append"):
val = self.dict.get(subname, missing)
if val is missing:
val = None
@@ -297,11 +316,12 @@ class FakeIncrementalDictConfigSection(ConfigSection):
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Failed converting argument {subname!r} to {arg_type}') from e
+ f"Failed converting argument {subname!r} to {arg_type}"
+ ) from e
result.append(val)
if result[0] is result[1] is result[2] is None:
raise KeyError(name)
- if arg_type != 'repr':
+ if arg_type != "repr":
# Done.
return result
# If "kind" is of some incremental-ish kind or we have
@@ -325,7 +345,7 @@ class FakeIncrementalDictConfigSection(ConfigSection):
# Simple case: no extra data, so no need for any
# conversions.
kind, val = result[1]
- if kind in ('list', 'str') or kind == 'refs':
+ if kind in ("list", "str") or kind == "refs":
# Caller expects a three-tuple.
return kind, (None, val, None)
else:
@@ -334,56 +354,56 @@ class FakeIncrementalDictConfigSection(ConfigSection):
# We have more than one return value. Figure out what
# target to convert to. Choices are list, str and refs.
kinds = set(v[0] for v in result if v is not None)
- if 'refs' in kinds or 'ref' in kinds:
+ if "refs" in kinds or "ref" in kinds:
# If we have any refs we have to convert to refs.
- target_kind = 'refs'
- elif kinds == set(['str']):
+ target_kind = "refs"
+ elif kinds == set(["str"]):
# If we have only str we can just use that.
- target_kind = 'str'
+ target_kind = "str"
else:
# Convert to list. May not make any sense, but is
# the best we can do.
- target_kind = 'list'
+ target_kind = "list"
converted = []
for val in result:
if val is None:
converted.append(None)
continue
kind, val = val
- if kind == 'ref':
- if target_kind != 'refs':
+ if kind == "ref":
+ if target_kind != "refs":
raise ValueError(
- 'Internal issue detected: kind(ref), '
- f'target_kind({target_kind!r}), name({name!r}), '
- f'val({val!r}), arg_type({arg_type!r})'
+ "Internal issue detected: kind(ref), "
+ f"target_kind({target_kind!r}), name({name!r}), "
+ f"val({val!r}), arg_type({arg_type!r})"
)
converted.append([val])
- elif kind == 'refs':
- if target_kind != 'refs':
+ elif kind == "refs":
+ if target_kind != "refs":
raise ValueError(
- 'Internal issue detected: kind(refs), '
- f'target_kind({target_kind!r}), name({name!r}), '
- f'val({val!r}), arg_type({arg_type!r})'
+ "Internal issue detected: kind(refs), "
+ f"target_kind({target_kind!r}), name({name!r}), "
+ f"val({val!r}), arg_type({arg_type!r})"
)
converted.append(val)
- elif kind == 'list':
- if target_kind == 'str':
+ elif kind == "list":
+ if target_kind == "str":
raise ValueError(
- 'Internal issue detected: kind(str), '
- f'target_kind({target_kind!r}), name({name!r}), '
- f'val({val!r}), arg_type({arg_type!r})'
+ "Internal issue detected: kind(str), "
+ f"target_kind({target_kind!r}), name({name!r}), "
+ f"val({val!r}), arg_type({arg_type!r})"
)
converted.append(val)
else:
# Everything else gets converted to a string first.
- if kind == 'callable':
- val = '%s.%s' % (val.__module__, val.__name__)
- elif kind in ('bool', 'int', 'str'):
+ if kind == "callable":
+ val = "%s.%s" % (val.__module__, val.__name__)
+ elif kind in ("bool", "int", "str"):
val = str(val)
else:
- raise errors.ConfigurationError(f'unsupported type {kind!r}')
+ raise errors.ConfigurationError(f"unsupported type {kind!r}")
# Then convert the str to list if needed.
- if target_kind == 'str':
+ if target_kind == "str":
converted.append(val)
else:
converted.append([val])
@@ -395,7 +415,8 @@ class FakeIncrementalDictConfigSection(ConfigSection):
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Failed converting argument {name!r} to {arg_type}') from e
+ f"Failed converting argument {name!r} to {arg_type}"
+ ) from e
def str_to_list(string):
@@ -406,7 +427,7 @@ def str_to_list(string):
# check for stringness because we return something interesting if
# fed a sequence of strings
if not isinstance(string, str):
- raise TypeError(f'expected a string, got {string!r}')
+ raise TypeError(f"expected a string, got {string!r}")
while i < e:
if not string[i].isspace():
if string[i] in ("'", '"'):
@@ -414,24 +435,23 @@ def str_to_list(string):
i += 1
res = []
while i < e and string[i] != string[q]:
- if string[i] == '\\':
+ if string[i] == "\\":
i += 1
res.append(string[i])
i += 1
if i >= e:
raise errors.QuoteInterpretationError(string)
- l.append(''.join(res))
+ l.append("".join(res))
else:
res = []
- while i < e and not (string[i].isspace() or
- string[i] in ("'", '"')):
- if string[i] == '\\':
+ while i < e and not (string[i].isspace() or string[i] in ("'", '"')):
+ if string[i] == "\\":
i += 1
res.append(string[i])
i += 1
if i < e and string[i] in ("'", '"'):
raise errors.QuoteInterpretationError(string)
- l.append(''.join(res))
+ l.append("".join(res))
i += 1
return l
@@ -439,9 +459,9 @@ def str_to_list(string):
def str_to_str(string):
"""Yank leading/trailing whitespace and quotation, along with newlines."""
s = string.strip()
- if len(s) > 1 and s[0] in '"\'' and s[0] == s[-1]:
+ if len(s) > 1 and s[0] in "\"'" and s[0] == s[-1]:
s = s[1:-1]
- return s.replace('\n', ' ').replace('\t', ' ')
+ return s.replace("\n", " ").replace("\t", " ")
def str_to_bool(string):
@@ -451,7 +471,7 @@ def str_to_bool(string):
return False
if s in ("yes", "true", "1"):
return True
- raise errors.ConfigurationError(f'{s!r} is not a boolean')
+ raise errors.ConfigurationError(f"{s!r} is not a boolean")
def str_to_int(string):
@@ -460,13 +480,14 @@ def str_to_int(string):
try:
return int(string)
except ValueError:
- raise errors.ConfigurationError(f'{string!r} is not an integer')
+ raise errors.ConfigurationError(f"{string!r} is not an integer")
+
_str_converters = {
- 'list': str_to_list,
- 'str': str_to_str,
- 'bool': str_to_bool,
- 'int': str_to_int
+ "list": str_to_list,
+ "str": str_to_str,
+ "bool": str_to_bool,
+ "int": str_to_int,
}
@@ -474,67 +495,67 @@ def convert_string(central, value, arg_type):
"""Conversion func for a string-based DictConfigSection."""
if not isinstance(value, str):
raise ValueError(
- 'convert_string invoked with non str instance: '
- f'val({value!r}), arg_type({arg_type!r})'
+ "convert_string invoked with non str instance: "
+ f"val({value!r}), arg_type({arg_type!r})"
)
- if arg_type == 'callable':
+ if arg_type == "callable":
try:
func = modules.load_attribute(value)
except modules.FailedImport as e:
- raise errors.ConfigurationError(f'cannot import {value!r}') from e
+ raise errors.ConfigurationError(f"cannot import {value!r}") from e
if not callable(func):
- raise errors.ConfigurationError(f'{value!r} is not callable')
+ raise errors.ConfigurationError(f"{value!r} is not callable")
return func
- elif arg_type.startswith('refs:'):
- return list(LazyNamedSectionRef(central, arg_type, ref)
- for ref in str_to_list(value))
- elif arg_type.startswith('ref:'):
+ elif arg_type.startswith("refs:"):
+ return list(
+ LazyNamedSectionRef(central, arg_type, ref) for ref in str_to_list(value)
+ )
+ elif arg_type.startswith("ref:"):
return LazyNamedSectionRef(central, arg_type, str_to_str(value))
- elif arg_type == 'repr':
- return 'str', value
+ elif arg_type == "repr":
+ return "str", value
func = _str_converters.get(arg_type)
if func is None:
- raise errors.ConfigurationError(f'unknown type {arg_type!r}')
+ raise errors.ConfigurationError(f"unknown type {arg_type!r}")
return func(value)
def convert_asis(central, value, arg_type):
- """"Conversion" func assuming the types are already correct."""
- if arg_type == 'callable':
+ """ "Conversion" func assuming the types are already correct."""
+ if arg_type == "callable":
if not callable(value):
- raise errors.ConfigurationError(f'{value!r} is not callable')
+ raise errors.ConfigurationError(f"{value!r} is not callable")
return value
- elif arg_type.startswith('ref:'):
+ elif arg_type.startswith("ref:"):
if not isinstance(value, ConfigSection):
- raise errors.ConfigurationError(f'{value!r} is not a config section')
+ raise errors.ConfigurationError(f"{value!r} is not a config section")
return LazyUnnamedSectionRef(central, arg_type, value)
- elif arg_type.startswith('refs:'):
+ elif arg_type.startswith("refs:"):
l = []
for section in value:
if not isinstance(section, ConfigSection):
- raise errors.ConfigurationError(f'{value!r} is not a config section')
+ raise errors.ConfigurationError(f"{value!r} is not a config section")
l.append(LazyUnnamedSectionRef(central, arg_type, section))
return l
- elif arg_type == 'repr':
+ elif arg_type == "repr":
if callable(value):
- return 'callable', value
+ return "callable", value
if isinstance(value, ConfigSection):
- return 'ref', value
+ return "ref", value
if isinstance(value, str):
- return 'str', value
+ return "str", value
if isinstance(value, bool):
- return 'bool', value
+ return "bool", value
if isinstance(value, (list, tuple)):
if not value or isinstance(value[0], str):
- return 'list', value
+ return "list", value
if isinstance(value[0], ConfigSection):
- return 'refs', value
- raise errors.ConfigurationError(f'unsupported type for {value!r}')
- elif not isinstance(value, {'list': (list, tuple),
- 'str': str,
- 'bool': bool}[arg_type]):
- raise errors.ConfigurationError(
- f'{value!r} does not have type {arg_type!r}')
+ return "refs", value
+ raise errors.ConfigurationError(f"unsupported type for {value!r}")
+ elif not isinstance(
+ value, {"list": (list, tuple), "str": str, "bool": bool}[arg_type]
+ ):
+ raise errors.ConfigurationError(f"{value!r} does not have type {arg_type!r}")
return value
@@ -547,18 +568,16 @@ def convert_hybrid(central, value, arg_type):
Be careful about handing in escaped strings: they are not
unescaped (for arg_type str).
"""
- if arg_type != 'str' and isinstance(value, str):
+ if arg_type != "str" and isinstance(value, str):
return convert_string(central, value, arg_type)
return convert_asis(central, value, arg_type)
+
# "Invalid name" (pylint thinks these are module-level constants)
# pylint: disable-msg=C0103
-HardCodedConfigSection = partial(
- FakeIncrementalDictConfigSection, convert_asis)
-ConfigSectionFromStringDict = partial(
- FakeIncrementalDictConfigSection, convert_string)
-AutoConfigSection = partial(
- FakeIncrementalDictConfigSection, convert_hybrid)
+HardCodedConfigSection = partial(FakeIncrementalDictConfigSection, convert_asis)
+ConfigSectionFromStringDict = partial(FakeIncrementalDictConfigSection, convert_string)
+AutoConfigSection = partial(FakeIncrementalDictConfigSection, convert_hybrid)
def section_alias(target, typename):
@@ -567,18 +586,20 @@ def section_alias(target, typename):
Because of central's caching our instantiated value will be
identical to our target's.
"""
- @configurable({'target': 'ref:' + typename}, typename=typename)
+
+ @configurable({"target": "ref:" + typename}, typename=typename)
def section_alias(target):
return target
- return AutoConfigSection({'class': section_alias, 'target': target})
+ return AutoConfigSection({"class": section_alias, "target": target})
-@configurable({'path': 'str', 'parser': 'callable'}, typename='configsection')
+
+@configurable({"path": "str", "parser": "callable"}, typename="configsection")
def parse_config_file(path, parser):
try:
- f = open(path, 'r')
+ f = open(path, "r")
except (IOError, OSError):
- raise errors.InstantiationError(f'failed opening {path!r}')
+ raise errors.InstantiationError(f"failed opening {path!r}")
try:
return parser(f)
finally:
@@ -590,11 +611,10 @@ class ConfigSource:
description = "No description available"
def sections(self):
- raise NotImplementedError(self, 'sections')
+ raise NotImplementedError(self, "sections")
class GeneratedConfigSource(ConfigSource):
-
def __init__(self, section_data, description):
self.description = description
self.section_data = section_data
diff --git a/src/pkgcore/config/central.py b/src/pkgcore/config/central.py
index a04509e12..b142963b4 100644
--- a/src/pkgcore/config/central.py
+++ b/src/pkgcore/config/central.py
@@ -3,7 +3,10 @@
A lot of extra documentation on this is in dev-notes/config.rst.
"""
-__all__ = ("CollapsedConfig", "ConfigManager",)
+__all__ = (
+ "CollapsedConfig",
+ "ConfigManager",
+)
import weakref
from collections import defaultdict, deque, namedtuple
@@ -14,7 +17,7 @@ from snakeoil.compatibility import IGNORED_EXCEPTIONS
from . import basics, errors
-_section_data = namedtuple('_section_data', ['name', 'section'])
+_section_data = namedtuple("_section_data", ["name", "section"])
class _ConfigMapping(mappings.DictMixin):
@@ -59,7 +62,6 @@ class _ConfigMapping(mappings.DictMixin):
class _ConfigStack(defaultdict):
-
def __init__(self):
super().__init__(list)
@@ -118,9 +120,10 @@ class CollapsedConfig:
if missing:
module = type_obj.callable.__module__
name = type_obj.callable.__name__
- missing_vars = ', '.join(map(repr, missing))
+ missing_vars = ", ".join(map(repr, missing))
raise errors.ConfigurationError(
- f'type {module}.{name} needs settings for {missing_vars}')
+ f"type {module}.{name} needs settings for {missing_vars}"
+ )
self.name = None
self.default = default
@@ -163,10 +166,10 @@ class CollapsedConfig:
continue
# central already checked the type, no need to repeat that here.
unlist_it = False
- if typename.startswith('ref:'):
+ if typename.startswith("ref:"):
val = [val]
unlist_it = True
- if typename.startswith('refs:') or unlist_it:
+ if typename.startswith("refs:") or unlist_it:
try:
final_val = []
for ref in val:
@@ -175,25 +178,25 @@ class CollapsedConfig:
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Instantiating reference {name!r} pointing at {ref.name!r}') from e
+ f"Instantiating reference {name!r} pointing at {ref.name!r}"
+ ) from e
if unlist_it:
final_val = final_val[0]
config[name] = final_val
-
if self.type.requires_config:
if self.manager is None:
raise Exception(
- 'configuration internal error; '
- 'requires_config is enabled '
- 'but we have no config manager to return '
+ "configuration internal error; "
+ "requires_config is enabled "
+ "but we have no config manager to return "
)
manager = self.manager()
if manager is None:
raise Exception(
- 'Configuration internal error, potentially '
- 'client code error; manager requested, but the config '
- 'manager is no longer in memory'
+ "Configuration internal error, potentially "
+ "client code error; manager requested, but the config "
+ "manager is no longer in memory"
)
config[self.type.requires_config] = manager
@@ -216,28 +219,31 @@ class CollapsedConfig:
except Exception as e:
source = errors._identify_functor_source(self.type.callable)
raise errors.InstantiationError(
- self.name, f'exception caught from {source!r}') from e
+ self.name, f"exception caught from {source!r}"
+ ) from e
if self._instance is None:
raise errors.ComplexInstantiationError(
- 'No object returned', callable_obj=callable_obj, pargs=pargs,
- kwargs=configdict)
+ "No object returned",
+ callable_obj=callable_obj,
+ pargs=pargs,
+ kwargs=configdict,
+ )
return self._instance
def __getstate__(self):
d = self.__dict__.copy()
# pull actual value from weakref
- d['manager'] = d['manager']()
+ d["manager"] = d["manager"]()
return d
def __setstate__(self, state):
self.__dict__ = state.copy()
# reset weakref
- self.__dict__['manager'] = weakref.ref(self.__dict__['manager'])
+ self.__dict__["manager"] = weakref.ref(self.__dict__["manager"])
class _ConfigObjMap:
-
def __init__(self, manager):
self._manager = manager
@@ -260,13 +266,12 @@ class _ConfigObjMap:
class CompatConfigManager:
-
def __init__(self, manager):
self._manager = manager
def __getattr__(self, attr):
- if attr == '_manager':
- return object.__getattribute__(self, '_manager')
+ if attr == "_manager":
+ return object.__getattribute__(self, "_manager")
obj = getattr(self._manager, attr, klass.sentinel)
if obj is klass.sentinel:
obj = getattr(self._manager.objects, attr)
@@ -309,7 +314,7 @@ class ConfigManager:
self.objects = _ConfigObjMap(self)
def _compat_mangle_config(self, config):
- if hasattr(config, 'sections'):
+ if hasattr(config, "sections"):
return config
return basics.GeneratedConfigSource(config, "unknown")
@@ -354,10 +359,10 @@ class ConfigManager:
# If this matches something we previously instantiated
# we should probably blow up to prevent massive
# amounts of confusion (and recursive autoloads)
- sections = ', '.join(repr(x) for x in sorted(collision))
+ sections = ", ".join(repr(x) for x in sorted(collision))
raise errors.ConfigurationError(
- 'New config is trying to modify existing section(s) '
- f'{sections} that was already instantiated.'
+ "New config is trying to modify existing section(s) "
+ f"{sections} that was already instantiated."
)
self.configs.append(config_data)
@@ -366,7 +371,7 @@ class ConfigManager:
self.sections_lookup[name].appendleft(config_data[name])
# Do not even touch the ConfigSection if it's not an autoload.
- if not name.startswith('autoload'):
+ if not name.startswith("autoload"):
continue
try:
@@ -375,12 +380,13 @@ class ConfigManager:
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Failed collapsing autoload section {name!r}') from e
+ f"Failed collapsing autoload section {name!r}"
+ ) from e
- if collapsed.type.name != 'configsection':
+ if collapsed.type.name != "configsection":
raise errors.ConfigurationError(
- f'Section {name!r} is marked as autoload but '
- f'type is {collapsed.type.name}, not configsection'
+ f"Section {name!r} is marked as autoload but "
+ f"type is {collapsed.type.name}, not configsection"
)
try:
instance = collapsed.instantiate()
@@ -388,7 +394,7 @@ class ConfigManager:
raise
except Exception as e:
raise errors.AutoloadInstantiationError(name) from e
- if collapsed.type.name == 'configsection':
+ if collapsed.type.name == "configsection":
self.add_config_source(instance)
def sections(self):
@@ -404,7 +410,7 @@ class ConfigManager:
unless raise_on_missing is False in which case None is returned.
"""
if name in self._refs:
- raise errors.ConfigurationError(f'Reference to {name!r} is recursive')
+ raise errors.ConfigurationError(f"Reference to {name!r} is recursive")
self._refs.add(name)
try:
result = self.rendered_sections.get(name)
@@ -414,7 +420,7 @@ class ConfigManager:
if section_stack is None:
if not raise_on_missing:
return None
- raise errors.ConfigurationError(f'no section called {name!r}')
+ raise errors.ConfigurationError(f"no section called {name!r}")
try:
result = self.collapse_section(section_stack, name)
result.name = name
@@ -422,7 +428,8 @@ class ConfigManager:
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Collapsing section named {name!r}') from e
+ f"Collapsing section named {name!r}"
+ ) from e
self.rendered_sections[name] = result
return result
finally:
@@ -436,20 +443,23 @@ class ConfigManager:
inherit_names = set([name])
for current_section, section_stack in slist:
current_conf = section_stack[0]
- if 'inherit' not in current_conf:
+ if "inherit" not in current_conf:
continue
prepend, inherits, append = current_conf.render_value(
- self, 'inherit', 'list')
+ self, "inherit", "list"
+ )
if prepend is not None or append is not None:
raise errors.ConfigurationError(
- 'Prepending or appending to the inherit list makes no sense')
+ "Prepending or appending to the inherit list makes no sense"
+ )
for inherit in inherits:
if inherit == current_section:
# self-inherit. Make use of section_stack to handle this.
if len(section_stack) == 1:
# nothing else to self inherit.
raise errors.ConfigurationError(
- f'Self-inherit {inherit!r} cannot be found')
+ f"Self-inherit {inherit!r} cannot be found"
+ )
if isinstance(section_stack, deque):
slist.append((inherit, list(section_stack)[1:]))
else:
@@ -457,18 +467,20 @@ class ConfigManager:
else:
if inherit in inherit_names:
raise errors.ConfigurationError(
- f'Inherit {inherit!r} is recursive')
+ f"Inherit {inherit!r} is recursive"
+ )
inherit_names.add(inherit)
target = self.sections_lookup.get(inherit)
if target is None:
raise errors.ConfigurationError(
- f'Inherit target {inherit!r} cannot be found')
+ f"Inherit target {inherit!r} cannot be found"
+ )
slist.append((inherit, target))
return [_section_data(name, stack[0]) for (name, stack) in slist]
def _section_is_inherit_only(self, section):
- if 'inherit-only' in section:
- if section.render_value(self, 'inherit-only', 'bool'):
+ if "inherit-only" in section:
+ if section.render_value(self, "inherit-only", "bool"):
return True
return False
@@ -476,9 +488,8 @@ class ConfigManager:
"""Collapse a ConfigSection to a :obj:`CollapsedConfig`."""
if self._section_is_inherit_only(sections[0]):
- if sections[0].render_value(self, 'inherit-only', 'bool'):
- raise errors.CollapseInheritOnly(
- 'cannot collapse inherit-only section')
+ if sections[0].render_value(self, "inherit-only", "bool"):
+ raise errors.CollapseInheritOnly("cannot collapse inherit-only section")
relevant_sections = self._get_inherited_sections(_name, sections)
@@ -487,17 +498,22 @@ class ConfigManager:
for key in data.section.keys():
config_stack[key].append(data)
- kls = config_stack.render_val(self, 'class', 'callable')
+ kls = config_stack.render_val(self, "class", "callable")
if kls is None:
- raise errors.ConfigurationError('no class specified')
+ raise errors.ConfigurationError("no class specified")
type_obj = basics.ConfigType(kls)
- is_default = bool(config_stack.render_val(self, 'default', 'bool'))
+ is_default = bool(config_stack.render_val(self, "default", "bool"))
- for key in ('inherit', 'inherit-only', 'class', 'default'):
+ for key in ("inherit", "inherit-only", "class", "default"):
config_stack.pop(key, None)
- collapsed = CollapsedConfig(type_obj, self._render_config_stack(type_obj, config_stack),
- self, default=is_default, debug=self.debug)
+ collapsed = CollapsedConfig(
+ type_obj,
+ self._render_config_stack(type_obj, config_stack),
+ self,
+ default=is_default,
+ debug=self.debug,
+ )
return collapsed
@klass.jit_attr
@@ -509,8 +525,8 @@ class ConfigManager:
obj = self.collapse_named_section(name)
type_map[obj.type.name][name] = obj
return mappings.ImmutableDict(
- (k, mappings.ImmutableDict(v))
- for k,v in type_map.items())
+ (k, mappings.ImmutableDict(v)) for k, v in type_map.items()
+ )
def _render_config_stack(self, type_obj, config_stack):
conf = {}
@@ -518,19 +534,21 @@ class ConfigManager:
typename = type_obj.types.get(key)
if typename is None:
if not type_obj.allow_unknowns:
- raise errors.ConfigurationError(f'Type of {key!r} unknown')
- typename = 'str'
+ raise errors.ConfigurationError(f"Type of {key!r} unknown")
+ typename = "str"
- is_ref = typename.startswith('ref:')
- is_refs = typename.startswith('refs:')
+ is_ref = typename.startswith("ref:")
+ is_refs = typename.startswith("refs:")
- if typename.startswith('lazy_'):
+ if typename.startswith("lazy_"):
typename = typename[5:]
- if typename.startswith('refs:') or typename in ('list', 'str'):
- result = config_stack.render_prepends(self, key, typename, flatten=(typename != 'str'))
- if typename == 'str':
- result = ' '.join(result)
+ if typename.startswith("refs:") or typename in ("list", "str"):
+ result = config_stack.render_prepends(
+ self, key, typename, flatten=(typename != "str")
+ )
+ if typename == "str":
+ result = " ".join(result)
else:
result = config_stack.render_val(self, key, typename)
@@ -545,7 +563,8 @@ class ConfigManager:
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Failed collapsing section key {key!r}') from e
+ f"Failed collapsing section key {key!r}"
+ ) from e
if is_ref:
result = result[0]
@@ -556,9 +575,10 @@ class ConfigManager:
if missing:
module = type_obj.callable.__module__
name = type_obj.callable.__name__
- missing_vars = ', '.join(map(repr, missing))
+ missing_vars = ", ".join(map(repr, missing))
raise errors.ConfigurationError(
- f'type {module}.{name} needs settings for {missing_vars}')
+ f"type {module}.{name} needs settings for {missing_vars}"
+ )
return mappings.ImmutableDict(conf)
@@ -573,16 +593,18 @@ class ConfigManager:
raise
except Exception as e:
raise errors.ConfigurationError(
- f'Collapsing defaults for {type_name!r}') from e
+ f"Collapsing defaults for {type_name!r}"
+ ) from e
defaults = [(name, section) for name, section in defaults if section.default]
if not defaults:
return None
if len(defaults) > 1:
- defaults = ', '.join(map(repr, sorted(x[0] for x in defaults)))
+ defaults = ", ".join(map(repr, sorted(x[0] for x in defaults)))
raise errors.ConfigurationError(
- f'type {type_name} incorrectly has multiple default sections: {defaults}')
+ f"type {type_name} incorrectly has multiple default sections: {defaults}"
+ )
try:
return defaults[0][1].instantiate()
@@ -590,5 +612,6 @@ class ConfigManager:
raise
except Exception as e:
raise errors.ConfigurationError(
- f'failed instantiating default {type_name} {defaults[0][0]!r}') from e
+ f"failed instantiating default {type_name} {defaults[0][0]!r}"
+ ) from e
return None
diff --git a/src/pkgcore/config/cparser.py b/src/pkgcore/config/cparser.py
index 3a3ad6fba..7c49cc141 100644
--- a/src/pkgcore/config/cparser.py
+++ b/src/pkgcore/config/cparser.py
@@ -27,7 +27,9 @@ def config_from_file(file_obj):
try:
cparser.read_file(file_obj)
except configparser.ParsingError as e:
- raise errors.ParsingError(f'while parsing {file_obj}', e) from e
+ raise errors.ParsingError(f"while parsing {file_obj}", e) from e
+
def get_section(section):
return basics.ConfigSectionFromStringDict(dict(cparser.items(section)))
+
return mappings.LazyValDict(cparser.sections, get_section)
diff --git a/src/pkgcore/config/domain.py b/src/pkgcore/config/domain.py
index e08303643..f1b37ccc6 100644
--- a/src/pkgcore/config/domain.py
+++ b/src/pkgcore/config/domain.py
@@ -15,7 +15,8 @@ class MissingFile(PkgcoreException):
def __init__(self, filename, setting):
super().__init__(
- f"setting {setting} points at {filename!r}, which doesn't exist.")
+ f"setting {setting} points at {filename!r}, which doesn't exist."
+ )
self.file, self.setting = filename, setting
@@ -23,7 +24,7 @@ class Failure(PkgcoreException):
"""Generic domain failure."""
def __init__(self, text):
- super().__init__(f'domain failure: {text}')
+ super().__init__(f"domain failure: {text}")
self.text = text
@@ -46,19 +47,23 @@ class domain:
def build_pkg(self, pkg, observer=None, failed=False, clean=True, **kwargs):
domain = self.get_package_domain(pkg)
return domain.pkg_operations(pkg, observer=observer).build(
- observer=observer, failed=failed, clean=clean, **kwargs)
+ observer=observer, failed=failed, clean=clean, **kwargs
+ )
def install_pkg(self, newpkg, observer):
domain = self.get_package_domain(newpkg)
return domain_ops.install(
- domain, domain.all_installed_repos, newpkg, observer, domain.root)
+ domain, domain.all_installed_repos, newpkg, observer, domain.root
+ )
def uninstall_pkg(self, pkg, observer):
domain = self.get_package_domain(pkg)
return domain_ops.uninstall(
- domain, domain.all_installed_repos, pkg, observer, domain.root)
+ domain, domain.all_installed_repos, pkg, observer, domain.root
+ )
def replace_pkg(self, oldpkg, newpkg, observer):
domain = self.get_package_domain(newpkg)
return domain_ops.replace(
- domain, domain.all_installed_repos, oldpkg, newpkg, observer, domain.root)
+ domain, domain.all_installed_repos, oldpkg, newpkg, observer, domain.root
+ )
diff --git a/src/pkgcore/config/errors.py b/src/pkgcore/config/errors.py
index 68a42bb81..4f75a1045 100644
--- a/src/pkgcore/config/errors.py
+++ b/src/pkgcore/config/errors.py
@@ -4,8 +4,11 @@
"""Exceptions raised by the config code."""
__all__ = (
- "TypeDefinitionError", "ConfigurationError", "ParsingError",
- "CollapseInheritOnly", "ComplexInstantiationError",
+ "TypeDefinitionError",
+ "ConfigurationError",
+ "ParsingError",
+ "CollapseInheritOnly",
+ "ComplexInstantiationError",
"QuoteInterpretationError",
)
@@ -13,10 +16,10 @@ from ..exceptions import PkgcoreException, PkgcoreUserException
def _identify_functor_source(functor):
- module = getattr(functor, '__module__', None)
+ module = getattr(functor, "__module__", None)
if module is None:
return functor.__name__
- return f'{module}.{functor.__name__}'
+ return f"{module}.{functor.__name__}"
class ConfigError(PkgcoreException):
@@ -47,7 +50,7 @@ class ConfigurationError(ConfigError):
self.stack = [message]
def __str__(self):
- return ':\n'.join(reversed(self.stack))
+ return ":\n".join(reversed(self.stack))
class ParsingError(ConfigurationError, PkgcoreUserException):
@@ -59,14 +62,14 @@ class ParsingError(ConfigurationError, PkgcoreUserException):
elif exception is not None:
super().__init__(str(exception))
else:
- raise ValueError('specify at least one of message and exception')
+ raise ValueError("specify at least one of message and exception")
self.message = message
self.exc = exception
def __str__(self):
- msg = f'parsing failed: {self.message}'
+ msg = f"parsing failed: {self.message}"
if self.exc is not None:
- msg += f'\n{self.exc}'
+ msg += f"\n{self.exc}"
return msg
@@ -88,9 +91,9 @@ class InstantiationError(ConfigurationError):
def __str__(self):
s = self.message
if s is None:
- s = ''
+ s = ""
else:
- s = ': %s' % (s,)
+ s = ": %s" % (s,)
return self._txt % (self.section_name, s)
@@ -115,14 +118,15 @@ class ComplexInstantiationError(ConfigurationError):
this, but that will lose the traceback.
"""
- def __init__(self, message=None, exception=None, callable_obj=None,
- pargs=None, kwargs=None):
+ def __init__(
+ self, message=None, exception=None, callable_obj=None, pargs=None, kwargs=None
+ ):
if message is not None:
super().__init__(message)
elif exception is not None:
super().__init__(str(exception))
else:
- raise ValueError('specify at least one of message and exception')
+ raise ValueError("specify at least one of message and exception")
self.message = message
self.callable = callable_obj
self.pargs = pargs
@@ -134,26 +138,26 @@ class ComplexInstantiationError(ConfigurationError):
# and str() this before central had a chance to fill it in)
if self.message is not None:
if self.callable is None:
- message = f'{self.message!r}, callable unset!'
+ message = f"{self.message!r}, callable unset!"
else:
message = (
- f'{self.message!r} instantiating '
- f'{self.callable.__module__}.{self.callable.__name__}'
+ f"{self.message!r} instantiating "
+ f"{self.callable.__module__}.{self.callable.__name__}"
)
# The weird repr(str(exc)) used here quotes the message nicely.
elif self.callable is not None:
message = (
- f'Caught exception {str(self.exc)!r} '
- f'instantiating {self.callable.__module__}.{self.callable.__name__}'
+ f"Caught exception {str(self.exc)!r} "
+ f"instantiating {self.callable.__module__}.{self.callable.__name__}"
)
else:
- message = f'Caught exception {str(self.exc)!r}, callable unset!'
- return ':\n'.join(reversed([message] + self.stack[1:]))
+ message = f"Caught exception {str(self.exc)!r}, callable unset!"
+ return ":\n".join(reversed([message] + self.stack[1:]))
class QuoteInterpretationError(ConfigurationError):
"""Quoting of a var was screwed up."""
def __init__(self, string):
- super().__init__(f'parsing of {string!r} failed')
+ super().__init__(f"parsing of {string!r} failed")
self.str = string
diff --git a/src/pkgcore/config/hint.py b/src/pkgcore/config/hint.py
index 4304b1bb0..73419865e 100644
--- a/src/pkgcore/config/hint.py
+++ b/src/pkgcore/config/hint.py
@@ -1,7 +1,7 @@
"""Config introspection support."""
-__all__ = ('ConfigHint', 'configurable')
+__all__ = ("ConfigHint", "configurable")
class ConfigHint:
@@ -9,12 +9,29 @@ class ConfigHint:
# be aware this is used in clone
__slots__ = (
- "types", "positional", "required", "typename", "allow_unknowns",
- "doc", "authorative", "requires_config", "raw_class")
-
- def __init__(self, types=None, positional=None, required=None, doc=None,
- typename=None, allow_unknowns=False, authorative=False,
- requires_config=False, raw_class=False):
+ "types",
+ "positional",
+ "required",
+ "typename",
+ "allow_unknowns",
+ "doc",
+ "authorative",
+ "requires_config",
+ "raw_class",
+ )
+
+ def __init__(
+ self,
+ types=None,
+ positional=None,
+ required=None,
+ doc=None,
+ typename=None,
+ allow_unknowns=False,
+ authorative=False,
+ requires_config=False,
+ raw_class=False,
+ ):
self.types = types or {}
self.positional = positional or []
self.required = required or []
@@ -37,7 +54,9 @@ class ConfigHint:
def configurable(*args, **kwargs):
"""Decorator version of ConfigHint."""
hint = ConfigHint(*args, **kwargs)
+
def decorator(original):
original.pkgcore_config_type = hint
return original
+
return decorator
diff --git a/src/pkgcore/const.py b/src/pkgcore/const.py
index 6f2315231..59b273112 100644
--- a/src/pkgcore/const.py
+++ b/src/pkgcore/const.py
@@ -38,20 +38,24 @@ def _GET_CONST(attr, default_value):
# determine XDG compatible paths
for xdg_var, var_name, fallback_dir in (
- ('XDG_CONFIG_HOME', 'USER_CONFIG_PATH', '~/.config'),
- ('XDG_CACHE_HOME', 'USER_CACHE_PATH', '~/.cache'),
- ('XDG_DATA_HOME', 'USER_DATA_PATH', '~/.local/share')):
- setattr(_module, var_name,
- os.environ.get(xdg_var, osp.join(osp.expanduser(fallback_dir), __title__)))
-
-USER_CONF_FILE = osp.join(getattr(_module, 'USER_CONFIG_PATH'), 'pkgcore.conf')
-SYSTEM_CONF_FILE = '/etc/pkgcore/pkgcore.conf'
-SYSTEM_CACHE_PATH = '/var/cache/pkgcore'
-
-REPO_PATH = _GET_CONST('REPO_PATH', _reporoot)
-DATA_PATH = _GET_CONST('DATA_PATH', '%(REPO_PATH)s/data/share/pkgcore')
-LIBDIR_PATH = _GET_CONST('LIBDIR_PATH', '%(REPO_PATH)s/data/lib/pkgcore')
-EBD_PATH = _GET_CONST('EBD_PATH', '%(LIBDIR_PATH)s/ebd')
-CONFIG_PATH = _GET_CONST('CONFIG_PATH', '%(DATA_PATH)s/config')
-PATH_FORCED_PREPEND = _GET_CONST('INJECTED_BIN_PATH', ('%(REPO_PATH)s/bin',))
-CP_BINARY = process.find_binary('cp', fallback='/bin/cp')
+ ("XDG_CONFIG_HOME", "USER_CONFIG_PATH", "~/.config"),
+ ("XDG_CACHE_HOME", "USER_CACHE_PATH", "~/.cache"),
+ ("XDG_DATA_HOME", "USER_DATA_PATH", "~/.local/share"),
+):
+ setattr(
+ _module,
+ var_name,
+ os.environ.get(xdg_var, osp.join(osp.expanduser(fallback_dir), __title__)),
+ )
+
+USER_CONF_FILE = osp.join(getattr(_module, "USER_CONFIG_PATH"), "pkgcore.conf")
+SYSTEM_CONF_FILE = "/etc/pkgcore/pkgcore.conf"
+SYSTEM_CACHE_PATH = "/var/cache/pkgcore"
+
+REPO_PATH = _GET_CONST("REPO_PATH", _reporoot)
+DATA_PATH = _GET_CONST("DATA_PATH", "%(REPO_PATH)s/data/share/pkgcore")
+LIBDIR_PATH = _GET_CONST("LIBDIR_PATH", "%(REPO_PATH)s/data/lib/pkgcore")
+EBD_PATH = _GET_CONST("EBD_PATH", "%(LIBDIR_PATH)s/ebd")
+CONFIG_PATH = _GET_CONST("CONFIG_PATH", "%(DATA_PATH)s/config")
+PATH_FORCED_PREPEND = _GET_CONST("INJECTED_BIN_PATH", ("%(REPO_PATH)s/bin",))
+CP_BINARY = process.find_binary("cp", fallback="/bin/cp")
diff --git a/src/pkgcore/ebuild/atom.py b/src/pkgcore/ebuild/atom.py
index 88de97e87..1be4405ac 100644
--- a/src/pkgcore/ebuild/atom.py
+++ b/src/pkgcore/ebuild/atom.py
@@ -32,9 +32,9 @@ valid_slot_chars.update(".+_-")
alphanum = frozenset(alphanum)
valid_repo_chars = frozenset(valid_repo_chars)
valid_slot_chars = frozenset(valid_slot_chars)
-valid_ops = frozenset(['<', '<=', '=', '~', '>=', '>'])
+valid_ops = frozenset(["<", "<=", "=", "~", ">=", ">"])
-demand_compile_regexp('valid_use_flag', r'^[A-Za-z0-9][A-Za-z0-9+_@-]*$')
+demand_compile_regexp("valid_use_flag", r"^[A-Za-z0-9][A-Za-z0-9+_@-]*$")
class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
@@ -45,8 +45,18 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
# note we don't need _hash
__slots__ = (
- "blocks", "blocks_strongly", "op", "cpvstr", "negate_vers", "use",
- "slot_operator", "slot", "subslot", "repo_id", "_hash", "_cpv",
+ "blocks",
+ "blocks_strongly",
+ "op",
+ "cpvstr",
+ "negate_vers",
+ "use",
+ "slot_operator",
+ "slot",
+ "subslot",
+ "repo_id",
+ "_hash",
+ "_cpv",
"_restrictions",
)
@@ -57,8 +67,16 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
_evaluate_collapse = True
__attr_comparison__ = (
- "cpvstr", "op", "blocks", "negate_vers",
- "use", "slot", "subslot", "slot_operator", "repo_id")
+ "cpvstr",
+ "op",
+ "blocks",
+ "negate_vers",
+ "use",
+ "slot",
+ "subslot",
+ "slot_operator",
+ "repo_id",
+ )
klass.inject_richcmp_methods_from_cmp(locals())
# hack; combine these 2 metaclasses at some point...
@@ -69,7 +87,7 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
# overridden in child class if it's supported
evaluate_depset = None
- def __init__(self, atom: str, negate_vers: bool=False, eapi: str='-1'):
+ def __init__(self, atom: str, negate_vers: bool = False, eapi: str = "-1"):
"""
:param atom: string, see gentoo ebuild atom syntax
:keyword negate_vers: boolean controlling whether the version should be
@@ -89,99 +107,117 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
# use dep
use_end = atom.find("]", use_start)
if use_start < slot_start:
- raise errors.MalformedAtom(orig_atom,
- "slot restriction must proceed use")
- elif use_end == -1:
raise errors.MalformedAtom(
- orig_atom, "use restriction isn't completed")
+ orig_atom, "slot restriction must proceed use"
+ )
+ elif use_end == -1:
+ raise errors.MalformedAtom(orig_atom, "use restriction isn't completed")
elif use_end != len(atom) - 1:
- raise errors.MalformedAtom(
- orig_atom, "trailing garbage after use dep")
- sf(self, "use", tuple(sorted(atom[use_start + 1:use_end].split(','))))
+ raise errors.MalformedAtom(orig_atom, "trailing garbage after use dep")
+ sf(self, "use", tuple(sorted(atom[use_start + 1 : use_end].split(","))))
for x in self.use:
# stripped purely for validation reasons
try:
if x[-1] in "=?":
override_kls = True
x = x[:-1]
- if x[0] == '!':
+ if x[0] == "!":
x = x[1:]
- if x[0] == '-':
+ if x[0] == "-":
raise errors.MalformedAtom(
- orig_atom, f"malformed use flag: {x}")
- elif x[0] == '-':
+ orig_atom, f"malformed use flag: {x}"
+ )
+ elif x[0] == "-":
x = x[1:]
- if x[-1] == ')' and eapi not in ('0', '1', '2', '3'):
+ if x[-1] == ")" and eapi not in ("0", "1", "2", "3"):
# use defaults.
if x[-3:] in ("(+)", "(-)"):
x = x[:-3]
if not x:
- raise errors.MalformedAtom(orig_atom, 'empty use dep detected')
+ raise errors.MalformedAtom(orig_atom, "empty use dep detected")
if not valid_use_flag.match(x):
- raise errors.MalformedAtom(orig_atom, f'invalid USE flag: {x!r}')
+ raise errors.MalformedAtom(
+ orig_atom, f"invalid USE flag: {x!r}"
+ )
except IndexError:
- raise errors.MalformedAtom(orig_atom, 'empty use dep detected')
+ raise errors.MalformedAtom(orig_atom, "empty use dep detected")
if override_kls:
- sf(self, '__class__', transitive_use_atom)
- atom = atom[0:use_start]+atom[use_end + 1:]
+ sf(self, "__class__", transitive_use_atom)
+ atom = atom[0:use_start] + atom[use_end + 1 :]
else:
sf(self, "use", None)
if slot_start != -1:
i2 = atom.find("::", slot_start)
if i2 != -1:
- repo_id = atom[i2 + 2:]
+ repo_id = atom[i2 + 2 :]
if not repo_id:
raise errors.MalformedAtom(orig_atom, "repo_id must not be empty")
- elif repo_id[0] in '-':
- raise errors.MalformedAtom(orig_atom,
- f"invalid first char of repo_id '{repo_id}' (must not begin with a hyphen)")
+ elif repo_id[0] in "-":
+ raise errors.MalformedAtom(
+ orig_atom,
+ f"invalid first char of repo_id '{repo_id}' (must not begin with a hyphen)",
+ )
elif not valid_repo_chars.issuperset(repo_id):
- raise errors.MalformedAtom(orig_atom,
- f"repo_id may contain only [a-Z0-9_-/], found {repo_id!r}")
+ raise errors.MalformedAtom(
+ orig_atom,
+ f"repo_id may contain only [a-Z0-9_-/], found {repo_id!r}",
+ )
atom = atom[:i2]
sf(self, "repo_id", repo_id)
else:
sf(self, "repo_id", None)
# slot dep.
- slot = atom[slot_start+1:]
+ slot = atom[slot_start + 1 :]
slot_operator = subslot = None
if not slot:
# if the slot char came in only due to repo_id, force slots to None
if i2 == -1:
- raise errors.MalformedAtom(orig_atom, "Empty slot targets aren't allowed")
+ raise errors.MalformedAtom(
+ orig_atom, "Empty slot targets aren't allowed"
+ )
slot = None
else:
slots = (slot,)
- if eapi not in ('0', '1', '2', '3', '4'):
+ if eapi not in ("0", "1", "2", "3", "4"):
if slot[0:1] in ("*", "="):
if len(slot) > 1:
- raise errors.MalformedAtom(orig_atom,
- "Slot operators '*' and '=' do not take slot targets")
+ raise errors.MalformedAtom(
+ orig_atom,
+ "Slot operators '*' and '=' do not take slot targets",
+ )
slot_operator = slot
slot, slots = None, ()
else:
- if slot.endswith('='):
- slot_operator = '='
+ if slot.endswith("="):
+ slot_operator = "="
slot = slot[:-1]
- slots = slot.split('/', 1)
- elif eapi == '0':
- raise errors.MalformedAtom(orig_atom,
- "slot dependencies aren't allowed in EAPI 0")
+ slots = slot.split("/", 1)
+ elif eapi == "0":
+ raise errors.MalformedAtom(
+ orig_atom, "slot dependencies aren't allowed in EAPI 0"
+ )
for chunk in slots:
if not chunk:
- raise errors.MalformedAtom(orig_atom,
- "Empty slot targets aren't allowed")
-
- if chunk[0] in '-.':
- raise errors.MalformedAtom(orig_atom,
- "Slot targets must not start with a hypen or dot: {chunk!r}")
+ raise errors.MalformedAtom(
+ orig_atom, "Empty slot targets aren't allowed"
+ )
+
+ if chunk[0] in "-.":
+ raise errors.MalformedAtom(
+ orig_atom,
+                        f"Slot targets must not start with a hyphen or dot: {chunk!r}",
+ )
elif not valid_slot_chars.issuperset(chunk):
- invalid_chars = ', '.join(map(repr, sorted(set(chunk).difference(valid_slot_chars))))
- raise errors.MalformedAtom(orig_atom,
- f"Invalid character(s) in slot target: {invalid_chars}")
+ invalid_chars = ", ".join(
+ map(repr, sorted(set(chunk).difference(valid_slot_chars)))
+ )
+ raise errors.MalformedAtom(
+ orig_atom,
+ f"Invalid character(s) in slot target: {invalid_chars}",
+ )
if len(slots) == 2:
slot, subslot = slots
@@ -201,7 +237,7 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
atom = atom[1:]
# hackish/slow, but lstrip doesn't take a 'prune this many' arg
# open to alternatives
- if eapi not in ('0', '1') and atom.startswith("!"):
+ if eapi not in ("0", "1") and atom.startswith("!"):
atom = atom[1:]
sf(self, "blocks_strongly", True)
else:
@@ -209,40 +245,43 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
else:
sf(self, "blocks_strongly", False)
- if atom[0] in ('<', '>'):
- if atom[1] == '=':
- sf(self, 'op', atom[:2])
+ if atom[0] in ("<", ">"):
+ if atom[1] == "=":
+ sf(self, "op", atom[:2])
atom = atom[2:]
else:
- sf(self, 'op', atom[0])
+ sf(self, "op", atom[0])
atom = atom[1:]
- elif atom[0] == '=':
- if atom[-1] == '*':
- sf(self, 'op', '=*')
+ elif atom[0] == "=":
+ if atom[-1] == "*":
+ sf(self, "op", "=*")
atom = atom[1:-1]
else:
atom = atom[1:]
- sf(self, 'op', '=')
- elif atom[0] == '~':
- sf(self, 'op', '~')
+ sf(self, "op", "=")
+ elif atom[0] == "~":
+ sf(self, "op", "~")
atom = atom[1:]
else:
- sf(self, 'op', '')
- sf(self, 'cpvstr', atom)
+ sf(self, "op", "")
+ sf(self, "cpvstr", atom)
- if eapi == '0':
- for x in ('use', 'slot'):
+ if eapi == "0":
+ for x in ("use", "slot"):
if getattr(self, x) is not None:
- raise errors.MalformedAtom(orig_atom,
- f"{x} atoms aren't supported for EAPI 0")
- elif eapi == '1':
+ raise errors.MalformedAtom(
+ orig_atom, f"{x} atoms aren't supported for EAPI 0"
+ )
+ elif eapi == "1":
if self.use is not None:
- raise errors.MalformedAtom(orig_atom,
- "use atoms aren't supported for EAPI < 2")
- if eapi != '-1':
+ raise errors.MalformedAtom(
+ orig_atom, "use atoms aren't supported for EAPI < 2"
+ )
+ if eapi != "-1":
if self.repo_id is not None:
- raise errors.MalformedAtom(orig_atom,
- f"repo_id atoms aren't supported for EAPI {eapi}")
+ raise errors.MalformedAtom(
+ orig_atom, f"repo_id atoms aren't supported for EAPI {eapi}"
+ )
try:
sf(self, "_cpv", cpv.CPV(self.cpvstr, versioned=bool(self.op)))
except errors.InvalidCPV as e:
@@ -251,11 +290,12 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
if self.op:
if self.version is None:
raise errors.MalformedAtom(orig_atom, "operator requires a version")
- elif self.op == '~' and self.revision:
- raise errors.MalformedAtom(orig_atom,
- "~ revision operator cannot be combined with a revision")
+ elif self.op == "~" and self.revision:
+ raise errors.MalformedAtom(
+ orig_atom, "~ revision operator cannot be combined with a revision"
+ )
elif self.version is not None:
- raise errors.MalformedAtom(orig_atom, 'versioned atom requires an operator')
+ raise errors.MalformedAtom(orig_atom, "versioned atom requires an operator")
sf(self, "_hash", hash(orig_atom))
sf(self, "negate_vers", negate_vers)
@@ -269,26 +309,26 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
weak_blocker = klass.alias_attr("blocks_temp_ignorable")
def __repr__(self):
- if self.op == '=*':
+ if self.op == "=*":
atom = f"={self.cpvstr}*"
else:
atom = self.op + self.cpvstr
if self.blocks:
- atom = '!' + atom
+ atom = "!" + atom
if self.blocks:
if self.blocks_strongly:
- atom = '!!' + atom
+ atom = "!!" + atom
else:
- atom = '!' + atom
+ atom = "!" + atom
attrs = [atom]
if self.use:
- attrs.append(f'use={self.use!r}')
+ attrs.append(f"use={self.use!r}")
if self.slot is not None:
- attrs.append(f'slot={self.slot!r}')
+ attrs.append(f"slot={self.slot!r}")
if self.subslot is not None:
- attrs.append(f'subslot={self.subslot!r}')
+ attrs.append(f"subslot={self.subslot!r}")
if self.repo_id is not None:
- attrs.append(f'repo_id={self.repo_id!r}')
+ attrs.append(f"repo_id={self.repo_id!r}")
return f'<{self.__class__.__name__} {" ".join(attrs)} @#{id(self):x}>'
def __reduce__(self):
@@ -329,12 +369,18 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
r.insert(0, restricts.RepositoryDep(self.repo_id))
if self.fullver is not None:
- if self.op == '=*':
- r.append(packages.PackageRestriction(
- "fullver", values.StrGlobMatch(self.fullver)))
+ if self.op == "=*":
+ r.append(
+ packages.PackageRestriction(
+ "fullver", values.StrGlobMatch(self.fullver)
+ )
+ )
else:
- r.append(restricts.VersionMatch(
- self.op, self.version, self.revision, negate=self.negate_vers))
+ r.append(
+ restricts.VersionMatch(
+ self.op, self.version, self.revision, negate=self.negate_vers
+ )
+ )
if self.slot is not None:
r.append(restricts.SlotDep(self.slot))
@@ -347,15 +393,15 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
return tuple(r)
def __str__(self):
- if self.op == '=*':
+ if self.op == "=*":
s = f"={self.cpvstr}*"
else:
s = self.op + self.cpvstr
if self.blocks:
if self.blocks_strongly:
- s = '!!' + s
+ s = "!!" + s
else:
- s = '!' + s
+ s = "!" + s
if self.slot:
s += f":{self.slot}"
if self.subslot:
@@ -367,11 +413,11 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
if self.repo_id:
s += f"::{self.repo_id}"
if self.use:
- use = ','.join(self.use)
+ use = ",".join(self.use)
s += f"[{use}]"
return s
- __hash__ = klass.reflective_hash('_hash')
+ __hash__ = klass.reflective_hash("_hash")
def __iter__(self):
return iter(self.restrictions)
@@ -381,8 +427,7 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
def __cmp__(self, other):
if not isinstance(other, atom):
- raise TypeError(
- f"other isn't of {atom!r} type, is {other.__class__}")
+ raise TypeError(f"other isn't of {atom!r} type, is {other.__class__}")
c = cmp(self.category, other.category)
if c:
@@ -396,8 +441,7 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
if c:
return c
- c = cpv.ver_cmp(self.version, self.revision,
- other.version, other.revision)
+ c = cpv.ver_cmp(self.version, self.revision, other.version, other.revision)
if c:
return c
@@ -417,7 +461,8 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
return c
def f(v):
- return '' if v is None else v
+ return "" if v is None else v
+
c = cmp(f(self.slot), f(other.slot))
if c:
return c
@@ -435,18 +480,18 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
"""Return atom object stripped of USE dependencies."""
if not self.use:
return self
- if self.op == '=*':
- s = f'={self.cpvstr}*'
+ if self.op == "=*":
+ s = f"={self.cpvstr}*"
else:
s = self.op + self.cpvstr
if self.blocks:
- s = '!' + s
+ s = "!" + s
if not self.blocks_temp_ignorable:
- s = '!' + s
+ s = "!" + s
if self.slot:
- s += f':{self.slot}'
+ s += f":{self.slot}"
if self.subslot:
- s += f'/{self.subslot}'
+ s += f"/{self.subslot}"
return atom(s)
def intersects(self, other):
@@ -471,18 +516,23 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
# Slot dep only matters if we both have one. If we do they
# must be identical:
- if (self.slot is not None and other.slot is not None and
- self.slot != other.slot):
+ if self.slot is not None and other.slot is not None and self.slot != other.slot:
return False
# Subslot dep only matters if we both have one. If we do they
# must be identical:
- if (self.subslot is not None and other.subslot is not None and
- self.subslot != other.subslot):
+ if (
+ self.subslot is not None
+ and other.subslot is not None
+ and self.subslot != other.subslot
+ ):
return False
- if (self.repo_id is not None and other.repo_id is not None and
- self.repo_id != other.repo_id):
+ if (
+ self.repo_id is not None
+ and other.repo_id is not None
+ and self.repo_id != other.repo_id
+ ):
return False
# Use deps are similar: if one of us forces a flag on and the
@@ -495,7 +545,7 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
flags = set(self.use) ^ set(other.use)
for flag in flags:
# If this is unset and we also have the set version we fail:
- if flag[0] == '-' and flag[1:] in flags:
+ if flag[0] == "-" and flag[1:] in flags:
return False
# Remaining thing to check is version restrictions. Get the
@@ -507,8 +557,7 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
return True
# If we are both "unbounded" in the same direction we intersect:
- if (('<' in self.op and '<' in other.op) or
- ('>' in self.op and '>' in other.op)):
+ if ("<" in self.op and "<" in other.op) or (">" in self.op and ">" in other.op):
return True
# Trick used here: just use the atoms as sufficiently
@@ -516,74 +565,81 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
# needed is a version and revision attr).
# If one of us is an exact match we intersect if the other matches it:
- if self.op == '=':
- if other.op == '=*':
+ if self.op == "=":
+ if other.op == "=*":
return self.fullver.startswith(other.fullver)
return restricts.VersionMatch(
- other.op, other.version, other.revision).match(self)
- if other.op == '=':
- if self.op == '=*':
+ other.op, other.version, other.revision
+ ).match(self)
+ if other.op == "=":
+ if self.op == "=*":
return other.fullver.startswith(self.fullver)
- return restricts.VersionMatch(
- self.op, self.version, self.revision).match(other)
+ return restricts.VersionMatch(self.op, self.version, self.revision).match(
+ other
+ )
# If we are both ~ matches we match if we are identical:
- if self.op == other.op == '~':
- return (self.version == other.version and
- self.revision == other.revision)
+ if self.op == other.op == "~":
+ return self.version == other.version and self.revision == other.revision
# If we are both glob matches we match if one of us matches the other.
- if self.op == other.op == '=*':
- return (self.fullver.startswith(other.fullver) or
- other.fullver.startswith(self.fullver))
+ if self.op == other.op == "=*":
+ return self.fullver.startswith(other.fullver) or other.fullver.startswith(
+ self.fullver
+ )
# If one of us is a glob match and the other a ~ we match if the glob
# matches the ~ (ignoring a revision on the glob):
- if self.op == '=*' and other.op == '~':
+ if self.op == "=*" and other.op == "~":
return other.fullver.startswith(self.version)
- if other.op == '=*' and self.op == '~':
+ if other.op == "=*" and self.op == "~":
return self.fullver.startswith(other.version)
# If we get here at least one of us is a <, <=, > or >=:
- if self.op in ('<', '<=', '>', '>='):
+ if self.op in ("<", "<=", ">", ">="):
ranged, other = self, other
else:
ranged, other = other, self
- if '<' in other.op or '>' in other.op:
+ if "<" in other.op or ">" in other.op:
# We are both ranged, and in the opposite "direction" (or
# we would have matched above). We intersect if we both
# match the other's endpoint (just checking one endpoint
# is not enough, it would give a false positive on <=2 vs >2)
- return (
- restricts.VersionMatch(
- other.op, other.version, other.revision).match(ranged) and
- restricts.VersionMatch(
- ranged.op, ranged.version, ranged.revision).match(other))
-
- if other.op == '~':
+ return restricts.VersionMatch(
+ other.op, other.version, other.revision
+ ).match(ranged) and restricts.VersionMatch(
+ ranged.op, ranged.version, ranged.revision
+ ).match(
+ other
+ )
+
+ if other.op == "~":
# Other definitely matches its own version. If ranged also
# does we're done:
- if restricts.VersionMatch(
- ranged.op, ranged.version, ranged.revision).match(other):
+ if restricts.VersionMatch(ranged.op, ranged.version, ranged.revision).match(
+ other
+ ):
return True
# The only other case where we intersect is if ranged is a
# > or >= on other's version and a nonzero revision. In
# that case other will match ranged. Be careful not to
# give a false positive for ~2 vs <2 here:
- return ranged.op in ('>', '>=') and restricts.VersionMatch(
- other.op, other.version, other.revision).match(ranged)
+ return ranged.op in (">", ">=") and restricts.VersionMatch(
+ other.op, other.version, other.revision
+ ).match(ranged)
- if other.op == '=*':
+ if other.op == "=*":
# The fun one, since glob matches do not correspond to a
# single contiguous region of versions.
# a glob match definitely matches its own version, so if
# ranged does too we're done:
- if restricts.VersionMatch(
- ranged.op, ranged.version, ranged.revision).match(other):
+ if restricts.VersionMatch(ranged.op, ranged.version, ranged.revision).match(
+ other
+ ):
return True
- if '<' in ranged.op:
+ if "<" in ranged.op:
# Remaining cases where this intersects: there is a
# package smaller than ranged.fullver and
# other.fullver that they both match.
@@ -616,7 +672,8 @@ class atom(boolean.AndRestriction, metaclass=klass.generic_equality):
# Handled all possible ops.
raise NotImplementedError(
- 'Someone added an op to atom without adding it to intersects')
+ "Someone added an op to atom without adding it to intersects"
+ )
def evaluate_conditionals(self, parent_cls, parent_seq, enabled, tristate=None):
parent_seq.append(self)
@@ -635,75 +692,102 @@ class transitive_use_atom(atom):
@staticmethod
def _mk_conditional(flag, payload, negate=False):
- return Conditional('use', ContainmentMatch(flag, negate=negate), payload)
+ return Conditional("use", ContainmentMatch(flag, negate=negate), payload)
def _recurse_transitive_use_conds(self, atom_str, forced_use, varied):
if not varied:
- s = ','.join(forced_use)
+ s = ",".join(forced_use)
if s:
- s = f'[{s}]'
- return (self._nontransitive_use_atom(atom_str + s), )
+ s = f"[{s}]"
+ return (self._nontransitive_use_atom(atom_str + s),)
flag = varied[0]
- use = flag.lstrip('!').rstrip('?=')
+ use = flag.lstrip("!").rstrip("?=")
varied = varied[1:]
- if flag[-1] == '?':
+ if flag[-1] == "?":
# a[x?] == x? ( a[x] ) !x? ( a )
# a[!x?] == x? ( a ) !x? ( a[-x] )
- if flag[0] != '!':
- return (self._mk_conditional(use,
+ if flag[0] != "!":
+ return (
+ self._mk_conditional(
+ use,
self._recurse_transitive_use_conds(
- atom_str, forced_use + [use], varied)),
- self._mk_conditional(
- use, self._recurse_transitive_use_conds(
- atom_str, forced_use, varied), negate=True))
- return (self._mk_conditional(
- use, self._recurse_transitive_use_conds(
- atom_str, forced_use, varied)),
+ atom_str, forced_use + [use], varied
+ ),
+ ),
self._mk_conditional(
- use, self._recurse_transitive_use_conds(
- atom_str, forced_use + ['-' + use], varied), negate=True))
+ use,
+ self._recurse_transitive_use_conds(
+ atom_str, forced_use, varied
+ ),
+ negate=True,
+ ),
+ )
+ return (
+ self._mk_conditional(
+ use,
+ self._recurse_transitive_use_conds(atom_str, forced_use, varied),
+ ),
+ self._mk_conditional(
+ use,
+ self._recurse_transitive_use_conds(
+ atom_str, forced_use + ["-" + use], varied
+ ),
+ negate=True,
+ ),
+ )
# a[x=] == x? ( a[x] ) !x? ( a[-x] )
# a[!x=] == x? ( a[-x] ) !x? ( a[x] )
- if flag[0] != '!':
- use_states = [[use], ['-' + use]]
+ if flag[0] != "!":
+ use_states = [[use], ["-" + use]]
else:
- use_states = [['-' + use], [use]]
-
- return (self._mk_conditional(
- use, self._recurse_transitive_use_conds(
- atom_str, forced_use + use_states[0], varied)),
- self._mk_conditional(
- use, self._recurse_transitive_use_conds(
- atom_str, forced_use + use_states[1], varied), negate=True))
+ use_states = [["-" + use], [use]]
+
+ return (
+ self._mk_conditional(
+ use,
+ self._recurse_transitive_use_conds(
+ atom_str, forced_use + use_states[0], varied
+ ),
+ ),
+ self._mk_conditional(
+ use,
+ self._recurse_transitive_use_conds(
+ atom_str, forced_use + use_states[1], varied
+ ),
+ negate=True,
+ ),
+ )
@klass.jit_attr
def restrictions(self):
return self.convert_to_conditionals()
def convert_to_conditionals(self):
- static_use = [use for use in self.use if use[-1] not in '?=']
- variable = [use for use in self.use if use[-1] in '?=']
+ static_use = [use for use in self.use if use[-1] not in "?="]
+ variable = [use for use in self.use if use[-1] in "?="]
return PkgAndRestriction(
*self._recurse_transitive_use_conds(
- self._stripped_use(), static_use, variable))
+ self._stripped_use(), static_use, variable
+ )
+ )
def _evaluate_depset_qa_in_place(self, flags, variable_flags, enabled, tristate):
# note this mutates flags
for flag in variable_flags:
conditional = flag[-1]
- negated = flag[0] == '!'
+ negated = flag[0] == "!"
if negated:
flag = flag[1:-1]
else:
flag = flag[:-1]
real_flag = flag
- if flag[-1] == ')':
+ if flag[-1] == ")":
flag = flag[:-3]
- if conditional == '=':
+ if conditional == "=":
# if it's locked to a state, take that state; else use whatever
# the default state was.
if flag in tristate:
@@ -712,7 +796,7 @@ class transitive_use_atom(atom):
# roughly if locked to enabled: x= == x, !x= == -x
# if locked to disabled: x= == -x , !x= == x
if (flag in enabled) == negated:
- real_flag = '-' + real_flag
+ real_flag = "-" + real_flag
else:
if flag in tristate:
# if the flag was on, but it was !x?, then skip it.
@@ -721,41 +805,44 @@ class transitive_use_atom(atom):
continue
# enforce the allowed state.
if flag not in enabled:
- real_flag = '-' + real_flag
+ real_flag = "-" + real_flag
else:
# enforce the state that gets us a flag to test on the target.
# thus if !x?, we want -x, or +x; take the negation basically.
if negated:
- real_flag = '-' + real_flag
+ real_flag = "-" + real_flag
flags.append(real_flag)
- def evaluate_conditionals(self, parent_cls, parent_seq, enabled, tristate_filter=None):
- new_flags = [use for use in self.use if use[-1] not in '?=']
- variable_flags = [use for use in self.use if use[-1] in '?=']
+ def evaluate_conditionals(
+ self, parent_cls, parent_seq, enabled, tristate_filter=None
+ ):
+ new_flags = [use for use in self.use if use[-1] not in "?="]
+ variable_flags = [use for use in self.use if use[-1] in "?="]
if tristate_filter is not None:
# note this updates the flags in place.
self._evaluate_depset_qa_in_place(
- new_flags, variable_flags, enabled, tristate_filter)
+ new_flags, variable_flags, enabled, tristate_filter
+ )
else:
for flag in variable_flags:
conditional = flag[-1]
- negated = flag[0] == '!'
+ negated = flag[0] == "!"
if negated:
flag = raw_flag = flag[1:-1]
else:
flag = raw_flag = flag[:-1]
- if raw_flag[-1] == ')':
+ if raw_flag[-1] == ")":
# use default... strip "(+)"
raw_flag = raw_flag[:-3]
- if conditional == '=':
+ if conditional == "=":
# given '!x=', if x is off, force x on for the target,
# and vice versa. render out a non relative - or ''.
- negated = ((raw_flag in enabled) == negated)
+ negated = (raw_flag in enabled) == negated
if negated:
- flag = '-' + flag
+ flag = "-" + flag
else:
# enforce the flag only if our state matches. !x? and x is on, means no dep.
# for !x? with -x, the assertion becomes !x; conditionally transitive basically.
@@ -768,7 +855,8 @@ class transitive_use_atom(atom):
a = self._nontransitive_use_atom(self._stripped_use())
else:
a = self._nontransitive_use_atom(
- "%s[%s]" % (self._stripped_use(), ','.join(new_flags)))
+ "%s[%s]" % (self._stripped_use(), ",".join(new_flags))
+ )
parent_seq.append(a)
iter_dnf_solutions = boolean.AndRestriction.iter_dnf_solutions
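Illustrative sketch (not part of the commit): the atom.py hunks above only re-wrap the parser and the intersects() logic, so the behavior they encode should be unchanged. A minimal usage example, assuming a pkgcore checkout is importable and that the names visible in the hunks (atom, op, category, slot, use, intersects) keep the semantics shown:

# Illustrative only; assumes pkgcore is importable and behaves as the hunks above suggest.
from pkgcore.ebuild.atom import atom

a = atom(">=dev-lang/python-3.10:3.10[sqlite,-tk]")
print(a.op, a.category, a.slot)   # expected: >= dev-lang 3.10
print(a.use)                      # expected: ('-tk', 'sqlite'), since USE deps are stored sorted

b = atom("<dev-lang/python-3.12")
# Both atoms are ranged in opposite directions and each matches the other's
# endpoint, so per the intersects() logic above this should be True.
print(a.intersects(b))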
diff --git a/src/pkgcore/ebuild/conditionals.py b/src/pkgcore/ebuild/conditionals.py
index 71b3a3e02..6c2984aae 100644
--- a/src/pkgcore/ebuild/conditionals.py
+++ b/src/pkgcore/ebuild/conditionals.py
@@ -18,28 +18,39 @@ from .errors import DepsetParseError
class DepSet(boolean.AndRestriction):
"""Gentoo DepSet syntax parser"""
- __slots__ = ('element_class', '_node_conds', '_known_conditionals')
+ __slots__ = ("element_class", "_node_conds", "_known_conditionals")
_evaluate_collapse = True
# do not enable instance caching w/out adjust evaluate_depset!
__inst_caching__ = False
- def __init__(self, restrictions='', element_class=atom,
- node_conds=True, known_conditionals=None):
+ def __init__(
+ self,
+ restrictions="",
+ element_class=atom,
+ node_conds=True,
+ known_conditionals=None,
+ ):
sf = object.__setattr__
- sf(self, '_known_conditionals', known_conditionals)
- sf(self, 'element_class', element_class)
- sf(self, 'restrictions', restrictions)
- sf(self, '_node_conds', node_conds)
- sf(self, 'type', restriction.package_type)
- sf(self, 'negate', False)
+ sf(self, "_known_conditionals", known_conditionals)
+ sf(self, "element_class", element_class)
+ sf(self, "restrictions", restrictions)
+ sf(self, "_node_conds", node_conds)
+ sf(self, "type", restriction.package_type)
+ sf(self, "negate", False)
@classmethod
- def parse(cls, dep_str, element_class,
- operators=None, attr=None,
- element_func=None, transitive_use_atoms=False,
- allow_src_uri_file_renames=False):
+ def parse(
+ cls,
+ dep_str,
+ element_class,
+ operators=None,
+ attr=None,
+ element_func=None,
+ transitive_use_atoms=False,
+ allow_src_uri_file_renames=False,
+ ):
"""
:param dep_str: string abiding by DepSet syntax
:param operators: mapping of node -> callable for special operators
@@ -81,7 +92,8 @@ class DepSet(boolean.AndRestriction):
depsets[-2].append(depsets[-1][0])
else:
depsets[-2].append(
- operators[raw_conditionals[-1]](*depsets[-1]))
+ operators[raw_conditionals[-1]](*depsets[-1])
+ )
else:
node_conds = True
c = raw_conditionals[-1]
@@ -91,18 +103,19 @@ class DepSet(boolean.AndRestriction):
c = values.ContainmentMatch(c[:-1])
depsets[-2].append(
- packages.Conditional("use", c, tuple(depsets[-1])))
+ packages.Conditional("use", c, tuple(depsets[-1]))
+ )
raw_conditionals.pop()
depsets.pop()
elif "(" == k:
- k = ''
+ k = ""
# push another frame on
depsets.append([])
raw_conditionals.append(k)
- elif k[-1] == '?' or k in operators:
+ elif k[-1] == "?" or k in operators:
# use conditional or custom op.
# no tokens left == bad dep_str.
k2 = next(words)
@@ -122,7 +135,7 @@ class DepSet(boolean.AndRestriction):
except StopIteration:
depsets[-1].append(element_func(k))
else:
- if k2 != '->':
+ if k2 != "->":
depsets[-1].append(element_func(k))
words.appendleft((k2,))
else:
@@ -178,8 +191,8 @@ class DepSet(boolean.AndRestriction):
results = []
self.evaluate_conditionals(
- self.__class__, results,
- cond_dict, tristate_filter, force_collapse=True)
+ self.__class__, results, cond_dict, tristate_filter, force_collapse=True
+ )
return self.__class__(tuple(results), self.element_class, False)
@@ -193,12 +206,11 @@ class DepSet(boolean.AndRestriction):
new_set.appendleft(list(cur_node.payload) + [None])
elif isinstance(cur_node, transitive_use_atom):
new_set.appendleft(cur_node.convert_to_conditionals())
- elif (isinstance(cur_node, boolean.base) and
- not isinstance(cur_node, atom)):
+ elif isinstance(cur_node, boolean.base) and not isinstance(cur_node, atom):
new_set.appendleft(cur_node.restrictions)
elif cur_node is None:
conditions_stack.pop()
- elif conditions_stack or yield_non_conditionals: # leaf
+ elif conditions_stack or yield_non_conditionals: # leaf
yield (cur_node, conditions_stack[:])
@property
@@ -285,7 +297,8 @@ def stringify_boolean(node, func=str, domain=None):
_internal_stringify_boolean(x, domain, func, l.append)
else:
_internal_stringify_boolean(node, domain, func, l.append)
- return ' '.join(l)
+ return " ".join(l)
+
def _internal_stringify_boolean(node, domain, func, visit):
"""func is used to stringify the actual content. Useful for fetchables."""
@@ -293,19 +306,20 @@ def _internal_stringify_boolean(node, domain, func, visit):
if isinstance(node, boolean.OrRestriction):
visit("|| (")
iterable = node.restrictions
- elif (isinstance(node, boolean.AndRestriction) and
- not isinstance(node, atom)):
+ elif isinstance(node, boolean.AndRestriction) and not isinstance(node, atom):
visit("(")
iterable = node.restrictions
elif isinstance(node, packages.Conditional):
assert len(node.restriction.vals) == 1
iterable = node.payload
- visit("%s%s? (" % (
- node.restriction.negate and "!" or "",
- list(node.restriction.vals)[0]))
+ visit(
+ "%s%s? ("
+ % (node.restriction.negate and "!" or "", list(node.restriction.vals)[0])
+ )
else:
- if (domain is not None and
- (isinstance(node, atom) and node.slot_operator == '=')):
+ if domain is not None and (
+ isinstance(node, atom) and node.slot_operator == "="
+ ):
pkg = max(sorted(domain.all_installed_repos.itermatch(node)))
object.__setattr__(node, "slot", pkg.slot)
object.__setattr__(node, "subslot", pkg.subslot)
diff --git a/src/pkgcore/ebuild/const.py b/src/pkgcore/ebuild/const.py
index 53bcdfbcb..86618c5f1 100644
--- a/src/pkgcore/ebuild/const.py
+++ b/src/pkgcore/ebuild/const.py
@@ -7,23 +7,47 @@ from snakeoil.osutils import pjoin
from ..const import EBD_PATH
incrementals = (
- "ACCEPT_KEYWORDS", "ACCEPT_LICENSE", "CONFIG_PROTECT",
- "CONFIG_PROTECT_MASK", "FEATURES", "IUSE_IMPLICIT",
- "PROFILE_ONLY_VARIABLES", "USE", "USE_EXPAND",
- "USE_EXPAND_HIDDEN", "USE_EXPAND_IMPLICIT", "USE_EXPAND_UNPREFIXED",
+ "ACCEPT_KEYWORDS",
+ "ACCEPT_LICENSE",
+ "CONFIG_PROTECT",
+ "CONFIG_PROTECT_MASK",
+ "FEATURES",
+ "IUSE_IMPLICIT",
+ "PROFILE_ONLY_VARIABLES",
+ "USE",
+ "USE_EXPAND",
+ "USE_EXPAND_HIDDEN",
+ "USE_EXPAND_IMPLICIT",
+ "USE_EXPAND_UNPREFIXED",
"ENV_UNSET",
)
incrementals_unfinalized = ("USE",)
metadata_keys = (
- "BDEPEND", "DEPEND", "RDEPEND", "PDEPEND", "IDEPEND",
- "DEFINED_PHASES", "DESCRIPTION", "EAPI", "HOMEPAGE",
- "INHERIT", "INHERITED", "IUSE", "KEYWORDS", "LICENSE", "PROPERTIES",
- "REQUIRED_USE", "RESTRICT", "SLOT", "SRC_URI", "_eclasses_",
+ "BDEPEND",
+ "DEPEND",
+ "RDEPEND",
+ "PDEPEND",
+ "IDEPEND",
+ "DEFINED_PHASES",
+ "DESCRIPTION",
+ "EAPI",
+ "HOMEPAGE",
+ "INHERIT",
+ "INHERITED",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "PROPERTIES",
+ "REQUIRED_USE",
+ "RESTRICT",
+ "SLOT",
+ "SRC_URI",
+ "_eclasses_",
)
-WORLD_FILE = '/var/lib/portage/world'
+WORLD_FILE = "/var/lib/portage/world"
EBUILD_DAEMON_PATH = pjoin(EBD_PATH, "ebuild-daemon.bash")
EBUILD_HELPERS_PATH = pjoin(EBD_PATH, "helpers")
diff --git a/src/pkgcore/ebuild/cpv.py b/src/pkgcore/ebuild/cpv.py
index 1b2f87037..41d3a32c8 100644
--- a/src/pkgcore/ebuild/cpv.py
+++ b/src/pkgcore/ebuild/cpv.py
@@ -9,8 +9,7 @@ from ..package import base
from . import atom
from .errors import InvalidCPV
-demand_compile_regexp(
- 'suffix_regexp', '^(alpha|beta|rc|pre|p)(\\d*)$')
+demand_compile_regexp("suffix_regexp", "^(alpha|beta|rc|pre|p)(\\d*)$")
suffix_value = {"pre": -2, "p": 1, "alpha": -4, "beta": -3, "rc": -1}
@@ -18,19 +17,20 @@ suffix_value = {"pre": -2, "p": 1, "alpha": -4, "beta": -3, "rc": -1}
# to prevent version chunks from showing up in the package
demand_compile_regexp(
- 'isvalid_version_re',
- r"^(?:\d+)(?:\.\d+)*[a-zA-Z]?(?:_(p(?:re)?|beta|alpha|rc)\d*)*$")
+ "isvalid_version_re",
+ r"^(?:\d+)(?:\.\d+)*[a-zA-Z]?(?:_(p(?:re)?|beta|alpha|rc)\d*)*$",
+)
demand_compile_regexp(
- 'isvalid_cat_re', r"^(?:[a-zA-Z0-9][-a-zA-Z0-9+._]*(?:/(?!$))?)+$")
+ "isvalid_cat_re", r"^(?:[a-zA-Z0-9][-a-zA-Z0-9+._]*(?:/(?!$))?)+$"
+)
# empty string is fine, means a -- was encountered.

-demand_compile_regexp(
- '_pkg_re', r"^[a-zA-Z0-9+_]+$")
+demand_compile_regexp("_pkg_re", r"^[a-zA-Z0-9+_]+$")
def isvalid_pkg_name(chunks):
- if not chunks[0] or chunks[0][0] == '+':
+ if not chunks[0] or chunks[0][0] == "+":
# this means a leading -; additionally, '+asdf' is disallowed
return False
# all remaining chunks can either be empty (meaning multiple
@@ -46,7 +46,7 @@ def isvalid_pkg_name(chunks):
def isvalid_rev(s):
- return s and s[0] == 'r' and s[1:].isdigit()
+ return s and s[0] == "r" and s[1:].isdigit()
class Revision(UserString):
@@ -62,13 +62,13 @@ class Revision(UserString):
try:
self._revint = int(self.data)
except ValueError:
- raise InvalidCPV(self.data, 'invalid revision')
+ raise InvalidCPV(self.data, "invalid revision")
else:
self._revint = 0
def __str__(self):
if not self.data:
- return '0'
+ return "0"
else:
return self.data
@@ -204,13 +204,13 @@ def ver_cmp(ver1, rev1, ver2, rev2):
val = suffix_value[match.group(1)]
if val:
return cmp(0, val)
- return cmp(0, int("0"+match.group(2)))
+ return cmp(0, int("0" + match.group(2)))
if x == parts2_len:
match = suffix_regexp.match(parts1[x])
val = suffix_value[match.group(1)]
if val:
return cmp(val, 0)
- return cmp(int("0"+match.group(2)), 0)
+ return cmp(int("0" + match.group(2)), 0)
# If the string values are equal, no need to parse them.
# Continue on to the next.
@@ -229,7 +229,7 @@ def ver_cmp(ver1, rev1, ver2, rev2):
return c
# Otherwise use the digit as the basis for comparison.
- c = cmp(int("0"+match1.group(2)), int("0"+match2.group(2)))
+ c = cmp(int("0" + match1.group(2)), int("0" + match2.group(2)))
if c:
return c
@@ -250,7 +250,15 @@ class CPV(base.base):
:ivar unversioned_atom: atom matching all versions of this package
"""
- __slots__ = ("cpvstr", "key", "category", "package", "version", "revision", "fullver")
+ __slots__ = (
+ "cpvstr",
+ "key",
+ "category",
+ "package",
+ "version",
+ "revision",
+ "fullver",
+ )
def __init__(self, *args, versioned=None):
"""
@@ -271,7 +279,8 @@ class CPV(base.base):
cpvstr = args[0]
if versioned is None:
raise TypeError(
- f"single argument invocation requires versioned kwarg; {cpvstr!r}")
+ f"single argument invocation requires versioned kwarg; {cpvstr!r}"
+ )
elif l == 2:
cpvstr = f"{args[0]}/{args[1]}"
versioned = False
@@ -280,76 +289,84 @@ class CPV(base.base):
versioned = True
else:
raise TypeError(
- f"CPV takes 1 arg (cpvstr), 2 (cat, pkg), or 3 (cat, pkg, ver): got {args!r}")
+ f"CPV takes 1 arg (cpvstr), 2 (cat, pkg), or 3 (cat, pkg, ver): got {args!r}"
+ )
try:
category, pkgver = cpvstr.rsplit("/", 1)
except ValueError:
# occurs if the rsplit yields only one item
- raise InvalidCPV(cpvstr, 'no package or version components')
+ raise InvalidCPV(cpvstr, "no package or version components")
if not isvalid_cat_re.match(category):
- raise InvalidCPV(cpvstr, 'invalid category name')
+ raise InvalidCPV(cpvstr, "invalid category name")
sf = object.__setattr__
- sf(self, 'category', category)
- sf(self, 'cpvstr', cpvstr)
+ sf(self, "category", category)
+ sf(self, "cpvstr", cpvstr)
pkg_chunks = pkgver.split("-")
lpkg_chunks = len(pkg_chunks)
if versioned:
if lpkg_chunks == 1:
- raise InvalidCPV(cpvstr, 'missing package version')
+ raise InvalidCPV(cpvstr, "missing package version")
if isvalid_rev(pkg_chunks[-1]):
if lpkg_chunks < 3:
# needs at least ('pkg', 'ver', 'rev')
raise InvalidCPV(
- cpvstr, 'missing package name, version, and/or revision')
+ cpvstr, "missing package name, version, and/or revision"
+ )
rev = Revision(pkg_chunks.pop(-1)[1:])
if rev == 0:
# reset stored cpvstr to drop -r0+
- sf(self, 'cpvstr', f"{category}/{'-'.join(pkg_chunks)}")
- elif rev[0] == '0':
+ sf(self, "cpvstr", f"{category}/{'-'.join(pkg_chunks)}")
+ elif rev[0] == "0":
# reset stored cpvstr to drop leading zeroes from revision
- sf(self, 'cpvstr', f"{category}/{'-'.join(pkg_chunks)}-r{int(rev)}")
- sf(self, 'revision', rev)
+ sf(self, "cpvstr", f"{category}/{'-'.join(pkg_chunks)}-r{int(rev)}")
+ sf(self, "revision", rev)
else:
- sf(self, 'revision', Revision(''))
+ sf(self, "revision", Revision(""))
if not isvalid_version_re.match(pkg_chunks[-1]):
raise InvalidCPV(cpvstr, f"invalid version '{pkg_chunks[-1]}'")
- sf(self, 'version', pkg_chunks.pop(-1))
+ sf(self, "version", pkg_chunks.pop(-1))
if self.revision:
- sf(self, 'fullver', f"{self.version}-r{self.revision}")
+ sf(self, "fullver", f"{self.version}-r{self.revision}")
else:
- sf(self, 'fullver', self.version)
+ sf(self, "fullver", self.version)
if not isvalid_pkg_name(pkg_chunks):
- raise InvalidCPV(cpvstr, 'invalid package name')
- sf(self, 'package', '-'.join(pkg_chunks))
- sf(self, 'key', f"{category}/{self.package}")
+ raise InvalidCPV(cpvstr, "invalid package name")
+ sf(self, "package", "-".join(pkg_chunks))
+ sf(self, "key", f"{category}/{self.package}")
else:
if not isvalid_pkg_name(pkg_chunks):
- raise InvalidCPV(cpvstr, 'invalid package name')
- sf(self, 'revision', None)
- sf(self, 'fullver', None)
- sf(self, 'version', None)
- sf(self, 'key', cpvstr)
- sf(self, 'package', '-'.join(pkg_chunks))
+ raise InvalidCPV(cpvstr, "invalid package name")
+ sf(self, "revision", None)
+ sf(self, "fullver", None)
+ sf(self, "version", None)
+ sf(self, "key", cpvstr)
+ sf(self, "package", "-".join(pkg_chunks))
def __hash__(self):
return hash(self.cpvstr)
def __repr__(self):
- return '<%s cpvstr=%s @%#8x>' % (
- self.__class__.__name__, getattr(self, 'cpvstr', None), id(self))
+ return "<%s cpvstr=%s @%#8x>" % (
+ self.__class__.__name__,
+ getattr(self, "cpvstr", None),
+ id(self),
+ )
def __str__(self):
- return getattr(self, 'cpvstr', 'None')
+ return getattr(self, "cpvstr", "None")
def __eq__(self, other):
try:
if self.cpvstr == other.cpvstr:
return True
if self.category == other.category and self.package == other.package:
- return ver_cmp(self.version, self.revision, other.version, other.revision) == 0
+ return (
+ ver_cmp(self.version, self.revision, other.version, other.revision)
+ == 0
+ )
except AttributeError:
pass
return False
@@ -361,7 +378,12 @@ class CPV(base.base):
try:
if self.category == other.category:
if self.package == other.package:
- return ver_cmp(self.version, self.revision, other.version, other.revision) < 0
+ return (
+ ver_cmp(
+ self.version, self.revision, other.version, other.revision
+ )
+ < 0
+ )
return self.package < other.package
return self.category < other.category
except AttributeError:
@@ -374,7 +396,12 @@ class CPV(base.base):
try:
if self.category == other.category:
if self.package == other.package:
- return ver_cmp(self.version, self.revision, other.version, other.revision) <= 0
+ return (
+ ver_cmp(
+ self.version, self.revision, other.version, other.revision
+ )
+ <= 0
+ )
return self.package < other.package
return self.category < other.category
except AttributeError:
@@ -387,7 +414,12 @@ class CPV(base.base):
try:
if self.category == other.category:
if self.package == other.package:
- return ver_cmp(self.version, self.revision, other.version, other.revision) > 0
+ return (
+ ver_cmp(
+ self.version, self.revision, other.version, other.revision
+ )
+ > 0
+ )
return self.package > other.package
return self.category > other.category
except AttributeError:
@@ -400,7 +432,12 @@ class CPV(base.base):
try:
if self.category == other.category:
if self.package == other.package:
- return ver_cmp(self.version, self.revision, other.version, other.revision) >= 0
+ return (
+ ver_cmp(
+ self.version, self.revision, other.version, other.revision
+ )
+ >= 0
+ )
return self.package > other.package
return self.category > other.category
except AttributeError:
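Illustrative sketch (not part of the commit): the cpv.py hunks reformat the CPV constructor, the -r0 normalization, and ver_cmp() without touching their logic. A small example, assuming the single-string constructor with the versioned keyword shown above:

# Illustrative only; relies on the constructor and attributes shown in the hunks above.
from pkgcore.ebuild.cpv import CPV

pkg = CPV("dev-lang/python-3.11.1-r1", versioned=True)
print(pkg.category, pkg.package)   # dev-lang python
print(pkg.version, pkg.fullver)    # 3.11.1 3.11.1-r1

# "-r0" is dropped from the stored cpvstr per the revision handling above.
print(CPV("dev-lang/python-3.11.1-r0", versioned=True).cpvstr)

# Suffix ordering follows suffix_value: _alpha < _beta < _rc < plain release.
assert CPV("a/b-1.0_alpha1", versioned=True) < CPV("a/b-1.0_rc1", versioned=True)
assert CPV("a/b-1.0_rc1", versioned=True) < CPV("a/b-1.0", versioned=True)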
diff --git a/src/pkgcore/ebuild/digest.py b/src/pkgcore/ebuild/digest.py
index d8552d304..5a9cce5b2 100644
--- a/src/pkgcore/ebuild/digest.py
+++ b/src/pkgcore/ebuild/digest.py
@@ -23,13 +23,13 @@ def _write_manifest(handle, chf, filename, chksums):
handle.write("%s %s %i" % (chf.upper(), filename, size))
for chf in sorted(chksums):
handle.write(" %s %s" % (chf.upper(), get_handler(chf).long2str(chksums[chf])))
- handle.write('\n')
+ handle.write("\n")
def convert_chksums(iterable):
for chf, sum in iterable:
chf = chf.lower()
- if chf == 'size':
+ if chf == "size":
# explicit size entries are stupid, format has implicit size
continue
else:
@@ -61,20 +61,21 @@ def parse_manifest(source, ignore_gpg=True):
d = types.get(line[0])
if d is None:
raise errors.ParseChksumError(
- source, f"unknown manifest type: {line[0]}: {line!r}")
+ source, f"unknown manifest type: {line[0]}: {line!r}"
+ )
if len(line) % 2 != 1:
raise errors.ParseChksumError(
source,
"manifest 2 entry doesn't have right "
- "number of tokens, %i: %r" %
- (len(line), line))
+ "number of tokens, %i: %r" % (len(line), line),
+ )
chf_types.update(line[3::2])
# this is a trick to do pairwise collapsing;
# [size, 1] becomes [(size, 1)]
i = iter(line[3:])
d[line[1]] = [("size", int(line[2]))] + list(convert_chksums(zip(i, i)))
except (IndexError, ValueError):
- raise errors.ParseChksumError(source, 'invalid data format')
+ raise errors.ParseChksumError(source, "invalid data format")
finally:
if f is not None and f.close:
f.close()
@@ -86,7 +87,6 @@ def parse_manifest(source, ignore_gpg=True):
class Manifest:
-
def __init__(self, path, enforce_gpg=False, thin=False, allow_missing=False):
self.path = path
self.thin = thin
@@ -107,7 +107,7 @@ class Manifest:
# recreate cpv from manifest path
catpn = os.sep.join(self.path.split(os.sep)[-3:-1])
pkg = cpv.UnversionedCPV(catpn)
- raise errors.MetadataException(pkg, 'manifest', str(e))
+ raise errors.MetadataException(pkg, "manifest", str(e))
self._dist, self._aux, self._ebuild, self._misc = data
self._sourced = True
@@ -126,37 +126,45 @@ class Manifest:
excludes = frozenset(["CVS", ".svn", "Manifest"])
aux, ebuild, misc = {}, {}, {}
if not self.thin:
- filesdir = '/files/'
- for obj in iter_scan('/', offset=os.path.dirname(self.path), chksum_types=chfs):
+ filesdir = "/files/"
+ for obj in iter_scan(
+ "/", offset=os.path.dirname(self.path), chksum_types=chfs
+ ):
if not obj.is_reg:
continue
pathname = obj.location
- if excludes.intersection(pathname.split('/')):
+ if excludes.intersection(pathname.split("/")):
continue
if pathname.startswith(filesdir):
d = aux
- pathname = pathname[len(filesdir):]
- elif obj.dirname == '/':
+ pathname = pathname[len(filesdir) :]
+ elif obj.dirname == "/":
pathname = pathname[1:]
- if obj.location[-7:] == '.ebuild':
+ if obj.location[-7:] == ".ebuild":
d = ebuild
else:
d = misc
else:
- raise Exception("Unexpected directory found in %r; %r" % (self.path, obj.dirname))
+ raise Exception(
+ "Unexpected directory found in %r; %r"
+ % (self.path, obj.dirname)
+ )
d[pathname] = dict(obj.chksums)
- handle = open(self.path, 'w')
+ handle = open(self.path, "w")
# write it in alphabetical order; aux gets flushed now.
for path, chksums in sorted(aux.items(), key=_key_sort):
- _write_manifest(handle, 'AUX', path, chksums)
+ _write_manifest(handle, "AUX", path, chksums)
# next dist...
- for fetchable in sorted(fetchables, key=operator.attrgetter('filename')):
+ for fetchable in sorted(fetchables, key=operator.attrgetter("filename")):
_write_manifest(
- handle, 'DIST', os.path.basename(fetchable.filename),
- dict(fetchable.chksums))
+ handle,
+ "DIST",
+ os.path.basename(fetchable.filename),
+ dict(fetchable.chksums),
+ )
# then ebuild and misc
for mtype, inst in (("EBUILD", ebuild), ("MISC", misc)):
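Illustrative aside (not part of the commit): the digest.py hunk keeps the pairwise-collapsing trick noted in the comment above, where zip(i, i) over a single iterator turns a flat "CHF value CHF value ..." token stream into pairs. A standalone demonstration in plain Python:

# Standalone demonstration of the zip(i, i) pairing trick used in parse_manifest().
tokens = ["BLAKE2B", "abc123", "SHA512", "def456"]
i = iter(tokens)
pairs = list(zip(i, i))
print(pairs)   # [('BLAKE2B', 'abc123'), ('SHA512', 'def456')]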
diff --git a/src/pkgcore/ebuild/domain.py b/src/pkgcore/ebuild/domain.py
index a1f231e9f..15f2b2179 100644
--- a/src/pkgcore/ebuild/domain.py
+++ b/src/pkgcore/ebuild/domain.py
@@ -44,9 +44,15 @@ from ..util.parserestrict import ParseError, parse_match
from . import const
from . import repository as ebuild_repo
from .atom import atom as _atom
-from .misc import (ChunkedDataDict, chunked_data, collapsed_restrict_to_data, incremental_expansion,
- incremental_expansion_license, non_incremental_collapsed_restrict_to_data,
- optimize_incrementals)
+from .misc import (
+ ChunkedDataDict,
+ chunked_data,
+ collapsed_restrict_to_data,
+ incremental_expansion,
+ incremental_expansion_license,
+ non_incremental_collapsed_restrict_to_data,
+ optimize_incrementals,
+)
from .portage_conf import PortageConfig
from .repo_objs import Licenses, RepoConfig
from .triggers import GenerateTriggers
@@ -57,7 +63,7 @@ def package_masks(iterable):
try:
yield parse_match(line), line, lineno, path
except ParseError as e:
- logger.warning(f'{path!r}, line {lineno}: parsing error: {e}')
+ logger.warning(f"{path!r}, line {lineno}: parsing error: {e}")
def package_keywords_splitter(iterable):
@@ -66,7 +72,7 @@ def package_keywords_splitter(iterable):
try:
yield parse_match(v[0]), tuple(v[1:]), line, lineno, path
except ParseError as e:
- logger.warning(f'{path!r}, line {lineno}: parsing error: {e}')
+ logger.warning(f"{path!r}, line {lineno}: parsing error: {e}")
def package_env_splitter(basedir, iterable):
@@ -85,7 +91,7 @@ def package_env_splitter(basedir, iterable):
try:
yield parse_match(val[0]), tuple(paths), line, lineno, path
except ParseError as e:
- logger.warning(f'{path!r}, line {lineno}: parsing error: {e}')
+ logger.warning(f"{path!r}, line {lineno}: parsing error: {e}")
def apply_mask_filter(globs, atoms, pkg, mode):
@@ -118,17 +124,20 @@ def generate_filter(masks, unmasks, *extra):
r = (packages.OrRestriction(masking, unmasking, disable_inst_caching=True),)
else:
r = (masking,)
- return packages.AndRestriction(disable_inst_caching=True, finalize=True, *(r + extra))
+ return packages.AndRestriction(
+ disable_inst_caching=True, finalize=True, *(r + extra)
+ )
def _read_config_file(path):
"""Read all the data files under a given path."""
try:
for fs_obj in iter_scan(path, follow_symlinks=True):
- if not fs_obj.is_reg or '/.' in fs_obj.location:
+ if not fs_obj.is_reg or "/." in fs_obj.location:
continue
for lineno, line in iter_read_bash(
- fs_obj.location, allow_line_cont=True, enum_line=True):
+ fs_obj.location, allow_line_cont=True, enum_line=True
+ ):
yield line, lineno, fs_obj.location
except FileNotFoundError:
pass
@@ -136,8 +145,9 @@ def _read_config_file(path):
raise Failure(f"failed reading {path!r}: {e}") from e
-def load_property(filename, *, read_func=_read_config_file,
- parse_func=lambda x: x, fallback=()):
+def load_property(
+ filename, *, read_func=_read_config_file, parse_func=lambda x: x, fallback=()
+):
"""Decorator for parsing files using specified read/parse methods.
:param filename: The filename to parse within the config directory.
@@ -146,6 +156,7 @@ def load_property(filename, *, read_func=_read_config_file,
:keyword fallback: What to return if the file does not exist.
:return: A :py:`klass.jit.attr_named` property instance.
"""
+
def f(func):
def _load_and_invoke(func, fallback, self, *args, **kwargs):
if filename.startswith(os.path.sep):
@@ -159,9 +170,11 @@ def load_property(filename, *, read_func=_read_config_file,
else:
data = fallback
return func(self, data, *args, **kwargs)
- doc = getattr(func, '__doc__', None)
- jit_attr_named = klass.jit_attr_named(f'_jit_{func.__name__}', doc=doc)
+
+ doc = getattr(func, "__doc__", None)
+ jit_attr_named = klass.jit_attr_named(f"_jit_{func.__name__}", doc=doc)
return jit_attr_named(partial(_load_and_invoke, func, fallback))
+
return f
@@ -175,26 +188,50 @@ def load_property(filename, *, read_func=_read_config_file,
class domain(config_domain):
# XXX ouch, verify this crap and add defaults and stuff
- _types = {'profile': 'ref:profile', 'repos': 'lazy_refs:repo', 'vdb': 'lazy_refs:repo'}
+ _types = {
+ "profile": "ref:profile",
+ "repos": "lazy_refs:repo",
+ "vdb": "lazy_refs:repo",
+ }
for _thing in (
- 'root', 'config_dir', 'CHOST', 'CBUILD', 'CTARGET', 'CFLAGS',
- 'PATH', 'PORTAGE_TMPDIR', 'DISTCC_PATH', 'DISTCC_DIR', 'CCACHE_DIR'):
- _types[_thing] = 'str'
+ "root",
+ "config_dir",
+ "CHOST",
+ "CBUILD",
+ "CTARGET",
+ "CFLAGS",
+ "PATH",
+ "PORTAGE_TMPDIR",
+ "DISTCC_PATH",
+ "DISTCC_DIR",
+ "CCACHE_DIR",
+ ):
+ _types[_thing] = "str"
# TODO this is missing defaults
pkgcore_config_type = ConfigHint(
- _types, typename='domain',
- required=['repos', 'profile', 'vdb'],
- allow_unknowns=True)
+ _types,
+ typename="domain",
+ required=["repos", "profile", "vdb"],
+ allow_unknowns=True,
+ )
del _types, _thing
- def __init__(self, profile, repos, vdb, root='/', prefix='/',
- config_dir='/etc/portage', **settings):
+ def __init__(
+ self,
+ profile,
+ repos,
+ vdb,
+ root="/",
+ prefix="/",
+ config_dir="/etc/portage",
+ **settings,
+ ):
self.root = settings["ROOT"] = root
self.config_dir = config_dir
self.prefix = prefix
- self.ebuild_hook_dir = pjoin(self.config_dir, 'env')
+ self.ebuild_hook_dir = pjoin(self.config_dir, "env")
self.profile = profile
self.__repos = repos
self.__vdb = vdb
@@ -210,21 +247,22 @@ class domain(config_domain):
@load_property("/etc/profile.env", read_func=read_bash_dict)
def system_profile(self, data):
# prepend system profile $PATH if it exists
- if 'PATH' in data:
+ if "PATH" in data:
path = stable_unique(
- data['PATH'].split(os.pathsep) + os.environ['PATH'].split(os.pathsep))
- os.environ['PATH'] = os.pathsep.join(path)
+ data["PATH"].split(os.pathsep) + os.environ["PATH"].split(os.pathsep)
+ )
+ os.environ["PATH"] = os.pathsep.join(path)
return ImmutableDict(data)
- @klass.jit_attr_named('_jit_reset_settings', uncached_val=None)
+ @klass.jit_attr_named("_jit_reset_settings", uncached_val=None)
def settings(self):
settings = self._settings
- if 'CHOST' in settings and 'CBUILD' not in settings:
- settings['CBUILD'] = settings['CHOST']
+ if "CHOST" in settings and "CBUILD" not in settings:
+ settings["CBUILD"] = settings["CHOST"]
# if unset, MAKEOPTS defaults to CPU thread count
- if 'MAKEOPTS' not in settings:
- settings['MAKEOPTS'] = '-j%i' % cpu_count()
+ if "MAKEOPTS" not in settings:
+ settings["MAKEOPTS"] = "-j%i" % cpu_count()
# reformat env.d and make.conf incrementals
system_profile_settings = {}
@@ -257,16 +295,19 @@ class domain(config_domain):
# skipped because negations are required for license filtering.
if incremental not in settings or incremental in ("USE", "ACCEPT_LICENSE"):
continue
- settings[incremental] = tuple(incremental_expansion(
- settings[incremental],
- msg_prefix=f'while expanding {incremental}'))
-
- if 'ACCEPT_KEYWORDS' not in settings:
- raise Failure("No ACCEPT_KEYWORDS setting detected from profile, "
- "or user config")
- settings['ACCEPT_KEYWORDS'] = incremental_expansion(
- settings['ACCEPT_KEYWORDS'],
- msg_prefix='while expanding ACCEPT_KEYWORDS')
+ settings[incremental] = tuple(
+ incremental_expansion(
+ settings[incremental], msg_prefix=f"while expanding {incremental}"
+ )
+ )
+
+ if "ACCEPT_KEYWORDS" not in settings:
+ raise Failure(
+ "No ACCEPT_KEYWORDS setting detected from profile, " "or user config"
+ )
+ settings["ACCEPT_KEYWORDS"] = incremental_expansion(
+ settings["ACCEPT_KEYWORDS"], msg_prefix="while expanding ACCEPT_KEYWORDS"
+ )
# pull trigger options from the env
self._triggers = GenerateTriggers(self, settings)
@@ -276,14 +317,14 @@ class domain(config_domain):
@property
def arch(self):
try:
- return self.settings['ARCH']
+ return self.settings["ARCH"]
except KeyError:
raise Failure("No ARCH setting detected from profile, or user config")
@property
def distdir(self):
try:
- return self.settings['DISTDIR']
+ return self.settings["DISTDIR"]
except KeyError:
raise Failure("No DISTDIR setting detected from config")
@@ -295,16 +336,18 @@ class domain(config_domain):
def unstable_arch(self):
return f"~{self.arch}"
- @klass.jit_attr_named('_jit_reset_features', uncached_val=None)
+ @klass.jit_attr_named("_jit_reset_features", uncached_val=None)
def features(self):
- conf_features = list(self.settings.get('FEATURES', ()))
- env_features = os.environ.get('FEATURES', '').split()
+ conf_features = list(self.settings.get("FEATURES", ()))
+ env_features = os.environ.get("FEATURES", "").split()
return frozenset(optimize_incrementals(conf_features + env_features))
- @klass.jit_attr_named('_jit_reset_use', uncached_val=None)
+ @klass.jit_attr_named("_jit_reset_use", uncached_val=None)
def use(self):
# append expanded use, FEATURES, and environment defined USE flags
- use = list(self.settings.get('USE', ())) + list(self.profile.expand_use(self.settings))
+ use = list(self.settings.get("USE", ())) + list(
+ self.profile.expand_use(self.settings)
+ )
# hackish implementation; if test is on, flip on the flag
if "test" in self.features:
@@ -312,9 +355,9 @@ class domain(config_domain):
if "prefix" in self.features:
use.append("prefix")
- return frozenset(optimize_incrementals(use + os.environ.get('USE', '').split()))
+ return frozenset(optimize_incrementals(use + os.environ.get("USE", "").split()))
- @klass.jit_attr_named('_jit_reset_enabled_use', uncached_val=None)
+ @klass.jit_attr_named("_jit_reset_enabled_use", uncached_val=None)
def enabled_use(self):
use = ChunkedDataDict()
use.add_bare_global(*split_negations(self.use))
@@ -326,7 +369,7 @@ class domain(config_domain):
@klass.jit_attr_none
def forced_use(self):
use = ChunkedDataDict()
- use.merge(getattr(self.profile, 'forced_use'))
+ use.merge(getattr(self.profile, "forced_use"))
use.add_bare_global((), (self.arch,))
use.freeze()
return use
@@ -334,7 +377,7 @@ class domain(config_domain):
@klass.jit_attr_none
def stable_forced_use(self):
use = ChunkedDataDict()
- use.merge(getattr(self.profile, 'stable_forced_use'))
+ use.merge(getattr(self.profile, "stable_forced_use"))
use.add_bare_global((), (self.arch,))
use.freeze()
return use
@@ -386,7 +429,7 @@ class domain(config_domain):
@klass.jit_attr
def bashrcs(self):
- files = sorted_scan(pjoin(self.config_dir, 'bashrc'), follow_symlinks=True)
+ files = sorted_scan(pjoin(self.config_dir, "bashrc"), follow_symlinks=True)
return tuple(local_source(x) for x in files)
def _pkg_filters(self, pkg_accept_keywords=None, pkg_keywords=None):
@@ -397,21 +440,26 @@ class domain(config_domain):
# ~amd64 -> [amd64, ~amd64]
default_keywords = set([self.arch])
- default_keywords.update(self.settings['ACCEPT_KEYWORDS'])
- for x in self.settings['ACCEPT_KEYWORDS']:
+ default_keywords.update(self.settings["ACCEPT_KEYWORDS"])
+ for x in self.settings["ACCEPT_KEYWORDS"]:
if x.startswith("~"):
default_keywords.add(x.lstrip("~"))
# create keyword filters
accept_keywords = (
- pkg_keywords + pkg_accept_keywords + self.profile.accept_keywords)
- filters = [self._make_keywords_filter(
- default_keywords, accept_keywords,
- incremental="package.keywords" in const.incrementals)]
+ pkg_keywords + pkg_accept_keywords + self.profile.accept_keywords
+ )
+ filters = [
+ self._make_keywords_filter(
+ default_keywords,
+ accept_keywords,
+ incremental="package.keywords" in const.incrementals,
+ )
+ ]
# add license filters
master_license = []
- master_license.extend(self.settings.get('ACCEPT_LICENSE', ()))
+ master_license.extend(self.settings.get("ACCEPT_LICENSE", ()))
if master_license or self.pkg_licenses:
# restrict that matches iff the licenses are allowed
restrict = delegate(partial(self._apply_license_filter, master_license))
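Illustrative aside (not part of the commit): the hunk above keeps the keyword expansion noted in its comment ("~amd64 -> [amd64, ~amd64]"). A standalone sketch of that expansion; the variable names here are made up for illustration and are not pkgcore API:

# Standalone sketch of the ACCEPT_KEYWORDS expansion commented above.
arch = "amd64"
accept_keywords = ("~amd64",)
default_keywords = {arch, *accept_keywords}
default_keywords.update(k.lstrip("~") for k in accept_keywords if k.startswith("~"))
print(sorted(default_keywords))   # ['amd64', '~amd64']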
@@ -435,12 +483,16 @@ class domain(config_domain):
matched_pkg_licenses += licenses
raw_accepted_licenses = master_licenses + matched_pkg_licenses
- license_manager = getattr(pkg.repo, 'licenses', self._default_licenses_manager)
+ license_manager = getattr(pkg.repo, "licenses", self._default_licenses_manager)
for and_pair in pkg.license.dnf_solutions():
accepted = incremental_expansion_license(
- pkg, and_pair, license_manager.groups, raw_accepted_licenses,
- msg_prefix="while checking ACCEPT_LICENSE ")
+ pkg,
+ and_pair,
+ license_manager.groups,
+ raw_accepted_licenses,
+ msg_prefix="while checking ACCEPT_LICENSE ",
+ )
if accepted.issuperset(and_pair):
return True
return False
@@ -449,7 +501,8 @@ class domain(config_domain):
"""Generates a restrict that matches iff the keywords are allowed."""
if not accept_keywords and not self.profile.keywords:
return packages.PackageRestriction(
- "keywords", values.ContainmentMatch(frozenset(default_keys)))
+ "keywords", values.ContainmentMatch(frozenset(default_keys))
+ )
if self.unstable_arch not in default_keys:
# stable; thus empty entries == ~arch
@@ -457,9 +510,10 @@ class domain(config_domain):
if not v:
return r, self.unstable_arch
return r, v
+
data = collapsed_restrict_to_data(
- ((packages.AlwaysTrue, default_keys),),
- (f(*i) for i in accept_keywords))
+ ((packages.AlwaysTrue, default_keys),), (f(*i) for i in accept_keywords)
+ )
else:
if incremental:
f = collapsed_restrict_to_data
@@ -488,7 +542,7 @@ class domain(config_domain):
if atom.match(pkg):
pkg_keywords += keywords
allowed = data.pull_data(pkg)
- if '**' in allowed:
+ if "**" in allowed:
return True
if "*" in allowed:
for k in pkg_keywords:
@@ -503,13 +557,16 @@ class domain(config_domain):
@klass.jit_attr_none
def use_expand_re(self):
return re.compile(
- "^(?:[+-])?(%s)_(.*)$" %
- "|".join(x.lower() for x in self.profile.use_expand))
+ "^(?:[+-])?(%s)_(.*)$"
+ % "|".join(x.lower() for x in self.profile.use_expand)
+ )
def _split_use_expand_flags(self, use_stream):
stream = ((self.use_expand_re.match(x), x) for x in use_stream)
flags, ue_flags = predicate_split(bool, stream, itemgetter(0))
- return list(map(itemgetter(1), flags)), [(x[0].groups(), x[1]) for x in ue_flags]
+ return list(map(itemgetter(1), flags)), [
+ (x[0].groups(), x[1]) for x in ue_flags
+ ]
def get_package_use_unconfigured(self, pkg, for_metadata=True):
"""Determine use flags for a given package.
@@ -530,23 +587,28 @@ class domain(config_domain):
Three groups of use flags for the package in the following order:
immutable flags, enabled flags, and disabled flags.
"""
- pre_defaults = [x[1:] for x in pkg.iuse if x[0] == '+']
+ pre_defaults = [x[1:] for x in pkg.iuse if x[0] == "+"]
if pre_defaults:
pre_defaults, ue_flags = self._split_use_expand_flags(pre_defaults)
pre_defaults.extend(
- x[1] for x in ue_flags if x[0][0].upper() not in self.settings)
-
- attr = 'stable_' if self.stable_arch in pkg.keywords \
- and self.unstable_arch not in self.settings['ACCEPT_KEYWORDS'] else ''
- disabled = getattr(self.profile, attr + 'masked_use').pull_data(pkg)
- immutable = getattr(self, attr + 'forced_use').pull_data(pkg)
+ x[1] for x in ue_flags if x[0][0].upper() not in self.settings
+ )
+
+ attr = (
+ "stable_"
+ if self.stable_arch in pkg.keywords
+ and self.unstable_arch not in self.settings["ACCEPT_KEYWORDS"]
+ else ""
+ )
+ disabled = getattr(self.profile, attr + "masked_use").pull_data(pkg)
+ immutable = getattr(self, attr + "forced_use").pull_data(pkg)
# lock the configurable use flags to only what's in IUSE, and what's forced
# from the profiles (things like userland_GNU and arch)
enabled = self.enabled_use.pull_data(pkg, pre_defaults=pre_defaults)
# support globs for USE_EXPAND vars
- use_globs = [u for u in enabled if u.endswith('*')]
+ use_globs = [u for u in enabled if u.endswith("*")]
enabled_use_globs = []
for glob in use_globs:
for u in pkg.iuse_stripped:
@@ -565,7 +627,7 @@ class domain(config_domain):
def get_package_domain(self, pkg):
"""Get domain object with altered settings from matching package.env entries."""
- if getattr(pkg, '_domain', None) is not None:
+ if getattr(pkg, "_domain", None) is not None:
return pkg._domain
files = []
@@ -576,8 +638,12 @@ class domain(config_domain):
pkg_settings = dict(self._settings.orig.items())
for path in files:
PortageConfig.load_make_conf(
- pkg_settings, path, allow_sourcing=True,
- allow_recurse=False, incrementals=True)
+ pkg_settings,
+ path,
+ allow_sourcing=True,
+ allow_recurse=False,
+ incrementals=True,
+ )
# TODO: Improve pkg domain vs main domain proxying, e.g. static
# jitted attrs should always be generated and pulled from the main
@@ -588,7 +654,7 @@ class domain(config_domain):
pkg_domain = copy.copy(self)
pkg_domain._settings = ProtectedDict(pkg_settings)
# reset jitted attrs that can pull updated settings
- for attr in (x for x in dir(self) if x.startswith('_jit_reset_')):
+ for attr in (x for x in dir(self) if x.startswith("_jit_reset_")):
setattr(pkg_domain, attr, None)
# store altered domain on the pkg obj to avoid recreating pkg domain
object.__setattr__(pkg, "_domain", pkg_domain)
@@ -641,26 +707,26 @@ class domain(config_domain):
if repo_config.cache_format is not None:
# default to using md5 cache
- kwargs['cache'] = (md5_cache(path),)
+ kwargs["cache"] = (md5_cache(path),)
repo = ebuild_repo.tree(config, repo_config, **kwargs)
self.source_repos_raw += repo
# inject repo objects into config to dynamically register repo
data = {}
repo_conf_data = {
- 'class': 'pkgcore.ebuild.repo_objs.RepoConfig',
- 'location': path,
+ "class": "pkgcore.ebuild.repo_objs.RepoConfig",
+ "location": path,
}
repo_data = {
- 'inherit': ('ebuild-repo-common',),
- 'repo_config': f'conf:{path}',
+ "inherit": ("ebuild-repo-common",),
+ "repo_config": f"conf:{path}",
}
- data[f'conf:{path}'] = basics.AutoConfigSection(repo_conf_data)
+ data[f"conf:{path}"] = basics.AutoConfigSection(repo_conf_data)
data[path] = basics.AutoConfigSection(repo_data)
config.update(data)
# reset repo-related jit attrs
- for attr in (x for x in dir(self) if x.startswith('_jit_repo_')):
+ for attr in (x for x in dir(self) if x.startswith("_jit_repo_")):
setattr(self, attr, None)
if configure:
@@ -693,13 +759,21 @@ class domain(config_domain):
args.append(getattr(self, x))
except AttributeError as e:
raise Failure(
- f"failed configuring repo {repo!r}: "
- f"configurable missing: {e}") from e
+ f"failed configuring repo {repo!r}: " f"configurable missing: {e}"
+ ) from e
repo = repo.configure(*args)
return repo
- def filter_repo(self, repo, pkg_masks=None, pkg_unmasks=None, pkg_filters=None,
- pkg_accept_keywords=None, pkg_keywords=None, profile=True):
+ def filter_repo(
+ self,
+ repo,
+ pkg_masks=None,
+ pkg_unmasks=None,
+ pkg_filters=None,
+ pkg_accept_keywords=None,
+ pkg_keywords=None,
+ profile=True,
+ ):
"""Filter a configured repo."""
if pkg_masks is None:
pkg_masks = self.pkg_masks
@@ -726,40 +800,42 @@ class domain(config_domain):
filters = generate_filter(masks, unmasks, *pkg_filters)
return filtered.tree(repo, filters, True)
- @klass.jit_attr_named('_jit_reset_tmpdir', uncached_val=None)
+ @klass.jit_attr_named("_jit_reset_tmpdir", uncached_val=None)
def tmpdir(self):
"""Temporary directory for the system.
Uses PORTAGE_TMPDIR setting and falls back to using the system's TMPDIR if unset.
"""
- path = self.settings.get('PORTAGE_TMPDIR', '')
+ path = self.settings.get("PORTAGE_TMPDIR", "")
if not os.path.exists(path):
try:
os.mkdir(path)
except EnvironmentError:
path = tempfile.gettempdir()
- logger.warning(f'nonexistent PORTAGE_TMPDIR path, defaulting to {path!r}')
+ logger.warning(
+ f"nonexistent PORTAGE_TMPDIR path, defaulting to {path!r}"
+ )
return os.path.normpath(path)
@property
def pm_tmpdir(self):
"""Temporary directory for the package manager."""
- return pjoin(self.tmpdir, 'portage')
+ return pjoin(self.tmpdir, "portage")
@property
def repo_configs(self):
"""All defined repo configs."""
- return tuple(r.config for r in self.repos if hasattr(r, 'config'))
+ return tuple(r.config for r in self.repos if hasattr(r, "config"))
@klass.jit_attr
def KV(self):
"""The version of the running kernel."""
- ret, version = spawn_get_output(['uname', '-r'])
+ ret, version = spawn_get_output(["uname", "-r"])
if ret == 0:
return version[0].strip()
- raise ValueError('unknown kernel version')
+ raise ValueError("unknown kernel version")
- @klass.jit_attr_named('_jit_repo_source_repos_raw', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_source_repos_raw", uncached_val=None)
def source_repos_raw(self):
"""Group of package repos without filtering."""
repos = []
@@ -768,7 +844,8 @@ class domain(config_domain):
repo = r.instantiate()
if not repo.is_supported:
logger.warning(
- f'skipping {r.name!r} repo: unsupported EAPI {str(repo.eapi)!r}')
+ f"skipping {r.name!r} repo: unsupported EAPI {str(repo.eapi)!r}"
+ )
continue
repos.append(repo)
except config_errors.InstantiationError as e:
@@ -776,10 +853,10 @@ class domain(config_domain):
exc = find_user_exception(e)
if exc is None:
exc = e
- logger.warning(f'skipping {r.name!r} repo: {exc}')
+ logger.warning(f"skipping {r.name!r} repo: {exc}")
return RepositoryGroup(repos)
- @klass.jit_attr_named('_jit_repo_installed_repos_raw', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_installed_repos_raw", uncached_val=None)
def installed_repos_raw(self):
"""Group of installed repos without filtering."""
repos = [r.instantiate() for r in self.__vdb]
@@ -787,13 +864,12 @@ class domain(config_domain):
repos.append(self.profile.provides_repo)
return RepositoryGroup(repos)
- @klass.jit_attr_named('_jit_repo_repos_raw', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_repos_raw", uncached_val=None)
def repos_raw(self):
"""Group of all repos without filtering."""
- return RepositoryGroup(
- chain(self.source_repos_raw, self.installed_repos_raw))
+ return RepositoryGroup(chain(self.source_repos_raw, self.installed_repos_raw))
- @klass.jit_attr_named('_jit_repo_source_repos', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_source_repos", uncached_val=None)
def source_repos(self):
"""Group of configured, filtered package repos."""
repos = []
@@ -801,10 +877,10 @@ class domain(config_domain):
try:
repos.append(self._wrap_repo(repo, filtered=True))
except repo_errors.RepoError as e:
- logger.warning(f'skipping {repo.repo_id!r} repo: {e}')
+ logger.warning(f"skipping {repo.repo_id!r} repo: {e}")
return RepositoryGroup(repos)
- @klass.jit_attr_named('_jit_repo_installed_repos', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_installed_repos", uncached_val=None)
def installed_repos(self):
"""Group of configured, installed package repos."""
repos = []
@@ -812,63 +888,73 @@ class domain(config_domain):
try:
repos.append(self._wrap_repo(repo, filtered=False))
except repo_errors.RepoError as e:
- logger.warning(f'skipping {repo.repo_id!r} repo: {e}')
+ logger.warning(f"skipping {repo.repo_id!r} repo: {e}")
return RepositoryGroup(repos)
- @klass.jit_attr_named('_jit_repo_unfiltered_repos', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_unfiltered_repos", uncached_val=None)
def unfiltered_repos(self):
"""Group of all configured repos without filtering."""
repos = chain(self.source_repos, self.installed_repos)
return RepositoryGroup(
- (r.raw_repo if r.raw_repo is not None else r) for r in repos)
+ (r.raw_repo if r.raw_repo is not None else r) for r in repos
+ )
- @klass.jit_attr_named('_jit_repo_repos', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_repos", uncached_val=None)
def repos(self):
"""Group of all repos."""
- return RepositoryGroup(
- chain(self.source_repos, self.installed_repos))
+ return RepositoryGroup(chain(self.source_repos, self.installed_repos))
- @klass.jit_attr_named('_jit_repo_ebuild_repos', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_ebuild_repos", uncached_val=None)
def ebuild_repos(self):
"""Group of all ebuild repos bound with configuration data."""
return RepositoryGroup(
- x for x in self.source_repos
- if isinstance(x.raw_repo, ebuild_repo.ConfiguredTree))
+ x
+ for x in self.source_repos
+ if isinstance(x.raw_repo, ebuild_repo.ConfiguredTree)
+ )
- @klass.jit_attr_named('_jit_repo_ebuild_repos_unfiltered', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_ebuild_repos_unfiltered", uncached_val=None)
def ebuild_repos_unfiltered(self):
"""Group of all ebuild repos without package filtering."""
return RepositoryGroup(
- x for x in self.unfiltered_repos
- if isinstance(x, ebuild_repo.ConfiguredTree))
+ x
+ for x in self.unfiltered_repos
+ if isinstance(x, ebuild_repo.ConfiguredTree)
+ )
- @klass.jit_attr_named('_jit_repo_ebuild_repos_raw', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_ebuild_repos_raw", uncached_val=None)
def ebuild_repos_raw(self):
"""Group of all ebuild repos without filtering."""
return RepositoryGroup(
- x for x in self.source_repos_raw
- if isinstance(x, ebuild_repo.UnconfiguredTree))
+ x
+ for x in self.source_repos_raw
+ if isinstance(x, ebuild_repo.UnconfiguredTree)
+ )
- @klass.jit_attr_named('_jit_repo_binary_repos', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_binary_repos", uncached_val=None)
def binary_repos(self):
"""Group of all binary repos bound with configuration data."""
return RepositoryGroup(
- x for x in self.source_repos
- if isinstance(x.raw_repo, binary_repo.ConfiguredTree))
+ x
+ for x in self.source_repos
+ if isinstance(x.raw_repo, binary_repo.ConfiguredTree)
+ )
- @klass.jit_attr_named('_jit_repo_binary_repos_unfiltered', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_binary_repos_unfiltered", uncached_val=None)
def binary_repos_unfiltered(self):
"""Group of all binary repos without package filtering."""
return RepositoryGroup(
- x for x in self.unfiltered_repos
- if isinstance(x, binary_repo.ConfiguredTree))
+ x
+ for x in self.unfiltered_repos
+ if isinstance(x, binary_repo.ConfiguredTree)
+ )
- @klass.jit_attr_named('_jit_repo_binary_repos_raw', uncached_val=None)
+ @klass.jit_attr_named("_jit_repo_binary_repos_raw", uncached_val=None)
def binary_repos_raw(self):
"""Group of all binary repos without filtering."""
return RepositoryGroup(
- x for x in self.source_repos_raw
- if isinstance(x, binary_repo.tree))
+ x for x in self.source_repos_raw if isinstance(x, binary_repo.tree)
+ )
# multiplexed repos
all_repos = klass.alias_attr("repos.combined")
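
For orientation, the repo-group attributes reformatted above are normally reached through a configured domain object. A minimal usage sketch, assuming an existing pkgcore configuration on the host (the load_config/get_default calls are not part of this diff and are shown only as an illustration):

    from pkgcore.config import load_config

    # load the system pkgcore configuration and grab the default domain
    domain = load_config().get_default("domain")
    for repo in domain.ebuild_repos:      # configured, filtered ebuild repos
        print(repo.repo_id)
    print(domain.KV)                      # kernel version as reported by `uname -r`
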
diff --git a/src/pkgcore/ebuild/eapi.py b/src/pkgcore/ebuild/eapi.py
index 65fb257d8..2ec7557e0 100644
--- a/src/pkgcore/ebuild/eapi.py
+++ b/src/pkgcore/ebuild/eapi.py
@@ -15,148 +15,108 @@ from snakeoil.process.spawn import bash_version
from ..log import logger
from . import atom, const
-demand_compile_regexp(
- '_valid_EAPI_regex', r"^[A-Za-z0-9_][A-Za-z0-9+_.-]*$"
+demand_compile_regexp("_valid_EAPI_regex", r"^[A-Za-z0-9_][A-Za-z0-9+_.-]*$")
+
+eapi_optionals = ImmutableDict(
+ {
+ # Controls whether PROPERTIES and RESTRICT are accumulated across eclasses.
+ "accumulate_properties_restrict": False,
+ # Controls what version of bash compatibility to force; see PMS.
+ "bash_compat": "3.2",
+ # Controls whether -r is allowed for dodoc.
+ "dodoc_allow_recursive": False,
+ # Controls the language awareness of doman; see PMS.
+ "doman_language_detect": False,
+ # Controls whether -i18n option is allowed.
+ "doman_language_override": False,
+ # Controls whether dosym -r option is allowed.
+ "dosym_relative": False,
+ # Controls whether an ebuild_phase function exists for ebuild consumption.
+ "ebuild_phase_func": False,
+ # Controls whether REPLACING vars are exported to ebuilds; see PMS.
+ "exports_replacing": False,
+ # Controls whether failglob is enabled globally; see PMS.
+ "global_failglob": False,
+ # Controls whether MERGE vars are exported to ebuilds; see PMS.
+ "has_merge_type": False,
+ # Controls whether PORTDIR and ECLASSDIR are exported to ebuilds; see PMS.
+ "has_portdir": True,
+ # Controls whether DESTTREE and INSDESTTREE are exported during src_install; see PMS.
+ "has_desttree": True,
+ # Controls whether ROOT, EROOT, D, and ED end with a trailing slash; see PMS.
+ "trailing_slash": os.sep,
+ # Controls whether SYSROOT, ESYSROOT, and BROOT are defined; see PMS.
+ "has_sysroot": False,
+ # Controls whether package.provided files in profiles are supported; see PMS.
+ "profile_pkg_provided": True,
+ # Controls whether package.mask and other files in profiles can
+ # be directories; see PMS.
+ "has_profile_data_dirs": False,
+ # Controls whether REQUIRED_USE is supported, enforcing constraints on
+ # allowed use configuration states.
+ "has_required_use": False,
+ # Controls whether USE dependency defaults are supported, see PMS.
+ "has_use_dep_defaults": False,
+ # Controls whether ENV_UNSET is supported, see PMS.
+ "has_env_unset": False,
+ # Controls whether AA env var is exported to ebuilds; this is a flattened
+ # listing of each filename in SRC_URI.
+ "has_AA": True,
+ # Controls whether KV (kernel version; see PMS for details) is exported.
+ "has_KV": True,
+ # Controls whether or not pkgcore, or extensions loaded, actually fully
+ # support this EAPI.
+ "is_supported": True,
+ # Controls whether IUSE defaults are supported; see PMS.
+ "iuse_defaults": False,
+ # Controls whether new* style bash functions can take their content input
+ # from stdin, rather than an explicit ondisk file.
+ "new_reads_stdin": False,
+ # Controls whether utilities die on failure; see PMS.
+ "nonfatal": True,
+ # Controls whether die supports a nonfatal option; see PMS.
+ "nonfatal_die": False,
+ # Controls whether this EAPI supports prefix related variables/settings;
+ # prefix awareness basically. See PMS for full details.
+ "prefix_capable": False,
+ # Controls whether profile-defined IUSE injection is supported.
+ "profile_iuse_injection": False,
+ # Controls whether profiles support package.use.stable.* and use.stable.* files.
+ "profile_stable_use": False,
+ # Controls whether has_version/best_version supports --host-root option; see PMS.
+ "query_host_root": False,
+ # Controls whether has_version/best_version supports -b/-d/-r options; see PMS.
+ "query_deps": False,
+ # Controls whether SLOT values can actually be multi-part; see PMS EAPI 5.
+ # This is related to ABI breakage detection.
+ "sub_slotting": False,
+ # Controls whether REQUIRED_USE supports the ?? operator.
+ "required_use_one_of": False,
+ # Controls whether SRC_URI supports the '->' operator for url filename renaming.
+ "src_uri_renames": False,
+ # Controls whether SRC_URI supports fetch+ and mirror+ prefixes.
+ "src_uri_unrestrict": False,
+ # Controls whether or not use dependency atoms are able to control their enforced
+ # value relative to another; standard use deps just enforce either on or off; EAPIs
+ # supporting this allow syntax that can enforce (for example) X to be on if Y is on.
+ # See PMS EAPI 4 for full details.
+ "transitive_use_atoms": False,
+ # Controls whether or not DEFINED_PHASES is mandated for this EAPI; if so, then we can
+ # trust the cache definition and skip invoking those phases if they're not defined.
+ # If the EAPI didn't mandate this var, then we can do our inference, but generally will
+ # invoke the phase in the absence of that metadata var since we have no other choice.
+ "trust_defined_phases_cache": False,
+ # Controls whether unpack supports absolute paths; see PMS.
+ "unpack_absolute_paths": False,
+ # Controls whether unpack matches archive extensions case-insensitively; see PMS.
+ "unpack_case_insensitive": False,
+ # Regular expression matching valid update files
+ "update_regex": re.compile(r"^([1-4])Q-(\d{4})$"),
+ # Controls whether user patches are supported.
+ "user_patches": False,
+ }
)
-eapi_optionals = ImmutableDict({
- # Controls whether PROPERTIES and RESTRICT are accumulated across eclasses.
- "accumulate_properties_restrict": False,
-
- # Controls what version of bash compatibility to force; see PMS.
- "bash_compat": '3.2',
-
- # Controls whether -r is allowed for dodoc.
- "dodoc_allow_recursive": False,
-
- # Controls the language awareness of doman; see PMS.
- "doman_language_detect": False,
-
- # Controls whether -i18n option is allowed.
- "doman_language_override": False,
-
- # Controls whether dosym -r option is allowed.
- "dosym_relative": False,
-
- # Controls whether an ebuild_phase function exists for ebuild consumption.
- 'ebuild_phase_func': False,
-
- # Controls whether REPLACING vars are exported to ebuilds; see PMS.
- "exports_replacing": False,
-
- # Controls whether failglob is enabled globally; see PMS.
- "global_failglob": False,
-
- # Controls whether MERGE vars are exported to ebuilds; see PMS.
- "has_merge_type": False,
-
- # Controls whether PORTDIR and ECLASSDIR are exported to ebuilds; see PMS.
- "has_portdir": True,
-
- # Controls whether DESTTREE and INSDESTTREE are exported during src_install; see PMS.
- "has_desttree": True,
-
- # Controls whether ROOT, EROOT, D, and ED end with a trailing slash; see PMS.
- "trailing_slash": os.sep,
-
- # Controls whether SYSROOT, ESYSROOT, and BROOT are defined; see PMS.
- "has_sysroot": False,
-
- # Controls whether package.provided files in profiles are supported; see PMS.
- "profile_pkg_provided": True,
-
- # Controls whether package.mask and other files in profiles can
- # be directories; see PMS.
- "has_profile_data_dirs": False,
-
- # Controls whether REQUIRED_USE is supported, enforcing constraints on
- # allowed use configuration states.
- "has_required_use": False,
-
- # Controls whether USE dependency defaults are supported, see PMS.
- "has_use_dep_defaults": False,
-
- # Controls whether ENV_UNSET is supported, see PMS.
- "has_env_unset": False,
-
- # Controls whether AA env var is exported to ebuilds; this is a flattened
- # listing of each filename in SRC_URI.
- "has_AA": True,
-
- # Controls whether KV (kernel version; see PMS for details) is exported.
- "has_KV": True,
-
- # Controls whether or not pkgcore, or extensions loaded, actually fully
- # support this EAPI.
- 'is_supported': True,
-
- # Controls whether IUSE defaults are supported; see PMS.
- 'iuse_defaults': False,
-
- # Controls whether new* style bash functions can take their content input
- # from stdin, rather than an explicit ondisk file.
- 'new_reads_stdin': False,
-
- # Controls whether utilities die on failure; see PMS.
- 'nonfatal': True,
-
- # Controls whether die supports a nonfatal option; see PMS.
- "nonfatal_die": False,
-
- # Controls whether this EAPI supports prefix related variables/settings;
- # prefix awareness basically. See PMS for full details.
- "prefix_capable": False,
-
- # Controls whether profile-defined IUSE injection is supported.
- "profile_iuse_injection": False,
-
- # Controls whether profiles support package.use.stable.* and use.stable.* files.
- "profile_stable_use": False,
-
- # Controls whether has_version/best_version supports --host-root option; see PMS.
- 'query_host_root': False,
-
- # Controls whether has_version/best_version supports -b/-d/-r options; see PMS.
- 'query_deps': False,
-
- # Controls whether SLOT values can actually be multi-part; see PMS EAPI 5.
- # This is related to ABI breakage detection.
- 'sub_slotting': False,
-
- # Controls whether REQUIRED_USE supports the ?? operator.
- 'required_use_one_of': False,
-
- # Controls whether SRC_URI supports the '->' operator for url filename renaming.
- "src_uri_renames": False,
-
- # Controls whether SRC_URI supports fetch+ and mirror+ prefixes.
- "src_uri_unrestrict": False,
-
- # Controls whether or not use dependency atoms are able to control their enforced
- # value relative to another; standard use deps just enforce either on or off; EAPIs
- # supporting this allow syntax that can enforce (for example) X to be on if Y is on.
- # See PMS EAPI 4 for full details.
- "transitive_use_atoms": False,
-
- # Controls whether or not DEFINED_PHASES is mandated for this EAPI; if so, then we can
- # trust the cache definition and skip invoking those phases if they're not defined.
- # If the EAPI didn't mandate this var, then we can do our inference, but generally will
- # invoke the phase in the absence of that metadata var since we have no other choice.
- "trust_defined_phases_cache": False,
-
- # Controls whether unpack supports absolute paths; see PMS.
- "unpack_absolute_paths": False,
-
- # Controls whether unpack matches archive extensions case-insensitively; see PMS.
- "unpack_case_insensitive": False,
-
- # Regular expression matching valid update files
- "update_regex": re.compile(r'^([1-4])Q-(\d{4})$'),
-
- # Controls whether user patches are supported.
- "user_patches": False,
-})
-
class _optionals_cls(ImmutableDict):
@@ -168,18 +128,28 @@ class EAPI(metaclass=klass.immutable_instance):
known_eapis = WeakValueDictionary()
unknown_eapis = WeakValueDictionary()
- def __init__(self, magic, parent=None, phases=(), default_phases=(),
- mandatory_keys=(), dep_keys=(), metadata_keys=(),
- eclass_keys=(), tracked_attributes=(), archive_exts=(),
- optionals=None, ebd_env_options=None):
+ def __init__(
+ self,
+ magic,
+ parent=None,
+ phases=(),
+ default_phases=(),
+ mandatory_keys=(),
+ dep_keys=(),
+ metadata_keys=(),
+ eclass_keys=(),
+ tracked_attributes=(),
+ archive_exts=(),
+ optionals=None,
+ ebd_env_options=None,
+ ):
sf = object.__setattr__
sf(self, "_magic", str(magic))
sf(self, "_parent", parent)
sf(self, "phases", ImmutableDict(phases))
- sf(self, "phases_rev", ImmutableDict((v, k) for k, v in
- self.phases.items()))
+ sf(self, "phases_rev", ImmutableDict((v, k) for k, v in self.phases.items()))
# We track the phases that have a default implementation- this is
# primarily due to DEFINED_PHASES cache values not including it.
@@ -187,17 +157,27 @@ class EAPI(metaclass=klass.immutable_instance):
sf(self, "mandatory_keys", frozenset(mandatory_keys))
sf(self, "dep_keys", frozenset(dep_keys))
- sf(self, "metadata_keys", (
- self.mandatory_keys | self.dep_keys | frozenset(metadata_keys)))
+ sf(
+ self,
+ "metadata_keys",
+ (self.mandatory_keys | self.dep_keys | frozenset(metadata_keys)),
+ )
# variables that eclasses have access to (used by pkgcheck eclass inherit checks)
- sf(self, "eclass_keys", self.mandatory_keys | self.dep_keys | frozenset(eclass_keys))
- sf(self, "tracked_attributes", (
- frozenset(tracked_attributes) | frozenset(x.lower() for x in dep_keys)))
+ sf(
+ self,
+ "eclass_keys",
+ self.mandatory_keys | self.dep_keys | frozenset(eclass_keys),
+ )
+ sf(
+ self,
+ "tracked_attributes",
+ (frozenset(tracked_attributes) | frozenset(x.lower() for x in dep_keys)),
+ )
sf(self, "archive_exts", frozenset(archive_exts))
if optionals is None:
optionals = {}
- sf(self, 'options', _optionals_cls(optionals))
+ sf(self, "options", _optionals_cls(optionals))
if ebd_env_options is None:
ebd_env_options = ()
sf(self, "_ebd_env_options", ebd_env_options)
@@ -208,14 +188,18 @@ class EAPI(metaclass=klass.immutable_instance):
pre_existing = cls.known_eapis.get(eapi._magic)
if pre_existing is not None:
raise ValueError(
- f"EAPI '{eapi}' is already known/instantiated- {pre_existing!r}")
+ f"EAPI '{eapi}' is already known/instantiated- {pre_existing!r}"
+ )
- if (getattr(eapi.options, 'bash_compat', False) and
- bash_version() < eapi.options.bash_compat):
+ if (
+ getattr(eapi.options, "bash_compat", False)
+ and bash_version() < eapi.options.bash_compat
+ ):
# hard exit if the system doesn't have an adequate bash installed
raise SystemExit(
f"EAPI '{eapi}' requires >=bash-{eapi.options.bash_compat}, "
- f"system version: {bash_version()}")
+ f"system version: {bash_version()}"
+ )
cls.known_eapis[eapi._magic] = eapi
# generate EAPI bash libs when running from git repo
@@ -237,102 +221,131 @@ class EAPI(metaclass=klass.immutable_instance):
"""Internally implemented global EAPI specific functions to skip when exporting."""
# TODO: This is currently duplicated across EAPI objs, but
# instead could be cached to a class attr.
- funcs = pjoin(const.EBD_PATH, '.generated', 'funcs', 'global')
+ funcs = pjoin(const.EBD_PATH, ".generated", "funcs", "global")
if not os.path.exists(funcs):
# we're probably running in a cacheless git repo, so generate a cached version
try:
os.makedirs(os.path.dirname(funcs), exist_ok=True)
- with open(funcs, 'w') as f:
+ with open(funcs, "w") as f:
subprocess.run(
- [pjoin(const.EBD_PATH, 'generate_global_func_list')],
- cwd=const.EBD_PATH, stdout=f)
+ [pjoin(const.EBD_PATH, "generate_global_func_list")],
+ cwd=const.EBD_PATH,
+ stdout=f,
+ )
except (IOError, subprocess.CalledProcessError) as e:
raise Exception(
- f"failed to generate list of global EAPI '{self}' specific functions: {str(e)}")
+ f"failed to generate list of global EAPI '{self}' specific functions: {str(e)}"
+ )
- with open(funcs, 'r') as f:
+ with open(funcs, "r") as f:
return frozenset(line.strip() for line in f)
@klass.jit_attr
def bash_funcs(self):
"""Internally implemented EAPI specific functions to skip when exporting."""
- funcs = pjoin(const.EBD_PATH, '.generated', 'funcs', self._magic)
+ funcs = pjoin(const.EBD_PATH, ".generated", "funcs", self._magic)
if not os.path.exists(funcs):
# we're probably running in a cacheless git repo, so generate a cached version
try:
os.makedirs(os.path.dirname(funcs), exist_ok=True)
- with open(funcs, 'w') as f:
+ with open(funcs, "w") as f:
subprocess.run(
- [pjoin(const.EBD_PATH, 'generate_eapi_func_list'), self._magic],
- cwd=const.EBD_PATH, stdout=f)
+ [pjoin(const.EBD_PATH, "generate_eapi_func_list"), self._magic],
+ cwd=const.EBD_PATH,
+ stdout=f,
+ )
except (IOError, subprocess.CalledProcessError) as e:
raise Exception(
- f"failed to generate list of EAPI '{self}' specific functions: {str(e)}")
+ f"failed to generate list of EAPI '{self}' specific functions: {str(e)}"
+ )
- with open(funcs, 'r') as f:
+ with open(funcs, "r") as f:
return frozenset(line.strip() for line in f)
@klass.jit_attr
def bash_cmds_internal(self):
"""EAPI specific commands for this EAPI."""
- cmds = pjoin(const.EBD_PATH, '.generated', 'cmds', self._magic, 'internal')
+ cmds = pjoin(const.EBD_PATH, ".generated", "cmds", self._magic, "internal")
if not os.path.exists(cmds):
# we're probably running in a cacheless git repo, so generate a cached version
try:
os.makedirs(os.path.dirname(cmds), exist_ok=True)
- with open(cmds, 'w') as f:
+ with open(cmds, "w") as f:
subprocess.run(
- [pjoin(const.EBD_PATH, 'generate_eapi_cmd_list'), '-i', self._magic],
- cwd=const.EBD_PATH, stdout=f)
+ [
+ pjoin(const.EBD_PATH, "generate_eapi_cmd_list"),
+ "-i",
+ self._magic,
+ ],
+ cwd=const.EBD_PATH,
+ stdout=f,
+ )
except (IOError, subprocess.CalledProcessError) as e:
raise Exception(
- f'failed to generate list of EAPI {self} internal commands: {str(e)}')
+ f"failed to generate list of EAPI {self} internal commands: {str(e)}"
+ )
- with open(cmds, 'r') as f:
+ with open(cmds, "r") as f:
return frozenset(line.strip() for line in f)
@klass.jit_attr
def bash_cmds_deprecated(self):
"""EAPI specific commands deprecated for this EAPI."""
- cmds = pjoin(const.EBD_PATH, '.generated', 'cmds', self._magic, 'deprecated')
+ cmds = pjoin(const.EBD_PATH, ".generated", "cmds", self._magic, "deprecated")
if not os.path.exists(cmds):
# we're probably running in a cacheless git repo, so generate a cached version
try:
os.makedirs(os.path.dirname(cmds), exist_ok=True)
- with open(cmds, 'w') as f:
+ with open(cmds, "w") as f:
subprocess.run(
- [pjoin(const.EBD_PATH, 'generate_eapi_cmd_list'), '-d', self._magic],
- cwd=const.EBD_PATH, stdout=f)
+ [
+ pjoin(const.EBD_PATH, "generate_eapi_cmd_list"),
+ "-d",
+ self._magic,
+ ],
+ cwd=const.EBD_PATH,
+ stdout=f,
+ )
except (IOError, subprocess.CalledProcessError) as e:
raise Exception(
- f'failed to generate list of EAPI {self} deprecated commands: {str(e)}')
+ f"failed to generate list of EAPI {self} deprecated commands: {str(e)}"
+ )
- with open(cmds, 'r') as f:
+ with open(cmds, "r") as f:
return frozenset(line.strip() for line in f)
@klass.jit_attr
def bash_cmds_banned(self):
"""EAPI specific commands banned for this EAPI."""
- cmds = pjoin(const.EBD_PATH, '.generated', 'cmds', self._magic, 'banned')
+ cmds = pjoin(const.EBD_PATH, ".generated", "cmds", self._magic, "banned")
if not os.path.exists(cmds):
# we're probably running in a cacheless git repo, so generate a cached version
try:
os.makedirs(os.path.dirname(cmds), exist_ok=True)
- with open(cmds, 'w') as f:
+ with open(cmds, "w") as f:
subprocess.run(
- [pjoin(const.EBD_PATH, 'generate_eapi_cmd_list'), '-b', self._magic],
- cwd=const.EBD_PATH, stdout=f)
+ [
+ pjoin(const.EBD_PATH, "generate_eapi_cmd_list"),
+ "-b",
+ self._magic,
+ ],
+ cwd=const.EBD_PATH,
+ stdout=f,
+ )
except (IOError, subprocess.CalledProcessError) as e:
raise Exception(
- f'failed to generate list of EAPI {self} banned commands: {str(e)}')
+ f"failed to generate list of EAPI {self} banned commands: {str(e)}"
+ )
- with open(cmds, 'r') as f:
+ with open(cmds, "r") as f:
return frozenset(line.strip() for line in f)
def bash_libs(self):
"""Generate internally implemented EAPI specific bash libs required by the ebd."""
- eapi_global_lib = pjoin(const.EBD_PATH, '.generated', 'libs', self._magic, 'global')
- script = pjoin(const.EBD_PATH, 'generate_eapi_lib')
+ eapi_global_lib = pjoin(
+ const.EBD_PATH, ".generated", "libs", self._magic, "global"
+ )
+ script = pjoin(const.EBD_PATH, "generate_eapi_lib")
# skip generation when installing as the install process takes care of it
if not os.path.exists(script):
return
@@ -340,46 +353,53 @@ class EAPI(metaclass=klass.immutable_instance):
if not os.path.exists(eapi_global_lib):
try:
os.makedirs(os.path.dirname(eapi_global_lib), exist_ok=True)
- with open(eapi_global_lib, 'w') as f:
+ with open(eapi_global_lib, "w") as f:
subprocess.run(
- [script, '-s', 'global', self._magic],
- cwd=const.EBD_PATH, stdout=f)
+ [script, "-s", "global", self._magic],
+ cwd=const.EBD_PATH,
+ stdout=f,
+ )
except (IOError, subprocess.CalledProcessError) as e:
raise Exception(
- f"failed to generate EAPI '{self}' global lib: {str(e)}")
+ f"failed to generate EAPI '{self}' global lib: {str(e)}"
+ )
for phase in self.phases.values():
- eapi_lib = pjoin(const.EBD_PATH, '.generated', 'libs', self._magic, phase)
+ eapi_lib = pjoin(const.EBD_PATH, ".generated", "libs", self._magic, phase)
if not os.path.exists(eapi_lib):
try:
os.makedirs(os.path.dirname(eapi_lib), exist_ok=True)
- with open(eapi_lib, 'w') as f:
+ with open(eapi_lib, "w") as f:
subprocess.run(
- [script, '-s', phase, self._magic],
- cwd=const.EBD_PATH, stdout=f)
+ [script, "-s", phase, self._magic],
+ cwd=const.EBD_PATH,
+ stdout=f,
+ )
except (IOError, subprocess.CalledProcessError) as e:
- raise Exception(f"failed to generate EAPI '{self}' phase {phase} lib: {str(e)}")
+ raise Exception(
+ f"failed to generate EAPI '{self}' phase {phase} lib: {str(e)}"
+ )
@klass.jit_attr
def archive_exts_regex_pattern(self):
"""Regex pattern for supported archive extensions."""
- pattern = '|'.join(map(re.escape, self.archive_exts))
+ pattern = "|".join(map(re.escape, self.archive_exts))
if self.options.unpack_case_insensitive:
- return f'(?i:({pattern}))'
- return f'({pattern})'
+ return f"(?i:({pattern}))"
+ return f"({pattern})"
@klass.jit_attr
def archive_exts_regex(self):
"""Regex matching strings ending with supported archive extensions."""
- return re.compile(rf'{self.archive_exts_regex_pattern}$')
+ return re.compile(rf"{self.archive_exts_regex_pattern}$")
@klass.jit_attr
def valid_slot_regex(self):
"""Regex matching valid SLOT values."""
- valid_slot = r'[A-Za-z0-9_][A-Za-z0-9+_.-]*'
+ valid_slot = r"[A-Za-z0-9_][A-Za-z0-9+_.-]*"
if self.options.sub_slotting:
- valid_slot += rf'(/{valid_slot})?'
- return re.compile(rf'^{valid_slot}$')
+ valid_slot += rf"(/{valid_slot})?"
+ return re.compile(rf"^{valid_slot}$")
@klass.jit_attr
def atom_kls(self):
@@ -416,19 +436,19 @@ class EAPI(metaclass=klass.immutable_instance):
"""Phase to directory mapping for EAPI specific helpers to add to $PATH."""
paths = defaultdict(list)
for eapi in self.inherits:
- paths['global'].append(pjoin(const.EBUILD_HELPERS_PATH, 'common'))
+ paths["global"].append(pjoin(const.EBUILD_HELPERS_PATH, "common"))
helper_dir = pjoin(const.EBUILD_HELPERS_PATH, eapi._magic)
for dirpath, dirnames, filenames in os.walk(helper_dir):
if not filenames:
continue
if dirpath == helper_dir:
- paths['global'].append(dirpath)
+ paths["global"].append(dirpath)
else:
phase = os.path.basename(dirpath)
if phase in self.phases_rev:
paths[phase].append(dirpath)
else:
- raise ValueError(f'unknown phase: {phase!r}')
+ raise ValueError(f"unknown phase: {phase!r}")
return ImmutableDict((k, tuple(v)) for k, v in paths.items())
@klass.jit_attr
@@ -437,7 +457,7 @@ class EAPI(metaclass=klass.immutable_instance):
d = {}
for k in self._ebd_env_options:
d[f"PKGCORE_{k.upper()}"] = str(getattr(self.options, k)).lower()
- d["PKGCORE_EAPI_INHERITS"] = ' '.join(x._magic for x in self.inherits)
+ d["PKGCORE_EAPI_INHERITS"] = " ".join(x._magic for x in self.inherits)
d["EAPI"] = self._magic
return ImmutableDict(d)
@@ -445,19 +465,19 @@ class EAPI(metaclass=klass.immutable_instance):
def get_eapi(magic, suppress_unsupported=True):
"""Return EAPI object for a given identifier."""
if _valid_EAPI_regex.match(magic) is None:
- eapi_str = f" {magic!r}" if magic else ''
- raise ValueError(f'invalid EAPI{eapi_str}')
+ eapi_str = f" {magic!r}" if magic else ""
+ raise ValueError(f"invalid EAPI{eapi_str}")
eapi = EAPI.known_eapis.get(magic)
if eapi is None and suppress_unsupported:
eapi = EAPI.unknown_eapis.get(magic)
if eapi is None:
- eapi = EAPI(magic=magic, optionals={'is_supported': False})
+ eapi = EAPI(magic=magic, optionals={"is_supported": False})
EAPI.unknown_eapis[eapi._magic] = eapi
return eapi
def _shorten_phase_name(func_name):
- if func_name.startswith(('src_', 'pkg_')):
+ if func_name.startswith(("src_", "pkg_")):
return func_name[4:]
return func_name
@@ -472,58 +492,115 @@ def _combine_dicts(*mappings):
# Note that pkg_setup is forced by default since this is how our env setup occurs.
common_default_phases = tuple(
- _shorten_phase_name(x) for x in
- ("pkg_setup", "src_unpack", "src_compile", "src_test", "pkg_nofetch"))
+ _shorten_phase_name(x)
+ for x in ("pkg_setup", "src_unpack", "src_compile", "src_test", "pkg_nofetch")
+)
common_phases = (
- "pkg_setup", "pkg_config", "pkg_info", "pkg_nofetch",
- "pkg_prerm", "pkg_postrm", "pkg_preinst", "pkg_postinst",
- "src_unpack", "src_compile", "src_test", "src_install",
+ "pkg_setup",
+ "pkg_config",
+ "pkg_info",
+ "pkg_nofetch",
+ "pkg_prerm",
+ "pkg_postrm",
+ "pkg_preinst",
+ "pkg_postinst",
+ "src_unpack",
+ "src_compile",
+ "src_test",
+ "src_install",
)
common_mandatory_metadata_keys = (
- "DESCRIPTION", "HOMEPAGE", "IUSE",
- "KEYWORDS", "LICENSE", "SLOT", "SRC_URI",
+ "DESCRIPTION",
+ "HOMEPAGE",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "SLOT",
+ "SRC_URI",
)
common_dep_keys = (
- "DEPEND", "RDEPEND", "PDEPEND",
+ "DEPEND",
+ "RDEPEND",
+ "PDEPEND",
)
common_metadata_keys = (
- "RESTRICT", "PROPERTIES", "DEFINED_PHASES", "INHERIT", "INHERITED", "EAPI",
+ "RESTRICT",
+ "PROPERTIES",
+ "DEFINED_PHASES",
+ "INHERIT",
+ "INHERITED",
+ "EAPI",
)
common_eclass_keys = ("S", "RESTRICT", "PROPERTIES", "ECONF_SOURCE")
common_tracked_attributes = (
- "cflags", "cbuild", "chost", "ctarget", "cxxflags", "defined_phases",
- "description", "eapi", "distfiles", "fullslot", "homepage", "inherited",
- "iuse", "keywords", "ldflags", "license", "properties",
- "restrict", "source_repository",
+ "cflags",
+ "cbuild",
+ "chost",
+ "ctarget",
+ "cxxflags",
+ "defined_phases",
+ "description",
+ "eapi",
+ "distfiles",
+ "fullslot",
+ "homepage",
+ "inherited",
+ "iuse",
+ "keywords",
+ "ldflags",
+ "license",
+ "properties",
+ "restrict",
+ "source_repository",
)
common_archive_exts = (
".tar",
- ".tar.gz", ".tgz", ".tar.Z", ".tar.z",
- ".tar.bz2", ".tbz2", ".tbz",
- ".zip", ".ZIP", ".jar",
- ".gz", ".Z", ".z",
+ ".tar.gz",
+ ".tgz",
+ ".tar.Z",
+ ".tar.z",
+ ".tar.bz2",
+ ".tbz2",
+ ".tbz",
+ ".zip",
+ ".ZIP",
+ ".jar",
+ ".gz",
+ ".Z",
+ ".z",
".bz2",
- ".rar", ".RAR",
- ".lha", ".LHa", ".LHA", ".lzh",
- ".a", ".deb",
+ ".rar",
+ ".RAR",
+ ".lha",
+ ".LHa",
+ ".LHA",
+ ".lzh",
+ ".a",
+ ".deb",
".tar.lzma",
".lzma",
- ".7z", ".7Z",
+ ".7z",
+ ".7Z",
)
# Boolean variables exported to the bash side, e.g. ebuild_phase_func is
# exported as PKGCORE_EBUILD_PHASE_FUNC.
common_env_optionals = (
- "bash_compat", "ebuild_phase_func", "global_failglob",
- "new_reads_stdin", "nonfatal", "nonfatal_die",
- "has_desttree", "accumulate_properties_restrict",
+ "bash_compat",
+ "ebuild_phase_func",
+ "global_failglob",
+ "new_reads_stdin",
+ "nonfatal",
+ "nonfatal_die",
+ "has_desttree",
+ "accumulate_properties_restrict",
)
eapi0 = EAPI.register(
@@ -552,9 +629,12 @@ eapi1 = EAPI.register(
eclass_keys=eapi0.eclass_keys,
tracked_attributes=eapi0.tracked_attributes,
archive_exts=eapi0.archive_exts,
- optionals=_combine_dicts(eapi0.options, dict(
- iuse_defaults=True,
- )),
+ optionals=_combine_dicts(
+ eapi0.options,
+ dict(
+ iuse_defaults=True,
+ ),
+ ),
ebd_env_options=eapi0._ebd_env_options,
)
@@ -562,20 +642,25 @@ eapi2 = EAPI.register(
magic="2",
parent=eapi1,
phases=_combine_dicts(
- eapi1.phases, _mk_phase_func_map("src_prepare", "src_configure")),
+ eapi1.phases, _mk_phase_func_map("src_prepare", "src_configure")
+ ),
default_phases=eapi1.default_phases.union(
- list(map(_shorten_phase_name, ["src_prepare", "src_configure"]))),
+ list(map(_shorten_phase_name, ["src_prepare", "src_configure"]))
+ ),
mandatory_keys=eapi1.mandatory_keys,
dep_keys=eapi1.dep_keys,
metadata_keys=eapi1.metadata_keys,
eclass_keys=eapi1.eclass_keys,
tracked_attributes=eapi1.tracked_attributes,
archive_exts=eapi1.archive_exts,
- optionals=_combine_dicts(eapi1.options, dict(
- doman_language_detect=True,
- transitive_use_atoms=True,
- src_uri_renames=True,
- )),
+ optionals=_combine_dicts(
+ eapi1.options,
+ dict(
+ doman_language_detect=True,
+ transitive_use_atoms=True,
+ src_uri_renames=True,
+ ),
+ ),
ebd_env_options=eapi1._ebd_env_options,
)
@@ -587,12 +672,15 @@ eapi3 = EAPI.register(
mandatory_keys=eapi2.mandatory_keys,
dep_keys=eapi2.dep_keys,
metadata_keys=eapi2.metadata_keys,
- eclass_keys=eapi2.eclass_keys | frozenset(['EPREFIX', 'ED', 'EROOT']),
+ eclass_keys=eapi2.eclass_keys | frozenset(["EPREFIX", "ED", "EROOT"]),
tracked_attributes=eapi2.tracked_attributes,
archive_exts=eapi2.archive_exts | frozenset([".tar.xz", ".xz"]),
- optionals=_combine_dicts(eapi2.options, dict(
- prefix_capable=True,
- )),
+ optionals=_combine_dicts(
+ eapi2.options,
+ dict(
+ prefix_capable=True,
+ ),
+ ),
ebd_env_options=eapi2._ebd_env_options,
)
@@ -600,24 +688,28 @@ eapi4 = EAPI.register(
magic="4",
parent=eapi3,
phases=_combine_dicts(eapi3.phases, _mk_phase_func_map("pkg_pretend")),
- default_phases=eapi3.default_phases.union([_shorten_phase_name('src_install')]),
+ default_phases=eapi3.default_phases.union([_shorten_phase_name("src_install")]),
mandatory_keys=eapi3.mandatory_keys,
dep_keys=eapi3.dep_keys,
metadata_keys=eapi3.metadata_keys | frozenset(["REQUIRED_USE"]),
eclass_keys=eapi3.eclass_keys | frozenset(["DOCS", "REQUIRED_USE"]),
tracked_attributes=eapi3.tracked_attributes,
archive_exts=eapi3.archive_exts,
- optionals=_combine_dicts(eapi3.options, dict(
- dodoc_allow_recursive=True,
- doman_language_override=True,
- nonfatal=False,
- exports_replacing=True,
- has_AA=False, has_KV=False,
- has_merge_type=True,
- has_required_use=True,
- has_use_dep_defaults=True,
- trust_defined_phases_cache=True,
- )),
+ optionals=_combine_dicts(
+ eapi3.options,
+ dict(
+ dodoc_allow_recursive=True,
+ doman_language_override=True,
+ nonfatal=False,
+ exports_replacing=True,
+ has_AA=False,
+ has_KV=False,
+ has_merge_type=True,
+ has_required_use=True,
+ has_use_dep_defaults=True,
+ trust_defined_phases_cache=True,
+ ),
+ ),
ebd_env_options=eapi3._ebd_env_options,
)
@@ -632,15 +724,18 @@ eapi5 = EAPI.register(
eclass_keys=eapi4.eclass_keys,
tracked_attributes=eapi4.tracked_attributes | frozenset(["iuse_effective"]),
archive_exts=eapi4.archive_exts,
- optionals=_combine_dicts(eapi4.options, dict(
- ebuild_phase_func=True,
- profile_iuse_injection=True,
- profile_stable_use=True,
- query_host_root=True,
- new_reads_stdin=True,
- required_use_one_of=True,
- sub_slotting=True,
- )),
+ optionals=_combine_dicts(
+ eapi4.options,
+ dict(
+ ebuild_phase_func=True,
+ profile_iuse_injection=True,
+ profile_stable_use=True,
+ query_host_root=True,
+ new_reads_stdin=True,
+ required_use_one_of=True,
+ sub_slotting=True,
+ ),
+ ),
ebd_env_options=eapi4._ebd_env_options,
)
@@ -655,14 +750,17 @@ eapi6 = EAPI.register(
eclass_keys=eapi5.eclass_keys | frozenset(["HTML_DOCS", "PATCHES"]),
tracked_attributes=eapi5.tracked_attributes | frozenset(["user_patches"]),
archive_exts=eapi5.archive_exts | frozenset([".txz"]),
- optionals=_combine_dicts(eapi5.options, dict(
- global_failglob=True,
- nonfatal_die=True,
- unpack_absolute_paths=True,
- unpack_case_insensitive=True,
- user_patches=True,
- bash_compat='4.2',
- )),
+ optionals=_combine_dicts(
+ eapi5.options,
+ dict(
+ global_failglob=True,
+ nonfatal_die=True,
+ unpack_absolute_paths=True,
+ unpack_case_insensitive=True,
+ user_patches=True,
+ bash_compat="4.2",
+ ),
+ ),
ebd_env_options=eapi5._ebd_env_options,
)
@@ -677,17 +775,20 @@ eapi7 = EAPI.register(
eclass_keys=eapi6.eclass_keys,
tracked_attributes=eapi6.tracked_attributes,
archive_exts=eapi6.archive_exts,
- optionals=_combine_dicts(eapi6.options, dict(
- has_profile_data_dirs=True,
- has_portdir=False,
- has_desttree=False,
- profile_pkg_provided=False,
- query_host_root=False,
- query_deps=True,
- has_sysroot=True,
- has_env_unset=True,
- trailing_slash='',
- )),
+ optionals=_combine_dicts(
+ eapi6.options,
+ dict(
+ has_profile_data_dirs=True,
+ has_portdir=False,
+ has_desttree=False,
+ profile_pkg_provided=False,
+ query_host_root=False,
+ query_deps=True,
+ has_sysroot=True,
+ has_env_unset=True,
+ trailing_slash="",
+ ),
+ ),
ebd_env_options=eapi6._ebd_env_options,
)
@@ -701,17 +802,28 @@ eapi8 = EAPI.register(
metadata_keys=eapi7.metadata_keys,
eclass_keys=eapi7.eclass_keys,
tracked_attributes=eapi7.tracked_attributes,
- archive_exts=eapi7.archive_exts - frozenset([
- ".rar", ".RAR",
- ".lha", ".LHa", ".LHA", ".lzh",
- ".7z", ".7Z",
- ]),
- optionals=_combine_dicts(eapi7.options, dict(
- accumulate_properties_restrict=True,
- bash_compat='5.0',
- dosym_relative=True,
- src_uri_unrestrict=True,
- update_regex=re.compile(r'^[^.]'),
- )),
+ archive_exts=eapi7.archive_exts
+ - frozenset(
+ [
+ ".rar",
+ ".RAR",
+ ".lha",
+ ".LHa",
+ ".LHA",
+ ".lzh",
+ ".7z",
+ ".7Z",
+ ]
+ ),
+ optionals=_combine_dicts(
+ eapi7.options,
+ dict(
+ accumulate_properties_restrict=True,
+ bash_compat="5.0",
+ dosym_relative=True,
+ src_uri_unrestrict=True,
+ update_regex=re.compile(r"^[^.]"),
+ ),
+ ),
ebd_env_options=eapi7._ebd_env_options,
)
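
As a quick illustration of how these registrations are consumed, per-EAPI option values can be inspected through get_eapi; a minimal sketch, assuming pkgcore is importable:

    from pkgcore.ebuild.eapi import get_eapi

    eapi = get_eapi("8")
    print(eapi.options.prefix_capable)    # True, inherited from the EAPI 3 registration
    print(eapi.options.trailing_slash)    # "" since EAPI 7 drops the trailing slash
    print(eapi.options.bash_compat)       # "5.0" per the EAPI 8 registration
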
diff --git a/src/pkgcore/ebuild/ebd.py b/src/pkgcore/ebuild/ebd.py
index 7978c1f6e..10e26fb91 100644
--- a/src/pkgcore/ebuild/ebd.py
+++ b/src/pkgcore/ebuild/ebd.py
@@ -6,8 +6,14 @@ api, for example per phase methods.
"""
__all__ = (
- "ebd", "setup_mixin", "install_op", "uninstall_op", "replace_op",
- "buildable", "binpkg_localize")
+ "ebd",
+ "setup_mixin",
+ "install_op",
+ "uninstall_op",
+ "replace_op",
+ "buildable",
+ "binpkg_localize",
+)
import errno
import os
@@ -26,7 +32,12 @@ from snakeoil.contexts import chdir
from snakeoil.currying import post_curry, pretty_docs
from snakeoil.fileutils import touch
from snakeoil.osutils import ensure_dirs, listdir_files, normpath, pjoin
-from snakeoil.process.spawn import is_sandbox_capable, is_userpriv_capable, spawn, spawn_bash
+from snakeoil.process.spawn import (
+ is_sandbox_capable,
+ is_userpriv_capable,
+ spawn,
+ spawn_bash,
+)
from snakeoil.sequences import iflatten_instance, iter_stable_unique
from .. import const
@@ -35,14 +46,26 @@ from ..operations import format, observer
from ..os_data import portage_gid, portage_uid, xargs
from ..package.mutated import MutatedPkg
from . import ebd_ipc, ebuild_built, errors
-from .processor import (ProcessorError, chuck_UnhandledCommand, expected_ebuild_env,
- inherit_handler, release_ebuild_processor, request_ebuild_processor)
+from .processor import (
+ ProcessorError,
+ chuck_UnhandledCommand,
+ expected_ebuild_env,
+ inherit_handler,
+ release_ebuild_processor,
+ request_ebuild_processor,
+)
class ebd:
-
- def __init__(self, pkg, initial_env=None, env_data_source=None,
- observer=None, clean=True, tmp_offset=None):
+ def __init__(
+ self,
+ pkg,
+ initial_env=None,
+ env_data_source=None,
+ observer=None,
+ clean=True,
+ tmp_offset=None,
+ ):
"""
:param pkg:
:class:`pkgcore.ebuild.ebuild_src.package`
@@ -81,29 +104,35 @@ class ebd:
use_expand, value = m.groups()
d[use_expand.upper()].append(value)
for k, v in d.items():
- self.env[k] = ' '.join(sorted(v))
+ self.env[k] = " ".join(sorted(v))
self.bashrc = self.env.pop("bashrc", ())
self.features = set(x.lower() for x in self.domain.features)
- self.env["FEATURES"] = ' '.join(sorted(self.features))
+ self.env["FEATURES"] = " ".join(sorted(self.features))
self.set_path_vars(self.env, self.pkg, self.domain)
# internally implemented EAPI specific functions to skip when exporting env
- self.env["PKGCORE_EAPI_FUNCS"] = ' '.join(self.eapi.bash_funcs)
+ self.env["PKGCORE_EAPI_FUNCS"] = " ".join(self.eapi.bash_funcs)
self.env_data_source = env_data_source
- if (env_data_source is not None and
- not isinstance(env_data_source, data_source.base)):
+ if env_data_source is not None and not isinstance(
+ env_data_source, data_source.base
+ ):
raise TypeError(
"env_data_source must be None, or a pkgcore.data_source.base "
- f"derivative: {env_data_source.__class__}: {env_data_source}")
+ f"derivative: {env_data_source.__class__}: {env_data_source}"
+ )
- iuse_effective_regex = f"^({'|'.join(re.escape(x) for x in pkg.iuse_effective)})$"
- self.env["PKGCORE_IUSE_EFFECTIVE"] = iuse_effective_regex.replace("\\.\\*", ".*")
+ iuse_effective_regex = (
+ f"^({'|'.join(re.escape(x) for x in pkg.iuse_effective)})$"
+ )
+ self.env["PKGCORE_IUSE_EFFECTIVE"] = iuse_effective_regex.replace(
+ "\\.\\*", ".*"
+ )
expected_ebuild_env(pkg, self.env, env_source_override=self.env_data_source)
- self.env["PKGCORE_FINALIZED_RESTRICT"] = ' '.join(str(x) for x in pkg.restrict)
+ self.env["PKGCORE_FINALIZED_RESTRICT"] = " ".join(str(x) for x in pkg.restrict)
self.restrict = pkg.restrict
@@ -115,9 +144,13 @@ class ebd:
if "PORT_LOGDIR" in self.env:
self.logging = pjoin(
self.env["PORT_LOGDIR"],
- "%s:%s:%s.log" % (
- pkg.cpvstr, self.__class__.__name__,
- time.strftime("%Y%m%d-%H%M%S", time.localtime())))
+ "%s:%s:%s.log"
+ % (
+ pkg.cpvstr,
+ self.__class__.__name__,
+ time.strftime("%Y%m%d-%H%M%S", time.localtime()),
+ ),
+ )
del self.env["PORT_LOGDIR"]
else:
self.logging = False
@@ -127,12 +160,11 @@ class ebd:
# wipe variables listed in ENV_UNSET for supporting EAPIs
if self.eapi.options.has_env_unset:
- for x in self.env.pop('ENV_UNSET', ()):
+ for x in self.env.pop("ENV_UNSET", ()):
self.env.pop(x, None)
# wipe any remaining internal settings from the exported env
- wipes = [k for k, v in self.env.items()
- if not isinstance(v, str)]
+ wipes = [k for k, v in self.env.items() if not isinstance(v, str)]
for k in wipes:
del self.env[k]
@@ -143,34 +175,32 @@ class ebd:
# various IPC command support
self._ipc_helpers = {
# bash helpers
- 'doins': ebd_ipc.Doins(self),
- 'dodoc': ebd_ipc.Dodoc(self),
- 'dohtml': ebd_ipc.Dohtml(self),
- 'doinfo': ebd_ipc.Doinfo(self),
- 'dodir': ebd_ipc.Dodir(self),
- 'doexe': ebd_ipc.Doexe(self),
- 'dobin': ebd_ipc.Dobin(self),
- 'dosbin': ebd_ipc.Dosbin(self),
- 'dolib': ebd_ipc.Dolib(self),
- 'dolib.so': ebd_ipc.Dolib_so(self),
- 'dolib.a': ebd_ipc.Dolib_a(self),
- 'doman': ebd_ipc.Doman(self),
- 'domo': ebd_ipc.Domo(self),
- 'dosym': ebd_ipc.Dosym(self),
- 'dohard': ebd_ipc.Dohard(self),
- 'keepdir': ebd_ipc.Keepdir(self),
-
+ "doins": ebd_ipc.Doins(self),
+ "dodoc": ebd_ipc.Dodoc(self),
+ "dohtml": ebd_ipc.Dohtml(self),
+ "doinfo": ebd_ipc.Doinfo(self),
+ "dodir": ebd_ipc.Dodir(self),
+ "doexe": ebd_ipc.Doexe(self),
+ "dobin": ebd_ipc.Dobin(self),
+ "dosbin": ebd_ipc.Dosbin(self),
+ "dolib": ebd_ipc.Dolib(self),
+ "dolib.so": ebd_ipc.Dolib_so(self),
+ "dolib.a": ebd_ipc.Dolib_a(self),
+ "doman": ebd_ipc.Doman(self),
+ "domo": ebd_ipc.Domo(self),
+ "dosym": ebd_ipc.Dosym(self),
+ "dohard": ebd_ipc.Dohard(self),
+ "keepdir": ebd_ipc.Keepdir(self),
# bash functions
- 'has_version': ebd_ipc.Has_Version(self),
- 'best_version': ebd_ipc.Best_Version(self),
- 'unpack': ebd_ipc.Unpack(self),
- 'eapply': ebd_ipc.Eapply(self),
- 'eapply_user': ebd_ipc.Eapply_User(self),
- 'docompress': ebd_ipc.Docompress(self),
- 'dostrip': ebd_ipc.Dostrip(self),
-
+ "has_version": ebd_ipc.Has_Version(self),
+ "best_version": ebd_ipc.Best_Version(self),
+ "unpack": ebd_ipc.Unpack(self),
+ "eapply": ebd_ipc.Eapply(self),
+ "eapply_user": ebd_ipc.Eapply_User(self),
+ "docompress": ebd_ipc.Docompress(self),
+ "dostrip": ebd_ipc.Dostrip(self),
# internals
- 'filter_env': ebd_ipc.FilterEnv(self),
+ "filter_env": ebd_ipc.FilterEnv(self),
}
def start(self):
@@ -187,19 +217,20 @@ class ebd:
def set_path_vars(env, pkg, domain):
# XXX: note this is just EAPI 3 and EAPI 7 compatibility; not full prefix, soon..
trailing_slash = pkg.eapi.options.trailing_slash
- env['ROOT'] = domain.root.rstrip(os.sep) + trailing_slash
- env['PKGCORE_PREFIX_SUPPORT'] = 'false'
+ env["ROOT"] = domain.root.rstrip(os.sep) + trailing_slash
+ env["PKGCORE_PREFIX_SUPPORT"] = "false"
if pkg.eapi.options.prefix_capable:
- env['EPREFIX'] = domain.prefix.rstrip(os.sep)
- env['EROOT'] = (
- pjoin(env['ROOT'].rstrip(trailing_slash), env['EPREFIX'])
- + trailing_slash)
- env['PKGCORE_PREFIX_SUPPORT'] = 'true'
+ env["EPREFIX"] = domain.prefix.rstrip(os.sep)
+ env["EROOT"] = (
+ pjoin(env["ROOT"].rstrip(trailing_slash), env["EPREFIX"])
+ + trailing_slash
+ )
+ env["PKGCORE_PREFIX_SUPPORT"] = "true"
if pkg.eapi.options.has_sysroot:
- env['SYSROOT'] = env['ROOT']
- env['ESYSROOT'] = pjoin(env['SYSROOT'], env['EPREFIX'])
- env['BROOT'] = env['EPREFIX']
+ env["SYSROOT"] = env["ROOT"]
+ env["ESYSROOT"] = pjoin(env["SYSROOT"], env["EPREFIX"])
+ env["BROOT"] = env["EPREFIX"]
def _set_op_vars(self, tmp_offset):
# don't fool with this, without fooling with setup.
@@ -208,11 +239,13 @@ class ebd:
self.tmpdir = pjoin(self.tmpdir, tmp_offset.strip(os.sep))
self.builddir = pjoin(self.tmpdir, self.env["CATEGORY"], self.env["PF"])
- for x, y in (("T", "temp"),
- ("WORKDIR", "work"),
- ("D", "image"),
- ("HOME", "homedir"),
- ("PKGCORE_EMPTYDIR", "empty")):
+ for x, y in (
+ ("T", "temp"),
+ ("WORKDIR", "work"),
+ ("D", "image"),
+ ("HOME", "homedir"),
+ ("PKGCORE_EMPTYDIR", "empty"),
+ ):
self.env[x] = normpath(pjoin(self.builddir, y))
self.env["D"] += self.eapi.options.trailing_slash
self.env["PORTAGE_LOGFILE"] = normpath(pjoin(self.env["T"], "build.log"))
@@ -220,12 +253,13 @@ class ebd:
# XXX: Note that this is just EAPI 3 support, not yet prefix
# full awareness.
if self.pkg.eapi.options.prefix_capable:
- self.env["ED"] = normpath(
- pjoin(self.env["D"].rstrip(os.sep), self.env["EPREFIX"])) \
- + self.eapi.options.trailing_slash
+ self.env["ED"] = (
+ normpath(pjoin(self.env["D"].rstrip(os.sep), self.env["EPREFIX"]))
+ + self.eapi.options.trailing_slash
+ )
# temporary install dir correct for all EAPIs
- self.ED = self.env.get('ED', self.env['D'])
+ self.ED = self.env.get("ED", self.env["D"])
def get_env_source(self):
with open(pjoin(self.env["T"], "environment"), "rb") as f:
@@ -234,9 +268,10 @@ class ebd:
def _setup_env_data_source(self):
if not ensure_dirs(self.env["T"], mode=0o770, gid=portage_gid, minimal=True):
raise format.FailedDirectory(
- self.env['T'],
- "%s doesn't fulfill minimum mode %o and gid %i" % (
- self.env['T'], 0o770, portage_gid))
+ self.env["T"],
+ "%s doesn't fulfill minimum mode %o and gid %i"
+ % (self.env["T"], 0o770, portage_gid),
+ )
if self.env_data_source is not None:
fp = pjoin(self.env["T"], "environment")
@@ -252,25 +287,29 @@ class ebd:
self._setup_merge_type(phase, env)
# add phase specific helper paths to PATH if they exist
- ebuild_phase = self.eapi.phases.get(phase, '')
+ ebuild_phase = self.eapi.phases.get(phase, "")
if ebuild_phase in self.eapi.helpers:
- path = chain.from_iterable((
- const.PATH_FORCED_PREPEND,
- self.pkg.eapi.helpers.get('global', ()),
- self.eapi.helpers[ebuild_phase],
- os.environ.get('PATH', '').split(os.pathsep),
- ))
- env['PATH'] = os.pathsep.join(path)
+ path = chain.from_iterable(
+ (
+ const.PATH_FORCED_PREPEND,
+ self.pkg.eapi.helpers.get("global", ()),
+ self.eapi.helpers[ebuild_phase],
+ os.environ.get("PATH", "").split(os.pathsep),
+ )
+ )
+ env["PATH"] = os.pathsep.join(path)
def _setup_merge_type(self, phase, env):
# only allowed in pkg_ phases.
- if (not self.eapi.phases.get(phase, "").startswith("pkg_") and
- not phase == 'setup-binpkg'):
+ if (
+ not self.eapi.phases.get(phase, "").startswith("pkg_")
+ and not phase == "setup-binpkg"
+ ):
return
# note all pkgs have this attribute
- is_source = getattr(self.pkg, '_is_from_source', True)
+ is_source = getattr(self.pkg, "_is_from_source", True)
if self.eapi.options.has_merge_type:
env["MERGE_TYPE"] = (is_source and "source") or "binary"
@@ -281,11 +320,13 @@ class ebd:
env["EMERGE_FROM"] = (is_source and "ebuild") or "binary"
def setup_logging(self):
- if self.logging and not ensure_dirs(os.path.dirname(self.logging),
- mode=0o2770, gid=portage_gid):
+ if self.logging and not ensure_dirs(
+ os.path.dirname(self.logging), mode=0o2770, gid=portage_gid
+ ):
raise format.FailedDirectory(
os.path.dirname(self.logging),
- "PORT_LOGDIR, desired mode 02770 and gid %i" % portage_gid)
+ "PORT_LOGDIR, desired mode 02770 and gid %i" % portage_gid,
+ )
def setup_workdir(self):
# ensure dirs.
@@ -293,13 +334,22 @@ class ebd:
if not ensure_dirs(self.env[k], mode=0o4770, gid=portage_gid, minimal=True):
raise format.FailedDirectory(
self.env[k],
- "%s doesn't fulfill minimum mode %o and gid %i" % (k, 0o770, portage_gid))
+ "%s doesn't fulfill minimum mode %o and gid %i"
+ % (k, 0o770, portage_gid),
+ )
# XXX hack, just 'til pkgcore controls these directories
- if (os.stat(self.env[k]).st_mode & 0o2000):
+ if os.stat(self.env[k]).st_mode & 0o2000:
logger.warning(f"{self.env[k]} ( {k} ) is setgid")
- def _generic_phase(self, phase, userpriv, sandbox, extra_handlers={},
- failure_allowed=False, suppress_bashrc=False):
+ def _generic_phase(
+ self,
+ phase,
+ userpriv,
+ sandbox,
+ extra_handlers={},
+ failure_allowed=False,
+ suppress_bashrc=False,
+ ):
"""
:param phase: phase to execute
:param userpriv: will we drop to
@@ -310,9 +360,9 @@ class ebd:
if phase not in self.pkg.mandatory_phases:
# TODO(ferringb): Note the preinst hack; this will be removed once dyn_pkg_preinst
# is dead in full (currently selinux labelling and suidctl are run from there)
- if phase != 'preinst':
+ if phase != "preinst":
return True
- if 'selinux' not in self.features and 'suidctl' not in self.features:
+ if "selinux" not in self.features and "suidctl" not in self.features:
return True
shutil.rmtree(self.env["PKGCORE_EMPTYDIR"], ignore_errors=True)
@@ -326,9 +376,15 @@ class ebd:
if not suppress_bashrc:
extra_handlers.setdefault("request_bashrcs", self._request_bashrcs)
return run_generic_phase(
- self.pkg, phase, self.env, userpriv, sandbox,
- extra_handlers=extra_handlers, failure_allowed=failure_allowed,
- logging=self.logging)
+ self.pkg,
+ phase,
+ self.env,
+ userpriv,
+ sandbox,
+ extra_handlers=extra_handlers,
+ failure_allowed=failure_allowed,
+ logging=self.logging,
+ )
def _request_bashrcs(self, ebd):
for source in self.domain.get_package_bashrcs(self.pkg):
@@ -338,21 +394,23 @@ class ebd:
raise NotImplementedError
else:
chuck_UnhandledCommand(
- ebd, "bashrc request: unable to process bashrc "
- f"due to source '{source}' due to lacking usable get_*")
+ ebd,
+ "bashrc request: unable to process bashrc "
+ f"due to source '{source}' due to lacking usable get_*",
+ )
if not ebd.expect("next"):
chuck_UnhandledCommand(
- ebd, "bashrc transfer, didn't receive 'next' response. "
- "failure?")
+ ebd, "bashrc transfer, didn't receive 'next' response. " "failure?"
+ )
ebd.write("end_request")
def set_is_replacing(self, *pkgs):
if self.eapi.options.exports_replacing:
- self.env['REPLACING_VERSIONS'] = " ".join(pkg.PVR for pkg in pkgs)
+ self.env["REPLACING_VERSIONS"] = " ".join(pkg.PVR for pkg in pkgs)
def set_is_being_replaced_by(self, pkg=None):
if self.eapi.options.exports_replacing and pkg is not None:
- self.env['REPLACED_BY_VERSION'] = pkg.PVR
+ self.env["REPLACED_BY_VERSION"] = pkg.PVR
def cleanup(self, disable_observer=False, force=False):
if not force:
@@ -379,7 +437,8 @@ class ebd:
raise
except EnvironmentError as e:
raise format.GenericBuildError(
- f"clean: Caught exception while cleansing: {e}") from e
+ f"clean: Caught exception while cleansing: {e}"
+ ) from e
return True
def feat_or_bool(self, name, extra_env=None):
@@ -404,7 +463,7 @@ class ebd:
def __stage_step_callback__(self, stage):
try:
- touch(pjoin(self.builddir, f'.{stage}'))
+ touch(pjoin(self.builddir, f".{stage}"))
except EnvironmentError:
# we really don't care...
pass
@@ -412,7 +471,8 @@ class ebd:
def _reload_state(self):
try:
self.__set_stage_state__(
- [x[1:] for x in listdir_files(self.builddir) if x.startswith(".")])
+ [x[1:] for x in listdir_files(self.builddir) if x.startswith(".")]
+ )
except EnvironmentError as e:
if e.errno not in (errno.ENOTDIR, errno.ENOENT):
raise
@@ -433,14 +493,27 @@ class setup_mixin:
phase_name = setup_phase_override
if self.setup_is_for_src:
- additional_commands["request_inherit"] = partial(inherit_handler, self.eclass_cache)
+ additional_commands["request_inherit"] = partial(
+ inherit_handler, self.eclass_cache
+ )
return self._generic_phase(
- phase_name, False, True, extra_handlers=additional_commands)
-
-
-def run_generic_phase(pkg, phase, env, userpriv, sandbox, fd_pipes=None,
- extra_handlers=None, failure_allowed=False, logging=None, **kwargs):
+ phase_name, False, True, extra_handlers=additional_commands
+ )
+
+
+def run_generic_phase(
+ pkg,
+ phase,
+ env,
+ userpriv,
+ sandbox,
+ fd_pipes=None,
+ extra_handlers=None,
+ failure_allowed=False,
+ logging=None,
+ **kwargs,
+):
"""
:param phase: phase to execute
:param env: environment mapping for the phase
@@ -460,12 +533,14 @@ def run_generic_phase(pkg, phase, env, userpriv, sandbox, fd_pipes=None,
userpriv = userpriv and is_userpriv_capable()
sandbox = sandbox and is_sandbox_capable()
- tmpdir = kwargs.get('tmpdir', env.get('T', None))
+ tmpdir = kwargs.get("tmpdir", env.get("T", None))
if env is None:
env = expected_ebuild_env(pkg)
- ebd = request_ebuild_processor(userpriv=userpriv, sandbox=sandbox, fd_pipes=fd_pipes)
+ ebd = request_ebuild_processor(
+ userpriv=userpriv, sandbox=sandbox, fd_pipes=fd_pipes
+ )
# this is a bit of a hack; used until ebd accepts observers that handle
# the output redirection on its own. Primary relevance is when
# stdout/stderr are pointed at a file; we leave buffering on, just
@@ -473,11 +548,18 @@ def run_generic_phase(pkg, phase, env, userpriv, sandbox, fd_pipes=None,
sys.stdout.flush()
sys.stderr.flush()
try:
- if not ebd.run_phase(phase, env, tmpdir=tmpdir, sandbox=sandbox,
- logging=logging, additional_commands=extra_handlers):
+ if not ebd.run_phase(
+ phase,
+ env,
+ tmpdir=tmpdir,
+ sandbox=sandbox,
+ logging=logging,
+ additional_commands=extra_handlers,
+ ):
if not failure_allowed:
raise format.GenericBuildError(
- phase + ": Failed building (False/0 return from handler)")
+ phase + ": Failed building (False/0 return from handler)"
+ )
logger.warning(f"executing phase {phase}: execution failed, ignoring")
except Exception as e:
if isinstance(e, ebd_ipc.IpcError):
@@ -500,7 +582,8 @@ def run_generic_phase(pkg, phase, env, userpriv, sandbox, fd_pipes=None,
elif isinstance(e, IGNORED_EXCEPTIONS + (format.GenericBuildError,)):
raise
raise format.GenericBuildError(
- f"Executing phase {phase}: Caught exception: {e}") from e
+ f"Executing phase {phase}: Caught exception: {e}"
+ ) from e
release_ebuild_processor(ebd)
return True
@@ -512,17 +595,26 @@ class install_op(ebd, format.install):
def __init__(self, domain, pkg, observer):
format.install.__init__(self, domain, pkg, observer)
ebd.__init__(
- self, pkg, observer=observer, initial_env=self.domain.settings,
- env_data_source=pkg.environment, clean=False)
+ self,
+ pkg,
+ observer=observer,
+ initial_env=self.domain.settings,
+ env_data_source=pkg.environment,
+ clean=False,
+ )
preinst = pretty_docs(
observer.decorate_build_method("preinst")(
- post_curry(ebd._generic_phase, "preinst", False, False)),
- "run the postinst phase")
+ post_curry(ebd._generic_phase, "preinst", False, False)
+ ),
+ "run the postinst phase",
+ )
postinst = pretty_docs(
observer.decorate_build_method("postinst")(
- post_curry(ebd._generic_phase, "postinst", False, False)),
- "run the postinst phase")
+ post_curry(ebd._generic_phase, "postinst", False, False)
+ ),
+ "run the postinst phase",
+ )
def add_triggers(self, domain_op, engine):
self.new_pkg.add_format_triggers(domain_op, self, engine)
@@ -534,20 +626,27 @@ class uninstall_op(ebd, format.uninstall):
def __init__(self, domain, pkg, observer):
format.uninstall.__init__(self, domain, pkg, observer)
ebd.__init__(
- self, pkg, observer=observer, initial_env=self.domain.settings,
- env_data_source=pkg.environment, clean=False,
- tmp_offset="unmerge")
+ self,
+ pkg,
+ observer=observer,
+ initial_env=self.domain.settings,
+ env_data_source=pkg.environment,
+ clean=False,
+ tmp_offset="unmerge",
+ )
prerm = pretty_docs(
observer.decorate_build_method("prerm")(
- post_curry(ebd._generic_phase, "prerm", False, False)),
- "run the prerm phase")
+ post_curry(ebd._generic_phase, "prerm", False, False)
+ ),
+ "run the prerm phase",
+ )
postrm = pretty_docs(
observer.decorate_build_method("postrm")(
- post_curry(
- ebd._generic_phase, "postrm", False, False,
- failure_allowed=True)),
- "run the postrm phase")
+ post_curry(ebd._generic_phase, "postrm", False, False, failure_allowed=True)
+ ),
+ "run the postrm phase",
+ )
def add_triggers(self, domain_op, engine):
self.old_pkg.add_format_triggers(domain_op, self, engine)
@@ -583,7 +682,7 @@ class replace_op(format.replace):
def finalize(self):
ret = self.uninstall_op.finish()
ret2 = self.install_op.finish()
- return (ret and ret2)
+ return ret and ret2
def add_triggers(self, domain_op, engine):
self.uninstall_op.add_triggers(domain_op, engine)
@@ -595,8 +694,16 @@ class buildable(ebd, setup_mixin, format.build):
# XXX this is unclean- should be handing in strictly what is build
# env, rather then dumping domain settings as env.
- def __init__(self, domain, pkg, verified_files, eclass_cache,
- observer=None, force_test=False, **kwargs):
+ def __init__(
+ self,
+ domain,
+ pkg,
+ verified_files,
+ eclass_cache,
+ observer=None,
+ force_test=False,
+ **kwargs,
+ ):
"""
:param pkg: :obj:`pkgcore.ebuild.ebuild_src.package` instance we'll be
building
@@ -613,20 +720,23 @@ class buildable(ebd, setup_mixin, format.build):
self.eclass_cache = eclass_cache
self.run_test = force_test or self.feat_or_bool("test", domain_settings)
- self.allow_failed_test = self.feat_or_bool("test-fail-continue", domain_settings)
+ self.allow_failed_test = self.feat_or_bool(
+ "test-fail-continue", domain_settings
+ )
if "test" in self.restrict:
self.run_test = False
elif not force_test and "test" not in pkg.use:
if self.run_test:
- logger.warning(f"disabling test for {pkg} due to test use flag being disabled")
+ logger.warning(
+ f"disabling test for {pkg} due to test use flag being disabled"
+ )
self.run_test = False
# XXX minor hack
path = self.env["PATH"].split(os.pathsep)
for s, default in (("DISTCC", ".distcc"), ("CCACHE", "ccache")):
- b = (self.feat_or_bool(s, domain_settings) and
- s not in self.restrict)
+ b = self.feat_or_bool(s, domain_settings) and s not in self.restrict
setattr(self, s.lower(), b)
if b:
# looks weird I realize, but
@@ -645,17 +755,17 @@ class buildable(ebd, setup_mixin, format.build):
else:
for y in ("_PATH", "_DIR"):
if s + y in self.env:
- del self.env[s+y]
+ del self.env[s + y]
self.env["PATH"] = os.pathsep.join(path)
# ordering must match appearance order in SRC_URI per PMS
- self.env["A"] = ' '.join(iter_stable_unique(pkg.distfiles))
+ self.env["A"] = " ".join(iter_stable_unique(pkg.distfiles))
if self.eapi.options.has_AA:
pkg = self.pkg
- while hasattr(pkg, '_raw_pkg'):
- pkg = getattr(pkg, '_raw_pkg')
- self.env["AA"] = ' '.join(set(iflatten_instance(pkg.distfiles)))
+ while hasattr(pkg, "_raw_pkg"):
+ pkg = getattr(pkg, "_raw_pkg")
+ self.env["AA"] = " ".join(set(iflatten_instance(pkg.distfiles)))
if self.eapi.options.has_KV:
self.env["KV"] = domain.KV
@@ -670,9 +780,9 @@ class buildable(ebd, setup_mixin, format.build):
if self.setup_is_for_src:
# TODO: PORTAGE_ACTUAL_DISTDIR usage by VCS eclasses needs to be
# dropped, but it's currently required for repo reuse.
- self.env['PORTAGE_ACTUAL_DISTDIR'] = domain.distdir
- self.env['DISTDIR'] = normpath(pjoin(self.builddir, 'distdir'))
- for k in ('PORTAGE_ACTUAL_DISTDIR', 'DISTDIR'):
+ self.env["PORTAGE_ACTUAL_DISTDIR"] = domain.distdir
+ self.env["DISTDIR"] = normpath(pjoin(self.builddir, "distdir"))
+ for k in ("PORTAGE_ACTUAL_DISTDIR", "DISTDIR"):
self.env[k] = os.path.realpath(self.env[k]).rstrip(os.sep) + os.sep
def _setup_distfiles(self):
@@ -688,31 +798,34 @@ class buildable(ebd, setup_mixin, format.build):
if self.verified_files:
try:
if os.path.exists(self.env["DISTDIR"]):
- if (os.path.isdir(self.env["DISTDIR"]) and
- not os.path.islink(self.env["DISTDIR"])):
+ if os.path.isdir(self.env["DISTDIR"]) and not os.path.islink(
+ self.env["DISTDIR"]
+ ):
shutil.rmtree(self.env["DISTDIR"])
else:
os.unlink(self.env["DISTDIR"])
except EnvironmentError as e:
raise format.FailedDirectory(
- self.env["DISTDIR"],
- f"failed removing existing file/dir/link: {e}") from e
+ self.env["DISTDIR"], f"failed removing existing file/dir/link: {e}"
+ ) from e
if not ensure_dirs(self.env["DISTDIR"], mode=0o770, gid=portage_gid):
raise format.FailedDirectory(
- self.env["DISTDIR"],
- "failed creating distdir symlink directory")
+ self.env["DISTDIR"], "failed creating distdir symlink directory"
+ )
try:
for src, dest in [
- (k, pjoin(self.env["DISTDIR"], v.filename))
- for (k, v) in self.verified_files.items()]:
+ (k, pjoin(self.env["DISTDIR"], v.filename))
+ for (k, v) in self.verified_files.items()
+ ]:
os.symlink(src, dest)
except EnvironmentError as e:
raise format.GenericBuildError(
- f"Failed symlinking in distfiles for src {src} -> {dest}: {e}") from e
+ f"Failed symlinking in distfiles for src {src} -> {dest}: {e}"
+ ) from e
@observer.decorate_build_method("setup")
def setup(self):
@@ -723,11 +836,13 @@ class buildable(ebd, setup_mixin, format.build):
"""
if self.distcc:
for p in ("", "/lock", "/state"):
- if not ensure_dirs(pjoin(self.env["DISTCC_DIR"], p),
- mode=0o2775, gid=portage_gid):
+ if not ensure_dirs(
+ pjoin(self.env["DISTCC_DIR"], p), mode=0o2775, gid=portage_gid
+ ):
raise format.FailedDirectory(
pjoin(self.env["DISTCC_DIR"], p),
- "failed creating needed distcc directory")
+ "failed creating needed distcc directory",
+ )
if self.ccache:
# yuck.
st = None
@@ -735,11 +850,12 @@ class buildable(ebd, setup_mixin, format.build):
st = os.stat(self.env["CCACHE_DIR"])
except OSError as e:
st = None
- if not ensure_dirs(self.env["CCACHE_DIR"], mode=0o2775,
- gid=portage_gid):
+ if not ensure_dirs(
+ self.env["CCACHE_DIR"], mode=0o2775, gid=portage_gid
+ ):
raise format.FailedDirectory(
- self.env["CCACHE_DIR"],
- "failed creation of ccache dir") from e
+ self.env["CCACHE_DIR"], "failed creation of ccache dir"
+ ) from e
# XXX this is more then mildly stupid.
st = os.stat(self.env["CCACHE_DIR"])
@@ -754,27 +870,34 @@ class buildable(ebd, setup_mixin, format.build):
os.chmod(self.env["CCACHE_DIR"], 0o2775)
os.chown(self.env["CCACHE_DIR"], -1, portage_gid)
if 0 != spawn(
- ["chgrp", "-R", str(portage_gid), self.env["CCACHE_DIR"]]):
+ ["chgrp", "-R", str(portage_gid), self.env["CCACHE_DIR"]]
+ ):
raise format.FailedDirectory(
self.env["CCACHE_DIR"],
- "failed changing ownership for CCACHE_DIR")
+ "failed changing ownership for CCACHE_DIR",
+ )
if 0 != spawn_bash(
- "find '%s' -type d -print0 | %s --null chmod 02775"
- % (self.env["CCACHE_DIR"], xargs)):
+ "find '%s' -type d -print0 | %s --null chmod 02775"
+ % (self.env["CCACHE_DIR"], xargs)
+ ):
raise format.FailedDirectory(
self.env["CCACHE_DIR"],
- "failed correcting perms for CCACHE_DIR")
+ "failed correcting perms for CCACHE_DIR",
+ )
if 0 != spawn_bash(
- "find '%s' -type f -print0 | %s --null chmod 0775"
- % (self.env["CCACHE_DIR"], xargs)):
+ "find '%s' -type f -print0 | %s --null chmod 0775"
+ % (self.env["CCACHE_DIR"], xargs)
+ ):
raise format.FailedDirectory(
self.env["CCACHE_DIR"],
- "failed correcting perms for CCACHE_DIR")
+ "failed correcting perms for CCACHE_DIR",
+ )
except OSError as e:
raise format.FailedDirectory(
self.env["CCACHE_DIR"],
- "failed ensuring perms/group owner for CCACHE_DIR") from e
+ "failed ensuring perms/group owner for CCACHE_DIR",
+ ) from e
return setup_mixin.setup(self)
@@ -796,11 +919,13 @@ class buildable(ebd, setup_mixin, format.build):
ret = True
if "prepare" in self.eapi.phases:
ret = self._generic_phase("prepare", True, True)
- if (self.eapi.options.user_patches and
- not os.path.exists(pjoin(self.env['T'], '.user_patches_applied'))):
+ if self.eapi.options.user_patches and not os.path.exists(
+ pjoin(self.env["T"], ".user_patches_applied")
+ ):
self.observer.error(
- 'eapply_user (or default) must be called in src_prepare()')
- raise format.GenericBuildError('missing eapply_user call')
+ "eapply_user (or default) must be called in src_prepare()"
+ )
+ raise format.GenericBuildError("missing eapply_user call")
return ret
def nofetch(self):
@@ -820,20 +945,24 @@ class buildable(ebd, setup_mixin, format.build):
os.chown(self.env["WORKDIR"], portage_uid, -1)
except OSError as e:
raise format.GenericBuildError(
- "failed forcing %i uid for WORKDIR: %s" %
- (portage_uid, e)) from e
+ "failed forcing %i uid for WORKDIR: %s" % (portage_uid, e)
+ ) from e
return self._generic_phase("unpack", True, True)
compile = pretty_docs(
observer.decorate_build_method("compile")(
- post_curry(ebd._generic_phase, "compile", True, True)),
- "Run the compile phase (maps to src_compile).")
+ post_curry(ebd._generic_phase, "compile", True, True)
+ ),
+ "Run the compile phase (maps to src_compile).",
+ )
@observer.decorate_build_method("install")
def install(self):
"""Run the install phase (maps to src_install)."""
# TODO: replace print() usage with observer
- print(f">>> Install {self.env['PF']} into {self.ED!r} category {self.env['CATEGORY']}")
+ print(
+ f">>> Install {self.env['PF']} into {self.ED!r} category {self.env['CATEGORY']}"
+ )
ret = self._generic_phase("install", False, True)
print(f">>> Completed installing {self.env['PF']} into {self.ED!r}")
return ret
@@ -844,7 +973,8 @@ class buildable(ebd, setup_mixin, format.build):
if not self.run_test:
return True
return self._generic_phase(
- "test", True, True, failure_allowed=self.allow_failed_test)
+ "test", True, True, failure_allowed=self.allow_failed_test
+ )
def finalize(self):
"""Finalize the operation.
@@ -857,7 +987,8 @@ class buildable(ebd, setup_mixin, format.build):
"""
factory = ebuild_built.fake_package_factory(self._built_class)
return factory.new_package(
- self.pkg, self.env["D"], pjoin(self.env["T"], "environment"))
+ self.pkg, self.env["D"], pjoin(self.env["T"], "environment")
+ )
class binpkg_localize(ebd, setup_mixin, format.build):
@@ -867,7 +998,9 @@ class binpkg_localize(ebd, setup_mixin, format.build):
def __init__(self, domain, pkg, **kwargs):
self._built_class = ebuild_built.package
- format.build.__init__(self, domain, pkg, {}, observer=kwargs.get("observer", None))
+ format.build.__init__(
+ self, domain, pkg, {}, observer=kwargs.get("observer", None)
+ )
ebd.__init__(self, pkg, **kwargs)
if self.eapi.options.has_merge_type:
self.env["MERGE_TYPE"] = "binpkg"
@@ -882,11 +1015,14 @@ class ebuild_operations:
def _register_check(checks):
"""Decorator to register sanity checks that will be run."""
+
def _wrap_func(func):
def wrapped(*args, **kwargs):
return func(*args, **kwargs)
+
checks.append(func)
return wrapped
+
return _wrap_func
def _cmd_implementation_sanity_check(self, domain):
@@ -905,22 +1041,24 @@ class ebuild_operations:
against a known set of enabled USE flags and is in collapsed form.
"""
if pkg.eapi.options.has_required_use:
- if failures := tuple(node for node in pkg.required_use if not node.match(pkg.use)):
+ if failures := tuple(
+ node for node in pkg.required_use if not node.match(pkg.use)
+ ):
return errors.RequiredUseError(pkg, failures)
@_register_check(_checks)
def _check_pkg_pretend(self, pkg, *, domain, **kwargs):
"""Run pkg_pretend phase."""
# pkg_pretend is not defined or required
- if 'pretend' not in pkg.mandatory_phases:
+ if "pretend" not in pkg.mandatory_phases:
return
commands = None
if not pkg.built:
commands = {
- 'request_inherit': partial(inherit_handler, self._eclass_cache),
- 'has_version': ebd_ipc.Has_Version(self),
- 'best_version': ebd_ipc.Best_Version(self),
+ "request_inherit": partial(inherit_handler, self._eclass_cache),
+ "has_version": ebd_ipc.Has_Version(self),
+ "best_version": ebd_ipc.Best_Version(self),
}
# Use base build tempdir for $T instead of full pkg specific path to
@@ -930,62 +1068,76 @@ class ebuild_operations:
self.env["T"] = domain.pm_tmpdir
ebd.set_path_vars(self.env, pkg, domain)
# avoid clipping eend() messages
- self.env["PKGCORE_RC_PREFIX"] = '2'
+ self.env["PKGCORE_RC_PREFIX"] = "2"
with TemporaryFile() as f:
# suppress bash output by default
fd_pipes = {1: f.fileno(), 2: f.fileno()}
try:
run_generic_phase(
- pkg, "pretend", self.env, tmpdir=None, fd_pipes=fd_pipes,
- userpriv=True, sandbox=True, extra_handlers=commands)
+ pkg,
+ "pretend",
+ self.env,
+ tmpdir=None,
+ fd_pipes=fd_pipes,
+ userpriv=True,
+ sandbox=True,
+ extra_handlers=commands,
+ )
except ProcessorError as e:
f.seek(0)
- output = f.read().decode().strip('\n')
+ output = f.read().decode().strip("\n")
return errors.PkgPretendError(pkg, output, e)
class src_operations(ebuild_operations, format.build_operations):
-
def __init__(self, domain, pkg, eclass_cache, observer=None):
format.build_operations.__init__(self, domain, pkg, observer=observer)
self._eclass_cache = eclass_cache
- def _cmd_implementation_build(self, observer, verified_files,
- clean=False, force_test=False):
+ def _cmd_implementation_build(
+ self, observer, verified_files, clean=False, force_test=False
+ ):
return buildable(
- self.domain, self.pkg, verified_files,
- self._eclass_cache, observer=observer,
- clean=clean, force_test=force_test)
+ self.domain,
+ self.pkg,
+ verified_files,
+ self._eclass_cache,
+ observer=observer,
+ clean=clean,
+ force_test=force_test,
+ )
class misc_operations(ebd):
-
def __init__(self, domain, *args, **kwds):
self.domain = domain
super().__init__(*args, **kwds)
def configure(self, observer=None):
- return self._generic_phase('config', False, True)
+ return self._generic_phase("config", False, True)
def info(self, observer=None):
- return self._generic_phase('info', True, True)
+ return self._generic_phase("info", True, True)
class built_operations(ebuild_operations, format.operations):
-
def __init__(self, domain, pkg, observer=None, initial_env=None):
format.operations.__init__(self, domain, pkg, observer=observer)
self._initial_env = initial_env
self._localized_ebd = None
def _cmd_implementation_localize(self, observer, force=False):
- if not force and getattr(self.pkg, '_is_from_source', False):
+ if not force and getattr(self.pkg, "_is_from_source", False):
return self.pkg
self._localized_ebd = op = binpkg_localize(
- self.domain, self.pkg, clean=False,
- initial_env=self._initial_env, env_data_source=self.pkg.environment,
- observer=observer)
+ self.domain,
+ self.pkg,
+ clean=False,
+ initial_env=self._initial_env,
+ env_data_source=self.pkg.environment,
+ observer=observer,
+ )
return op.finalize()
def _cmd_implementation_cleanup(self, observer, force=False):
@@ -995,13 +1147,14 @@ class built_operations(ebuild_operations, format.operations):
def _cmd_check_support_configure(self):
pkg = self.pkg
- if 'config' not in pkg.mandatory_phases:
+ if "config" not in pkg.mandatory_phases:
return False
return True
def _cmd_implementation_configure(self, observer):
misc = misc_operations(
- self.domain, self.pkg, env_data_source=self.pkg.environment, clean=True)
+ self.domain, self.pkg, env_data_source=self.pkg.environment, clean=True
+ )
try:
misc.start()
misc.configure()
diff --git a/src/pkgcore/ebuild/ebd_ipc.py b/src/pkgcore/ebuild/ebd_ipc.py
index 403bb352a..84842546a 100644
--- a/src/pkgcore/ebuild/ebd_ipc.py
+++ b/src/pkgcore/ebuild/ebd_ipc.py
@@ -28,7 +28,7 @@ from .misc import get_relative_dosym_target
class IpcError(PkgcoreException):
"""Generic IPC errors."""
- def __init__(self, msg='', code=1, name=None, **kwargs):
+ def __init__(self, msg="", code=1, name=None, **kwargs):
super().__init__(msg, **kwargs)
self.msg = msg
self.code = code
@@ -37,7 +37,7 @@ class IpcError(PkgcoreException):
def __str__(self):
if self.name:
- return f'{self.name}: {self.msg}'
+ return f"{self.name}: {self.msg}"
return self.msg
@@ -101,12 +101,12 @@ class IpcCommand:
ret = 0
# read info from bash side
- nonfatal = self.read() == 'true'
+ nonfatal = self.read() == "true"
self.cwd = self.read()
self.phase = self.read()
options = shlex.split(self.read())
- args = self.read().strip('\0')
- args = args.split('\0') if args else []
+ args = self.read().strip("\0")
+ args = args.split("\0") if args else []
# parse args and run command
with chdir(self.cwd):
@@ -121,7 +121,7 @@ class IpcCommand:
except KeyboardInterrupt:
raise
except Exception as e:
- raise IpcInternalError('internal failure') from e
+ raise IpcInternalError("internal failure") from e
# return completion status to the bash side
self.write(self._encode_ret(ret))
@@ -133,10 +133,10 @@ class IpcCommand:
return 0
elif isinstance(ret, tuple):
code, response = ret
- return f'{code}\x07{response}'
+ return f"{code}\x07{response}"
elif isinstance(ret, (int, str)):
- return f'0\x07{ret}'
- raise TypeError(f'unsupported return status type: {type(ret)}')
+ return f"0\x07{ret}"
+ raise TypeError(f"unsupported return status type: {type(ret)}")
def parse_args(self, options, args):
"""Parse internal args passed from the bash side."""
@@ -176,7 +176,7 @@ class IpcCommand:
Args:
msg: message to be output
"""
- self.observer.warn(f'{self.name}: {msg}')
+ self.observer.warn(f"{self.name}: {msg}")
self.observer.flush()
@@ -211,7 +211,7 @@ def command_options(s):
def existing_path(path):
"""Check if a given path exists (allows broken symlinks)."""
if not os.path.lexists(path):
- raise argparse.ArgumentTypeError(f'nonexistent path: {path!r}')
+ raise argparse.ArgumentTypeError(f"nonexistent path: {path!r}")
return path
@@ -219,29 +219,29 @@ class _InstallWrapper(IpcCommand):
"""Python wrapper for commands using `install`."""
parser = IpcArgumentParser()
- parser.add_argument('--dest', default='/')
- parser.add_argument('--insoptions', type=command_options)
- parser.add_argument('--diroptions', type=command_options)
+ parser.add_argument("--dest", default="/")
+ parser.add_argument("--insoptions", type=command_options)
+ parser.add_argument("--diroptions", type=command_options)
# defaults options for file and dir install actions
- insoptions_default = ''
- diroptions_default = ''
+ insoptions_default = ""
+ diroptions_default = ""
# supported install command options
install_parser = IpcArgumentParser()
- install_parser.add_argument('-g', '--group', default=-1, type=_parse_group)
- install_parser.add_argument('-o', '--owner', default=-1, type=_parse_user)
- install_parser.add_argument('-m', '--mode', default=0o755, type=_parse_mode)
- install_parser.add_argument('-p', '--preserve-timestamps', action='store_true')
+ install_parser.add_argument("-g", "--group", default=-1, type=_parse_group)
+ install_parser.add_argument("-o", "--owner", default=-1, type=_parse_user)
+ install_parser.add_argument("-m", "--mode", default=0o755, type=_parse_mode)
+ install_parser.add_argument("-p", "--preserve-timestamps", action="store_true")
arg_parser = IpcArgumentParser()
- arg_parser.add_argument('targets', nargs='+', type=existing_path)
+ arg_parser.add_argument("targets", nargs="+", type=existing_path)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.parser.set_defaults(
- insoptions=self.insoptions_default,
- diroptions=self.diroptions_default)
+ insoptions=self.insoptions_default, diroptions=self.diroptions_default
+ )
# initialize file/dir creation coroutines
self.install = self._install().send
@@ -275,7 +275,9 @@ class _InstallWrapper(IpcCommand):
True when all options are handled,
otherwise False if unknown/unhandled options exist.
"""
- opts, unknown = self.install_parser.parse_known_args(options, namespace=namespace)
+ opts, unknown = self.install_parser.parse_known_args(
+ options, namespace=namespace
+ )
if unknown or opts.mode is None:
msg = "falling back to 'install'"
if unknown:
@@ -289,8 +291,7 @@ class _InstallWrapper(IpcCommand):
dest_dir = pjoin(self.op.ED, self.opts.dest.lstrip(os.path.sep))
os.makedirs(dest_dir, exist_ok=True)
except OSError as e:
- raise IpcCommandError(
- f'failed creating dir: {dest_dir!r}: {e.strerror}')
+ raise IpcCommandError(f"failed creating dir: {dest_dir!r}: {e.strerror}")
self._install_targets(args.targets)
def _prefix_targets(self, targets, files=True):
@@ -299,10 +300,9 @@ class _InstallWrapper(IpcCommand):
if files:
return (
(s, pjoin(self.op.ED, dest_dir, d.lstrip(os.path.sep)))
- for s, d in targets)
- return (
- pjoin(self.op.ED, dest_dir, d.lstrip(os.path.sep))
- for d in targets)
+ for s, d in targets
+ )
+ return (pjoin(self.op.ED, dest_dir, d.lstrip(os.path.sep)) for d in targets)
def _install_targets(self, targets):
"""Install targets.
@@ -320,11 +320,13 @@ class _InstallWrapper(IpcCommand):
iterable of directories to install from
"""
while True:
- dirs = (yield)
+ dirs = yield
for d in dirs:
base_dir = os.path.basename(d.rstrip(os.path.sep))
for dirpath, dirnames, filenames in os.walk(d):
- dest_dir = os.path.normpath(pjoin(base_dir, os.path.relpath(dirpath, d)))
+ dest_dir = os.path.normpath(
+ pjoin(base_dir, os.path.relpath(dirpath, d))
+ )
self.install_dirs([dest_dir])
for dirname in dirnames:
source = pjoin(dirpath, dirname)
@@ -333,8 +335,7 @@ class _InstallWrapper(IpcCommand):
self.install_symlinks([(source, dest)])
if filenames:
self.install(
- (pjoin(dirpath, f), pjoin(dest_dir, f))
- for f in filenames
+ (pjoin(dirpath, f), pjoin(dest_dir, f)) for f in filenames
)
@staticmethod
@@ -351,7 +352,8 @@ class _InstallWrapper(IpcCommand):
os.chmod(path, opts.mode)
except OSError as e:
raise IpcCommandError(
- f'failed setting file attributes: {path!r}: {e.strerror}')
+ f"failed setting file attributes: {path!r}: {e.strerror}"
+ )
@staticmethod
def _set_timestamps(source_stat, dest):
@@ -386,7 +388,7 @@ class _InstallWrapper(IpcCommand):
# installing file to a new path
return True
except OSError as e:
- raise IpcCommandError(f'cannot stat {dest!r}: {e.strerror}')
+ raise IpcCommandError(f"cannot stat {dest!r}: {e.strerror}")
# installing symlink
if stat.S_ISLNK(dest_lstat.st_mode):
@@ -397,10 +399,12 @@ class _InstallWrapper(IpcCommand):
return True
# installing hardlink if source and dest are different
- if (dest_lstat.st_nlink > 1 and os.path.realpath(source) != os.path.realpath(dest)):
+ if dest_lstat.st_nlink > 1 and os.path.realpath(source) != os.path.realpath(
+ dest
+ ):
return True
- raise IpcCommandError(f'{source!r} and {dest!r} are identical')
+ raise IpcCommandError(f"{source!r} and {dest!r} are identical")
@coroutine
def _install(self):
@@ -412,13 +416,13 @@ class _InstallWrapper(IpcCommand):
IpcCommandError on failure
"""
while True:
- files = (yield)
+ files = yield
# TODO: skip/warn installing empty files
for source, dest in self._prefix_targets(files):
try:
sstat = os.stat(source)
except OSError as e:
- raise IpcCommandError(f'cannot stat {source!r}: {e.strerror}')
+ raise IpcCommandError(f"cannot stat {source!r}: {e.strerror}")
self._is_install_allowed(source, sstat, dest)
@@ -428,7 +432,9 @@ class _InstallWrapper(IpcCommand):
except FileNotFoundError:
pass
except OSError as e:
- raise IpcCommandError(f'failed removing file: {dest!r}: {e.strerror}')
+ raise IpcCommandError(
+ f"failed removing file: {dest!r}: {e.strerror}"
+ )
try:
shutil.copyfile(source, dest, follow_symlinks=False)
@@ -438,7 +444,8 @@ class _InstallWrapper(IpcCommand):
self._set_timestamps(sstat, dest)
except OSError as e:
raise IpcCommandError(
- f'failed copying file: {source!r} to {dest!r}: {e.strerror}')
+ f"failed copying file: {source!r} to {dest!r}: {e.strerror}"
+ )
@coroutine
def _install_cmd(self):
@@ -450,7 +457,7 @@ class _InstallWrapper(IpcCommand):
IpcCommandError on failure
"""
while True:
- files = (yield)
+ files = yield
# `install` forcibly resolves symlinks so split them out
files, symlinks = partition(files, predicate=lambda x: os.path.islink(x[0]))
@@ -460,10 +467,10 @@ class _InstallWrapper(IpcCommand):
files = sorted(self._prefix_targets(files), key=itemgetter(1))
for dest, files_group in itertools.groupby(files, itemgetter(1)):
sources = list(path for path, _ in files_group)
- command = ['install'] + self.opts.insoptions + sources + [dest]
+ command = ["install"] + self.opts.insoptions + sources + [dest]
ret, output = spawn.spawn_get_output(command, collect_fds=(2,))
if not ret:
- raise IpcCommandError('\n'.join(output), code=ret)
+ raise IpcCommandError("\n".join(output), code=ret)
@coroutine
def _install_dirs(self):
@@ -475,14 +482,14 @@ class _InstallWrapper(IpcCommand):
IpcCommandError on failure
"""
while True:
- dirs = (yield)
+ dirs = yield
try:
for d in self._prefix_targets(dirs, files=False):
os.makedirs(d, exist_ok=True)
if self.diroptions:
self._set_attributes(self.diroptions, d)
except OSError as e:
- raise IpcCommandError(f'failed creating dir: {d!r}: {e.strerror}')
+ raise IpcCommandError(f"failed creating dir: {d!r}: {e.strerror}")
@coroutine
def _install_dirs_cmd(self):
@@ -494,12 +501,12 @@ class _InstallWrapper(IpcCommand):
IpcCommandError on failure
"""
while True:
- dirs = (yield)
+ dirs = yield
dirs = self._prefix_targets(dirs, files=False)
- command = ['install', '-d'] + self.opts.diroptions + list(dirs)
+ command = ["install", "-d"] + self.opts.diroptions + list(dirs)
ret, output = spawn.spawn_get_output(command, collect_fds=(2,))
if not ret:
- raise IpcCommandError('\n'.join(output), code=ret)
+ raise IpcCommandError("\n".join(output), code=ret)
@coroutine
def _install_symlinks(self):
@@ -511,20 +518,21 @@ class _InstallWrapper(IpcCommand):
IpcCommandError on failure
"""
while True:
- symlinks = (yield)
+ symlinks = yield
try:
for symlink, dest in self._prefix_targets(symlinks):
os.symlink(os.readlink(symlink), dest)
except OSError as e:
raise IpcCommandError(
- f'failed creating symlink: {symlink!r} -> {dest!r}: {e.strerror}')
+ f"failed creating symlink: {symlink!r} -> {dest!r}: {e.strerror}"
+ )
class Doins(_InstallWrapper):
"""Python wrapper for doins."""
arg_parser = IpcArgumentParser(parents=(_InstallWrapper.arg_parser,))
- arg_parser.add_argument('-r', dest='recursive', action='store_true')
+ arg_parser.add_argument("-r", dest="recursive", action="store_true")
def _install_targets(self, targets):
files, dirs = partition(targets, predicate=os.path.isdir)
@@ -536,10 +544,10 @@ class Doins(_InstallWrapper):
class Dodoc(_InstallWrapper):
"""Python wrapper for dodoc."""
- insoptions_default = '-m0644'
+ insoptions_default = "-m0644"
arg_parser = IpcArgumentParser(parents=(_InstallWrapper.arg_parser,))
- arg_parser.add_argument('-r', dest='recursive', action='store_true')
+ arg_parser.add_argument("-r", dest="recursive", action="store_true")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -553,24 +561,24 @@ class Dodoc(_InstallWrapper):
if self.opts.recursive and self.allow_recursive:
self.install_from_dirs(dirs)
else:
- missing_option = ', missing -r option?' if self.allow_recursive else ''
- raise IpcCommandError(f'{dirs[0]!r} is a directory{missing_option}')
+ missing_option = ", missing -r option?" if self.allow_recursive else ""
+ raise IpcCommandError(f"{dirs[0]!r} is a directory{missing_option}")
self.install((f, os.path.basename(f)) for f in files)
class Doinfo(_InstallWrapper):
"""Python wrapper for doinfo."""
- insoptions_default = '-m0644'
+ insoptions_default = "-m0644"
class Dodir(_InstallWrapper):
"""Python wrapper for dodir."""
- diroptions_default = '-m0755'
+ diroptions_default = "-m0755"
arg_parser = IpcArgumentParser()
- arg_parser.add_argument('targets', nargs='+')
+ arg_parser.add_argument("targets", nargs="+")
def run(self, args):
self.install_dirs(args.targets)
@@ -584,10 +592,10 @@ class Keepdir(Dodir):
super().run(args)
# create stub files
- filename = f'.keep_{self.pkg.category}_{self.pkg.PN}-{self.pkg.slot}'
+ filename = f".keep_{self.pkg.category}_{self.pkg.PN}-{self.pkg.slot}"
for x in args.targets:
path = pjoin(self.op.ED, x.lstrip(os.path.sep), filename)
- open(path, 'w').close()
+ open(path, "w").close()
class Doexe(_InstallWrapper):
@@ -600,7 +608,10 @@ class Dobin(_InstallWrapper):
def parse_install_options(self, *args, **kwargs):
# TODO: fix this to be prefix aware at some point
self.opts.insoptions = [
- '-m0755', f'-g{os_data.root_gid}', f'-o{os_data.root_uid}']
+ "-m0755",
+ f"-g{os_data.root_gid}",
+ f"-o{os_data.root_uid}",
+ ]
return super().parse_install_options(*args, **kwargs)
@@ -615,20 +626,20 @@ class Dolib(_InstallWrapper):
class Dolib_so(Dolib):
"""Python wrapper for dolib.so."""
- name = 'dolib.so'
+ name = "dolib.so"
class Dolib_a(Dolib):
"""Python wrapper for dolib.a."""
- name = 'dolib.a'
+ name = "dolib.a"
class _Symlink(_InstallWrapper):
arg_parser = IpcArgumentParser()
- arg_parser.add_argument('source')
- arg_parser.add_argument('target')
+ arg_parser.add_argument("source")
+ arg_parser.add_argument("target")
def run(self, args):
dest_dir = args.target.rsplit(os.path.sep, 1)[0]
@@ -646,7 +657,8 @@ class _Symlink(_InstallWrapper):
self._link(args.source, target)
except OSError as e:
raise IpcCommandError(
- f'failed creating link: {args.source!r} -> {args.target!r}: {e.strerror}')
+ f"failed creating link: {args.source!r} -> {args.target!r}: {e.strerror}"
+ )
class Dosym(_Symlink):
@@ -654,7 +666,7 @@ class Dosym(_Symlink):
_link = os.symlink
arg_parser = IpcArgumentParser(parents=(_Symlink.arg_parser,))
- arg_parser.add_argument('-r', dest='relative', action='store_true')
+ arg_parser.add_argument("-r", dest="relative", action="store_true")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -662,16 +674,17 @@ class Dosym(_Symlink):
def run(self, args):
target = args.target
- if (target.endswith(os.path.sep) or
- (os.path.isdir(target) and not os.path.islink(target))):
+ if target.endswith(os.path.sep) or (
+ os.path.isdir(target) and not os.path.islink(target)
+ ):
# bug 379899
- raise IpcCommandError(f'missing filename target: {target!r}')
+ raise IpcCommandError(f"missing filename target: {target!r}")
if self.opts.relative:
if not self.dosym_relative:
- raise IpcCommandError(f'-r not permitted in EAPI {self.eapi}')
+ raise IpcCommandError(f"-r not permitted in EAPI {self.eapi}")
if not os.path.isabs(args.source):
- raise IpcCommandError('-r is only meaningful with absolute paths')
+ raise IpcCommandError("-r is only meaningful with absolute paths")
args.source = get_relative_dosym_target(args.source, target)
super().run(args)
@@ -686,13 +699,13 @@ class Dohard(_Symlink):
class Doman(_InstallWrapper):
"""Python wrapper for doman."""
- insoptions_default = '-m0644'
+ insoptions_default = "-m0644"
arg_parser = IpcArgumentParser(parents=(_InstallWrapper.arg_parser,))
- arg_parser.add_argument('-i18n', action='store_true', default='')
+ arg_parser.add_argument("-i18n", action="store_true", default="")
- detect_lang_re = re.compile(r'^(\w+)\.([a-z]{2}([A-Z]{2})?)\.(\w+)$')
- valid_mandir_re = re.compile(r'man[0-9n](f|p|pm)?$')
+ detect_lang_re = re.compile(r"^(\w+)\.([a-z]{2}([A-Z]{2})?)\.(\w+)$")
+ valid_mandir_re = re.compile(r"man[0-9n](f|p|pm)?$")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -707,17 +720,17 @@ class Doman(_InstallWrapper):
if self.eapi.archive_exts_regex.match(ext):
# TODO: uncompress/warn?
- ext = os.path.splitext(basename.rsplit('.', 1)[0])[1]
+ ext = os.path.splitext(basename.rsplit(".", 1)[0])[1]
name = basename
- mandir = f'man{ext[1:]}'
+ mandir = f"man{ext[1:]}"
if self.language_override and self.opts.i18n:
mandir = pjoin(self.opts.i18n, mandir)
elif self.language_detect:
match = self.detect_lang_re.match(basename)
if match:
- name = f'{match.group(1)}.{match.group(4)}'
+ name = f"{match.group(1)}.{match.group(4)}"
mandir = pjoin(match.group(2), mandir)
if self.valid_mandir_re.match(os.path.basename(mandir)):
@@ -726,40 +739,51 @@ class Doman(_InstallWrapper):
dirs.add(mandir)
self.install([(x, pjoin(mandir, name))])
else:
- raise IpcCommandError(f'invalid man page: {x}')
+ raise IpcCommandError(f"invalid man page: {x}")
class Domo(_InstallWrapper):
"""Python wrapper for domo."""
- insoptions_default = '-m0644'
+ insoptions_default = "-m0644"
def _install_targets(self, targets):
dirs = set()
for x in targets:
- d = pjoin(os.path.splitext(os.path.basename(x))[0], 'LC_MESSAGES')
+ d = pjoin(os.path.splitext(os.path.basename(x))[0], "LC_MESSAGES")
if d not in dirs:
self.install_dirs([d])
dirs.add(d)
- self.install([(x, pjoin(d, f'{self.pkg.PN}.mo'))])
+ self.install([(x, pjoin(d, f"{self.pkg.PN}.mo"))])
class Dohtml(_InstallWrapper):
"""Python wrapper for dohtml."""
- insoptions_default = '-m0644'
+ insoptions_default = "-m0644"
arg_parser = IpcArgumentParser(parents=(_InstallWrapper.arg_parser,))
- arg_parser.add_argument('-r', dest='recursive', action='store_true')
- arg_parser.add_argument('-V', dest='verbose', action='store_true')
- arg_parser.add_argument('-A', dest='extra_allowed_file_exts', action='csv', default=[])
- arg_parser.add_argument('-a', dest='allowed_file_exts', action='csv', default=[])
- arg_parser.add_argument('-f', dest='allowed_files', action='csv', default=[])
- arg_parser.add_argument('-x', dest='excluded_dirs', action='csv', default=[])
- arg_parser.add_argument('-p', dest='doc_prefix', default='')
+ arg_parser.add_argument("-r", dest="recursive", action="store_true")
+ arg_parser.add_argument("-V", dest="verbose", action="store_true")
+ arg_parser.add_argument(
+ "-A", dest="extra_allowed_file_exts", action="csv", default=[]
+ )
+ arg_parser.add_argument("-a", dest="allowed_file_exts", action="csv", default=[])
+ arg_parser.add_argument("-f", dest="allowed_files", action="csv", default=[])
+ arg_parser.add_argument("-x", dest="excluded_dirs", action="csv", default=[])
+ arg_parser.add_argument("-p", dest="doc_prefix", default="")
# default allowed file extensions
- default_allowed_file_exts = ('css', 'gif', 'htm', 'html', 'jpeg', 'jpg', 'js', 'png')
+ default_allowed_file_exts = (
+ "css",
+ "gif",
+ "htm",
+ "html",
+ "jpeg",
+ "jpg",
+ "js",
+ "png",
+ )
def parse_args(self, *args, **kwargs):
args = super().parse_args(*args, **kwargs)
@@ -780,25 +804,26 @@ class Dohtml(_InstallWrapper):
return args
def __str__(self):
- msg = ['dohtml:', f' Installing to: {self.opts.dest}']
+ msg = ["dohtml:", f" Installing to: {self.opts.dest}"]
if self.opts.allowed_file_exts:
msg.append(
- f" Allowed extensions: {', '.join(sorted(self.opts.allowed_file_exts))}")
+ f" Allowed extensions: {', '.join(sorted(self.opts.allowed_file_exts))}"
+ )
if self.opts.excluded_dirs:
msg.append(
- f" Allowed extensions: {', '.join(sorted(self.opts.allowed_file_exts))}")
+ f" Allowed extensions: {', '.join(sorted(self.opts.allowed_file_exts))}"
+ )
if self.opts.allowed_files:
- msg.append(
- f" Allowed files: {', '.join(sorted(self.opts.allowed_files))}")
+ msg.append(f" Allowed files: {', '.join(sorted(self.opts.allowed_files))}")
if self.opts.doc_prefix:
msg.append(f" Document prefix: {self.opts.doc_prefix!r}")
- return '\n'.join(msg)
+ return "\n".join(msg)
def _allowed_file(self, path):
"""Determine if a file is allowed to be installed."""
basename = os.path.basename(path)
ext = os.path.splitext(basename)[1][1:]
- return (ext in self.opts.allowed_file_exts or basename in self.opts.allowed_files)
+ return ext in self.opts.allowed_file_exts or basename in self.opts.allowed_files
def _install_targets(self, targets):
files, dirs = partition(targets, predicate=os.path.isdir)
@@ -809,15 +834,15 @@ class Dohtml(_InstallWrapper):
dirs = (d for d in dirs if d not in self.opts.excluded_dirs)
self.install_from_dirs(dirs)
else:
- raise IpcCommandError(f'{dirs[0]!r} is a directory, missing -r option?')
+ raise IpcCommandError(f"{dirs[0]!r} is a directory, missing -r option?")
self.install((f, os.path.basename(f)) for f in files if self._allowed_file(f))
class _AlterFiles(IpcCommand):
arg_parser = IpcArgumentParser()
- arg_parser.add_argument('-x', dest='excludes', action='store_true')
- arg_parser.add_argument('targets', nargs='+')
+ arg_parser.add_argument("-x", dest="excludes", action="store_true")
+ arg_parser.add_argument("targets", nargs="+")
default_includes = ()
default_excludes = ()
@@ -837,11 +862,11 @@ class _AlterFiles(IpcCommand):
class Docompress(_AlterFiles):
"""Python wrapper for docompress."""
- default_includes = ('/usr/share/doc', '/usr/share/info', '/usr/share/man')
+ default_includes = ("/usr/share/doc", "/usr/share/info", "/usr/share/man")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.excludes = {f'/usr/share/doc/{self.pkg.PF}/html'}
+ self.excludes = {f"/usr/share/doc/{self.pkg.PF}/html"}
class Dostrip(_AlterFiles):
@@ -849,60 +874,64 @@ class Dostrip(_AlterFiles):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- if 'strip' not in self.pkg.restrict:
- self.includes = {'/'}
+ if "strip" not in self.pkg.restrict:
+ self.includes = {"/"}
class _QueryCmd(IpcCommand):
arg_parser = IpcArgumentParser()
- arg_parser.add_argument('atom', type=atom_mod.atom)
+ arg_parser.add_argument("atom", type=atom_mod.atom)
# >= EAPI 5
host_root_parser = IpcArgumentParser()
- host_root_parser.add_argument('--host-root', action='store_true')
+ host_root_parser.add_argument("--host-root", action="store_true")
# >= EAPI 7
query_deps_parser = IpcArgumentParser()
dep_opts = query_deps_parser.add_mutually_exclusive_group()
- dep_opts.add_argument('-b', dest='bdepend', action='store_true')
- dep_opts.add_argument('-d', dest='depend', action='store_true')
- dep_opts.add_argument('-r', dest='rdepend', action='store_true')
+ dep_opts.add_argument("-b", dest="bdepend", action="store_true")
+ dep_opts.add_argument("-d", dest="depend", action="store_true")
+ dep_opts.add_argument("-r", dest="rdepend", action="store_true")
def parse_args(self, options, args):
# parse EAPI specific optionals then remaining args
if self.eapi.options.query_host_root:
- _, args = self.host_root_parser.parse_known_optionals(args, namespace=self.opts)
+ _, args = self.host_root_parser.parse_known_optionals(
+ args, namespace=self.opts
+ )
elif self.eapi.options.query_deps:
- _, args = self.query_deps_parser.parse_known_optionals(args, namespace=self.opts)
+ _, args = self.query_deps_parser.parse_known_optionals(
+ args, namespace=self.opts
+ )
args = super().parse_args(options, args)
root = None
self.opts.domain = self.op.domain
if self.eapi.options.query_host_root and self.opts.host_root:
- root = '/'
+ root = "/"
elif self.eapi.options.query_deps:
if self.opts.bdepend:
if self.pkg.eapi.options.prefix_capable:
# not using BROOT as that's only defined in src_* phases
- root = pjoin('/', self.op.env['EPREFIX'])
+ root = pjoin("/", self.op.env["EPREFIX"])
else:
- root = '/'
+ root = "/"
elif self.opts.depend:
if self.pkg.eapi.options.prefix_capable:
- root = self.op.env['ESYSROOT']
+ root = self.op.env["ESYSROOT"]
else:
- root = self.op.env['SYSROOT']
+ root = self.op.env["SYSROOT"]
else:
if self.pkg.eapi.options.prefix_capable:
- root = self.op.env['EROOT']
+ root = self.op.env["EROOT"]
else:
- root = self.op.env['ROOT']
+ root = self.op.env["ROOT"]
# TODO: find domain from given path, pointless until full prefix support works
if root and root != self.opts.domain.root:
- raise IpcCommandError('prefix support not implemented yet')
+ raise IpcCommandError("prefix support not implemented yet")
return args
@@ -927,25 +956,29 @@ class Eapply(IpcCommand):
"""Python wrapper for eapply."""
arg_parser = IpcArgumentParser()
- arg_parser.add_argument('targets', nargs='+', type=existing_path)
+ arg_parser.add_argument("targets", nargs="+", type=existing_path)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.patch_cmd = ['patch', '-p1', '-f', '-s', '-g0', '--no-backup-if-mismatch']
+ self.patch_cmd = ["patch", "-p1", "-f", "-s", "-g0", "--no-backup-if-mismatch"]
self.patch_opts = []
def _parse_patch_opts(self, args):
patch_opts = []
files = []
for i, arg in enumerate(args):
- if arg == '--':
+ if arg == "--":
if files:
- raise IpcCommandError('options must be specified before file arguments')
- files = args[i + 1:]
+ raise IpcCommandError(
+ "options must be specified before file arguments"
+ )
+ files = args[i + 1 :]
break
- elif arg.startswith('-'):
+ elif arg.startswith("-"):
if files:
- raise IpcCommandError('options must be specified before file arguments')
+ raise IpcCommandError(
+ "options must be specified before file arguments"
+ )
patch_opts.append(arg)
else:
files.append(arg)
@@ -956,55 +989,60 @@ class Eapply(IpcCommand):
if os.path.isdir(path):
for root, _dirs, files in os.walk(path):
patches = [
- pjoin(root, f) for f in sorted(files, key=locale.strxfrm)
- if f.endswith(('.diff', '.patch'))]
+ pjoin(root, f)
+ for f in sorted(files, key=locale.strxfrm)
+ if f.endswith((".diff", ".patch"))
+ ]
if not patches:
- raise IpcCommandError(f'no patches in directory: {path!r}')
+ raise IpcCommandError(f"no patches in directory: {path!r}")
yield path, patches
else:
yield None, [path]
def parse_args(self, options, args):
args, self.patch_opts = self._parse_patch_opts(args)
- args = super().parse_args(options, ['--'] + args)
+ args = super().parse_args(options, ["--"] + args)
return self._find_patches(args.targets)
def run(self, args, user=False):
if user:
- patch_type = 'user patches'
+ patch_type = "user patches"
output_func = self.observer.warn
else:
- patch_type = 'patches'
+ patch_type = "patches"
output_func = self.observer.info
- spawn_kwargs = {'collect_fds': (1, 2)}
+ spawn_kwargs = {"collect_fds": (1, 2)}
if self.op.userpriv:
- spawn_kwargs['uid'] = os_data.portage_uid
- spawn_kwargs['gid'] = os_data.portage_gid
+ spawn_kwargs["uid"] = os_data.portage_uid
+ spawn_kwargs["gid"] = os_data.portage_gid
for path, patches in args:
- prefix = ''
+ prefix = ""
if path is not None:
- output_func(f'Applying {patch_type} from {path!r}:')
- prefix = ' '
+ output_func(f"Applying {patch_type} from {path!r}:")
+ prefix = " "
for patch in patches:
if path is None:
- output_func(f'{prefix}Applying {os.path.basename(patch)}...')
+ output_func(f"{prefix}Applying {os.path.basename(patch)}...")
else:
- output_func(f'{prefix}{os.path.basename(patch)}...')
+ output_func(f"{prefix}{os.path.basename(patch)}...")
self.observer.flush()
try:
with open(patch) as f:
ret, output = spawn.spawn_get_output(
self.patch_cmd + self.patch_opts,
- fd_pipes={0: f.fileno()}, **spawn_kwargs)
+ fd_pipes={0: f.fileno()},
+ **spawn_kwargs,
+ )
if ret:
filename = os.path.basename(patch)
- msg = f'applying {filename!r} failed: {output[0]}'
+ msg = f"applying {filename!r} failed: {output[0]}"
raise IpcCommandError(msg, code=ret)
except OSError as e:
raise IpcCommandError(
- f'failed reading patch file: {patch!r}: {e.strerror}')
+ f"failed reading patch file: {patch!r}: {e.strerror}"
+ )
class Eapply_User(IpcCommand):
@@ -1015,30 +1053,32 @@ class Eapply_User(IpcCommand):
def run(self, args):
if self.pkg.user_patches:
- self.op._ipc_helpers['eapply'].run(self.pkg.user_patches, user=True)
+ self.op._ipc_helpers["eapply"].run(self.pkg.user_patches, user=True)
# create marker to skip additionals calls
patches = itertools.chain.from_iterable(
- files for _, files in self.pkg.user_patches)
- with open(pjoin(self.op.env['T'], '.user_patches_applied'), 'w') as f:
- f.write('\n'.join(patches))
+ files for _, files in self.pkg.user_patches
+ )
+ with open(pjoin(self.op.env["T"], ".user_patches_applied"), "w") as f:
+ f.write("\n".join(patches))
class Unpack(IpcCommand):
arg_parser = IpcArgumentParser()
- arg_parser.add_argument('targets', nargs='+')
+ arg_parser.add_argument("targets", nargs="+")
_file_mode = (
- stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
- | stat.S_IWUSR
- & ~stat.S_IWGRP & ~stat.S_IWOTH
+ stat.S_IRUSR
+ | stat.S_IRGRP
+ | stat.S_IROTH
+ | stat.S_IWUSR & ~stat.S_IWGRP & ~stat.S_IWOTH
)
_dir_mode = _file_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
def parse_args(self, *args, **kwargs):
args = super().parse_args(*args, **kwargs)
- self.opts.distdir = self.op.env['DISTDIR']
+ self.opts.distdir = self.op.env["DISTDIR"]
return args
def _filter_targets(self, targets):
@@ -1046,38 +1086,42 @@ class Unpack(IpcCommand):
if os.path.sep not in archive:
# regular filename get prefixed with ${DISTDIR}
srcdir = self.opts.distdir
- elif archive.startswith('./'):
+ elif archive.startswith("./"):
# relative paths get passed through
- srcdir = ''
+ srcdir = ""
else:
srcdir = self.opts.distdir
# >= EAPI 6 allows absolute paths
if self.eapi.options.unpack_absolute_paths:
- srcdir = ''
+ srcdir = ""
if archive.startswith(self.opts.distdir):
self.warn(
- f'argument contains redundant ${{DISTDIR}}: {archive!r}')
+ f"argument contains redundant ${{DISTDIR}}: {archive!r}"
+ )
elif archive.startswith(self.opts.distdir):
raise IpcCommandError(
- f'arguments must not begin with ${{DISTDIR}}: {archive!r}')
+ f"arguments must not begin with ${{DISTDIR}}: {archive!r}"
+ )
elif archive.startswith(os.path.sep):
raise IpcCommandError(
- f'arguments must not be absolute paths: {archive!r}')
+ f"arguments must not be absolute paths: {archive!r}"
+ )
else:
raise IpcCommandError(
- 'relative paths must be prefixed with '
- f"'./' in EAPI {self.eapi}")
+ "relative paths must be prefixed with "
+ f"'./' in EAPI {self.eapi}"
+ )
path = pjoin(srcdir, archive)
if not os.path.exists(path):
- raise IpcCommandError(f'nonexistent file: {archive!r}')
+ raise IpcCommandError(f"nonexistent file: {archive!r}")
elif os.stat(path).st_size == 0:
- raise IpcCommandError(f'empty file: {archive!r}')
+ raise IpcCommandError(f"empty file: {archive!r}")
match = self.eapi.archive_exts_regex.search(archive)
if not match:
- self.warn(f'skipping unrecognized file format: {archive!r}')
+ self.warn(f"skipping unrecognized file format: {archive!r}")
continue
ext = match.group(1)
@@ -1085,14 +1129,16 @@ class Unpack(IpcCommand):
def run(self, args):
spawn_kwargs = {}
- if self.op.userpriv and self.phase == 'unpack':
- spawn_kwargs['uid'] = os_data.portage_uid
- spawn_kwargs['gid'] = os_data.portage_gid
+ if self.op.userpriv and self.phase == "unpack":
+ spawn_kwargs["uid"] = os_data.portage_uid
+ spawn_kwargs["gid"] = os_data.portage_gid
for filename, ext, source in self._filter_targets(args.targets):
- self.observer.write(f'>>> Unpacking {filename} to {self.cwd}', autoline=True)
+ self.observer.write(
+ f">>> Unpacking {filename} to {self.cwd}", autoline=True
+ )
self.observer.flush()
- dest = pjoin(self.cwd, os.path.basename(filename[:-len(ext)]))
+ dest = pjoin(self.cwd, os.path.basename(filename[: -len(ext)]))
try:
target = ArComp(source, ext=ext)
target.unpack(dest=dest, **spawn_kwargs)
@@ -1114,24 +1160,38 @@ class FilterEnv(IpcCommand):
arg_parser = IpcArgumentParser()
filtering = arg_parser.add_argument_group("Environment filtering options")
filtering.add_argument(
- '-V', '--var-match', action='store_true', default=False,
+ "-V",
+ "--var-match",
+ action="store_true",
+ default=False,
help="invert the filtering- instead of removing a var if it matches "
- "remove all vars that do not match")
+ "remove all vars that do not match",
+ )
filtering.add_argument(
- '-F', '--func-match', action='store_true', default=False,
+ "-F",
+ "--func-match",
+ action="store_true",
+ default=False,
help="invert the filtering- instead of removing a function if it matches "
- "remove all functions that do not match")
+ "remove all functions that do not match",
+ )
filtering.add_argument(
- '-f', '--funcs', action='csv',
- help="comma separated list of regexes to match function names against for filtering")
+ "-f",
+ "--funcs",
+ action="csv",
+ help="comma separated list of regexes to match function names against for filtering",
+ )
filtering.add_argument(
- '-v', '--vars', action='csv',
- help="comma separated list of regexes to match variable names against for filtering")
- arg_parser.add_argument('files', nargs=2)
+ "-v",
+ "--vars",
+ action="csv",
+ help="comma separated list of regexes to match variable names against for filtering",
+ )
+ arg_parser.add_argument("files", nargs=2)
def run(self, args):
src_path, dest_path = args.files
- with open(src_path) as src, open(dest_path, 'wb') as dest:
+ with open(src_path) as src, open(dest_path, "wb") as dest:
filter_env.main_run(
- dest, src.read(), args.vars, args.funcs,
- args.var_match, args.func_match)
+ dest, src.read(), args.vars, args.funcs, args.var_match, args.func_match
+ )
diff --git a/src/pkgcore/ebuild/ebuild_built.py b/src/pkgcore/ebuild/ebuild_built.py
index a9f1a22d2..5cd9c512b 100644
--- a/src/pkgcore/ebuild/ebuild_built.py
+++ b/src/pkgcore/ebuild/ebuild_built.py
@@ -53,16 +53,18 @@ class package(ebuild_src.base):
# if returned data is effectively empty). Finally, note that this just maps the list across;
# it's expected that certain attributes that are known to have no meaning for a 'built' package
# are nulled (for example, fetchables: nothing to fetch).
- locals().update({
- attr_name: DynamicGetattrSetter.register(
- post_curry(
- _render_and_evaluate_attr,
- ebuild_src.package._get_attr[attr_name],
- render_func
+ locals().update(
+ {
+ attr_name: DynamicGetattrSetter.register(
+ post_curry(
+ _render_and_evaluate_attr,
+ ebuild_src.package._get_attr[attr_name],
+ render_func,
+ )
)
- )
- for attr_name, render_func in ebuild_src.package._config_wrappables.items()
- })
+ for attr_name, render_func in ebuild_src.package._config_wrappables.items()
+ }
+ )
@property
def _operations(self):
@@ -73,18 +75,20 @@ class package(ebuild_src.base):
built = True
- cbuild = DynamicGetattrSetter.register(partial(_chost_fallback, 'CBUILD'))
- chost = DynamicGetattrSetter.register(partial(_chost_fallback, 'CHOST'))
- ctarget = DynamicGetattrSetter.register(partial(_chost_fallback, 'CTARGET'))
- contents = DynamicGetattrSetter.register(post_curry(_passthrough, 'contents'))
- environment = DynamicGetattrSetter.register(post_curry(_passthrough, 'environment'))
+ cbuild = DynamicGetattrSetter.register(partial(_chost_fallback, "CBUILD"))
+ chost = DynamicGetattrSetter.register(partial(_chost_fallback, "CHOST"))
+ ctarget = DynamicGetattrSetter.register(partial(_chost_fallback, "CTARGET"))
+ contents = DynamicGetattrSetter.register(post_curry(_passthrough, "contents"))
+ environment = DynamicGetattrSetter.register(post_curry(_passthrough, "environment"))
@property
def tracked_attributes(self):
# tracked attributes varies depending on EAPI, thus this has to be runtime computed
- return tuple(itertools.chain(
- super().tracked_attributes, ('contents', 'use', 'environment')
- ))
+ return tuple(
+ itertools.chain(
+ super().tracked_attributes, ("contents", "use", "environment")
+ )
+ )
@DynamicGetattrSetter.register
def cflags(self):
@@ -116,14 +120,14 @@ class package(ebuild_src.base):
@DynamicGetattrSetter.register
def source_repository(self):
- repo = self.data.get('source_repository')
+ repo = self.data.get("source_repository")
if repo is None:
- repo = self.data.get('repository')
+ repo = self.data.get("repository")
# work around managers storing this in different places.
if repo is None:
# finally, do the strip ourselves since this can come
# back as '\n' from binpkg Packages caches...
- repo = self.data.get('REPO', '').strip()
+ repo = self.data.get("REPO", "").strip()
if not repo:
repo = None
if isinstance(repo, str):
@@ -131,7 +135,9 @@ class package(ebuild_src.base):
return repo if repo else None
@DynamicGetattrSetter.register
- def fetchables(self, ret=conditionals.DepSet.parse('', fetch.fetchable, operators={})):
+ def fetchables(
+ self, ret=conditionals.DepSet.parse("", fetch.fetchable, operators={})
+ ):
return ret
@DynamicGetattrSetter.register
@@ -145,7 +151,7 @@ class package(ebuild_src.base):
eapi_magic = self.data.pop("EAPI", "0")
if not eapi_magic:
# "" means EAPI 0
- eapi_magic = '0'
+ eapi_magic = "0"
eapi = get_eapi(str(eapi_magic).strip())
# This can return None... definitely the wrong thing right now
# for an unsupported eapi. Fix it later.
@@ -194,9 +200,12 @@ class fresh_built_package(package):
def generic_format_triggers(self, pkg, op_inst, format_op_inst, engine_inst):
- if (engine_inst.mode in (engine.REPLACE_MODE, engine.INSTALL_MODE)
- and pkg == engine_inst.new and pkg.repo is engine_inst.new.repo):
- if 'preinst' in pkg.mandatory_phases:
+ if (
+ engine_inst.mode in (engine.REPLACE_MODE, engine.INSTALL_MODE)
+ and pkg == engine_inst.new
+ and pkg.repo is engine_inst.new.repo
+ ):
+ if "preinst" in pkg.mandatory_phases:
t = triggers.preinst_contents_reset(format_op_inst)
t.register(engine_inst)
# for ebuild format, always check the syms.
diff --git a/src/pkgcore/ebuild/ebuild_src.py b/src/pkgcore/ebuild/ebuild_src.py
index 0a2d836c3..87a02295e 100644
--- a/src/pkgcore/ebuild/ebuild_src.py
+++ b/src/pkgcore/ebuild/ebuild_src.py
@@ -28,9 +28,9 @@ from .eapi import get_eapi
from .misc import sort_keywords
demand_compile_regexp(
- '_EAPI_regex', r"^EAPI=(['\"]?)(?P<EAPI>[A-Za-z0-9+_.-]*)\1[\t ]*(?:#.*)?")
-demand_compile_regexp(
- '_EAPI_str_regex', r"^EAPI=(['\"]?)(?P<EAPI>.*)\1")
+ "_EAPI_regex", r"^EAPI=(['\"]?)(?P<EAPI>[A-Za-z0-9+_.-]*)\1[\t ]*(?:#.*)?"
+)
+demand_compile_regexp("_EAPI_str_regex", r"^EAPI=(['\"]?)(?P<EAPI>.*)\1")
class base(metadata.package):
@@ -43,18 +43,28 @@ class base(metadata.package):
_config_wrappables = {
x: klass.alias_method("evaluate_depset")
for x in (
- "bdepend", "depend", "rdepend", "pdepend", "idepend",
- "fetchables", "license", "restrict", "required_use",
+ "bdepend",
+ "depend",
+ "rdepend",
+ "pdepend",
+ "idepend",
+ "fetchables",
+ "license",
+ "restrict",
+ "required_use",
)
}
- __slots__ = ('_pkg_metadata_shared',)
+ __slots__ = ("_pkg_metadata_shared",)
def _generate_depset(self, kls, key):
return conditionals.DepSet.parse(
- self.data.pop(key, ""), kls,
- attr=key, element_func=self.eapi.atom_kls,
- transitive_use_atoms=self.eapi.options.transitive_use_atoms)
+ self.data.pop(key, ""),
+ kls,
+ attr=key,
+ element_func=self.eapi.atom_kls,
+ transitive_use_atoms=self.eapi.options.transitive_use_atoms,
+ )
@DynamicGetattrSetter.register
def bdepend(self):
@@ -83,47 +93,58 @@ class base(metadata.package):
@DynamicGetattrSetter.register
def license(self):
return conditionals.DepSet.parse(
- self.data.pop('LICENSE', ''), str,
- operators={
- '||': boolean.OrRestriction,
- '': boolean.AndRestriction},
- attr='LICENSE', element_func=intern)
+ self.data.pop("LICENSE", ""),
+ str,
+ operators={"||": boolean.OrRestriction, "": boolean.AndRestriction},
+ attr="LICENSE",
+ element_func=intern,
+ )
@DynamicGetattrSetter.register
def fullslot(self):
- slot = self.data.get('SLOT', None)
+ slot = self.data.get("SLOT", None)
if not slot:
raise metadata_errors.MetadataException(
- self, 'slot', 'SLOT cannot be unset or empty')
+ self, "slot", "SLOT cannot be unset or empty"
+ )
if not self.eapi.valid_slot_regex.match(slot):
raise metadata_errors.MetadataException(
- self, 'slot', f'invalid SLOT: {slot!r}')
+ self, "slot", f"invalid SLOT: {slot!r}"
+ )
return slot
@DynamicGetattrSetter.register
def subslot(self):
- slot, _sep, subslot = self.fullslot.partition('/')
+ slot, _sep, subslot = self.fullslot.partition("/")
if not subslot:
return slot
return subslot
@DynamicGetattrSetter.register
def slot(self):
- return self.fullslot.partition('/')[0]
+ return self.fullslot.partition("/")[0]
def create_fetchable_from_uri(
- self, chksums, ignore_missing_chksums, ignore_unknown_mirrors,
- mirrors, default_mirrors, common_files, uri, filename=None):
+ self,
+ chksums,
+ ignore_missing_chksums,
+ ignore_unknown_mirrors,
+ mirrors,
+ default_mirrors,
+ common_files,
+ uri,
+ filename=None,
+ ):
default_filename = os.path.basename(uri)
if filename is not None:
# log redundant renames for pkgcheck to flag
if filename == default_filename:
- logger.info(f'redundant rename: {uri} -> {filename}')
+ logger.info(f"redundant rename: {uri} -> {filename}")
else:
filename = default_filename
if not filename:
- raise ValueError(f'missing filename: {uri!r}')
+ raise ValueError(f"missing filename: {uri!r}")
preexisting = common_files.get(filename)
@@ -142,11 +163,11 @@ class base(metadata.package):
unrestrict_mirror = unrestrict_fetch = False
if self.eapi.options.src_uri_unrestrict:
# mirror unrestriction implies fetch unrestriction
- unrestrict_mirror = uri.startswith('mirror+')
- unrestrict_fetch = uri.startswith('fetch+') or unrestrict_mirror
+ unrestrict_mirror = uri.startswith("mirror+")
+ unrestrict_fetch = uri.startswith("fetch+") or unrestrict_mirror
if unrestrict_fetch:
# strip the prefix
- uri = uri.partition('+')[2]
+ uri = uri.partition("+")[2]
allow_mirror = pkg_allow_mirror or unrestrict_mirror
@@ -168,16 +189,24 @@ class base(metadata.package):
uris.add_mirror(default_mirrors)
if preexisting is None:
- common_files[filename] = fetch.fetchable(filename, uris, chksums.get(filename))
+ common_files[filename] = fetch.fetchable(
+ filename, uris, chksums.get(filename)
+ )
return common_files[filename]
- def generate_fetchables(self, allow_missing_checksums=False,
- ignore_unknown_mirrors=False, skip_default_mirrors=False):
+ def generate_fetchables(
+ self,
+ allow_missing_checksums=False,
+ ignore_unknown_mirrors=False,
+ skip_default_mirrors=False,
+ ):
"""Generate fetchables object for a package."""
- chksums_can_be_missing = allow_missing_checksums or \
- bool(getattr(self.repo, '_allow_missing_chksums', False))
+ chksums_can_be_missing = allow_missing_checksums or bool(
+ getattr(self.repo, "_allow_missing_chksums", False)
+ )
chksums_can_be_missing, chksums = self.repo._get_digests(
- self, allow_missing=chksums_can_be_missing)
+ self, allow_missing=chksums_can_be_missing
+ )
mirrors = getattr(self._parent, "mirrors", {})
if skip_default_mirrors:
@@ -186,17 +215,26 @@ class base(metadata.package):
default_mirrors = getattr(self._parent, "default_mirrors", None)
common = {}
func = partial(
- self.create_fetchable_from_uri, chksums,
- chksums_can_be_missing, ignore_unknown_mirrors,
- mirrors, default_mirrors, common)
+ self.create_fetchable_from_uri,
+ chksums,
+ chksums_can_be_missing,
+ ignore_unknown_mirrors,
+ mirrors,
+ default_mirrors,
+ common,
+ )
try:
d = conditionals.DepSet.parse(
- self.data.get("SRC_URI", ""), fetch.fetchable, operators={},
- element_func=func, attr='SRC_URI',
- allow_src_uri_file_renames=self.eapi.options.src_uri_renames)
+ self.data.get("SRC_URI", ""),
+ fetch.fetchable,
+ operators={},
+ element_func=func,
+ attr="SRC_URI",
+ allow_src_uri_file_renames=self.eapi.options.src_uri_renames,
+ )
except ebuild_errors.DepsetParseError as e:
- raise metadata_errors.MetadataException(self, 'fetchables', str(e))
+ raise metadata_errors.MetadataException(self, "fetchables", str(e))
for v in common.values():
v.uri.finalize()
@@ -212,10 +250,15 @@ class base(metadata.package):
if filename is not None:
return filename
return os.path.basename(uri)
+
return conditionals.DepSet.parse(
- self.data.get("SRC_URI", ''), str, operators={}, attr='SRC_URI',
+ self.data.get("SRC_URI", ""),
+ str,
+ operators={},
+ attr="SRC_URI",
element_func=partial(_extract_distfile_from_uri),
- allow_src_uri_file_renames=self.eapi.options.src_uri_renames)
+ allow_src_uri_file_renames=self.eapi.options.src_uri_renames,
+ )
@DynamicGetattrSetter.register
def description(self):
@@ -233,13 +276,13 @@ class base(metadata.package):
@DynamicGetattrSetter.register
def restrict(self):
return conditionals.DepSet.parse(
- self.data.pop("RESTRICT", ''), str, operators={},
- attr='RESTRICT')
+ self.data.pop("RESTRICT", ""), str, operators={}, attr="RESTRICT"
+ )
@DynamicGetattrSetter.register
def eapi(self):
ebuild = self.ebuild
- eapi = '0'
+ eapi = "0"
if ebuild.path:
# Use readlines directly since it does whitespace stripping
# for us, far faster than native python can.
@@ -247,20 +290,20 @@ class base(metadata.package):
else:
i = (x.strip() for x in ebuild.text_fileobj())
for line in i:
- if line[0:1] in ('', '#'):
+ if line[0:1] in ("", "#"):
continue
- if (mo := _EAPI_str_regex.match(line)) and (eapi_str := mo.group('EAPI')):
+ if (mo := _EAPI_str_regex.match(line)) and (eapi_str := mo.group("EAPI")):
eapi = eapi_str
break
i.close()
try:
return get_eapi(eapi)
except ValueError as e:
- error = str(e) if eapi else f'{e}: {eapi_str!r}'
- raise metadata_errors.MetadataException(self, 'eapi', error)
+ error = str(e) if eapi else f"{e}: {eapi_str!r}"
+ raise metadata_errors.MetadataException(self, "eapi", error)
- is_supported = klass.alias_attr('eapi.is_supported')
- tracked_attributes = klass.alias_attr('eapi.tracked_attributes')
+ is_supported = klass.alias_attr("eapi.is_supported")
+ tracked_attributes = klass.alias_attr("eapi.tracked_attributes")
@DynamicGetattrSetter.register
def iuse(self):
@@ -269,7 +312,7 @@ class base(metadata.package):
@property
def iuse_stripped(self):
if self.eapi.options.iuse_defaults:
- return frozenset(x.lstrip('-+') if len(x) > 1 else x for x in self.iuse)
+ return frozenset(x.lstrip("-+") if len(x) > 1 else x for x in self.iuse)
return self.iuse
iuse_effective = klass.alias_attr("iuse_stripped")
@@ -281,13 +324,14 @@ class base(metadata.package):
@DynamicGetattrSetter.register
def properties(self):
return conditionals.DepSet.parse(
- self.data.pop("PROPERTIES", ''), str, operators={},
- attr='PROPERTIES')
+ self.data.pop("PROPERTIES", ""), str, operators={}, attr="PROPERTIES"
+ )
@DynamicGetattrSetter.register
def defined_phases(self):
return self.eapi.interpret_cache_defined_phases(
- map(intern, self.data.pop("DEFINED_PHASES", "").split()))
+ map(intern, self.data.pop("DEFINED_PHASES", "").split())
+ )
@DynamicGetattrSetter.register
def homepage(self):
@@ -305,7 +349,7 @@ class base(metadata.package):
@staticmethod
def _mk_required_use_node(data):
- if data[0] == '!':
+ if data[0] == "!":
return values.ContainmentMatch(data[1:], negate=True)
return values.ContainmentMatch(data)
@@ -317,22 +361,28 @@ class base(metadata.package):
operators = {
"||": boolean.OrRestriction,
"": boolean.AndRestriction,
- "^^": boolean.JustOneRestriction
+ "^^": boolean.JustOneRestriction,
}
def _invalid_op(msg, *args):
- raise metadata_errors.MetadataException(self, 'eapi', f'REQUIRED_USE: {msg}')
+ raise metadata_errors.MetadataException(
+ self, "eapi", f"REQUIRED_USE: {msg}"
+ )
if self.eapi.options.required_use_one_of:
- operators['??'] = boolean.AtMostOneOfRestriction
+ operators["??"] = boolean.AtMostOneOfRestriction
else:
- operators['??'] = partial(
- _invalid_op, f"EAPI '{self.eapi}' doesn't support '??' operator")
+ operators["??"] = partial(
+ _invalid_op, f"EAPI '{self.eapi}' doesn't support '??' operator"
+ )
return conditionals.DepSet.parse(
data,
- values.ContainmentMatch, operators=operators,
- element_func=self._mk_required_use_node, attr='REQUIRED_USE')
+ values.ContainmentMatch,
+ operators=operators,
+ element_func=self._mk_required_use_node,
+ attr="REQUIRED_USE",
+ )
return conditionals.DepSet()
source_repository = klass.alias_attr("repo.repo_id")
@@ -343,12 +393,11 @@ class base(metadata.package):
@property
def mandatory_phases(self):
- return frozenset(
- chain(self.defined_phases, self.eapi.default_phases))
+ return frozenset(chain(self.defined_phases, self.eapi.default_phases))
@property
def live(self):
- return 'live' in self.properties
+ return "live" in self.properties
@property
def P(self):
@@ -360,7 +409,7 @@ class base(metadata.package):
@property
def PR(self):
- return f'r{self.revision}'
+ return f"r{self.revision}"
@property
def path(self):
@@ -393,7 +442,9 @@ class package(base):
local_use = klass.alias_attr("_shared_pkg_data.metadata_xml.local_use")
longdescription = klass.alias_attr("_shared_pkg_data.metadata_xml.longdescription")
manifest = klass.alias_attr("_shared_pkg_data.manifest")
- stabilize_allarches = klass.alias_attr("_shared_pkg_data.metadata_xml.stabilize_allarches")
+ stabilize_allarches = klass.alias_attr(
+ "_shared_pkg_data.metadata_xml.stabilize_allarches"
+ )
@property
def _mtime_(self):
@@ -416,8 +467,9 @@ class package_factory(metadata.factory):
# For the plugin system.
priority = 5
- def __init__(self, parent, cachedb, eclass_cache, mirrors, default_mirrors,
- *args, **kwargs):
+ def __init__(
+ self, parent, cachedb, eclass_cache, mirrors, default_mirrors, *args, **kwargs
+ ):
super().__init__(parent, *args, **kwargs)
self._cache = cachedb
self._ecache = eclass_cache
@@ -427,7 +479,9 @@ class package_factory(metadata.factory):
self.mirrors = mirrors
if default_mirrors:
- self.default_mirrors = fetch.default_mirror(default_mirrors, "conf. default mirror")
+ self.default_mirrors = fetch.default_mirror(
+ default_mirrors, "conf. default mirror"
+ )
else:
self.default_mirrors = None
@@ -466,36 +520,39 @@ class package_factory(metadata.factory):
def _update_metadata(self, pkg, ebp=None):
parsed_eapi = pkg.eapi
if not parsed_eapi.is_supported:
- return {'EAPI': str(parsed_eapi)}
+ return {"EAPI": str(parsed_eapi)}
with processor.reuse_or_request(ebp) as my_proc:
try:
mydata = my_proc.get_keys(pkg, self._ecache)
except processor.ProcessorError as e:
raise metadata_errors.MetadataException(
- pkg, 'data', 'failed sourcing ebuild', e)
+ pkg, "data", "failed sourcing ebuild", e
+ )
# Rewrite defined_phases as needed, since we now know the EAPI.
- eapi = get_eapi(mydata.get('EAPI', '0'))
+ eapi = get_eapi(mydata.get("EAPI", "0"))
if parsed_eapi != eapi:
raise metadata_errors.MetadataException(
- pkg, 'eapi',
- f"parsed EAPI '{parsed_eapi}' doesn't match sourced EAPI '{eapi}'")
+ pkg,
+ "eapi",
+ f"parsed EAPI '{parsed_eapi}' doesn't match sourced EAPI '{eapi}'",
+ )
wipes = set(mydata)
wipes.difference_update(eapi.metadata_keys)
- if mydata["DEFINED_PHASES"] != '-':
+ if mydata["DEFINED_PHASES"] != "-":
phases = mydata["DEFINED_PHASES"].split()
d = eapi.phases_rev
phases = set(d.get(x) for x in phases)
# Discard is required should we have gotten
# a phase that isn't actually in this EAPI.
phases.discard(None)
- mydata["DEFINED_PHASES"] = ' '.join(sorted(phases))
+ mydata["DEFINED_PHASES"] = " ".join(sorted(phases))
if inherited := mydata.pop("INHERITED", None):
mydata["_eclasses_"] = self._ecache.get_eclass_data(inherited.split())
- mydata['_chf_'] = chksum.LazilyHashedPath(pkg.path)
+ mydata["_chf_"] = chksum.LazilyHashedPath(pkg.path)
for x in wipes:
del mydata[x]
@@ -523,8 +580,7 @@ class package_factory(metadata.factory):
if inst is None:
# key being cat/pkg
mxml = self._parent_repo._get_shared_pkg_data(args[0], args[1])
- inst = self._cached_instances[args] = self.child_class(
- mxml, self, *args)
+ inst = self._cached_instances[args] = self.child_class(mxml, self, *args)
return inst
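The same pattern repeats through the rest of ebuild_src.py: once a call or signature exceeds the line limit, every argument moves to its own line and gains a trailing comma. A hedged sketch for verifying that a checkout already matches this style without rewriting anything, assuming black is installed in the active environment and the command is run from the repository root:

import subprocess
import sys

# --check reports files that would change; --diff prints the would-be hunks.
# Exit status 0 means the tree is already formatted, 1 means black would rewrite files.
result = subprocess.run(
    [sys.executable, "-m", "black", "--check", "--diff", "src/pkgcore/ebuild"],
    capture_output=True,
    text=True,
)
print(result.returncode)
print(result.stdout)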
diff --git a/src/pkgcore/ebuild/eclass.py b/src/pkgcore/ebuild/eclass.py
index 9c7d1fd20..66e07766a 100644
--- a/src/pkgcore/ebuild/eclass.py
+++ b/src/pkgcore/ebuild/eclass.py
@@ -26,7 +26,7 @@ class AttrDict(ImmutableDict):
object.__getattribute__(self, name)
except AttributeError as e:
try:
- return object.__getattribute__(self, '_dict')[name]
+ return object.__getattribute__(self, "_dict")[name]
except KeyError:
raise e
@@ -41,7 +41,7 @@ def _rst_header(char, text, leading=False, newline=False):
if leading:
data = [sep] + data
if newline:
- data.append('')
+ data.append("")
return data
@@ -71,18 +71,21 @@ class ParseEclassDoc:
def __init__(self, tags):
self.tags = tags
# regex matching all known tags for the eclass doc block
- self._block_tags_re = re.compile(rf'^(?P<tag>{"|".join(self.tags)})(?P<value>.*)')
+ self._block_tags_re = re.compile(
+ rf'^(?P<tag>{"|".join(self.tags)})(?P<value>.*)'
+ )
# regex matching @CODE tags
- self._code_tag = re.compile(r'^\s*@CODE\s*$')
+ self._code_tag = re.compile(r"^\s*@CODE\s*$")
# regex matching @SUBSECTION tags
- self._subsection_tag = re.compile(r'^\s*@SUBSECTION (?P<title>.+)$')
+ self._subsection_tag = re.compile(r"^\s*@SUBSECTION (?P<title>.+)$")
def _tag_bool(self, block, tag, lineno):
"""Parse boolean tags."""
try:
args = next(x for x in block if x)
logger.warning(
- f'{repr(tag)}, line {lineno}: tag takes no args, got {repr(args)}')
+ f"{repr(tag)}, line {lineno}: tag takes no args, got {repr(args)}"
+ )
except StopIteration:
pass
return True
@@ -90,9 +93,9 @@ class ParseEclassDoc:
def _tag_inline_arg(self, block, tag, lineno):
"""Parse tags with inline argument."""
if not block[0]:
- logger.warning(f'{repr(tag)}, line {lineno}: missing inline arg')
+ logger.warning(f"{repr(tag)}, line {lineno}: missing inline arg")
elif len(block) > 1:
- logger.warning(f'{repr(tag)}, line {lineno}: non-inline arg')
+ logger.warning(f"{repr(tag)}, line {lineno}: non-inline arg")
return block[0]
def _tag_inline_list(self, block, tag, lineno):
@@ -103,9 +106,9 @@ class ParseEclassDoc:
def _tag_multiline_args(self, block, tag, lineno):
"""Parse tags with multiline arguments."""
if block[0]:
- logger.warning(f'{repr(tag)}, line {lineno}: invalid inline arg')
+ logger.warning(f"{repr(tag)}, line {lineno}: invalid inline arg")
if not block[1:]:
- logger.warning(f'{repr(tag)}, line {lineno}: missing args')
+ logger.warning(f"{repr(tag)}, line {lineno}: missing args")
return tuple(block[1:])
def _tag_multiline_str(self, block, tag, lineno):
@@ -115,31 +118,31 @@ class ParseEclassDoc:
return None
# use literal blocks for all multiline text
- data = ['::', '\n\n']
+ data = ["::", "\n\n"]
for i, line in enumerate(lines, 1):
if self._code_tag.match(line):
continue
elif mo := self._subsection_tag.match(line):
- header = _rst_header('~', mo.group('title'))
- data.extend(f'{x}\n' for x in header)
- data.extend(['::', '\n\n'])
+ header = _rst_header("~", mo.group("title"))
+ data.extend(f"{x}\n" for x in header)
+ data.extend(["::", "\n\n"])
elif line:
- data.append(f' {line}\n')
+ data.append(f" {line}\n")
else:
- data.append('\n')
+ data.append("\n")
- return ''.join(data).rstrip('\n')
+ return "".join(data).rstrip("\n")
def _tag_multiline_rst(self, block, tag, lineno):
"""Parse tags with multiline rST formatting."""
lines = self._tag_multiline_args(block, tag, lineno)
- return ''.join(lines).rstrip('\n')
+ return "".join(lines).rstrip("\n")
def _tag_deprecated(self, block, tag, lineno):
"""Parse deprecated tags."""
arg = self._tag_inline_arg(block, tag, lineno)
- return True if arg.lower() == 'none' else arg
+ return True if arg.lower() == "none" else arg
@klass.jit_attr
def _required(self):
@@ -156,8 +159,11 @@ class ParseEclassDoc:
variables = []
# use no-op to fake a pipeline so pipeline specific vars are defined
p = subprocess.run(
- ['bash', '-c', ':; compgen -A variable'],
- stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, encoding='utf8')
+ ["bash", "-c", ":; compgen -A variable"],
+ stderr=subprocess.DEVNULL,
+ stdout=subprocess.PIPE,
+ encoding="utf8",
+ )
if p.returncode == 0:
variables = p.stdout.splitlines()
return frozenset(variables)
@@ -176,10 +182,10 @@ class ParseEclassDoc:
# split eclass doc block into separate blocks by tag
for i, line in enumerate(lines):
- if (mo := self._block_tags_re.match(line)):
- tag = mo.group('tag')
+ if mo := self._block_tags_re.match(line):
+ tag = mo.group("tag")
missing_tags.discard(tag)
- value = mo.group('value').strip()
+ value = mo.group("value").strip()
blocks.append((tag, line_ind + i, [value]))
else:
blocks[-1][-1].append(line)
@@ -191,10 +197,9 @@ class ParseEclassDoc:
# check if any required tags are missing
if missing_tags:
- missing_tags_str = ', '.join(map(repr, missing_tags))
+ missing_tags_str = ", ".join(map(repr, missing_tags))
s = pluralism(missing_tags)
- logger.warning(
- f'{repr(lines[0])}: missing tag{s}: {missing_tags_str}')
+ logger.warning(f"{repr(lines[0])}: missing tag{s}: {missing_tags_str}")
return AttrDict(data)
@@ -202,21 +207,21 @@ class ParseEclassDoc:
class EclassBlock(ParseEclassDoc):
"""ECLASS doc block."""
- tag = '@ECLASS:'
+ tag = "@ECLASS:"
def __init__(self):
tags = {
- '@ECLASS:': ('name', True, self._tag_inline_arg, None),
- '@VCSURL:': ('vcsurl', False, self._tag_inline_arg, None),
- '@BLURB:': ('blurb', True, self._tag_inline_arg, None),
- '@DEPRECATED:': ('deprecated', False, self._tag_deprecated, False),
- '@PROVIDES:': ('raw_provides', False, self._tag_inline_list, ()),
- '@MAINTAINER:': ('maintainers', True, self._tag_multiline_args, None),
- '@AUTHOR:': ('authors', False, self._tag_multiline_args, None),
- '@BUGREPORTS:': ('bugreports', False, self._tag_multiline_str, None),
- '@DESCRIPTION:': ('description', False, self._tag_multiline_str, None),
- '@EXAMPLE:': ('example', False, self._tag_multiline_str, None),
- '@SUPPORTED_EAPIS:': ('supported_eapis', False, self._supported_eapis, ()),
+ "@ECLASS:": ("name", True, self._tag_inline_arg, None),
+ "@VCSURL:": ("vcsurl", False, self._tag_inline_arg, None),
+ "@BLURB:": ("blurb", True, self._tag_inline_arg, None),
+ "@DEPRECATED:": ("deprecated", False, self._tag_deprecated, False),
+ "@PROVIDES:": ("raw_provides", False, self._tag_inline_list, ()),
+ "@MAINTAINER:": ("maintainers", True, self._tag_multiline_args, None),
+ "@AUTHOR:": ("authors", False, self._tag_multiline_args, None),
+ "@BUGREPORTS:": ("bugreports", False, self._tag_multiline_str, None),
+ "@DESCRIPTION:": ("description", False, self._tag_multiline_str, None),
+ "@EXAMPLE:": ("example", False, self._tag_multiline_str, None),
+ "@SUPPORTED_EAPIS:": ("supported_eapis", False, self._supported_eapis, ()),
}
super().__init__(tags)
@@ -228,30 +233,31 @@ class EclassBlock(ParseEclassDoc):
unknown = set(eapis) - self._known_eapis
if unknown:
s = pluralism(unknown)
- unknown_str = ' '.join(sorted(unknown))
+ unknown_str = " ".join(sorted(unknown))
logger.warning(
- f'{repr(tag)}, line {lineno}: unknown EAPI{s}: {unknown_str}')
+ f"{repr(tag)}, line {lineno}: unknown EAPI{s}: {unknown_str}"
+ )
return OrderedSet(eapis)
class EclassVarBlock(ParseEclassDoc):
"""ECLASS_VARIABLE doc block."""
- tag = '@ECLASS_VARIABLE:'
- key = 'variables'
+ tag = "@ECLASS_VARIABLE:"
+ key = "variables"
default = True
def __init__(self):
tags = {
- '@ECLASS_VARIABLE:': ('name', True, self._tag_inline_arg, None),
- '@DEPRECATED:': ('deprecated', False, self._tag_deprecated, False),
- '@DEFAULT_UNSET': ('default_unset', False, self._tag_bool, False),
- '@INTERNAL': ('internal', False, self._tag_bool, False),
- '@REQUIRED': ('required', False, self._tag_bool, False),
- '@PRE_INHERIT': ('pre_inherit', False, self._tag_bool, False),
- '@USER_VARIABLE': ('user_variable', False, self._tag_bool, False),
- '@OUTPUT_VARIABLE': ('output_variable', False, self._tag_bool, False),
- '@DESCRIPTION:': ('description', True, self._tag_multiline_str, None),
+ "@ECLASS_VARIABLE:": ("name", True, self._tag_inline_arg, None),
+ "@DEPRECATED:": ("deprecated", False, self._tag_deprecated, False),
+ "@DEFAULT_UNSET": ("default_unset", False, self._tag_bool, False),
+ "@INTERNAL": ("internal", False, self._tag_bool, False),
+ "@REQUIRED": ("required", False, self._tag_bool, False),
+ "@PRE_INHERIT": ("pre_inherit", False, self._tag_bool, False),
+ "@USER_VARIABLE": ("user_variable", False, self._tag_bool, False),
+ "@OUTPUT_VARIABLE": ("output_variable", False, self._tag_bool, False),
+ "@DESCRIPTION:": ("description", True, self._tag_multiline_str, None),
}
super().__init__(tags)
@@ -260,48 +266,50 @@ class EclassVarBlock(ParseEclassDoc):
class EclassVarBlockCompat(ParseEclassDoc):
"""ECLASS-VARIABLE doc block."""
- tag = '@ECLASS-VARIABLE:'
- key = 'variables'
+ tag = "@ECLASS-VARIABLE:"
+ key = "variables"
default = True
def __init__(self):
tags = {
- '@ECLASS-VARIABLE:': ('name', True, self._eclass_variable, None),
- '@DEPRECATED:': ('deprecated', False, self._tag_deprecated, False),
- '@DEFAULT_UNSET': ('default_unset', False, self._tag_bool, False),
- '@INTERNAL': ('internal', False, self._tag_bool, False),
- '@REQUIRED': ('required', False, self._tag_bool, False),
- '@PRE_INHERIT': ('pre_inherit', False, self._tag_bool, False),
- '@USER_VARIABLE': ('user_variable', False, self._tag_bool, False),
- '@OUTPUT_VARIABLE': ('output_variable', False, self._tag_bool, False),
- '@DESCRIPTION:': ('description', True, self._tag_multiline_str, None),
+ "@ECLASS-VARIABLE:": ("name", True, self._eclass_variable, None),
+ "@DEPRECATED:": ("deprecated", False, self._tag_deprecated, False),
+ "@DEFAULT_UNSET": ("default_unset", False, self._tag_bool, False),
+ "@INTERNAL": ("internal", False, self._tag_bool, False),
+ "@REQUIRED": ("required", False, self._tag_bool, False),
+ "@PRE_INHERIT": ("pre_inherit", False, self._tag_bool, False),
+ "@USER_VARIABLE": ("user_variable", False, self._tag_bool, False),
+ "@OUTPUT_VARIABLE": ("output_variable", False, self._tag_bool, False),
+ "@DESCRIPTION:": ("description", True, self._tag_multiline_str, None),
}
super().__init__(tags)
def _eclass_variable(self, block, tag, lineno):
"""Parse @ECLASS-VARIABLE tag."""
- logger.warning(f"{repr(tag)}, line {lineno}: deprecated, use '@ECLASS_VARIABLE' instead")
+ logger.warning(
+ f"{repr(tag)}, line {lineno}: deprecated, use '@ECLASS_VARIABLE' instead"
+ )
return self._tag_inline_arg(block, tag, lineno)
class EclassFuncBlock(ParseEclassDoc):
"""FUNCTION doc block."""
- tag = '@FUNCTION:'
- key = 'functions'
+ tag = "@FUNCTION:"
+ key = "functions"
default = True
def __init__(self):
tags = {
- '@FUNCTION:': ('name', True, self._tag_inline_arg, None),
- '@RETURN:': ('returns', False, self._tag_inline_arg, None),
- '@DEPRECATED:': ('deprecated', False, self._tag_deprecated, False),
- '@INTERNAL': ('internal', False, self._tag_bool, False),
- '@MAINTAINER:': ('maintainers', False, self._tag_multiline_args, None),
- '@DESCRIPTION:': ('description', False, self._tag_multiline_str, None),
+ "@FUNCTION:": ("name", True, self._tag_inline_arg, None),
+ "@RETURN:": ("returns", False, self._tag_inline_arg, None),
+ "@DEPRECATED:": ("deprecated", False, self._tag_deprecated, False),
+ "@INTERNAL": ("internal", False, self._tag_bool, False),
+ "@MAINTAINER:": ("maintainers", False, self._tag_multiline_args, None),
+ "@DESCRIPTION:": ("description", False, self._tag_multiline_str, None),
# TODO: The devmanual states this is required, but disabling for now since
# many phase override functions don't document usage.
- '@USAGE:': ('usage', False, self._usage, None),
+ "@USAGE:": ("usage", False, self._usage, None),
}
super().__init__(tags)
@@ -311,37 +319,40 @@ class EclassFuncBlock(ParseEclassDoc):
Empty usage is allowed for functions with no arguments.
"""
if len(block) > 1:
- logger.warning(f'{repr(tag)}, line {lineno}: non-inline arg')
+ logger.warning(f"{repr(tag)}, line {lineno}: non-inline arg")
return block[0]
def parse(self, *args):
data = super().parse(*args)
if not (data.returns or data.description):
- logger.warning(f"'{self.tag}:{data.name}', @RETURN or @DESCRIPTION required")
+ logger.warning(
+ f"'{self.tag}:{data.name}', @RETURN or @DESCRIPTION required"
+ )
return data
class EclassFuncVarBlock(ParseEclassDoc):
"""VARIABLE doc block."""
- tag = '@VARIABLE:'
- key = 'function_variables'
+ tag = "@VARIABLE:"
+ key = "function_variables"
default = True
def __init__(self):
tags = {
- '@VARIABLE:': ('name', True, self._tag_inline_arg, None),
- '@DEPRECATED:': ('deprecated', False, self._tag_deprecated, False),
- '@DEFAULT_UNSET': ('default_unset', False, self._tag_bool, False),
- '@INTERNAL': ('internal', False, self._tag_bool, False),
- '@REQUIRED': ('required', False, self._tag_bool, False),
- '@DESCRIPTION:': ('description', True, self._tag_multiline_str, None),
+ "@VARIABLE:": ("name", True, self._tag_inline_arg, None),
+ "@DEPRECATED:": ("deprecated", False, self._tag_deprecated, False),
+ "@DEFAULT_UNSET": ("default_unset", False, self._tag_bool, False),
+ "@INTERNAL": ("internal", False, self._tag_bool, False),
+ "@REQUIRED": ("required", False, self._tag_bool, False),
+ "@DESCRIPTION:": ("description", True, self._tag_multiline_str, None),
}
super().__init__(tags)
_eclass_blocks_re = re.compile(
- rf'^(?P<prefix>\s*#) (?P<tag>{"|".join(ParseEclassDoc.blocks)})(?P<value>.*)')
+ rf'^(?P<prefix>\s*#) (?P<tag>{"|".join(ParseEclassDoc.blocks)})(?P<value>.*)'
+)
class EclassDoc(AttrDict):
@@ -355,9 +366,9 @@ class EclassDoc(AttrDict):
# parse eclass doc
data = self.parse(path)
- data['provides'] = None
+ data["provides"] = None
if repo is not None:
- data['provides'] = self._get_provides(data['raw_provides'], repo)
+ data["provides"] = self._get_provides(data["raw_provides"], repo)
# inject full lists of exported funcs and vars
if sourced:
@@ -384,24 +395,32 @@ class EclassDoc(AttrDict):
# TODO: support this via pkgcore's ebd
# source eclass to determine PROPERTIES
p = subprocess.run(
- ['env', '-i', 'bash', '-c',
- f'source {shlex.quote(path)}; '
- f'compgen -A function; '
+ [
+ "env",
+ "-i",
+ "bash",
+ "-c",
+ f"source {shlex.quote(path)}; "
+ f"compgen -A function; "
f'echo "#"; '
- f'compgen -A variable; '
+ f"compgen -A variable; "
f'echo "#"; '
- f'echo ${{PROPERTIES}}'],
- stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, encoding='utf8')
+ f"echo ${{PROPERTIES}}",
+ ],
+ stderr=subprocess.DEVNULL,
+ stdout=subprocess.PIPE,
+ encoding="utf8",
+ )
if p.returncode == 0:
- eclass_obj = ParseEclassDoc.blocks['@ECLASS:']
- funcs, variables, properties = p.stdout.split('#\n')
- data['_exported_funcs'] = tuple(funcs.split())
- data['_exported_vars'] = tuple(
- x for x in variables.split()
- if x not in eclass_obj.bash_env_vars
+ eclass_obj = ParseEclassDoc.blocks["@ECLASS:"]
+ funcs, variables, properties = p.stdout.split("#\n")
+ data["_exported_funcs"] = tuple(funcs.split())
+ data["_exported_vars"] = tuple(
+ x for x in variables.split() if x not in eclass_obj.bash_env_vars
+ )
+ data["_properties"] = conditionals.DepSet.parse(
+ properties, str, operators={}, attr="PROPERTIES"
)
- data['_properties'] = conditionals.DepSet.parse(
- properties, str, operators={}, attr='PROPERTIES')
return data
@property
@@ -415,13 +434,13 @@ class EclassDoc(AttrDict):
# include all internal tagged functions
s = {x.name for x in self.functions if x.internal}
# and all exported, underscore-prefixed functions
- s.update(x for x in self._dict.get('_exported_funcs', ()) if x.startswith('_'))
+ s.update(x for x in self._dict.get("_exported_funcs", ()) if x.startswith("_"))
return frozenset(s)
@property
def exported_function_names(self):
"""Set of all exported function names in the eclass."""
- return frozenset(self._dict.get('_exported_funcs', ()))
+ return frozenset(self._dict.get("_exported_funcs", ()))
@property
def variable_names(self):
@@ -434,7 +453,7 @@ class EclassDoc(AttrDict):
# include all internal tagged variables
s = {x.name for x in self.variables if x.internal}
# and all exported, underscore-prefixed variables
- s.update(x for x in self._dict.get('_exported_vars', ()) if x.startswith('_'))
+ s.update(x for x in self._dict.get("_exported_vars", ()) if x.startswith("_"))
return frozenset(s)
@property
@@ -444,7 +463,7 @@ class EclassDoc(AttrDict):
Ignores variables that start with underscores since
it's assumed they are private.
"""
- return frozenset(self._dict.get('_exported_vars', ()))
+ return frozenset(self._dict.get("_exported_vars", ()))
@property
def function_variable_names(self):
@@ -454,7 +473,7 @@ class EclassDoc(AttrDict):
@property
def live(self):
"""Eclass implements functionality to support a version control system."""
- return 'live' in self._dict.get('_properties', ())
+ return "live" in self._dict.get("_properties", ())
@staticmethod
def parse(path):
@@ -466,18 +485,18 @@ class EclassDoc(AttrDict):
line_ind = 0
while line_ind < len(lines):
- if (mo := _eclass_blocks_re.match(lines[line_ind])):
+ if mo := _eclass_blocks_re.match(lines[line_ind]):
# Isolate identified doc block by pulling all following
# lines with a matching prefix.
- prefix = mo.group('prefix')
- tag = mo.group('tag')
+ prefix = mo.group("prefix")
+ tag = mo.group("tag")
block = []
block_start = line_ind + 1
while line_ind < len(lines):
line = lines[line_ind]
if not line.startswith(prefix):
break
- line = line[len(prefix) + 1:]
+ line = line[len(prefix) + 1 :]
block.append(line)
line_ind += 1
blocks.append((tag, block, block_start))
@@ -485,17 +504,23 @@ class EclassDoc(AttrDict):
# set default fields
data = {}
- data.update(ParseEclassDoc.blocks['@ECLASS:'].defaults)
+ data.update(ParseEclassDoc.blocks["@ECLASS:"].defaults)
for block_obj in ParseEclassDoc.blocks.values():
if block_obj.default:
data[block_obj.key] = OrderedSet()
- data.update({block.key: OrderedSet() for block in ParseEclassDoc.blocks.values() if block.default})
+ data.update(
+ {
+ block.key: OrderedSet()
+ for block in ParseEclassDoc.blocks.values()
+ if block.default
+ }
+ )
# @ECLASS block must exist and be first in eclasses
if not blocks:
logger.error("'@ECLASS:' block missing")
return data
- elif blocks[0][0] != '@ECLASS:':
+ elif blocks[0][0] != "@ECLASS:":
logger.warning("'@ECLASS:' block not first")
# track duplicate tags
@@ -509,21 +534,22 @@ class EclassDoc(AttrDict):
if block_obj.key is None:
# main @ECLASS block
if duplicates[tag]:
- logger.warning(
- f"'@ECLASS:', line {block_start}: duplicate block")
+ logger.warning(f"'@ECLASS:', line {block_start}: duplicate block")
duplicates[tag] = True
# verify name is correct
file_name = os.path.basename(path)
if block_data.name != file_name:
logger.warning(
- f"'@ECLASS:' invalid name {block_data.name!r} (should be {file_name!r})")
+ f"'@ECLASS:' invalid name {block_data.name!r} (should be {file_name!r})"
+ )
data.update(block_data)
else:
# item block
- name = block_data['name']
+ name = block_data["name"]
if name in duplicates[tag]:
logger.warning(
- f'{repr(block[0])}, line {block_start}: duplicate block')
+ f"{repr(block[0])}, line {block_start}: duplicate block"
+ )
duplicates[tag].add(name)
data[block_obj.key].add(block_data)
@@ -532,106 +558,108 @@ class EclassDoc(AttrDict):
def to_rst(self):
"""Convert eclassdoc object to reStructuredText."""
if self.name is None:
- raise ValueError('eclass lacking doc support')
+ raise ValueError("eclass lacking doc support")
_header_only = partial(_rst_header, newline=True)
- rst = _header_only('=', self.name, leading=True)
+ rst = _header_only("=", self.name, leading=True)
if self.blurb:
- rst.extend(_header_only('-', self.blurb, leading=True))
+ rst.extend(_header_only("-", self.blurb, leading=True))
if self.description:
- rst.extend(_rst_header('-', 'Description'))
+ rst.extend(_rst_header("-", "Description"))
rst.append(self.description)
- rst.append('')
+ rst.append("")
if self.deprecated:
- rst.extend(_rst_header('-', 'Deprecated'))
+ rst.extend(_rst_header("-", "Deprecated"))
if isinstance(self.deprecated, bool):
- replacement = 'none'
+ replacement = "none"
else:
replacement = self.deprecated
- rst.append(f'Replacement: {replacement}')
- rst.append('')
+ rst.append(f"Replacement: {replacement}")
+ rst.append("")
if self.supported_eapis:
- rst.extend(_rst_header('-', 'Supported EAPIs'))
- rst.append(' '.join(self.supported_eapis))
- rst.append('')
+ rst.extend(_rst_header("-", "Supported EAPIs"))
+ rst.append(" ".join(self.supported_eapis))
+ rst.append("")
if self.raw_provides:
- rst.extend(_rst_header('-', 'Transitively Provided Eclasses'))
- rst.append(' '.join(self.raw_provides))
- rst.append('')
+ rst.extend(_rst_header("-", "Transitively Provided Eclasses"))
+ rst.append(" ".join(self.raw_provides))
+ rst.append("")
if self.example:
- rst.extend(_rst_header('-', 'Example'))
+ rst.extend(_rst_header("-", "Example"))
rst.append(self.example)
- rst.append('')
+ rst.append("")
if external_funcs := [x for x in self.functions if not x.internal]:
- rst.extend(_header_only('-', 'Functions'))
+ rst.extend(_header_only("-", "Functions"))
for func in external_funcs:
header = [func.name]
if func.usage:
header.append(func.usage)
- rst.extend(_rst_header('~', ' '.join(header)))
+ rst.extend(_rst_header("~", " ".join(header)))
if func.description:
rst.append(func.description)
if func.returns:
if func.description:
- rst.append('')
- rst.append(f'Return value: {func.returns}')
- rst.append('')
+ rst.append("")
+ rst.append(f"Return value: {func.returns}")
+ rst.append("")
if external_vars := [x for x in self.variables if not x.internal]:
- rst.extend(_header_only('-', 'Variables'))
+ rst.extend(_header_only("-", "Variables"))
for var in external_vars:
- vartype = ''
+ vartype = ""
if var.required:
- vartype += ' (REQUIRED)'
+ vartype += " (REQUIRED)"
if var.pre_inherit:
- vartype += ' (SET BEFORE INHERIT)'
+ vartype += " (SET BEFORE INHERIT)"
if var.user_variable:
- vartype += ' (USER VARIABLE)'
+ vartype += " (USER VARIABLE)"
if var.output_variable:
- vartype += ' (OUTPUT VARIABLE)'
+ vartype += " (OUTPUT VARIABLE)"
- rst.extend(_rst_header('~', var.name + vartype))
+ rst.extend(_rst_header("~", var.name + vartype))
if var.description:
rst.append(var.description)
- rst.append('')
+ rst.append("")
if external_func_vars := [x for x in self.function_variables if not x.internal]:
- rst.extend(_header_only('-', 'Function Variables'))
+ rst.extend(_header_only("-", "Function Variables"))
for var in external_func_vars:
- vartype = ''
+ vartype = ""
if var.required:
- vartype += ' (REQUIRED)'
+ vartype += " (REQUIRED)"
- rst.extend(_rst_header('~', var.name + vartype))
+ rst.extend(_rst_header("~", var.name + vartype))
if var.description:
rst.append(var.description)
- rst.append('')
+ rst.append("")
if self.authors:
- rst.extend(_rst_header('-', 'Authors'))
- rst.append('\n'.join(f'| {x}' for x in self.authors))
- rst.append('')
+ rst.extend(_rst_header("-", "Authors"))
+ rst.append("\n".join(f"| {x}" for x in self.authors))
+ rst.append("")
if self.maintainers:
- rst.extend(_rst_header('-', 'Maintainers'))
- rst.append('\n'.join(f'| {x}' for x in self.maintainers))
- rst.append('')
+ rst.extend(_rst_header("-", "Maintainers"))
+ rst.append("\n".join(f"| {x}" for x in self.maintainers))
+ rst.append("")
if self.bugreports:
- rst.extend(_rst_header('-', 'Bug Reports'))
+ rst.extend(_rst_header("-", "Bug Reports"))
rst.append(self.bugreports)
- rst.append('')
+ rst.append("")
- return '\n'.join(rst)
+ return "\n".join(rst)
def _to_docutils(self, writer):
"""Convert eclassdoc object using docutils."""
from docutils.core import publish_string
+
return publish_string(
- source=self.to_rst(), writer=writer,
+ source=self.to_rst(),
+ writer=writer,
settings_overrides={
- 'input_encoding': 'unicode',
- 'output_encoding': 'unicode',
- }
+ "input_encoding": "unicode",
+ "output_encoding": "unicode",
+ },
)
def to_man(self):
@@ -639,17 +667,17 @@ class EclassDoc(AttrDict):
from docutils.writers import manpage
man_data = {
- 'manual_section': '5',
- 'manual_group': 'eclass-manpages',
- 'date': datetime.utcnow().strftime('%Y-%m-%d'),
- 'version': 'Gentoo Linux',
+ "manual_section": "5",
+ "manual_group": "eclass-manpages",
+ "date": datetime.utcnow().strftime("%Y-%m-%d"),
+ "version": "Gentoo Linux",
}
if self.blurb:
- man_data['subtitle'] = self.blurb
+ man_data["subtitle"] = self.blurb
# add pkgcore version to header comment
- pkgcore_version = get_version(__title__, __file__).split(' --')[0]
- header_comment = f'\nCreated by {pkgcore_version}.'
+ pkgcore_version = get_version(__title__, __file__).split(" --")[0]
+ header_comment = f"\nCreated by {pkgcore_version}."
class Translator(manpage.Translator):
"""Override docutils man page metadata defaults."""
@@ -667,4 +695,5 @@ class EclassDoc(AttrDict):
def to_html(self):
"""Convert eclassdoc object to an HTML 5 document."""
from docutils.writers import html5_polyglot
+
return self._to_docutils(html5_polyglot.Writer())
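One nuance visible in the subprocess hunk above: quote normalization is conservative, so strings such as f'echo "#"; ' keep their single quotes because switching them would force escaping. A small sketch, again assuming black is available:

import black

# A string containing double quotes stays single-quoted...
print(black.format_str("cmd = 'echo \"#\"; '\n", mode=black.Mode()), end="")
# ...while an ordinary string literal is normalized to double quotes.
print(black.format_str("name = 'pkgcore'\n", mode=black.Mode()), end="")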
diff --git a/src/pkgcore/ebuild/eclass_cache.py b/src/pkgcore/ebuild/eclass_cache.py
index bc6fb56cf..c3dea7dff 100644
--- a/src/pkgcore/ebuild/eclass_cache.py
+++ b/src/pkgcore/ebuild/eclass_cache.py
@@ -75,18 +75,19 @@ class base:
def __getstate__(self):
d = self.__dict__.copy()
- del d['_eclass_data_inst_cache']
+ del d["_eclass_data_inst_cache"]
return d
def __setstate__(self, state):
self.__dict__ = state.copy()
- self.__dict__['_eclass_data_inst_cache'] = WeakValueDictionary()
+ self.__dict__["_eclass_data_inst_cache"] = WeakValueDictionary()
class cache(base):
- pkgcore_config_type = ConfigHint({"path":"str", "location":"str"},
- typename='eclass_cache')
+ pkgcore_config_type = ConfigHint(
+ {"path": "str", "location": "str"}, typename="eclass_cache"
+ )
def __init__(self, path, location=None):
"""
@@ -107,7 +108,8 @@ class cache(base):
continue
ys = y[:-eclass_len]
ec[intern(ys)] = LazilyHashedPath(
- pjoin(self.eclassdir, y), eclassdir=self.eclassdir)
+ pjoin(self.eclassdir, y), eclassdir=self.eclassdir
+ )
return ImmutableDict(ec)
@@ -120,8 +122,9 @@ class StackedCaches(base):
"""
pkgcore_config_type = ConfigHint(
- {'caches': 'refs:eclass_cache', 'location': 'str', 'eclassdir': 'str'},
- typename='eclass_cache')
+ {"caches": "refs:eclass_cache", "location": "str", "eclassdir": "str"},
+ typename="eclass_cache",
+ )
def __init__(self, caches, **kwds):
"""
@@ -132,11 +135,12 @@ class StackedCaches(base):
cache.
"""
if len(caches) < 2:
- raise TypeError(
- "%s requires at least two eclass_caches" % self.__class__)
+ raise TypeError("%s requires at least two eclass_caches" % self.__class__)
kwds.setdefault("eclassdir", caches[0].eclassdir)
- kwds.setdefault("location", os.path.dirname(kwds["eclassdir"].rstrip(os.path.sep)))
+ kwds.setdefault(
+ "location", os.path.dirname(kwds["eclassdir"].rstrip(os.path.sep))
+ )
self._caches = caches
base.__init__(self, **kwds)
diff --git a/src/pkgcore/ebuild/errors.py b/src/pkgcore/ebuild/errors.py
index 64030d6d5..d7923c70b 100644
--- a/src/pkgcore/ebuild/errors.py
+++ b/src/pkgcore/ebuild/errors.py
@@ -22,18 +22,19 @@ class MalformedAtom(errors.InvalidDependency):
super().__init__(str(self))
def __str__(self):
- msg = f'invalid package atom: {self.atom!r}'
+ msg = f"invalid package atom: {self.atom!r}"
if self.err:
- msg += f': {self.err}'
+ msg += f": {self.err}"
return msg
class InvalidVersion(errors.InvalidDependency):
"""Package version doesn't follow required specifications."""
- def __init__(self, ver, rev, err=''):
+ def __init__(self, ver, rev, err=""):
super().__init__(
- f"Version restriction ver='{ver}', rev='{rev}', is malformed: error {err}")
+ f"Version restriction ver='{ver}', rev='{rev}', is malformed: error {err}"
+ )
self.ver, self.rev, self.err = ver, rev, err
@@ -42,7 +43,6 @@ class InvalidCPV(errors.InvalidPackageName):
class DepsetParseError(errors.InvalidDependency):
-
def __init__(self, s, token=None, msg=None, attr=None):
self.dep_str = s
self.token = token
@@ -52,23 +52,23 @@ class DepsetParseError(errors.InvalidDependency):
def __str__(self):
msg = []
if self.attr is not None:
- msg.append(f'failed parsing {self.attr}')
- msg.append(f'{self.dep_str!r} is unparseable')
+ msg.append(f"failed parsing {self.attr}")
+ msg.append(f"{self.dep_str!r} is unparseable")
if self.token is not None:
- msg.append(f'flagged token- {self.token}')
+ msg.append(f"flagged token- {self.token}")
if self.msg is not None:
- msg.append(f'{self.msg}')
- return ': '.join(msg)
+ msg.append(f"{self.msg}")
+ return ": ".join(msg)
class SanityCheckError(PkgcoreException):
"""Generic error for sanity check failures."""
- def msg(self, verbosity, prefix=' '):
+ def msg(self, verbosity, prefix=" "):
if verbosity > 0:
return self.verbose_msg(prefix)
else:
- return f'{prefix}{self}'
+ return f"{prefix}{self}"
class PkgPretendError(SanityCheckError):
@@ -79,17 +79,17 @@ class PkgPretendError(SanityCheckError):
self.output = output
self.error = error
- def msg(self, verbosity=0, prefix=' '):
- header = f'>>> {self.pkg.cpvstr}: failed pkg_pretend'
+ def msg(self, verbosity=0, prefix=" "):
+ header = f">>> {self.pkg.cpvstr}: failed pkg_pretend"
msg = []
error_msg = self.error.msg(verbosity=verbosity)
if verbosity > 0:
msg.extend(self.output.splitlines())
msg.extend(error_msg.splitlines())
- msg = [f'{prefix}{l}' for l in msg]
+ msg = [f"{prefix}{l}" for l in msg]
elif error_msg:
- header += f': {error_msg}'
- return '\n'.join([header] + msg)
+ header += f": {error_msg}"
+ return "\n".join([header] + msg)
class RequiredUseError(SanityCheckError):
@@ -99,16 +99,18 @@ class RequiredUseError(SanityCheckError):
self.pkg = pkg
self.unmatched = unmatched
- def msg(self, verbosity=0, prefix=' '):
- header = f'>>> {self.pkg.cpvstr}: failed REQUIRED_USE'
+ def msg(self, verbosity=0, prefix=" "):
+ header = f">>> {self.pkg.cpvstr}: failed REQUIRED_USE"
errors = []
for node in self.unmatched:
- errors.append(textwrap.dedent(
- f"""
+ errors.append(
+ textwrap.dedent(
+ f"""
Failed to match: {node}
from: {self.pkg.required_use}
for USE: {' '.join(sorted(self.pkg.use))}
"""
- ))
- msg = [f'{prefix}{line}' for e in errors for line in e.strip().splitlines()]
- return '\n'.join([header] + msg)
+ )
+ )
+ msg = [f"{prefix}{line}" for e in errors for line in e.strip().splitlines()]
+ return "\n".join([header] + msg)
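Every change in errors.py is the same quote and wrapping normalization, so behaviour is unchanged. A quick, hedged way to confirm that such a hunk is purely cosmetic is to compare the parsed ASTs of the old and new lines (black runs an equivalent safety check internally):

import ast

old = "msg.append(f'failed parsing {self.attr}')\n"
new = 'msg.append(f"failed parsing {self.attr}")\n'

# Identical ASTs mean identical runtime behaviour for the reformatted line.
print(ast.dump(ast.parse(old)) == ast.dump(ast.parse(new)))  # True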
diff --git a/src/pkgcore/ebuild/filter_env.py b/src/pkgcore/ebuild/filter_env.py
index cc2960652..9e5b18b35 100644
--- a/src/pkgcore/ebuild/filter_env.py
+++ b/src/pkgcore/ebuild/filter_env.py
@@ -10,9 +10,14 @@ from ..log import logger
COMMAND_PARSING, SPACE_PARSING = list(range(2))
-def run(out, file_buff, var_match, func_match,
- global_envvar_callback=None,
- func_callback=None):
+def run(
+ out,
+ file_buff,
+ var_match,
+ func_match,
+ global_envvar_callback=None,
+ func_callback=None,
+):
"""Print a filtered environment.
:param out: file-like object to write to.
@@ -22,8 +27,16 @@ def run(out, file_buff, var_match, func_match,
:param func_match: result of build_regex_string or C{None}, for functions.
"""
- process_scope(out, file_buff, 0, var_match, func_match, '\0',
- global_envvar_callback, func_callback=func_callback)
+ process_scope(
+ out,
+ file_buff,
+ 0,
+ var_match,
+ func_match,
+ "\0",
+ global_envvar_callback,
+ func_callback=func_callback,
+ )
def build_regex_string(tokens, invert=False):
@@ -34,7 +47,7 @@ def build_regex_string(tokens, invert=False):
s = tokens[0]
else:
s = f"(?:{'|'.join(tokens)})"
- s = f'^{s}$'
+ s = f"^{s}$"
if invert:
s = f"(?!{s})"
try:
@@ -43,15 +56,16 @@ def build_regex_string(tokens, invert=False):
raise Exception(f"failed compiling {s!r}:\n\nerror: {e}")
-FUNC_LEN = len('function')
+FUNC_LEN = len("function")
+
def is_function(buff, pos):
""":return: start, end, pos or None, None, None tuple."""
isspace = str.isspace
try:
- while buff[pos] in ' \t':
+ while buff[pos] in " \t":
pos += 1
- if buff[pos:pos + FUNC_LEN] == 'function':
+ if buff[pos : pos + FUNC_LEN] == "function":
try:
if isspace(buff[pos + FUNC_LEN]):
pos += FUNC_LEN + 1
@@ -62,24 +76,24 @@ def is_function(buff, pos):
while isspace(buff[pos]):
pos += 1
start = pos
- while buff[pos] not in '\0 \t\n="\'()':
+ while buff[pos] not in "\0 \t\n=\"'()":
pos += 1
end = pos
if end == start:
return None, None, None
- while buff[pos] in ' \t':
+ while buff[pos] in " \t":
pos += 1
- if buff[pos] != '(':
+ if buff[pos] != "(":
return None, None, None
pos += 1
- while buff[pos] in ' \t':
+ while buff[pos] in " \t":
pos += 1
- if buff[pos] != ')':
+ if buff[pos] != ")":
return None, None, None
pos += 1
while isspace(buff[pos]):
pos += 1
- if buff[pos] != '{':
+ if buff[pos] != "{":
return None, None, None
return start, end, pos + 1
except IndexError:
@@ -90,13 +104,13 @@ def is_function(buff, pos):
def is_envvar(buff, pos):
""":return: start, end, pos or None, None, None tuple."""
try:
- while buff[pos] in ' \t':
+ while buff[pos] in " \t":
pos += 1
start = pos
while True:
- if buff[pos] in '\0"\'()- \t\n':
+ if buff[pos] in "\0\"'()- \t\n":
return None, None, None
- if buff[pos] == '=':
+ if buff[pos] == "=":
if pos == start:
return None, None, None
return start, pos, pos + 1
@@ -104,9 +118,18 @@ def is_envvar(buff, pos):
except IndexError:
return None, None, None
-def process_scope(out, buff, pos, var_match, func_match, endchar,
- envvar_callback=None, func_callback=None,
- func_level=0):
+
+def process_scope(
+ out,
+ buff,
+ pos,
+ var_match,
+ func_match,
+ endchar,
+ envvar_callback=None,
+ func_callback=None,
+ func_level=0,
+):
window_start = pos
window_end = None
isspace = str.isspace
@@ -115,7 +138,7 @@ def process_scope(out, buff, pos, var_match, func_match, endchar,
# Wander forward to the next non space.
if window_end is not None:
if out is not None:
- out.write(buff[window_start:window_end].encode('utf-8'))
+ out.write(buff[window_start:window_end].encode("utf-8"))
window_start = pos
window_end = None
com_start = pos
@@ -125,22 +148,29 @@ def process_scope(out, buff, pos, var_match, func_match, endchar,
continue
# Ignore comments.
- if ch == '#':
+ if ch == "#":
pos = walk_statement_pound(buff, pos, endchar)
continue
new_start, new_end, new_p = is_function(buff, pos)
if new_p is not None:
func_name = buff[new_start:new_end]
- logger.debug(f'matched func name {func_name!r}')
- new_p = process_scope(None, buff, new_p, None, None, '}',
- func_callback=func_callback,
- func_level=func_level+1)
- logger.debug(f'ended processing {func_name!r}')
+ logger.debug(f"matched func name {func_name!r}")
+ new_p = process_scope(
+ None,
+ buff,
+ new_p,
+ None,
+ None,
+ "}",
+ func_callback=func_callback,
+ func_level=func_level + 1,
+ )
+ logger.debug(f"ended processing {func_name!r}")
if func_callback is not None:
func_callback(func_level, func_name, buff[new_start:new_p])
if func_match is not None and func_match(func_name):
- logger.debug(f'filtering func {func_name!r}')
+ logger.debug(f"filtering func {func_name!r}")
window_end = com_start
pos = new_p
pos += 1
@@ -159,7 +189,7 @@ def process_scope(out, buff, pos, var_match, func_match, endchar,
pos = new_p
if envvar_callback:
envvar_callback(var_name)
- logger.debug(f'matched env assign {var_name!r}')
+ logger.debug(f"matched env assign {var_name!r}")
if var_match is not None and var_match(var_name):
# This would be filtered.
@@ -169,16 +199,14 @@ def process_scope(out, buff, pos, var_match, func_match, endchar,
if pos >= end:
return pos
- while (pos < end and not isspace(buff[pos])
- and buff[pos] != ';'):
+ while pos < end and not isspace(buff[pos]) and buff[pos] != ";":
if buff[pos] == "'":
pos = walk_statement_no_parsing(buff, pos + 1, "'") + 1
elif buff[pos] in '"`':
- pos = walk_command_escaped_parsing(buff, pos + 1,
- buff[pos]) + 1
- elif buff[pos] == '(':
- pos = walk_command_escaped_parsing(buff, pos + 1, ')') + 1
- elif buff[pos] == '$':
+ pos = walk_command_escaped_parsing(buff, pos + 1, buff[pos]) + 1
+ elif buff[pos] == "(":
+ pos = walk_command_escaped_parsing(buff, pos + 1, ")") + 1
+ elif buff[pos] == "$":
pos += 1
if pos >= end:
continue
@@ -186,14 +214,14 @@ def process_scope(out, buff, pos, var_match, func_match, endchar,
continue
else:
# blah=cah ; single word
- pos = walk_command_complex(buff, pos, ' ', SPACE_PARSING)
+ pos = walk_command_complex(buff, pos, " ", SPACE_PARSING)
if out is not None:
if window_end is None:
window_end = pos
if window_end > end:
window_end = end
- out.write(buff[window_start:window_end].encode('utf-8'))
+ out.write(buff[window_start:window_end].encode("utf-8"))
return pos
@@ -210,7 +238,7 @@ def walk_statement_dollared_quote_parsing(buff, pos, endchar):
while pos < end:
if buff[pos] == endchar:
return pos
- elif buff[pos] == '\\':
+ elif buff[pos] == "\\":
pos += 1
pos += 1
return pos
@@ -218,21 +246,23 @@ def walk_statement_dollared_quote_parsing(buff, pos, endchar):
def walk_here_statement(buff, pos):
pos += 1
- logger.debug('starting here processing for COMMAND for level 2 at p == %.10s', pos)
- if buff[pos] == '<':
- logger.debug("correction, it's a third level here. Handing back to command parsing")
+ logger.debug("starting here processing for COMMAND for level 2 at p == %.10s", pos)
+ if buff[pos] == "<":
+ logger.debug(
+ "correction, it's a third level here. Handing back to command parsing"
+ )
return pos + 1
isspace = str.isspace
end = len(buff)
- while pos < end and (isspace(buff[pos]) or buff[pos] == '-'):
+ while pos < end and (isspace(buff[pos]) or buff[pos] == "-"):
pos += 1
if buff[pos] in "'\"":
end_here = walk_statement_no_parsing(buff, pos + 1, buff[pos])
pos += 1
else:
- end_here = walk_command_complex(buff, pos, ' ', SPACE_PARSING)
+ end_here = walk_command_complex(buff, pos, " ", SPACE_PARSING)
here_word = buff[pos:end_here]
- logger.debug(f'matched len({len(here_word)})/{here_word!r} for a here word')
+ logger.debug(f"matched len({len(here_word)})/{here_word!r} for a here word")
# XXX watch this. Potential for horkage. Need to do the quote
# removal thing. This sucks.
end_here += 1
@@ -243,11 +273,11 @@ def walk_here_statement(buff, pos):
end_here = buff.find(here_word, end_here)
while end_here != -1:
i = here_len + end_here
- if buff[i] in ';\n\r})':
+ if buff[i] in ";\n\r})":
i = end_here - 1
- while i >= 0 and buff[i] in '\t ':
+ while i >= 0 and buff[i] in "\t ":
i -= 1
- if i >= 0 and buff[i] == '\n':
+ if i >= 0 and buff[i] == "\n":
break
end_here = buff.find(here_word, end_here + here_len)
@@ -257,10 +287,10 @@ def walk_here_statement(buff, pos):
def walk_statement_pound(buff, pos, endchar=None):
- if pos and not buff[pos-1].isspace():
+ if pos and not buff[pos - 1].isspace():
return pos + 1
- if endchar == '`':
- i = buff.find('\n', pos)
+ if endchar == "`":
+ i = buff.find("\n", pos)
i2 = buff.find(endchar, pos)
if i == -1:
if i2 != -1:
@@ -271,7 +301,7 @@ def walk_statement_pound(buff, pos, endchar=None):
return i
return len(buff) - 1
- pos = buff.find('\n', pos)
+ pos = buff.find("\n", pos)
if pos == -1:
pos = len(buff) - 1
return pos
@@ -284,94 +314,100 @@ def walk_command_complex(buff, pos, endchar, interpret_level):
while pos < end:
ch = buff[pos]
if ch == endchar:
- if endchar != '}':
+ if endchar != "}":
return pos
if start == pos:
return pos
if buff[pos - 1] in ";\n":
return pos
- elif (interpret_level == COMMAND_PARSING and ch in ';\n') or \
- (interpret_level == SPACE_PARSING and isspace(ch)):
+ elif (interpret_level == COMMAND_PARSING and ch in ";\n") or (
+ interpret_level == SPACE_PARSING and isspace(ch)
+ ):
return pos
- elif ch == '\\':
+ elif ch == "\\":
pos += 1
- elif ch == '<':
- if (pos < end - 1 and buff[pos + 1] == '<' and
- interpret_level == COMMAND_PARSING):
+ elif ch == "<":
+ if (
+ pos < end - 1
+ and buff[pos + 1] == "<"
+ and interpret_level == COMMAND_PARSING
+ ):
pos = walk_here_statement(buff, pos + 1)
# we continue immediately; walk_here deposits us at the end
# of the here op, not consuming the final delimiting char
# since it may be an endchar
continue
else:
- logger.debug(f'noticed <, interpret_level={interpret_level}')
- elif ch == '#':
- if start == pos or isspace(buff[pos - 1]) or buff[pos - 1] == ';':
+ logger.debug(f"noticed <, interpret_level={interpret_level}")
+ elif ch == "#":
+ if start == pos or isspace(buff[pos - 1]) or buff[pos - 1] == ";":
pos = walk_statement_pound(buff, pos)
continue
- elif ch == '$':
+ elif ch == "$":
pos = walk_dollar_expansion(buff, pos + 1, end, endchar)
continue
- elif ch == '{':
- pos = walk_command_escaped_parsing(buff, pos + 1, '}')
- elif ch == '(' and interpret_level == COMMAND_PARSING:
- pos = walk_command_escaped_parsing(buff, pos + 1, ')')
+ elif ch == "{":
+ pos = walk_command_escaped_parsing(buff, pos + 1, "}")
+ elif ch == "(" and interpret_level == COMMAND_PARSING:
+ pos = walk_command_escaped_parsing(buff, pos + 1, ")")
elif ch in '`"':
pos = walk_command_escaped_parsing(buff, pos + 1, ch)
elif ch == "'" and endchar != '"':
- pos = walk_statement_no_parsing(buff, pos +1, "'")
+ pos = walk_statement_no_parsing(buff, pos + 1, "'")
pos += 1
return pos
+
def raw_walk_command_escaped_parsing(buff, pos, endchar):
end = len(buff)
while pos < end:
ch = buff[pos]
if ch == endchar:
return pos
- elif ch == '\\':
+ elif ch == "\\":
pos += 1
- elif ch == '{':
+ elif ch == "{":
if endchar != '"':
- pos = raw_walk_command_escaped_parsing(
- buff, pos + 1, '}')
- elif ch == '(':
+ pos = raw_walk_command_escaped_parsing(buff, pos + 1, "}")
+ elif ch == "(":
if endchar != '"':
- pos = raw_walk_command_escaped_parsing(
- buff, pos + 1, ')')
+ pos = raw_walk_command_escaped_parsing(buff, pos + 1, ")")
elif ch in '`"':
pos = raw_walk_command_escaped_parsing(buff, pos + 1, ch)
elif ch == "'" and endchar != '"':
pos = walk_statement_no_parsing(buff, pos + 1, "'")
- elif ch == '$':
- pos = walk_dollar_expansion(buff, pos + 1, end, endchar,
- disable_quote = endchar == '"')
+ elif ch == "$":
+ pos = walk_dollar_expansion(
+ buff, pos + 1, end, endchar, disable_quote=endchar == '"'
+ )
continue
- elif ch == '#' and endchar != '"':
+ elif ch == "#" and endchar != '"':
pos = walk_statement_pound(buff, pos, endchar)
continue
pos += 1
return pos
+
walk_command_escaped_parsing = raw_walk_command_escaped_parsing
+
def walk_dollar_expansion(buff, pos, end, endchar, disable_quote=False):
- if buff[pos] == '(':
- return process_scope(None, buff, pos + 1, None, None, ')') + 1
+ if buff[pos] == "(":
+ return process_scope(None, buff, pos + 1, None, None, ")") + 1
if buff[pos] == "'" and not disable_quote:
- return walk_statement_dollared_quote_parsing(buff, pos +1, "'") + 1
- if buff[pos] != '{':
- if buff[pos] == '$':
+ return walk_statement_dollared_quote_parsing(buff, pos + 1, "'") + 1
+ if buff[pos] != "{":
+ if buff[pos] == "$":
# short circuit it.
return pos + 1
while pos < end and buff[pos] != endchar:
if buff[pos].isspace():
return pos
- if buff[pos] == '$':
+ if buff[pos] == "$":
# shouldn't this be passing disable_quote ?
return walk_dollar_expansion(buff, pos + 1, end, endchar)
if not buff[pos].isalnum():
- if buff[pos] != '_':
+ if buff[pos] != "_":
return pos
pos += 1
@@ -381,10 +417,10 @@ def walk_dollar_expansion(buff, pos, end, endchar, disable_quote=False):
pos += 1
# shortcut ${$} to avoid going too deep. ${$a} isn't valid, so no concern
- if pos == '$':
+ if pos == "$":
return pos + 1
- while pos < end and buff[pos] != '}':
- if buff[pos] == '$':
+ while pos < end and buff[pos] != "}":
+ if buff[pos] == "$":
# disable_quote?
pos = walk_dollar_expansion(buff, pos + 1, end, endchar)
else:
@@ -392,8 +428,16 @@ def walk_dollar_expansion(buff, pos, end, endchar, disable_quote=False):
return pos + 1
-def main_run(out_handle, data, vars_to_filter=(), funcs_to_filter=(), vars_is_whitelist=False, funcs_is_whitelist=False,
- global_envvar_callback=None, func_callback=None):
+def main_run(
+ out_handle,
+ data,
+ vars_to_filter=(),
+ funcs_to_filter=(),
+ vars_is_whitelist=False,
+ funcs_is_whitelist=False,
+ global_envvar_callback=None,
+ func_callback=None,
+):
vars = funcs = None
if vars_to_filter:
vars = build_regex_string(vars_to_filter, invert=vars_is_whitelist).match
@@ -403,11 +447,11 @@ def main_run(out_handle, data, vars_to_filter=(), funcs_to_filter=(), vars_is_wh
raise ValueError("funcs_str should not be a string; should be a sequence.")
funcs = build_regex_string(funcs_to_filter, invert=funcs_is_whitelist).match
- data = data + '\0'
- kwds = {'global_envvar_callback': global_envvar_callback}
+ data = data + "\0"
+ kwds = {"global_envvar_callback": global_envvar_callback}
if func_callback:
- kwds['func_callback'] = func_callback
+ kwds["func_callback"] = func_callback
if out_handle is None:
out_handle = io.BytesIO()
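filter_env.py also picks up black's PEP 8 slice spacing: a slice bound that is anything more complex than a bare name or number gets spaces around the colon, as in buff[pos : pos + FUNC_LEN] above, while simple bounds stay compact. A tiny illustrative sketch:

# Slice spacing as applied in the hunks above.
buff = "function foo"
pos = 0
FUNC_LEN = len("function")

simple = buff[:FUNC_LEN]               # simple bound: no spaces around ':'
compound = buff[pos : pos + FUNC_LEN]  # expression bound: spaced colon
print(simple, compound)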
diff --git a/src/pkgcore/ebuild/formatter.py b/src/pkgcore/ebuild/formatter.py
index 2b9429ce1..a592ac70b 100644
--- a/src/pkgcore/ebuild/formatter.py
+++ b/src/pkgcore/ebuild/formatter.py
@@ -1,9 +1,13 @@
"""pmerge formatting module"""
__all__ = (
- "Formatter", "use_expand_filter",
- "BasicFormatter", "PkgcoreFormatter", "CountingFormatter",
- "PortageFormatter", "PortageVerboseFormatter",
+ "Formatter",
+ "use_expand_filter",
+ "BasicFormatter",
+ "PkgcoreFormatter",
+ "CountingFormatter",
+ "PortageFormatter",
+ "PortageVerboseFormatter",
)
import operator
@@ -20,7 +24,6 @@ from ..log import logger
class use_expand_filter:
-
def __init__(self, use_expand, use_expand_hidden):
"""
:type use_expand: iterable of strings
@@ -29,8 +32,9 @@ class use_expand_filter:
:param use_expand_hidden: names of use-expanded vars that should not
be added to the dict.
"""
- self.expand_filters = {x.lower(): (x not in use_expand_hidden, x)
- for x in use_expand}
+ self.expand_filters = {
+ x.lower(): (x not in use_expand_hidden, x) for x in use_expand
+ }
self.use_expand = use_expand
self.use_expand_hidden = use_expand_hidden
self.known_flags = {}
@@ -62,7 +66,10 @@ class use_expand_filter:
expand_state = ef[split_flag[0]]
if expand_state[0]:
# not hidden
- kf[flag] = data = (expand_state[1], flag[len(split_flag[0]) + 1:])
+ kf[flag] = data = (
+ expand_state[1],
+ flag[len(split_flag[0]) + 1 :],
+ )
else:
kf[flag] = data = False
break
@@ -84,7 +91,7 @@ class use_expand_filter:
class Formatter:
"""Base Formatter class: All formatters should be subclasses of this."""
- pkgcore_config_type = ConfigHint(typename='pmerge_formatter', raw_class=True)
+ pkgcore_config_type = ConfigHint(typename="pmerge_formatter", raw_class=True)
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
@@ -94,8 +101,7 @@ class Formatter:
raise NotImplementedError(self.format)
def ask(self, question, responses=None, default_answer=None, limit=3):
- return userquery(
- question, self.out, self.err, responses, default_answer, limit)
+ return userquery(question, self.out, self.err, responses, default_answer, limit)
def end(self):
"""Called at the end, normally for summary information"""
@@ -112,7 +118,7 @@ class VerboseFormatter(Formatter):
"""Formatter with output forced into verbose mode."""
def __init__(self, **kwargs):
- kwargs['verbosity'] = 1
+ kwargs["verbosity"] = 1
super().__init__(**kwargs)
@@ -120,7 +126,7 @@ class PkgcoreFormatter(Formatter):
"""The original pkgcore output"""
def format(self, op):
- repo = getattr(op.pkg.repo, 'repo_id', None)
+ repo = getattr(op.pkg.repo, "repo_id", None)
if not repo:
p = str(op.pkg.cpvstr)
else:
@@ -151,22 +157,21 @@ class CountingFormatter(Formatter):
self.out.write()
if self.verbosity > 0:
total = sum(self.package_data.values())
- self.out.write(
- f"Total: {total} package{pluralism(total)}", autoline=False)
+ self.out.write(f"Total: {total} package{pluralism(total)}", autoline=False)
d = dict(self.package_data.items())
op_types = (
- ('add', 'new'),
- ('upgrade', 'upgrade'),
- ('downgrade', 'downgrade'),
- ('slotted_add', 'in new slot'),
- ('replace', 'reinstall'),
+ ("add", "new"),
+ ("upgrade", "upgrade"),
+ ("downgrade", "downgrade"),
+ ("slotted_add", "in new slot"),
+ ("replace", "reinstall"),
)
op_list = []
for op_type, op_str in op_types:
num_ops = d.pop(op_type, 0)
if num_ops:
- if op_str == 'new':
+ if op_str == "new":
op_list.append(f"{num_ops} {op_str}")
else:
op_list.append(f"{num_ops} {op_str}{pluralism(num_ops)}")
@@ -176,7 +181,10 @@ class CountingFormatter(Formatter):
self.out.write(f" ({', '.join(op_list)})", autoline=False)
if self.download_size:
self.out.write(
- ', Size of downloads: ', sizeof_fmt(self.download_size), autoline=False)
+ ", Size of downloads: ",
+ sizeof_fmt(self.download_size),
+ autoline=False,
+ )
self.out.write()
@@ -187,8 +195,7 @@ class PortageFormatter(CountingFormatter):
kwargs.setdefault("use_expand", set())
kwargs.setdefault("use_expand_hidden", set())
super().__init__(**kwargs)
- self.use_splitter = use_expand_filter(
- self.use_expand, self.use_expand_hidden)
+ self.use_splitter = use_expand_filter(self.use_expand, self.use_expand_hidden)
# Map repo location to an index.
self.repos = {}
# set of files to be downloaded
@@ -214,76 +221,80 @@ class PortageFormatter(CountingFormatter):
out.autoline = False
self.pkg_disabled_use = self.pkg_forced_use = set()
- if hasattr(self, 'pkg_get_use'):
+ if hasattr(self, "pkg_get_use"):
self.pkg_forced_use, _, self.pkg_disabled_use = self.pkg_get_use(op.pkg)
# This is for the summary at the end
if self.quiet_repo_display:
- self.repos.setdefault(op.pkg.repo, len(self.repos)+1)
+ self.repos.setdefault(op.pkg.repo, len(self.repos) + 1)
- pkg_is_bold = any(x.match(op.pkg) for x in getattr(self, 'world_list', ()))
+ pkg_is_bold = any(x.match(op.pkg) for x in getattr(self, "world_list", ()))
# We don't do blockers or --tree stuff yet
- data = ['[']
+ data = ["["]
pkg_coloring = []
if pkg_is_bold:
pkg_coloring.append(out.bold)
- if op.desc == 'remove':
- pkg_coloring.insert(0, out.fg('red'))
- data += pkg_coloring + ['uninstall']
- elif getattr(op.pkg, 'built', False):
- pkg_coloring.insert(0, out.fg('magenta'))
- data += pkg_coloring + ['binary']
+ if op.desc == "remove":
+ pkg_coloring.insert(0, out.fg("red"))
+ data += pkg_coloring + ["uninstall"]
+ elif getattr(op.pkg, "built", False):
+ pkg_coloring.insert(0, out.fg("magenta"))
+ data += pkg_coloring + ["binary"]
else:
- pkg_coloring.insert(0, out.fg('green'))
- data += pkg_coloring + ['ebuild']
+ pkg_coloring.insert(0, out.fg("green"))
+ data += pkg_coloring + ["ebuild"]
- data += [out.reset, ' ']
+ data += [out.reset, " "]
out.write(*data)
# Order is important here - look at the above diagram
op_type = op.desc
- op_chars = [[' '] for x in range(7)]
- if 'fetch' in op.pkg.restrict:
- if all(os.path.isfile(pjoin(self.distdir, f))
- for f in op.pkg.distfiles):
- fetched = [out.fg('green'), out.bold, 'f', out.reset]
+ op_chars = [[" "] for x in range(7)]
+ if "fetch" in op.pkg.restrict:
+ if all(os.path.isfile(pjoin(self.distdir, f)) for f in op.pkg.distfiles):
+ fetched = [out.fg("green"), out.bold, "f", out.reset]
else:
- fetched = [out.fg('red'), out.bold, 'F', out.reset]
+ fetched = [out.fg("red"), out.bold, "F", out.reset]
op_chars[3] = fetched
if op.desc == "add":
- op_chars[1] = [out.fg('green'), out.bold, 'N', out.reset]
- if op.pkg.slot != '0' and self.installed_repos.match(op.pkg.unversioned_atom):
- op_chars[2] = [out.fg('green'), out.bold, 'S', out.reset]
- op_type = 'slotted_add'
+ op_chars[1] = [out.fg("green"), out.bold, "N", out.reset]
+ if op.pkg.slot != "0" and self.installed_repos.match(
+ op.pkg.unversioned_atom
+ ):
+ op_chars[2] = [out.fg("green"), out.bold, "S", out.reset]
+ op_type = "slotted_add"
elif op.desc == "replace":
if op.pkg == op.old_pkg:
- op_chars[2] = [out.fg('yellow'), out.bold, 'R', out.reset]
+ op_chars[2] = [out.fg("yellow"), out.bold, "R", out.reset]
else:
- op_chars[4] = [out.fg('cyan'), out.bold, 'U', out.reset]
+ op_chars[4] = [out.fg("cyan"), out.bold, "U", out.reset]
if op.pkg > op.old_pkg:
- op_type = 'upgrade'
+ op_type = "upgrade"
else:
- op_chars[5] = [out.fg('blue'), out.bold, 'D', out.reset]
- op_type = 'downgrade'
- elif op.desc == 'remove':
+ op_chars[5] = [out.fg("blue"), out.bold, "D", out.reset]
+ op_type = "downgrade"
+ elif op.desc == "remove":
pass
else:
logger.warning("unformattable op type: desc(%r), %r", op.desc, op)
if self.verbosity > 0:
- if (self.unstable_arch in op.pkg.keywords and
- self.unstable_arch not in op.pkg.repo.domain_settings['ACCEPT_KEYWORDS']):
- op_chars[6] = [out.fg('yellow'), out.bold, '~', out.reset]
+ if (
+ self.unstable_arch in op.pkg.keywords
+ and self.unstable_arch
+ not in op.pkg.repo.domain_settings["ACCEPT_KEYWORDS"]
+ ):
+ op_chars[6] = [out.fg("yellow"), out.bold, "~", out.reset]
elif not op.pkg.keywords:
- op_chars[6] = [out.fg('red'), out.bold, '*', out.reset]
+ op_chars[6] = [out.fg("red"), out.bold, "*", out.reset]
else:
if op.pkg.repo.masked.match(op.pkg.versioned_atom):
- op_chars[6] = [out.fg('red'), out.bold, '#', out.reset]
+ op_chars[6] = [out.fg("red"), out.bold, "#", out.reset]
out.write(*(iflatten_instance(op_chars)))
- out.write('] ')
+ out.write("] ")
self.visit_op(op_type)
@@ -291,49 +302,57 @@ class PortageFormatter(CountingFormatter):
if self.verbosity > 0:
if op.pkg.subslot != op.pkg.slot:
pkg.append(f":{op.pkg.slot}/{op.pkg.subslot}")
- elif op.pkg.slot != '0':
+ elif op.pkg.slot != "0":
pkg.append(f":{op.pkg.slot}")
if not self.quiet_repo_display and op.pkg.source_repository:
pkg.append(f"::{op.pkg.source_repository}")
out.write(*(pkg_coloring + pkg + [out.reset]))
installed = []
- if op.desc == 'replace':
+ if op.desc == "replace":
old_pkg = [op.old_pkg.fullver]
if self.verbosity > 0:
if op.old_pkg.subslot != op.old_pkg.slot:
old_pkg.append(f":{op.old_pkg.slot}/{op.old_pkg.subslot}")
- elif op.old_pkg.slot != '0':
+ elif op.old_pkg.slot != "0":
old_pkg.append(f":{op.old_pkg.slot}")
if not self.quiet_repo_display and op.old_pkg.source_repository:
old_pkg.append(f"::{op.old_pkg.source_repository}")
- if op_type != 'replace' or op.pkg.source_repository != op.old_pkg.source_repository:
- installed = ''.join(old_pkg)
- elif op_type == 'slotted_add':
+ if (
+ op_type != "replace"
+ or op.pkg.source_repository != op.old_pkg.source_repository
+ ):
+ installed = "".join(old_pkg)
+ elif op_type == "slotted_add":
if self.verbosity > 0:
pkgs = sorted(
- f"{x.fullver}:{x.slot}" for x in
- self.installed_repos.match(op.pkg.unversioned_atom))
+ f"{x.fullver}:{x.slot}"
+ for x in self.installed_repos.match(op.pkg.unversioned_atom)
+ )
else:
pkgs = sorted(
- x.fullver for x in
- self.installed_repos.match(op.pkg.unversioned_atom))
- installed = ', '.join(pkgs)
+ x.fullver
+ for x in self.installed_repos.match(op.pkg.unversioned_atom)
+ )
+ installed = ", ".join(pkgs)
# output currently installed versions
if installed:
- out.write(' ', out.fg('blue'), out.bold, f'[{installed}]', out.reset)
+ out.write(" ", out.fg("blue"), out.bold, f"[{installed}]", out.reset)
# Build a list of (useflags, use_expand_dicts) tuples.
# HACK: if we are in "replace" mode we build a list of length
# 4, else this is a list of length 2. We then pass this to
# format_use which can take either 2 or 4 arguments.
uses = ((), ())
- if op.desc == 'replace':
+ if op.desc == "replace":
uses = (
- op.pkg.iuse_stripped, op.pkg.use,
- op.old_pkg.iuse_stripped, op.old_pkg.use)
- elif op.desc == 'add':
+ op.pkg.iuse_stripped,
+ op.pkg.use,
+ op.old_pkg.iuse_stripped,
+ op.old_pkg.use,
+ )
+ elif op.desc == "add":
uses = (op.pkg.iuse_stripped, op.pkg.use)
stuff = list(map(self.use_splitter, uses))
@@ -342,7 +361,7 @@ class PortageFormatter(CountingFormatter):
uselists, usedicts = list(zip(*stuff))
# output USE flags
- self.format_use('use', *uselists)
+ self.format_use("use", *uselists)
# output USE_EXPAND flags
for expand in sorted(self.use_expand - self.use_expand_hidden):
@@ -358,24 +377,36 @@ class PortageFormatter(CountingFormatter):
if self.verbosity > 0:
if not op.pkg.built:
downloads = set(
- f for f in op.pkg.distfiles
- if not os.path.isfile(pjoin(self.distdir, f)))
+ f
+ for f in op.pkg.distfiles
+ if not os.path.isfile(pjoin(self.distdir, f))
+ )
if downloads.difference(self.downloads):
self.downloads.update(downloads)
size = sum(
- v['size'] for dist, v in
- op.pkg.manifest.distfiles.items() if dist in downloads)
+ v["size"]
+ for dist, v in op.pkg.manifest.distfiles.items()
+ if dist in downloads
+ )
if size:
self.download_size += size
- out.write(' ', sizeof_fmt(size))
+ out.write(" ", sizeof_fmt(size))
if self.quiet_repo_display:
- out.write(out.fg('cyan'), f" [{self.repos[op.pkg.repo]}]")
+ out.write(out.fg("cyan"), f" [{self.repos[op.pkg.repo]}]")
- out.write('\n')
+ out.write("\n")
out.autoline = origautoline
- def format_use(self, attr, pkg_iuse, pkg_use, old_pkg_iuse=None, old_pkg_use=None, sorter=lambda k: k):
+ def format_use(
+ self,
+ attr,
+ pkg_iuse,
+ pkg_use,
+ old_pkg_iuse=None,
+ old_pkg_use=None,
+ sorter=lambda k: k,
+ ):
"""Write the current selection from a set of flags to a formatter.
:type attr: string
@@ -390,10 +421,10 @@ class PortageFormatter(CountingFormatter):
:param old_pkg_use: enabled use flags in the previous version
"""
out = self.out
- red = out.fg('red')
- green = out.fg('green')
- blue = out.fg('blue')
- yellow = out.fg('yellow')
+ red = out.fg("red")
+ green = out.fg("green")
+ blue = out.fg("blue")
+ yellow = out.fg("yellow")
bold = out.bold
reset = out.reset
@@ -409,74 +440,82 @@ class PortageFormatter(CountingFormatter):
removed = set(old_pkg_iuse) - set(pkg_iuse)
for flag in sorted(enabled, key=sorter):
- expanded_flag = '_'.join((attr.lower(), flag)) if attr != 'use' else flag
+ expanded_flag = (
+ "_".join((attr.lower(), flag)) if attr != "use" else flag
+ )
if flag in old_enabled:
# unchanged
if self.verbosity > 0:
if expanded_flag in self.pkg_forced_use:
- flags.extend(('(', red, bold, flag, reset, ')', ' '))
+ flags.extend(("(", red, bold, flag, reset, ")", " "))
else:
- flags.extend((red, bold, flag, reset, ' '))
+ flags.extend((red, bold, flag, reset, " "))
elif flag in old_disabled:
# toggled
if expanded_flag in self.pkg_forced_use:
- flags.extend(('(', green, bold, flag, reset, '*)', ' '))
+ flags.extend(("(", green, bold, flag, reset, "*)", " "))
else:
- flags.extend((green, bold, flag, reset, '*', ' '))
+ flags.extend((green, bold, flag, reset, "*", " "))
else:
# new
if expanded_flag in self.pkg_forced_use:
- flags.extend(('(', yellow, bold, flag, reset, '%*)', ' '))
+ flags.extend(("(", yellow, bold, flag, reset, "%*)", " "))
else:
- flags.extend((yellow, bold, flag, reset, '%*', ' '))
+ flags.extend((yellow, bold, flag, reset, "%*", " "))
for flag in sorted(disabled, key=sorter):
- expanded_flag = '_'.join((attr.lower(), flag)) if attr != 'use' else flag
+ expanded_flag = (
+ "_".join((attr.lower(), flag)) if attr != "use" else flag
+ )
if flag in old_disabled:
# unchanged
if self.verbosity > 0:
if expanded_flag in self.pkg_disabled_use:
- flags.extend(('(', blue, bold, '-', flag, reset, ')', ' '))
+ flags.extend(("(", blue, bold, "-", flag, reset, ")", " "))
else:
- flags.extend((blue, bold, '-', flag, reset, ' '))
+ flags.extend((blue, bold, "-", flag, reset, " "))
elif flag in old_enabled:
# toggled
if expanded_flag in self.pkg_disabled_use:
- flags.extend(('(', green, bold, '-', flag, reset, '*)', ' '))
+ flags.extend(("(", green, bold, "-", flag, reset, "*)", " "))
else:
- flags.extend((green, bold, '-', flag, reset, '*', ' '))
+ flags.extend((green, bold, "-", flag, reset, "*", " "))
else:
# new
if expanded_flag in self.pkg_disabled_use:
- flags.extend(('(', yellow, bold, '-', flag, reset, '%)', ' '))
+ flags.extend(("(", yellow, bold, "-", flag, reset, "%)", " "))
else:
- flags.extend((yellow, bold, '-', flag, reset, '%', ' '))
+ flags.extend((yellow, bold, "-", flag, reset, "%", " "))
if self.verbosity > 0:
for flag in sorted(removed, key=sorter):
if flag in old_enabled:
- flags.extend(('(', yellow, bold, '-', flag, reset, '%*)', ' '))
+ flags.extend(("(", yellow, bold, "-", flag, reset, "%*)", " "))
else:
- flags.extend(('(', yellow, bold, '-', flag, reset, '%)', ' '))
+ flags.extend(("(", yellow, bold, "-", flag, reset, "%)", " "))
# new pkg install
else:
for flag in sorted(enabled, key=sorter):
- expanded_flag = '_'.join((attr.lower(), flag)) if attr != 'use' else flag
+ expanded_flag = (
+ "_".join((attr.lower(), flag)) if attr != "use" else flag
+ )
if expanded_flag in self.pkg_forced_use:
- flags.extend(('(', red, bold, flag, reset, ')', ' '))
+ flags.extend(("(", red, bold, flag, reset, ")", " "))
else:
- flags.extend((red, bold, flag, reset, ' '))
+ flags.extend((red, bold, flag, reset, " "))
for flag in sorted(disabled, key=sorter):
- expanded_flag = '_'.join((attr.lower(), flag)) if attr != 'use' else flag
+ expanded_flag = (
+ "_".join((attr.lower(), flag)) if attr != "use" else flag
+ )
if expanded_flag in self.pkg_disabled_use:
- flags.extend(('(', blue, bold, '-', flag, reset, ')', ' '))
+ flags.extend(("(", blue, bold, "-", flag, reset, ")", " "))
else:
- flags.extend((blue, bold, '-', flag, reset, ' '))
+ flags.extend((blue, bold, "-", flag, reset, " "))
# Only write this if we have something to write
if flags:
- out.write(' ', attr.upper(), '="')
+ out.write(" ", attr.upper(), '="')
# Omit the final space.
out.write(*flags[:-1])
out.write('"')
@@ -491,16 +530,24 @@ class PortageFormatter(CountingFormatter):
repos = list(self.repos.items())
repos.sort(key=operator.itemgetter(1))
for k, v in repos:
- reponame = getattr(k, 'repo_id', 'unknown repo id')
- location = getattr(k, 'location', 'unspecified location')
+ reponame = getattr(k, "repo_id", "unknown repo id")
+ location = getattr(k, "location", "unspecified location")
if reponame != location:
self.out.write(
- ' ', self.out.fg('cyan'), f"[{v}]",
- self.out.reset, f" {reponame} ({location})")
+ " ",
+ self.out.fg("cyan"),
+ f"[{v}]",
+ self.out.reset,
+ f" {reponame} ({location})",
+ )
else:
self.out.write(
- ' ', self.out.fg('cyan'), f"[{v}]",
- self.out.reset, f" {location}")
+ " ",
+ self.out.fg("cyan"),
+ f"[{v}]",
+ self.out.reset,
+ f" {location}",
+ )
class PortageVerboseFormatter(VerboseFormatter, PortageFormatter):
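
The use_expand_filter hunks above bucket flags such as python_targets_python3_10 under their USE_EXPAND variable by matching the longest known variable prefix and keeping the remainder as the value. A simplified standalone sketch of that lookup, assuming a plain set of lower-cased variable names (illustrative only, not pkgcore's class):

def split_use_expand(flag, use_expand):
    """Split a USE flag into (VARIABLE, value) if it belongs to a USE_EXPAND
    group, otherwise return ("USE", flag).

    use_expand holds lower-cased variable names, e.g. {"python_targets"}.
    """
    parts = flag.split("_")
    # try the longest candidate prefix first
    for i in range(len(parts) - 1, 0, -1):
        prefix = "_".join(parts[:i])
        if prefix in use_expand:
            return prefix.upper(), flag[len(prefix) + 1:]
    return "USE", flag


if __name__ == "__main__":
    groups = {"python_targets", "cpu_flags_x86"}
    print(split_use_expand("python_targets_python3_11", groups))  # ('PYTHON_TARGETS', 'python3_11')
    print(split_use_expand("ssl", groups))                        # ('USE', 'ssl')
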
diff --git a/src/pkgcore/ebuild/inspect_profile.py b/src/pkgcore/ebuild/inspect_profile.py
index dc8c5942d..8ef11ff37 100644
--- a/src/pkgcore/ebuild/inspect_profile.py
+++ b/src/pkgcore/ebuild/inspect_profile.py
@@ -17,21 +17,22 @@ commands = []
class _base(arghparse.ArgparseCommand):
-
@staticmethod
def _validate_args(parser, namespace):
path = namespace.profile
if path is None:
if namespace.repo is not None:
# default to the repo's main profiles dir
- path = pjoin(namespace.repo.location, 'profiles')
+ path = pjoin(namespace.repo.location, "profiles")
else:
# default to the configured system profile if none is selected
path = namespace.config.get_default("domain").profile.profile
else:
- if namespace.repo is not None and getattr(namespace.repo, 'location', False):
- if not path.startswith('/'):
- path = pjoin(namespace.repo.location, 'profiles', path)
+ if namespace.repo is not None and getattr(
+ namespace.repo, "location", False
+ ):
+ if not path.startswith("/"):
+ path = pjoin(namespace.repo.location, "profiles", path)
try:
stack = profiles.ProfileStack(arghparse.existent_path(path))
except argparse.ArgumentTypeError as e:
@@ -42,9 +43,9 @@ class _base(arghparse.ArgparseCommand):
def bind_to_parser(self, parser):
arghparse.ArgparseCommand.bind_to_parser(self, parser)
- parser.add_argument('profile', help='path to the profile to inspect')
+ parser.add_argument("profile", help="path to the profile to inspect")
name = self.__class__.__name__
- kwds = {(f'_{name}_suppress'): arghparse.DelayedDefault.wipe(('domain'), 50)}
+ kwds = {(f"_{name}_suppress"): arghparse.DelayedDefault.wipe(("domain"), 50)}
parser.set_defaults(**kwds)
parser.bind_final_check(self._validate_args)
self._subclass_bind(parser)
@@ -66,9 +67,9 @@ class parent(_base, metaclass=_register_command):
if namespace.repo is None:
out.write("\n".join(x.path for x in namespace.profile.stack))
else:
- repo_dir = pjoin(namespace.repo.location, 'profiles')
+ repo_dir = pjoin(namespace.repo.location, "profiles")
for x in namespace.profile.stack:
- out.write(x.path[len(repo_dir):].lstrip('/'))
+ out.write(x.path[len(repo_dir) :].lstrip("/"))
class eapi(_base, metaclass=_register_command):
@@ -83,14 +84,17 @@ class status(_base, metaclass=_register_command):
"""output profile status"""
def __call__(self, namespace, out, err):
- profiles_dir = pjoin(namespace.profile.node.repoconfig.location, 'profiles')
- profile_rel_path = namespace.profile.path[len(profiles_dir):].lstrip('/')
+ profiles_dir = pjoin(namespace.profile.node.repoconfig.location, "profiles")
+ profile_rel_path = namespace.profile.path[len(profiles_dir) :].lstrip("/")
arch_profiles = namespace.profile.node.repoconfig.arch_profiles
- statuses = [(path, status) for path, status in chain.from_iterable(arch_profiles.values())
- if path.startswith(profile_rel_path)]
+ statuses = [
+ (path, status)
+ for path, status in chain.from_iterable(arch_profiles.values())
+ if path.startswith(profile_rel_path)
+ ]
if len(statuses) > 1:
for path, status in sorted(statuses):
- out.write(f'{path}: {status}')
+ out.write(f"{path}: {status}")
elif statuses:
out.write(statuses[0][1])
@@ -99,17 +103,25 @@ class deprecated(_base, metaclass=_register_command):
"""dump deprecation notices, if any"""
def __call__(self, namespace, out, err):
- for idx, profile in enumerate(x for x in namespace.profile.stack if x.deprecated):
+ for idx, profile in enumerate(
+ x for x in namespace.profile.stack if x.deprecated
+ ):
if idx:
out.write()
out.write(out.bold, out.fg("cyan"), profile.path, out.reset, ":")
data = profile.deprecated
if data[0]:
- out.write(" ", out.fg("yellow"), "replacement profile", out.reset, f": {data[0]}")
+ out.write(
+ " ",
+ out.fg("yellow"),
+ "replacement profile",
+ out.reset,
+ f": {data[0]}",
+ )
if data[1]:
out.write(" ", out.fg("yellow"), "deprecation message", out.reset, ":")
for line in data[1].split("\n"):
- out.write(line, prefix=' ')
+ out.write(line, prefix=" ")
class provided(_base, metaclass=_register_command):
@@ -126,8 +138,12 @@ class provided(_base, metaclass=_register_command):
for pkg_name, pkgs in sorted(targets.items(), key=operator.itemgetter(0)):
out.write(
- out.fg("cyan"), pkg_name, out.reset, ": ",
- ", ".join(x.fullver for x in sorted(pkgs)))
+ out.fg("cyan"),
+ pkg_name,
+ out.reset,
+ ": ",
+ ", ".join(x.fullver for x in sorted(pkgs)),
+ )
class system(_base, metaclass=_register_command):
@@ -147,12 +163,8 @@ class use_expand(_base, metaclass=_register_command):
"""
def __call__(self, namespace, out, err):
- out.write(
- "flags: ",
- ', '.join(sorted(namespace.profile.use_expand)))
- out.write(
- "hidden: ",
- ', '.join(sorted(namespace.profile.use_expand_hidden)))
+ out.write("flags: ", ", ".join(sorted(namespace.profile.use_expand)))
+ out.write("hidden: ", ", ".join(sorted(namespace.profile.use_expand_hidden)))
class iuse_effective(_base, metaclass=_register_command):
@@ -160,7 +172,7 @@ class iuse_effective(_base, metaclass=_register_command):
def __call__(self, namespace, out, err):
if namespace.profile.iuse_effective:
- out.write(' '.join(sorted(namespace.profile.iuse_effective)))
+ out.write(" ".join(sorted(namespace.profile.iuse_effective)))
class masks(_base, metaclass=_register_command):
@@ -193,7 +205,7 @@ class package_bashrc(_base, metaclass=_register_command):
def __call__(self, namespace, out, err):
for package, bashrcs in namespace.profile.pkg_bashrcs:
bashrcs = ", ".join(s.path for s in bashrcs)
- out.write(f'{package}: {bashrcs}')
+ out.write(f"{package}: {bashrcs}")
class keywords(_base, metaclass=_register_command):
@@ -217,10 +229,9 @@ class accept_keywords(_base, metaclass=_register_command):
class _use(_base):
-
def _output_use(self, neg, pos):
- neg = ('-' + x for x in neg)
- return ' '.join(sorted(chain(neg, pos)))
+ neg = ("-" + x for x in neg)
+ return " ".join(sorted(chain(neg, pos)))
def __call__(self, namespace, out, err):
global_use = []
@@ -241,11 +252,11 @@ class _use(_base):
global_use = (neg, pos)
if global_use:
- out.write(f'*/*: {self._output_use(*global_use)}')
+ out.write(f"*/*: {self._output_use(*global_use)}")
if pkg_use:
for pkg, (neg, pos) in sorted(pkg_use.items()):
if neg or pos:
- out.write(f'{pkg}: {self._output_use(neg, pos)}')
+ out.write(f"{pkg}: {self._output_use(neg, pos)}")
class use(_use, metaclass=_register_command):
@@ -303,10 +314,12 @@ class defaults(_base, metaclass=_register_command):
def _subclass_bind(self, parser):
parser.add_argument(
- "variables", nargs='*',
+ "variables",
+ nargs="*",
help="if not specified, all settings are displayed"
- ". If given, output is limited to just those settings if "
- "they exist")
+ ". If given, output is limited to just those settings if "
+ "they exist",
+ )
def __call__(self, namespace, out, err):
var_filter = namespace.variables
@@ -322,7 +335,7 @@ class defaults(_base, metaclass=_register_command):
if not val:
continue
if isinstance(val, tuple):
- val = ' '.join(val)
+ val = " ".join(val)
out.write(f'{key}="{val}"')
@@ -338,8 +351,8 @@ def bind_parser(parser, name):
subparsers = parser.add_subparsers(description=f"{name} commands")
for command in commands:
# Split docstrings into summaries and extended docs.
- help, _, docs = command.__doc__.partition('\n')
+ help, _, docs = command.__doc__.partition("\n")
subparser = subparsers.add_parser(
- command.__name__.lower(),
- help=help, docs=docs)
+ command.__name__.lower(), help=help, docs=docs
+ )
command().bind_to_parser(subparser)
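
inspect_profile.py above registers every command class through the _register_command metaclass and later turns each class docstring into subcommand help via bind_parser. A minimal standalone sketch of the same registration pattern, using stock argparse instead of pkgcore's arghparse (the class and function names below are illustrative):

import argparse

commands = []


class _register_command(type):
    """Collect every concrete command class into the module-level list."""

    def __new__(mcs, name, bases, namespace):
        cls = super().__new__(mcs, name, bases, namespace)
        if not name.startswith("_"):
            commands.append(cls)
        return cls


class _base(metaclass=_register_command):
    pass


class parent(_base):
    """output the linearized tree of inherited parents"""

    def __call__(self, args):
        print("parent profiles of", args.profile)


class eapi(_base):
    """output EAPI support required for reading this profile"""

    def __call__(self, args):
        print("EAPI of", args.profile)


def bind_parser(parser):
    subparsers = parser.add_subparsers(dest="command", required=True)
    for command in commands:
        # the first docstring line becomes the subcommand summary
        summary = command.__doc__.partition("\n")[0]
        sub = subparsers.add_parser(command.__name__.lower(), help=summary)
        sub.add_argument("profile")
        sub.set_defaults(func=command())
    return parser


if __name__ == "__main__":
    args = bind_parser(argparse.ArgumentParser()).parse_args(["eapi", "default/linux"])
    args.func(args)  # -> EAPI of default/linux
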
diff --git a/src/pkgcore/ebuild/misc.py b/src/pkgcore/ebuild/misc.py
index b12d4de8d..19fa5270a 100644
--- a/src/pkgcore/ebuild/misc.py
+++ b/src/pkgcore/ebuild/misc.py
@@ -3,11 +3,17 @@ misc. stuff we've not found a spot for yet.
"""
__all__ = (
- "ChunkedDataDict", "IncrementalsDict", "PayloadDict",
- "chunked_data", "collapsed_restrict_to_data", "get_relative_dosym_target",
- "incremental_chunked", "incremental_expansion",
+ "ChunkedDataDict",
+ "IncrementalsDict",
+ "PayloadDict",
+ "chunked_data",
+ "collapsed_restrict_to_data",
+ "get_relative_dosym_target",
+ "incremental_chunked",
+ "incremental_expansion",
"incremental_expansion_license",
- "non_incremental_collapsed_restrict_to_data", "optimize_incrementals",
+ "non_incremental_collapsed_restrict_to_data",
+ "optimize_incrementals",
"sort_keywords",
)
@@ -29,9 +35,11 @@ chunked_data = namedtuple("chunked_data", ("key", "neg", "pos"))
def sort_keywords(keywords):
"""Sort keywords in the proper order: i.e. glob-arches, arch, prefix-arches."""
+
def _sort_kwds(kw):
- parts = tuple(reversed(kw.lstrip('~-').partition('-')))
+ parts = tuple(reversed(kw.lstrip("~-").partition("-")))
return parts[0], parts[2]
+
return sorted(keywords, key=_sort_kwds)
@@ -41,11 +49,11 @@ def optimize_incrementals(sequence):
# is the terminal point- no point in having -x.
finalized = set()
for item in reversed(sequence):
- if item[0] == '-':
+ if item[0] == "-":
i = item[1:]
if not i:
raise ValueError("encountered an incomplete negation (just -, no flag)")
- if i == '*':
+ if i == "*":
# seen enough.
yield item
return
@@ -64,17 +72,18 @@ def incremental_chunked(orig, iterables):
orig.update(cinst.pos)
-def incremental_expansion(iterable, orig=None, msg_prefix='', finalize=True):
+def incremental_expansion(iterable, orig=None, msg_prefix="", finalize=True):
if orig is None:
orig = set()
for token in iterable:
- if token[0] == '-':
+ if token[0] == "-":
i = token[1:]
if not i:
raise ValueError(
- f"{msg_prefix} encountered an incomplete negation, '-'")
- if i == '*':
+ f"{msg_prefix} encountered an incomplete negation, '-'"
+ )
+ if i == "*":
orig.clear()
else:
orig.discard(i)
@@ -87,33 +96,38 @@ def incremental_expansion(iterable, orig=None, msg_prefix='', finalize=True):
return orig
-def incremental_expansion_license(pkg, licenses, license_groups, iterable, msg_prefix=''):
+def incremental_expansion_license(
+ pkg, licenses, license_groups, iterable, msg_prefix=""
+):
seen = set()
for token in iterable:
- if token[0] == '-':
+ if token[0] == "-":
i = token[1:]
if not i:
raise ValueError(
- f"{pkg}: {msg_prefix}encountered an incomplete negation, '-'")
- if i == '*':
+ f"{pkg}: {msg_prefix}encountered an incomplete negation, '-'"
+ )
+ if i == "*":
seen.clear()
else:
- if i[0] == '@':
+ if i[0] == "@":
i = i[1:]
if not i:
raise ValueError(
f"{pkg}: {msg_prefix}encountered an incomplete negation"
- " of a license group, '-@'")
+ " of a license group, '-@'"
+ )
seen.difference_update(license_groups.get(i, ()))
else:
seen.discard(i)
- elif token[0] == '@':
+ elif token[0] == "@":
i = token[1:]
if not i:
raise ValueError(
- f"{pkg}: {msg_prefix}encountered an incomplete license group, '@'")
+ f"{pkg}: {msg_prefix}encountered an incomplete license group, '@'"
+ )
seen.update(license_groups.get(i, ()))
- elif token == '*':
+ elif token == "*":
seen.update(licenses)
else:
seen.add(token)
@@ -132,14 +146,14 @@ class IncrementalsDict(mappings.DictMixin):
def __setitem__(self, key, value):
if key in self._incrementals:
if key in self._dict:
- self._dict[key] += f' {value}'
+ self._dict[key] += f" {value}"
else:
self._dict[key] = value
else:
self._dict[key] = value
for x in "getitem delitem len iter".split():
- x = f'__{x}__'
+ x = f"__{x}__"
locals()[x] = alias_method(f"_dict.{x}")
s = "pop clear keys items values"
for x in s.split():
@@ -149,7 +163,7 @@ class IncrementalsDict(mappings.DictMixin):
class collapsed_restrict_to_data(metaclass=generic_equality):
- __attr_comparison__ = ('defaults', 'freeform', 'atoms', '__class__')
+ __attr_comparison__ = ("defaults", "freeform", "atoms", "__class__")
incremental = True
def __init__(self, *restrict_sources, **kwds):
@@ -182,7 +196,13 @@ class collapsed_restrict_to_data(metaclass=generic_equality):
always.extend(data)
for atomlist in atom_d.values():
atomlist.append(
- (a, set([flag for flag in data if flag.startswith("-")])))
+ (
+ a,
+ set(
+ [flag for flag in data if flag.startswith("-")]
+ ),
+ )
+ )
elif isinstance(a, atom.atom):
atom_d.setdefault(a.key, []).append((a, data))
elif isinstance(a, boolean.AndRestriction):
@@ -197,14 +217,18 @@ class collapsed_restrict_to_data(metaclass=generic_equality):
else:
raise ValueError(
f"{a!r} doesn't operate on "
- f"package/category/repo: data {data!r}")
+ f"package/category/repo: data {data!r}"
+ )
else:
raise ValueError(
f"{a!r} is not an AlwaysBool, PackageRestriction, "
- f"or atom: data {data!r}")
+ f"or atom: data {data!r}"
+ )
if always:
- always = incremental_expansion(always, finalize=kwds.get("finalize_defaults", True))
+ always = incremental_expansion(
+ always, finalize=kwds.get("finalize_defaults", True)
+ )
else:
always = set()
self.defaults = always
@@ -249,7 +273,6 @@ class collapsed_restrict_to_data(metaclass=generic_equality):
class non_incremental_collapsed_restrict_to_data(collapsed_restrict_to_data):
-
def pull_data(self, pkg, force_copy=False):
l = []
for specific in self.freeform:
@@ -284,7 +307,9 @@ def _cached_build_cp_atom_payload(cache, sequence, restrict, payload_form=False)
key = (payload_form, restrict, tuple(sequence))
val = cache.get(key)
if val is None:
- val = cache[key] = _build_cp_atom_payload(sequence, restrict, payload_form=payload_form)
+ val = cache[key] = _build_cp_atom_payload(
+ sequence, restrict, payload_form=payload_form
+ )
return val
@@ -295,8 +320,10 @@ def _build_cp_atom_payload(sequence, restrict, payload_form=False):
l = []
if payload_form:
+
def f(r, neg, pos):
- return restrict_payload(r, tuple(chain(('-' + x for x in neg), pos)))
+ return restrict_payload(r, tuple(chain(("-" + x for x in neg), pos)))
+
else:
f = chunked_data
@@ -309,7 +336,7 @@ def _build_cp_atom_payload(sequence, restrict, payload_form=False):
i = reversed(i)
for data in i:
- if data.key == packages.AlwaysTrue or getattr(data.key, 'is_simple', False):
+ if data.key == packages.AlwaysTrue or getattr(data.key, "is_simple", False):
for n in data.neg:
ldefault(n, False)
for p in data.pos:
@@ -329,11 +356,13 @@ def _build_cp_atom_payload(sequence, restrict, payload_form=False):
# all is specific/non-simple, just reverse and return
return tuple(f(*vals) for vals in reversed(l))
- new_l = [f(
- restrict,
- tuple(k for k, v in locked.items() if not v), # neg
- tuple(k for k, v in locked.items() if v) # pos
- )]
+ new_l = [
+ f(
+ restrict,
+ tuple(k for k, v in locked.items() if not v), # neg
+ tuple(k for k, v in locked.items() if v), # pos
+ )
+ ]
# we exploit a few things this time around in reusing the algo from above
# we know there is only going to be one global (which we just added),
# and that everything is specific.
@@ -352,7 +381,7 @@ def _build_cp_atom_payload(sequence, restrict, payload_form=False):
class ChunkedDataDict(metaclass=generic_equality):
- __attr_comparison__ = ('_global_settings', '_dict')
+ __attr_comparison__ = ("_global_settings", "_dict")
def __init__(self):
self._global_settings = []
@@ -401,11 +430,13 @@ class ChunkedDataDict(metaclass=generic_equality):
if not isinstance(cdict, ChunkedDataDict):
raise TypeError(
"merge expects a ChunkedDataDict instance; "
- f"got type {type(cdict)}, {cdict!r}")
+ f"got type {type(cdict)}, {cdict!r}"
+ )
if isinstance(cdict, PayloadDict) and not isinstance(self, PayloadDict):
raise TypeError(
"merge expects a PayloadDataDict instance; "
- f"got type {type(cdict)}, {cdict!r}")
+ f"got type {type(cdict)}, {cdict!r}"
+ )
# straight extensions for this, rather than update_from_stream.
d = self._dict
for key, values in cdict._dict.items():
@@ -427,22 +458,26 @@ class ChunkedDataDict(metaclass=generic_equality):
# while a chain seems obvious here, reversed is used w/in _build_cp_atom;
# reversed doesn't like chain, so we just modify the list and do it this way.
self._global_settings.extend(new_globals)
- restrict = getattr(new_globals[0], 'key', packages.AlwaysTrue)
+ restrict = getattr(new_globals[0], "key", packages.AlwaysTrue)
if restrict == packages.AlwaysTrue:
self._global_settings[:] = list(
- _build_cp_atom_payload(self._global_settings, restrict))
+ _build_cp_atom_payload(self._global_settings, restrict)
+ )
def add(self, cinst):
self.update_from_stream([cinst])
def update_from_stream(self, stream):
for cinst in stream:
- if getattr(cinst.key, 'key', None) is not None:
+ if getattr(cinst.key, "key", None) is not None:
# atom, or something similar. use the key lookup.
# hack also... recreate the restriction; this is due to
# internal idiocy in ChunkedDataDict that will be fixed.
- new_globals = (x for x in self._global_settings
- if x not in self._dict[cinst.key.key])
+ new_globals = (
+ x
+ for x in self._global_settings
+ if x not in self._dict[cinst.key.key]
+ )
self._dict[cinst.key.key].extend(new_globals)
self._dict[cinst.key.key].append(cinst)
else:
@@ -451,25 +486,32 @@ class ChunkedDataDict(metaclass=generic_equality):
def freeze(self):
if not isinstance(self._dict, mappings.ImmutableDict):
self._dict = mappings.ImmutableDict(
- (k, tuple(v))
- for k, v in self._dict.items())
+ (k, tuple(v)) for k, v in self._dict.items()
+ )
self._global_settings = tuple(self._global_settings)
def optimize(self, cache=None):
if cache is None:
d_stream = (
(k, _build_cp_atom_payload(v, atom.atom(k), False))
- for k, v in self._dict.items())
- g_stream = (_build_cp_atom_payload(
+ for k, v in self._dict.items()
+ )
+ g_stream = _build_cp_atom_payload(
self._global_settings,
- packages.AlwaysTrue, payload_form=isinstance(self, PayloadDict)))
+ packages.AlwaysTrue,
+ payload_form=isinstance(self, PayloadDict),
+ )
else:
- d_stream = ((k, _cached_build_cp_atom_payload(
- cache, v, atom.atom(k), False))
- for k, v in self._dict.items())
- g_stream = (_cached_build_cp_atom_payload(
- cache, self._global_settings,
- packages.AlwaysTrue, payload_form=isinstance(self, PayloadDict)))
+ d_stream = (
+ (k, _cached_build_cp_atom_payload(cache, v, atom.atom(k), False))
+ for k, v in self._dict.items()
+ )
+ g_stream = _cached_build_cp_atom_payload(
+ cache,
+ self._global_settings,
+ packages.AlwaysTrue,
+ payload_form=isinstance(self, PayloadDict),
+ )
if self.frozen:
self._dict = mappings.ImmutableDict(d_stream)
@@ -486,12 +528,14 @@ class ChunkedDataDict(metaclass=generic_equality):
def render_to_payload(self):
d = PayloadDict()
- d = {atom.atom(k): _build_cp_atom_payload(v, atom.atom(k), True)
- for k, v in self._dict.items()}
+ d = {
+ atom.atom(k): _build_cp_atom_payload(v, atom.atom(k), True)
+ for k, v in self._dict.items()
+ }
if self._global_settings:
data = _build_cp_atom_payload(
- self._global_settings,
- packages.AlwaysTrue, payload_form=True)
+ self._global_settings, packages.AlwaysTrue, payload_form=True
+ )
d[packages.AlwaysTrue] = tuple(data)
return d
@@ -513,23 +557,22 @@ class ChunkedDataDict(metaclass=generic_equality):
class PayloadDict(ChunkedDataDict):
-
def mk_item(self, key, neg, pos):
return restrict_payload(key, tuple(chain(("-" + x for x in neg), pos)))
def add_bare_global(self, payload):
- neg = [x[1:] for x in payload if x[0] == '-']
- pos = [x for x in payload if x[0] != '-']
+ neg = [x[1:] for x in payload if x[0] == "-"]
+ pos = [x for x in payload if x[0] != "-"]
ChunkedDataDict.add_bare_global(self, neg, pos)
def add_global(self, pinst):
- neg = [x[1:] for x in pinst.data if x[0] == '-']
- pos = [x for x in pinst.data if x[0] != '-']
+ neg = [x[1:] for x in pinst.data if x[0] == "-"]
+ pos = [x for x in pinst.data if x[0] != "-"]
return ChunkedDataDict.add_global(self, chunked_data(pinst.restrict, neg, pos))
def update_from_stream(self, stream):
for pinst in stream:
- if getattr(pinst.restrict, 'key', None) is not None:
+ if getattr(pinst.restrict, "key", None) is not None:
# atom, or something similar. use the key lookup.
# hack also... recreate the restriction; this is due to
# internal idiocy in ChunkedDataDict that will be fixed.
@@ -543,7 +586,8 @@ class PayloadDict(ChunkedDataDict):
items = self._global_settings
s = set(pre_defaults)
data = chain.from_iterable(
- item.data for item in items if item.restrict.match(pkg))
+ item.data for item in items if item.restrict.match(pkg)
+ )
return incremental_expansion(data, orig=s)
pull_data = render_pkg
@@ -563,4 +607,4 @@ def run_sanity_checks(pkgs, domain, threads=None):
def get_relative_dosym_target(source, target):
"""Get relative path from target to source, for symlink target."""
# NB: as dosym arg, initial slash can be omitted
- return os.path.relpath(source, os.path.join('/', os.path.dirname(target)))
+ return os.path.relpath(source, os.path.join("/", os.path.dirname(target)))
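
The incremental_expansion helpers above implement the incremental-variable semantics used for FEATURES, USE and similar settings: a bare token adds a flag, -flag removes it, and -* clears everything accumulated so far. A small self-contained sketch of those token rules (an illustration of the semantics only, not pkgcore's exact function, which also validates and can finalize the result):

def apply_incrementals(tokens):
    """Fold an ordered token stream into the resulting flag set."""
    flags = set()
    for token in tokens:
        if token == "-*":
            flags.clear()             # reset everything seen so far
        elif token.startswith("-"):
            if token == "-":
                raise ValueError("incomplete negation: '-'")
            flags.discard(token[1:])  # drop a single flag
        else:
            flags.add(token)
    return flags


if __name__ == "__main__":
    # e.g. FEATURES accumulated across several incremental assignments
    print(sorted(apply_incrementals(["sandbox", "ccache", "-ccache", "test"])))
    # ['sandbox', 'test']
    print(sorted(apply_incrementals(["sandbox", "-*", "buildpkg"])))
    # ['buildpkg']
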
diff --git a/src/pkgcore/ebuild/pkg_updates.py b/src/pkgcore/ebuild/pkg_updates.py
index 0ab28180e..662d4040e 100644
--- a/src/pkgcore/ebuild/pkg_updates.py
+++ b/src/pkgcore/ebuild/pkg_updates.py
@@ -16,14 +16,15 @@ def _scan_directory(path, eapi):
if match is not None:
files.append(filename)
else:
- logger.error(f'incorrectly named update file: {filename!r}')
+ logger.error(f"incorrectly named update file: {filename!r}")
return sorted(files)
def read_updates(path, eapi):
def f():
d = deque()
- return [d,d]
+ return [d, d]
+
# mods tracks the start point [0], and the tail, [1].
# via this, pkg moves into a specific pkg can pick up
# changes past that point, while ignoring changes prior
@@ -37,15 +38,15 @@ def read_updates(path, eapi):
try:
for fp in _scan_directory(path, eapi):
with open(pjoin(path, fp)) as f:
- data = (line.rstrip('\n') for line in f)
+ data = (line.rstrip("\n") for line in f)
_process_updates(data, fp, mods, moved)
except FileNotFoundError:
pass
# force a walk of the tree, flattening it
- commands = {k: list(iflatten_instance(v[0], tuple)) for k,v in mods.items()}
+ commands = {k: list(iflatten_instance(v[0], tuple)) for k, v in mods.items()}
# filter out empty nodes.
- commands = {k: v for k,v in commands.items() if v}
+ commands = {k: v for k, v in commands.items() if v}
return commands
@@ -54,49 +55,55 @@ def _process_updates(sequence, filename, mods, moved):
for lineno, raw_line in enumerate(sequence, 1):
line = raw_line.strip()
if not line:
- logger.error(f'file {filename!r}: empty line {lineno}')
+ logger.error(f"file {filename!r}: empty line {lineno}")
continue
elif line != raw_line:
logger.error(
- f'file {filename!r}: extra whitespace in {raw_line!r} on line {lineno}')
+ f"file {filename!r}: extra whitespace in {raw_line!r} on line {lineno}"
+ )
line = line.split()
- if line[0] == 'move':
+ if line[0] == "move":
if len(line) != 3:
logger.error(
- f'file {filename!r}: {raw_line!r} on line {lineno}: bad move form')
+ f"file {filename!r}: {raw_line!r} on line {lineno}: bad move form"
+ )
continue
src, trg = atom(line[1]), atom(line[2])
if src.fullver is not None:
logger.error(
f"file {filename!r}: {raw_line!r} on line {lineno}: "
- f"atom {src} must be versionless")
+ f"atom {src} must be versionless"
+ )
continue
elif trg.fullver is not None:
logger.error(
f"file {filename!r}: {raw_line!r} on line {lineno}: "
- f"atom {trg} must be versionless")
+ f"atom {trg} must be versionless"
+ )
continue
if src.key in moved:
logger.warning(
f"file {filename!r}: {raw_line!r} on line {lineno}: "
f"{src} was already moved to {moved[src.key]}, "
- "this line is redundant")
+ "this line is redundant"
+ )
continue
d = deque()
- mods[src.key][1].extend([('move', src, trg), d])
+ mods[src.key][1].extend([("move", src, trg), d])
# start essentially a new checkpoint in the trg
mods[trg.key][1].append(d)
mods[trg.key][1] = d
moved[src.key] = trg
- elif line[0] == 'slotmove':
+ elif line[0] == "slotmove":
if len(line) != 4:
logger.error(
f"file {filename!r}: {raw_line!r} on line {lineno}: "
- "bad slotmove form")
+ "bad slotmove form"
+ )
continue
src = atom(line[1])
@@ -104,18 +111,21 @@ def _process_updates(sequence, filename, mods, moved):
logger.warning(
f"file {filename!r}: {raw_line!r} on line {lineno}: "
f"{src} was already moved to {moved[src.key]}, "
- "this line is redundant")
+ "this line is redundant"
+ )
continue
elif src.slot is not None:
logger.error(
f"file {filename!r}: {raw_line!r} on line {lineno}: "
- "slotted atom makes no sense for slotmoves")
+ "slotted atom makes no sense for slotmoves"
+ )
continue
- src_slot = atom(f'{src}:{line[2]}')
- trg_slot = atom(f'{src.key}:{line[3]}')
+ src_slot = atom(f"{src}:{line[2]}")
+ trg_slot = atom(f"{src.key}:{line[3]}")
- mods[src.key][1].append(('slotmove', src_slot, line[3]))
+ mods[src.key][1].append(("slotmove", src_slot, line[3]))
else:
logger.error(
- f'file {filename!r}: {raw_line!r} on line {lineno}: unknown command')
+ f"file {filename!r}: {raw_line!r} on line {lineno}: unknown command"
+ )
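
pkg_updates.py above parses profiles/updates files whose lines take one of two forms: "move <old cat/pkg> <new cat/pkg>" or "slotmove <atom> <old slot> <new slot>". A tiny standalone parser for just those line shapes (a sketch; pkgcore additionally validates atoms, versions and slots), with hypothetical package names in the example:

def parse_update_line(line):
    """Return ('move', src, dst) or ('slotmove', atom, old_slot, new_slot)."""
    fields = line.split()
    if not fields:
        raise ValueError("empty line")
    if fields[0] == "move":
        if len(fields) != 3:
            raise ValueError(f"bad move form: {line!r}")
        return ("move", fields[1], fields[2])
    if fields[0] == "slotmove":
        if len(fields) != 4:
            raise ValueError(f"bad slotmove form: {line!r}")
        return ("slotmove", fields[1], fields[2], fields[3])
    raise ValueError(f"unknown command: {line!r}")


if __name__ == "__main__":
    print(parse_update_line("move dev-util/foo dev-util/bar"))
    print(parse_update_line("slotmove dev-libs/baz 0 1"))
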
diff --git a/src/pkgcore/ebuild/portage_conf.py b/src/pkgcore/ebuild/portage_conf.py
index c90f09c6a..d2865c0bb 100644
--- a/src/pkgcore/ebuild/portage_conf.py
+++ b/src/pkgcore/ebuild/portage_conf.py
@@ -4,7 +4,8 @@ Converts portage config files into :obj:`pkgcore.config` form.
"""
__all__ = (
- 'PortageConfig', 'SecurityUpgradesViaProfile',
+ "PortageConfig",
+ "SecurityUpgradesViaProfile",
)
import configparser
@@ -34,14 +35,16 @@ from .repository import errors as repo_errors
def my_convert_hybrid(manager, val, arg_type):
"""Modified convert_hybrid using a sequence of strings for section_refs."""
- if arg_type.startswith('refs:'):
- subtype = 'ref:' + arg_type.split(':', 1)[1]
+ if arg_type.startswith("refs:"):
+ subtype = "ref:" + arg_type.split(":", 1)[1]
return [basics.LazyNamedSectionRef(manager, subtype, name) for name in val]
return basics.convert_hybrid(manager, val, arg_type)
-@configurable({'ebuild_repo': 'ref:repo', 'vdb': 'ref:repo',
- 'profile': 'ref:profile'}, typename='pkgset')
+@configurable(
+ {"ebuild_repo": "ref:repo", "vdb": "ref:repo", "profile": "ref:profile"},
+ typename="pkgset",
+)
def SecurityUpgradesViaProfile(ebuild_repo, vdb, profile):
"""generate a GLSA vuln. pkgset limited by profile
@@ -107,12 +110,12 @@ class PortageConfig(DictMixin):
dict: config settings
"""
self._config = {}
- stubconfig = pjoin(const.DATA_PATH, 'stubconfig')
+ stubconfig = pjoin(const.DATA_PATH, "stubconfig")
if location is None:
path = os.path.abspath(sys.prefix)
while (parent := os.path.dirname(path)) != path:
- config_root = pjoin(parent, 'etc/portage')
+ config_root = pjoin(parent, "etc/portage")
if os.path.exists(config_root):
location = config_root
break
@@ -123,7 +126,7 @@ class PortageConfig(DictMixin):
# override profile when using stub config
if location == stubconfig:
- profile_override = pjoin(const.DATA_PATH, 'stubrepo/profiles/default')
+ profile_override = pjoin(const.DATA_PATH, "stubrepo/profiles/default")
self.dir = location
@@ -135,69 +138,89 @@ class PortageConfig(DictMixin):
make_conf = {}
try:
- self.load_make_conf(make_conf, pjoin(const.CONFIG_PATH, 'make.globals'))
+ self.load_make_conf(make_conf, pjoin(const.CONFIG_PATH, "make.globals"))
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
raise config_errors.ParsingError("failed to load make.globals") from e
self.load_make_conf(
- make_conf, pjoin(self.dir, 'make.conf'), required=False,
- allow_sourcing=True, incrementals=True)
-
- self.root = kwargs.pop('root', make_conf.get("ROOT", "/"))
+ make_conf,
+ pjoin(self.dir, "make.conf"),
+ required=False,
+ allow_sourcing=True,
+ incrementals=True,
+ )
+
+ self.root = kwargs.pop("root", make_conf.get("ROOT", "/"))
gentoo_mirrors = [
- x.rstrip("/") + "/distfiles" for x in make_conf.pop("GENTOO_MIRRORS", "").split()]
+ x.rstrip("/") + "/distfiles"
+ for x in make_conf.pop("GENTOO_MIRRORS", "").split()
+ ]
self.features = frozenset(
- optimize_incrementals(make_conf.get('FEATURES', '').split()))
+ optimize_incrementals(make_conf.get("FEATURES", "").split())
+ )
self._add_sets()
self._add_profile(profile_override)
- self['vdb'] = basics.AutoConfigSection({
- 'class': 'pkgcore.vdb.ondisk.tree',
- 'location': pjoin(self.root, 'var', 'db', 'pkg'),
- 'cache_location': '/var/cache/edb/dep/var/db/pkg',
- })
+ self["vdb"] = basics.AutoConfigSection(
+ {
+ "class": "pkgcore.vdb.ondisk.tree",
+ "location": pjoin(self.root, "var", "db", "pkg"),
+ "cache_location": "/var/cache/edb/dep/var/db/pkg",
+ }
+ )
try:
repos_conf_defaults, repos_conf = self.load_repos_conf(
- pjoin(self.dir, 'repos.conf'))
+ pjoin(self.dir, "repos.conf")
+ )
except config_errors.ParsingError as e:
- if not getattr(getattr(e, 'exc', None), 'errno', None) == errno.ENOENT:
+ if not getattr(getattr(e, "exc", None), "errno", None) == errno.ENOENT:
raise
try:
# fallback to defaults provided by pkgcore
repos_conf_defaults, repos_conf = self.load_repos_conf(
- pjoin(const.CONFIG_PATH, 'repos.conf'))
+ pjoin(const.CONFIG_PATH, "repos.conf")
+ )
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
- raise config_errors.ParsingError('failed to find a usable repos.conf') from e
-
- self['ebuild-repo-common'] = basics.AutoConfigSection({
- 'class': 'pkgcore.ebuild.repository.tree',
- 'default_mirrors': gentoo_mirrors,
- 'inherit-only': True,
- })
+ raise config_errors.ParsingError(
+ "failed to find a usable repos.conf"
+ ) from e
+
+ self["ebuild-repo-common"] = basics.AutoConfigSection(
+ {
+ "class": "pkgcore.ebuild.repository.tree",
+ "default_mirrors": gentoo_mirrors,
+ "inherit-only": True,
+ }
+ )
repo_map = {}
repos = []
for repo_name, repo_opts in list(repos_conf.items()):
- repo_cls = repo_opts.pop('repo-type')
+ repo_cls = repo_opts.pop("repo-type")
try:
repo = repo_cls(
- self, repo_name=repo_name, repo_opts=repo_opts,
- repo_map=repo_map, defaults=repos_conf_defaults)
+ self,
+ repo_name=repo_name,
+ repo_opts=repo_opts,
+ repo_map=repo_map,
+ defaults=repos_conf_defaults,
+ )
except repo_errors.UnsupportedRepo as e:
logger.warning(
- f'skipping {repo_name!r} repo: unsupported EAPI {str(e.repo.eapi)!r}')
+ f"skipping {repo_name!r} repo: unsupported EAPI {str(e.repo.eapi)!r}"
+ )
del repos_conf[repo_name]
continue
# only register existent repos
- if os.path.exists(repo_opts['location']):
+ if os.path.exists(repo_opts["location"]):
self[repo_name] = basics.AutoConfigSection(repo)
repos.append(repo_name)
@@ -207,36 +230,44 @@ class PortageConfig(DictMixin):
self._make_repo_syncers(repos_conf, make_conf)
if repos:
- self['repo-stack'] = basics.FakeIncrementalDictConfigSection(
- my_convert_hybrid, {
- 'class': 'pkgcore.repository.multiplex.config_tree',
- 'repos': tuple(repos)})
-
- self['vuln'] = basics.AutoConfigSection({
- 'class': SecurityUpgradesViaProfile,
- 'ebuild_repo': 'repo-stack',
- 'vdb': 'vdb',
- 'profile': 'profile',
- })
+ self["repo-stack"] = basics.FakeIncrementalDictConfigSection(
+ my_convert_hybrid,
+ {
+ "class": "pkgcore.repository.multiplex.config_tree",
+ "repos": tuple(repos),
+ },
+ )
+
+ self["vuln"] = basics.AutoConfigSection(
+ {
+ "class": SecurityUpgradesViaProfile,
+ "ebuild_repo": "repo-stack",
+ "vdb": "vdb",
+ "profile": "profile",
+ }
+ )
# check if package building was forced on by the user
- forced_buildpkg = kwargs.pop('buildpkg', False)
+ forced_buildpkg = kwargs.pop("buildpkg", False)
if forced_buildpkg:
- make_conf['FEATURES'] += ' buildpkg'
+ make_conf["FEATURES"] += " buildpkg"
# finally... domain.
- make_conf.update({
- 'class': 'pkgcore.ebuild.domain.domain',
- 'repos': tuple(repos),
- 'default': True,
- 'vdb': ('vdb',),
- 'profile': 'profile',
- 'root': self.root,
- 'config_dir': self.dir,
- })
-
- self['livefs'] = basics.FakeIncrementalDictConfigSection(
- my_convert_hybrid, make_conf)
+ make_conf.update(
+ {
+ "class": "pkgcore.ebuild.domain.domain",
+ "repos": tuple(repos),
+ "default": True,
+ "vdb": ("vdb",),
+ "profile": "profile",
+ "root": self.root,
+ "config_dir": self.dir,
+ }
+ )
+
+ self["livefs"] = basics.FakeIncrementalDictConfigSection(
+ my_convert_hybrid, make_conf
+ )
def __setitem__(self, key, value):
self._config[key] = value
@@ -251,8 +282,14 @@ class PortageConfig(DictMixin):
return iter(self._config.keys())
@staticmethod
- def load_make_conf(vars_dict, path, allow_sourcing=False, required=True,
- allow_recurse=True, incrementals=False):
+ def load_make_conf(
+ vars_dict,
+ path,
+ allow_sourcing=False,
+ required=True,
+ allow_recurse=True,
+ incrementals=False,
+ ):
"""parse make.conf files
Args:
@@ -261,24 +298,31 @@ class PortageConfig(DictMixin):
directory, if a directory is passed all the non-hidden files within
that directory are parsed in alphabetical order.
"""
- sourcing_command = 'source' if allow_sourcing else None
+ sourcing_command = "source" if allow_sourcing else None
if allow_recurse:
files = sorted_scan(
- os.path.realpath(path), follow_symlinks=True, nonexistent=True,
- hidden=False, backup=False)
+ os.path.realpath(path),
+ follow_symlinks=True,
+ nonexistent=True,
+ hidden=False,
+ backup=False,
+ )
else:
files = (path,)
for fp in files:
try:
new_vars = read_bash_dict(
- fp, vars_dict=vars_dict, sourcing_command=sourcing_command)
+ fp, vars_dict=vars_dict, sourcing_command=sourcing_command
+ )
except PermissionError as e:
raise base_errors.PermissionDenied(fp, write=False) from e
except EnvironmentError as e:
if e.errno != errno.ENOENT or required:
- raise config_errors.ParsingError(f"parsing {fp!r}", exception=e) from e
+ raise config_errors.ParsingError(
+ f"parsing {fp!r}", exception=e
+ ) from e
return
if incrementals:
@@ -307,8 +351,12 @@ class PortageConfig(DictMixin):
parser = ParseConfig()
for fp in sorted_scan(
- os.path.realpath(path), follow_symlinks=True, nonexistent=True,
- hidden=False, backup=False):
+ os.path.realpath(path),
+ follow_symlinks=True,
+ nonexistent=True,
+ hidden=False,
+ backup=False,
+ ):
try:
with open(fp) as f:
defaults, repo_confs = parser.parse_file(f)
@@ -317,10 +365,14 @@ class PortageConfig(DictMixin):
except EnvironmentError as e:
raise config_errors.ParsingError(f"parsing {fp!r}", exception=e) from e
except configparser.Error as e:
- raise config_errors.ParsingError(f"repos.conf: {fp!r}", exception=e) from e
+ raise config_errors.ParsingError(
+ f"repos.conf: {fp!r}", exception=e
+ ) from e
if defaults and main_defaults:
- logger.warning(f"repos.conf: parsing {fp!r}: overriding DEFAULT section")
+ logger.warning(
+ f"repos.conf: parsing {fp!r}: overriding DEFAULT section"
+ )
main_defaults.update(defaults)
if not repo_confs:
@@ -328,39 +380,44 @@ class PortageConfig(DictMixin):
for name, repo_conf in repo_confs.items():
if name in repos:
- logger.warning(f"repos.conf: parsing {fp!r}: overriding {name!r} repo")
+ logger.warning(
+ f"repos.conf: parsing {fp!r}: overriding {name!r} repo"
+ )
# ignore repo if location is unset
- location = repo_conf.get('location', None)
+ location = repo_conf.get("location", None)
if location is None:
logger.warning(
f"repos.conf: parsing {fp!r}: "
- f"{name!r} repo missing location setting, ignoring repo")
+ f"{name!r} repo missing location setting, ignoring repo"
+ )
continue
location = os.path.expanduser(location)
if os.path.isabs(location):
- repo_conf['location'] = location
+ repo_conf["location"] = location
else:
# support relative paths based on where repos.conf is located
- repo_conf['location'] = os.path.abspath(
- pjoin(os.path.dirname(path), location))
+ repo_conf["location"] = os.path.abspath(
+ pjoin(os.path.dirname(path), location)
+ )
# repo type defaults to ebuild for compat with portage
- repo_type = repo_conf.get('repo-type', 'ebuild-v1')
+ repo_type = repo_conf.get("repo-type", "ebuild-v1")
try:
- repo_conf['repo-type'] = cls._supported_repo_types[repo_type]
+ repo_conf["repo-type"] = cls._supported_repo_types[repo_type]
except KeyError:
logger.warning(
f"repos.conf: parsing {fp!r}: "
f"{name!r} repo has unsupported repo-type {repo_type!r}, "
- "ignoring repo")
+ "ignoring repo"
+ )
continue
# Priority defaults to zero if unset or invalid for ebuild repos
# while binpkg repos have the lowest priority by default.
- priority = repo_conf.get('priority', None)
+ priority = repo_conf.get("priority", None)
if priority is None:
- if repo_type.startswith('binpkg'):
+ if repo_type.startswith("binpkg"):
priority = -10000
else:
priority = 0
@@ -370,88 +427,96 @@ class PortageConfig(DictMixin):
except ValueError:
logger.warning(
f"repos.conf: parsing {fp!r}: {name!r} repo has invalid priority "
- f"setting: {priority!r} (defaulting to 0)")
+ f"setting: {priority!r} (defaulting to 0)"
+ )
priority = 0
finally:
- repo_conf['priority'] = priority
+ repo_conf["priority"] = priority
# register repo
repos[name] = repo_conf
if repos:
# the default repo is gentoo if unset and gentoo exists
- default_repo = main_defaults.get('main-repo', 'gentoo')
+ default_repo = main_defaults.get("main-repo", "gentoo")
if default_repo not in repos:
raise config_errors.UserConfigError(
- f"repos.conf: default repo {default_repo!r} is undefined or invalid")
+ f"repos.conf: default repo {default_repo!r} is undefined or invalid"
+ )
- if 'main-repo' not in main_defaults:
- main_defaults['main-repo'] = default_repo
+ if "main-repo" not in main_defaults:
+ main_defaults["main-repo"] = default_repo
# the default repo has a low priority if unset or zero
- if repos[default_repo]['priority'] == 0:
- repos[default_repo]['priority'] = -1000
+ if repos[default_repo]["priority"] == 0:
+ repos[default_repo]["priority"] = -1000
# sort repos via priority, in this case high values map to high priorities
repos = OrderedDict(
- (k, v) for k, v in
- sorted(repos.items(), key=lambda d: d[1]['priority'], reverse=True))
+ (k, v)
+ for k, v in sorted(
+ repos.items(), key=lambda d: d[1]["priority"], reverse=True
+ )
+ )
return main_defaults, repos
def _make_repo_syncers(self, repos_conf, make_conf, allow_timestamps=True):
"""generate syncing configs for known repos"""
rsync_opts = None
- usersync = 'usersync' in self.features
+ usersync = "usersync" in self.features
for repo_name, repo_opts in repos_conf.items():
- d = {'basedir': repo_opts['location'], 'usersync': usersync}
+ d = {"basedir": repo_opts["location"], "usersync": usersync}
- sync_type = repo_opts.get('sync-type', None)
- sync_uri = repo_opts.get('sync-uri', None)
+ sync_type = repo_opts.get("sync-type", None)
+ sync_uri = repo_opts.get("sync-uri", None)
if sync_uri:
# prefix non-native protocols
- if (sync_type is not None and not sync_uri.startswith(sync_type)):
- sync_uri = f'{sync_type}+{sync_uri}'
+ if sync_type is not None and not sync_uri.startswith(sync_type):
+ sync_uri = f"{sync_type}+{sync_uri}"
- d['uri'] = sync_uri
- d['opts'] = repo_opts.get('sync-opts', '')
+ d["uri"] = sync_uri
+ d["opts"] = repo_opts.get("sync-opts", "")
- if sync_type == 'rsync':
+ if sync_type == "rsync":
if rsync_opts is None:
# various make.conf options used by rsync-based syncers
rsync_opts = self._isolate_rsync_opts(make_conf)
d.update(rsync_opts)
if allow_timestamps:
- d['class'] = 'pkgcore.sync.rsync.rsync_timestamp_syncer'
+ d["class"] = "pkgcore.sync.rsync.rsync_timestamp_syncer"
else:
- d['class'] = 'pkgcore.sync.rsync.rsync_syncer'
+ d["class"] = "pkgcore.sync.rsync.rsync_syncer"
else:
- d['class'] = 'pkgcore.sync.base.GenericSyncer'
+ d["class"] = "pkgcore.sync.base.GenericSyncer"
elif sync_uri is None:
# try to autodetect syncing mechanism if sync-uri is missing
- d['class'] = 'pkgcore.sync.base.AutodetectSyncer'
+ d["class"] = "pkgcore.sync.base.AutodetectSyncer"
else:
# disable syncing if sync-uri is explicitly unset
- d['class'] = 'pkgcore.sync.base.DisabledSync'
+ d["class"] = "pkgcore.sync.base.DisabledSync"
- name = 'sync:' + repo_name
+ name = "sync:" + repo_name
self[name] = basics.AutoConfigSection(d)
def _add_sets(self):
- self["world"] = basics.AutoConfigSection({
- "class": "pkgcore.pkgsets.filelist.WorldFile",
- "location": pjoin(self.root, econst.WORLD_FILE.lstrip('/'))})
- self["system"] = basics.AutoConfigSection({
- "class": "pkgcore.pkgsets.system.SystemSet",
- "profile": "profile"})
- self["installed"] = basics.AutoConfigSection({
- "class": "pkgcore.pkgsets.installed.Installed",
- "vdb": "vdb"})
- self["versioned-installed"] = basics.AutoConfigSection({
- "class": "pkgcore.pkgsets.installed.VersionedInstalled",
- "vdb": "vdb"})
+ self["world"] = basics.AutoConfigSection(
+ {
+ "class": "pkgcore.pkgsets.filelist.WorldFile",
+ "location": pjoin(self.root, econst.WORLD_FILE.lstrip("/")),
+ }
+ )
+ self["system"] = basics.AutoConfigSection(
+ {"class": "pkgcore.pkgsets.system.SystemSet", "profile": "profile"}
+ )
+ self["installed"] = basics.AutoConfigSection(
+ {"class": "pkgcore.pkgsets.installed.Installed", "vdb": "vdb"}
+ )
+ self["versioned-installed"] = basics.AutoConfigSection(
+ {"class": "pkgcore.pkgsets.installed.VersionedInstalled", "vdb": "vdb"}
+ )
set_fp = pjoin(self.dir, "sets")
try:
@@ -461,28 +526,36 @@ class PortageConfig(DictMixin):
if setname in ("system", "world"):
logger.warning(
"user defined set %r is disallowed; ignoring",
- pjoin(set_fp, setname))
+ pjoin(set_fp, setname),
+ )
continue
- self[setname] = basics.AutoConfigSection({
- "class": "pkgcore.pkgsets.filelist.FileList",
- "location": pjoin(set_fp, setname)})
+ self[setname] = basics.AutoConfigSection(
+ {
+ "class": "pkgcore.pkgsets.filelist.FileList",
+ "location": pjoin(set_fp, setname),
+ }
+ )
except FileNotFoundError:
pass
def _find_profile_path(self, profile_override):
if profile_override is None:
- make_profile = pjoin(self.dir, 'make.profile')
+ make_profile = pjoin(self.dir, "make.profile")
if not os.path.islink(make_profile):
- raise config_errors.UserConfigError(f'invalid symlink: {make_profile!r}')
+ raise config_errors.UserConfigError(
+ f"invalid symlink: {make_profile!r}"
+ )
path = os.path.realpath(make_profile)
else:
path = os.path.realpath(profile_override)
if not os.path.exists(path):
if profile_override is None:
- raise config_errors.UserConfigError(f'broken symlink: {make_profile!r}')
+ raise config_errors.UserConfigError(f"broken symlink: {make_profile!r}")
else:
- raise config_errors.UserConfigError(f'nonexistent profile: {profile_override!r}')
+ raise config_errors.UserConfigError(
+ f"nonexistent profile: {profile_override!r}"
+ )
return path
def _add_profile(self, profile_override=None):
@@ -490,23 +563,28 @@ class PortageConfig(DictMixin):
paths = profiles.OnDiskProfile.split_abspath(profile)
if paths is None:
raise config_errors.UserConfigError(
- '%r expands to %r, but no profile detected' %
- (pjoin(self.dir, 'make.profile'), profile))
+ "%r expands to %r, but no profile detected"
+ % (pjoin(self.dir, "make.profile"), profile)
+ )
- user_profile_path = pjoin(self.dir, 'profile')
+ user_profile_path = pjoin(self.dir, "profile")
if os.path.isdir(user_profile_path):
- self["profile"] = basics.AutoConfigSection({
- "class": "pkgcore.ebuild.profiles.UserProfile",
- "parent_path": paths[0],
- "parent_profile": paths[1],
- "user_path": user_profile_path,
- })
+ self["profile"] = basics.AutoConfigSection(
+ {
+ "class": "pkgcore.ebuild.profiles.UserProfile",
+ "parent_path": paths[0],
+ "parent_profile": paths[1],
+ "user_path": user_profile_path,
+ }
+ )
else:
- self["profile"] = basics.AutoConfigSection({
- "class": "pkgcore.ebuild.profiles.OnDiskProfile",
- "basepath": paths[0],
- "profile": paths[1],
- })
+ self["profile"] = basics.AutoConfigSection(
+ {
+ "class": "pkgcore.ebuild.profiles.OnDiskProfile",
+ "basepath": paths[0],
+ "profile": paths[1],
+ }
+ )
def _isolate_rsync_opts(self, options):
"""
@@ -517,31 +595,31 @@ class PortageConfig(DictMixin):
opts = []
extra_opts = []
- opts.extend(options.pop('PORTAGE_RSYNC_OPTS', '').split())
- extra_opts.extend(options.pop('PORTAGE_RSYNC_EXTRA_OPTS', '').split())
+ opts.extend(options.pop("PORTAGE_RSYNC_OPTS", "").split())
+ extra_opts.extend(options.pop("PORTAGE_RSYNC_EXTRA_OPTS", "").split())
- timeout = options.pop('PORTAGE_RSYNC_INITIAL_TIMEOUT', None)
+ timeout = options.pop("PORTAGE_RSYNC_INITIAL_TIMEOUT", None)
if timeout is not None:
- base['conn_timeout'] = timeout
+ base["conn_timeout"] = timeout
- retries = options.pop('PORTAGE_RSYNC_RETRIES', None)
+ retries = options.pop("PORTAGE_RSYNC_RETRIES", None)
if retries is not None:
try:
retries = int(retries)
if retries < 0:
retries = 10000
- base['retries'] = str(retries)
+ base["retries"] = str(retries)
except ValueError:
pass
- proxy = options.pop('RSYNC_PROXY', None)
+ proxy = options.pop("RSYNC_PROXY", None)
if proxy is not None:
- base['proxy'] = proxy.strip()
+ base["proxy"] = proxy.strip()
if opts:
- base['opts'] = tuple(opts)
+ base["opts"] = tuple(opts)
if extra_opts:
- base['extra_opts'] = tuple(extra_opts)
+ base["extra_opts"] = tuple(extra_opts)
return base
@@ -549,40 +627,44 @@ class PortageConfig(DictMixin):
"""Configure repo cache."""
# Use md5 cache if it exists or the option is selected, otherwise default
# to the old flat hash format in /var/cache/edb/dep/*.
- if (os.path.exists(pjoin(repo_path, 'metadata', 'md5-cache')) or
- cache_format == 'md5-dict'):
- kls = 'pkgcore.cache.flat_hash.md5_cache'
- cache_parent_dir = pjoin(repo_path, 'metadata', 'md5-cache')
+ if (
+ os.path.exists(pjoin(repo_path, "metadata", "md5-cache"))
+ or cache_format == "md5-dict"
+ ):
+ kls = "pkgcore.cache.flat_hash.md5_cache"
+ cache_parent_dir = pjoin(repo_path, "metadata", "md5-cache")
else:
- kls = 'pkgcore.cache.flat_hash.database'
- repo_path = pjoin('/var/cache/edb/dep', repo_path.lstrip('/'))
+ kls = "pkgcore.cache.flat_hash.database"
+ repo_path = pjoin("/var/cache/edb/dep", repo_path.lstrip("/"))
cache_parent_dir = repo_path
while not os.path.exists(cache_parent_dir):
cache_parent_dir = os.path.dirname(cache_parent_dir)
- readonly = (not os.access(cache_parent_dir, os.W_OK | os.X_OK))
+ readonly = not os.access(cache_parent_dir, os.W_OK | os.X_OK)
- return basics.AutoConfigSection({
- 'class': kls,
- 'location': repo_path,
- 'readonly': readonly
- })
+ return basics.AutoConfigSection(
+ {"class": kls, "location": repo_path, "readonly": readonly}
+ )
def _register_repo_type(supported_repo_types):
"""Decorator to register supported repo types."""
+
def _wrap_func(func):
def wrapped(*args, **kwargs):
return func(*args, **kwargs)
- name = func.__name__[6:].replace('_', '-')
+
+ name = func.__name__[6:].replace("_", "-")
supported_repo_types[name] = func
return wrapped
+
return _wrap_func
@_register_repo_type(_supported_repo_types)
- def _repo_ebuild_v1(self, repo_name, repo_opts, repo_map,
- defaults, repo_obj=None, repo_dict=None):
+ def _repo_ebuild_v1(
+ self, repo_name, repo_opts, repo_map, defaults, repo_obj=None, repo_dict=None
+ ):
"""Create ebuild repo v1 configuration."""
- repo_path = repo_opts['location']
+ repo_path = repo_opts["location"]
# XXX: Hack for portage-2 profile format support.
if repo_obj is None:
@@ -591,57 +673,59 @@ class PortageConfig(DictMixin):
# repo configs
repo_conf = {
- 'class': 'pkgcore.ebuild.repo_objs.RepoConfig',
- 'config_name': repo_name,
- 'location': repo_path,
- 'syncer': 'sync:' + repo_name,
+ "class": "pkgcore.ebuild.repo_objs.RepoConfig",
+ "config_name": repo_name,
+ "location": repo_path,
+ "syncer": "sync:" + repo_name,
}
if repo_dict is not None:
repo_conf.update(repo_dict)
# repo trees
repo = {
- 'inherit': ('ebuild-repo-common',),
- 'repo_config': 'conf:' + repo_name,
+ "inherit": ("ebuild-repo-common",),
+ "repo_config": "conf:" + repo_name,
}
# metadata cache
if repo_obj.cache_format is not None:
- cache_name = 'cache:' + repo_name
+ cache_name = "cache:" + repo_name
self[cache_name] = self._make_cache(repo_obj.cache_format, repo_path)
- repo['cache'] = cache_name
+ repo["cache"] = cache_name
- if repo_name == defaults['main-repo']:
- repo_conf['default'] = True
- repo['default'] = True
+ if repo_name == defaults["main-repo"]:
+ repo_conf["default"] = True
+ repo["default"] = True
- self['conf:' + repo_name] = basics.AutoConfigSection(repo_conf)
+ self["conf:" + repo_name] = basics.AutoConfigSection(repo_conf)
return repo
@_register_repo_type(_supported_repo_types)
def _repo_sqfs_v1(self, *args, **kwargs):
"""Create ebuild squashfs repo v1 configuration."""
- repo_name = kwargs['repo_name']
- repo_opts = kwargs['repo_opts']
+ repo_name = kwargs["repo_name"]
+ repo_opts = kwargs["repo_opts"]
- repo_path = repo_opts['location']
- sqfs_file = os.path.basename(repo_opts['sync-uri'])
+ repo_path = repo_opts["location"]
+ sqfs_file = os.path.basename(repo_opts["sync-uri"])
# XXX: Hack for portage-2 profile format support.
- kwargs['repo_obj'] = repo_objs.SquashfsRepoConfig(sqfs_file, repo_path, repo_name)
+ kwargs["repo_obj"] = repo_objs.SquashfsRepoConfig(
+ sqfs_file, repo_path, repo_name
+ )
repo_dict = {
- 'class': 'pkgcore.ebuild.repo_objs.SquashfsRepoConfig',
- 'sqfs_file': sqfs_file,
+ "class": "pkgcore.ebuild.repo_objs.SquashfsRepoConfig",
+ "sqfs_file": sqfs_file,
}
- kwargs['repo_dict'] = repo_dict
+ kwargs["repo_dict"] = repo_dict
return self._repo_ebuild_v1(*args, **kwargs)
@_register_repo_type(_supported_repo_types)
def _repo_binpkg_v1(self, repo_name, repo_opts, **kwargs):
"""Create binpkg repo v1 configuration."""
repo = {
- 'class': 'pkgcore.binpkg.repository.tree',
- 'repo_id': repo_name,
- 'location': repo_opts['location'],
+ "class": "pkgcore.binpkg.repository.tree",
+ "repo_id": repo_name,
+ "location": repo_opts["location"],
}
return repo
diff --git a/src/pkgcore/ebuild/portageq.py b/src/pkgcore/ebuild/portageq.py
index 910984691..250736f20 100644
--- a/src/pkgcore/ebuild/portageq.py
+++ b/src/pkgcore/ebuild/portageq.py
@@ -27,25 +27,34 @@ def get_atom_kls(value):
raise ValueError(f"EAPI {value} isn't known/supported")
return eapi.atom_kls
+
def default_portageq_args(parser):
- parser.add_argument("--eapi", dest='atom_kls', type=get_atom_kls,
+ parser.add_argument(
+ "--eapi",
+ dest="atom_kls",
+ type=get_atom_kls,
default=atom.atom,
- help="limit all operations to just what the given EAPI supports.")
- parser.add_argument("--use", default=None,
+ help="limit all operations to just what the given EAPI supports.",
+ )
+ parser.add_argument(
+ "--use",
+ default=None,
help="override the use flags used for transititive USE deps- "
- "dev-lang/python[threads=] for example")
+ "dev-lang/python[threads=] for example",
+ )
def make_atom(value):
return arghparse.DelayedValue(partial(_render_atom, value), 100)
+
def _render_atom(value, namespace, attr):
a = namespace.atom_kls(value)
if isinstance(a, atom.transitive_use_atom):
a.restrictions
# XXX bit of a hack.
a = conditionals.DepSet(a.restrictions, atom.atom, True)
- a = a.evaluate_depset(getattr(namespace, 'use', ()))
+ a = a.evaluate_depset(getattr(namespace, "use", ()))
a = AndRestriction(*a.restrictions)
setattr(namespace, attr, a)
@@ -67,40 +76,55 @@ class BaseCommand(arghparse.ArgparseCommand):
kwds["nargs"] = "?"
kwds["default"] = self._compat_root_default
parser.add_argument(
- dest="domain", metavar="root",
+ dest="domain",
+ metavar="root",
action=commandline.DomainFromPath,
- help="the domain that lives at root will be used", **kwds)
+ help="the domain that lives at root will be used",
+ **kwds,
+ )
else:
mux = parser.add_mutually_exclusive_group()
commandline._mk_domain(mux)
mux.add_argument(
- '--domain-at-root', action=commandline.DomainFromPath,
- dest="domain", help="specify the domain to use via its root path")
+ "--domain-at-root",
+ action=commandline.DomainFromPath,
+ dest="domain",
+ help="specify the domain to use via its root path",
+ )
for token in self.arg_spec:
kwds = {}
- if token[-1] in '+?*':
+ if token[-1] in "+?*":
kwds["nargs"] = token[-1]
token = token[:-1]
- if token == 'atom':
+ if token == "atom":
parser.add_argument(
- 'atom', help="atom to inspect",
- type=make_atom, **kwds)
+ "atom", help="atom to inspect", type=make_atom, **kwds
+ )
else:
- parser.add_argument(
- token, help=f"{token} to inspect", **kwds)
+ parser.add_argument(token, help=f"{token} to inspect", **kwds)
@classmethod
- def make_command(cls, arg_spec='', requires_root=True, bind=None,
- root_default=None, name=None, **kwds):
+ def make_command(
+ cls,
+ arg_spec="",
+ requires_root=True,
+ bind=None,
+ root_default=None,
+ name=None,
+ **kwds,
+ ):
kwds = dict(
- arg_spec=tuple(arg_spec.split()), requires_root=requires_root,
- _compat_root_default=root_default, **kwds)
+ arg_spec=tuple(arg_spec.split()),
+ requires_root=requires_root,
+ _compat_root_default=root_default,
+ **kwds,
+ )
def internal_function(functor, name=name):
class mycommand(BaseCommand):
function = __call__ = staticmethod(functor)
- __doc__ = getattr(functor, '__doc__', None)
+ __doc__ = getattr(functor, "__doc__", None)
locals().update(kwds)
if name is None:
@@ -118,27 +142,31 @@ common_commands = []
query_commands = []
portageq_commands = []
+
@BaseCommand.make_command("variable+", bind=query_commands)
def env_var(options, out, err):
"""
return configuration defined variables.
"""
for x in options.variable:
- val = options.domain.settings.get(x, '')
+ val = options.domain.settings.get(x, "")
if not isinstance(val, str):
- val = ' '.join(val)
+ val = " ".join(val)
out.write(str(val))
return 0
-@BaseCommand.make_command("variable+", bind=portageq_commands, name='envvar',
- root_default='/')
+
+@BaseCommand.make_command(
+ "variable+", bind=portageq_commands, name="envvar", root_default="/"
+)
def portageq_envvar(options, out, err):
"""
return configuration defined variables. Use envvar2 instead, this will be removed.
"""
return env_var.function(options, out, err)
-@BaseCommand.make_command("variable+", bind=portageq_commands, name='envvar2')
+
+@BaseCommand.make_command("variable+", bind=portageq_commands, name="envvar2")
def portageq_envvar2(options, out, err):
"""
return configuration defined variables.
@@ -161,19 +189,20 @@ def _best_version(domain, restrict):
p = max(domain.all_installed_repos.itermatch(restrict))
except ValueError:
# empty sequence.
- return ''
+ return ""
return str_pkg(p)
+
@BaseCommand.make_command("atom+", bind=common_commands)
def mass_best_version(options, out, err):
"""
multiple best_version calls.
"""
for x in options.atom:
- out.write("%s:%s" %
- (x, _best_version(options.domain, x).rstrip()))
+ out.write("%s:%s" % (x, _best_version(options.domain, x).rstrip()))
return 0
+
@BaseCommand.make_command("atom", bind=common_commands)
def best_version(options, out, err):
"""
@@ -192,11 +221,11 @@ def match(options, out, err):
return 0
-@BaseCommand.make_command(bind=common_commands, root_default='/')
+@BaseCommand.make_command(bind=common_commands, root_default="/")
def get_repos(options, out, err):
l = []
for repo in options.domain.ebuild_repos_raw:
- repo_id = getattr(repo, 'repo_id', getattr(repo, 'location', None))
+ repo_id = getattr(repo, "repo_id", getattr(repo, "location", None))
l.append(repo_id)
for x in sorted(set(l)):
out.write(x)
@@ -205,22 +234,25 @@ def get_repos(options, out, err):
def find_profile_paths_by_repo_id(config, repo_id, fullpath=False):
repo = config.repo.get(repo_id, None)
- if repo is not None and getattr(repo, 'location', None) is not None:
+ if repo is not None and getattr(repo, "location", None) is not None:
profiles = repo.config.profiles.arch_profiles
for arch in profiles.keys():
for path, stability in profiles[arch]:
if fullpath:
- path = os.path.join(repo.location, 'profiles', path)
+ path = os.path.join(repo.location, "profiles", path)
yield path
@BaseCommand.make_command("repo_id", bind=query_commands)
def get_profiles(options, out, err):
- if options.repo_id == 'all':
+ if options.repo_id == "all":
profiles = (
- profile for repo in options.domain.ebuild_repos_raw
+ profile
+ for repo in options.domain.ebuild_repos_raw
for profile in find_profile_paths_by_repo_id(
- options.config, repo.repo_id, fullpath=True))
+ options.config, repo.repo_id, fullpath=True
+ )
+ )
else:
profiles = find_profile_paths_by_repo_id(options.config, options.repo_id)
for x in sorted(set(profiles)):
@@ -231,24 +263,27 @@ def get_profiles(options, out, err):
@BaseCommand.make_command("repo_id", bind=portageq_commands)
def get_repo_path(options, out, err):
repo = options.config.repo.get(options.repo_id, None)
- if repo is not None and getattr(repo, 'location', None) is not None:
+ if repo is not None and getattr(repo, "location", None) is not None:
out.write(repo.location)
return 0
return 1
+
get_repo_path = BaseCommand.make_command(
- "repo_id", bind=query_commands, name='get_repo_path')(get_repo_path.function)
+ "repo_id", bind=query_commands, name="get_repo_path"
+)(get_repo_path.function)
@BaseCommand.make_command("repo_id", bind=portageq_commands)
def get_repo_news_path(options, out, err):
repo = options.config.repo.get(options.repo_id, None)
- if repo is not None and getattr(repo, 'location', None) is not None:
- out.write(osutils.normpath(osutils.pjoin(repo.location, 'metadata', 'news')))
+ if repo is not None and getattr(repo, "location", None) is not None:
+ out.write(osutils.normpath(osutils.pjoin(repo.location, "metadata", "news")))
return 0
return 1
-def bind_parser(parser, compat=False, name='portageq'):
+
+def bind_parser(parser, compat=False, name="portageq"):
subparsers = parser.add_subparsers(description=f"{name} commands")
l = common_commands[:]
if compat:
@@ -256,7 +291,8 @@ def bind_parser(parser, compat=False, name='portageq'):
else:
l += query_commands
- for command in sorted(l, key=lambda x:x.__name__):
+ for command in sorted(l, key=lambda x: x.__name__):
subparser = subparsers.add_parser(
- command.__name__, help=command.__doc__, description=command.__doc__)
+ command.__name__, help=command.__doc__, description=command.__doc__
+ )
command().bind_to_parser(subparser, compat=compat)
diff --git a/src/pkgcore/ebuild/processor.py b/src/pkgcore/ebuild/processor.py
index 8e9bf13cd..a4eec7afe 100644
--- a/src/pkgcore/ebuild/processor.py
+++ b/src/pkgcore/ebuild/processor.py
@@ -15,8 +15,12 @@ design) reduces regen time by over 40% compared to portage-2.1
# originally, but it still isn't what I would define as 'right'
__all__ = (
- "request_ebuild_processor", "release_ebuild_processor", "EbuildProcessor",
- "UnhandledCommand", "expected_ebuild_env")
+ "request_ebuild_processor",
+ "release_ebuild_processor",
+ "EbuildProcessor",
+ "UnhandledCommand",
+ "expected_ebuild_env",
+)
import contextlib
import errno
@@ -43,10 +47,12 @@ active_ebp_list = []
def _singled_threaded(functor):
"""Decorator that forces method to run under single thread."""
+
@wraps(functor)
def _inner(*args, **kwargs):
with _global_ebp_lock:
return functor(*args, **kwargs)
+
return _inner
@@ -56,15 +62,15 @@ def shutdown_all_processors():
try:
while active_ebp_list:
try:
- active_ebp_list.pop().shutdown_processor(
- ignore_keyboard_interrupt=True)
+ active_ebp_list.pop().shutdown_processor(ignore_keyboard_interrupt=True)
except EnvironmentError:
pass
while inactive_ebp_list:
try:
inactive_ebp_list.pop().shutdown_processor(
- ignore_keyboard_interrupt=True)
+ ignore_keyboard_interrupt=True
+ )
except EnvironmentError:
pass
except Exception as e:
@@ -222,25 +228,26 @@ class EbdError(ProcessorError):
"""Extract error message from verbose output depending on verbosity level."""
if verbosity <= 0:
# strip ANSI escapes from output
- lines = (bash.ansi_escape_re.sub('', x) for x in self.error.split('\n'))
+ lines = (bash.ansi_escape_re.sub("", x) for x in self.error.split("\n"))
# pull eerror cmd output and strip prefixes
- bash_error = [x.lstrip(' *') for x in lines if x.startswith(' *')]
+ bash_error = [x.lstrip(" *") for x in lines if x.startswith(" *")]
try:
# output specific error message if it exists in the expected format
error = bash_error[1]
try:
# add non-helper die context if it exists and is from an eclass
die_context = next(
- x for x in reversed(bash_error) if x.endswith('called die'))
- if die_context.split(',', 1)[0].endswith('.eclass'):
- error += f', ({die_context})'
+ x for x in reversed(bash_error) if x.endswith("called die")
+ )
+ if die_context.split(",", 1)[0].endswith(".eclass"):
+ error += f", ({die_context})"
except StopIteration:
pass
return error
except IndexError:
pass
# show full bash output in verbose mode
- return self.error.strip('\n')
+ return self.error.strip("\n")
def chuck_DyingInterrupt(ebp, logfile=None, *args):
@@ -249,15 +256,15 @@ def chuck_DyingInterrupt(ebp, logfile=None, *args):
error = []
while True:
line = ebp.read()
- if line.strip() == 'dead':
+ if line.strip() == "dead":
break
error.append(line)
drop_ebuild_processor(ebp)
ebp.shutdown_processor(force=True)
if logfile:
- with open(logfile, 'at') as f:
- f.write(''.join(error))
- raise EbdError(''.join(error))
+ with open(logfile, "at") as f:
+ f.write("".join(error))
+ raise EbdError("".join(error))
def chuck_KeyboardInterrupt(*args):
@@ -294,8 +301,8 @@ def chuck_UnhandledCommand(ebp, line):
def chuck_StoppingCommand(ebp, line):
"""Event handler for successful phase/command completion."""
- args = line.split(' ', 1)
- if args[0] == 'succeeded':
+ args = line.split(" ", 1)
+ if args[0] == "succeeded":
raise FinishedProcessing(True)
else:
# IndexError is explicitly left unhandled to force visibility
@@ -326,31 +333,36 @@ class EbuildProcessor:
self._metadata_paths = None
self.pid = None
- spawn_opts = {'umask': 0o002}
+ spawn_opts = {"umask": 0o002}
if self.userpriv:
- spawn_opts.update({
- "uid": os_data.portage_uid,
- "gid": os_data.portage_gid,
- "groups": [os_data.portage_gid],
- })
+ spawn_opts.update(
+ {
+ "uid": os_data.portage_uid,
+ "gid": os_data.portage_gid,
+ "groups": [os_data.portage_gid],
+ }
+ )
elif spawn.is_userpriv_capable():
- spawn_opts.update({
- "gid": os_data.portage_gid,
- "groups": [0, os_data.portage_gid],
- })
+ spawn_opts.update(
+ {
+ "gid": os_data.portage_gid,
+ "groups": [0, os_data.portage_gid],
+ }
+ )
# force invalid bashrc
env = {x: "/not/valid" for x in ("BASHRC", "BASH_ENV")}
- if int(os.environ.get('PKGCORE_PERF_DEBUG', 0)):
- env["PKGCORE_PERF_DEBUG"] = os.environ['PKGCORE_PERF_DEBUG']
- if int(os.environ.get('PKGCORE_DEBUG', 0)):
- env["PKGCORE_DEBUG"] = os.environ['PKGCORE_DEBUG']
+ if int(os.environ.get("PKGCORE_PERF_DEBUG", 0)):
+ env["PKGCORE_PERF_DEBUG"] = os.environ["PKGCORE_PERF_DEBUG"]
+ if int(os.environ.get("PKGCORE_DEBUG", 0)):
+ env["PKGCORE_DEBUG"] = os.environ["PKGCORE_DEBUG"]
# prepend script dir to PATH for git repo or unpacked tarball, for
# installed versions it's empty
env["PATH"] = os.pathsep.join(
- list(const.PATH_FORCED_PREPEND) + [os.environ["PATH"]])
+ list(const.PATH_FORCED_PREPEND) + [os.environ["PATH"]]
+ )
if self.sandbox:
if not spawn.is_sandbox_capable():
@@ -367,10 +379,12 @@ class EbuildProcessor:
# starting with max-3 to avoid a bug in older bash versions where it
# doesn't check if an fd is in use before claiming it.
max_fd = min(spawn.max_fd_limit, 1024)
- env.update({
- "PKGCORE_EBD_READ_FD": str(max_fd - 4),
- "PKGCORE_EBD_WRITE_FD": str(max_fd - 3),
- })
+ env.update(
+ {
+ "PKGCORE_EBD_READ_FD": str(max_fd - 4),
+ "PKGCORE_EBD_WRITE_FD": str(max_fd - 3),
+ }
+ )
cread = cwrite = dread = dwrite = None
# open pipes used for communication
@@ -387,17 +401,20 @@ class EbuildProcessor:
self.pid = spawn_func(
[spawn.BASH_BINARY, self.ebd, "daemonize"],
- fd_pipes=ebd_pipes, returnpid=True, env=env,
+ fd_pipes=ebd_pipes,
+ returnpid=True,
+ env=env,
# force each ebd instance to be a process group leader so everything
# can be easily terminated
pgid=0,
- **spawn_opts)[0]
+ **spawn_opts,
+ )[0]
except:
- if cwrite is not None:
- os.close(cwrite)
- if dread is not None:
- os.close(dread)
- raise
+ if cwrite is not None:
+ os.close(cwrite)
+ if dread is not None:
+ os.close(dread)
+ raise
finally:
if cread is not None:
os.close(cread)
@@ -409,7 +426,9 @@ class EbuildProcessor:
# verify ebd is running
self.write("ebd?")
if not self.expect("ebd!"):
- raise InternalError("expected 'ebd!' response from ebd, which wasn't received")
+ raise InternalError(
+ "expected 'ebd!' response from ebd, which wasn't received"
+ )
if self.sandbox:
self.write("sandbox_log?")
@@ -420,8 +439,15 @@ class EbuildProcessor:
# locking isn't used much, but w/ threading this will matter
self.unlock()
- def run_phase(self, phase, env, tmpdir=None, logging=None,
- additional_commands=None, sandbox=True):
+ def run_phase(
+ self,
+ phase,
+ env,
+ tmpdir=None,
+ logging=None,
+ additional_commands=None,
+ sandbox=True,
+ ):
"""Utility function, to initialize the processor for a phase.
Used to combine multiple calls into one, leaving the processor
@@ -447,8 +473,9 @@ class EbuildProcessor:
self.write("start_processing")
return self.generic_handler(additional_commands=additional_commands)
- def write(self, string, flush=True, disable_runtime_exceptions=False,
- append_newline=True):
+ def write(
+ self, string, flush=True, disable_runtime_exceptions=False, append_newline=True
+ ):
"""Send something to the bash side.
:param string: string to write to the bash processor.
@@ -460,7 +487,7 @@ class EbuildProcessor:
string = str(string)
try:
if append_newline:
- if string != '\n':
+ if string != "\n":
string += "\n"
self.ebd_write.write(string)
if flush:
@@ -473,8 +500,8 @@ class EbuildProcessor:
def _consume_async_expects(self):
if any(x[0] for x in self._outstanding_expects):
self.ebd_write.flush()
- got = [x.rstrip('\n') for x in self.readlines(len(self._outstanding_expects))]
- ret = (got == [x[1] for x in self._outstanding_expects])
+ got = [x.rstrip("\n") for x in self.readlines(len(self._outstanding_expects))]
+ ret = got == [x[1] for x in self._outstanding_expects]
self._outstanding_expects = []
return ret
@@ -498,7 +525,7 @@ class EbuildProcessor:
self.ebd_write.flush()
if not self._outstanding_expects:
try:
- return want == self.read().rstrip('\n')
+ return want == self.read().rstrip("\n")
except TimeoutError:
return False
finally:
@@ -513,12 +540,12 @@ class EbuildProcessor:
mydata = []
while lines > 0:
mydata.append(self.ebd_read.readline())
- cmd, _, args_str = mydata[-1].strip().partition(' ')
- if cmd == 'SIGINT':
+ cmd, _, args_str = mydata[-1].strip().partition(" ")
+ if cmd == "SIGINT":
chuck_KeyboardInterrupt(self, args_str)
- elif cmd == 'SIGTERM':
+ elif cmd == "SIGTERM":
chuck_TermInterrupt(self, args_str)
- elif cmd == 'dying':
+ elif cmd == "dying":
chuck_DyingInterrupt(self, args_str)
lines -= 1
return mydata
@@ -548,22 +575,30 @@ class EbuildProcessor:
elif move_log != self.__sandbox_log:
with open(move_log) as myf:
for x in violations:
- myf.write(x+"\n")
+ myf.write(x + "\n")
# XXX this is fugly, use a colorizer or something
# (but it is better than "from output import red" (portage's output))
def red(text):
- return '\x1b[31;1m%s\x1b[39;49;00m' % (text,)
-
- self.write(red(
- "--------------------------- ACCESS VIOLATION SUMMARY "
- "---------------------------")+"\n")
- self.write(red(f"LOG FILE = \"{move_log}\"")+"\n\n")
+ return "\x1b[31;1m%s\x1b[39;49;00m" % (text,)
+
+ self.write(
+ red(
+ "--------------------------- ACCESS VIOLATION SUMMARY "
+ "---------------------------"
+ )
+ + "\n"
+ )
+ self.write(red(f'LOG FILE = "{move_log}"') + "\n\n")
for x in violations:
- self.write(x+"\n")
- self.write(red(
- "-----------------------------------------------------"
- "---------------------------")+"\n")
+ self.write(x + "\n")
+ self.write(
+ red(
+ "-----------------------------------------------------"
+ "---------------------------"
+ )
+ + "\n"
+ )
self.write("end_sandbox_summary")
try:
os.remove(self.__sandbox_log)
@@ -639,7 +674,7 @@ class EbuildProcessor:
"""Unlock the processor."""
self.processing_lock = False
- is_locked = klass.alias_attr('processing_lock')
+ is_locked = klass.alias_attr("processing_lock")
@property
def is_alive(self):
@@ -708,11 +743,14 @@ class EbuildProcessor:
raise KeyError(f"{key}: bash doesn't allow digits as the first char")
if not isinstance(val, (str, list, tuple)):
raise ValueError(
- f"_generate_env_str was fed a bad value; key={key}, val={val}")
+ f"_generate_env_str was fed a bad value; key={key}, val={val}"
+ )
if isinstance(val, (list, tuple)):
- data.append("%s=(%s)" % (key, ' '.join(
- f'[{i}]="{value}"' for i, value in enumerate(val))))
+ data.append(
+ "%s=(%s)"
+ % (key, " ".join(f'[{i}]="{value}"' for i, value in enumerate(val)))
+ )
elif val.isalnum():
data.append(f"{key}={val}")
elif "'" not in val:
@@ -734,18 +772,18 @@ class EbuildProcessor:
data = self._generate_env_str(env_dict)
old_umask = os.umask(0o002)
if tmpdir:
- path = pjoin(tmpdir, 'ebd-env-transfer')
- with open(path, 'w') as file:
+ path = pjoin(tmpdir, "ebd-env-transfer")
+ with open(path, "w") as file:
file.write(data)
self.write(f"start_receiving_env file {path}")
else:
self.write(
- f"start_receiving_env bytes {len(data)}\n{data}",
- append_newline=False)
+ f"start_receiving_env bytes {len(data)}\n{data}", append_newline=False
+ )
os.umask(old_umask)
return self.expect("env_received", async_req=async_req, flush=True)
- def set_logfile(self, logfile=''):
+ def set_logfile(self, logfile=""):
"""Set the logfile (location to log to).
Relevant only when the daemon is sandboxed.
@@ -777,8 +815,9 @@ class EbuildProcessor:
if self.expect("metadata_path_received", flush=True):
self._metadata_paths = paths
- def _run_depend_like_phase(self, command, package_inst, eclass_cache,
- env=None, extra_commands={}):
+ def _run_depend_like_phase(
+ self, command, package_inst, eclass_cache, env=None, extra_commands={}
+ ):
# ebuild is not allowed to run any external programs during
# depend phases; use /dev/null since "" == "."
self._ensure_metadata_paths(("/dev/null",))
@@ -791,7 +830,9 @@ class EbuildProcessor:
if self._eclass_caching:
updates = set()
commands = extra_commands.copy()
- commands["request_inherit"] = partial(inherit_handler, eclass_cache, updates=updates)
+ commands["request_inherit"] = partial(
+ inherit_handler, eclass_cache, updates=updates
+ )
self.generic_handler(additional_commands=commands)
if updates:
self.preload_eclasses(eclass_cache, limited_to=updates, async_req=True)
@@ -815,15 +856,20 @@ class EbuildProcessor:
raise InternalError(line, "receive_env was invoked twice.")
line = line.strip()
if not line:
- raise InternalError(line, "During env receive, ebd didn't give us a size.")
+ raise InternalError(
+ line, "During env receive, ebd didn't give us a size."
+ )
elif not line.isdigit():
raise InternalError(line, "Returned size wasn't an integer")
# This is a raw transfer, for obvious reasons.
environ.append(self.ebd_read.read(int(line)))
self._run_depend_like_phase(
- 'gen_ebuild_env', package_inst, eclass_cache,
- extra_commands={'receive_env': receive_env})
+ "gen_ebuild_env",
+ package_inst,
+ eclass_cache,
+ extra_commands={"receive_env": receive_env},
+ )
if not environ:
raise InternalError(None, "receive_env was never invoked.")
# Dump any leading/trailing spaces.
@@ -848,13 +894,17 @@ class EbuildProcessor:
# pass down phase and metadata key lists to avoid hardcoding them on the bash side
env = {
- 'PKGCORE_EBUILD_PHASES': tuple(package_inst.eapi.phases.values()),
- 'PKGCORE_METADATA_KEYS': tuple(package_inst.eapi.metadata_keys),
+ "PKGCORE_EBUILD_PHASES": tuple(package_inst.eapi.phases.values()),
+ "PKGCORE_METADATA_KEYS": tuple(package_inst.eapi.metadata_keys),
}
self._run_depend_like_phase(
- 'gen_metadata', package_inst, eclass_cache, env=env,
- extra_commands={'key': receive_key})
+ "gen_metadata",
+ package_inst,
+ eclass_cache,
+ env=env,
+ extra_commands={"key": receive_key},
+ )
return metadata_keys
@@ -908,10 +958,11 @@ class EbuildProcessor:
while True:
line = self.read().strip()
# split on first whitespace
- cmd, _, args_str = line.partition(' ')
+ cmd, _, args_str = line.partition(" ")
if not cmd:
raise InternalError(
- f"Expected command; instead got nothing from {line!r}")
+ f"Expected command; instead got nothing from {line!r}"
+ )
if cmd in handlers:
args = []
if args_str:
@@ -989,19 +1040,21 @@ def expected_ebuild_env(pkg, d=None, env_source_override=None, depends=False):
d.update(pkg.eapi.ebd_env)
if not depends:
- path = chain.from_iterable((
- const.PATH_FORCED_PREPEND,
- pkg.eapi.helpers.get('global', ()),
- d.get("PATH", "").split(os.pathsep),
- os.environ.get("PATH", "").split(os.pathsep),
- ))
+ path = chain.from_iterable(
+ (
+ const.PATH_FORCED_PREPEND,
+ pkg.eapi.helpers.get("global", ()),
+ d.get("PATH", "").split(os.pathsep),
+ os.environ.get("PATH", "").split(os.pathsep),
+ )
+ )
d["PATH"] = os.pathsep.join(filter(None, path))
- d["INHERITED"] = ' '.join(pkg.data.get("_eclasses_", ()))
- d["USE"] = ' '.join(sorted(str(x) for x in pkg.use))
+ d["INHERITED"] = " ".join(pkg.data.get("_eclasses_", ()))
+ d["USE"] = " ".join(sorted(str(x) for x in pkg.use))
d["SLOT"] = pkg.fullslot
# temp hack.
- for x in ('chost', 'cbuild', 'ctarget'):
+ for x in ("chost", "cbuild", "ctarget"):
val = getattr(pkg, x)
if val is not None:
d[x.upper()] = val
diff --git a/src/pkgcore/ebuild/profiles.py b/src/pkgcore/ebuild/profiles.py
index 17805edaa..a20443fa6 100644
--- a/src/pkgcore/ebuild/profiles.py
+++ b/src/pkgcore/ebuild/profiles.py
@@ -1,5 +1,8 @@
__all__ = (
- "ProfileError", "ProfileNode", "EmptyRootNode", "OnDiskProfile",
+ "ProfileError",
+ "ProfileNode",
+ "EmptyRootNode",
+ "OnDiskProfile",
"UserProfile",
)
@@ -28,7 +31,6 @@ from .eapi import EAPI, get_eapi
class ProfileError(errors.ParsingError):
-
def __init__(self, path, filename, error):
self.path, self.filename, self.error = path, filename, error
@@ -50,19 +52,27 @@ def _read_profile_files(files, allow_line_cont=False):
for path in files:
# determine file path relative to the profiles dir
try:
- relpath = path.split('/profiles/')[1]
+ relpath = path.split("/profiles/")[1]
except IndexError:
# profiles base path
relpath = os.path.basename(path)
for lineno, line in iter_read_bash(
- path, allow_line_cont=allow_line_cont, enum_line=True):
+ path, allow_line_cont=allow_line_cont, enum_line=True
+ ):
yield line, lineno, relpath
-def load_property(filename, *, read_func=_read_profile_files, fallback=(),
- parse_func=lambda x: x, allow_line_cont=False, allow_recurse=False,
- eapi_optional=None):
+def load_property(
+ filename,
+ *,
+ read_func=_read_profile_files,
+ fallback=(),
+ parse_func=lambda x: x,
+ allow_line_cont=False,
+ allow_recurse=False,
+ eapi_optional=None,
+):
"""Decorator simplifying parsing profile files to generate a profile property.
:param filename: The filename to parse within that profile directory.
@@ -78,20 +88,43 @@ def load_property(filename, *, read_func=_read_profile_files, fallback=(),
the fallback is returned and no ondisk activity occurs.
:return: A :py:`klass.jit.attr_named` property instance.
"""
+
def f(func):
- f2 = klass.jit_attr_named(f'_{func.__name__}')
- return f2(partial(
- _load_and_invoke, func, filename, read_func, fallback,
- allow_recurse, allow_line_cont, parse_func, eapi_optional))
+ f2 = klass.jit_attr_named(f"_{func.__name__}")
+ return f2(
+ partial(
+ _load_and_invoke,
+ func,
+ filename,
+ read_func,
+ fallback,
+ allow_recurse,
+ allow_line_cont,
+ parse_func,
+ eapi_optional,
+ )
+ )
+
return f
-def _load_and_invoke(func, filename, read_func, fallback, allow_recurse,
- allow_line_cont, parse_func, eapi_optional, self):
- if eapi_optional is not None and not getattr(self.eapi.options, eapi_optional, None):
+def _load_and_invoke(
+ func,
+ filename,
+ read_func,
+ fallback,
+ allow_recurse,
+ allow_line_cont,
+ parse_func,
+ eapi_optional,
+ self,
+):
+ if eapi_optional is not None and not getattr(
+ self.eapi.options, eapi_optional, None
+ ):
return func(self, fallback)
- profile_path = self.path.rstrip('/')
+ profile_path = self.path.rstrip("/")
base = pjoin(profile_path, filename)
files = []
@@ -108,8 +141,7 @@ def _load_and_invoke(func, filename, read_func, fallback, allow_recurse,
if read_func is None:
data = parse_func(files)
else:
- data = parse_func(read_func(
- files, allow_line_cont=allow_line_cont))
+ data = parse_func(read_func(files, allow_line_cont=allow_line_cont))
else:
data = fallback
return func(self, data)
@@ -117,10 +149,12 @@ def _load_and_invoke(func, filename, read_func, fallback, allow_recurse,
raise ProfileError(profile_path, filename, e) from e
except IsADirectoryError as e:
raise ProfileError(
- self.path, filename,
+ self.path,
+ filename,
"path is a directory, but this profile is PMS format- "
"directories aren't allowed. See layout.conf profile-formats "
- "to enable directory support") from e
+ "to enable directory support",
+ ) from e
_make_incrementals_dict = partial(misc.IncrementalsDict, const.incrementals)
@@ -142,7 +176,7 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
return f"profile at {self.path!r}"
def __repr__(self):
- return '<%s path=%r, @%#8x>' % (self.__class__.__name__, self.path, id(self))
+ return "<%s path=%r, @%#8x>" % (self.__class__.__name__, self.path, id(self))
system = klass.alias_attr("packages.system")
profile_set = klass.alias_attr("packages.profile")
@@ -151,38 +185,44 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
def name(self):
"""Relative path to the profile from the profiles directory."""
try:
- return self.path.split('/profiles/')[1]
+ return self.path.split("/profiles/")[1]
except IndexError:
# profiles base path
- return ''
+ return ""
@load_property("packages")
def packages(self, data):
repo_config = self.repoconfig
# TODO: get profile-set support into PMS
- profile_set = repo_config is not None and 'profile-set' in repo_config.profile_formats
+ profile_set = (
+ repo_config is not None and "profile-set" in repo_config.profile_formats
+ )
sys, neg_sys, pro, neg_pro = [], [], [], []
neg_wildcard = False
for line, lineno, relpath in data:
try:
- if line[0] == '-':
- if line == '-*':
+ if line[0] == "-":
+ if line == "-*":
neg_wildcard = True
- elif line[1] == '*':
+ elif line[1] == "*":
neg_sys.append(self.eapi_atom(line[2:]))
elif profile_set:
neg_pro.append(self.eapi_atom(line[1:]))
else:
- logger.error(f'{relpath!r}: invalid line format, line {lineno}: {line!r}')
+ logger.error(
+ f"{relpath!r}: invalid line format, line {lineno}: {line!r}"
+ )
else:
- if line[0] == '*':
+ if line[0] == "*":
sys.append(self.eapi_atom(line[1:]))
elif profile_set:
pro.append(self.eapi_atom(line))
else:
- logger.error(f'{relpath!r}: invalid line format, line {lineno}: {line!r}')
+ logger.error(
+ f"{relpath!r}: invalid line format, line {lineno}: {line!r}"
+ )
except ebuild_errors.MalformedAtom as e:
- logger.error(f'{relpath!r}, line {lineno}: parsing error: {e}')
+ logger.error(f"{relpath!r}, line {lineno}: parsing error: {e}")
system = [tuple(neg_sys), tuple(sys)]
profile = [tuple(neg_pro), tuple(pro)]
if neg_wildcard:
@@ -193,10 +233,10 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
@load_property("parent")
def parent_paths(self, data):
repo_config = self.repoconfig
- if repo_config is not None and 'portage-2' in repo_config.profile_formats:
+ if repo_config is not None and "portage-2" in repo_config.profile_formats:
l = []
for line, lineno, relpath in data:
- repo_id, separator, profile_path = line.partition(':')
+ repo_id, separator, profile_path = line.partition(":")
if separator:
if repo_id:
try:
@@ -209,22 +249,30 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
location = repo_config.location
else:
logger.error(
- f'repo {repo_config.repo_id!r}: '
+ f"repo {repo_config.repo_id!r}: "
f"{relpath!r} (line {lineno}), "
- f'bad profile parent {line!r}: '
- f'unknown repo {repo_id!r}'
+ f"bad profile parent {line!r}: "
+ f"unknown repo {repo_id!r}"
)
continue
- l.append((abspath(pjoin(location, 'profiles', profile_path)), line, lineno))
+ l.append(
+ (
+ abspath(pjoin(location, "profiles", profile_path)),
+ line,
+ lineno,
+ )
+ )
else:
l.append((abspath(pjoin(self.path, repo_id)), line, lineno))
return tuple(l)
- return tuple((abspath(pjoin(self.path, line)), line, lineno)
- for line, lineno, relpath in data)
+ return tuple(
+ (abspath(pjoin(self.path, line)), line, lineno)
+ for line, lineno, relpath in data
+ )
@klass.jit_attr
def parents(self):
- kls = getattr(self, 'parent_node_kls', self.__class__)
+ kls = getattr(self, "parent_node_kls", self.__class__)
parents = []
for path, line, lineno in self.parent_paths:
try:
@@ -233,19 +281,21 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
repo_id = self.repoconfig.repo_id
logger.error(
f"repo {repo_id!r}: '{self.name}/parent' (line {lineno}), "
- f'bad profile parent {line!r}: {e.error}'
+ f"bad profile parent {line!r}: {e.error}"
)
continue
return tuple(parents)
- @load_property("package.provided", allow_recurse=True,
- eapi_optional='profile_pkg_provided')
+ @load_property(
+ "package.provided", allow_recurse=True, eapi_optional="profile_pkg_provided"
+ )
def pkg_provided(self, data):
def _parse_cpv(s):
try:
return cpv.VersionedCPV(s)
except cpv.InvalidCPV:
- logger.error(f'invalid package.provided entry: {s!r}')
+ logger.error(f"invalid package.provided entry: {s!r}")
+
data = (x[0] for x in data)
return split_negations(data, _parse_cpv)
@@ -253,10 +303,12 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
"""Parse files containing optionally negated package atoms."""
neg, pos = [], []
for line, lineno, relpath in data:
- if line[0] == '-':
+ if line[0] == "-":
line = line[1:]
if not line:
- logger.error(f"{relpath!r}, line {lineno}: '-' negation without an atom")
+ logger.error(
+ f"{relpath!r}, line {lineno}: '-' negation without an atom"
+ )
continue
l = neg
else:
@@ -264,7 +316,7 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
try:
l.append(self.eapi_atom(line))
except ebuild_errors.MalformedAtom as e:
- logger.error(f'{relpath!r}, line {lineno}: parsing error: {e}')
+ logger.error(f"{relpath!r}, line {lineno}: parsing error: {e}")
return tuple(neg), tuple(pos)
def _package_keywords_splitter(self, iterable):
@@ -274,7 +326,7 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
try:
yield (atom(v[0]), tuple(stable_unique(v[1:])))
except ebuild_errors.MalformedAtom as e:
- logger.error(f'{relpath!r}, line {lineno}: parsing error: {e}')
+ logger.error(f"{relpath!r}, line {lineno}: parsing error: {e}")
@load_property("package.mask", allow_recurse=True)
def masks(self, data):
@@ -300,7 +352,8 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
def pkg_use(self, data):
c = misc.ChunkedDataDict()
c.update_from_stream(
- chain.from_iterable(self._parse_package_use(data).values()))
+ chain.from_iterable(self._parse_package_use(data).values())
+ )
c.freeze()
return c
@@ -324,18 +377,23 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
try:
a = self.eapi_atom(l[0])
except ebuild_errors.MalformedAtom as e:
- logger.error(f'{relpath!r}, line {lineno}: parsing error: {e}')
+ logger.error(f"{relpath!r}, line {lineno}: parsing error: {e}")
continue
if len(l) == 1:
- logger.error(f'{relpath!r}, line {lineno}: missing USE flag(s): {line!r}')
+ logger.error(
+ f"{relpath!r}, line {lineno}: missing USE flag(s): {line!r}"
+ )
continue
- if any(s.endswith(':') for s in l[1:]):
- logger.error(f'{relpath!r}, line {lineno}: USE_EXPAND syntax is invalid in this context: {line!r}')
+ if any(s.endswith(":") for s in l[1:]):
+ logger.error(
+ f"{relpath!r}, line {lineno}: USE_EXPAND syntax is invalid in this context: {line!r}"
+ )
continue
d[a.key].append(misc.chunked_data(a, *split_negations(l[1:])))
- return ImmutableDict((k, misc._build_cp_atom_payload(v, atom(k)))
- for k, v in d.items())
+ return ImmutableDict(
+ (k, misc._build_cp_atom_payload(v, atom(k))) for k, v in d.items()
+ )
def _parse_use(self, data):
c = misc.ChunkedDataDict()
@@ -350,8 +408,9 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
def use_force(self, data):
return self._parse_use(data)
- @load_property("use.stable.force", allow_recurse=True,
- eapi_optional='profile_stable_use')
+ @load_property(
+ "use.stable.force", allow_recurse=True, eapi_optional="profile_stable_use"
+ )
def use_stable_force(self, data):
return self._parse_use(data)
@@ -359,8 +418,11 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
def pkg_use_force(self, data):
return self._parse_package_use(data)
- @load_property("package.use.stable.force", allow_recurse=True,
- eapi_optional='profile_stable_use')
+ @load_property(
+ "package.use.stable.force",
+ allow_recurse=True,
+ eapi_optional="profile_stable_use",
+ )
def pkg_use_stable_force(self, data):
return self._parse_package_use(data)
@@ -368,8 +430,9 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
def use_mask(self, data):
return self._parse_use(data)
- @load_property("use.stable.mask", allow_recurse=True,
- eapi_optional='profile_stable_use')
+ @load_property(
+ "use.stable.mask", allow_recurse=True, eapi_optional="profile_stable_use"
+ )
def use_stable_mask(self, data):
return self._parse_use(data)
@@ -377,8 +440,11 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
def pkg_use_mask(self, data):
return self._parse_package_use(data)
- @load_property("package.use.stable.mask", allow_recurse=True,
- eapi_optional='profile_stable_use')
+ @load_property(
+ "package.use.stable.mask",
+ allow_recurse=True,
+ eapi_optional="profile_stable_use",
+ )
def pkg_use_stable_mask(self, data):
return self._parse_package_use(data)
@@ -387,8 +453,7 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
c = self.use_mask
if self.pkg_use_mask:
c = c.clone(unfreeze=True)
- c.update_from_stream(
- chain.from_iterable(self.pkg_use_mask.values()))
+ c.update_from_stream(chain.from_iterable(self.pkg_use_mask.values()))
c.freeze()
return c
@@ -398,11 +463,9 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
if self.use_stable_mask:
c.merge(self.use_stable_mask)
if self.pkg_use_mask:
- c.update_from_stream(
- chain.from_iterable(self.pkg_use_mask.values()))
+ c.update_from_stream(chain.from_iterable(self.pkg_use_mask.values()))
if self.pkg_use_stable_mask:
- c.update_from_stream(
- chain.from_iterable(self.pkg_use_stable_mask.values()))
+ c.update_from_stream(chain.from_iterable(self.pkg_use_stable_mask.values()))
c.freeze()
return c
@@ -411,8 +474,7 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
c = self.use_force
if self.pkg_use_force:
c = c.clone(unfreeze=True)
- c.update_from_stream(
- chain.from_iterable(self.pkg_use_force.values()))
+ c.update_from_stream(chain.from_iterable(self.pkg_use_force.values()))
c.freeze()
return c
@@ -422,22 +484,22 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
if self.use_stable_force:
c.merge(self.use_stable_force)
if self.pkg_use_force:
- c.update_from_stream(
- chain.from_iterable(self.pkg_use_force.values()))
+ c.update_from_stream(chain.from_iterable(self.pkg_use_force.values()))
if self.pkg_use_stable_force:
c.update_from_stream(
- chain.from_iterable(self.pkg_use_stable_force.values()))
+ chain.from_iterable(self.pkg_use_stable_force.values())
+ )
c.freeze()
return c
- @load_property('make.defaults', read_func=None, fallback=None)
+ @load_property("make.defaults", read_func=None, fallback=None)
def make_defaults(self, data):
d = {}
if data is not None:
d.update(read_bash_dict(data[0]))
return ImmutableDict(d)
- @load_property('make.defaults', read_func=None, fallback=None)
+ @load_property("make.defaults", read_func=None, fallback=None)
def default_env(self, data):
rendered = _make_incrementals_dict()
for parent in self.parents:
@@ -458,7 +520,7 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
@load_property("package.bashrc", allow_recurse=True)
def pkg_bashrc(self, data):
repo_config = self.repoconfig
- if repo_config is None or 'profile-bashrcs' not in repo_config.profile_formats:
+ if repo_config is None or "profile-bashrcs" not in repo_config.profile_formats:
return ()
d = defaultdict(list)
@@ -467,16 +529,18 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
try:
a = self.eapi_atom(l[0])
except ebuild_errors.MalformedAtom as exc:
- logger.error(f'{relpath!r}, line {lineno}: parsing error: {exc}')
+ logger.error(f"{relpath!r}, line {lineno}: parsing error: {exc}")
continue
if len(l) == 1:
- logger.error(f'{relpath!r}, line {lineno}: missing bashrc files: {line!r}')
+ logger.error(
+ f"{relpath!r}, line {lineno}: missing bashrc files: {line!r}"
+ )
continue
for filename in l[1:]:
- d[a].append(local_source(pjoin(self.path, 'bashrc', filename)))
+ d[a].append(local_source(pjoin(self.path, "bashrc", filename)))
return tuple((k, tuple(v)) for k, v in d.items())
- @load_property('eapi', fallback='0')
+ @load_property("eapi", fallback="0")
def eapi(self, data):
# handle fallback
if isinstance(data, str):
@@ -485,19 +549,19 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
try:
line, lineno, relpath = next(data)
except StopIteration:
- relpath = pjoin(self.name, 'eapi')
- logger.error(f'{relpath!r}: empty file')
- return get_eapi('0')
+ relpath = pjoin(self.name, "eapi")
+ logger.error(f"{relpath!r}: empty file")
+ return get_eapi("0")
try:
next(data)
- logger.error(f'{relpath!r}: multiple lines detected')
+ logger.error(f"{relpath!r}: multiple lines detected")
except StopIteration:
pass
eapi_str = line.strip()
if eapi_str not in EAPI.known_eapis:
- logger.error(f'{relpath!r}: unknown EAPI {eapi_str!r}')
+ logger.error(f"{relpath!r}: unknown EAPI {eapi_str!r}")
return get_eapi(eapi_str)
eapi_atom = klass.alias_attr("eapi.atom_kls")
@@ -510,13 +574,13 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
def _load_repoconfig_from_path(path):
path = abspath(path)
# strip '/' so we don't get '/usr/portage' == ('', 'usr', 'portage')
- chunks = path.lstrip('/').split('/')
+ chunks = path.lstrip("/").split("/")
try:
- pindex = max(idx for idx, x in enumerate(chunks) if x == 'profiles')
+ pindex = max(idx for idx, x in enumerate(chunks) if x == "profiles")
except ValueError:
# not in a repo...
return None
- repo_path = pjoin('/', *chunks[:pindex])
+ repo_path = pjoin("/", *chunks[:pindex])
return repo_objs.RepoConfig(repo_path)
@classmethod
@@ -528,13 +592,13 @@ class ProfileNode(metaclass=caching.WeakInstMeta):
# caching is a bit overprotective, even if pms_strict defaults to True,
# cls(path) is not cls(path, pms_strict=True)
- if repo_config is not None and 'pms' not in repo_config.profile_formats:
+ if repo_config is not None and "pms" not in repo_config.profile_formats:
profile = cls(path, pms_strict=False)
else:
profile = cls(path)
# optimization to avoid re-parsing what we already did.
- object.__setattr__(profile, '_repoconfig', repo_config)
+ object.__setattr__(profile, "_repoconfig", repo_config)
return profile
@@ -544,7 +608,9 @@ class EmptyRootNode(ProfileNode):
parents = ()
deprecated = None
- pkg_use = masked_use = stable_masked_use = forced_use = stable_forced_use = misc.ChunkedDataDict()
+ pkg_use = (
+ masked_use
+ ) = stable_masked_use = forced_use = stable_forced_use = misc.ChunkedDataDict()
forced_use.freeze()
pkg_bashrc = ()
pkg_use_force = pkg_use_mask = ImmutableDict()
@@ -577,12 +643,13 @@ class ProfileStack:
repo_id = node.repoconfig.repo_id
logger.error(
f"repo {repo_id!r}: '{self.name}/parent' (line {lineno}), "
- f'bad profile parent {line!r}: {e.error}'
+ f"bad profile parent {line!r}: {e.error}"
)
continue
for y in f(x):
yield y
yield node
+
return tuple(f(self.node))
@klass.jit_attr
@@ -639,13 +706,14 @@ class ProfileStack:
def default_env(self):
d = dict(self.node.default_env.items())
for incremental in const.incrementals:
- v = d.pop(incremental, '').split()
+ v = d.pop(incremental, "").split()
if v:
if incremental in const.incrementals_unfinalized:
d[incremental] = tuple(v)
else:
v = misc.incremental_expansion(
- v, msg_prefix=f"While expanding {incremental}, value {v!r}: ")
+ v, msg_prefix=f"While expanding {incremental}, value {v!r}: "
+ )
if v:
d[incremental] = tuple(v)
return ImmutableDict(d.items())
@@ -666,7 +734,7 @@ class ProfileStack:
@klass.jit_attr
def use(self):
"""USE flag settings for the profile."""
- return tuple(list(self.default_env.get('USE', ())) + list(self.expand_use()))
+ return tuple(list(self.default_env.get("USE", ())) + list(self.expand_use()))
def expand_use(self, env=None):
"""Expand USE_EXPAND settings to USE flags."""
@@ -678,7 +746,7 @@ class ProfileStack:
value = env.get(u)
if value is None:
continue
- u2 = u.lower() + '_'
+ u2 = u.lower() + "_"
use.extend(u2 + x for x in value.split())
return tuple(use)
@@ -714,7 +782,9 @@ class ProfileStack:
if self._system_profile.eapi.options.profile_iuse_injection:
iuse_effective.extend(self.iuse_implicit)
for v in self.use_expand_implicit.intersection(self.use_expand_unprefixed):
- iuse_effective.extend(self.default_env.get("USE_EXPAND_VALUES_" + v, "").split())
+ iuse_effective.extend(
+ self.default_env.get("USE_EXPAND_VALUES_" + v, "").split()
+ )
for v in self.use_expand.intersection(self.use_expand_implicit):
for x in self.default_env.get("USE_EXPAND_VALUES_" + v, "").split():
iuse_effective.append(v.lower() + "_" + x)
@@ -730,7 +800,8 @@ class ProfileStack:
def provides_repo(self):
# delay importing to avoid circular imports
from .repository import ProvidesRepo
- pkgs = self._collapse_generic('pkg_provided')
+
+ pkgs = self._collapse_generic("pkg_provided")
try:
arches = self._system_profile.repoconfig.known_arches
except AttributeError:
@@ -744,7 +815,7 @@ class ProfileStack:
@klass.jit_attr
def unmasks(self):
- return frozenset(self._collapse_generic('unmasks'))
+ return frozenset(self._collapse_generic("unmasks"))
@klass.jit_attr
def pkg_deprecated(self):
@@ -781,19 +852,19 @@ class ProfileStack:
@klass.jit_attr
def system(self):
- return frozenset(self._collapse_generic('system', clear=True))
+ return frozenset(self._collapse_generic("system", clear=True))
@klass.jit_attr
def profile_set(self):
- return frozenset(self._collapse_generic('profile_set', clear=True))
+ return frozenset(self._collapse_generic("profile_set", clear=True))
class OnDiskProfile(ProfileStack):
pkgcore_config_type = ConfigHint(
- {'basepath': 'str', 'profile': 'str'},
- required=('basepath', 'profile'),
- typename='profile',
+ {"basepath": "str", "profile": "str"},
+ required=("basepath", "profile"),
+ typename="profile",
)
def __init__(self, basepath, profile, load_profile_base=True):
@@ -809,11 +880,11 @@ class OnDiskProfile(ProfileStack):
chunks = [x for x in path.split("/") if x]
try:
# poor mans rindex.
- pbase = max(idx for idx, x in enumerate(chunks) if x == 'profiles')
+ pbase = max(idx for idx, x in enumerate(chunks) if x == "profiles")
except ValueError:
# no base found.
return None
- return pjoin("/", *chunks[:pbase+1]), '/'.join(chunks[pbase+1:])
+ return pjoin("/", *chunks[: pbase + 1]), "/".join(chunks[pbase + 1 :])
@classmethod
def from_abspath(cls, path):
@@ -864,9 +935,9 @@ class UserProfileNode(ProfileNode):
class UserProfile(OnDiskProfile):
pkgcore_config_type = ConfigHint(
- {'user_path': 'str', 'parent_path': 'str', 'parent_profile': 'str'},
- required=('user_path', 'parent_path', 'parent_profile'),
- typename='profile',
+ {"user_path": "str", "parent_path": "str", "parent_profile": "str"},
+ required=("user_path", "parent_path", "parent_profile"),
+ typename="profile",
)
def __init__(self, user_path, parent_path, parent_profile, load_profile_base=True):
diff --git a/src/pkgcore/ebuild/repo_objs.py b/src/pkgcore/ebuild/repo_objs.py
index 78371e29a..e6477b581 100644
--- a/src/pkgcore/ebuild/repo_objs.py
+++ b/src/pkgcore/ebuild/repo_objs.py
@@ -3,9 +3,17 @@ package class for buildable ebuilds
"""
__all__ = (
- "Maintainer", "MetadataXml", "LocalMetadataXml",
- "SharedPkgData", "Licenses", "OverlayedProfiles",
- "Project", "ProjectMember", "Subproject", "ProjectsXml", "LocalProjectsXml"
+ "Maintainer",
+ "MetadataXml",
+ "LocalMetadataXml",
+ "SharedPkgData",
+ "Licenses",
+ "OverlayedProfiles",
+ "Project",
+ "ProjectMember",
+ "Subproject",
+ "ProjectsXml",
+ "LocalProjectsXml",
)
import contextlib
@@ -56,11 +64,13 @@ class Maintainer:
:ivar proxied: proxied maintainer status (yes, no, proxy)
"""
- __slots__ = ('email', 'description', 'name', 'maint_type', 'proxied')
+ __slots__ = ("email", "description", "name", "maint_type", "proxied")
- def __init__(self, email=None, name=None, description=None, maint_type=None, proxied=None):
+ def __init__(
+ self, email=None, name=None, description=None, maint_type=None, proxied=None
+ ):
if email is None and name is None:
- raise ValueError('need at least one of name and email')
+ raise ValueError("need at least one of name and email")
self.email = email
self.name = name
self.description = description
@@ -70,13 +80,13 @@ class Maintainer:
def __str__(self):
if self.name is not None:
if self.email is not None:
- res = f'{self.name} <{self.email}>'
+ res = f"{self.name} <{self.email}>"
else:
res = self.name
else:
res = self.email
if self.description is not None:
- return f'{res} ({self.description})'
+ return f"{res} ({self.description})"
return res
def __eq__(self, other):
@@ -94,7 +104,7 @@ class Maintainer:
class Upstream:
"""Data on a single upstream."""
- __slots__ = ('type', 'name')
+ __slots__ = ("type", "name")
def __init__(self, type, name):
self.type = type
@@ -119,8 +129,13 @@ class MetadataXml:
"""
__slots__ = (
- "__weakref__", "_maintainers", "_upstreams", "_local_use",
- "_longdescription", "_source", "_stabilize_allarches",
+ "__weakref__",
+ "_maintainers",
+ "_upstreams",
+ "_local_use",
+ "_longdescription",
+ "_source",
+ "_stabilize_allarches",
)
def __init__(self, source):
@@ -131,8 +146,13 @@ class MetadataXml:
self._parse_xml()
return getattr(self, attr)
- for attr in ("maintainers", "upstreams", "local_use",
- "longdescription", "stabilize_allarches"):
+ for attr in (
+ "maintainers",
+ "upstreams",
+ "local_use",
+ "longdescription",
+ "stabilize_allarches",
+ ):
locals()[attr] = property(post_curry(_generic_attr, "_" + attr))
del attr
@@ -160,30 +180,37 @@ class MetadataXml:
name = e.text
elif e.tag == "email":
email = e.text
- elif e.tag == 'description' and e.get('lang', 'en') == 'en':
+ elif e.tag == "description" and e.get("lang", "en") == "en":
description = e.text
try:
- maintainers.append(Maintainer(
- name=name, email=email, description=description,
- maint_type=x.get('type'), proxied=x.get('proxied')))
+ maintainers.append(
+ Maintainer(
+ name=name,
+ email=email,
+ description=description,
+ maint_type=x.get("type"),
+ proxied=x.get("proxied"),
+ )
+ )
except ValueError:
# ignore invalid maintainers that should be caught by pkgcheck
pass
self._maintainers = tuple(maintainers)
self._upstreams = tuple(
- Upstream(e.get('type'), e.text) for e in chain.from_iterable(tree.findall("upstream"))
- if e.tag == 'remote-id'
+ Upstream(e.get("type"), e.text)
+ for e in chain.from_iterable(tree.findall("upstream"))
+ if e.tag == "remote-id"
)
# Could be unicode!
self._longdescription = None
for x in tree.findall("longdescription"):
- if x.get('lang', 'en') != 'en':
+ if x.get("lang", "en") != "en":
continue
- longdesc = ''.join(x.itertext())
+ longdesc = "".join(x.itertext())
if longdesc:
- self._longdescription = ' '.join(longdesc.split())
+ self._longdescription = " ".join(longdesc.split())
break
self._source = None
@@ -191,12 +218,12 @@ class MetadataXml:
# lang="" is property of <use/>
self._local_use = mappings.ImmutableDict()
for x in tree.findall("use"):
- if x.get('lang', 'en') != 'en':
+ if x.get("lang", "en") != "en":
continue
self._local_use = mappings.ImmutableDict(
- (e.attrib['name'], ' '.join(''.join(e.itertext()).split()))
- for e in x.findall('flag')
- if 'name' in e.attrib
+ (e.attrib["name"], " ".join("".join(e.itertext()).split()))
+ for e in x.findall("flag")
+ if "name" in e.attrib
)
break
@@ -242,12 +269,12 @@ class ProjectMember(metaclass=klass.generic_equality):
:ivar is_lead: whether the member is a project lead.
"""
- __slots__ = ('email', 'name', 'role', 'is_lead')
- __attr_comparison__ = ('email', 'name', 'role', 'is_lead')
+ __slots__ = ("email", "name", "role", "is_lead")
+ __attr_comparison__ = ("email", "name", "role", "is_lead")
def __init__(self, email, name=None, role=None, is_lead=None):
if email is None:
- raise ValueError('email for project member must not be null')
+ raise ValueError("email for project member must not be null")
self.email = email
self.name = name
self.role = role
@@ -255,11 +282,11 @@ class ProjectMember(metaclass=klass.generic_equality):
def __str__(self):
if self.name is not None:
- res = f'{self.name} <{self.email}>'
+ res = f"{self.name} <{self.email}>"
else:
res = self.email
if self.role is not None:
- return f'{res} ({self.role})'
+ return f"{res} ({self.role})"
return res
@@ -270,11 +297,11 @@ class Subproject:
:ivar inherit_members: whether the parent project inherits members from this subproject
"""
- __slots__ = ('_ref', 'inherit_members', '_projects_xml', '_project')
+ __slots__ = ("_ref", "inherit_members", "_projects_xml", "_project")
def __init__(self, ref, projects_xml, inherit_members=None):
if ref is None:
- raise ValueError('ref for subproject must not be null')
+ raise ValueError("ref for subproject must not be null")
self._ref = ref
self.inherit_members = inherit_members
self._projects_xml = projects_xml
@@ -284,11 +311,11 @@ class Subproject:
try:
return self._projects_xml.projects[self._ref]
except KeyError:
- logger.error(f'projects.xml: subproject {self._ref!r} does not exist')
+ logger.error(f"projects.xml: subproject {self._ref!r} does not exist")
return None
- __getattr__ = klass.GetAttrProxy('project')
- __dir__ = klass.DirProxy('project')
+ __getattr__ = klass.GetAttrProxy("project")
+ __dir__ = klass.DirProxy("project")
class Project:
@@ -308,12 +335,13 @@ class Project:
:ivar subprojects: subprojects
"""
- __slots__ = ('email', 'name', 'url', 'description', 'members', 'subprojects')
+ __slots__ = ("email", "name", "url", "description", "members", "subprojects")
- def __init__(self, email, name=None, url=None, description=None,
- members=(), subprojects=()):
+ def __init__(
+ self, email, name=None, url=None, description=None, members=(), subprojects=()
+ ):
if email is None:
- raise ValueError('email for project must not be null')
+ raise ValueError("email for project must not be null")
self.email = email
self.name = name
self.url = url
@@ -323,11 +351,11 @@ class Project:
def __str__(self):
if self.name is not None:
- res = f'{self.name} <{self.email}>'
+ res = f"{self.name} <{self.email}>"
else:
res = self.email
if self.url is not None:
- return f'{res} ({self.url})'
+ return f"{res} ({self.url})"
return res
@property
@@ -339,8 +367,10 @@ class Project:
def recursive_members(self):
"""All project members, including members inherited from subprojects."""
subprojects = list(
- sp for sp in self.subprojects
- if sp.inherit_members and sp.project is not None)
+ sp
+ for sp in self.subprojects
+ if sp.inherit_members and sp.project is not None
+ )
subproject_emails = set(sp.email for sp in subprojects)
# recursively collect all subprojects from which to inherit
@@ -360,7 +390,8 @@ class Project:
if m.email not in members:
# drop lead bit
m = ProjectMember(
- email=m.email, name=m.name, role=m.role, is_lead=False)
+ email=m.email, name=m.name, role=m.role, is_lead=False
+ )
members[m.email] = m
return tuple(members.values())
@@ -372,7 +403,7 @@ class ProjectsXml:
if loaded.
"""
- __slots__ = ('__weakref__', '_projects', '_source')
+ __slots__ = ("__weakref__", "_projects", "_source")
def __init__(self, source):
self._source = source
@@ -389,39 +420,46 @@ class ProjectsXml:
try:
tree = etree.parse(source)
except etree.XMLSyntaxError as e:
- logger.error(f'failed parsing projects.xml: {e}')
+ logger.error(f"failed parsing projects.xml: {e}")
return mappings.ImmutableDict()
projects = {}
- for p in tree.findall('project'):
+ for p in tree.findall("project"):
kwargs = {}
- for k in ('email', 'name', 'url', 'description'):
+ for k in ("email", "name", "url", "description"):
kwargs[k] = p.findtext(k)
members = []
- for m in p.findall('member'):
+ for m in p.findall("member"):
m_kwargs = {}
- for k in ('email', 'name', 'role'):
+ for k in ("email", "name", "role"):
m_kwargs[k] = m.findtext(k)
- m_kwargs['is_lead'] = m.get('is-lead', '') == '1'
+ m_kwargs["is_lead"] = m.get("is-lead", "") == "1"
try:
members.append(ProjectMember(**m_kwargs))
except ValueError:
- logger.error(f"project {kwargs['email']} has <member/> with no email")
- kwargs['members'] = members
+ logger.error(
+ f"project {kwargs['email']} has <member/> with no email"
+ )
+ kwargs["members"] = members
subprojects = []
- for sp in p.findall('subproject'):
+ for sp in p.findall("subproject"):
try:
- subprojects.append(Subproject(
- ref=sp.get('ref'),
- inherit_members=sp.get('inherit-members', '') == '1',
- projects_xml=self))
+ subprojects.append(
+ Subproject(
+ ref=sp.get("ref"),
+ inherit_members=sp.get("inherit-members", "") == "1",
+ projects_xml=self,
+ )
+ )
except ValueError:
- logger.error(f"project {kwargs['email']} has <subproject/> with no ref")
- kwargs['subprojects'] = subprojects
+ logger.error(
+ f"project {kwargs['email']} has <subproject/> with no ref"
+ )
+ kwargs["subprojects"] = subprojects
- projects[kwargs['email']] = Project(**kwargs)
+ projects[kwargs["email"]] = Project(**kwargs)
return mappings.ImmutableDict(projects)
@@ -441,15 +479,30 @@ class LocalProjectsXml(ProjectsXml):
class Licenses(metaclass=WeakInstMeta):
__inst_caching__ = True
- __slots__ = ('_base', '_licenses', '_groups', 'license_groups_path', 'licenses_dir', '_repo_masters', '_license_instances')
+ __slots__ = (
+ "_base",
+ "_licenses",
+ "_groups",
+ "license_groups_path",
+ "licenses_dir",
+ "_repo_masters",
+ "_license_instances",
+ )
- def __init__(self, repo, *repo_masters,
- licenses_dir='licenses', license_groups='profiles/license_groups'):
+ def __init__(
+ self,
+ repo,
+ *repo_masters,
+ licenses_dir="licenses",
+ license_groups="profiles/license_groups",
+ ):
repo_base = repo.location
- object.__setattr__(self, '_base', repo_base)
- object.__setattr__(self, 'license_groups_path', pjoin(repo_base, license_groups))
- object.__setattr__(self, 'licenses_dir', pjoin(repo_base, licenses_dir))
- object.__setattr__(self, '_repo_masters', repo_masters)
+ object.__setattr__(self, "_base", repo_base)
+ object.__setattr__(
+ self, "license_groups_path", pjoin(repo_base, license_groups)
+ )
+ object.__setattr__(self, "licenses_dir", pjoin(repo_base, licenses_dir))
+ object.__setattr__(self, "_repo_masters", repo_masters)
self._load_license_instances()
def _load_license_instances(self):
@@ -457,9 +510,9 @@ class Licenses(metaclass=WeakInstMeta):
for x in self._repo_masters:
if isinstance(x, Licenses):
l.append(x)
- elif hasattr(x, 'licenses'):
+ elif hasattr(x, "licenses"):
l.append(x.licenses)
- object.__setattr__(self, '_license_instances', tuple(l))
+ object.__setattr__(self, "_license_instances", tuple(l))
@klass.jit_attr_none
def licenses(self):
@@ -474,7 +527,7 @@ class Licenses(metaclass=WeakInstMeta):
def groups(self):
"""Return the mapping of defined license groups to licenses for a repo."""
try:
- d = read_dict(self.license_groups_path, splitter=' ')
+ d = read_dict(self.license_groups_path, splitter=" ")
for k, v in d.items():
d[k] = set(v.split())
except EnvironmentError:
@@ -496,20 +549,22 @@ class Licenses(metaclass=WeakInstMeta):
while keep_going:
keep_going = False
for k, v in groups.items():
- if not any(x[0] == '@' for x in v):
+ if not any(x[0] == "@" for x in v):
continue
keep_going = True
l = []
for v2 in v:
- if v2[0] == '@':
+ if v2[0] == "@":
v2 = v2[1:]
if not v2 or v2 not in groups:
logger.error(
- f"invalid license group reference: {v2!r} in {self}")
+ f"invalid license group reference: {v2!r} in {self}"
+ )
continue
elif v2 == k:
logger.error(
- f"cyclic license group references for {v2!r} in {self}")
+ f"cyclic license group references for {v2!r} in {self}"
+ )
continue
l.extend(groups[v2])
else:
@@ -551,18 +606,22 @@ class _immutable_attr_dict(mappings.ImmutableDict):
mappings.inject_getitem_as_getattr(locals())
-_KnownProfile = namedtuple('_KnownProfile', ['base', 'arch', 'path', 'status', 'deprecated'])
+_KnownProfile = namedtuple(
+ "_KnownProfile", ["base", "arch", "path", "status", "deprecated"]
+)
class Profiles(klass.ImmutableInstance):
- __slots__ = ('config', 'profiles_base', '_profiles')
+ __slots__ = ("config", "profiles_base", "_profiles")
__inst_caching__ = True
def __init__(self, repo_config, profiles_base=None):
- object.__setattr__(self, 'config', repo_config)
- profiles_base = profiles_base if profiles_base is not None else repo_config.profiles_base
- object.__setattr__(self, 'profiles_base', profiles_base)
+ object.__setattr__(self, "config", repo_config)
+ profiles_base = (
+ profiles_base if profiles_base is not None else repo_config.profiles_base
+ )
+ object.__setattr__(self, "profiles_base", profiles_base)
@klass.jit_attr_none
def profiles(self):
@@ -572,7 +631,7 @@ class Profiles(klass.ImmutableInstance):
def parse(profiles_base, repo_id, known_status=None, known_arch=None):
"""Return the mapping of arches to profiles for a repo."""
l = []
- fp = pjoin(profiles_base, 'profiles.desc')
+ fp = pjoin(profiles_base, "profiles.desc")
try:
for lineno, line in iter_read_bash(fp, enum_line=True):
try:
@@ -581,21 +640,25 @@ class Profiles(klass.ImmutableInstance):
logger.error(
f"{repo_id}::profiles/profiles.desc, "
f"line {lineno}: invalid profile line format: "
- "should be 'arch profile status'")
+ "should be 'arch profile status'"
+ )
continue
if known_status is not None and status not in known_status:
logger.warning(
f"{repo_id}::profiles/profiles.desc, "
- f"line {lineno}: unknown profile status: {status!r}")
+ f"line {lineno}: unknown profile status: {status!r}"
+ )
if known_arch is not None and arch not in known_arch:
logger.warning(
f"{repo_id}::profiles/profiles.desc, "
- f"line {lineno}: unknown arch: {arch!r}")
+ f"line {lineno}: unknown arch: {arch!r}"
+ )
# Normalize the profile name on the offchance someone slipped an extra /
# into it.
- path = '/'.join(filter(None, profile.split('/')))
+ path = "/".join(filter(None, profile.split("/")))
deprecated = os.path.exists(
- os.path.join(profiles_base, path, 'deprecated'))
+ os.path.join(profiles_base, path, "deprecated")
+ )
l.append(_KnownProfile(profiles_base, arch, path, status, deprecated))
except FileNotFoundError:
# no profiles exist
@@ -609,7 +672,7 @@ class Profiles(klass.ImmutableInstance):
yield from self.profiles
def __getitem__(self, path):
- if path[0] == '/':
+ if path[0] == "/":
path = path.lstrip(self.profiles_base).lstrip(os.sep)
for p in self.profiles:
if p.path == path:
@@ -617,7 +680,7 @@ class Profiles(klass.ImmutableInstance):
raise KeyError(path)
def __contains__(self, path):
- if path[0] == '/':
+ if path[0] == "/":
path = path.lstrip(self.profiles_base).lstrip(os.sep)
for p in self.profiles:
if p.path == path:
@@ -638,7 +701,7 @@ class Profiles(klass.ImmutableInstance):
def get_profiles(self, status):
"""Yield profiles matching a given status."""
for p in self.profiles:
- if status == p.status or (status == 'deprecated' and p.deprecated):
+ if status == p.status or (status == "deprecated" and p.deprecated):
yield p
def create_profile(self, node, **kwargs):
@@ -649,10 +712,10 @@ class Profiles(klass.ImmutableInstance):
class OverlayedProfiles(Profiles):
__inst_caching__ = True
- __slots__ = ('_profiles_instances', '_profiles_sources')
+ __slots__ = ("_profiles_instances", "_profiles_sources")
def __init__(self, *profiles_sources):
- object.__setattr__(self, '_profiles_sources', profiles_sources)
+ object.__setattr__(self, "_profiles_sources", profiles_sources)
self._load_profiles_instances()
@klass.jit_attr_none
@@ -670,9 +733,9 @@ class OverlayedProfiles(Profiles):
for x in self._profiles_sources:
if isinstance(x, Profiles):
l.append(x)
- elif hasattr(x, 'profiles'):
+ elif hasattr(x, "profiles"):
l.append(x.profiles)
- object.__setattr__(self, '_profiles_instances', tuple(l))
+ object.__setattr__(self, "_profiles_instances", tuple(l))
class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta):
@@ -680,26 +743,35 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
layout_offset = "metadata/layout.conf"
- default_hashes = ('size', 'blake2b', 'sha512')
- default_required_hashes = ('size', 'blake2b')
- supported_profile_formats = ('pms', 'portage-1', 'portage-2', 'profile-bashrcs', 'profile-set')
- supported_cache_formats = ('md5-dict', 'pms')
+ default_hashes = ("size", "blake2b", "sha512")
+ default_required_hashes = ("size", "blake2b")
+ supported_profile_formats = (
+ "pms",
+ "portage-1",
+ "portage-2",
+ "profile-bashrcs",
+ "profile-set",
+ )
+ supported_cache_formats = ("md5-dict", "pms")
__inst_caching__ = True
pkgcore_config_type = ConfigHint(
- typename='repo_config',
+ typename="repo_config",
types={
- 'config_name': 'str',
- 'syncer': 'lazy_ref:syncer',
- })
+ "config_name": "str",
+ "syncer": "lazy_ref:syncer",
+ },
+ )
- def __init__(self, location, config_name=None, syncer=None, profiles_base='profiles'):
+ def __init__(
+ self, location, config_name=None, syncer=None, profiles_base="profiles"
+ ):
super().__init__(syncer)
- object.__setattr__(self, 'config_name', config_name)
- object.__setattr__(self, 'external', (config_name is None))
- object.__setattr__(self, 'location', location)
- object.__setattr__(self, 'profiles_base', pjoin(self.location, profiles_base))
+ object.__setattr__(self, "config_name", config_name)
+ object.__setattr__(self, "external", (config_name is None))
+ object.__setattr__(self, "location", location)
+ object.__setattr__(self, "profiles_base", pjoin(self.location, profiles_base))
try:
self._parse_config()
@@ -713,96 +785,138 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
"""Load data from the repo's metadata/layout.conf file."""
path = pjoin(self.location, self.layout_offset)
data = read_dict(
- iter_read_bash(readlines(path, strip_whitespace=True, swallow_missing=True)),
- source_isiter=True, strip=True, filename=path, ignore_errors=True)
+ iter_read_bash(
+ readlines(path, strip_whitespace=True, swallow_missing=True)
+ ),
+ source_isiter=True,
+ strip=True,
+ filename=path,
+ ignore_errors=True,
+ )
sf = object.__setattr__
- sf(self, 'repo_name', data.get('repo-name', None))
+ sf(self, "repo_name", data.get("repo-name", None))
- hashes = data.get('manifest-hashes', '').lower().split()
+ hashes = data.get("manifest-hashes", "").lower().split()
if hashes:
- hashes = ['size'] + hashes
+ hashes = ["size"] + hashes
hashes = tuple(iter_stable_unique(hashes))
else:
hashes = self.default_hashes
- required_hashes = data.get('manifest-required-hashes', '').lower().split()
+ required_hashes = data.get("manifest-required-hashes", "").lower().split()
if required_hashes:
- required_hashes = ['size'] + required_hashes
+ required_hashes = ["size"] + required_hashes
required_hashes = tuple(iter_stable_unique(required_hashes))
else:
required_hashes = self.default_required_hashes
- manifest_policy = data.get('use-manifests', 'strict').lower()
+ manifest_policy = data.get("use-manifests", "strict").lower()
d = {
- 'disabled': (manifest_policy == 'false'),
- 'strict': (manifest_policy == 'strict'),
- 'thin': (data.get('thin-manifests', '').lower() == 'true'),
- 'signed': (data.get('sign-manifests', 'true').lower() == 'true'),
- 'hashes': hashes,
- 'required_hashes': required_hashes,
+ "disabled": (manifest_policy == "false"),
+ "strict": (manifest_policy == "strict"),
+ "thin": (data.get("thin-manifests", "").lower() == "true"),
+ "signed": (data.get("sign-manifests", "true").lower() == "true"),
+ "hashes": hashes,
+ "required_hashes": required_hashes,
}
- sf(self, 'manifests', _immutable_attr_dict(d))
- masters = data.get('masters')
+ sf(self, "manifests", _immutable_attr_dict(d))
+ masters = data.get("masters")
_missing_masters = False
if masters is None:
if not self.is_empty:
logger.warning(
f"{self.repo_id} repo at {self.location!r}, doesn't "
"specify masters in metadata/layout.conf. Please explicitly "
- "set masters (use \"masters =\" if the repo is standalone).")
+ 'set masters (use "masters =" if the repo is standalone).'
+ )
_missing_masters = True
masters = ()
else:
masters = tuple(iter_stable_unique(masters.split()))
- sf(self, '_missing_masters', _missing_masters)
- sf(self, 'masters', masters)
- aliases = data.get('aliases', '').split() + [
- self.config_name, self.repo_name, self.pms_repo_name, self.location]
- sf(self, 'aliases', tuple(filter(None, iter_stable_unique(aliases))))
- sf(self, 'eapis_deprecated', tuple(iter_stable_unique(data.get('eapis-deprecated', '').split())))
- sf(self, 'eapis_banned', tuple(iter_stable_unique(data.get('eapis-banned', '').split())))
- sf(self, 'eapis_testing', tuple(iter_stable_unique(data.get('eapis-testing', '').split())))
- sf(self, 'profile_eapis_deprecated', tuple(iter_stable_unique(data.get('profile-eapis-deprecated', '').split())))
- sf(self, 'profile_eapis_banned', tuple(iter_stable_unique(data.get('profile-eapis-banned', '').split())))
- sf(self, 'properties_allowed', tuple(iter_stable_unique(data.get('properties-allowed', '').split())))
- sf(self, 'restrict_allowed', tuple(iter_stable_unique(data.get('restrict-allowed', '').split())))
- sf(self, 'sign_commits', data.get('sign-commits', 'false').lower() == 'true')
-
- v = set(data.get('cache-formats', 'md5-dict').lower().split())
+ sf(self, "_missing_masters", _missing_masters)
+ sf(self, "masters", masters)
+ aliases = data.get("aliases", "").split() + [
+ self.config_name,
+ self.repo_name,
+ self.pms_repo_name,
+ self.location,
+ ]
+ sf(self, "aliases", tuple(filter(None, iter_stable_unique(aliases))))
+ sf(
+ self,
+ "eapis_deprecated",
+ tuple(iter_stable_unique(data.get("eapis-deprecated", "").split())),
+ )
+ sf(
+ self,
+ "eapis_banned",
+ tuple(iter_stable_unique(data.get("eapis-banned", "").split())),
+ )
+ sf(
+ self,
+ "eapis_testing",
+ tuple(iter_stable_unique(data.get("eapis-testing", "").split())),
+ )
+ sf(
+ self,
+ "profile_eapis_deprecated",
+ tuple(iter_stable_unique(data.get("profile-eapis-deprecated", "").split())),
+ )
+ sf(
+ self,
+ "profile_eapis_banned",
+ tuple(iter_stable_unique(data.get("profile-eapis-banned", "").split())),
+ )
+ sf(
+ self,
+ "properties_allowed",
+ tuple(iter_stable_unique(data.get("properties-allowed", "").split())),
+ )
+ sf(
+ self,
+ "restrict_allowed",
+ tuple(iter_stable_unique(data.get("restrict-allowed", "").split())),
+ )
+ sf(self, "sign_commits", data.get("sign-commits", "false").lower() == "true")
+
+ v = set(data.get("cache-formats", "md5-dict").lower().split())
if not v:
v = [None]
else:
# sort into favored order
v = [f for f in self.supported_cache_formats if f in v]
if not v:
- logger.warning('unknown cache format: falling back to md5-dict format')
- v = ['md5-dict']
- sf(self, 'cache_format', list(v)[0])
+ logger.warning("unknown cache format: falling back to md5-dict format")
+ v = ["md5-dict"]
+ sf(self, "cache_format", list(v)[0])
- profile_formats = set(data.get('profile-formats', 'pms').lower().split())
+ profile_formats = set(data.get("profile-formats", "pms").lower().split())
if not profile_formats:
logger.info(
f"{self.repo_id!r} repo at {self.location!r} has explicitly "
- "unset profile-formats, defaulting to pms")
- profile_formats = {'pms'}
+ "unset profile-formats, defaulting to pms"
+ )
+ profile_formats = {"pms"}
unknown = profile_formats.difference(self.supported_profile_formats)
if unknown:
logger.info(
"%r repo at %r has unsupported profile format%s: %s",
- self.repo_id, self.location, pluralism(unknown),
- ', '.join(sorted(unknown)))
+ self.repo_id,
+ self.location,
+ pluralism(unknown),
+ ", ".join(sorted(unknown)),
+ )
profile_formats.difference_update(unknown)
- profile_formats.add('pms')
- sf(self, 'profile_formats', profile_formats)
+ profile_formats.add("pms")
+ sf(self, "profile_formats", profile_formats)
@klass.jit_attr
def known_arches(self):
"""All valid KEYWORDS for the repo."""
try:
- return frozenset(iter_read_bash(
- pjoin(self.profiles_base, 'arch.list')))
+ return frozenset(iter_read_bash(pjoin(self.profiles_base, "arch.list")))
except FileNotFoundError:
return frozenset()
@@ -812,8 +926,8 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
See https://www.gentoo.org/glep/glep-0072.html for more details.
"""
- fp = pjoin(self.profiles_base, 'arches.desc')
- d = {'stable': set(), 'transitional': set(), 'testing': set()}
+ fp = pjoin(self.profiles_base, "arches.desc")
+ d = {"stable": set(), "transitional": set(), "testing": set()}
try:
for lineno, line in iter_read_bash(fp, enum_line=True):
try:
@@ -822,17 +936,20 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
logger.error(
f"{self.repo_id}::profiles/arches.desc, "
f"line {lineno}: invalid line format: "
- "should be '<arch> <status>'")
+ "should be '<arch> <status>'"
+ )
continue
if arch not in self.known_arches:
logger.warning(
f"{self.repo_id}::profiles/arches.desc, "
- f"line {lineno}: unknown arch: {arch!r}")
+ f"line {lineno}: unknown arch: {arch!r}"
+ )
continue
if status not in d:
logger.warning(
f"{self.repo_id}::profiles/arches.desc, "
- f"line {lineno}: unknown status: {status!r}")
+ f"line {lineno}: unknown status: {status!r}"
+ )
continue
d[status].add(arch)
except FileNotFoundError:
@@ -846,23 +963,25 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
# conversion of ValueErrors...
def converter(key):
return (packages.AlwaysTrue, key)
- return tuple(self._split_use_desc_file('use.desc', converter))
+
+ return tuple(self._split_use_desc_file("use.desc", converter))
@klass.jit_attr
def use_local_desc(self):
"""Local USE flags for the repo."""
+
def converter(key):
# todo: convert this to using a common exception base, with
# conversion of ValueErrors/atom exceptions...
- chunks = key.split(':', 1)
+ chunks = key.split(":", 1)
return (atom.atom(chunks[0]), chunks[1])
- return tuple(self._split_use_desc_file('use.local.desc', converter))
+ return tuple(self._split_use_desc_file("use.local.desc", converter))
@klass.jit_attr
def use_expand_desc(self):
"""USE_EXPAND settings for the repo."""
- base = pjoin(self.profiles_base, 'desc')
+ base = pjoin(self.profiles_base, "desc")
d = {}
try:
targets = listdir_files(base)
@@ -870,17 +989,19 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
targets = []
for use_group in targets:
- group = use_group.split('.', 1)[0]
+ group = use_group.split(".", 1)[0]
d[group] = tuple(
self._split_use_desc_file(
- f'desc/{use_group}', lambda k: f'{group}_{k}', matcher=False))
+ f"desc/{use_group}", lambda k: f"{group}_{k}", matcher=False
+ )
+ )
return mappings.ImmutableDict(d)
@klass.jit_attr
def use_expand_sort(self):
"""Mapping of USE_EXPAND sorting keys for the repo."""
- base = pjoin(self.profiles_base, 'desc')
+ base = pjoin(self.profiles_base, "desc")
d = {}
try:
targets = listdir_files(base)
@@ -888,9 +1009,13 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
targets = []
for use_group in targets:
- group = use_group.split('.', 1)[0]
- use_expand = (x[0] for x in self._split_use_desc_file(
- f'desc/{use_group}', lambda k: k, matcher=False))
+ group = use_group.split(".", 1)[0]
+ use_expand = (
+ x[0]
+ for x in self._split_use_desc_file(
+ f"desc/{use_group}", lambda k: k, matcher=False
+ )
+ )
d[group] = {use: i for i, use in enumerate(use_expand)}
return mappings.ImmutableDict(d)
@@ -904,15 +1029,15 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
key, val = line.split(None, 1)
key = converter(key)
if matcher:
- yield key[0], (key[1], val.split('-', 1)[1].strip())
+ yield key[0], (key[1], val.split("-", 1)[1].strip())
else:
- yield key, val.split('-', 1)[1].strip()
+ yield key, val.split("-", 1)[1].strip()
except ValueError as e:
- logger.error(f'failed parsing {fp!r}, line {line!r}: {e}')
+ logger.error(f"failed parsing {fp!r}, line {line!r}: {e}")
except FileNotFoundError:
pass
except ValueError as e:
- logger.error(f'failed parsing {fp!r}: {e}')
+ logger.error(f"failed parsing {fp!r}: {e}")
@klass.jit_attr
def is_empty(self):
@@ -935,9 +1060,9 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
We're more lenient than the spec and don't verify it conforms to the
specified format.
"""
- name = readfile(pjoin(self.profiles_base, 'repo_name'), none_on_missing=True)
+ name = readfile(pjoin(self.profiles_base, "repo_name"), none_on_missing=True)
if name is not None:
- name = name.split('\n', 1)[0].strip()
+ name = name.split("\n", 1)[0].strip()
return name
@klass.jit_attr
@@ -951,7 +1076,7 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
if self.config_name:
return self.config_name
# repo_name might not be parsed yet if failure occurs during init
- if repo_name := getattr(self, 'repo_name', None):
+ if repo_name := getattr(self, "repo_name", None):
return repo_name
if self.pms_repo_name:
return self.pms_repo_name
@@ -962,13 +1087,15 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
@klass.jit_attr
def updates(self):
"""Package updates for the repo defined in profiles/updates/*."""
- updates_dir = pjoin(self.profiles_base, 'updates')
+ updates_dir = pjoin(self.profiles_base, "updates")
d = pkg_updates.read_updates(updates_dir, eapi=self.eapi)
return mappings.ImmutableDict(d)
@klass.jit_attr
def categories(self):
- categories = readlines(pjoin(self.profiles_base, 'categories'), True, True, True)
+ categories = readlines(
+ pjoin(self.profiles_base, "categories"), True, True, True
+ )
if categories is not None:
return tuple(map(intern, categories))
return ()
@@ -979,7 +1106,7 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
@klass.jit_attr
def base_profile(self):
- pms_strict = 'pms' in self.profile_formats
+ pms_strict = "pms" in self.profile_formats
return profiles.EmptyRootNode(self.profiles_base, pms_strict=pms_strict)
@klass.jit_attr
@@ -987,7 +1114,7 @@ class RepoConfig(syncable.tree, klass.ImmutableInstance, metaclass=WeakInstMeta)
try:
return self.base_profile.eapi
except profiles.NonexistentProfile:
- return get_eapi('0')
+ return get_eapi("0")
@klass.jit_attr
def pkg_masks(self):
@@ -1009,16 +1136,17 @@ class SquashfsRepoConfig(RepoConfig):
def __init__(self, sqfs_file, location, *args, **kwargs):
sqfs_path = pjoin(location, sqfs_file)
- object.__setattr__(self, '_sqfs', sqfs_path)
- object.__setattr__(self, 'location', location)
+ object.__setattr__(self, "_sqfs", sqfs_path)
+ object.__setattr__(self, "location", location)
# if squashfs archive exists in the repo, try to mount it over itself
if os.path.exists(self._sqfs):
try:
self._mount_archive()
except PermissionError:
- if platform.uname().release < '4.18':
+ if platform.uname().release < "4.18":
raise repo_errors.InitializationError(
- 'fuse mounts in user namespaces require linux >= 4.18')
+ "fuse mounts in user namespaces require linux >= 4.18"
+ )
raise
super().__init__(location, *args, **kwargs)
@@ -1036,7 +1164,7 @@ class SquashfsRepoConfig(RepoConfig):
def _failed_cmd(self, process, action):
if process.returncode:
stderr = process.stderr.decode().strip().lower()
- msg = f'failed {action} squashfs archive: {stderr}'
+ msg = f"failed {action} squashfs archive: {stderr}"
if process.returncode == 1:
raise PermissionDenied(self._sqfs, msg)
else:
@@ -1045,35 +1173,38 @@ class SquashfsRepoConfig(RepoConfig):
def _mount_archive(self):
"""Mount the squashfs archive onto the repo in a mount namespace."""
# enable a user namespace if not running as root
- unshare_kwds = {'mount': True, 'user': not os.getuid() == 0}
+ unshare_kwds = {"mount": True, "user": not os.getuid() == 0}
try:
simple_unshare(**unshare_kwds)
except OSError as e:
raise repo_errors.InitializationError(
- f'namespace support unavailable: {e.strerror}')
+ f"namespace support unavailable: {e.strerror}"
+ )
# First try using mount binary to automatically handle setting up loop
# device -- this only works with real root perms since loopback device
# mounting (losetup) doesn't work in user namespaces.
- p = subprocess.run(['mount', self._sqfs, self.location], capture_output=True)
+ p = subprocess.run(["mount", self._sqfs, self.location], capture_output=True)
if p.returncode == 0:
return
elif p.returncode not in (1, 32):
# fail out if not a permissions issue (regular or loopback failure inside userns)
- self._failed_cmd(p, 'mounting')
+ self._failed_cmd(p, "mounting")
# fallback to using squashfuse
try:
p = subprocess.run(
- ['squashfuse', '-o', 'nonempty', self._sqfs, self.location],
- capture_output=True)
+ ["squashfuse", "-o", "nonempty", self._sqfs, self.location],
+ capture_output=True,
+ )
except FileNotFoundError as e:
raise repo_errors.InitializationError(
- f'failed mounting squashfs archive: {e.filename} required')
+ f"failed mounting squashfs archive: {e.filename} required"
+ )
if p.returncode:
- self._failed_cmd(p, 'mounting')
+ self._failed_cmd(p, "mounting")
def _umount_archive(self):
"""Unmount the squashfs archive."""
@@ -1082,19 +1213,22 @@ class SquashfsRepoConfig(RepoConfig):
return
except FileNotFoundError as e:
raise repo_errors.InitializationError(
- f'failed unmounting squashfs archive: {e.filename} required')
+ f"failed unmounting squashfs archive: {e.filename} required"
+ )
except OSError as e:
# fail out if not a permissions issue (regular or loopback failure inside userns)
if e.errno not in (errno.EPERM, errno.EPIPE):
raise repo_errors.InitializationError(
- f'failed unmounting squashfs archive: {e.strerror}')
+ f"failed unmounting squashfs archive: {e.strerror}"
+ )
# fallback to using fusermount
try:
- p = subprocess.run(['fusermount', '-u', self.location], capture_output=True)
+ p = subprocess.run(["fusermount", "-u", self.location], capture_output=True)
except FileNotFoundError as e:
raise repo_errors.InitializationError(
- f'failed unmounting squashfs archive: {e.filename} required')
+ f"failed unmounting squashfs archive: {e.filename} required"
+ )
if p.returncode:
- self._failed_cmd(p, 'unmounting')
+ self._failed_cmd(p, "unmounting")
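
Note on the repo_objs.py hunks above: the edits are mechanical. The formatter normalizes string literals to double quotes and, wherever a collection or call already carries a trailing comma or would overflow the line limit, explodes it to one element per line. A minimal sketch of both rules on a hypothetical module; none of these names come from the pkgcore sources.

# Hypothetical module, used only to illustrate the two rewrite rules visible
# in the hunks above; nothing here is taken from pkgcore itself.

# Quote normalization: single-quoted string literals become double-quoted.
COLORS = ("red", "green", "blue")  # was: ('red', 'green', 'blue')

# Magic trailing comma: once a trailing comma is present, or the collection
# would overflow the line limit, every element is placed on its own line.
EXPORTS = (
    "alpha",
    "beta",
    "gamma",
)
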
diff --git a/src/pkgcore/ebuild/repository.py b/src/pkgcore/ebuild/repository.py
index 9af41e997..2e27ada7a 100644
--- a/src/pkgcore/ebuild/repository.py
+++ b/src/pkgcore/ebuild/repository.py
@@ -38,12 +38,12 @@ from .eapi import get_eapi
class repo_operations(_repo_ops.operations):
-
- def _cmd_implementation_manifest(self, domain, restriction, observer,
- mirrors=False, force=False, distdir=None):
+ def _cmd_implementation_manifest(
+ self, domain, restriction, observer, mirrors=False, force=False, distdir=None
+ ):
manifest_config = self.repo.config.manifests
if manifest_config.disabled:
- observer.info(f'{self.repo.repo_id} repo has manifests disabled')
+ observer.info(f"{self.repo.repo_id} repo has manifests disabled")
return
required_chksums = set(manifest_config.required_hashes)
write_chksums = manifest_config.hashes
@@ -69,17 +69,22 @@ class repo_operations(_repo_ops.operations):
all_pkgdir_fetchables = {
pkg: {
- fetchable.filename: fetchable for fetchable in
- iflatten_instance(pkg.generate_fetchables(
- allow_missing_checksums=True,
- skip_default_mirrors=(not mirrors)),
- fetch.fetchable)
- } for pkg in self.repo.itermatch(pkgs[0].unversioned_atom)
+ fetchable.filename: fetchable
+ for fetchable in iflatten_instance(
+ pkg.generate_fetchables(
+ allow_missing_checksums=True,
+ skip_default_mirrors=(not mirrors),
+ ),
+ fetch.fetchable,
+ )
+ }
+ for pkg in self.repo.itermatch(pkgs[0].unversioned_atom)
}
# all pkgdir fetchables
- pkgdir_fetchables = dict(chain.from_iterable(
- all_pkgdir_fetchables[pkg].items() for pkg in pkgs))
+ pkgdir_fetchables = dict(
+ chain.from_iterable(all_pkgdir_fetchables[pkg].items() for pkg in pkgs)
+ )
# fetchables targeted for (re-)manifest generation
fetchables = {}
@@ -87,7 +92,8 @@ class repo_operations(_repo_ops.operations):
for filename, fetchable in pkgdir_fetchables.items():
if force or not required_chksums.issubset(fetchable.chksums):
fetchable.chksums = {
- k: v for k, v in fetchable.chksums.items() if k in chksum_set}
+ k: v for k, v in fetchable.chksums.items() if k in chksum_set
+ }
fetchables[filename] = fetchable
# Manifest files aren't necessary with thin manifests and no distfiles
@@ -97,8 +103,9 @@ class repo_operations(_repo_ops.operations):
os.remove(manifest.path)
except EnvironmentError as exc:
observer.error(
- 'failed removing old manifest: '
- f'{key}::{self.repo.repo_id}: {exc}')
+ "failed removing old manifest: "
+ f"{key}::{self.repo.repo_id}: {exc}"
+ )
ret.add(key)
continue
@@ -109,7 +116,9 @@ class repo_operations(_repo_ops.operations):
# fetch distfiles
pkg_ops = domain.pkg_operations(pkgs[0], observer=observer)
try:
- if not pkg_ops.fetch(list(fetchables.values()), observer, distdir=distdir):
+ if not pkg_ops.fetch(
+ list(fetchables.values()), observer, distdir=distdir
+ ):
ret.add(key)
continue
except OperationError:
@@ -117,12 +126,14 @@ class repo_operations(_repo_ops.operations):
try:
os.makedirs(distdir, exist_ok=True)
except OSError as exc:
- observer.error(f'failed to create distdir {distdir!r}: {exc.strerror}')
- return ('failed to create distdir', )
+ observer.error(
+ f"failed to create distdir {distdir!r}: {exc.strerror}"
+ )
+ return ("failed to create distdir",)
if not os.access(distdir, os.W_OK):
- observer.error(f'no write access to distdir: {distdir!r}')
- return ('no write access to distdir', )
+ observer.error(f"no write access to distdir: {distdir!r}")
+ return ("no write access to distdir",)
raise
@@ -130,18 +141,21 @@ class repo_operations(_repo_ops.operations):
try:
for fetchable in fetchables.values():
chksums = chksum.get_chksums(
- pjoin(distdir, fetchable.filename), *write_chksums)
+ pjoin(distdir, fetchable.filename), *write_chksums
+ )
fetchable.chksums = dict(zip(write_chksums, chksums))
except chksum.MissingChksumHandler as exc:
- observer.error(f'failed generating chksum: {exc}')
+ observer.error(f"failed generating chksum: {exc}")
ret.add(key)
break
if key not in ret:
- all_fetchables = {filename: fetchable
+ all_fetchables = {
+ filename: fetchable
for fetchables in all_pkgdir_fetchables.values()
for filename, fetchable in fetchables.items()
- if required_chksums.issubset(fetchable.chksums)}
+ if required_chksums.issubset(fetchable.chksums)
+ }
all_fetchables.update(fetchables)
observer.info(f"generating manifest: {key}::{self.repo.repo_id}")
manifest.update(sorted(all_fetchables.values()), chfs=write_chksums)
@@ -154,7 +168,7 @@ def _sort_eclasses(config, repo_config):
masters = repo_config.masters
eclasses = []
- default = config.get_default('repo_config')
+ default = config.get_default("repo_config")
if repo_config._missing_masters and default is not None:
# use default repo's eclasses for overlays with missing masters
location = default.location
@@ -165,8 +179,10 @@ def _sort_eclasses(config, repo_config):
eclasses = [location]
else:
repo_map = {
- alias: r.location for r in config.objects['repo_config'].values()
- for alias in r.aliases}
+ alias: r.location
+ for r in config.objects["repo_config"].values()
+ for alias in r.aliases
+ }
eclasses = [repo_map[x] for x in masters]
# add the repo's eclass directories if it's not specified.
@@ -176,15 +192,17 @@ def _sort_eclasses(config, repo_config):
if repo_path not in eclasses:
eclasses.append(repo_path)
- eclasses = [eclass_cache_mod.cache(pjoin(x, 'eclass'), location=location)
- for x in eclasses]
+ eclasses = [
+ eclass_cache_mod.cache(pjoin(x, "eclass"), location=location) for x in eclasses
+ ]
if len(eclasses) == 1:
eclasses = eclasses[0]
else:
eclasses = list(reversed(eclasses))
eclasses = eclass_cache_mod.StackedCaches(
- eclasses, location=location, eclassdir=location)
+ eclasses, location=location, eclassdir=location
+ )
return eclasses
@@ -192,13 +210,12 @@ class ProvidesRepo(util.SimpleTree):
"""Fake, installed repo populated with entries from package.provided."""
class PkgProvidedParent:
-
def __init__(self, **kwds):
self.__dict__.update(kwds)
class PkgProvided(ebuild_src.base):
- __slots__ = ('arches', 'use')
+ __slots__ = ("arches", "use")
package_is_real = False
__inst_caching__ = True
@@ -212,16 +229,22 @@ class ProvidesRepo(util.SimpleTree):
object.__setattr__(self, "use", [])
object.__setattr__(self, "arches", arches)
object.__setattr__(self, "data", {"SLOT": "0"})
- object.__setattr__(self, "eapi", get_eapi('0'))
+ object.__setattr__(self, "eapi", get_eapi("0"))
- def __init__(self, pkgs, arches, repo_id='package.provided'):
+ def __init__(self, pkgs, arches, repo_id="package.provided"):
d = {}
for pkg in pkgs:
- d.setdefault(pkg.category, {}).setdefault(pkg.package, []).append(pkg.fullver)
+ d.setdefault(pkg.category, {}).setdefault(pkg.package, []).append(
+ pkg.fullver
+ )
intermediate_parent = self.PkgProvidedParent()
super().__init__(
- d, pkg_klass=partial(self.PkgProvided, intermediate_parent, arches=arches),
- livefs=True, frozen=True, repo_id=repo_id)
+ d,
+ pkg_klass=partial(self.PkgProvided, intermediate_parent, arches=arches),
+ livefs=True,
+ frozen=True,
+ repo_id=repo_id,
+ )
intermediate_parent._parent_repo = self
if not d:
@@ -248,24 +271,34 @@ class UnconfiguredTree(prototype.tree):
configurables = ("domain", "settings")
package_factory = staticmethod(ebuild_src.generate_new_factory)
enable_gpg = False
- extension = '.ebuild'
+ extension = ".ebuild"
operations_kls = repo_operations
- pkgcore_config_type = ConfigHint({
- 'location': 'str',
- 'eclass_cache': 'ref:eclass_cache',
- 'masters': 'refs:repo',
- 'cache': 'refs:cache',
- 'default_mirrors': 'list',
- 'allow_missing_manifests': 'bool',
- 'repo_config': 'ref:repo_config',
+ pkgcore_config_type = ConfigHint(
+ {
+ "location": "str",
+ "eclass_cache": "ref:eclass_cache",
+ "masters": "refs:repo",
+ "cache": "refs:cache",
+ "default_mirrors": "list",
+ "allow_missing_manifests": "bool",
+ "repo_config": "ref:repo_config",
},
- typename='repo')
-
- def __init__(self, location, eclass_cache=None, masters=(), cache=(),
- default_mirrors=None, allow_missing_manifests=False, package_cache=True,
- repo_config=None):
+ typename="repo",
+ )
+
+ def __init__(
+ self,
+ location,
+ eclass_cache=None,
+ masters=(),
+ cache=(),
+ default_mirrors=None,
+ allow_missing_manifests=False,
+ package_cache=True,
+ repo_config=None,
+ ):
"""
:param location: on disk location of the tree
:param cache: sequence of :obj:`pkgcore.cache.template.database` instances
@@ -288,7 +321,9 @@ class UnconfiguredTree(prototype.tree):
# profiles dir is required by PMS
if not os.path.isdir(self.config.profiles_base):
- raise errors.InvalidRepo(f'missing required profiles dir: {self.location!r}')
+ raise errors.InvalidRepo(
+ f"missing required profiles dir: {self.location!r}"
+ )
# verify we support the repo's EAPI
if not self.is_supported:
@@ -296,7 +331,8 @@ class UnconfiguredTree(prototype.tree):
if eclass_cache is None:
eclass_cache = eclass_cache_mod.cache(
- pjoin(self.location, 'eclass'), location=self.location)
+ pjoin(self.location, "eclass"), location=self.location
+ )
self.eclass_cache = eclass_cache
self.masters = tuple(masters)
@@ -323,19 +359,21 @@ class UnconfiguredTree(prototype.tree):
self.cache = cache
self._allow_missing_chksums = allow_missing_manifests
self.package_class = self.package_factory(
- self, cache, self.eclass_cache, self.mirrors, self.default_mirrors)
+ self, cache, self.eclass_cache, self.mirrors, self.default_mirrors
+ )
self._shared_pkg_cache = WeakValueDictionary()
- self._bad_masked = RestrictionRepo(repo_id='bad_masked')
+ self._bad_masked = RestrictionRepo(repo_id="bad_masked")
self.projects_xml = repo_objs.LocalProjectsXml(
- pjoin(self.location, 'metadata', 'projects.xml'))
+ pjoin(self.location, "metadata", "projects.xml")
+ )
- repo_id = klass.alias_attr('config.repo_id')
- repo_name = klass.alias_attr('config.repo_name')
- aliases = klass.alias_attr('config.aliases')
- eapi = klass.alias_attr('config.eapi')
- is_supported = klass.alias_attr('config.eapi.is_supported')
- external = klass.alias_attr('config.external')
- pkg_masks = klass.alias_attr('config.pkg_masks')
+ repo_id = klass.alias_attr("config.repo_id")
+ repo_name = klass.alias_attr("config.repo_name")
+ aliases = klass.alias_attr("config.aliases")
+ eapi = klass.alias_attr("config.eapi")
+ is_supported = klass.alias_attr("config.eapi.is_supported")
+ external = klass.alias_attr("config.external")
+ pkg_masks = klass.alias_attr("config.pkg_masks")
def configure(self, *args):
return ConfiguredTree(self, *args)
@@ -343,8 +381,7 @@ class UnconfiguredTree(prototype.tree):
@klass.jit_attr
def known_arches(self):
"""Return all known arches for a repo (including masters)."""
- return frozenset(chain.from_iterable(
- r.config.known_arches for r in self.trees))
+ return frozenset(chain.from_iterable(r.config.known_arches for r in self.trees))
def path_restrict(self, path):
"""Return a restriction from a given path in a repo.
@@ -362,16 +399,17 @@ class UnconfiguredTree(prototype.tree):
path_chunks = path.split(os.path.sep)
else:
path = os.path.realpath(os.path.abspath(path))
- relpath = path[len(os.path.realpath(self.location)):].strip('/')
+ relpath = path[len(os.path.realpath(self.location)) :].strip("/")
path_chunks = relpath.split(os.path.sep)
if os.path.isfile(path):
- if not path.endswith('.ebuild'):
+ if not path.endswith(".ebuild"):
raise ValueError(f"file is not an ebuild: {path!r}")
elif len(path_chunks) != 3:
# ebuild isn't in a category/PN directory
raise ValueError(
- f"ebuild not in the correct directory layout: {path!r}")
+ f"ebuild not in the correct directory layout: {path!r}"
+ )
restrictions = []
@@ -381,8 +419,12 @@ class UnconfiguredTree(prototype.tree):
if path_chunks[0] in self.categories:
restrictions.append(restricts.CategoryDep(path_chunks[0]))
restrictions.append(restricts.PackageDep(path_chunks[1]))
- base = cpv.VersionedCPV(f"{path_chunks[0]}/{os.path.splitext(path_chunks[2])[0]}")
- restrictions.append(restricts.VersionMatch('=', base.version, rev=base.revision))
+ base = cpv.VersionedCPV(
+ f"{path_chunks[0]}/{os.path.splitext(path_chunks[2])[0]}"
+ )
+ restrictions.append(
+ restricts.VersionMatch("=", base.version, rev=base.revision)
+ )
except IndexError:
pass
return packages.AndRestriction(*restrictions)
@@ -407,7 +449,7 @@ class UnconfiguredTree(prototype.tree):
@klass.jit_attr
def thirdpartymirrors(self):
mirrors = {}
- fp = pjoin(self.location, 'profiles', 'thirdpartymirrors')
+ fp = pjoin(self.location, "profiles", "thirdpartymirrors")
try:
for k, v in read_dict(fp, splitter=None).items():
v = v.split()
@@ -446,9 +488,15 @@ class UnconfiguredTree(prototype.tree):
@klass.jit_attr
def category_dirs(self):
try:
- return frozenset(map(intern, filterfalse(
- self.false_categories.__contains__,
- (x for x in listdir_dirs(self.base) if not x.startswith('.')))))
+ return frozenset(
+ map(
+ intern,
+ filterfalse(
+ self.false_categories.__contains__,
+ (x for x in listdir_dirs(self.base) if not x.startswith(".")),
+ ),
+ )
+ )
except EnvironmentError as e:
logger.error(f"failed listing categories: {e}")
return ()
@@ -459,7 +507,9 @@ class UnconfiguredTree(prototype.tree):
if optional_category:
# raise KeyError
return ()
- categories = frozenset(chain.from_iterable(repo.config.categories for repo in self.trees))
+ categories = frozenset(
+ chain.from_iterable(repo.config.categories for repo in self.trees)
+ )
if categories:
return categories
return self.category_dirs
@@ -475,7 +525,8 @@ class UnconfiguredTree(prototype.tree):
except EnvironmentError as e:
category = pjoin(self.base, category.lstrip(os.path.sep))
raise KeyError(
- f'failed fetching packages for category {category}: {e}') from e
+ f"failed fetching packages for category {category}: {e}"
+ ) from e
def _get_versions(self, catpkg):
"""Determine available versions for a given package.
@@ -483,18 +534,21 @@ class UnconfiguredTree(prototype.tree):
Ebuilds with mismatched or invalid package names are ignored.
"""
cppath = pjoin(self.base, catpkg[0], catpkg[1])
- pkg = f'{catpkg[-1]}-'
+ pkg = f"{catpkg[-1]}-"
lp = len(pkg)
extension = self.extension
ext_len = -len(extension)
try:
return tuple(
- x[lp:ext_len] for x in listdir_files(cppath)
- if x[ext_len:] == extension and x[:lp] == pkg)
+ x[lp:ext_len]
+ for x in listdir_files(cppath)
+ if x[ext_len:] == extension and x[:lp] == pkg
+ )
except EnvironmentError as e:
raise KeyError(
- "failed fetching versions for package %s: %s" %
- (pjoin(self.base, '/'.join(catpkg)), str(e))) from e
+ "failed fetching versions for package %s: %s"
+ % (pjoin(self.base, "/".join(catpkg)), str(e))
+ ) from e
def _pkg_filter(self, raw, error_callback, pkgs):
"""Filter packages with bad metadata."""
@@ -509,14 +563,18 @@ class UnconfiguredTree(prototype.tree):
if raw:
yield pkg
- elif self._bad_masked.has_match(pkg.versioned_atom) and error_callback is not None:
+ elif (
+ self._bad_masked.has_match(pkg.versioned_atom)
+ and error_callback is not None
+ ):
error_callback(self._bad_masked[pkg.versioned_atom])
else:
# check pkgs for unsupported/invalid EAPIs and bad metadata
try:
if not pkg.is_supported:
exc = pkg_errors.MetadataException(
- pkg, 'eapi', f"EAPI '{pkg.eapi}' is not supported")
+ pkg, "eapi", f"EAPI '{pkg.eapi}' is not supported"
+ )
self._bad_masked[pkg.versioned_atom] = exc
if error_callback is not None:
error_callback(exc)
@@ -533,18 +591,21 @@ class UnconfiguredTree(prototype.tree):
yield pkg
def itermatch(self, *args, **kwargs):
- raw = 'raw_pkg_cls' in kwargs or not kwargs.get('versioned', True)
- error_callback = kwargs.pop('error_callback', None)
- kwargs.setdefault('pkg_filter', partial(self._pkg_filter, raw, error_callback))
+ raw = "raw_pkg_cls" in kwargs or not kwargs.get("versioned", True)
+ error_callback = kwargs.pop("error_callback", None)
+ kwargs.setdefault("pkg_filter", partial(self._pkg_filter, raw, error_callback))
return super().itermatch(*args, **kwargs)
def _get_ebuild_path(self, pkg):
return pjoin(
- self.base, pkg.category, pkg.package,
- f"{pkg.package}-{pkg.fullver}{self.extension}")
+ self.base,
+ pkg.category,
+ pkg.package,
+ f"{pkg.package}-{pkg.fullver}{self.extension}",
+ )
def _get_ebuild_src(self, pkg):
- return local_source(self._get_ebuild_path(pkg), encoding='utf8')
+ return local_source(self._get_ebuild_path(pkg), encoding="utf8")
def _get_shared_pkg_data(self, category, package):
key = (category, package)
@@ -557,14 +618,16 @@ class UnconfiguredTree(prototype.tree):
return o
def _get_metadata_xml(self, category, package):
- return repo_objs.LocalMetadataXml(pjoin(
- self.base, category, package, "metadata.xml"))
+ return repo_objs.LocalMetadataXml(
+ pjoin(self.base, category, package, "metadata.xml")
+ )
def _get_manifest(self, category, package):
- return digest.Manifest(pjoin(
- self.base, category, package, "Manifest"),
+ return digest.Manifest(
+ pjoin(self.base, category, package, "Manifest"),
thin=self.config.manifests.thin,
- enforce_gpg=self.enable_gpg)
+ enforce_gpg=self.enable_gpg,
+ )
def _get_digests(self, pkg, allow_missing=False):
if self.config.manifests.disabled:
@@ -576,11 +639,14 @@ class UnconfiguredTree(prototype.tree):
except pkg_errors.ParseChksumError as e:
if e.missing and allow_missing:
return allow_missing, {}
- raise pkg_errors.MetadataException(pkg, 'manifest', str(e))
+ raise pkg_errors.MetadataException(pkg, "manifest", str(e))
def __repr__(self):
return "<ebuild %s location=%r @%#8x>" % (
- self.__class__.__name__, self.base, id(self))
+ self.__class__.__name__,
+ self.base,
+ id(self),
+ )
@klass.jit_attr
def deprecated(self):
@@ -592,30 +658,41 @@ class UnconfiguredTree(prototype.tree):
def _regen_operation_helper(self, **kwds):
return _RegenOpHelper(
- self, force=bool(kwds.get('force', False)),
- eclass_caching=bool(kwds.get('eclass_caching', True)))
+ self,
+ force=bool(kwds.get("force", False)),
+ eclass_caching=bool(kwds.get("eclass_caching", True)),
+ )
def __getstate__(self):
d = self.__dict__.copy()
- del d['_shared_pkg_cache']
+ del d["_shared_pkg_cache"]
return d
def __setstate__(self, state):
self.__dict__ = state.copy()
- self.__dict__['_shared_pkg_cache'] = WeakValueDictionary()
+ self.__dict__["_shared_pkg_cache"] = WeakValueDictionary()
@configurable(
- typename='repo',
+ typename="repo",
types={
- 'repo_config': 'ref:repo_config', 'cache': 'refs:cache',
- 'eclass_cache': 'ref:eclass_cache',
- 'default_mirrors': 'list',
- 'allow_missing_manifests': 'bool'},
- requires_config='config')
-def tree(config, repo_config, cache=(), eclass_cache=None,
- default_mirrors=None, allow_missing_manifests=False,
- tree_cls=UnconfiguredTree):
+ "repo_config": "ref:repo_config",
+ "cache": "refs:cache",
+ "eclass_cache": "ref:eclass_cache",
+ "default_mirrors": "list",
+ "allow_missing_manifests": "bool",
+ },
+ requires_config="config",
+)
+def tree(
+ config,
+ repo_config,
+ cache=(),
+ eclass_cache=None,
+ default_mirrors=None,
+ allow_missing_manifests=False,
+ tree_cls=UnconfiguredTree,
+):
"""Initialize an unconfigured ebuild repository."""
repo_id = repo_config.repo_id
repo_path = repo_config.location
@@ -623,46 +700,55 @@ def tree(config, repo_config, cache=(), eclass_cache=None,
if repo_config.masters is None:
# if it's None, that means it's not a standalone, and is PMS, or misconfigured.
# empty tuple means it's a standalone repository
- default = config.get_default('repo_config')
+ default = config.get_default("repo_config")
if default is None:
raise errors.InitializationError(
- f"repo {repo_id!r} at {repo_path!r} requires missing default repo")
+ f"repo {repo_id!r} at {repo_path!r} requires missing default repo"
+ )
# map external repo ids to their config names
config_map = {
- r.repo_id: r.location for r in config.objects['repo_config'].values() if r.external}
+ r.repo_id: r.location
+ for r in config.objects["repo_config"].values()
+ if r.external
+ }
try:
masters = []
missing = []
for r in repo_config.masters:
- if repo := config.objects['repo'].get(config_map.get(r, r)):
+ if repo := config.objects["repo"].get(config_map.get(r, r)):
masters.append(repo)
else:
missing.append(r)
except RecursionError:
repo_id = repo_config.repo_id
- masters = ', '.join(repo_config.masters)
+ masters = ", ".join(repo_config.masters)
raise errors.InitializationError(
- f'{repo_id!r} repo has cyclic masters: {masters}')
+ f"{repo_id!r} repo has cyclic masters: {masters}"
+ )
if missing:
- missing = ', '.join(map(repr, sorted(missing)))
+ missing = ", ".join(map(repr, sorted(missing)))
raise errors.InitializationError(
- f'repo {repo_id!r} at path {repo_path!r} has missing masters: {missing}')
+ f"repo {repo_id!r} at path {repo_path!r} has missing masters: {missing}"
+ )
if eclass_cache is None:
eclass_cache = _sort_eclasses(config, repo_config)
return tree_cls(
- repo_config.location, eclass_cache=eclass_cache, masters=masters, cache=cache,
+ repo_config.location,
+ eclass_cache=eclass_cache,
+ masters=masters,
+ cache=cache,
default_mirrors=default_mirrors,
allow_missing_manifests=allow_missing_manifests,
- repo_config=repo_config)
+ repo_config=repo_config,
+ )
class _RegenOpHelper:
-
def __init__(self, repo, force=False, eclass_caching=True):
self.force = force
self.eclass_caching = eclass_caching
@@ -695,8 +781,16 @@ class ConfiguredTree(configured.tree):
config_wrappables = {
x: klass.alias_method("evaluate_depset")
for x in (
- "bdepend", "depend", "rdepend", "pdepend", "idepend",
- "fetchables", "license", "src_uri", "restrict", "required_use",
+ "bdepend",
+ "depend",
+ "rdepend",
+ "pdepend",
+ "idepend",
+ "fetchables",
+ "license",
+ "src_uri",
+ "restrict",
+ "required_use",
)
}
@@ -705,22 +799,24 @@ class ConfiguredTree(configured.tree):
:param raw_repo: :obj:`UnconfiguredTree` instance
:param domain_settings: environment settings to bind
"""
- required_settings = {'USE', 'CHOST'}
+ required_settings = {"USE", "CHOST"}
if missing_settings := required_settings.difference(domain_settings):
s = pluralism(missing_settings)
raise errors.InitializationError(
f"{self.__class__} missing required setting{s}: "
- f"{', '.join(map(repr, missing_settings))}")
+ f"{', '.join(map(repr, missing_settings))}"
+ )
- chost = domain_settings['CHOST']
- scope_update = {'chost': chost}
+ chost = domain_settings["CHOST"]
+ scope_update = {"chost": chost}
scope_update.update(
- (x, domain_settings.get(x.upper(), chost))
- for x in ('cbuild', 'ctarget'))
+ (x, domain_settings.get(x.upper(), chost)) for x in ("cbuild", "ctarget")
+ )
scope_update.update(
- (x, domain_settings.get(x.upper(), ''))
- for x in ('cflags', 'cxxflags', 'ldflags'))
- scope_update['operations_callback'] = self._generate_pkg_operations
+ (x, domain_settings.get(x.upper(), ""))
+ for x in ("cflags", "cxxflags", "ldflags")
+ )
+ scope_update["operations_callback"] = self._generate_pkg_operations
# update wrapped attr funcs requiring access to the class instance
for k, v in self.config_wrappables.items():
@@ -728,7 +824,8 @@ class ConfiguredTree(configured.tree):
self.config_wrappables[k] = getattr(self, v)
super().__init__(
- raw_repo, self.config_wrappables, pkg_kls_injections=scope_update)
+ raw_repo, self.config_wrappables, pkg_kls_injections=scope_update
+ )
self.domain = domain
self.domain_settings = domain_settings
@@ -736,13 +833,16 @@ class ConfiguredTree(configured.tree):
def _wrap_attr(config_wrappables):
"""Register wrapped attrs that require class instance access."""
+
def _wrap_func(func):
@wraps(func)
def wrapped(*args, **kwargs):
return func(*args, **kwargs)
- attr = func.__name__.lstrip('_')
+
+ attr = func.__name__.lstrip("_")
config_wrappables[attr] = func.__name__
return wrapped
+
return _wrap_func
@_wrap_attr(config_wrappables)
@@ -762,20 +862,22 @@ class ConfiguredTree(configured.tree):
# determine available user patches for >= EAPI 6
if pkg.eapi.options.user_patches:
patches = []
- patchroot = pjoin(self.domain.config_dir, 'patches')
+ patchroot = pjoin(self.domain.config_dir, "patches")
patch_dirs = [
pkg.PF,
- f'{pkg.PF}:{pkg.slot}',
+ f"{pkg.PF}:{pkg.slot}",
pkg.P,
- f'{pkg.P}:{pkg.slot}',
+ f"{pkg.P}:{pkg.slot}",
pkg.PN,
- f'{pkg.PN}:{pkg.slot}',
+ f"{pkg.PN}:{pkg.slot}",
]
for d in patch_dirs:
for root, _dirs, files in os.walk(pjoin(patchroot, pkg.category, d)):
files = (
- pjoin(root, f) for f in sorted(files, key=locale.strxfrm)
- if f.endswith(('.diff', '.patch')))
+ pjoin(root, f)
+ for f in sorted(files, key=locale.strxfrm)
+ if f.endswith((".diff", ".patch"))
+ )
patches.append((root, tuple(files)))
return tuple(patches)
return None
@@ -788,7 +890,9 @@ class ConfiguredTree(configured.tree):
return {
"initial_settings": enabled,
"unchangable_settings": self._delayed_iuse(
- self._get_delayed_immutable, pkg, immutable)}
+ self._get_delayed_immutable, pkg, immutable
+ ),
+ }
def _generate_pkg_operations(self, domain, pkg, **kwds):
return ebd.src_operations(domain, pkg, pkg.repo.eclass_cache, **kwds)
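
The repository.py hunks above apply the same line-length rule to signatures and calls: anything that no longer fits on one line is split to one argument per line, with a trailing comma and the closing parenthesis dedented back to the opening indent. A minimal sketch on an invented function; make_repo is hypothetical and not a pkgcore API.

# Hypothetical function, shown only to illustrate how overlong signatures are
# reflowed, as in the UnconfiguredTree.__init__ and tree() hunks above.
def make_repo(
    location,
    eclass_cache=None,
    masters=(),
    cache=(),
    default_mirrors=None,
    allow_missing_manifests=False,
):
    # One parameter per line keeps any future addition to a single-line diff.
    return locals()
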
diff --git a/src/pkgcore/ebuild/resolver.py b/src/pkgcore/ebuild/resolver.py
index ce250618f..03b40f3fb 100644
--- a/src/pkgcore/ebuild/resolver.py
+++ b/src/pkgcore/ebuild/resolver.py
@@ -13,9 +13,15 @@ from ..restrictions import packages, values
from .atom import atom
-def upgrade_resolver(vdbs, dbs, verify_vdb=True, nodeps=False,
- force_replace=False, resolver_cls=plan.merge_plan,
- **kwds):
+def upgrade_resolver(
+ vdbs,
+ dbs,
+ verify_vdb=True,
+ nodeps=False,
+ force_replace=False,
+ resolver_cls=plan.merge_plan,
+ **kwds,
+):
"""
generate and configure a resolver for upgrading all processed nodes.
@@ -44,8 +50,14 @@ def upgrade_resolver(vdbs, dbs, verify_vdb=True, nodeps=False,
def downgrade_resolver(
- vdbs, dbs, verify_vdb=True, nodeps=False, force_replace=False,
- resolver_cls=plan.merge_plan, **kwds):
+ vdbs,
+ dbs,
+ verify_vdb=True,
+ nodeps=False,
+ force_replace=False,
+ resolver_cls=plan.merge_plan,
+ **kwds,
+):
"""
generate and configure a resolver for downgrading all processed nodes.
@@ -58,7 +70,8 @@ def downgrade_resolver(
:return: :obj:`pkgcore.resolver.plan.merge_plan` instance
"""
restrict = packages.OrRestriction(
- *list(atom(f'>={x.cpvstr}') for x in chain.from_iterable(vdbs)))
+ *list(atom(f">={x.cpvstr}") for x in chain.from_iterable(vdbs))
+ )
f = partial(plan.merge_plan.prefer_downgrade_version_strategy, restrict)
dbs = list(map(partial(misc.restrict_repo, restrict), dbs))
# hack.
@@ -74,9 +87,15 @@ def downgrade_resolver(
return resolver_cls(dbs + vdbs, plan.pkg_sort_highest, f, **kwds)
-def min_install_resolver(vdbs, dbs, verify_vdb=True, nodeps=False,
- force_replace=False, resolver_cls=plan.merge_plan,
- **kwds):
+def min_install_resolver(
+ vdbs,
+ dbs,
+ verify_vdb=True,
+ nodeps=False,
+ force_replace=False,
+ resolver_cls=plan.merge_plan,
+ **kwds,
+):
"""
Resolver that tries to minimize the number of changes while installing.
@@ -102,18 +121,18 @@ def min_install_resolver(vdbs, dbs, verify_vdb=True, nodeps=False,
if force_replace:
resolver_cls = generate_replace_resolver_kls(resolver_cls)
- return resolver_cls(vdbs + dbs, plan.pkg_sort_highest,
- plan.merge_plan.prefer_reuse_strategy, **kwds)
+ return resolver_cls(
+ vdbs + dbs, plan.pkg_sort_highest, plan.merge_plan.prefer_reuse_strategy, **kwds
+ )
+
_vdb_restrict = packages.OrRestriction(
packages.PackageRestriction("repo.livefs", values.EqualityMatch(False)),
packages.AndRestriction(
- packages.PackageRestriction(
- "category", values.StrExactMatch("virtual")),
- packages.PackageRestriction(
- "package_is_real", values.EqualityMatch(False)),
- ),
- )
+ packages.PackageRestriction("category", values.StrExactMatch("virtual")),
+ packages.PackageRestriction("package_is_real", values.EqualityMatch(False)),
+ ),
+)
class empty_tree_merge_plan(plan.merge_plan):
@@ -130,18 +149,20 @@ class empty_tree_merge_plan(plan.merge_plan):
super().__init__(dbs, *args, **kwds)
# XXX *cough*, hack.
self.default_dbs = multiplex.tree(
- *[x for x in self.all_raw_dbs if not x.livefs])
+ *[x for x in self.all_raw_dbs if not x.livefs]
+ )
def generate_replace_resolver_kls(resolver_kls):
-
class replace_resolver(resolver_kls):
overriding_resolver_kls = resolver_kls
_vdb_restriction = _vdb_restrict
def add_atoms(self, restricts, **kwds):
- restricts = [packages.KeyedAndRestriction(self._vdb_restriction, x, key=x.key)
- for x in restricts]
+ restricts = [
+ packages.KeyedAndRestriction(self._vdb_restriction, x, key=x.key)
+ for x in restricts
+ ]
return self.overriding_resolver_kls.add_atoms(self, restricts, **kwds)
return replace_resolver
diff --git a/src/pkgcore/ebuild/restricts.py b/src/pkgcore/ebuild/restricts.py
index 3b847fa55..9c05ed327 100644
--- a/src/pkgcore/ebuild/restricts.py
+++ b/src/pkgcore/ebuild/restricts.py
@@ -5,8 +5,15 @@
atom version restrict
"""
-__all__ = ("CategoryDep", "PackageDep", "RepositoryDep", "SlotDep",
- "StaticUseDep", "SubSlotDep", "UseDepDefault", "VersionMatch"
+__all__ = (
+ "CategoryDep",
+ "PackageDep",
+ "RepositoryDep",
+ "SlotDep",
+ "StaticUseDep",
+ "SubSlotDep",
+ "UseDepDefault",
+ "VersionMatch",
)
from snakeoil.klass import generic_equality
@@ -28,7 +35,7 @@ class _VersionMatch(restriction.base, metaclass=generic_equality):
__slots__ = ("ver", "rev", "vals", "droprev", "negate")
- __attr_comparison__ = ('negate', 'rev', 'droprev', 'vals')
+ __attr_comparison__ = ("negate", "rev", "droprev", "vals")
type = restriction.value_type
attr = "fullver"
@@ -62,13 +69,13 @@ class _VersionMatch(restriction.base, metaclass=generic_equality):
sf(self, "rev", rev)
if operator != "~" and operator not in self._convert_str2op:
raise errors.InvalidVersion(
- self.ver, self.rev, f"invalid operator, '{operator}'")
+ self.ver, self.rev, f"invalid operator, '{operator}'"
+ )
sf(self, "negate", negate)
if operator == "~":
if ver is None:
- raise ValueError(
- "for ~ op, version must be something other then None")
+ raise ValueError("for ~ op, version must be something other then None")
sf(self, "droprev", True)
sf(self, "vals", (0,))
else:
@@ -92,7 +99,7 @@ class _VersionMatch(restriction.base, metaclass=generic_equality):
if self.negate:
n = "not "
else:
- n = ''
+ n = ""
if self.droprev or not self.rev:
return f"ver {n}{s} {self.ver}"
@@ -104,7 +111,11 @@ class _VersionMatch(restriction.base, metaclass=generic_equality):
if self.rev:
s += f"-r{self.rev}"
return "<%s %s negate=%s droprrev=%s @#x>" % (
- self.__class__.__name__, s, self.negate, self.droprev)
+ self.__class__.__name__,
+ s,
+ self.negate,
+ self.droprev,
+ )
@staticmethod
def _convert_ops(inst):
@@ -118,8 +129,11 @@ class _VersionMatch(restriction.base, metaclass=generic_equality):
if self is other:
return True
if isinstance(other, self.__class__):
- if self.droprev != other.droprev or self.ver != other.ver \
- or self.rev != other.rev:
+ if (
+ self.droprev != other.droprev
+ or self.ver != other.ver
+ or self.rev != other.rev
+ ):
return False
return self._convert_ops(self) == self._convert_ops(other)
@@ -136,7 +150,7 @@ class VersionMatch(packages.PackageRestriction):
def __init__(self, *args, **kwds):
v = _VersionMatch(*args, **kwds)
- super().__init__('fullver', v, negate=kwds.get('negate', False))
+ super().__init__("fullver", v, negate=kwds.get("negate", False))
def match(self, pkg, *args, **kwds):
return self.restriction.match(pkg)
@@ -149,7 +163,7 @@ class SlotDep(packages.PackageRestriction):
def __init__(self, slot, **kwds):
v = values.StrExactMatch(slot)
- super().__init__('slot', v, negate=kwds.get('negate', False))
+ super().__init__("slot", v, negate=kwds.get("negate", False))
class SubSlotDep(packages.PackageRestriction):
@@ -159,7 +173,7 @@ class SubSlotDep(packages.PackageRestriction):
def __init__(self, subslot, **kwds):
v = values.StrExactMatch(subslot)
- super().__init__('subslot', v, negate=kwds.get('negate', False))
+ super().__init__("subslot", v, negate=kwds.get("negate", False))
class CategoryDep(packages.PackageRestriction):
@@ -168,7 +182,7 @@ class CategoryDep(packages.PackageRestriction):
__inst_caching__ = True
def __init__(self, category, negate=False):
- super().__init__('category', values.StrExactMatch(category, negate=negate))
+ super().__init__("category", values.StrExactMatch(category, negate=negate))
class PackageDep(packages.PackageRestriction):
@@ -177,7 +191,7 @@ class PackageDep(packages.PackageRestriction):
__inst_caching__ = True
def __init__(self, package, negate=False):
- super().__init__('package', values.StrExactMatch(package, negate=negate))
+ super().__init__("package", values.StrExactMatch(package, negate=negate))
class RepositoryDep(packages.PackageRestriction):
@@ -186,7 +200,7 @@ class RepositoryDep(packages.PackageRestriction):
__inst_caching__ = True
def __init__(self, repo_id, negate=False):
- super().__init__('repo.repo_id', values.StrExactMatch(repo_id), negate=negate)
+ super().__init__("repo.repo_id", values.StrExactMatch(repo_id), negate=negate)
class StaticUseDep(packages.PackageRestriction):
@@ -209,15 +223,15 @@ class StaticUseDep(packages.PackageRestriction):
else:
v = values.AlwaysTrue
- super().__init__('use', v)
+ super().__init__("use", v)
class _UseDepDefaultContainment(values.ContainmentMatch):
- __slots__ = ('if_missing',)
+ __slots__ = ("if_missing",)
def __init__(self, if_missing, vals, negate=False):
- object.__setattr__(self, 'if_missing', bool(if_missing))
+ object.__setattr__(self, "if_missing", bool(if_missing))
super().__init__(vals, negate=negate, match_all=True)
def match(self, val):
@@ -236,7 +250,9 @@ class _UseDepDefaultContainment(values.ContainmentMatch):
# recall that negate is unfortunately a double negative in labeling...
reduced_vals = reduced_vals.intersection(iuse_stripped)
if reduced_vals:
- return values.ContainmentMatch.match(self, use, _values_override=reduced_vals)
+ return values.ContainmentMatch.match(
+ self, use, _values_override=reduced_vals
+ )
# nothing to match means all are missing, but the default makes them considered a match.
return True
@@ -245,12 +261,14 @@ class _UseDepDefaultContainment(values.ContainmentMatch):
# see comments in .match for clarification of logic.
iuse_stripped, use = val
if reduced_vals.issubset(iuse_stripped):
- return values.ContainmentMatch.force_False(self, pkg, 'use', use)
+ return values.ContainmentMatch.force_False(self, pkg, "use", use)
if self.if_missing == self.negate:
return False
reduced_vals = reduced_vals.intersection(iuse_stripped)
if reduced_vals:
- return values.ContainmentMatch.force_False(self, pkg, 'use', use, reduced_vals)
+ return values.ContainmentMatch.force_False(
+ self, pkg, "use", use, reduced_vals
+ )
return True
def force_True(self, pkg, attr, val):
@@ -258,12 +276,14 @@ class _UseDepDefaultContainment(values.ContainmentMatch):
# see comments in .match for clarification of logic.
iuse_stripped, use = val
if reduced_vals.issubset(iuse_stripped):
- return values.ContainmentMatch.force_True(self, pkg, 'use', use)
+ return values.ContainmentMatch.force_True(self, pkg, "use", use)
if self.if_missing == self.negate:
return False
reduced_vals = reduced_vals.intersection(iuse_stripped)
if reduced_vals:
- return values.ContainmentMatch.force_True(self, pkg, 'use', use, reduced_vals)
+ return values.ContainmentMatch.force_True(
+ self, pkg, "use", use, reduced_vals
+ )
return True
@@ -287,7 +307,7 @@ class UseDepDefault(packages.PackageRestrictionMulti):
else:
v = values.AlwaysTrue
- super().__init__(('iuse_stripped', 'use'), v)
+ super().__init__(("iuse_stripped", "use"), v)
def _parse_nontransitive_use(sequence):
@@ -295,15 +315,15 @@ def _parse_nontransitive_use(sequence):
default_on = [[], []]
normal = [[], []]
for token in sequence:
- if token[-1] == ')':
- if token[-2] == '+':
+ if token[-1] == ")":
+ if token[-2] == "+":
trg = default_on
else:
trg = default_off
token = token[:-3]
else:
trg = normal
- if token[0] == '-':
+ if token[0] == "-":
trg[0].append(token[1:])
else:
trg[1].append(token)
diff --git a/src/pkgcore/ebuild/triggers.py b/src/pkgcore/ebuild/triggers.py
index f54fa46ed..10c51464c 100644
--- a/src/pkgcore/ebuild/triggers.py
+++ b/src/pkgcore/ebuild/triggers.py
@@ -1,10 +1,19 @@
"""gentoo/ebuild specific triggers"""
__all__ = (
- "collapse_envd", "string_collapse_envd", "env_update",
- "ConfigProtectInstall", "ConfigProtectUninstall", "preinst_contents_reset",
- "CollisionProtect", "ProtectOwned", "install_into_symdir_protect",
- "InfoRegen", "SFPerms", "FixImageSymlinks", "GenerateTriggers",
+ "collapse_envd",
+ "string_collapse_envd",
+ "env_update",
+ "ConfigProtectInstall",
+ "ConfigProtectUninstall",
+ "preinst_contents_reset",
+ "CollisionProtect",
+ "ProtectOwned",
+ "install_into_symdir_protect",
+ "InfoRegen",
+ "SFPerms",
+ "FixImageSymlinks",
+ "GenerateTriggers",
)
import fnmatch
@@ -21,16 +30,39 @@ from ..merge import const, errors, triggers
from ..restrictions import values
from ..system import libtool
-colon_parsed = frozenset([
- "ADA_INCLUDE_PATH", "ADA_OBJECTS_PATH", "INFODIR", "INFOPATH", "LDPATH",
- "MANPATH", "PATH", "PYTHONPATH", "PKG_CONFIG_PATH", "ROOTPATH"
-])
+colon_parsed = frozenset(
+ [
+ "ADA_INCLUDE_PATH",
+ "ADA_OBJECTS_PATH",
+ "INFODIR",
+ "INFOPATH",
+ "LDPATH",
+ "MANPATH",
+ "PATH",
+ "PYTHONPATH",
+ "PKG_CONFIG_PATH",
+ "ROOTPATH",
+ ]
+)
-incrementals = frozenset([
- 'ADA_INCLUDE_PATH', 'ADA_OBJECTS_PATH', 'CLASSPATH', 'CONFIG_PROTECT',
- 'CONFIG_PROTECT_MASK', 'INFODIR', 'INFOPATH', 'KDEDIRS', 'LDPATH',
- 'MANPATH', 'PATH', 'PYTHONPATH', 'ROOTPATH', 'PKG_CONFIG_PATH'
-])
+incrementals = frozenset(
+ [
+ "ADA_INCLUDE_PATH",
+ "ADA_OBJECTS_PATH",
+ "CLASSPATH",
+ "CONFIG_PROTECT",
+ "CONFIG_PROTECT_MASK",
+ "INFODIR",
+ "INFOPATH",
+ "KDEDIRS",
+ "LDPATH",
+ "MANPATH",
+ "PATH",
+ "PYTHONPATH",
+ "ROOTPATH",
+ "PKG_CONFIG_PATH",
+ ]
+)
def collapse_envd(base):
@@ -41,8 +73,13 @@ def collapse_envd(base):
pass
else:
for x in env_d_files:
- if x.endswith(".bak") or x.endswith("~") or x.startswith("._cfg") \
- or len(x) <= 2 or not x[0:2].isdigit():
+ if (
+ x.endswith(".bak")
+ or x.endswith("~")
+ or x.startswith("._cfg")
+ or len(x) <= 2
+ or not x[0:2].isdigit()
+ ):
continue
d = read_bash_dict(pjoin(base, x))
# inefficient, but works.
@@ -76,11 +113,13 @@ def collapse_envd(base):
collapsed_d[k] = v[-1]
continue
if k in loc_colon_parsed:
- collapsed_d[k] = [_f for _f in iflatten_instance(
- x.split(':') for x in v) if _f]
+ collapsed_d[k] = [
+ _f for _f in iflatten_instance(x.split(":") for x in v) if _f
+ ]
else:
- collapsed_d[k] = [_f for _f in iflatten_instance(
- x.split() for x in v) if _f]
+ collapsed_d[k] = [
+ _f for _f in iflatten_instance(x.split() for x in v) if _f
+ ]
return collapsed_d, loc_incrementals, loc_colon_parsed
@@ -91,19 +130,18 @@ def string_collapse_envd(envd_dict, incrementals, colon_incrementals):
if k not in incrementals:
continue
if k in colon_incrementals:
- envd_dict[k] = ':'.join(v)
+ envd_dict[k] = ":".join(v)
else:
- envd_dict[k] = ' '.join(v)
+ envd_dict[k] = " ".join(v)
-def update_ldso(ld_search_path, offset='/'):
+def update_ldso(ld_search_path, offset="/"):
# we do an atomic rename instead of open and write quick
# enough (avoid the race iow)
- fp = pjoin(offset, 'etc', 'ld.so.conf')
- new_f = AtomicWriteFile(
- fp, uid=os_data.root_uid, gid=os_data.root_uid, perms=0o644)
+ fp = pjoin(offset, "etc", "ld.so.conf")
+ new_f = AtomicWriteFile(fp, uid=os_data.root_uid, gid=os_data.root_uid, perms=0o644)
new_f.write("# automatically generated, edit env.d files instead\n")
- new_f.writelines(x.strip()+"\n" for x in ld_search_path)
+ new_f.writelines(x.strip() + "\n" for x in ld_search_path)
new_f.close()
@@ -118,13 +156,19 @@ def perform_env_update(root, skip_ldso_update=False):
new_f = AtomicWriteFile(
pjoin(root, "etc", "profile.env"),
- uid=os_data.root_uid, gid=os_data.root_gid, perms=0o644)
+ uid=os_data.root_uid,
+ gid=os_data.root_gid,
+ perms=0o644,
+ )
new_f.write("# autogenerated. update env.d instead\n")
new_f.writelines(f'export {k}="{d[k]}"\n' for k in sorted(d))
new_f.close()
new_f = AtomicWriteFile(
pjoin(root, "etc", "profile.csh"),
- uid=os_data.root_uid, gid=os_data.root_gid, perms=0o644)
+ uid=os_data.root_uid,
+ gid=os_data.root_gid,
+ perms=0o644,
+ )
new_f.write("# autogenerated, update env.d instead\n")
new_f.writelines(f'setenv {k}="{d[k]}"\n' for k in sorted(d))
new_f.close()
@@ -134,7 +178,7 @@ class env_update(triggers.base):
required_csets = ()
priority = 5
- _hooks = ('post_unmerge', 'post_merge')
+ _hooks = ("post_unmerge", "post_merge")
def trigger(self, engine):
perform_env_update(engine.offset)
@@ -160,8 +204,10 @@ def gen_config_protect_filter(offset, extra_protects=(), extra_disables=()):
collapsed_d.setdefault("CONFIG_PROTECT", []).extend(extra_protects)
collapsed_d.setdefault("CONFIG_PROTECT_MASK", []).extend(extra_disables)
- r = [values.StrGlobMatch(normpath(x).rstrip("/") + "/")
- for x in set(stable_unique(collapsed_d["CONFIG_PROTECT"] + ["/etc"]))]
+ r = [
+ values.StrGlobMatch(normpath(x).rstrip("/") + "/")
+ for x in set(stable_unique(collapsed_d["CONFIG_PROTECT"] + ["/etc"]))
+ ]
if len(r) > 1:
r = values.OrRestriction(*r)
else:
@@ -169,13 +215,12 @@ def gen_config_protect_filter(offset, extra_protects=(), extra_disables=()):
neg = stable_unique(collapsed_d["CONFIG_PROTECT_MASK"])
if neg:
if len(neg) == 1:
- r2 = values.StrGlobMatch(normpath(neg[0]).rstrip("/") + "/",
- negate=True)
+ r2 = values.StrGlobMatch(normpath(neg[0]).rstrip("/") + "/", negate=True)
else:
r2 = values.OrRestriction(
negate=True,
- *[values.StrGlobMatch(normpath(x).rstrip("/") + "/")
- for x in set(neg)])
+ *[values.StrGlobMatch(normpath(x).rstrip("/") + "/") for x in set(neg)],
+ )
r = values.AndRestriction(r, r2)
return r
@@ -190,8 +235,7 @@ def gen_collision_ignore_filter(offset, extra_ignores=()):
for i, x in enumerate(ignored):
if not x.endswith("/*") and os.path.isdir(x):
ignored[i] = ignored.rstrip("/") + "/*"
- ignored = [values.StrRegex(fnmatch.translate(x))
- for x in stable_unique(ignored)]
+ ignored = [values.StrRegex(fnmatch.translate(x)) for x in stable_unique(ignored)]
if len(ignored) == 1:
return ignored[0]
return values.OrRestriction(*ignored)
@@ -199,9 +243,9 @@ def gen_collision_ignore_filter(offset, extra_ignores=()):
class ConfigProtectInstall(triggers.base):
- required_csets = ('install_existing', 'install')
+ required_csets = ("install_existing", "install")
priority = 100
- _hooks = ('pre_merge',)
+ _hooks = ("pre_merge",)
def __init__(self, extra_protects=(), extra_disables=()):
super().__init__()
@@ -217,7 +261,8 @@ class ConfigProtectInstall(triggers.base):
def trigger(self, engine, existing_cset, install_cset):
# hackish, but it works.
protected_filter = gen_config_protect_filter(
- engine.offset, self.extra_protects, self.extra_disables).match
+ engine.offset, self.extra_protects, self.extra_disables
+ ).match
ignore_filter = gen_collision_ignore_filter(engine.offset).match
protected = {}
@@ -226,16 +271,19 @@ class ConfigProtectInstall(triggers.base):
replacement = install_cset[x]
if not simple_chksum_compare(replacement, x):
protected.setdefault(
- pjoin(engine.offset,
- os.path.dirname(x.location).lstrip(os.path.sep)),
- []).append((os.path.basename(replacement.location),
- replacement))
+ pjoin(
+ engine.offset,
+ os.path.dirname(x.location).lstrip(os.path.sep),
+ ),
+ [],
+ ).append((os.path.basename(replacement.location), replacement))
for dir_loc, entries in protected.items():
updates = {x[0]: [] for x in entries}
try:
- existing = sorted(x for x in listdir_files(dir_loc)
- if x.startswith("._cfg"))
+ existing = sorted(
+ x for x in listdir_files(dir_loc) if x.startswith("._cfg")
+ )
except FileNotFoundError:
# this shouldn't occur.
continue
@@ -257,8 +305,9 @@ class ConfigProtectInstall(triggers.base):
# check for any updates with the same chksums.
count = 0
for cfg_count, cfg_fname in updates[fname]:
- if simple_chksum_compare(livefs.gen_obj(
- pjoin(dir_loc, cfg_fname)), entry):
+ if simple_chksum_compare(
+ livefs.gen_obj(pjoin(dir_loc, cfg_fname)), entry
+ ):
count = cfg_count
break
count = max(count, cfg_count + 1)
@@ -276,9 +325,9 @@ class ConfigProtectInstall(triggers.base):
class ConfigProtectInstall_restore(triggers.base):
- required_csets = ('install',)
+ required_csets = ("install",)
priority = 10
- _hooks = ('post_merge',)
+ _hooks = ("post_merge",)
def __init__(self, renames_dict):
super().__init__()
@@ -296,8 +345,8 @@ class ConfigProtectInstall_restore(triggers.base):
class ConfigProtectUninstall(triggers.base):
- required_csets = ('uninstall_existing', 'uninstall')
- _hooks = ('pre_unmerge',)
+ required_csets = ("uninstall_existing", "uninstall")
+ _hooks = ("pre_unmerge",)
def trigger(self, engine, existing_cset, uninstall_cset):
protected_filter = gen_config_protect_filter(engine.offset).match
@@ -322,11 +371,11 @@ class ConfigProtectUninstall(triggers.base):
class UninstallIgnore(triggers.base):
required_csets = {
- const.REPLACE_MODE: ('uninstall_existing', 'uninstall', 'old_cset'),
- const.UNINSTALL_MODE: ('uninstall_existing', 'uninstall'),
+ const.REPLACE_MODE: ("uninstall_existing", "uninstall", "old_cset"),
+ const.UNINSTALL_MODE: ("uninstall_existing", "uninstall"),
}
- _hooks = ('pre_unmerge',)
+ _hooks = ("pre_unmerge",)
_engine_types = triggers.UNINSTALLING_MODES
def __init__(self, uninstall_ignore=()):
@@ -334,12 +383,13 @@ class UninstallIgnore(triggers.base):
self.uninstall_ignore = uninstall_ignore
def trigger(self, engine, existing_cset, uninstall_cset, old_cset={}):
- ignore = [values.StrRegex(fnmatch.translate(x), match=True)
- for x in self.uninstall_ignore]
+ ignore = [
+ values.StrRegex(fnmatch.translate(x), match=True)
+ for x in self.uninstall_ignore
+ ]
ignore_filter = values.OrRestriction(*ignore).match
- remove = [x for x in existing_cset.iterfiles()
- if ignore_filter(x.location)]
+ remove = [x for x in existing_cset.iterfiles() if ignore_filter(x.location)]
for x in remove:
# don't remove matching files being uninstalled
del uninstall_cset[x]
@@ -349,9 +399,9 @@ class UninstallIgnore(triggers.base):
class preinst_contents_reset(triggers.base):
- required_csets = ('new_cset',)
+ required_csets = ("new_cset",)
priority = 1
- _hooks = ('pre_merge',)
+ _hooks = ("pre_merge",)
def __init__(self, format_op):
super().__init__()
@@ -362,7 +412,7 @@ class preinst_contents_reset(triggers.base):
# modifications to the fs
cset.clear()
cs = engine.new._parent.scan_contents(self.format_op.env["D"])
- if engine.offset != '/':
+ if engine.offset != "/":
cs = cs.insert_offset(engine.offset)
cset.update(cs)
@@ -371,11 +421,11 @@ class FileCollision(triggers.base):
"""Generic livefs file collision trigger."""
required_csets = {
- const.INSTALL_MODE: ('install', 'install_existing'),
- const.REPLACE_MODE: ('install', 'install_existing', 'old_cset')
+ const.INSTALL_MODE: ("install", "install_existing"),
+ const.REPLACE_MODE: ("install", "install_existing", "old_cset"),
}
- _hooks = ('sanity_check',)
+ _hooks = ("sanity_check",)
_engine_types = triggers.INSTALLING_MODES
suppress_exceptions = False
@@ -402,9 +452,11 @@ class FileCollision(triggers.base):
# hackish, but it works.
protected_filter = gen_config_protect_filter(
- engine.offset, self.extra_protects, self.extra_disables).match
+ engine.offset, self.extra_protects, self.extra_disables
+ ).match
ignore_filter = gen_collision_ignore_filter(
- engine.offset, self.extra_ignores).match
+ engine.offset, self.extra_ignores
+ ).match
ignores = []
for x in colliding:
@@ -426,15 +478,15 @@ class FileCollision(triggers.base):
class CollisionProtect(FileCollision):
-
def collision(self, colliding):
raise errors.BlockModification(
- self, "collision-protect: file(s) already exist: ( %s )" %
- ', '.join(repr(x) for x in sorted(colliding)))
+ self,
+ "collision-protect: file(s) already exist: ( %s )"
+ % ", ".join(repr(x) for x in sorted(colliding)),
+ )
class ProtectOwned(FileCollision):
-
def __init__(self, vdb, *args):
super().__init__(*args)
self.vdb = vdb
@@ -451,11 +503,16 @@ class ProtectOwned(FileCollision):
if collisions:
pkg_collisions = [
- "( %s ) owned by '%s'" %
- (', '.join(repr(x) for x in sorted(collisions[pkg_cpvstr])), pkg_cpvstr)
- for pkg_cpvstr in sorted(collisions.keys())]
+ "( %s ) owned by '%s'"
+ % (
+ ", ".join(repr(x) for x in sorted(collisions[pkg_cpvstr])),
+ pkg_cpvstr,
+ )
+ for pkg_cpvstr in sorted(collisions.keys())
+ ]
raise errors.BlockModification(
- self, "protect-owned: %s" % (', '.join(pkg_collisions),))
+ self, "protect-owned: %s" % (", ".join(pkg_collisions),)
+ )
# TODO: output a file override warning here
@@ -463,11 +520,11 @@ class ProtectOwned(FileCollision):
class install_into_symdir_protect(triggers.base):
required_csets = {
- const.INSTALL_MODE: ('install', 'install_existing'),
- const.REPLACE_MODE: ('install', 'install_existing', 'old_cset')
+ const.INSTALL_MODE: ("install", "install_existing"),
+ const.REPLACE_MODE: ("install", "install_existing", "old_cset"),
}
- _hooks = ('sanity_check',)
+ _hooks = ("sanity_check",)
_engine_types = triggers.INSTALLING_MODES
def __init__(self, extra_protects=(), extra_disables=()):
@@ -487,15 +544,16 @@ class install_into_symdir_protect(triggers.base):
if linkset:
for inst_file in install.iterfiles():
for sym in linkset:
- if inst_file.location.startswith(sym.location + '/'):
+ if inst_file.location.startswith(sym.location + "/"):
install_into_symdir.append(inst_file)
if install_into_symdir:
raise errors.BlockModification(
self,
"file(s) installed into symlinked dir, will break when "
- "removing files from the original dir: ( %s )" %
- ', '.join(repr(x) for x in sorted(install_into_symdir)))
+ "removing files from the original dir: ( %s )"
+ % ", ".join(repr(x) for x in sorted(install_into_symdir)),
+ )
class InfoRegen(triggers.InfoRegen):
@@ -508,15 +566,13 @@ class InfoRegen(triggers.InfoRegen):
if x not in engine.hooks:
continue
# yucky, but works.
- wipes = [y for y in engine.hooks[x]
- if y.label == triggers.InfoRegen._label]
+ wipes = [y for y in engine.hooks[x] if y.label == triggers.InfoRegen._label]
for y in wipes:
engine.hooks[x].remove(y)
triggers.InfoRegen.register(self, engine)
def should_skip_directory(self, basepath, files):
- return any(x.startswith(".keepinfodir")
- for x in files)
+ return any(x.startswith(".keepinfodir") for x in files)
def trigger(self, engine, *args):
self.engine = engine
@@ -535,8 +591,8 @@ class InfoRegen(triggers.InfoRegen):
class SFPerms(triggers.base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = triggers.INSTALLING_MODES
def trigger(self, engine, cset):
@@ -545,22 +601,21 @@ class SFPerms(triggers.base):
if x.mode & 0o4000:
if x.mode & 0o044:
engine.observer.warn(
- "sfperms: dropping group/world read "
- f"due to SetGID: {x!r}")
+ "sfperms: dropping group/world read " f"due to SetGID: {x!r}"
+ )
resets.append(x.change_attributes(mode=x.mode & ~0o44))
if x.mode & 0o2000:
if x.mode & 0o004:
engine.observer.warn(
- f"sfperms: dropping world read due to SetUID: {x!r}")
+ f"sfperms: dropping world read due to SetUID: {x!r}"
+ )
resets.append(x.change_attributes(mode=x.mode & ~0o04))
cset.update(resets)
def register_multilib_strict_trigger(opts):
locations = opts.get("MULTILIB_STRICT_DIRS")
- exempt = opts.get(
- "MULTILIB_STRICT_EXEMPT",
- "(perl5|gcc|gcc-lib)")
+ exempt = opts.get("MULTILIB_STRICT_EXEMPT", "(perl5|gcc|gcc-lib)")
deny_pattern = opts.get("MULTILIB_STRICT_DENY")
if None in (locations, deny_pattern):
return
@@ -581,8 +636,8 @@ def register_multilib_strict_trigger(opts):
class FixImageSymlinks(triggers.base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
def __init__(self, format_op):
super().__init__()
@@ -598,13 +653,10 @@ class FixImageSymlinks(triggers.base):
d_len = len(d)
# drop the leading ${D}, and force an abspath via '/'
- cset.update(
- x.change_attributes(target=pjoin('/', x.target[d_len:]))
- for x in l)
+ cset.update(x.change_attributes(target=pjoin("/", x.target[d_len:])) for x in l)
class GenerateTriggers:
-
def __init__(self, domain, settings):
self.domain = domain
self.opts = {}
@@ -614,8 +666,11 @@ class GenerateTriggers:
# ebuild env.
config_lists = (
- "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "COLLISION_IGNORE",
- "INSTALL_MASK", "UNINSTALL_IGNORE",
+ "CONFIG_PROTECT",
+ "CONFIG_PROTECT_MASK",
+ "COLLISION_IGNORE",
+ "INSTALL_MASK",
+ "UNINSTALL_IGNORE",
)
for x in config_lists:
self.opts[x] = settings.pop(x, [])
@@ -624,8 +679,12 @@ class GenerateTriggers:
config_opts = (
"PKGDIR",
- "MULTILIB_STRICT_DIRS", "MULTILIB_STRICT_EXEMPT", "MULTILIB_STRICT_DENY",
- "DEB_REPO_ROOT", "DEB_MAINAINER", "DEB_ARCHITECTURE",
+ "MULTILIB_STRICT_DIRS",
+ "MULTILIB_STRICT_EXEMPT",
+ "MULTILIB_STRICT_DENY",
+ "DEB_REPO_ROOT",
+ "DEB_MAINAINER",
+ "DEB_ARCHITECTURE",
)
for x in config_opts:
self.opts[x] = settings.pop(x, None)
@@ -634,18 +693,27 @@ class GenerateTriggers:
yield env_update()
yield ConfigProtectInstall(
- self.opts["CONFIG_PROTECT"], self.opts["CONFIG_PROTECT_MASK"])
+ self.opts["CONFIG_PROTECT"], self.opts["CONFIG_PROTECT_MASK"]
+ )
yield ConfigProtectUninstall()
if "collision-protect" in self.domain.features:
yield CollisionProtect(
- self.opts["CONFIG_PROTECT"], self.opts["CONFIG_PROTECT_MASK"],
- self.opts["COLLISION_IGNORE"])
-
- if "protect-owned" in self.domain.features and "collision-protect" not in self.domain.features:
+ self.opts["CONFIG_PROTECT"],
+ self.opts["CONFIG_PROTECT_MASK"],
+ self.opts["COLLISION_IGNORE"],
+ )
+
+ if (
+ "protect-owned" in self.domain.features
+ and "collision-protect" not in self.domain.features
+ ):
yield ProtectOwned(
- self.domain.installed_repos, self.opts["CONFIG_PROTECT"],
- self.opts["CONFIG_PROTECT_MASK"], self.opts["COLLISION_IGNORE"])
+ self.domain.installed_repos,
+ self.opts["CONFIG_PROTECT"],
+ self.opts["CONFIG_PROTECT_MASK"],
+ self.opts["COLLISION_IGNORE"],
+ )
if "multilib-strict" in self.domain.features:
yield register_multilib_strict_trigger(self.opts)
@@ -654,7 +722,8 @@ class GenerateTriggers:
yield SFPerms()
yield install_into_symdir_protect(
- self.opts["CONFIG_PROTECT"], self.opts["CONFIG_PROTECT_MASK"])
+ self.opts["CONFIG_PROTECT"], self.opts["CONFIG_PROTECT_MASK"]
+ )
# TODO: support multiple binpkg repo targets?
pkgdir = self.opts.get("PKGDIR", None)
@@ -667,22 +736,27 @@ class GenerateTriggers:
except IndexError:
target_repo = None
if target_repo is not None:
- if 'buildpkg' in self.domain.features:
- yield triggers.SavePkg(pristine='no', target_repo=target_repo)
- elif 'pristine-buildpkg' in self.domain.features:
- yield triggers.SavePkg(pristine='yes', target_repo=target_repo)
- elif 'buildsyspkg' in self.domain.features:
+ if "buildpkg" in self.domain.features:
+ yield triggers.SavePkg(pristine="no", target_repo=target_repo)
+ elif "pristine-buildpkg" in self.domain.features:
+ yield triggers.SavePkg(pristine="yes", target_repo=target_repo)
+ elif "buildsyspkg" in self.domain.features:
yield triggers.SavePkgIfInPkgset(
- pristine='yes', target_repo=target_repo, pkgset=self.domain.profile.system)
- elif 'unmerge-backup' in self.domain.features:
+ pristine="yes",
+ target_repo=target_repo,
+ pkgset=self.domain.profile.system,
+ )
+ elif "unmerge-backup" in self.domain.features:
yield triggers.SavePkgUnmerging(target_repo=target_repo)
- if 'splitdebug' in self.domain.features:
- yield triggers.BinaryDebug(mode='split', compress=('compressdebug' in self.domain.features))
- elif 'strip' in self.domain.features or 'nostrip' not in self.domain.features:
- yield triggers.BinaryDebug(mode='strip')
+ if "splitdebug" in self.domain.features:
+ yield triggers.BinaryDebug(
+ mode="split", compress=("compressdebug" in self.domain.features)
+ )
+ elif "strip" in self.domain.features or "nostrip" not in self.domain.features:
+ yield triggers.BinaryDebug(mode="strip")
- if '-fixlafiles' not in self.domain.features:
+ if "-fixlafiles" not in self.domain.features:
yield libtool.FixLibtoolArchivesTrigger()
for x in ("man", "info", "doc"):
diff --git a/src/pkgcore/exceptions.py b/src/pkgcore/exceptions.py
index 875a6d449..8a907742d 100644
--- a/src/pkgcore/exceptions.py
+++ b/src/pkgcore/exceptions.py
@@ -12,18 +12,17 @@ class PkgcoreUserException(PkgcoreException, UserException):
class PermissionDenied(PermissionError, PkgcoreUserException):
-
def __init__(self, path, message=None, write=False):
if message is None:
if write:
- message = 'write access required'
+ message = "write access required"
else:
- message = 'read access required'
+ message = "read access required"
self.path = path
self.message = message
def __str__(self):
- s = f'permission denied to {self.path!r}'
+ s = f"permission denied to {self.path!r}"
if self.message:
- s += f'; {self.message}'
+ s += f"; {self.message}"
return s
diff --git a/src/pkgcore/fetch/__init__.py b/src/pkgcore/fetch/__init__.py
index 7150a44b7..39526c6e6 100644
--- a/src/pkgcore/fetch/__init__.py
+++ b/src/pkgcore/fetch/__init__.py
@@ -28,13 +28,17 @@ class fetchable(metaclass=generic_equality):
self.filename = filename
def __str__(self):
- chksums = ', '.join(self.chksums)
- return f'({self.filename!r}, {self.uri!r}, {chksums})'
+ chksums = ", ".join(self.chksums)
+ return f"({self.filename!r}, {self.uri!r}, {chksums})"
def __repr__(self):
return "<%s filename=%r uri=%r chksums=%r @%#8x>" % (
- self.__class__.__name__, self.filename, self.uri, self.chksums,
- id(self))
+ self.__class__.__name__,
+ self.filename,
+ self.uri,
+ self.chksums,
+ id(self),
+ )
def __lt__(self, other):
return self.filename < other.filename
@@ -52,7 +56,7 @@ class fetchable(metaclass=generic_equality):
class mirror(metaclass=generic_equality):
"""uri source representing a mirror tier"""
- __attr_comparison__ = ('mirror_name', 'mirrors')
+ __attr_comparison__ = ("mirror_name", "mirrors")
__slots__ = ("mirrors", "mirror_name")
@@ -112,14 +116,16 @@ class uri_list:
if not isinstance(mirror_inst, mirror):
raise TypeError("mirror must be a pkgcore.fetch.mirror instance")
if sub_uri is not None:
- self._uri_source.append((mirror_inst, sub_uri.lstrip('/')))
+ self._uri_source.append((mirror_inst, sub_uri.lstrip("/")))
else:
self._uri_source.append(mirror_inst)
def remove_mirrors(self):
"""Return a new URI source list after dropping all mirror-based URIs."""
uri_list = self.__class__(self.filename)
- uri_list._uri_source = tuple(x for x in self._uri_source if not isinstance(x, mirror))
+ uri_list._uri_source = tuple(
+ x for x in self._uri_source if not isinstance(x, mirror)
+ )
return uri_list
def add_uri(self, uri):
@@ -160,7 +166,7 @@ class uri_list:
i += 1
def __str__(self):
- uris = ', '.join(str(x) for x in self._uri_source)
+ uris = ", ".join(str(x) for x in self._uri_source)
return f"file: {self.filename}, uri: {uris}"
def __bool__(self):
@@ -179,8 +185,10 @@ class uri_list:
def visit_mirrors(self, invert=False, treat_default_as_mirror=True):
def is_mirror(item):
- return isinstance(item, mirror) and \
- treat_default_as_mirror == isinstance(item, default_mirror)
+ return isinstance(item, mirror) and treat_default_as_mirror == isinstance(
+ item, default_mirror
+ )
+
for item in self._uri_source:
if isinstance(item, tuple):
if invert != is_mirror(item[0]):
diff --git a/src/pkgcore/fetch/base.py b/src/pkgcore/fetch/base.py
index 9ffbb4ab7..296a8079a 100644
--- a/src/pkgcore/fetch/base.py
+++ b/src/pkgcore/fetch/base.py
@@ -12,7 +12,6 @@ from . import errors
class fetcher:
-
def _verify(self, file_location, target, all_chksums=True, handlers=None):
"""Internal function for derivatives.
@@ -35,7 +34,8 @@ class fetcher:
handlers = get_handlers(target.chksums)
except MissingChksumHandler as e:
raise errors.MissingChksumHandler(
- f'missing required checksum handler: {e}')
+ f"missing required checksum handler: {e}"
+ )
if all_chksums:
missing = set(target.chksums).difference(handlers)
if missing:
@@ -48,14 +48,18 @@ class fetcher:
if val != target.chksums["size"]:
if val < target.chksums["size"]:
raise errors.FetchFailed(
- file_location, 'file is too small', resumable=True)
+ file_location, "file is too small", resumable=True
+ )
raise errors.ChksumFailure(
- file_location, chksum='size', expected=target.chksums["size"], value=val)
+ file_location,
+ chksum="size",
+ expected=target.chksums["size"],
+ value=val,
+ )
elif not os.path.exists(file_location):
raise errors.MissingDistfile(file_location)
elif not os.stat(file_location).st_size:
- raise errors.FetchFailed(
- file_location, 'file is empty', resumable=False)
+ raise errors.FetchFailed(file_location, "file is empty", resumable=False)
chfs = set(target.chksums).intersection(handlers)
chfs.discard("size")
@@ -65,14 +69,16 @@ class fetcher:
val = handlers[x](file_location)
if val != target.chksums[x]:
raise errors.ChksumFailure(
- file_location, chksum=x, expected=target.chksums[x], value=val)
+ file_location, chksum=x, expected=target.chksums[x], value=val
+ )
else:
desired_vals = [target.chksums[x] for x in chfs]
calced = get_chksums(file_location, *chfs)
for desired, got, chf in zip(desired_vals, calced, chfs):
if desired != got:
raise errors.ChksumFailure(
- file_location, chksum=chf, expected=desired, value=got)
+ file_location, chksum=chf, expected=desired, value=got
+ )
def __call__(self, fetchable):
if not fetchable.uri:
diff --git a/src/pkgcore/fetch/custom.py b/src/pkgcore/fetch/custom.py
index 99030ccd2..c1f673a3b 100644
--- a/src/pkgcore/fetch/custom.py
+++ b/src/pkgcore/fetch/custom.py
@@ -2,7 +2,10 @@
fetcher class that pulls files via executing another program to do the fetching
"""
-__all__ = ("MalformedCommand", "fetcher",)
+__all__ = (
+ "MalformedCommand",
+ "fetcher",
+)
import os
@@ -15,22 +18,35 @@ from . import base, errors, fetchable
class MalformedCommand(errors.FetchError):
-
def __init__(self, command):
- super().__init__(f'fetchcommand is malformed: {command}')
+ super().__init__(f"fetchcommand is malformed: {command}")
self.command = command
class fetcher(base.fetcher):
pkgcore_config_type = ConfigHint(
- {'userpriv': 'bool', 'required_chksums': 'list',
- 'distdir': 'str', 'command': 'str', 'resume_command': 'str'},
- allow_unknowns=True)
-
- def __init__(self, distdir, command, resume_command=None,
- required_chksums=None, userpriv=True, attempts=10,
- readonly=False, **extra_env):
+ {
+ "userpriv": "bool",
+ "required_chksums": "list",
+ "distdir": "str",
+ "command": "str",
+ "resume_command": "str",
+ },
+ allow_unknowns=True,
+ )
+
+ def __init__(
+ self,
+ distdir,
+ command,
+ resume_command=None,
+ required_chksums=None,
+ userpriv=True,
+ attempts=10,
+ readonly=False,
+ **extra_env,
+ ):
"""
:param distdir: directory to download files to
:type distdir: string
@@ -91,13 +107,12 @@ class fetcher(base.fetcher):
else on disk location of the copied file
"""
if not isinstance(target, fetchable):
- raise TypeError(
- f"target must be fetchable instance/derivative: {target}")
+ raise TypeError(f"target must be fetchable instance/derivative: {target}")
path = pjoin(self.distdir, target.filename)
uris = iter(target.uri)
last_exc = RuntimeError("fetching failed for an unknown reason")
- spawn_opts = {'umask': 0o002, 'env': self.extra_env}
+ spawn_opts = {"umask": 0o002, "env": self.extra_env}
if self.userpriv and is_userpriv_capable():
spawn_opts.update({"uid": portage_uid, "gid": portage_gid})
@@ -125,11 +140,12 @@ class fetcher(base.fetcher):
# fetcher's exit code, trust our chksums instead.
try:
spawn_bash(
- command % {"URI": next(uris), "FILE": target.filename},
- **spawn_opts)
+ command % {"URI": next(uris), "FILE": target.filename}, **spawn_opts
+ )
except StopIteration:
raise errors.FetchFailed(
- target.filename, "ran out of urls to fetch from")
+ target.filename, "ran out of urls to fetch from"
+ )
else:
raise last_exc
diff --git a/src/pkgcore/fetch/errors.py b/src/pkgcore/fetch/errors.py
index a92cf6a39..16636a926 100644
--- a/src/pkgcore/fetch/errors.py
+++ b/src/pkgcore/fetch/errors.py
@@ -10,15 +10,12 @@ class FetchError(PkgcoreUserException):
class UnmodifiableFile(FetchError):
-
- def __init__(self, filename, extra=''):
- super().__init__(
- f'unable to update file {filename}, unmodifiable {extra}')
+ def __init__(self, filename, extra=""):
+ super().__init__(f"unable to update file {filename}, unmodifiable {extra}")
self.filename = filename
class FetchFailed(FetchError):
-
def __init__(self, filename, message, resumable=False):
super().__init__(message)
self.filename = filename
@@ -30,7 +27,6 @@ class FetchFailed(FetchError):
class MissingDistfile(FetchFailed):
-
def __init__(self, filename):
super().__init__(filename, "doesn't exist", resumable=True)
diff --git a/src/pkgcore/fs/contents.py b/src/pkgcore/fs/contents.py
index 0202543cf..434bd5bea 100644
--- a/src/pkgcore/fs/contents.py
+++ b/src/pkgcore/fs/contents.py
@@ -23,9 +23,11 @@ def change_offset_rewriter(orig_offset, new_offset, iterable):
# slip in the '/' default to force it to still generate a
# full path still
yield x.change_attributes(
- location=npf(pjoin(new_offset, x.location[offset_len:].lstrip(path_sep))))
+ location=npf(pjoin(new_offset, x.location[offset_len:].lstrip(path_sep)))
+ )
-offset_rewriter = partial(change_offset_rewriter, '/')
+
+offset_rewriter = partial(change_offset_rewriter, "/")
def check_instance(obj):
@@ -36,9 +38,9 @@ def check_instance(obj):
class contentsSet(metaclass=generic_equality):
"""set of :class:`pkgcore.fs.fs.fsBase` objects"""
- __attr_comparison__ = ('_dict',)
- __dict_kls__ = dict
+ __attr_comparison__ = ("_dict",)
+ __dict_kls__ = dict
def __init__(self, initial=None, mutable=True):
@@ -54,14 +56,14 @@ class contentsSet(metaclass=generic_equality):
def __str__(self):
name = self.__class__.__name__
- contents = ', '.join(map(str, self))
- return f'{name}([{contents}])'
+ contents = ", ".join(map(str, self))
+ return f"{name}([{contents}])"
def __repr__(self):
name = self.__class__.__name__
- contents = ', '.join(map(repr, self))
+ contents = ", ".join(map(repr, self))
# this should include the id among other things
- return f'{name}([{contents}])'
+ return f"{name}([{contents}])"
def add(self, obj):
@@ -73,8 +75,7 @@ class contentsSet(metaclass=generic_equality):
if not self.mutable:
# weird, but keeping with set.
- raise AttributeError(
- f'{self.__class__} is frozen; no add functionality')
+ raise AttributeError(f"{self.__class__} is frozen; no add functionality")
if not fs.isfs_obj(obj):
raise TypeError(f"'{obj}' is not a fs.fsBase class")
self._dict[obj.location] = obj
@@ -91,8 +92,7 @@ class contentsSet(metaclass=generic_equality):
if not self.mutable:
# weird, but keeping with set.
- raise AttributeError(
- f'{self.__class__} is frozen; no remove functionality')
+ raise AttributeError(f"{self.__class__} is frozen; no remove functionality")
if fs.isfs_obj(obj):
del self._dict[obj.location]
else:
@@ -124,8 +124,7 @@ class contentsSet(metaclass=generic_equality):
"""
if not self.mutable:
# weird, but keeping with set.
- raise AttributeError(
- f'{self.__class__} is frozen; no clear functionality')
+ raise AttributeError(f"{self.__class__} is frozen; no clear functionality")
self._dict.clear()
@staticmethod
@@ -142,18 +141,19 @@ class contentsSet(metaclass=generic_equality):
f = fs.isfs_obj
for x in iterable:
if not f(x):
- raise ValueError(f'must be an fsBase derivative: got {x!r}')
+ raise ValueError(f"must be an fsBase derivative: got {x!r}")
yield x
def difference(self, other):
- if not hasattr(other, '__contains__'):
+ if not hasattr(other, "__contains__"):
other = set(self._convert_loc(other))
- return contentsSet((x for x in self if x.location not in other),
- mutable=self.mutable)
+ return contentsSet(
+ (x for x in self if x.location not in other), mutable=self.mutable
+ )
def difference_update(self, other):
if not self.mutable:
- raise TypeError(f'immutable type {self!r}')
+ raise TypeError(f"immutable type {self!r}")
rem = self.remove
for x in other:
@@ -161,13 +161,12 @@ class contentsSet(metaclass=generic_equality):
rem(x)
def intersection(self, other):
- return contentsSet((x for x in other if x in self),
- mutable=self.mutable)
+ return contentsSet((x for x in other if x in self), mutable=self.mutable)
def intersection_update(self, other):
if not self.mutable:
- raise TypeError(f'immutable type {self!r}')
- if not hasattr(other, '__contains__'):
+ raise TypeError(f"immutable type {self!r}")
+ if not hasattr(other, "__contains__"):
other = set(self._convert_loc(other))
l = [x for x in self if x.location not in other]
@@ -175,17 +174,17 @@ class contentsSet(metaclass=generic_equality):
self.remove(x)
def issubset(self, other):
- if not hasattr(other, '__contains__'):
+ if not hasattr(other, "__contains__"):
other = set(self._convert_loc(other))
return all(x in other for x in self._dict)
def issuperset(self, other):
- if not hasattr(other, '__contains__'):
+ if not hasattr(other, "__contains__"):
other = set(self._convert_loc(other))
return all(x in self for x in other)
def isdisjoint(self, other):
- if not hasattr(other, '__contains__'):
+ if not hasattr(other, "__contains__"):
other = set(self._convert_loc(other))
return not any(x in other for x in self._dict)
@@ -204,13 +203,13 @@ class contentsSet(metaclass=generic_equality):
c = contentsSet(mutable=True)
c.update(self)
c.symmetric_difference_update(other)
- object.__setattr__(c, 'mutable', self.mutable)
+ object.__setattr__(c, "mutable", self.mutable)
return c
def symmetric_difference_update(self, other):
if not self.mutable:
- raise TypeError(f'immutable type {self!r}')
- if not hasattr(other, '__contains__'):
+ raise TypeError(f"immutable type {self!r}")
+ if not hasattr(other, "__contains__"):
other = contentsSet(self._ensure_fsbase(other))
l = []
for x in self:
@@ -239,7 +238,7 @@ class contentsSet(metaclass=generic_equality):
if invert:
return (x for x in self if not x.is_reg)
- return filter(attrgetter('is_reg'), self)
+ return filter(attrgetter("is_reg"), self)
def files(self, invert=False):
"""Returns a list of just :obj:`pkgcore.fs.fs.fsFile` instances.
@@ -252,7 +251,7 @@ class contentsSet(metaclass=generic_equality):
def iterdirs(self, invert=False):
if invert:
return (x for x in self if not x.is_dir)
- return filter(attrgetter('is_dir'), self)
+ return filter(attrgetter("is_dir"), self)
def dirs(self, invert=False):
return list(self.iterdirs(invert=invert))
@@ -260,18 +259,18 @@ class contentsSet(metaclass=generic_equality):
def itersymlinks(self, invert=False):
if invert:
return (x for x in self if not x.is_sym)
- return filter(attrgetter('is_sym'), self)
+ return filter(attrgetter("is_sym"), self)
def symlinks(self, invert=False):
return list(self.iterlinks(invert=invert))
- iterlinks = alias_method('itersymlinks')
- links = alias_method('symlinks')
+ iterlinks = alias_method("itersymlinks")
+ links = alias_method("symlinks")
def iterdevs(self, invert=False):
if invert:
return (x for x in self if not x.is_dev)
- return filter(attrgetter('is_dev'), self)
+ return filter(attrgetter("is_dev"), self)
def devs(self, invert=False):
return list(self.iterdevs(invert=invert))
@@ -279,16 +278,18 @@ class contentsSet(metaclass=generic_equality):
def iterfifos(self, invert=False):
if invert:
return (x for x in self if not x.is_fifo)
- return filter(attrgetter('is_fifo'), self)
+ return filter(attrgetter("is_fifo"), self)
def fifos(self, invert=False):
return list(self.iterfifos(invert=invert))
- for k in ('file', 'dir', 'symlink', 'dev', 'fifo'):
- locals()[f'iter{k}s'].__doc__ = \
- iterfiles.__doc__.replace('fsFile', f'fs{k.capitalize()}')
- locals()[f'{k}s'].__doc__ = \
- files.__doc__.replace('fsFile', f'fs{k.capitalize()}')
+ for k in ("file", "dir", "symlink", "dev", "fifo"):
+ locals()[f"iter{k}s"].__doc__ = iterfiles.__doc__.replace(
+ "fsFile", f"fs{k.capitalize()}"
+ )
+ locals()[f"{k}s"].__doc__ = files.__doc__.replace(
+ "fsFile", f"fs{k.capitalize()}"
+ )
del k
def inode_map(self):
@@ -358,7 +359,9 @@ class contentsSet(metaclass=generic_equality):
obj.remove(conflict)
subset = obj.child_nodes(conflict.location)
obj.difference_update(subset)
- subset = subset.change_offset(conflict.location, conflict.resolved_target)
+ subset = subset.change_offset(
+ conflict.location, conflict.resolved_target
+ )
obj.update(subset)
if add_conflicting_sym:
obj.add(other[conflicts_d[conflict]])
@@ -382,14 +385,14 @@ class contentsSet(metaclass=generic_equality):
missing.add(target)
target = os.path.dirname(target)
missing.discard("/")
- self.update(fs.fsDir(location=x, mode=mode, uid=uid, gid=gid, mtime=mtime)
- for x in missing)
+ self.update(
+ fs.fsDir(location=x, mode=mode, uid=uid, gid=gid, mtime=mtime)
+ for x in missing
+ )
class OrderedContentsSet(contentsSet):
-
- def __init__(self, initial=None, mutable=False,
- add_missing_directories=False):
+ def __init__(self, initial=None, mutable=False, add_missing_directories=False):
contentsSet.__init__(self, mutable=True)
self._dict = OrderedDict()
if initial:
diff --git a/src/pkgcore/fs/fs.py b/src/pkgcore/fs/fs.py
index 9ed839b4b..f1bffe373 100644
--- a/src/pkgcore/fs/fs.py
+++ b/src/pkgcore/fs/fs.py
@@ -17,49 +17,53 @@ from snakeoil.osutils import normpath, pjoin
# goofy set of classes representating the fs objects pkgcore knows of.
-__all__ = [
- "fsFile", "fsDir", "fsSymlink", "fsDev", "fsFifo"]
-__all__.extend(
- f"is{x}" for x in ("dir", "reg", "sym", "fifo", "dev", "fs_obj"))
+__all__ = ["fsFile", "fsDir", "fsSymlink", "fsDev", "fsFifo"]
+__all__.extend(f"is{x}" for x in ("dir", "reg", "sym", "fifo", "dev", "fs_obj"))
# following are used to generate appropriate __init__, wiped from the
# namespace at the end of the module
_fs_doc = {
- "mode":""":keyword mode: int, the mode of this entry. """
- """required if strict is set""",
- "mtime":""":keyword mtime: long, the mtime of this entry. """
- """required if strict is set""",
- "uid":""":keyword uid: int, the uid of this entry. """
- """required if strict is set""",
- "gid":""":keyword gid: int, the gid of this entry. """
- """required if strict is set""",
+ "mode": """:keyword mode: int, the mode of this entry. """
+ """required if strict is set""",
+ "mtime": """:keyword mtime: long, the mtime of this entry. """
+ """required if strict is set""",
+ "uid": """:keyword uid: int, the uid of this entry. """
+ """required if strict is set""",
+ "gid": """:keyword gid: int, the gid of this entry. """
+ """required if strict is set""",
}
+
def gen_doc_additions(init, slots):
if init.__doc__ is None:
- d = \
-"""
+ d = """
:param location: location (real or intended) for this entry
:param strict: is this fully representative of the entry, or only partially
:raise KeyError: if strict is enabled, and not all args are passed in
-""".split("\n")
+""".split(
+ "\n"
+ )
else:
d = init.__doc__.split("\n")
- init.__doc__ = "\n".join(k.lstrip() for k in d) + \
- "\n".join(_fs_doc[k] for k in _fs_doc if k in slots)
+ init.__doc__ = "\n".join(k.lstrip() for k in d) + "\n".join(
+ _fs_doc[k] for k in _fs_doc if k in slots
+ )
class fsBase:
"""base class, all extensions must derive from this class"""
+
__slots__ = ("location", "mtime", "mode", "uid", "gid")
__attrs__ = __slots__
__default_attrs__ = {}
- locals().update((x.replace("is", "is_"), False) for x in
- __all__ if x.startswith("is") and x.islower() and not
- x.endswith("fs_obj"))
+ locals().update(
+ (x.replace("is", "is_"), False)
+ for x in __all__
+ if x.startswith("is") and x.islower() and not x.endswith("fs_obj")
+ )
klass.inject_richcmp_methods_from_cmp(locals())
klass.inject_immutable_instance(locals())
@@ -75,11 +79,11 @@ class fsBase:
else:
for k, v in d.items():
s(self, k, v)
+
gen_doc_additions(__init__, __attrs__)
def change_attributes(self, **kwds):
- d = {x: getattr(self, x)
- for x in self.__attrs__ if hasattr(self, x)}
+ d = {x: getattr(self, x) for x in self.__attrs__ if hasattr(self, x)}
d.update(kwds)
# split location out
location = d.pop("location")
@@ -148,7 +152,7 @@ class fsFile(fsBase):
__slots__ = ("chksums", "data", "dev", "inode")
__attrs__ = fsBase.__attrs__ + __slots__
- __default_attrs__ = {"mtime":0, 'dev':None, 'inode':None}
+ __default_attrs__ = {"mtime": 0, "dev": None, "inode": None}
is_reg = True
@@ -157,7 +161,7 @@ class fsFile(fsBase):
:param chksums: dict of checksums, key chksum_type: val hash val.
See :obj:`snakeoil.chksum`.
"""
- assert 'data_source' not in kwds
+ assert "data_source" not in kwds
if data is None:
data = local_source(location)
kwds["data"] = data
@@ -171,6 +175,7 @@ class fsFile(fsBase):
chksums = _LazyChksums(chf_types, self._chksum_callback)
kwds["chksums"] = chksums
fsBase.__init__(self, location, **kwds)
+
gen_doc_additions(__init__, __slots__)
def __repr__(self):
@@ -182,9 +187,10 @@ class fsFile(fsBase):
return list(zip(chfs, get_chksums(self.data, *chfs)))
def change_attributes(self, **kwds):
- if 'data' in kwds and ('chksums' not in kwds and
- isinstance(self.chksums, _LazyChksums)):
- kwds['chksums'] = None
+ if "data" in kwds and (
+ "chksums" not in kwds and isinstance(self.chksums, _LazyChksums)
+ ):
+ kwds["chksums"] = None
return fsBase.change_attributes(self, **kwds)
def _can_be_hardlinked(self, other):
@@ -194,7 +200,7 @@ class fsFile(fsBase):
if None in (self.inode, self.dev):
return False
- for attr in ('dev', 'inode', 'uid', 'gid', 'mode', 'mtime'):
+ for attr in ("dev", "inode", "uid", "gid", "mode", "mtime"):
if getattr(self, attr) != getattr(other, attr):
return False
return True
@@ -225,11 +231,11 @@ class fsLink(fsBase):
"""
kwargs["target"] = target
fsBase.__init__(self, location, **kwargs)
+
gen_doc_additions(__init__, __slots__)
def change_attributes(self, **kwds):
- d = {x: getattr(self, x)
- for x in self.__attrs__ if hasattr(self, x)}
+ d = {x: getattr(self, x) for x in self.__attrs__ if hasattr(self, x)}
d.update(kwds)
# split location out
location = d.pop("location")
@@ -243,7 +249,7 @@ class fsLink(fsBase):
def resolved_target(self):
if self.target.startswith("/"):
return self.target
- return normpath(pjoin(self.location, '../', self.target))
+ return normpath(pjoin(self.location, "../", self.target))
def __cmp__(self, other):
c = cmp(self.location, other.location)
@@ -254,7 +260,7 @@ class fsLink(fsBase):
return 0
def __str__(self):
- return f'{self.location} -> {self.target}'
+ return f"{self.location} -> {self.target}"
def __repr__(self):
return f"symlink:{self.location}->{self.target}"
@@ -269,33 +275,31 @@ class fsDev(fsBase):
__slots__ = ("major", "minor")
__attrs__ = fsBase.__attrs__ + __slots__
- __default_attrs__ = {"major":-1, "minor":-1}
+ __default_attrs__ = {"major": -1, "minor": -1}
is_dev = True
def __init__(self, path, major=-1, minor=-1, **kwds):
if kwds.get("strict", True):
if major == -1 or minor == -1:
- raise TypeError(
- "major/minor must be specified and positive ints")
+ raise TypeError("major/minor must be specified and positive ints")
if not stat.S_IFMT(kwds["mode"]):
raise TypeError(
- "mode %o: must specify the device type (got %o)" % (
- kwds["mode"], stat.S_IFMT(kwds["mode"])))
+ "mode %o: must specify the device type (got %o)"
+ % (kwds["mode"], stat.S_IFMT(kwds["mode"]))
+ )
kwds["major"] = major
kwds["minor"] = minor
else:
if major != -1:
major = int(major)
if major < 0:
- raise TypeError(
- "major/minor must be specified and positive ints")
+ raise TypeError("major/minor must be specified and positive ints")
kwds["major"] = major
if minor != -1:
minor = int(minor)
if minor < 0:
- raise TypeError(
- "major/minor must be specified and positive ints")
+ raise TypeError("major/minor must be specified and positive ints")
kwds["minor"] = minor
fsBase.__init__(self, path, **kwds)
@@ -308,7 +312,7 @@ def get_major_minor(stat_inst):
"""get major/minor from a stat instance
:return: major,minor tuple of ints
"""
- return ( stat_inst.st_rdev >> 8 ) & 0xff, stat_inst.st_rdev & 0xff
+ return (stat_inst.st_rdev >> 8) & 0xFF, stat_inst.st_rdev & 0xFF
class fsFifo(fsBase):
@@ -321,18 +325,24 @@ class fsFifo(fsBase):
def __repr__(self):
return f"fifo:{self.location}"
+
def mk_check(name):
- return pretty_docs(post_curry(getattr, 'is_' + name, False),
+ return pretty_docs(
+ post_curry(getattr, "is_" + name, False),
extradocs=("return True if obj is an instance of :obj:`%s`, else False" % name),
- name=("is" +name)
- )
-
-isdir = mk_check('dir')
-isreg = mk_check('reg')
-issym = mk_check('sym')
-isfifo = mk_check('fifo')
-isdev = mk_check('dev')
-isfs_obj = pretty_docs(post_curry(isinstance, fsBase), name='isfs_obj',
- extradocs='return True if obj is an fsBase derived object')
+ name=("is" + name),
+ )
+
+
+isdir = mk_check("dir")
+isreg = mk_check("reg")
+issym = mk_check("sym")
+isfifo = mk_check("fifo")
+isdev = mk_check("dev")
+isfs_obj = pretty_docs(
+ post_curry(isinstance, fsBase),
+ name="isfs_obj",
+ extradocs="return True if obj is an fsBase derived object",
+)
del gen_doc_additions, mk_check
diff --git a/src/pkgcore/fs/livefs.py b/src/pkgcore/fs/livefs.py
index 206c649e1..14c41f23d 100644
--- a/src/pkgcore/fs/livefs.py
+++ b/src/pkgcore/fs/livefs.py
@@ -21,11 +21,18 @@ __all__ = ["gen_obj", "scan", "iter_scan", "sorted_scan"]
def gen_chksums(handlers, location):
def f(key):
return handlers[key](location)
+
return LazyValDict(handlers, f)
-def gen_obj(path, stat=None, chksum_handlers=None, real_location=None,
- stat_func=os.lstat, **overrides):
+def gen_obj(
+ path,
+ stat=None,
+ chksum_handlers=None,
+ real_location=None,
+ stat_func=os.lstat,
+ **overrides
+):
"""
given a fs path, and an optional stat, create an appropriate fs obj.
@@ -47,8 +54,12 @@ def gen_obj(path, stat=None, chksum_handlers=None, real_location=None,
stat = os.lstat(real_location)
mode = stat.st_mode
- d = {"mtime":stat.st_mtime, "mode":S_IMODE(mode),
- "uid":stat.st_uid, "gid":stat.st_gid}
+ d = {
+ "mtime": stat.st_mtime,
+ "mode": S_IMODE(mode),
+ "uid": stat.st_uid,
+ "gid": stat.st_gid,
+ }
if S_ISREG(mode):
d["size"] = stat.st_size
d["data"] = local_source(real_location)
@@ -84,37 +95,42 @@ def gen_obj(path, stat=None, chksum_handlers=None, real_location=None,
# fine doing it this way (especially since we're relying on
# os.path.sep, not '/' :P)
-def _internal_iter_scan(path, chksum_handlers, stat_func=os.lstat,
- hidden=True, backup=True):
+
+def _internal_iter_scan(
+ path, chksum_handlers, stat_func=os.lstat, hidden=True, backup=True
+):
dirs = collections.deque([normpath(path)])
- obj = gen_obj(dirs[0], chksum_handlers=chksum_handlers,
- stat_func=stat_func)
+ obj = gen_obj(dirs[0], chksum_handlers=chksum_handlers, stat_func=stat_func)
yield obj
if not obj.is_dir:
return
while dirs:
base = dirs.popleft()
for x in listdir(base):
- if not hidden and x.startswith('.'):
+ if not hidden and x.startswith("."):
continue
- if not backup and x.endswith('~'):
+ if not backup and x.endswith("~"):
continue
path = pjoin(base, x)
- obj = gen_obj(path, chksum_handlers=chksum_handlers,
- real_location=path, stat_func=stat_func)
+ obj = gen_obj(
+ path,
+ chksum_handlers=chksum_handlers,
+ real_location=path,
+ stat_func=stat_func,
+ )
yield obj
if obj.is_dir:
dirs.append(path)
-def _internal_offset_iter_scan(path, chksum_handlers, offset, stat_func=os.lstat,
- hidden=True, backup=True):
+def _internal_offset_iter_scan(
+ path, chksum_handlers, offset, stat_func=os.lstat, hidden=True, backup=True
+):
offset = normpath(offset)
path = normpath(path)
- dirs = collections.deque([path[len(offset):]])
+ dirs = collections.deque([path[len(offset) :]])
if dirs[0]:
- yield gen_obj(dirs[0], chksum_handlers=chksum_handlers,
- stat_func=stat_func)
+ yield gen_obj(dirs[0], chksum_handlers=chksum_handlers, stat_func=stat_func)
sep = os.path.sep
while dirs:
@@ -122,21 +138,30 @@ def _internal_offset_iter_scan(path, chksum_handlers, offset, stat_func=os.lstat
real_base = pjoin(offset, base.lstrip(sep))
base = base.rstrip(sep) + sep
for x in listdir(real_base):
- if not hidden and x.startswith('.'):
+ if not hidden and x.startswith("."):
continue
- if not backup and x.endswith('~'):
+ if not backup and x.endswith("~"):
continue
path = pjoin(base, x)
- obj = gen_obj(path, chksum_handlers=chksum_handlers,
- real_location=pjoin(real_base, x),
- stat_func=os.lstat)
+ obj = gen_obj(
+ path,
+ chksum_handlers=chksum_handlers,
+ real_location=pjoin(real_base, x),
+ stat_func=os.lstat,
+ )
yield obj
if obj.is_dir:
dirs.append(path)
-def iter_scan(path, offset=None, follow_symlinks=False, chksum_types=None,
- hidden=True, backup=True):
+def iter_scan(
+ path,
+ offset=None,
+ follow_symlinks=False,
+ chksum_types=None,
+ hidden=True,
+ backup=True,
+):
"""
Recursively scan a path.
@@ -156,9 +181,11 @@ def iter_scan(path, offset=None, follow_symlinks=False, chksum_types=None,
stat_func = follow_symlinks and os.stat or os.lstat
if offset is None:
return _internal_iter_scan(
- path, chksum_handlers, stat_func, hidden=hidden, backup=backup)
+ path, chksum_handlers, stat_func, hidden=hidden, backup=backup
+ )
return _internal_offset_iter_scan(
- path, chksum_handlers, offset, stat_func, hidden=hidden, backup=backup)
+ path, chksum_handlers, offset, stat_func, hidden=hidden, backup=backup
+ )
def sorted_scan(path, nonexistent=False, *args, **kwargs):
@@ -197,6 +224,7 @@ def scan(*a, **kw):
mutable = kw.pop("mutable", True)
return contentsSet(iter_scan(*a, **kw), mutable=mutable)
+
class _realpath_dir:
_realpath_func = staticmethod(os.path.realpath)
@@ -220,7 +248,7 @@ def intersect(cset, realpath=False):
if realpath:
f2 = _realpath_dir()
else:
- f2 = lambda x:x
+ f2 = lambda x: x
for x in cset:
try:
yield f(f2(x.location))
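
The livefs.py hunks above are formatting-only; the traversal itself is unchanged. As a rough sketch of the pattern _internal_iter_scan uses (a deque of pending directories plus optional filtering of dotfiles and editor backups), with plain paths standing in for pkgcore's fs objects:

import collections
import os

def iter_paths(root, hidden=True, backup=True):
    # breadth-first walk mirroring _internal_iter_scan's deque loop
    root = os.path.normpath(root)
    yield root
    if not os.path.isdir(root):
        return
    dirs = collections.deque([root])
    while dirs:
        base = dirs.popleft()
        for name in os.listdir(base):
            if not hidden and name.startswith("."):
                continue
            if not backup and name.endswith("~"):
                continue
            path = os.path.join(base, name)
            yield path
            if os.path.isdir(path) and not os.path.islink(path):
                dirs.append(path)

for path in iter_paths(".", hidden=False, backup=False):
    pass  # the real scanner wraps each path in an fs object via gen_obj
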
diff --git a/src/pkgcore/fs/ops.py b/src/pkgcore/fs/ops.py
index 32924af30..1f83b9f02 100644
--- a/src/pkgcore/fs/ops.py
+++ b/src/pkgcore/fs/ops.py
@@ -43,19 +43,19 @@ def ensure_perms(d1, d2=None):
# if it's preexisting, keep its perms.
do_mode = False
else:
- do_mode = (m is not None and m != d2.mode)
+ do_mode = m is not None and m != d2.mode
except AttributeError:
# yes. this _is_ stupid. vdb's don't always store all attributes
do_mode = False
do_chown = False
try:
- do_chown = (o != d2.uid or g != d2.gid)
+ do_chown = o != d2.uid or g != d2.gid
except AttributeError:
do_chown = True
try:
- do_mtime = (t != d2.mtime)
+ do_mtime = t != d2.mtime
except AttributeError:
do_mtime = True
@@ -85,16 +85,17 @@ def mkdir(d):
ensure_perms(d)
return True
+
# minor hack.
-class FailedCopy(TypeError):
+class FailedCopy(TypeError):
def __init__(self, obj, msg):
self.obj = obj
self.msg = msg
def __str__(self):
- return f'failed copying {self.obj}: {self.msg}'
+ return f"failed copying {self.obj}: {self.msg}"
class CannotOverwrite(FailedCopy):
@@ -102,7 +103,7 @@ class CannotOverwrite(FailedCopy):
self.obj, self.existing = obj, existing
def __str__(self):
- return f'cannot write {self.obj} due to {self.existing} existing'
+ return f"cannot write {self.obj} due to {self.existing} existing"
def copyfile(obj, mkdirs=False):
@@ -117,9 +118,9 @@ def copyfile(obj, mkdirs=False):
existent = False
if not fs.isfs_obj(obj):
- raise TypeError(f'obj must be fsBase derivative: {obj!r}')
+ raise TypeError(f"obj must be fsBase derivative: {obj!r}")
elif fs.isdir(obj):
- raise TypeError(f'obj must not be a fsDir instance: {obj!r}')
+ raise TypeError(f"obj must not be a fsDir instance: {obj!r}")
try:
existing = gen_obj(obj.location)
@@ -154,7 +155,7 @@ def copyfile(obj, mkdirs=False):
else:
ret = spawn([CP_BINARY, "-Rp", obj.location, fp])
if ret != 0:
- raise FailedCopy(obj, f'got {ret} from {CP_BINARY} -Rp')
+ raise FailedCopy(obj, f"got {ret} from {CP_BINARY} -Rp")
ensure_perms(obj.change_attributes(location=fp))
@@ -162,6 +163,7 @@ def copyfile(obj, mkdirs=False):
os.rename(existent_fp, obj.location)
return True
+
def do_link(src, trg):
try:
os.link(src.location, trg.location)
@@ -174,7 +176,7 @@ def do_link(src, trg):
return False
raise
- path = trg.location + '#new'
+ path = trg.location + "#new"
unlink_if_exists(path)
try:
os.link(src.location, path)
@@ -211,15 +213,15 @@ def merge_contents(cset, offset=None, callback=None):
"""
if callback is None:
- callback = lambda obj:None
+ callback = lambda obj: None
if not isinstance(cset, contents.contentsSet):
- raise TypeError(f'cset must be a contentsSet, got {cset!r}')
+ raise TypeError(f"cset must be a contentsSet, got {cset!r}")
if offset is not None:
if os.path.exists(offset):
if not os.path.isdir(offset):
- raise TypeError(f'offset must be a dir, or not exist: {offset}')
+ raise TypeError(f"offset must be a dir, or not exist: {offset}")
else:
mkdir(fs.fsDir(offset, strict=False))
iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))
@@ -268,8 +270,10 @@ def merge_contents(cset, offset=None, callback=None):
# overlayfs's potentially. Brute force is in use either
# way.
candidates = merged_inodes.setdefault(key, [])
- if any(target._can_be_hardlinked(x) and do_link(target, x)
- for target in candidates):
+ if any(
+ target._can_be_hardlinked(x) and do_link(target, x)
+ for target in candidates
+ ):
continue
candidates.append(x)
@@ -322,8 +326,13 @@ def unmerge_contents(cset, offset=None, callback=None):
try:
os.rmdir(x.location)
except OSError as e:
- if not e.errno in (errno.ENOTEMPTY, errno.ENOENT, errno.ENOTDIR,
- errno.EBUSY, errno.EEXIST):
+ if not e.errno in (
+ errno.ENOTEMPTY,
+ errno.ENOENT,
+ errno.ENOTDIR,
+ errno.EBUSY,
+ errno.EEXIST,
+ ):
raise
else:
callback(x)
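
In the unmerge_contents hunk above, black only reflows the errno tuple; note the pre-existing `if not e.errno in (...)` would read more idiomatically as `not in`. A self-contained sketch of the same tolerance:

import errno
import os

IGNORED_RMDIR_ERRNOS = (errno.ENOTEMPTY, errno.ENOENT, errno.ENOTDIR, errno.EBUSY, errno.EEXIST)

def try_rmdir(path):
    # remove a directory, swallowing the errors unmerge_contents tolerates;
    # anything unexpected still propagates
    try:
        os.rmdir(path)
    except OSError as e:
        if e.errno not in IGNORED_RMDIR_ERRNOS:
            raise
        return False
    return True

print(try_rmdir("/no/such/dir"))  # False: ENOENT is tolerated
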
diff --git a/src/pkgcore/fs/tar.py b/src/pkgcore/fs/tar.py
index 00801dfa4..bdef5113b 100644
--- a/src/pkgcore/fs/tar.py
+++ b/src/pkgcore/fs/tar.py
@@ -20,25 +20,27 @@ _unique_inode = count(2**32).__next__
known_compressors = {
"bz2": tarfile.TarFile.bz2open,
"gz": tarfile.TarFile.gzopen,
- None: tarfile.TarFile.open}
+ None: tarfile.TarFile.open,
+}
-def write_set(contents_set, filepath, compressor='bzip2', absolute_paths=False,
- parallelize=False):
- if compressor == 'bz2':
- compressor = 'bzip2'
+def write_set(
+ contents_set, filepath, compressor="bzip2", absolute_paths=False, parallelize=False
+):
+ if compressor == "bz2":
+ compressor = "bzip2"
tar_handle = None
- handle = compression.compress_handle(compressor, filepath,
- parallelize=parallelize)
+ handle = compression.compress_handle(compressor, filepath, parallelize=parallelize)
try:
- tar_handle = tarfile.TarFile(name=filepath, fileobj=handle, mode='w')
+ tar_handle = tarfile.TarFile(name=filepath, fileobj=handle, mode="w")
add_contents_to_tarfile(contents_set, tar_handle)
finally:
if tar_handle is not None:
tar_handle.close()
handle.close()
+
def add_contents_to_tarfile(contents_set, tar_fd, absolute_paths=False):
# first add directories, then everything else
# this is just a pkgcore optimization, it prefers to see the dirs first.
@@ -57,13 +59,13 @@ def add_contents_to_tarfile(contents_set, tar_fd, absolute_paths=False):
if existing is not None:
if x._can_be_hardlinked(existing):
t.type = tarfile.LNKTYPE
- t.linkname = './%s' % existing.location.lstrip('/')
+ t.linkname = "./%s" % existing.location.lstrip("/")
t.size = 0
else:
inodes[key] = x
data = x.data.bytes_fileobj()
tar_fd.addfile(t, fileobj=data)
- #tar_fd.addfile(t, fileobj=x.data.bytes_fileobj())
+ # tar_fd.addfile(t, fileobj=x.data.bytes_fileobj())
else:
tar_fd.addfile(t)
@@ -78,8 +80,11 @@ def archive_to_fsobj(src_tar):
inodes = {}
for member in src_tar:
d = {
- "uid":member.uid, "gid":member.gid,
- "mtime":member.mtime, "mode":member.mode}
+ "uid": member.uid,
+ "gid": member.gid,
+ "mtime": member.mtime,
+ "mode": member.mode,
+ }
location = os.path.abspath(os.path.join(psep, member.name.strip(psep)))
if member.isdir():
if member.name.strip(psep) == ".":
@@ -95,7 +100,8 @@ def archive_to_fsobj(src_tar):
"Tarfile file %r is a hardlink to %r, but we can't "
"find the resolved hardlink target %r in the archive. "
"This means either a bug in pkgcore, or a malformed "
- "tarball." % (member.name, member.linkname, target))
+ "tarball." % (member.name, member.linkname, target)
+ )
d["inode"] = inode
else:
d["inode"] = inode = _unique_inode()
@@ -104,9 +110,11 @@ def archive_to_fsobj(src_tar):
        # to ensure 'y' is in the cache alongside its target z to support 'x'
# later lookup.
inodes[location] = inode
- d["data"] = invokable_data_source.wrap_function(partial(
- src_tar.extractfile, member.name), returns_text=False,
- returns_handle=True)
+ d["data"] = invokable_data_source.wrap_function(
+ partial(src_tar.extractfile, member.name),
+ returns_text=False,
+ returns_handle=True,
+ )
yield fsFile(location, **d)
elif member.issym() or member.islnk():
yield fsSymlink(location, member.linkname, **d)
@@ -118,8 +126,10 @@ def archive_to_fsobj(src_tar):
yield fsDev(location, **d)
else:
raise AssertionError(
- "unknown type %r, %r was encounted walking tarmembers" %
- (member, member.type))
+                "unknown type %r, %r was encountered walking tarmembers"
+ % (member, member.type)
+ )
+
def fsobj_to_tarinfo(fsobj, absolute_path=True):
t = tarfile.TarInfo()
@@ -142,7 +152,7 @@ def fsobj_to_tarinfo(fsobj, absolute_path=True):
t.devminor = fsobj.minor
t.name = fsobj.location
if not absolute_path:
- t.name = './%s' % (fsobj.location.lstrip("/"),)
+ t.name = "./%s" % (fsobj.location.lstrip("/"),)
t.mode = fsobj.mode
t.uid = fsobj.uid
t.gid = fsobj.gid
@@ -159,15 +169,16 @@ def generate_contents(filepath, compressor="bz2", parallelize=True):
:obj:`known_compressors` for list of valid compressors
"""
- if compressor == 'bz2':
- compressor = 'bzip2'
+ if compressor == "bz2":
+ compressor = "bzip2"
tar_handle = None
- handle = compression.decompress_handle(compressor, filepath,
- parallelize=parallelize)
+ handle = compression.decompress_handle(
+ compressor, filepath, parallelize=parallelize
+ )
try:
- tar_handle = tarfile.TarFile(name=filepath, fileobj=handle, mode='r')
+ tar_handle = tarfile.TarFile(name=filepath, fileobj=handle, mode="r")
except tarfile.ReadError as e:
if not e.message.endswith("empty header"):
raise
@@ -232,8 +243,7 @@ def convert_archive(archive):
return +1
elif x.is_reg:
if y.is_reg:
- return cmp(files_ordering[x.data],
- files_ordering[y.data])
+ return cmp(files_ordering[x.data], files_ordering[y.data])
return +1
elif y.is_reg:
return -1
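
The tar.py hunks keep the hardlink bookkeeping in add_contents_to_tarfile intact: entries sharing an inode are stored once, and later occurrences become LNKTYPE members pointing back at the first. A standalone sketch of that idea using tarfile directly (the names and the demo.tar output path are illustrative only):

import io
import tarfile

def add_with_hardlinks(tar, entries):
    # entries: (name, payload, inode_key) tuples; entries sharing inode_key are
    # written once, later ones become hardlink members with no payload
    seen = {}
    for name, payload, key in entries:
        info = tarfile.TarInfo("./" + name.lstrip("/"))
        if key in seen:
            info.type = tarfile.LNKTYPE
            info.linkname = "./" + seen[key].lstrip("/")
            info.size = 0
            tar.addfile(info)
        else:
            seen[key] = name
            info.size = len(payload)
            tar.addfile(info, io.BytesIO(payload))

with tarfile.open("demo.tar", "w") as tar:  # written to the current directory
    add_with_hardlinks(tar, [("/a/x", b"data", 1), ("/a/y", b"data", 1)])
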
diff --git a/src/pkgcore/gpg.py b/src/pkgcore/gpg.py
index d978aea55..888a5c98a 100644
--- a/src/pkgcore/gpg.py
+++ b/src/pkgcore/gpg.py
@@ -2,21 +2,22 @@ __all__ = ("skip_signatures",)
msg_header = "-----BEGIN PGP SIGNED MESSAGE-----\n"
msg_header_len = len(msg_header)
-msg_hash = 'Hash:'
+msg_hash = "Hash:"
msg_hash_len = len(msg_hash)
sig_header = "-----BEGIN PGP SIGNATURE-----\n"
sig_header_len = len(sig_header)
sig_footer = "-----END PGP SIGNATURE-----\n"
sig_footer_len = len(sig_footer)
+
def skip_signatures(iterable):
i = iter(iterable)
-# format is-
-#"""
-#-----BEGIN PGP SIGNED MESSAGE-----
-#Hash: SHA1
-#
-#"""
+ # format is-
+ # """
+ # -----BEGIN PGP SIGNED MESSAGE-----
+ # Hash: SHA1
+ #
+ # """
for line in i:
# so... prune msg first, then
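
The gpg.py hunk re-quotes strings and re-indents the layout comment into skip_signatures. For orientation, a simplified sketch of stripping that clearsign layout (the header plus "Hash:" lines, then the trailing signature block); pkgcore's real implementation handles more of the corner cases:

MSG_HEADER = "-----BEGIN PGP SIGNED MESSAGE-----\n"
SIG_HEADER = "-----BEGIN PGP SIGNATURE-----\n"
SIG_FOOTER = "-----END PGP SIGNATURE-----\n"

def strip_clearsign(lines):
    # drop the signed-message preamble (header plus "Hash:" lines up to the blank
    # separator) and the armored signature block, yielding only the payload lines
    it = iter(lines)
    for line in it:
        if line == MSG_HEADER:
            for inner in it:
                if inner == "\n":
                    break
            continue
        if line == SIG_HEADER:
            for inner in it:
                if inner == SIG_FOOTER:
                    break
            continue
        yield line

sample = [MSG_HEADER, "Hash: SHA1\n", "\n", "payload\n", SIG_HEADER, "...sig...\n", SIG_FOOTER]
print(list(strip_clearsign(sample)))  # ['payload\n']
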
diff --git a/src/pkgcore/log.py b/src/pkgcore/log.py
index 095f32f3c..d0fd521a1 100644
--- a/src/pkgcore/log.py
+++ b/src/pkgcore/log.py
@@ -15,4 +15,4 @@ import logging
logging.basicConfig()
# Our main logger.
-logger = logging.getLogger('pkgcore')
+logger = logging.getLogger("pkgcore")
diff --git a/src/pkgcore/merge/engine.py b/src/pkgcore/merge/engine.py
index e3ebf7ade..efbd9e817 100644
--- a/src/pkgcore/merge/engine.py
+++ b/src/pkgcore/merge/engine.py
@@ -37,7 +37,7 @@ def alias_cset(alias, engine, csets):
return csets[alias]
-def map_new_cset_livefs(engine, csets, cset_name='new_cset'):
+def map_new_cset_livefs(engine, csets, cset_name="new_cset"):
"""Find symlinks on disk that redirect new_cset, and return a livefs localized cset."""
initial = csets[cset_name]
ondisk = contents.contentsSet(livefs.intersect(initial.iterdirs(), realpath=False))
@@ -48,19 +48,23 @@ def map_new_cset_livefs(engine, csets, cset_name='new_cset'):
class MergeEngine:
- install_hooks = {x: [] for x in
- ("sanity_check", "pre_merge", "merge", "post_merge", "final")}
- uninstall_hooks = {x: [] for x in
- ("sanity_check", "pre_unmerge", "unmerge", "post_unmerge", "final")}
- replace_hooks = {x: [] for x in
- set(chain(install_hooks.keys(), uninstall_hooks.keys()))}
+ install_hooks = {
+ x: [] for x in ("sanity_check", "pre_merge", "merge", "post_merge", "final")
+ }
+ uninstall_hooks = {
+ x: []
+ for x in ("sanity_check", "pre_unmerge", "unmerge", "post_unmerge", "final")
+ }
+ replace_hooks = {
+ x: [] for x in set(chain(install_hooks.keys(), uninstall_hooks.keys()))
+ }
install_csets = {
"install_existing": "get_install_livefs_intersect",
"resolved_install": map_new_cset_livefs,
- 'new_cset': partial(alias_cset, 'raw_new_cset'),
- "install": partial(alias_cset, 'new_cset'),
- "replace": partial(alias_cset, 'new_cset'),
+ "new_cset": partial(alias_cset, "raw_new_cset"),
+ "install": partial(alias_cset, "new_cset"),
+ "replace": partial(alias_cset, "new_cset"),
}
uninstall_csets = {
"uninstall_existing": partial(alias_cset, "uninstall"),
@@ -69,8 +73,9 @@ class MergeEngine:
}
replace_csets = install_csets.copy()
replace_csets.update(uninstall_csets)
- replace_csets["modifying"] = (
- lambda e, c: c["resolved_install"].intersection(c["uninstall"]))
+ replace_csets["modifying"] = lambda e, c: c["resolved_install"].intersection(
+ c["uninstall"]
+ )
replace_csets["uninstall"] = "get_remove_cset"
replace_csets["replace"] = "get_replace_cset"
replace_csets["install_existing"] = "get_install_livefs_intersect"
@@ -81,14 +86,24 @@ class MergeEngine:
allow_reuse = True
- def __init__(self, mode, tempdir, hooks, csets, preserves, observer,
- offset=None, disable_plugins=False, parallelism=None):
+ def __init__(
+ self,
+ mode,
+ tempdir,
+ hooks,
+ csets,
+ preserves,
+ observer,
+ offset=None,
+ disable_plugins=False,
+ parallelism=None,
+ ):
if observer is None:
observer = observer_mod.repo_observer(observer_mod.null_output)
self.observer = observer
self.mode = mode
if tempdir is not None:
- tempdir = normpath(tempdir) + '/'
+ tempdir = normpath(tempdir) + "/"
self.tempdir = tempdir
self.parallelism = parallelism if parallelism is not None else cpu_count()
@@ -97,15 +112,15 @@ class MergeEngine:
self.preserve_csets = []
self.cset_sources = {}
# instantiate these separately so their values are preserved
- self.preserved_csets = LazyValDict(
- self.preserve_csets, self._get_cset_source)
+ self.preserved_csets = LazyValDict(self.preserve_csets, self._get_cset_source)
for k, v in csets.items():
if isinstance(v, str):
v = getattr(self, v, v)
if not callable(v):
raise TypeError(
"cset values must be either the string name of "
- f"existing methods, or callables (got {v})")
+ f"existing methods, or callables (got {v})"
+ )
if k in preserves:
self.add_preserved_cset(k, v)
@@ -131,8 +146,7 @@ class MergeEngine:
setattr(self, x, partial(self.execute_hook, x))
@classmethod
- def install(cls, tempdir, pkg, offset=None, observer=None,
- disable_plugins=False):
+ def install(cls, tempdir, pkg, offset=None, observer=None, disable_plugins=False):
"""Generate a MergeEngine instance configured for installing a pkg.
:param tempdir: tempspace for the merger to use; this space it must
@@ -147,20 +161,28 @@ class MergeEngine:
csets = cls.install_csets.copy()
if "raw_new_cset" not in csets:
csets["raw_new_cset"] = post_curry(cls.get_pkg_contents, pkg)
- o = cls(INSTALL_MODE, tempdir, hooks, csets, cls.install_csets_preserve,
- observer, offset=offset, disable_plugins=disable_plugins)
-
- if o.offset != '/':
+ o = cls(
+ INSTALL_MODE,
+ tempdir,
+ hooks,
+ csets,
+ cls.install_csets_preserve,
+ observer,
+ offset=offset,
+ disable_plugins=disable_plugins,
+ )
+
+ if o.offset != "/":
# wrap the results of new_cset to pass through an offset generator
o.cset_sources["raw_new_cset"] = post_curry(
- o.generate_offset_cset, o.cset_sources["raw_new_cset"])
+ o.generate_offset_cset, o.cset_sources["raw_new_cset"]
+ )
o.new = pkg
return o
@classmethod
- def uninstall(cls, tempdir, pkg, offset=None, observer=None,
- disable_plugins=False):
+ def uninstall(cls, tempdir, pkg, offset=None, observer=None, disable_plugins=False):
"""Generate a MergeEngine instance configured for uninstalling a pkg.
:param tempdir: tempspace for the merger to use; this space it must
@@ -176,20 +198,30 @@ class MergeEngine:
if "raw_old_cset" not in csets:
csets["raw_old_cset"] = post_curry(cls.get_pkg_contents, pkg)
- o = cls(UNINSTALL_MODE, tempdir, hooks, csets, cls.uninstall_csets_preserve,
- observer, offset=offset, disable_plugins=disable_plugins)
-
- if o.offset != '/':
+ o = cls(
+ UNINSTALL_MODE,
+ tempdir,
+ hooks,
+ csets,
+ cls.uninstall_csets_preserve,
+ observer,
+ offset=offset,
+ disable_plugins=disable_plugins,
+ )
+
+ if o.offset != "/":
# wrap the results of new_cset to pass through an offset generator
o.cset_sources["old_cset"] = post_curry(
- o.generate_offset_cset, o.cset_sources["old_cset"])
+ o.generate_offset_cset, o.cset_sources["old_cset"]
+ )
o.old = pkg
return o
@classmethod
- def replace(cls, tempdir, old, new, offset=None, observer=None,
- disable_plugins=False):
+ def replace(
+ cls, tempdir, old, new, offset=None, observer=None, disable_plugins=False
+ ):
"""Generate a MergeEngine instance configured for replacing a pkg.
:param tempdir: tempspace for the merger to use; this space it must
@@ -205,18 +237,27 @@ class MergeEngine:
csets = cls.replace_csets.copy()
- csets.setdefault('raw_old_cset', post_curry(cls.get_pkg_contents, old))
- csets.setdefault('raw_new_cset', post_curry(cls.get_pkg_contents, new))
-
- o = cls(REPLACE_MODE, tempdir, hooks, csets, cls.replace_csets_preserve,
- observer, offset=offset, disable_plugins=disable_plugins)
-
- if o.offset != '/':
+ csets.setdefault("raw_old_cset", post_curry(cls.get_pkg_contents, old))
+ csets.setdefault("raw_new_cset", post_curry(cls.get_pkg_contents, new))
+
+ o = cls(
+ REPLACE_MODE,
+ tempdir,
+ hooks,
+ csets,
+ cls.replace_csets_preserve,
+ observer,
+ offset=offset,
+ disable_plugins=disable_plugins,
+ )
+
+ if o.offset != "/":
for k in ("raw_old_cset", "raw_new_cset"):
# wrap the results of new_cset to pass through an
# offset generator
o.cset_sources[k] = post_curry(
- o.generate_offset_cset, o.cset_sources[k])
+ o.generate_offset_cset, o.cset_sources[k]
+ )
o.old = old
o.new = new
@@ -241,8 +282,9 @@ class MergeEngine:
Used in transitioning between hook points
"""
- self.csets = StackedDict(self.preserved_csets,
- LazyValDict(self.cset_sources, self._get_cset_source))
+ self.csets = StackedDict(
+ self.preserved_csets, LazyValDict(self.cset_sources, self._get_cset_source)
+ )
def _get_cset_source(self, key):
return self.cset_sources[key](self, self.csets)
@@ -279,8 +321,7 @@ class MergeEngine:
:param trigger: :class:`pkgcore.merge.triggers.base` to add
"""
if hook_name not in self.hooks:
- raise KeyError(
- f"trigger {trigger!r}'s hook {hook_name} isn't a known hook")
+ raise KeyError(f"trigger {trigger!r}'s hook {hook_name} isn't a known hook")
if required_csets is not None:
for rcs in required_csets:
@@ -295,7 +336,9 @@ class MergeEngine:
try:
self.phase = hook
self.regenerate_csets()
- for trigger in sorted(self.hooks[hook], key=operator.attrgetter("priority")):
+ for trigger in sorted(
+ self.hooks[hook], key=operator.attrgetter("priority")
+ ):
# error checking needed here.
self.observer.trigger_start(hook, trigger)
try:
@@ -305,11 +348,13 @@ class MergeEngine:
raise
except errors.BlockModification as e:
self.observer.error(
- f"modification was blocked by trigger {trigger!r}: {e}")
+ f"modification was blocked by trigger {trigger!r}: {e}"
+ )
raise
except errors.ModificationError as e:
self.observer.error(
- f"modification error occurred during trigger {trigger!r}: {e}")
+ f"modification error occurred during trigger {trigger!r}: {e}"
+ )
raise
except Exception as e:
if not trigger.suppress_exceptions:
@@ -350,7 +395,9 @@ class MergeEngine:
@staticmethod
def _get_livefs_intersect_cset(engine, csets, cset_name, realpath=False):
"""Generate the livefs intersection against a cset."""
- return contents.contentsSet(livefs.intersect(csets[cset_name], realpath=realpath))
+ return contents.contentsSet(
+ livefs.intersect(csets[cset_name], realpath=realpath)
+ )
@staticmethod
def get_install_livefs_intersect(engine, csets):
@@ -364,8 +411,8 @@ class MergeEngine:
def get_merged_cset(self, strip_offset=True):
cset = self.csets["install"]
- if self.offset not in (None, '/') and strip_offset:
- rewrite = contents.change_offset_rewriter(self.offset, '/', cset)
+ if self.offset not in (None, "/") and strip_offset:
+ rewrite = contents.change_offset_rewriter(self.offset, "/", cset)
cset = contents.contentsSet(rewrite)
return cset
@@ -395,7 +442,7 @@ class MergeEngine:
# clone it into tempspace; it's required we control the tempspace,
# so this function is safe in our usage.
- fd, path = tempfile.mkstemp(prefix='merge-engine-', dir=self.tempdir)
+ fd, path = tempfile.mkstemp(prefix="merge-engine-", dir=self.tempdir)
# XXX: annoying quirk of python, we don't want append mode, so 'a+'
# isn't viable; wr will truncate the file, so data_source uses r+.
@@ -404,7 +451,8 @@ class MergeEngine:
# just touch the filepath.
touch(path)
new_source = data_source.local_source(
- path, True, encoding=getattr(fsobj, 'encoding', None))
+ path, True, encoding=getattr(fsobj, "encoding", None)
+ )
if source and not empty:
data_source.transfer(source.bytes_fsobj(), new_source.bytes_fsobj(True))
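
One detail worth noting in the MergeEngine hunks: cset sources may be supplied either as callables or as names of methods on the engine; string values are resolved via getattr and everything must end up callable. A toy standalone version of that resolution (the class name and layout here are illustrative only):

class Registry:
    # toy version of the check in MergeEngine.__init__: string values name methods
    # on the instance, everything else must already be callable
    def get_install_livefs_intersect(self, *args):
        return "livefs intersection"

    def resolve(self, sources):
        resolved = {}
        for name, value in sources.items():
            if isinstance(value, str):
                value = getattr(self, value, value)
            if not callable(value):
                raise TypeError(f"cset values must be method names or callables (got {value!r})")
            resolved[name] = value
        return resolved

reg = Registry()
resolved = reg.resolve({
    "install_existing": "get_install_livefs_intersect",
    "new_cset": lambda engine, csets: csets["raw_new_cset"],
})
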
diff --git a/src/pkgcore/merge/errors.py b/src/pkgcore/merge/errors.py
index 7d82aa894..7292cc817 100644
--- a/src/pkgcore/merge/errors.py
+++ b/src/pkgcore/merge/errors.py
@@ -22,7 +22,10 @@ class BlockModification(ModificationError):
def __str__(self):
return "Modification was blocked by %s: %s" % (
- self.trigger.__class__.__name__, self.msg)
+ self.trigger.__class__.__name__,
+ self.msg,
+ )
+
class TriggerUnknownCset(ModificationError):
"""Trigger's required content set isn't known"""
@@ -31,5 +34,6 @@ class TriggerUnknownCset(ModificationError):
if not isinstance(csets, (tuple, list)):
csets = (csets,)
super().__init__(
- f"{self.__class__}: trigger {trigger!r} unknown cset: {csets!r}")
+ f"{self.__class__}: trigger {trigger!r} unknown cset: {csets!r}"
+ )
self.trigger, self.csets = trigger, csets
diff --git a/src/pkgcore/merge/triggers.py b/src/pkgcore/merge/triggers.py
index 41ffcfa08..3994c21ac 100644
--- a/src/pkgcore/merge/triggers.py
+++ b/src/pkgcore/merge/triggers.py
@@ -118,7 +118,7 @@ class base:
return [csets[x] for x in required_csets]
def trigger(self, engine, csets):
- raise NotImplementedError(self, 'trigger')
+ raise NotImplementedError(self, "trigger")
def __call__(self, engine, csets):
"""execute the trigger"""
@@ -133,15 +133,12 @@ class base:
return f"{self.label}: cset({self.required_csets}) ftrigger({self.trigger})"
def __repr__(self):
- return "<%s cset=%r @#%x>" % (
- self.label,
- self.required_csets, id(self))
+ return "<%s cset=%r @#%x>" % (self.label, self.required_csets, id(self))
class ThreadedTrigger(base):
-
def identify_work(self, engine, *csets):
- raise NotImplementedError(self, 'identify_work')
+ raise NotImplementedError(self, "identify_work")
def _run_job(self, observer, functor, args, kwds):
try:
@@ -168,8 +165,9 @@ class ThreadedTrigger(base):
args = (observer,) + self.threading_get_args(engine, *csets)
kwargs = self.threading_get_kwargs(engine, *csets)
# Grab PKGCORE_TRIGGER_PARALLELISM to make development easier
- kwargs['threads'] = int(
- os.environ.get("PKGCORE_TRIGGER_PARALLELISM", engine.parallelism))
+ kwargs["threads"] = int(
+ os.environ.get("PKGCORE_TRIGGER_PARALLELISM", engine.parallelism)
+ )
work = list(self.identify_work(engine, *csets))
thread_pool.map_async(work, self.thread_trigger, *args, **kwargs)
@@ -275,7 +273,7 @@ class mtime_watcher:
def update_elf_hints(root):
- return spawn.spawn(["/sbin/ldconfig", "-X", "-r", root], fd_pipes={1:1, 2:2})
+ return spawn.spawn(["/sbin/ldconfig", "-X", "-r", root], fd_pipes={1: 1, 2: 2})
class ldconfig(base):
@@ -283,9 +281,9 @@ class ldconfig(base):
required_csets = ()
priority = 10
_engine_types = None
- _hooks = ('pre_merge', 'post_merge', 'pre_unmerge', 'post_unmerge')
+ _hooks = ("pre_merge", "post_merge", "pre_unmerge", "post_unmerge")
- default_ld_path = ['usr/lib', 'usr/lib64', 'usr/lib32', 'lib', 'lib64', 'lib32']
+ default_ld_path = ["usr/lib", "usr/lib64", "usr/lib32", "lib", "lib64", "lib32"]
def __init__(self, ld_so_conf_path="etc/ld.so.conf"):
self.ld_so_conf_path = ld_so_conf_path.lstrip(os.path.sep)
@@ -308,8 +306,8 @@ class ldconfig(base):
def _mk_ld_so_conf(self, fp):
if not ensure_dirs(os.path.dirname(fp), mode=0o755, minimal=True):
raise errors.BlockModification(
- self,
- f"failed creating/setting {fp} to 0755, root/root for uid/gid")
+ self, f"failed creating/setting {fp} to 0755, root/root for uid/gid"
+ )
try:
touch(fp)
except EnvironmentError as e:
@@ -317,11 +315,11 @@ class ldconfig(base):
def trigger(self, engine):
# ldconfig is only meaningful in GNU/Linux
- if platform.system() != 'Linux':
+ if platform.system() != "Linux":
return
locations = self.read_ld_so_conf(engine.offset)
- if engine.phase.startswith('pre_'):
+ if engine.phase.startswith("pre_"):
self.saved_mtimes.set_state(locations)
return
@@ -342,30 +340,31 @@ class InfoRegen(base):
# could implement this to look at csets, and do incremental removal and
    # addition; doesn't seem worthwhile though for the additional complexity
- _hooks = ('pre_merge', 'post_merge', 'pre_unmerge', 'post_unmerge')
+ _hooks = ("pre_merge", "post_merge", "pre_unmerge", "post_unmerge")
_engine_types = None
_label = "gnu info regen"
- locations = ('/usr/share/info',)
+ locations = ("/usr/share/info",)
def __init__(self):
self.saved_mtimes = mtime_watcher()
def get_binary_path(self):
try:
- return process.find_binary('install-info')
+ return process.find_binary("install-info")
except process.CommandNotFound:
# swallow it.
return None
def trigger(self, engine):
- locations = [pjoin(engine.offset, x.lstrip(os.path.sep))
- for x in self.locations]
+ locations = [
+ pjoin(engine.offset, x.lstrip(os.path.sep)) for x in self.locations
+ ]
- if engine.phase.startswith('pre_'):
+ if engine.phase.startswith("pre_"):
self.saved_mtimes.set_state(locations)
return
- elif engine.phase == 'post_merge' and engine.mode == const.REPLACE_MODE:
+ elif engine.phase == "post_merge" and engine.mode == const.REPLACE_MODE:
# skip post_merge for replace.
# we catch it on unmerge...
return
@@ -376,14 +375,14 @@ class InfoRegen(base):
regens = set(x.location for x in self.saved_mtimes.get_changes(locations))
# force regeneration of any directory lacking the info index.
- regens.update(x for x in locations if not os.path.isfile(pjoin(x, 'dir')))
+ regens.update(x for x in locations if not os.path.isfile(pjoin(x, "dir")))
bad = []
for x in regens:
bad.extend(self.regen(bin_path, x))
if bad and engine.observer is not None:
- bad_info = ', '.join(map(repr, sorted(bad)))
+ bad_info = ", ".join(map(repr, sorted(bad)))
engine.observer.warn(f"bad info files: {bad_info}")
def should_skip_directory(self, basepath, files):
@@ -403,26 +402,31 @@ class InfoRegen(base):
for x in set(ignores).intersection(files):
os.remove(pjoin(basepath, x))
- index = pjoin(basepath, 'dir')
+ index = pjoin(basepath, "dir")
for x in files:
if x in ignores or x.startswith("."):
continue
ret, data = spawn.spawn_get_output(
- [binary, '--quiet', pjoin(basepath, x), '--dir-file', index],
- collect_fds=(1, 2), split_lines=False)
-
- if not data or "already exists" in data or \
- "warning: no info dir entry" in data:
+ [binary, "--quiet", pjoin(basepath, x), "--dir-file", index],
+ collect_fds=(1, 2),
+ split_lines=False,
+ )
+
+ if (
+ not data
+ or "already exists" in data
+ or "warning: no info dir entry" in data
+ ):
continue
yield pjoin(basepath, x)
class merge(base):
- required_csets = ('install',)
+ required_csets = ("install",)
_engine_types = INSTALLING_MODES
- _hooks = ('merge',)
+ _hooks = ("merge",)
suppress_exceptions = False
@@ -432,35 +436,49 @@ class merge(base):
class unmerge(base):
- required_csets = ('uninstall',)
+ required_csets = ("uninstall",)
_engine_types = UNINSTALLING_MODES
- _hooks = ('unmerge',)
+ _hooks = ("unmerge",)
suppress_exceptions = False
def trigger(self, engine, unmerging_cset):
- return unmerge_contents(unmerging_cset, callback=engine.observer.removing_fs_obj)
+ return unmerge_contents(
+ unmerging_cset, callback=engine.observer.removing_fs_obj
+ )
class BaseSystemUnmergeProtection(base):
- required_csets = ('uninstall',)
+ required_csets = ("uninstall",)
priority = -100
_engine_types = UNINSTALLING_MODES
- _hooks = ('unmerge',)
+ _hooks = ("unmerge",)
suppress_exceptions = False
_preserve_sequence = (
- '/usr', '/usr/lib', '/usr/lib64', '/usr/lib32',
- '/usr/bin', '/usr/sbin', '/bin', '/sbin', '/lib', '/lib32', '/lib64',
- '/etc', '/var', '/home', '/root',
+ "/usr",
+ "/usr/lib",
+ "/usr/lib64",
+ "/usr/lib32",
+ "/usr/bin",
+ "/usr/sbin",
+ "/bin",
+ "/sbin",
+ "/lib",
+ "/lib32",
+ "/lib64",
+ "/etc",
+ "/var",
+ "/home",
+ "/root",
)
def __init__(self, preserve_sequence=None):
if preserve_sequence is None:
preserve_sequence = self._preserve_sequence
- self._block = tuple(x.lstrip('/') for x in preserve_sequence)
+ self._block = tuple(x.lstrip("/") for x in preserve_sequence)
def trigger(self, engine, uninstall):
uninstall.difference_update(pjoin(engine.offset, x) for x in self._block)
@@ -469,12 +487,11 @@ class BaseSystemUnmergeProtection(base):
class fix_uid_perms(base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = INSTALLING_MODES
- def __init__(self, uid=os_data.portage_uid,
- replacement=os_data.root_uid):
+ def __init__(self, uid=os_data.portage_uid, replacement=os_data.root_uid):
super().__init__()
self.bad_uid = uid
self.good_uid = replacement
@@ -488,12 +505,11 @@ class fix_uid_perms(base):
class fix_gid_perms(base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = INSTALLING_MODES
- def __init__(self, gid=os_data.portage_gid,
- replacement=os_data.root_gid):
+ def __init__(self, gid=os_data.portage_gid, replacement=os_data.root_gid):
super().__init__()
self.bad_gid = gid
self.good_gid = replacement
@@ -502,28 +518,30 @@ class fix_gid_perms(base):
good = self.good_gid
bad = self.bad_gid
- cset.update(x.change_attributes(gid=good)
- for x in cset if x.gid == bad)
+ cset.update(x.change_attributes(gid=good) for x in cset if x.gid == bad)
class fix_set_bits(base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = INSTALLING_MODES
def trigger(self, engine, cset):
reporter = engine.observer
# if s(uid|gid) *and* world writable...
- l = [x for x in cset.iterlinks(True) if
- (x.mode & 0o6000) and (x.mode & 0o002)]
+ l = [x for x in cset.iterlinks(True) if (x.mode & 0o6000) and (x.mode & 0o002)]
if reporter is not None:
for x in l:
if x.mode & 0o4000:
- reporter.warn(f"correcting unsafe world writable SetGID: {x.location}")
+ reporter.warn(
+ f"correcting unsafe world writable SetGID: {x.location}"
+ )
else:
- reporter.warn(f"correcting unsafe world writable SetUID: {x.location}")
+ reporter.warn(
+ f"correcting unsafe world writable SetUID: {x.location}"
+ )
if l:
# wipe setgid/setuid
cset.update(x.change_attributes(mode=x.mode & ~0o6002) for x in l)
@@ -531,8 +549,8 @@ class fix_set_bits(base):
class detect_world_writable(base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = INSTALLING_MODES
def __init__(self, fix_perms=False):
@@ -555,8 +573,8 @@ class detect_world_writable(base):
class PruneFiles(base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = INSTALLING_MODES
def __init__(self, sentinel_func):
@@ -577,15 +595,16 @@ class PruneFiles(base):
class CommonDirectoryModes(base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = INSTALLING_MODES
- directories = [pjoin('/usr', x) for x in ('.', 'lib', 'lib64', 'lib32',
- 'bin', 'sbin', 'local')]
- directories.extend(pjoin('/usr/share', x) for x in ('.', 'man', 'info'))
- directories.extend(f'/usr/share/man/man{x}' for x in range(1, 10))
- directories.extend(['/lib', '/lib32', '/lib64', '/etc', '/bin', '/sbin', '/var'])
+ directories = [
+ pjoin("/usr", x) for x in (".", "lib", "lib64", "lib32", "bin", "sbin", "local")
+ ]
+ directories.extend(pjoin("/usr/share", x) for x in (".", "man", "info"))
+ directories.extend(f"/usr/share/man/man{x}" for x in range(1, 10))
+ directories.extend(["/lib", "/lib32", "/lib64", "/etc", "/bin", "/sbin", "/var"])
directories = frozenset(map(normpath, directories))
def trigger(self, engine, cset):
@@ -596,13 +615,13 @@ class CommonDirectoryModes(base):
if x.location not in self.directories:
continue
if x.mode != 0o755:
- r.warn(f'{x.location} path has mode {oct(x.mode)}, should be 0755')
+ r.warn(f"{x.location} path has mode {oct(x.mode)}, should be 0755")
class BlockFileType(base):
- required_csets = ('new_cset',)
- _hooks = ('pre_merge',)
+ required_csets = ("new_cset",)
+ _hooks = ("pre_merge",)
_engine_types = INSTALLING_MODES
def __init__(self, bad_regex, regex_to_check=None, fatal=True):
@@ -613,7 +632,7 @@ class BlockFileType(base):
file_typer = file_type.file_identifier()
if self.filter_regex is None:
- filter_re = lambda x:True
+ filter_re = lambda x: True
else:
filter_re = re.compile(self.filter_regex).match
bad_pat = re.compile(self.bad_regex).match
@@ -627,46 +646,51 @@ class BlockFileType(base):
if self.fatal and bad_files:
raise errors.BlockModification(
self,
- ("blacklisted filetypes were encountered- "
- f"pattern {self.bad_regex!r} matched files: {sorted(bad_files)}"))
+ (
+ "blacklisted filetypes were encountered- "
+ f"pattern {self.bad_regex!r} matched files: {sorted(bad_files)}"
+ ),
+ )
class SavePkg(base):
- required_csets = ('raw_new_cset',)
+ required_csets = ("raw_new_cset",)
priority = 90
- _hooks = ('sanity_check',)
+ _hooks = ("sanity_check",)
_engine_types = INSTALLING_MODES
- _copy_source = 'new'
+ _copy_source = "new"
def __init__(self, target_repo, pristine=True, skip_if_source=True):
if not pristine:
- self._hooks = ('pre_merge',)
- self.required_csets = ('install',)
+ self._hooks = ("pre_merge",)
+ self.required_csets = ("install",)
self.skip_if_source = skip_if_source
self.target_repo = target_repo
def trigger(self, engine, cset):
pkg = getattr(engine, self._copy_source)
# don't build binpkgs of target repo binpkgs
- if self.skip_if_source and str(getattr(pkg, 'repo')) == self.target_repo.repo_id:
+ if (
+ self.skip_if_source
+ and str(getattr(pkg, "repo")) == self.target_repo.repo_id
+ ):
return
old_pkg = self.target_repo.match(pkg.versioned_atom)
- wrapped_pkg = MutatedPkg(pkg, {'contents':cset})
+ wrapped_pkg = MutatedPkg(pkg, {"contents": cset})
if old_pkg:
- txt = 'replacing'
+ txt = "replacing"
op = self.target_repo.operations.replace(*(old_pkg + [wrapped_pkg]))
else:
- txt = 'installing'
+ txt = "installing"
op = self.target_repo.operations.install(wrapped_pkg)
engine.observer.info(f"{txt} {pkg} to {self.target_repo.location}")
op.finish()
class SavePkgIfInPkgset(SavePkg):
-
def __init__(self, target_repo, pkgset, pristine=True, skip_if_source=True):
super().__init__(target_repo, pristine=pristine, skip_if_source=skip_if_source)
self.pkgset = pkgset
@@ -678,16 +702,15 @@ class SavePkgIfInPkgset(SavePkg):
class SavePkgUnmerging(SavePkg):
- required_csets = ('old_cset',)
+ required_csets = ("old_cset",)
_engine_types = UNINSTALLING_MODES
- _copy_source = 'old'
+ _copy_source = "old"
def __init__(self, target_repo):
self.target_repo = target_repo
class SavePkgUnmergingIfInPkgset(SavePkgUnmerging):
-
def __init__(self, target_repo, pkgset, pristine=True):
super().__init__(target_repo, pristine=pristine)
self.pkgset = pkgset
@@ -700,22 +723,29 @@ class SavePkgUnmergingIfInPkgset(SavePkgUnmerging):
class BinaryDebug(ThreadedTrigger):
- required_csets = ('install',)
+ required_csets = ("install",)
_engine_types = INSTALLING_MODES
- _hooks = ('pre_merge',)
+ _hooks = ("pre_merge",)
- default_strip_flags = ('--strip-unneeded', '-R', '.comment')
- elf_regex = r'(^| )ELF +(\d+-bit )'
+ default_strip_flags = ("--strip-unneeded", "-R", ".comment")
+ elf_regex = r"(^| )ELF +(\d+-bit )"
- def __init__(self, mode='split', strip_binary=None, objcopy_binary=None,
- extra_strip_flags=(), debug_storage='/usr/lib/debug/', compress=False):
+ def __init__(
+ self,
+ mode="split",
+ strip_binary=None,
+ objcopy_binary=None,
+ extra_strip_flags=(),
+ debug_storage="/usr/lib/debug/",
+ compress=False,
+ ):
self.mode = mode = mode.lower()
- if mode not in ('split', 'strip'):
+ if mode not in ("split", "strip"):
raise TypeError(f"mode {mode!r} is unknown; must be either split or strip")
- self.thread_trigger = getattr(self, f'_{mode}')
- self.threading_setup = getattr(self, f'_{mode}_setup')
- self.threading_finish = getattr(self, f'_{mode}_finish')
+ self.thread_trigger = getattr(self, f"_{mode}")
+ self.threading_setup = getattr(self, f"_{mode}_setup")
+ self.threading_finish = getattr(self, f"_{mode}_finish")
self._strip_binary = strip_binary
self._objcopy_binary = objcopy_binary
@@ -732,14 +762,14 @@ class BinaryDebug(ThreadedTrigger):
obj = process.find_binary(f"{pkg.chost}-{x}")
except process.CommandNotFound:
obj = process.find_binary(x)
- setattr(self, f'{x}_binary', obj)
+ setattr(self, f"{x}_binary", obj)
def _strip_fsobj(self, fs_obj, ftype, reporter, quiet=False):
args = self._strip_flags
if "executable" in ftype or "shared object" in ftype:
args += self._extra_strip_flags
elif "current ar archive" in ftype:
- args = ['-g']
+ args = ["-g"]
if not quiet:
reporter.info(f"stripping: {fs_obj} {' '.join(args)}")
ret = spawn.spawn([self.strip_binary] + args + [fs_obj.data.path])
@@ -763,7 +793,7 @@ class BinaryDebug(ThreadedTrigger):
return (engine, cset)
def _strip_setup(self, engine, cset):
- if 'strip' in getattr(engine.new, 'restrict', ()):
+ if "strip" in getattr(engine.new, "restrict", ()):
engine.observer.info(f"stripping disabled for {engine.new}")
return False
self._initialize_paths(engine.new, ("strip",))
@@ -779,15 +809,18 @@ class BinaryDebug(ThreadedTrigger):
if len(fs_objs) > 1:
self._modified.update(
stripped.change_attributes(location=fs_obj.location)
- for fs_obj in fs_objs[1:])
+ for fs_obj in fs_objs[1:]
+ )
def _strip_finish(self, engine, cset):
- if hasattr(self, '_modified'):
+ if hasattr(self, "_modified"):
cset.update(self._modified)
del self._modified
def _split_setup(self, engine, cset):
- skip = frozenset(['strip', 'splitdebug']).intersection(getattr(engine.new, 'restrict', ()))
+ skip = frozenset(["strip", "splitdebug"]).intersection(
+ getattr(engine.new, "restrict", ())
+ )
skip = bool(skip)
if not skip:
for fs_obj in cset:
@@ -795,7 +828,9 @@ class BinaryDebug(ThreadedTrigger):
skip = True
break
if skip:
- engine.observer.info(f"splitdebug disabled for {engine.new}, skipping splitdebug")
+ engine.observer.info(
+ f"splitdebug disabled for {engine.new}, skipping splitdebug"
+ )
return False
self._initialize_paths(engine.new, ("strip", "objcopy"))
@@ -803,48 +838,59 @@ class BinaryDebug(ThreadedTrigger):
return True
def _split(self, iterable, observer, engine, cset):
- debug_store = pjoin(engine.offset, self._debug_storage.lstrip('/'))
+ debug_store = pjoin(engine.offset, self._debug_storage.lstrip("/"))
- objcopy_args = [self.objcopy_binary, '--only-keep-debug']
+ objcopy_args = [self.objcopy_binary, "--only-keep-debug"]
if self._compress:
- objcopy_args.append('--compress-debug-sections')
+ objcopy_args.append("--compress-debug-sections")
for fs_objs, ftype in iterable:
- if 'ar archive' in ftype:
+ if "ar archive" in ftype:
continue
- if 'relocatable' in ftype:
+ if "relocatable" in ftype:
if not any(x.basename.endswith(".ko") for x in fs_objs):
continue
fs_obj = fs_objs[0]
- debug_loc = pjoin(debug_store, fs_obj.location.lstrip('/') + ".debug")
+ debug_loc = pjoin(debug_store, fs_obj.location.lstrip("/") + ".debug")
if debug_loc in cset:
continue
fpath = fs_obj.data.path
- debug_ondisk = pjoin(os.path.dirname(fpath), os.path.basename(fpath) + ".debug")
+ debug_ondisk = pjoin(
+ os.path.dirname(fpath), os.path.basename(fpath) + ".debug"
+ )
# note that we tell the UI the final pathway- not the intermediate one.
observer.info(f"splitdebug'ing {fs_obj.location} into {debug_loc}")
ret = spawn.spawn(objcopy_args + [fpath, debug_ondisk])
if ret != 0:
- observer.warn(f"splitdebug'ing {fs_obj.location} failed w/ exitcode {ret}")
+ observer.warn(
+ f"splitdebug'ing {fs_obj.location} failed w/ exitcode {ret}"
+ )
continue
# note that the given pathway to the debug file /must/ be relative to ${D};
# it must exist at the time of invocation.
- ret = spawn.spawn([self.objcopy_binary,
- '--add-gnu-debuglink', debug_ondisk, fpath])
+ ret = spawn.spawn(
+ [self.objcopy_binary, "--add-gnu-debuglink", debug_ondisk, fpath]
+ )
if ret != 0:
observer.warn(
f"splitdebug created debug file {debug_ondisk!r}, but "
- f"failed adding links to {fpath!r} ({ret!r})")
- observer.debug("failed splitdebug command was %r",
- (self.objcopy_binary, '--add-gnu-debuglink', debug_ondisk, fpath))
+ f"failed adding links to {fpath!r} ({ret!r})"
+ )
+ observer.debug(
+ "failed splitdebug command was %r",
+ (self.objcopy_binary, "--add-gnu-debuglink", debug_ondisk, fpath),
+ )
continue
-
- debug_obj = gen_obj(debug_loc, real_location=debug_ondisk,
- uid=os_data.root_uid, gid=os_data.root_gid)
+ debug_obj = gen_obj(
+ debug_loc,
+ real_location=debug_ondisk,
+ uid=os_data.root_uid,
+ gid=os_data.root_gid,
+ )
stripped_fsobj = self._strip_fsobj(fs_obj, ftype, observer, quiet=True)
@@ -852,14 +898,18 @@ class BinaryDebug(ThreadedTrigger):
self._modified.add(debug_obj)
for fs_obj in fs_objs[1:]:
- debug_loc = pjoin(debug_store, fs_obj.location.lstrip('/') + ".debug")
+ debug_loc = pjoin(debug_store, fs_obj.location.lstrip("/") + ".debug")
linked_debug_obj = debug_obj.change_attributes(location=debug_loc)
- observer.info(f"splitdebug hardlinking {debug_obj.location} to {debug_loc}")
+ observer.info(
+ f"splitdebug hardlinking {debug_obj.location} to {debug_loc}"
+ )
self._modified.add(linked_debug_obj)
- self._modified.add(stripped_fsobj.change_attributes(location=fs_obj.location))
+ self._modified.add(
+ stripped_fsobj.change_attributes(location=fs_obj.location)
+ )
def _split_finish(self, engine, cset):
- if not hasattr(self, '_modified'):
+ if not hasattr(self, "_modified"):
return
self._modified.add_missing_directories(mode=0o775)
# add the non directories first.
@@ -872,8 +922,15 @@ class BinaryDebug(ThreadedTrigger):
def default_plugins_triggers() -> tuple[type[base]]:
triggers = (
- ldconfig, merge, unmerge,
- fix_uid_perms, fix_gid_perms, fix_set_bits, detect_world_writable,
- InfoRegen, CommonDirectoryModes, BaseSystemUnmergeProtection,
+ ldconfig,
+ merge,
+ unmerge,
+ fix_uid_perms,
+ fix_gid_perms,
+ fix_set_bits,
+ detect_world_writable,
+ InfoRegen,
+ CommonDirectoryModes,
+ BaseSystemUnmergeProtection,
)
return tuple(sorted(triggers, reverse=True, key=lambda x: (x.priority, x.__name__)))
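
default_plugins_triggers() keeps its ordering semantics: triggers are sorted by (priority, class name) in reverse, so higher priorities run first and ties fall back to reverse-alphabetical names. A tiny sketch with made-up trigger classes:

def ordered(triggers):
    # same key as default_plugins_triggers above
    return tuple(sorted(triggers, reverse=True, key=lambda t: (t.priority, t.__name__)))

class High:
    priority = 90

class AlsoHigh:
    priority = 90

class Low:
    priority = -100

print([t.__name__ for t in ordered((Low, High, AlsoHigh))])  # ['High', 'AlsoHigh', 'Low']
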
diff --git a/src/pkgcore/operations/__init__.py b/src/pkgcore/operations/__init__.py
index 6eb6422e3..b59789a50 100644
--- a/src/pkgcore/operations/__init__.py
+++ b/src/pkgcore/operations/__init__.py
@@ -20,7 +20,6 @@ from ..operations import observer as _observer
class OperationError(PkgcoreException):
-
def __init__(self, api, exc=None):
self._api = api
self._exc = exc
@@ -50,8 +49,11 @@ class base:
@klass.cached_property
def raw_operations(self):
- return frozenset(x[len("_cmd_api_"):] for x in dir(self.__class__)
- if x.startswith("_cmd_api_"))
+ return frozenset(
+ x[len("_cmd_api_") :]
+ for x in dir(self.__class__)
+ if x.startswith("_cmd_api_")
+ )
@klass.cached_property
def enabled_operations(self):
@@ -74,7 +76,8 @@ class base:
def _wrap_exception(self, functor, name):
f = partial(
- self._recast_exception_decorator, self.__casting_exception__, name, functor)
+ self._recast_exception_decorator, self.__casting_exception__, name, functor
+ )
return pretty_docs(f)
def _setup_api(self):
@@ -84,15 +87,15 @@ class base:
else:
f = self._wrap_exception
for op in self.enabled_operations:
- setattr(self, op, f(getattr(self, '_cmd_api_%s' % op), op))
+ setattr(self, op, f(getattr(self, "_cmd_api_%s" % op), op))
def _filter_disabled_commands(self, sequence):
for command in sequence:
- obj = getattr(self, '_cmd_api_%s' % command, None)
- if not getattr(obj, '_is_standalone', False):
- if not hasattr(self, '_cmd_implementation_%s' % command):
+ obj = getattr(self, "_cmd_api_%s" % command, None)
+ if not getattr(obj, "_is_standalone", False):
+ if not hasattr(self, "_cmd_implementation_%s" % command):
continue
- check_f = getattr(self, '_cmd_check_support_%s' % command, None)
+ check_f = getattr(self, "_cmd_check_support_%s" % command, None)
if check_f is not None and not check_f():
continue
yield command
@@ -117,7 +120,7 @@ class base:
:return: Either the value of or_return, or if the operation is
supported, the return value from that operation
"""
- kwds.setdefault('observer', getattr(self, 'observer', self._get_observer()))
+ kwds.setdefault("observer", getattr(self, "observer", self._get_observer()))
ret = kwds.pop("or_return", self.UNSUPPORTED)
if self.supports(operation_name):
ret = getattr(self, operation_name)(*args, **kwds)
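
The operations base class advertises commands by scanning for the _cmd_api_ prefix; the hunk above only re-wraps that expression. A toy version of the same introspection (using a plain property instead of pkgcore's cached one):

class Ops:
    # every _cmd_api_* method is advertised under its suffix
    def _cmd_api_fetch(self):
        pass

    def _cmd_api_build(self):
        pass

    @property
    def raw_operations(self):
        prefix = "_cmd_api_"
        return frozenset(
            name[len(prefix):] for name in dir(self.__class__) if name.startswith(prefix)
        )

print(sorted(Ops().raw_operations))  # ['build', 'fetch']
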
diff --git a/src/pkgcore/operations/domain.py b/src/pkgcore/operations/domain.py
index 6f0a6a13c..b3a703699 100644
--- a/src/pkgcore/operations/domain.py
+++ b/src/pkgcore/operations/domain.py
@@ -18,7 +18,6 @@ from ..package.mutated import MutatedPkg
class fake_lock:
-
def __init__(self):
pass
@@ -50,13 +49,13 @@ class base(metaclass=ForcedDepends):
self.lock = fake_lock()
def create_op(self):
- raise NotImplementedError(self, 'create_op')
+ raise NotImplementedError(self, "create_op")
def create_repo_op(self):
- raise NotImplementedError(self, 'create_repo_op')
+ raise NotImplementedError(self, "create_repo_op")
def create_engine(self):
- raise NotImplementedError(self, 'create_repo_op')
+ raise NotImplementedError(self, "create_repo_op")
def _create_tempspace(self):
location = self.domain.pm_tmpdir
@@ -107,7 +106,7 @@ class base(metaclass=ForcedDepends):
raise NotImplementedError
def __del__(self):
- if getattr(self, 'underway', False):
+ if getattr(self, "underway", False):
logger.warning(f"{self} merge was underway, but wasn't completed")
self.lock.release_write_lock()
self.clean_tempdir()
@@ -138,8 +137,9 @@ class install(base):
super().__init__(domain, repo, observer, offset)
def create_op(self):
- self.format_op = getattr(
- self.new_pkg, self.format_install_op_name)(self.domain, self.observer)
+ self.format_op = getattr(self.new_pkg, self.format_install_op_name)(
+ self.domain, self.observer
+ )
def create_repo_op(self):
self.repo_op = self.repo.operations.install(self.new_pkg, self.observer)
@@ -147,8 +147,8 @@ class install(base):
def create_engine(self):
return self.engine_kls(
- self.tempspace, self.new_pkg,
- offset=self.offset, observer=self.observer)
+ self.tempspace, self.new_pkg, offset=self.offset, observer=self.observer
+ )
def preinst(self):
"""execute any pre-transfer steps required"""
@@ -203,9 +203,9 @@ class uninstall(base):
super().__init__(domain, repo, observer, offset)
def create_op(self):
- self.format_op = getattr(
- self.old_pkg,
- self.format_uninstall_op_name)(self.domain, self.observer)
+ self.format_op = getattr(self.old_pkg, self.format_uninstall_op_name)(
+ self.domain, self.observer
+ )
def create_repo_op(self):
self.repo_op = self.repo.operations.uninstall(self.old_pkg, self.observer)
@@ -213,8 +213,8 @@ class uninstall(base):
def create_engine(self):
return self.engine_kls(
- self.tempspace, self.old_pkg,
- offset=self.offset, observer=self.observer)
+ self.tempspace, self.old_pkg, offset=self.offset, observer=self.observer
+ )
def prerm(self):
"""execute any pre-removal steps required"""
@@ -222,7 +222,11 @@ class uninstall(base):
def remove(self):
"""execute any removal steps required"""
- for unmerge_phase in (self.me.pre_unmerge, self.me.unmerge, self.me.post_unmerge):
+ for unmerge_phase in (
+ self.me.pre_unmerge,
+ self.me.unmerge,
+ self.me.post_unmerge,
+ ):
unmerge_phase()
return True
@@ -237,11 +241,13 @@ class uninstall(base):
ret = self.format_op.finalize()
self.format_op.cleanup(disable_observer=True)
if not ret:
- logger.warning(f"ignoring unexpected result from uninstall finalize- {ret!r}")
+ logger.warning(
+ f"ignoring unexpected result from uninstall finalize- {ret!r}"
+ )
return base.finish(self)
def __del__(self):
- if getattr(self, 'underway', False):
+ if getattr(self, "underway", False):
logger.warning(f"{self.old_pkg} unmerge was underway, but wasn't completed")
self.lock.release_write_lock()
@@ -267,8 +273,14 @@ class replace(install, uninstall):
}
stage_hooks = [
- "merge_metadata", "unmerge_metadata", "postrm", "prerm", "postinst",
- "preinst", "unmerge_metadata", "merge_metadata",
+ "merge_metadata",
+ "unmerge_metadata",
+ "postrm",
+ "prerm",
+ "postinst",
+ "preinst",
+ "unmerge_metadata",
+ "merge_metadata",
]
engine_kls = staticmethod(MergeEngine.replace)
format_replace_op_name = "_repo_replace_op"
@@ -280,18 +292,24 @@ class replace(install, uninstall):
def create_op(self):
self.format_op = getattr(self.new_pkg, self.format_replace_op_name)(
- self.domain, self.old_pkg, self.observer)
+ self.domain, self.old_pkg, self.observer
+ )
return True
def create_repo_op(self):
self.repo_op = self.repo.operations.replace(
- self.old_pkg, self.new_pkg, self.observer)
+ self.old_pkg, self.new_pkg, self.observer
+ )
return True
def create_engine(self):
return self.engine_kls(
- self.tempspace, self.old_pkg, self.new_pkg,
- offset=self.offset, observer=self.observer)
+ self.tempspace,
+ self.old_pkg,
+ self.new_pkg,
+ offset=self.offset,
+ observer=self.observer,
+ )
def finish(self):
ret = self.format_op.finalize()
@@ -300,8 +318,9 @@ class replace(install, uninstall):
return base.finish(self)
def __del__(self):
- if getattr(self, 'underway', False):
+ if getattr(self, "underway", False):
logger.warning(
f"{self.old_pkg} -> {self.new_pkg} replacement was underway, but "
- "wasn't completed")
+ "wasn't completed"
+ )
self.lock.release_write_lock()
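
The domain operation classes above rely on snakeoil's ForcedDepends metaclass together with stage_depends mappings such as {"finish": "start"}. As a rough intuition only (this is not snakeoil's implementation), the mapping says which stage(s) must have run before a given stage:

def run_stage(obj, stage, stage_depends, done=None):
    # rough sketch: run declared prerequisites first, then the stage itself
    done = set() if done is None else done
    deps = stage_depends.get(stage, ())
    if isinstance(deps, str):
        deps = (deps,)
    for dep in deps:
        if dep not in done:
            run_stage(obj, dep, stage_depends, done)
    getattr(obj, stage)()
    done.add(stage)

class Build:
    stage_depends = {"finish": "start"}

    def start(self):
        print("start")

    def finish(self):
        print("finish")

run_stage(Build(), "finish", Build.stage_depends)  # prints "start" then "finish"
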
diff --git a/src/pkgcore/operations/format.py b/src/pkgcore/operations/format.py
index d66eaf192..dfbd615a9 100644
--- a/src/pkgcore/operations/format.py
+++ b/src/pkgcore/operations/format.py
@@ -3,8 +3,15 @@ build operation
"""
__all__ = (
- 'build_base', 'install', 'uninstall', 'replace', 'fetch_base',
- 'empty_build_op', 'FailedDirectory', 'GenericBuildError', 'FetchError',
+ "build_base",
+ "install",
+ "uninstall",
+ "replace",
+ "fetch_base",
+ "empty_build_op",
+ "FailedDirectory",
+ "GenericBuildError",
+ "FetchError",
)
import os
@@ -20,7 +27,6 @@ from ..fetch import errors as fetch_errors
class fetch_base:
-
def __init__(self, domain, pkg, fetchables, distdir=None):
self.verified_files = {}
self._basenames = set()
@@ -30,11 +36,12 @@ class fetch_base:
self.distdir = distdir if distdir is not None else domain.distdir
# create fetcher
- fetchcmd = domain.settings['FETCHCOMMAND']
- resumecmd = domain.settings.get('RESUMECOMMAND', fetchcmd)
- attempts = int(domain.settings.get('FETCH_ATTEMPTS', 10))
+ fetchcmd = domain.settings["FETCHCOMMAND"]
+ resumecmd = domain.settings.get("RESUMECOMMAND", fetchcmd)
+ attempts = int(domain.settings.get("FETCH_ATTEMPTS", 10))
self.fetcher = fetch_custom.fetcher(
- self.distdir, fetchcmd, resumecmd, attempts=attempts)
+ self.distdir, fetchcmd, resumecmd, attempts=attempts
+ )
def fetch_all(self, observer):
# TODO: add parallel fetch support
@@ -54,13 +61,15 @@ class fetch_base:
except fetch_errors.ChksumFailure as e:
# checksum failed, rename file and try refetching
path = pjoin(self.distdir, fetchable.filename)
- failed_filename = f'{fetchable.filename}._failed_chksum_'
+ failed_filename = f"{fetchable.filename}._failed_chksum_"
failed_path = pjoin(self.distdir, failed_filename)
os.rename(path, failed_path)
if retry:
raise
observer.error(str(e))
- observer.error(f'renaming to {failed_filename!r} and refetching from upstream')
+ observer.error(
+ f"renaming to {failed_filename!r} and refetching from upstream"
+ )
observer.flush()
# refetch directly from upstream
return self.fetch_one(fetchable.upstream, observer, retry=True)
@@ -77,8 +86,9 @@ class operations(_operations_mod.base):
_fetch_kls = fetch_base
- def __init__(self, domain, pkg, observer=None, disable_overrides=(),
- enable_overrides=()):
+ def __init__(
+ self, domain, pkg, observer=None, disable_overrides=(), enable_overrides=()
+ ):
self.observer = observer
self.pkg = pkg
self.domain = domain
@@ -90,7 +100,7 @@ class operations(_operations_mod.base):
@_operations_mod.is_standalone
def _cmd_api_mergable(self):
- return getattr(self.pkg, 'built', False)
+ return getattr(self.pkg, "built", False)
def _cmd_api_sanity_check(self):
return self._cmd_implementation_sanity_check(self.domain)
@@ -101,17 +111,18 @@ class operations(_operations_mod.base):
def _cmd_api_localize(self, force=False, observer=klass.sentinel):
observer = observer if observer is not klass.sentinel else self.observer
return self._cmd_implementation_localize(
- self._get_observer(observer), force=force)
+ self._get_observer(observer), force=force
+ )
def _cmd_api_cleanup(self, force=False, observer=klass.sentinel):
observer = observer if observer is not klass.sentinel else self.observer
return self._cmd_implementation_cleanup(
- self._get_observer(observer), force=force)
+ self._get_observer(observer), force=force
+ )
def _cmd_api_configure(self, observer=klass.sentinel):
observer = observer if observer is not klass.sentinel else self.observer
- return self._cmd_implementation_configure(
- self._get_observer(observer))
+ return self._cmd_implementation_configure(self._get_observer(observer))
@_operations_mod.is_standalone
def _cmd_api_fetch(self, fetchables=None, observer=klass.sentinel, distdir=None):
@@ -125,7 +136,7 @@ class operations(_operations_mod.base):
if failures:
# run pkg_nofetch phase for fetch restricted pkgs
- if 'fetch' in self.pkg.restrict:
+ if "fetch" in self.pkg.restrict:
# This requires wrapped packages from a configured repo, otherwise
# buildables aren't available to run the pkg_nofetch phase.
configured_repo = self.domain.unfiltered_repos[self.pkg.repo.repo_id]
@@ -134,8 +145,12 @@ class operations(_operations_mod.base):
build_ops.nofetch()
build_ops.cleanup(force=True)
for fetchable in failures:
- observer.error('failed fetching %s', fetchable.uri)
- observer.error('failed fetching files for package %s::%s', self.pkg.unversioned_atom, self.pkg.repo.repo_id)
+ observer.error("failed fetching %s", fetchable.uri)
+ observer.error(
+ "failed fetching files for package %s::%s",
+ self.pkg.unversioned_atom,
+ self.pkg.repo.repo_id,
+ )
raise FetchError(failures)
self.verified_files = verified
@@ -148,9 +163,8 @@ class build_operations(operations):
def _cmd_api_build(self, observer=None, failed=False, clean=True, **kwargs):
return self._cmd_implementation_build(
- self._get_observer(observer),
- self.verified_files,
- clean=clean, **kwargs)
+ self._get_observer(observer), self.verified_files, clean=clean, **kwargs
+ )
def _cmd_api_buildable(self, domain):
return self._cmd_implementation_buildable(domain)
@@ -161,7 +175,7 @@ class build_operations(operations):
class build_base(metaclass=ForcedDepends):
- stage_depends = {'finish': 'start'}
+ stage_depends = {"finish": "start"}
def __init__(self, domain, observer):
self.domain = domain
@@ -224,12 +238,15 @@ class build(build_base):
for k in ("setup", "unpack", "configure", "compile", "test", "install"):
locals()[k].__doc__ = (
"execute any %s steps required; "
- "implementations of this interface should overide this as needed"
- % k)
+ "implementations of this interface should overide this as needed" % k
+ )
for k in ("setup", "unpack", "configure", "compile", "test", "install", "finalize"):
o = locals()[k]
- o.__doc__ = "\n".join(x.lstrip() for x in o.__doc__.split("\n") + [
- ":return: True on success, False on failure"])
+ o.__doc__ = "\n".join(
+ x.lstrip()
+ for x in o.__doc__.split("\n")
+ + [":return: True on success, False on failure"]
+ )
del o, k
diff --git a/src/pkgcore/operations/observer.py b/src/pkgcore/operations/observer.py
index 8444b462d..fdc0d9587 100644
--- a/src/pkgcore/operations/observer.py
+++ b/src/pkgcore/operations/observer.py
@@ -1,6 +1,10 @@
__all__ = (
- "null_output", "formatter_output", "file_handle_output",
- "phase_observer", "repo_observer", "decorate_build_method",
+ "null_output",
+ "formatter_output",
+ "file_handle_output",
+ "phase_observer",
+ "repo_observer",
+ "decorate_build_method",
)
import threading
@@ -16,24 +20,25 @@ def _convert(msg, args=(), kwds={}):
if kwds:
raise TypeError(
"both position and optional args cannot be "
- "supplied: given msg(%r), args(%r), kwds(%r)"
- % (msg, args, kwds))
+ "supplied: given msg(%r), args(%r), kwds(%r)" % (msg, args, kwds)
+ )
try:
return msg % args
except (ValueError, TypeError) as e:
raise TypeError(
- f"observer interpolation error: {e}, msg={msg!r}, args={args!r}")
+ f"observer interpolation error: {e}, msg={msg!r}, args={args!r}"
+ )
elif kwds:
try:
return msg % kwds
except (KeyError, TypeError, ValueError) as e:
raise TypeError(
- f"observer interpolation error: {e}, msg={msg!r}, kwds={kwds!r}")
+ f"observer interpolation error: {e}, msg={msg!r}, kwds={kwds!r}"
+ )
return msg
class null_output:
-
def warn(self, msg, *args, **kwds):
pass
@@ -54,27 +59,29 @@ class null_output:
class formatter_output(null_output):
-
def __init__(self, out):
self._out = out
- self.verbosity = getattr(out, 'verbosity', 0)
+ self.verbosity = getattr(out, "verbosity", 0)
def debug(self, msg, *args, **kwds):
self._out.write(_convert("debug: " + msg, args, kwds))
def error(self, msg, *args, **kwds):
prefixes = kwds.pop(
- 'prefixes', (self._out.fg('red'), self._out.bold, ' * ', self._out.reset))
+ "prefixes", (self._out.fg("red"), self._out.bold, " * ", self._out.reset)
+ )
self._out.write(_convert(msg, args, kwds), prefixes=prefixes)
def info(self, msg, *args, **kwds):
prefixes = kwds.pop(
- 'prefixes', (self._out.fg('green'), self._out.bold, ' * ', self._out.reset))
+ "prefixes", (self._out.fg("green"), self._out.bold, " * ", self._out.reset)
+ )
self._out.write(_convert(msg, args, kwds), prefixes=prefixes)
def warn(self, msg, *args, **kwds):
prefixes = kwds.pop(
- 'prefixes', (self._out.fg('yellow'), self._out.bold, ' * ', self._out.reset))
+ "prefixes", (self._out.fg("yellow"), self._out.bold, " * ", self._out.reset)
+ )
self._out.write(_convert(msg, args, kwds), prefixes=prefixes)
def write(self, msg, *args, autoline=False, **kwds):
@@ -85,7 +92,6 @@ class formatter_output(null_output):
class file_handle_output(formatter_output):
-
def debug(self, msg, *args, **kwds):
self._out.write(f"debug: {_convert(msg, args, kwds)}\n")
@@ -103,10 +109,9 @@ class file_handle_output(formatter_output):
class phase_observer:
-
def __init__(self, output, debug=False):
self._output = output
- self.verbosity = getattr(output, 'verbosity', 0)
+ self.verbosity = getattr(output, "verbosity", 0)
self._debug = debug
def phase_start(self, phase):
@@ -129,7 +134,6 @@ class phase_observer:
class repo_observer(phase_observer):
-
def trigger_start(self, hook, trigger):
if self._debug:
self._output.write(f"hook {hook}: trigger: starting {trigger!r}\n", hook)
@@ -153,11 +157,11 @@ def _mk_observer_proxy(target):
class foo(target):
for x in set(dir(target)).difference(dir(object)):
locals()[x] = pre_curry(_reflection_func, x)
+
return foo
class threadsafe_repo_observer(_mk_observer_proxy(repo_observer)):
-
def __init__(self, observer):
self._observer = observer
self._lock = threading.Lock()
@@ -172,7 +176,7 @@ class threadsafe_repo_observer(_mk_observer_proxy(repo_observer)):
def wrap_build_method(phase, method, self, *args, **kwds):
disable_observer = kwds.pop("disable_observer", False)
- if not hasattr(self.observer, 'phase_start') or disable_observer:
+ if not hasattr(self.observer, "phase_start") or disable_observer:
return method(self, *args, **kwds)
self.observer.phase_start(phase)
ret = False
@@ -186,4 +190,5 @@ def wrap_build_method(phase, method, self, *args, **kwds):
def decorate_build_method(phase):
def f(func):
return pre_curry(wrap_build_method, phase, func)
+
return f
diff --git a/src/pkgcore/operations/regen.py b/src/pkgcore/operations/regen.py
index 893a78783..11ef28988 100644
--- a/src/pkgcore/operations/regen.py
+++ b/src/pkgcore/operations/regen.py
@@ -20,12 +20,12 @@ def regen_iter(iterable, regen_func, observer):
yield pkg, e
-def regen_repository(repo, pkgs, observer, threads=1, pkg_attr='keywords', **kwargs):
+def regen_repository(repo, pkgs, observer, threads=1, pkg_attr="keywords", **kwargs):
helpers = []
def _get_repo_helper():
- if not hasattr(repo, '_regen_operation_helper'):
- return lambda pkg: getattr(pkg, 'keywords')
+ if not hasattr(repo, "_regen_operation_helper"):
+ return lambda pkg: getattr(pkg, "keywords")
# for an actual helper, track it and invoke .finish if it exists.
helper = repo._regen_operation_helper(**kwargs)
helpers.append(helper)
diff --git a/src/pkgcore/operations/repo.py b/src/pkgcore/operations/repo.py
index 1106f5dd2..fedbc67b8 100644
--- a/src/pkgcore/operations/repo.py
+++ b/src/pkgcore/operations/repo.py
@@ -3,8 +3,13 @@ repository modifications (installing, removing, replacing)
"""
__all__ = (
- "Failure", "base", "install", "uninstall", "replace",
- "operations", "operations_proxy"
+ "Failure",
+ "base",
+ "install",
+ "uninstall",
+ "replace",
+ "operations",
+ "operations_proxy",
)
from functools import partial
@@ -56,7 +61,7 @@ class base(metaclass=ForcedDepends):
return True
def finalize_data(self):
- raise NotImplementedError(self, 'finalize_data')
+ raise NotImplementedError(self, "finalize_data")
def finish(self):
self.lock.release_write_lock()
@@ -67,10 +72,10 @@ class base(metaclass=ForcedDepends):
class install(base):
stage_depends = {
- 'finish': '_notify_repo_add',
- '_notify_repo_add': 'finalize_data',
- 'finalize_data': 'add_data',
- 'add_data': 'start'
+ "finish": "_notify_repo_add",
+ "_notify_repo_add": "finalize_data",
+ "finalize_data": "add_data",
+ "add_data": "start",
}
description = "install"
@@ -84,7 +89,7 @@ class install(base):
return True
def add_data(self):
- raise NotImplementedError(self, 'add_data')
+ raise NotImplementedError(self, "add_data")
def _update_pkg_contents(self, contents):
self.new_pkg = MutatedPkg(self.new_pkg, {"contents": contents})
@@ -93,10 +98,10 @@ class install(base):
class uninstall(base):
stage_depends = {
- 'finish': '_notify_repo_remove',
- '_notify_repo_remove': 'finalize_data',
- 'finalize_data': 'remove_data',
- 'remove_data': 'start'
+ "finish": "_notify_repo_remove",
+ "_notify_repo_remove": "finalize_data",
+ "finalize_data": "remove_data",
+ "remove_data": "start",
}
description = "uninstall"
@@ -110,18 +115,18 @@ class uninstall(base):
return True
def remove_data(self):
- raise NotImplementedError(self, 'remove_data')
+ raise NotImplementedError(self, "remove_data")
class replace(install, uninstall):
stage_depends = {
- 'finish': '_notify_repo_add',
- '_notify_repo_add': 'finalize_data',
- 'finalize_data': ('add_data', '_notify_repo_remove'),
- '_notify_repo_remove': 'remove_data',
- 'remove_data': 'start',
- 'add_data': 'start'
+ "finish": "_notify_repo_add",
+ "_notify_repo_add": "finalize_data",
+ "finalize_data": ("add_data", "_notify_repo_remove"),
+ "_notify_repo_remove": "remove_data",
+ "remove_data": "start",
+ "add_data": "start",
}
description = "replace"
@@ -133,7 +138,6 @@ class replace(install, uninstall):
class sync_operations(operations_mod.base):
-
def __init__(self, repository, disable_overrides=(), enable_overrides=()):
self.repo = repository
super().__init__(disable_overrides, enable_overrides)
@@ -148,12 +152,12 @@ class sync_operations(operations_mod.base):
return ret
def _get_syncer(self, lazy=False):
- syncer = getattr(self.repo, '_syncer', klass.sentinel)
+ syncer = getattr(self.repo, "_syncer", klass.sentinel)
if syncer is klass.sentinel:
# raw repo's vs non-raw; drive down to the raw repo.
# see pkgcore.ebuild.repository for an example
- syncer = getattr(self.repo, 'config', None)
- syncer = getattr(syncer, '_syncer', None)
+ syncer = getattr(self.repo, "config", None)
+ syncer = getattr(syncer, "_syncer", None)
if not lazy and not isinstance(syncer, _sync_base.Syncer):
syncer = syncer.instantiate()
@@ -167,12 +171,13 @@ class sync_operations(operations_mod.base):
class operations(sync_operations):
-
def _disabled_if_frozen(self, command):
if self.repo.frozen:
logger.debug(
"disabling repo(%r) command(%r) due to repo being frozen",
- self.repo, command)
+ self.repo,
+ command,
+ )
return not self.repo.frozen
def _get_observer(self, observer=None):
@@ -181,20 +186,20 @@ class operations(sync_operations):
return observer
def _cmd_api_install(self, pkg, observer=None):
- return self._cmd_implementation_install(
- pkg, self._get_observer(observer))
+ return self._cmd_implementation_install(pkg, self._get_observer(observer))
def _cmd_api_uninstall(self, pkg, observer=None):
- return self._cmd_implementation_uninstall(
- pkg, self._get_observer(observer))
+ return self._cmd_implementation_uninstall(pkg, self._get_observer(observer))
def _cmd_api_replace(self, oldpkg, newpkg, observer=None):
return self._cmd_implementation_replace(
- oldpkg, newpkg, self._get_observer(observer))
+ oldpkg, newpkg, self._get_observer(observer)
+ )
def _cmd_api_install_or_replace(self, newpkg, observer=None):
return self._cmd_implementation_install_or_replace(
- newpkg, self._get_observer(observer))
+ newpkg, self._get_observer(observer)
+ )
def _cmd_implementation_install_or_replace(self, newpkg, observer=None):
match = self.repo.match(newpkg.versioned_atom)
@@ -204,14 +209,14 @@ class operations(sync_operations):
return self.replace(match[0], newpkg, observer=observer)
for x in ("install", "uninstall", "replace", "install_or_replace"):
- locals()["_cmd_check_support_%s" % x] = post_curry(
- _disabled_if_frozen, x)
+ locals()["_cmd_check_support_%s" % x] = post_curry(_disabled_if_frozen, x)
del x
def _cmd_api_configure(self, pkg, observer=None):
return self._cmd_implementation_configure(
- self.repo, pkg, self._get_observer(observer))
+ self.repo, pkg, self._get_observer(observer)
+ )
def _cmd_implementation_clean_cache(self, pkgs=None):
"""Clean stale and invalid cache entries up."""
@@ -227,10 +232,10 @@ class operations(sync_operations):
@operations_mod.is_standalone
def _cmd_api_regen_cache(self, observer=None, threads=1, **kwargs):
- cache = getattr(self.repo, 'cache', None)
- if not cache and not kwargs.get('force', False):
+ cache = getattr(self.repo, "cache", None)
+ if not cache and not kwargs.get("force", False):
return
- sync_rate = getattr(cache, 'sync_rate', None)
+ sync_rate = getattr(cache, "sync_rate", None)
try:
if sync_rate is not None:
cache.set_sync_rate(1000000)
@@ -243,15 +248,18 @@ class operations(sync_operations):
observer = self._get_observer(observer)
for pkg, e in regen.regen_repository(
- self.repo, pkgs, observer=observer, threads=threads, **kwargs):
- observer.error(f'caught exception {e} while processing {pkg.cpvstr}')
+ self.repo, pkgs, observer=observer, threads=threads, **kwargs
+ ):
+ observer.error(f"caught exception {e} while processing {pkg.cpvstr}")
errors += 1
# report pkgs with bad metadata -- relies on iterating over the
# unfiltered repo to populate the masked repo
pkgs = frozenset(pkg.cpvstr for pkg in self.repo)
for pkg in sorted(self.repo._bad_masked):
- observer.error(f'{pkg.cpvstr}: {pkg.data.msg(verbosity=observer.verbosity)}')
+ observer.error(
+ f"{pkg.cpvstr}: {pkg.data.msg(verbosity=observer.verbosity)}"
+ )
errors += 1
# remove old/invalid cache entries
@@ -264,8 +272,8 @@ class operations(sync_operations):
self.repo.operations.run_if_supported("flush_cache")
def _get_caches(self):
- caches = getattr(self.repo, 'cache', ())
- if not hasattr(caches, 'commit'):
+ caches = getattr(self.repo, "cache", ())
+ if not hasattr(caches, "commit"):
return caches
return [caches]
@@ -276,7 +284,9 @@ class operations(sync_operations):
def _cmd_api_manifest(self, domain, restriction, observer=None, **kwargs):
observer = self._get_observer(observer)
- return self._cmd_implementation_manifest(domain, restriction, observer, **kwargs)
+ return self._cmd_implementation_manifest(
+ domain, restriction, observer, **kwargs
+ )
class operations_proxy(operations):
diff --git a/src/pkgcore/os_data.py b/src/pkgcore/os_data.py
index 940159fa2..d2c4e7667 100644
--- a/src/pkgcore/os_data.py
+++ b/src/pkgcore/os_data.py
@@ -5,8 +5,14 @@ This will be killed off and bound into configuration subsystem at some point
"""
__all__ = (
- "ostype", "portage_gid", "portage_uid", "root_gid", "root_uid",
- "userland", "wheelgid", "xargs",
+ "ostype",
+ "portage_gid",
+ "portage_uid",
+ "root_gid",
+ "root_uid",
+ "userland",
+ "wheelgid",
+ "xargs",
)
import grp
@@ -48,8 +54,9 @@ except KeyError:
try:
portage_uid = pwd.getpwnam("portage").pw_uid
portage_gid = grp.getgrnam("portage").gr_gid
- portage_user_groups = tuple(x.gr_name for x in grp.getgrall()
- if 'portage' in x.gr_mem)
+ portage_user_groups = tuple(
+ x.gr_name for x in grp.getgrall() if "portage" in x.gr_mem
+ )
except KeyError:
portage_uid = 0
portage_gid = wheelgid
diff --git a/src/pkgcore/package/base.py b/src/pkgcore/package/base.py
index bbba3d5ca..8a4e0c16b 100644
--- a/src/pkgcore/package/base.py
+++ b/src/pkgcore/package/base.py
@@ -83,7 +83,7 @@ class wrapper(base):
built = klass.alias_attr("_raw_pkg.built")
versioned_atom = klass.alias_attr("_raw_pkg.versioned_atom")
unversioned_atom = klass.alias_attr("_raw_pkg.unversioned_atom")
- is_supported = klass.alias_attr('_raw_pkg.is_supported')
+ is_supported = klass.alias_attr("_raw_pkg.is_supported")
def __hash__(self):
return hash(self._raw_pkg)
@@ -126,7 +126,7 @@ class DynamicGetattrSetter(type):
class register:
"""Decorator used to mark a function as an attribute loader."""
- __slots__ = ('functor',)
+ __slots__ = ("functor",)
def __init__(self, functor):
self.functor = functor
@@ -140,25 +140,26 @@ class DynamicGetattrSetter(type):
existing = {}
for base in bases:
- existing.update(getattr(base, '_get_attr', {}))
+ existing.update(getattr(base, "_get_attr", {}))
- slots = class_dict.get('__slots__', None)
+ slots = class_dict.get("__slots__", None)
if slots is not None:
# only add slots for new attr's; assume the layer above already slotted
# if this layer is setting slots.
- class_dict['__slots__'] = tuple(
+ class_dict["__slots__"] = tuple(
sequences.iter_stable_unique(
- itertools.chain(
- slots,
- set(new_functions).difference(existing)
- )
+ itertools.chain(slots, set(new_functions).difference(existing))
)
)
- d = existing if class_dict.pop('__DynamicGetattrSetter_auto_inherit__', True) else {}
+ d = (
+ existing
+ if class_dict.pop("__DynamicGetattrSetter_auto_inherit__", True)
+ else {}
+ )
d.update(new_functions)
- d.update(class_dict.pop('_get_attr', {}))
- class_dict['_get_attr'] = d
- class_dict.setdefault('__getattr__', dynamic_getattr_dict)
+ d.update(class_dict.pop("_get_attr", {}))
+ class_dict["_get_attr"] = d
+ class_dict.setdefault("__getattr__", dynamic_getattr_dict)
return type.__new__(cls, name, bases, class_dict)
diff --git a/src/pkgcore/package/conditionals.py b/src/pkgcore/package/conditionals.py
index aeda11176..40d2ecaad 100644
--- a/src/pkgcore/package/conditionals.py
+++ b/src/pkgcore/package/conditionals.py
@@ -19,15 +19,15 @@ def _getattr_wrapped(attr, self):
o = self._cached_wrapped.get(attr)
if o is None or o[0] != self._reuse_pt:
o = self._wrapped_attr[attr](
- getattr(self._raw_pkg, attr),
- self._configurable,
- pkg=self)
+ getattr(self._raw_pkg, attr), self._configurable, pkg=self
+ )
o = self._cached_wrapped[attr] = (self._reuse_pt, o)
return o[1]
-def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(),
- kls_injections={}):
+def make_wrapper(
+ wrapped_repo, configurable_attribute_name, attributes_to_wrap=(), kls_injections={}
+):
"""
:param configurable_attribute_name: attribute name to add,
and that is used for evaluating attributes_to_wrap
@@ -37,16 +37,22 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
"""
if configurable_attribute_name.find(".") != -1:
- raise ValueError("can only wrap first level attributes, "
- "'obj.dar' fex, not '%s'" %
- (configurable_attribute_name))
+ raise ValueError(
+ "can only wrap first level attributes, "
+ "'obj.dar' fex, not '%s'" % (configurable_attribute_name)
+ )
class PackageWrapper(wrapper):
"""Add a new attribute, and evaluate attributes of a wrapped pkg."""
__slots__ = (
- "_unchangable", "_configurable", "_reuse_pt",
- "_cached_wrapped", "_disabled", "_domain", "repo",
+ "_unchangable",
+ "_configurable",
+ "_reuse_pt",
+ "_cached_wrapped",
+ "_disabled",
+ "_domain",
+ "repo",
)
_wrapped_attr = attributes_to_wrap
@@ -59,11 +65,16 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
locals()[configurable_attribute_name] = property(attrgetter("_configurable"))
locals().update(
- (x, property(partial(_getattr_wrapped, x)))
- for x in attributes_to_wrap)
+ (x, property(partial(_getattr_wrapped, x))) for x in attributes_to_wrap
+ )
- def __init__(self, pkg_instance, initial_settings=None,
- disabled_settings=None, unchangable_settings=None):
+ def __init__(
+ self,
+ pkg_instance,
+ initial_settings=None,
+ disabled_settings=None,
+ unchangable_settings=None,
+ ):
"""
:type pkg_instance: :obj:`pkgcore.package.metadata.package`
:param pkg_instance: instance to wrap.
@@ -83,23 +94,28 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
unchangable_settings = []
sf = object.__setattr__
- sf(self, '_unchangable', unchangable_settings)
- sf(self, '_configurable', LimitedChangeSet(
- initial_settings, unchangable_settings))
- sf(self, '_disabled', disabled_settings)
- sf(self, '_reuse_pt', 0)
- sf(self, 'repo', wrapped_repo)
- sf(self, '_cached_wrapped', {})
- sf(self, '_domain', None)
+ sf(self, "_unchangable", unchangable_settings)
+ sf(
+ self,
+ "_configurable",
+ LimitedChangeSet(initial_settings, unchangable_settings),
+ )
+ sf(self, "_disabled", disabled_settings)
+ sf(self, "_reuse_pt", 0)
+ sf(self, "repo", wrapped_repo)
+ sf(self, "_cached_wrapped", {})
+ sf(self, "_domain", None)
super().__init__(pkg_instance)
def __copy__(self):
return self.__class__(
- self._raw_pkg, self._configurable_name,
+ self._raw_pkg,
+ self._configurable_name,
initial_settings=set(self._configurable),
disabled_settings=self._disabled,
unchangable_settings=self._unchangable,
- attributes_to_wrap=self._wrapped_attr)
+ attributes_to_wrap=self._wrapped_attr,
+ )
def rollback(self, point=0):
"""rollback changes to the configurable attribute to an earlier point
@@ -109,7 +125,7 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
self._configurable.rollback(point)
# yes, nuking objs isn't necessarily required. easier this way though.
# XXX: optimization point
- object.__setattr__(self, '_reuse_pt', self._reuse_pt + 1)
+ object.__setattr__(self, "_reuse_pt", self._reuse_pt + 1)
def commit(self):
"""Commit current changes.
@@ -117,7 +133,7 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
This means that those changes can be reverted from this point out.
"""
self._configurable.commit()
- object.__setattr__(self, '_reuse_pt', 0)
+ object.__setattr__(self, "_reuse_pt", 0)
def changes_count(self):
"""current commit point for the configurable"""
@@ -141,7 +157,7 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
entry_point = self.changes_count()
try:
list(map(self._configurable.add, vals))
- object.__setattr__(self, '_reuse_pt', self._reuse_pt + 1)
+ object.__setattr__(self, "_reuse_pt", self._reuse_pt + 1)
return True
except Unchangable:
self.rollback(entry_point)
@@ -166,7 +182,7 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
except Unchangable:
self.rollback(entry_point)
return False
- object.__setattr__(self, '_reuse_pt', self._reuse_pt + 1)
+ object.__setattr__(self, "_reuse_pt", self._reuse_pt + 1)
return True
def request_disable(self, attr, *vals):
@@ -211,17 +227,19 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
except Unchangable:
self.rollback(entry_point)
return False
- object.__setattr__(self, '_reuse_pt', self._reuse_pt + 1)
+ object.__setattr__(self, "_reuse_pt", self._reuse_pt + 1)
return True
def __str__(self):
- return "config wrapped(%s): %s" % (self._configurable_name,
- self._raw_pkg)
+ return "config wrapped(%s): %s" % (self._configurable_name, self._raw_pkg)
def __repr__(self):
return "<%s pkg=%r wrapped=%r @%#8x>" % (
- self.__class__.__name__, self._raw_pkg, self._configurable_name,
- id(self))
+ self.__class__.__name__,
+ self._raw_pkg,
+ self._configurable_name,
+ id(self),
+ )
def freeze(self):
o = copy(self)
@@ -233,9 +251,9 @@ def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(
commit any outstanding changes and lock the configuration.
"""
self.commit()
- object.__setattr__(self, '_configurable', list(self._configurable))
+ object.__setattr__(self, "_configurable", list(self._configurable))
- if 'operations_callback' in kls_injections:
+ if "operations_callback" in kls_injections:
_operations = kls_injections.pop("operations_callback")
locals().update(kls_injections)
diff --git a/src/pkgcore/package/errors.py b/src/pkgcore/package/errors.py
index eacbbcb42..d152ee179 100644
--- a/src/pkgcore/package/errors.py
+++ b/src/pkgcore/package/errors.py
@@ -1,6 +1,11 @@
__all__ = (
- "PackageError", "InvalidPackageName", "MetadataException", "InvalidDependency",
- "ChksumError", "MissingChksum", "ParseChksumError",
+ "PackageError",
+ "InvalidPackageName",
+ "MetadataException",
+ "InvalidDependency",
+ "ChksumError",
+ "MissingChksum",
+ "ParseChksumError",
)
import os
@@ -22,7 +27,7 @@ class InvalidPackageName(PackageError):
def __str__(self):
msg = self.name
if self.msg is not None:
- msg += f': {self.msg}'
+ msg += f": {self.msg}"
return msg
@@ -38,9 +43,9 @@ class MetadataException(PackageError):
s = self.error
if self.verbose:
if verbosity > 0:
- s += ':\n'
+ s += ":\n"
else:
- s += ': '
+ s += ": "
s += self.verbose.msg(verbosity)
return s
@@ -54,21 +59,21 @@ class ChksumError(PkgcoreUserException):
class MissingChksum(ChksumError):
-
def __init__(self, pkg, filename):
super().__init__(
- f"{pkg.cpvstr}::{pkg.repo} missing chksum data for {filename!r}")
+ f"{pkg.cpvstr}::{pkg.repo} missing chksum data for {filename!r}"
+ )
self.pkg = pkg
self.file = filename
class ParseChksumError(ChksumError):
-
def __init__(self, filename, error, missing=False):
filename = os.sep.join(filename.split(os.sep)[-3:])
if missing:
super().__init__(
- f"failed parsing {filename!r}; data isn't available: {error}")
+ f"failed parsing {filename!r}; data isn't available: {error}"
+ )
else:
super().__init__(f"failed parsing {filename!r}: {error}")
self.file = filename
diff --git a/src/pkgcore/package/metadata.py b/src/pkgcore/package/metadata.py
index 4cda06a19..65176cbc1 100644
--- a/src/pkgcore/package/metadata.py
+++ b/src/pkgcore/package/metadata.py
@@ -20,11 +20,15 @@ def DeriveMetadataKls(original_kls):
built = False
__slots__ = ("_parent", "data", "_domain")
try:
- __doc__ = "package class with metadata bound to it for attribute " \
- "generation\n\n" + \
- "\n".join(x.lstrip()
- for x in original_kls.__doc__.split("\n")
- if ":ivar" in x or ":cvar" in x)
+ __doc__ = (
+ "package class with metadata bound to it for attribute "
+ "generation\n\n"
+ + "\n".join(
+ x.lstrip()
+ for x in original_kls.__doc__.split("\n")
+ if ":ivar" in x or ":cvar" in x
+ )
+ )
__doc__ += "\n:ivar repo: parent repository"
except AttributeError:
# wee, must be in -OO mode.
@@ -35,7 +39,7 @@ def DeriveMetadataKls(original_kls):
def __init__(self, parent_repository, *args, **kwds):
f"""wrapper for {original_kls}.__init__
-
+
See {original_kls}.__init__ for allowed args/kwds, they're passed
directly to it.
@@ -44,7 +48,7 @@ def DeriveMetadataKls(original_kls):
instance
"""
super().__init__(*args, **kwds)
- object.__setattr__(self, '_parent', parent_repository)
+ object.__setattr__(self, "_parent", parent_repository)
@base.DynamicGetattrSetter.register
def data(self):
@@ -62,13 +66,13 @@ def DeriveMetadataKls(original_kls):
if all:
try:
- object.__delattr__(self, 'data')
+ object.__delattr__(self, "data")
except AttributeError:
pass
@property
def slotted_atom(self):
- return atom(f'{self.key}:{self.slot}')
+ return atom(f"{self.key}:{self.slot}")
def _fetch_metadata(self):
"""Pull the metadata for this package.
@@ -82,8 +86,10 @@ def DeriveMetadataKls(original_kls):
return package
+
package = DeriveMetadataKls(cpv.VersionedCPV)
+
class factory:
"""package generator
@@ -128,9 +134,9 @@ class factory:
def __getstate__(self):
d = self.__dict__.copy()
- del d['_cached_instances']
+ del d["_cached_instances"]
return d
def __setstate__(self, state):
self.__dict__ = state.copy()
- self.__dict__['_cached_instances'] = WeakValueDictionary()
+ self.__dict__["_cached_instances"] = WeakValueDictionary()
diff --git a/src/pkgcore/package/mutated.py b/src/pkgcore/package/mutated.py
index 26f4cf5d8..7be6e3ca2 100644
--- a/src/pkgcore/package/mutated.py
+++ b/src/pkgcore/package/mutated.py
@@ -26,10 +26,16 @@ class MutatedPkg(wrapper):
return getattr(self._raw_pkg, attr)
def __repr__(self):
- return '<%s pkg=%r overrides=%r @%#8x>' % (
- self.__class__.__name__, self._raw_pkg, tuple(self._overrides),
- id(self))
+ return "<%s pkg=%r overrides=%r @%#8x>" % (
+ self.__class__.__name__,
+ self._raw_pkg,
+ tuple(self._overrides),
+ id(self),
+ )
def __str__(self):
- return '%s(%s, overrides=%s)' % \
- (self.__class__.__name__, self._raw_pkg, tuple(self._overrides))
+ return "%s(%s, overrides=%s)" % (
+ self.__class__.__name__,
+ self._raw_pkg,
+ tuple(self._overrides),
+ )
diff --git a/src/pkgcore/package/virtual.py b/src/pkgcore/package/virtual.py
index e6c3e049b..29c163b6e 100644
--- a/src/pkgcore/package/virtual.py
+++ b/src/pkgcore/package/virtual.py
@@ -23,8 +23,8 @@ class package(metadata.package):
def __init__(self, repo, provider, *a, **kwds):
super().__init__(repo, *a, **kwds)
- object.__setattr__(self, 'provider', provider)
- object.__setattr__(self, 'data', {})
+ object.__setattr__(self, "provider", provider)
+ object.__setattr__(self, "data", {})
def __getattr__(self, key):
val = None
diff --git a/src/pkgcore/pkgsets/filelist.py b/src/pkgcore/pkgsets/filelist.py
index 505c193a9..8597d6962 100644
--- a/src/pkgcore/pkgsets/filelist.py
+++ b/src/pkgcore/pkgsets/filelist.py
@@ -17,7 +17,7 @@ from ..package.errors import InvalidDependency
class FileList:
- pkgcore_config_type = ConfigHint({'location': 'str'}, typename='pkgset')
+ pkgcore_config_type = ConfigHint({"location": "str"}, typename="pkgset")
error_on_subsets = True
def __init__(self, location, gid=os_data.portage_gid, mode=0o644):
@@ -36,12 +36,15 @@ class FileList:
elif x.startswith("@"):
if self.error_on_subsets:
raise ValueError(
- "set %s isn't a valid atom in pkgset %r" %
- (x, self.path))
+ "set %s isn't a valid atom in pkgset %r" % (x, self.path)
+ )
logger.warning(
"set item %r found in pkgset %r: it will be "
"wiped on update since portage/pkgcore store set items "
- "in a separate way", x[1:], self.path)
+ "in a separate way",
+ x[1:],
+ self.path,
+ )
continue
s.add(atom(x))
except InvalidDependency as e:
@@ -78,11 +81,11 @@ class FileList:
class WorldFile(FileList):
"""Set of packages contained in the world file."""
- pkgcore_config_type = ConfigHint(typename='pkgset')
+
+ pkgcore_config_type = ConfigHint(typename="pkgset")
error_on_subsets = False
- def __init__(self, location=const.WORLD_FILE,
- gid=os_data.portage_gid, mode=0o644):
+ def __init__(self, location=const.WORLD_FILE, gid=os_data.portage_gid, mode=0o644):
FileList.__init__(self, location, gid=gid, mode=mode)
def add(self, atom_inst):
@@ -94,7 +97,7 @@ class WorldFile(FileList):
def _modify(self, atom_inst, func):
if atom_inst.slot:
for slot in atom_inst.slot:
- if slot == '0':
+ if slot == "0":
new_atom_inst = atom(atom_inst.key)
else:
new_atom_inst = atom(atom_inst.key + ":" + slot)
diff --git a/src/pkgcore/pkgsets/glsa.py b/src/pkgcore/pkgsets/glsa.py
index 496a5a511..45c0213db 100644
--- a/src/pkgcore/pkgsets/glsa.py
+++ b/src/pkgcore/pkgsets/glsa.py
@@ -27,9 +27,9 @@ class GlsaDirSet(metaclass=generic_equality):
(rsync tree is the usual source.)
"""
- pkgcore_config_type = ConfigHint({'src': 'ref:repo'}, typename='pkgset')
+ pkgcore_config_type = ConfigHint({"src": "ref:repo"}, typename="pkgset")
op_translate = {"ge": ">=", "gt": ">", "lt": "<", "le": "<=", "eq": "="}
- __attr_comparison__ = ('paths',)
+ __attr_comparison__ = ("paths",)
def __init__(self, src):
"""
@@ -39,11 +39,18 @@ class GlsaDirSet(metaclass=generic_equality):
"""
if not isinstance(src, str):
- src = tuple(sorted(
- filter(os.path.isdir, (pjoin(
- repo.base, 'metadata', 'glsa') for repo in
- get_virtual_repos(src, False) if hasattr(repo, 'base'))
- )))
+ src = tuple(
+ sorted(
+ filter(
+ os.path.isdir,
+ (
+ pjoin(repo.base, "metadata", "glsa")
+ for repo in get_virtual_repos(src, False)
+ if hasattr(repo, "base")
+ ),
+ )
+ )
+ )
else:
src = [src]
self.paths = src
@@ -51,7 +58,8 @@ class GlsaDirSet(metaclass=generic_equality):
def __iter__(self):
for glsa, catpkg, pkgatom, vuln in self.iter_vulnerabilities():
yield packages.KeyedAndRestriction(
- pkgatom, vuln, key=catpkg, tag="GLSA vulnerable:")
+ pkgatom, vuln, key=catpkg, tag="GLSA vulnerable:"
+ )
def pkg_grouped_iter(self, sorter=None):
"""yield GLSA restrictions grouped by package key
@@ -69,7 +77,8 @@ class GlsaDirSet(metaclass=generic_equality):
for pkgname in sorter(pkgs):
yield packages.KeyedAndRestriction(
- pkgatoms[pkgname], packages.OrRestriction(*pkgs[pkgname]), key=pkgname)
+ pkgatoms[pkgname], packages.OrRestriction(*pkgs[pkgname]), key=pkgname
+ )
def iter_vulnerabilities(self):
"""generator yielding each GLSA restriction"""
@@ -77,33 +86,35 @@ class GlsaDirSet(metaclass=generic_equality):
for fn in listdir_files(path):
# glsa-1234-12.xml
if not (fn.startswith("glsa-") and fn.endswith(".xml")):
- logger.warning(f'invalid glsa file name: {fn!r}')
+ logger.warning(f"invalid glsa file name: {fn!r}")
continue
# This verifies the filename is of the correct syntax.
try:
[int(x) for x in fn[5:-4].split("-")]
except ValueError:
- logger.warning(f'invalid glsa file name: {fn!r}')
+ logger.warning(f"invalid glsa file name: {fn!r}")
continue
root = etree.parse(pjoin(path, fn))
glsa_node = root.getroot()
- if glsa_node.tag != 'glsa':
- logger.warning(f'glsa file without glsa root node: {fn!r}')
+ if glsa_node.tag != "glsa":
+ logger.warning(f"glsa file without glsa root node: {fn!r}")
continue
- for affected in root.findall('affected'):
- for pkg in affected.findall('package'):
+ for affected in root.findall("affected"):
+ for pkg in affected.findall("package"):
try:
- pkgname = str(pkg.get('name')).strip()
- pkg_vuln_restrict = \
- self.generate_intersects_from_pkg_node(
- pkg, tag="glsa(%s)" % fn[5:-4])
+ pkgname = str(pkg.get("name")).strip()
+ pkg_vuln_restrict = self.generate_intersects_from_pkg_node(
+ pkg, tag="glsa(%s)" % fn[5:-4]
+ )
if pkg_vuln_restrict is None:
continue
pkgatom = atom.atom(pkgname)
yield fn[5:-4], pkgname, pkgatom, pkg_vuln_restrict
except (TypeError, ValueError) as e:
# thrown from cpv.
- logger.warning(f"invalid glsa file {fn!r}, package {pkgname}: {e}")
+ logger.warning(
+ f"invalid glsa file {fn!r}, package {pkgname}: {e}"
+ )
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
@@ -126,14 +137,19 @@ class GlsaDirSet(metaclass=generic_equality):
vuln_list = [self.generate_restrict_from_range(vuln[0])]
vuln = vuln_list[0]
if arch is not None:
- vuln = packages.AndRestriction(vuln, packages.PackageRestriction(
- "keywords", values.ContainmentMatch(arch, match_all=False)))
- invuln = (pkg_node.findall("unaffected"))
+ vuln = packages.AndRestriction(
+ vuln,
+ packages.PackageRestriction(
+ "keywords", values.ContainmentMatch(arch, match_all=False)
+ ),
+ )
+ invuln = pkg_node.findall("unaffected")
if not invuln:
# wrap it.
return packages.KeyedAndRestriction(vuln, tag=tag)
- invuln_list = [self.generate_restrict_from_range(x, negate=True)
- for x in invuln]
+ invuln_list = [
+ self.generate_restrict_from_range(x, negate=True) for x in invuln
+ ]
invuln = [x for x in invuln_list if x not in vuln_list]
if not invuln:
if tag is None:
@@ -148,7 +164,7 @@ class GlsaDirSet(metaclass=generic_equality):
try:
restrict = self.op_translate[op.lstrip("r")]
except KeyError:
- raise ValueError(f'unknown operator: {op!r}')
+ raise ValueError(f"unknown operator: {op!r}")
if node.text is None:
raise ValueError(f"{op!r} node missing version")
@@ -162,18 +178,20 @@ class GlsaDirSet(metaclass=generic_equality):
if op != "eq":
raise ValueError(f"glob cannot be used with {op} ops")
return packages.PackageRestriction(
- "fullver", values.StrGlobMatch(base.fullver))
+ "fullver", values.StrGlobMatch(base.fullver)
+ )
restrictions = []
if op.startswith("r"):
if not base.revision:
- if op == "rlt": # rlt -r0 can never match
+ if op == "rlt": # rlt -r0 can never match
# this is a non-range.
raise ValueError(
- "range %s version %s is a guaranteed empty set" %
- (op, str(node.text.strip())))
- elif op == "rle": # rle -r0 -> = -r0
+ "range %s version %s is a guaranteed empty set"
+ % (op, str(node.text.strip()))
+ )
+ elif op == "rle": # rle -r0 -> = -r0
return atom_restricts.VersionMatch("=", base.version, negate=negate)
- elif op == "rge": # rge -r0 -> ~
+ elif op == "rge": # rge -r0 -> ~
return atom_restricts.VersionMatch("~", base.version, negate=negate)
# rgt -r0 passes through to regular ~ + >
restrictions.append(atom_restricts.VersionMatch("~", base.version))
@@ -208,9 +226,9 @@ def find_vulnerable_repo_pkgs(glsa_src, repo, grouped=False, arch=None):
arch = tuple(arch)
wrapper = lambda p: mutated.MutatedPkg(p, {"keywords": arch})
for restrict in i:
- matches = caching_iter(wrapper(x)
- for x in repo.itermatch(restrict,
- sorter=sorted))
+ matches = caching_iter(
+ wrapper(x) for x in repo.itermatch(restrict, sorter=sorted)
+ )
if matches:
yield restrict, matches
@@ -218,10 +236,10 @@ def find_vulnerable_repo_pkgs(glsa_src, repo, grouped=False, arch=None):
class SecurityUpgrades(metaclass=generic_equality):
"""Set of packages for available security upgrades."""
- pkgcore_config_type = ConfigHint({'ebuild_repo': 'ref:repo',
- 'vdb': 'ref:vdb'},
- typename='pkgset')
- __attr_comparison__ = ('arch', 'glsa_src', 'vdb')
+ pkgcore_config_type = ConfigHint(
+ {"ebuild_repo": "ref:repo", "vdb": "ref:vdb"}, typename="pkgset"
+ )
+ __attr_comparison__ = ("arch", "glsa_src", "vdb")
def __init__(self, ebuild_repo, vdb, arch):
self.glsa_src = GlsaDirSet(ebuild_repo)
@@ -230,5 +248,6 @@ class SecurityUpgrades(metaclass=generic_equality):
def __iter__(self):
for glsa, matches in find_vulnerable_repo_pkgs(
- self.glsa_src, self.vdb, grouped=True, arch=self.arch):
+ self.glsa_src, self.vdb, grouped=True, arch=self.arch
+ ):
yield packages.KeyedAndRestriction(glsa[0], restriction.Negate(glsa[1]))
diff --git a/src/pkgcore/pkgsets/installed.py b/src/pkgcore/pkgsets/installed.py
index 124b00120..1b8f6fe3d 100644
--- a/src/pkgcore/pkgsets/installed.py
+++ b/src/pkgcore/pkgsets/installed.py
@@ -14,7 +14,8 @@ class _Base:
def __iter__(self):
restrict = packages.PackageRestriction(
- "package_is_real", values.EqualityMatch(True))
+ "package_is_real", values.EqualityMatch(True)
+ )
for repo in self.vdbs:
for pkg in repo.itermatch(restrict):
yield self.getter(pkg)
@@ -22,11 +23,13 @@ class _Base:
class Installed(_Base):
"""Set of packages holding slotted atoms of all installed packages."""
- pkgcore_config_type = ConfigHint({'vdb': 'refs:repo'}, typename='pkgset')
- getter = operator.attrgetter('slotted_atom')
+
+ pkgcore_config_type = ConfigHint({"vdb": "refs:repo"}, typename="pkgset")
+ getter = operator.attrgetter("slotted_atom")
class VersionedInstalled(_Base):
"""Set of packages holding versioned atoms of all installed packages."""
- pkgcore_config_type = ConfigHint({'vdb': 'refs:repo'}, typename='pkgset')
- getter = operator.attrgetter('versioned_atom')
+
+ pkgcore_config_type = ConfigHint({"vdb": "refs:repo"}, typename="pkgset")
+ getter = operator.attrgetter("versioned_atom")
diff --git a/src/pkgcore/pkgsets/live_rebuild_set.py b/src/pkgcore/pkgsets/live_rebuild_set.py
index 012e48d1f..ec18902ac 100644
--- a/src/pkgcore/pkgsets/live_rebuild_set.py
+++ b/src/pkgcore/pkgsets/live_rebuild_set.py
@@ -11,11 +11,8 @@ from .installed import VersionedInstalled
class EclassConsumerSet(VersionedInstalled):
pkgcore_config_type = ConfigHint(
- typename='pkgset',
- types={
- 'vdb': 'refs:repo',
- 'repos': 'refs:repo',
- 'eclasses': 'list'},
+ typename="pkgset",
+ types={"vdb": "refs:repo", "repos": "refs:repo", "eclasses": "list"},
)
def __init__(self, vdb, repos, eclasses):
@@ -29,7 +26,7 @@ class EclassConsumerSet(VersionedInstalled):
if not pkgs:
# pkg is installed but no longer in any repo, just ignore it.
continue
- assert len(pkgs) == 1, 'I do not know what I am doing: %r' % (pkgs,)
+ assert len(pkgs) == 1, "I do not know what I am doing: %r" % (pkgs,)
pkg = pkgs[0]
- if self.eclasses.isdisjoint(pkg.data.get('_eclasses_', ())):
+ if self.eclasses.isdisjoint(pkg.data.get("_eclasses_", ())):
yield atom
diff --git a/src/pkgcore/pkgsets/system.py b/src/pkgcore/pkgsets/system.py
index 8c491fc8d..596f7ef6f 100644
--- a/src/pkgcore/pkgsets/system.py
+++ b/src/pkgcore/pkgsets/system.py
@@ -9,7 +9,8 @@ from ..config.hint import ConfigHint
class SystemSet:
"""Set of packages defined by the selected profile."""
- pkgcore_config_type = ConfigHint({'profile': 'ref:profile'}, typename='pkgset')
+
+ pkgcore_config_type = ConfigHint({"profile": "ref:profile"}, typename="pkgset")
def __init__(self, profile):
self.system = frozenset(profile.system)
diff --git a/src/pkgcore/plugin.py b/src/pkgcore/plugin.py
index 812fc0be9..a41097c71 100644
--- a/src/pkgcore/plugin.py
+++ b/src/pkgcore/plugin.py
@@ -27,23 +27,24 @@ from snakeoil.osutils import ensure_dirs, listdir_files, pjoin, unlink_if_exists
from . import const, os_data
from .log import logger
-_plugin_data = namedtuple(
- "_plugin_data", ["key", "priority", "source", "target"])
+_plugin_data = namedtuple("_plugin_data", ["key", "priority", "source", "target"])
-PLUGIN_ATTR = 'pkgcore_plugins'
+PLUGIN_ATTR = "pkgcore_plugins"
-CACHE_HEADER = 'pkgcore plugin cache v3'
-CACHE_FILENAME = 'plugincache'
+CACHE_HEADER = "pkgcore plugin cache v3"
+CACHE_FILENAME = "plugincache"
def _clean_old_caches(path):
- for name in ('plugincache2',):
+ for name in ("plugincache2",):
try:
unlink_if_exists(pjoin(path, name))
except EnvironmentError as e:
logger.error(
"attempting to clean old plugin cache %r failed with %s",
- pjoin(path, name), e)
+ pjoin(path, name),
+ e,
+ )
def sort_plugs(plugs):
@@ -67,26 +68,31 @@ def _process_plugin(package, plug, filter_disabled=False):
if len(plugs) <= plug.target:
logger.exception(
"plugin cache for %s, %s, %s is somehow wrong; no item at position %s",
- package.__name__, plug.source, plug.key, plug.target)
+ package.__name__,
+ plug.source,
+ plug.key,
+ plug.target,
+ )
return None
plug = plugs[plug.target]
else:
logger.error(
- "package %s, plug %s; non int, non string. wtf?",
- package.__name__, plug)
+ "package %s, plug %s; non int, non string. wtf?", package.__name__, plug
+ )
return None
if filter_disabled:
- if getattr(plug, 'disabled', False):
+ if getattr(plug, "disabled", False):
logger.debug("plugin %s is disabled, skipping", plug)
return None
- f = getattr(plug, '_plugin_disabled_check', None)
+ f = getattr(plug, "_plugin_disabled_check", None)
if f is not None and f():
logger.debug("plugin %s is disabled, skipping", plug)
return None
return plug
+
def _read_cache_file(package, cache_path):
"""Read an existing cache file."""
stored_cache = {}
@@ -94,7 +100,8 @@ def _read_cache_file(package, cache_path):
if len(cache_data) >= 1:
if cache_data[0] != CACHE_HEADER:
logger.warning(
- "plugin cache has a wrong header: %r, regenerating", cache_data[0])
+ "plugin cache has a wrong header: %r, regenerating", cache_data[0]
+ )
cache_data = []
else:
cache_data = cache_data[1:]
@@ -102,29 +109,35 @@ def _read_cache_file(package, cache_path):
return {}
try:
for line in cache_data:
- module, mtime, entries = line.split(':', 2)
+ module, mtime, entries = line.split(":", 2)
mtime = int(mtime)
# Needed because ''.split(':') == [''], not []
if not entries:
entries = set()
else:
- entries = entries.replace(':', ',').split(',')
+ entries = entries.replace(":", ",").split(",")
if not len(entries) % 3 == 0:
logger.error(
"failed reading cache %s; entries field isn't "
- "divisable by 3: %r", cache_path, entries)
+ "divisable by 3: %r",
+ cache_path,
+ entries,
+ )
continue
entries = iter(entries)
+
def f(val):
if val.isdigit():
val = int(val)
return val
+
entries = set(
_plugin_data(
- key, int(priority),
- f'{package.__name__}.{module}', f(target))
- for (key, priority, target) in zip(entries, entries, entries))
+ key, int(priority), f"{package.__name__}.{module}", f(target)
+ )
+ for (key, priority, target) in zip(entries, entries, entries)
+ )
stored_cache[(module, mtime)] = entries
except IGNORED_EXCEPTIONS:
raise
@@ -134,18 +147,24 @@ def _read_cache_file(package, cache_path):
return stored_cache
+
def _write_cache_file(path, data, uid=-1, gid=-1):
"""Write a new cache file."""
cachefile = None
try:
try:
cachefile = AtomicWriteFile(
- path, binary=False, perms=0o664, uid=uid, gid=gid)
+ path, binary=False, perms=0o664, uid=uid, gid=gid
+ )
cachefile.write(CACHE_HEADER + "\n")
- for (module, mtime), plugs in sorted(data.items(), key=operator.itemgetter(0)):
+ for (module, mtime), plugs in sorted(
+ data.items(), key=operator.itemgetter(0)
+ ):
plugs = sort_plugs(plugs)
- plugs = ':'.join(f'{plug.key},{plug.priority},{plug.target}' for plug in plugs)
- cachefile.write(f'{module}:{mtime}:{plugs}\n')
+ plugs = ":".join(
+ f"{plug.key},{plug.priority},{plug.target}" for plug in plugs
+ )
+ cachefile.write(f"{module}:{mtime}:{plugs}\n")
cachefile.close()
except EnvironmentError as e:
# We cannot write a new cache. We should log this
@@ -153,9 +172,9 @@ def _write_cache_file(path, data, uid=-1, gid=-1):
# Use error, not exception for this one: the traceback
# is not necessary and too alarming.
- logger.error('Cannot write cache for %s: %s. '
- 'Try running pplugincache.',
- path, e)
+ logger.error(
+ "Cannot write cache for %s: %s. " "Try running pplugincache.", path, e
+ )
finally:
if cachefile is not None:
cachefile.discard()
@@ -197,7 +216,7 @@ def initialize_cache(package, force=False, cache_dir=None):
# put pkgcore consumer plugins (e.g. pkgcheck) inside pkgcore cache dir
if cache_dir in (const.SYSTEM_CACHE_PATH, const.USER_CACHE_PATH):
- chunks = package.__name__.split('.', 1)
+ chunks = package.__name__.split(".", 1)
if chunks[0] != os.path.basename(cache_dir):
cache_dir = pjoin(cache_dir, chunks[0])
@@ -212,8 +231,9 @@ def initialize_cache(package, force=False, cache_dir=None):
# Directory cache, mapping modulename to
# (mtime, set([keys]))
modlist = listdir_files(modpath)
- modlist = set(x for x in modlist if os.path.splitext(x)[1] == '.py'
- and x != '__init__.py')
+ modlist = set(
+ x for x in modlist if os.path.splitext(x)[1] == ".py" and x != "__init__.py"
+ )
cache_stale = False
# Hunt for modules.
@@ -227,28 +247,35 @@ def initialize_cache(package, force=False, cache_dir=None):
if vals is None or force:
# Cache entry is stale.
logger.debug(
- 'stale because of %s: actual %s != stored %s',
- modname, mtime, stored_cache.get(modname, (0, ()))[0])
+ "stale because of %s: actual %s != stored %s",
+ modname,
+ mtime,
+ stored_cache.get(modname, (0, ()))[0],
+ )
cache_stale = True
entries = []
- qualname = '.'.join((package.__name__, modname))
+ qualname = ".".join((package.__name__, modname))
module = import_module(qualname)
registry = getattr(module, PLUGIN_ATTR, {})
vals = set()
for key, plugs in registry.items():
for idx, plug_name in enumerate(plugs):
if isinstance(plug_name, str):
- plug = _process_plugin(package, _plugin_data(key, 0, qualname, plug_name))
+ plug = _process_plugin(
+ package, _plugin_data(key, 0, qualname, plug_name)
+ )
else:
plug = plug_name
if plug is None:
# import failure, ignore it, error already logged
continue
- priority = getattr(plug, 'priority', 0)
+ priority = getattr(plug, "priority", 0)
if not isinstance(priority, int):
logger.error(
"ignoring plugin %s: has a non integer priority: %s",
- plug, priority)
+ plug,
+ priority,
+ )
continue
if plug_name is plug:
# this means it's an object, rather than a string; store
@@ -260,7 +287,7 @@ def initialize_cache(package, force=False, cache_dir=None):
for data in vals:
package_cache[data.key].add(data)
if force or set(stored_cache) != set(actual_cache):
- logger.debug('updating cache %r for new plugins', stored_cache_name)
+ logger.debug("updating cache %r for new plugins", stored_cache_name)
ensure_dirs(cache_dir, uid=uid, gid=gid, mode=mode)
_write_cache_file(stored_cache_name, actual_cache, uid=uid, gid=gid)
@@ -273,7 +300,7 @@ def get_plugins(key, package=None):
Plugins with a C{disabled} attribute evaluating to C{True} are skipped.
"""
if package is None:
- package = import_module('.plugins', __name__.split('.')[0])
+ package = import_module(".plugins", __name__.split(".")[0])
cache = _global_cache[package]
for plug in _process_plugins(package, cache.get(key, ()), filter_disabled=True):
@@ -289,7 +316,7 @@ def get_plugin(key, package=None):
:return: highest-priority plugin or None if no plugin available.
"""
if package is None:
- package = import_module('.plugins', __name__.split('.')[0])
+ package = import_module(".plugins", __name__.split(".")[0])
cache = _global_cache[package]
for plug in _process_plugins(package, cache.get(key, ()), filter_disabled=True):
@@ -313,7 +340,7 @@ def extend_path(path, name):
# frozen package. Return the path unchanged in that case.
return
# Reconstitute as relative path.
- pname = pjoin(*name.split('.'))
+ pname = pjoin(*name.split("."))
for entry in sys.path:
if not isinstance(entry, str) or not os.path.isdir(entry):
diff --git a/src/pkgcore/pytest/plugin.py b/src/pkgcore/pytest/plugin.py
index 10739079d..142292bf4 100644
--- a/src/pkgcore/pytest/plugin.py
+++ b/src/pkgcore/pytest/plugin.py
@@ -18,45 +18,55 @@ class GitRepo:
dependency requirements.
"""
- def __init__(self, path, bare=False, branch='main', commit=False, clone=False):
+ def __init__(self, path, bare=False, branch="main", commit=False, clone=False):
self.path = path
if clone:
os.makedirs(self.path)
- self.run(['git', 'clone', clone, self.path])
+ self.run(["git", "clone", clone, self.path])
else:
- self.run(['git', 'init', '-b', branch] + (['--bare'] if bare else []) + [self.path])
- self.run(['git', 'config', 'user.email', 'first.last@email.com'])
- self.run(['git', 'config', 'user.name', 'First Last'])
+ self.run(
+ ["git", "init", "-b", branch]
+ + (["--bare"] if bare else [])
+ + [self.path]
+ )
+ self.run(["git", "config", "user.email", "first.last@email.com"])
+ self.run(["git", "config", "user.name", "First Last"])
if commit:
if self.changes:
# if files exist in the repo, add them in an initial commit
- self.add_all(msg='initial commit')
+ self.add_all(msg="initial commit")
else:
# otherwise add a stub initial commit
- self.add(pjoin(self.path, '.init'), create=True)
+ self.add(pjoin(self.path, ".init"), create=True)
def run(self, cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, **kwargs):
return subprocess.run(
- cmd, cwd=self.path, encoding='utf8', check=True,
- stdout=stdout, stderr=stderr, **kwargs)
+ cmd,
+ cwd=self.path,
+ encoding="utf8",
+ check=True,
+ stdout=stdout,
+ stderr=stderr,
+ **kwargs,
+ )
def log(self, args):
"""Run ``git log`` with given args and return a list of outputted lines."""
- p = self.run(['git', 'log'] + args, stdout=subprocess.PIPE)
+ p = self.run(["git", "log"] + args, stdout=subprocess.PIPE)
return p.stdout.strip().splitlines()
@property
def changes(self):
"""Return a list of any untracked or modified files in the repo."""
- cmd = ['git', 'ls-files', '-mo', '--exclude-standard']
+ cmd = ["git", "ls-files", "-mo", "--exclude-standard"]
p = self.run(cmd, stdout=subprocess.PIPE)
return p.stdout.splitlines()
@property
def HEAD(self):
"""Return the commit hash for git HEAD."""
- p = self.run(['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE)
+ p = self.run(["git", "rev-parse", "--short", "HEAD"], stdout=subprocess.PIPE)
return p.stdout.strip()
def __str__(self):
@@ -67,39 +77,39 @@ class GitRepo:
if isinstance(msg, str):
msg = msg.splitlines()
if signoff:
- msg.extend(['', 'Signed-off-by: First Last <first.last@email.com>'])
- self.run(['git', 'commit', '-m', '\n'.join(msg)])
+ msg.extend(["", "Signed-off-by: First Last <first.last@email.com>"])
+ self.run(["git", "commit", "-m", "\n".join(msg)])
- def add(self, file_path, msg='commit', commit=True, create=False, signoff=False):
+ def add(self, file_path, msg="commit", commit=True, create=False, signoff=False):
"""Add a file and commit it to the repo."""
if create:
touch(pjoin(self.path, file_path))
- self.run(['git', 'add', file_path])
+ self.run(["git", "add", file_path])
if commit:
self.commit(msg, signoff)
- def add_all(self, msg='commit-all', commit=True, signoff=False):
+ def add_all(self, msg="commit-all", commit=True, signoff=False):
"""Add and commit all tracked and untracked files."""
- self.run(['git', 'add', '--all'])
+ self.run(["git", "add", "--all"])
if commit:
self.commit(msg, signoff)
- def remove(self, path, msg='remove', commit=True, signoff=False):
+ def remove(self, path, msg="remove", commit=True, signoff=False):
"""Remove a given file path and commit the change."""
- self.run(['git', 'rm', path])
+ self.run(["git", "rm", path])
if commit:
self.commit(msg, signoff)
- def remove_all(self, path, msg='remove-all', commit=True, signoff=False):
+ def remove_all(self, path, msg="remove-all", commit=True, signoff=False):
"""Remove all files from a given path and commit the changes."""
- self.run(['git', 'rm', '-rf', path])
+ self.run(["git", "rm", "-rf", path])
if commit:
self.commit(msg, signoff)
def move(self, path, new_path, msg=None, commit=True, signoff=False):
"""Move a given file path and commit the change."""
- msg = msg if msg is not None else f'{path} -> {new_path}'
- self.run(['git', 'mv', path, new_path])
+ msg = msg if msg is not None else f"{path} -> {new_path}"
+ self.run(["git", "mv", path, new_path])
if commit:
self.commit(msg, signoff)
@@ -107,15 +117,17 @@ class GitRepo:
@pytest.fixture
def git_repo(tmp_path_factory):
"""Create an empty git repo with an initial commit."""
- return GitRepo(str(tmp_path_factory.mktemp('git-repo')), commit=True)
+ return GitRepo(str(tmp_path_factory.mktemp("git-repo")), commit=True)
@pytest.fixture
def make_git_repo(tmp_path_factory):
"""Factory for git repo creation."""
+
def _make_git_repo(path=None, **kwargs):
- path = str(tmp_path_factory.mktemp('git-repo')) if path is None else path
+ path = str(tmp_path_factory.mktemp("git-repo")) if path is None else path
return GitRepo(path, **kwargs)
+
return _make_git_repo
@@ -127,8 +139,8 @@ class _FileSet(MutableSet):
self._set = set()
def _sync(self):
- with open(self._path, 'w') as f:
- f.write('\n'.join(self._set) + '\n')
+ with open(self._path, "w") as f:
+ f.write("\n".join(self._set) + "\n")
def __contains__(self, key):
return key in self._set
@@ -167,26 +179,30 @@ class _FileSet(MutableSet):
class EbuildRepo:
"""Class for creating/manipulating ebuild repos."""
- def __init__(self, path, repo_id='fake', eapi='5', masters=(), arches=()):
+ def __init__(self, path, repo_id="fake", eapi="5", masters=(), arches=()):
self.path = path
- self.arches = _FileSet(pjoin(self.path, 'profiles', 'arch.list'))
+ self.arches = _FileSet(pjoin(self.path, "profiles", "arch.list"))
self._today = datetime.today()
try:
- os.makedirs(pjoin(path, 'profiles'))
- with open(pjoin(path, 'profiles', 'repo_name'), 'w') as f:
- f.write(f'{repo_id}\n')
- with open(pjoin(path, 'profiles', 'eapi'), 'w') as f:
- f.write(f'{eapi}\n')
- os.makedirs(pjoin(path, 'metadata'))
- with open(pjoin(path, 'metadata', 'layout.conf'), 'w') as f:
- f.write(textwrap.dedent(f"""\
+ os.makedirs(pjoin(path, "profiles"))
+ with open(pjoin(path, "profiles", "repo_name"), "w") as f:
+ f.write(f"{repo_id}\n")
+ with open(pjoin(path, "profiles", "eapi"), "w") as f:
+ f.write(f"{eapi}\n")
+ os.makedirs(pjoin(path, "metadata"))
+ with open(pjoin(path, "metadata", "layout.conf"), "w") as f:
+ f.write(
+ textwrap.dedent(
+ f"""\
masters = {' '.join(masters)}
cache-formats =
thin-manifests = true
- """))
+ """
+ )
+ )
if arches:
self.arches.update(arches)
- os.makedirs(pjoin(path, 'eclass'))
+ os.makedirs(pjoin(path, "eclass"))
except FileExistsError:
pass
self.sync()
@@ -195,47 +211,55 @@ class EbuildRepo:
"""Forcibly create underlying repo object avoiding cache usage."""
# avoid issues loading modules that set signal handlers
from pkgcore.ebuild import repo_objs, repository
- repo_config = repo_objs.RepoConfig(location=self.path, disable_inst_caching=True)
+
+ repo_config = repo_objs.RepoConfig(
+ location=self.path, disable_inst_caching=True
+ )
self._repo = repository.UnconfiguredTree(self.path, repo_config=repo_config)
def create_profiles(self, profiles):
for p in profiles:
- os.makedirs(pjoin(self.path, 'profiles', p.path), exist_ok=True)
- with open(pjoin(self.path, 'profiles', 'profiles.desc'), 'a+') as f:
- f.write(f'{p.arch} {p.path} {p.status}\n')
+ os.makedirs(pjoin(self.path, "profiles", p.path), exist_ok=True)
+ with open(pjoin(self.path, "profiles", "profiles.desc"), "a+") as f:
+ f.write(f"{p.arch} {p.path} {p.status}\n")
if p.deprecated:
- with open(pjoin(self.path, 'profiles', p.path, 'deprecated'), 'w') as f:
+ with open(pjoin(self.path, "profiles", p.path, "deprecated"), "w") as f:
f.write("# deprecated\ndeprecation reason\n")
- with open(pjoin(self.path, 'profiles', p.path, 'make.defaults'), 'w') as f:
+ with open(pjoin(self.path, "profiles", p.path, "make.defaults"), "w") as f:
if p.defaults is not None:
- f.write('\n'.join(p.defaults))
+ f.write("\n".join(p.defaults))
else:
- f.write(f'ARCH={p.arch}\n')
+ f.write(f"ARCH={p.arch}\n")
if p.eapi:
- with open(pjoin(self.path, 'profiles', p.path, 'eapi'), 'w') as f:
- f.write(f'{p.eapi}\n')
+ with open(pjoin(self.path, "profiles", p.path, "eapi"), "w") as f:
+ f.write(f"{p.eapi}\n")
def create_ebuild(self, cpvstr, data=None, **kwargs):
from pkgcore.ebuild import cpv as cpv_mod
+
cpv = cpv_mod.VersionedCPV(cpvstr)
self._repo.notify_add_package(cpv)
ebuild_dir = pjoin(self.path, cpv.category, cpv.package)
os.makedirs(ebuild_dir, exist_ok=True)
# use defaults for some ebuild metadata if unset
- eapi = kwargs.pop('eapi', '7')
- slot = kwargs.pop('slot', '0')
- desc = kwargs.pop('description', 'stub package description')
- homepage = kwargs.pop('homepage', 'https://pkgcore.github.io/pkgcheck')
- license = kwargs.pop('license', 'blank')
-
- ebuild_path = pjoin(ebuild_dir, f'{cpv.package}-{cpv.fullver}.ebuild')
- with open(ebuild_path, 'w') as f:
- if self.repo_id == 'gentoo':
- f.write(textwrap.dedent(f"""\
+ eapi = kwargs.pop("eapi", "7")
+ slot = kwargs.pop("slot", "0")
+ desc = kwargs.pop("description", "stub package description")
+ homepage = kwargs.pop("homepage", "https://pkgcore.github.io/pkgcheck")
+ license = kwargs.pop("license", "blank")
+
+ ebuild_path = pjoin(ebuild_dir, f"{cpv.package}-{cpv.fullver}.ebuild")
+ with open(ebuild_path, "w") as f:
+ if self.repo_id == "gentoo":
+ f.write(
+ textwrap.dedent(
+ f"""\
# Copyright 1999-{self._today.year} Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
- """))
+ """
+ )
+ )
f.write(f'EAPI="{eapi}"\n')
f.write(f'DESCRIPTION="{desc}"\n')
f.write(f'HOMEPAGE="{homepage}"\n')
@@ -244,36 +268,38 @@ class EbuildRepo:
if license:
f.write(f'LICENSE="{license}"\n')
# create a fake license
- os.makedirs(pjoin(self.path, 'licenses'), exist_ok=True)
- touch(pjoin(self.path, 'licenses', license))
+ os.makedirs(pjoin(self.path, "licenses"), exist_ok=True)
+ touch(pjoin(self.path, "licenses", license))
for k, v in kwargs.items():
# handle sequences such as KEYWORDS and IUSE
if isinstance(v, (tuple, list)):
- v = ' '.join(v)
+ v = " ".join(v)
f.write(f'{k.upper()}="{v}"\n')
if data:
- f.write(data.strip() + '\n')
+ f.write(data.strip() + "\n")
return ebuild_path
def __iter__(self):
yield from iter(self._repo)
- __getattr__ = klass.GetAttrProxy('_repo')
- __dir__ = klass.DirProxy('_repo')
+ __getattr__ = klass.GetAttrProxy("_repo")
+ __dir__ = klass.DirProxy("_repo")
@pytest.fixture
def repo(tmp_path_factory):
"""Create a generic ebuild repository."""
- return EbuildRepo(str(tmp_path_factory.mktemp('repo')))
+ return EbuildRepo(str(tmp_path_factory.mktemp("repo")))
@pytest.fixture
def make_repo(tmp_path_factory):
"""Factory for ebuild repo creation."""
+
def _make_repo(path=None, **kwargs):
- path = str(tmp_path_factory.mktemp('repo')) if path is None else path
+ path = str(tmp_path_factory.mktemp("repo")) if path is None else path
return EbuildRepo(path, **kwargs)
+
return _make_repo
diff --git a/src/pkgcore/repository/configured.py b/src/pkgcore/repository/configured.py
index 93d019cd4..024626a8b 100644
--- a/src/pkgcore/repository/configured.py
+++ b/src/pkgcore/repository/configured.py
@@ -32,8 +32,11 @@ class tree(prototype.tree):
def _mk_kls(self, pkg_kls_injections):
return make_wrapper(
- self, self.configurable, self.wrapped_attrs,
- kls_injections=pkg_kls_injections)
+ self,
+ self.configurable,
+ self.wrapped_attrs,
+ kls_injections=pkg_kls_injections,
+ )
def _get_pkg_kwds(self, pkg):
raise NotImplementedError
@@ -59,7 +62,8 @@ class tree(prototype.tree):
return self.raw_repo.itermatch(restrict, **kwds)
itermatch.__doc__ = prototype.tree.itermatch.__doc__.replace(
- "@param", "@keyword").replace(":keyword restrict:", ":param restrict:")
+ "@param", "@keyword"
+ ).replace(":keyword restrict:", ":param restrict:")
def __getitem__(self, key):
obj = self.package_class(self.raw_repo[key])
@@ -68,8 +72,10 @@ class tree(prototype.tree):
return obj
def __repr__(self):
- return '<%s.%s raw_repo=%r wrapped=%r @%#8x>' % (
- self.__class__.__module__, self.__class__.__name__,
- getattr(self, 'raw_repo', 'unset'),
- list(getattr(self, 'wrapped_attrs', {}).keys()),
- id(self))
+ return "<%s.%s raw_repo=%r wrapped=%r @%#8x>" % (
+ self.__class__.__module__,
+ self.__class__.__name__,
+ getattr(self, "raw_repo", "unset"),
+ list(getattr(self, "wrapped_attrs", {}).keys()),
+ id(self),
+ )
diff --git a/src/pkgcore/repository/errors.py b/src/pkgcore/repository/errors.py
index 4318950fe..eb3362398 100644
--- a/src/pkgcore/repository/errors.py
+++ b/src/pkgcore/repository/errors.py
@@ -36,6 +36,6 @@ class UnsupportedRepo(RepoError, PkgcoreUserException):
def __str__(self):
return (
- f'{self.repo.repo_id!r} repo: '
- f'unsupported repo EAPI {str(self.repo.eapi)!r}'
+ f"{self.repo.repo_id!r} repo: "
+ f"unsupported repo EAPI {str(self.repo.eapi)!r}"
)
diff --git a/src/pkgcore/repository/filtered.py b/src/pkgcore/repository/filtered.py
index d1b7f09b5..2e700d7e4 100644
--- a/src/pkgcore/repository/filtered.py
+++ b/src/pkgcore/repository/filtered.py
@@ -21,9 +21,10 @@ class tree(prototype.tree):
def __init__(self, repo, restrict, sentinel_val=False):
self.raw_repo = repo
self.sentinel_val = sentinel_val
- if not hasattr(self.raw_repo, 'itermatch'):
+ if not hasattr(self.raw_repo, "itermatch"):
raise errors.InitializationError(
- f"{self.raw_repo} is not a repository tree derivative")
+ f"{self.raw_repo} is not a repository tree derivative"
+ )
if not isinstance(restrict, restriction.base):
raise errors.InitializationError(f"{restrict} is not a restriction")
self.restrict = restrict
@@ -40,10 +41,12 @@ class tree(prototype.tree):
# (determined by repo's attributes) versus what does cost
# (metadata pull for example).
return self._filterfunc(
- self.restrict.match, self.raw_repo.itermatch(restrict, **kwds))
+ self.restrict.match, self.raw_repo.itermatch(restrict, **kwds)
+ )
itermatch.__doc__ = prototype.tree.itermatch.__doc__.replace(
- "@param", "@keyword").replace(":keyword restrict:", ":param restrict:")
+ "@param", "@keyword"
+ ).replace(":keyword restrict:", ":param restrict:")
def __len__(self):
count = 0
@@ -61,9 +64,10 @@ class tree(prototype.tree):
return v
def __repr__(self):
- return '<%s raw_repo=%r restrict=%r sentinel=%r @%#8x>' % (
+ return "<%s raw_repo=%r restrict=%r sentinel=%r @%#8x>" % (
self.__class__.__name__,
- getattr(self, 'raw_repo', 'unset'),
- getattr(self, 'restrict', 'unset'),
- getattr(self, 'sentinel_val', 'unset'),
- id(self))
+ getattr(self, "raw_repo", "unset"),
+ getattr(self, "restrict", "unset"),
+ getattr(self, "sentinel_val", "unset"),
+ id(self),
+ )
diff --git a/src/pkgcore/repository/misc.py b/src/pkgcore/repository/misc.py
index f913c04f3..d7f0ab008 100644
--- a/src/pkgcore/repository/misc.py
+++ b/src/pkgcore/repository/misc.py
@@ -12,7 +12,9 @@ from ..restrictions import packages
class nodeps_repo:
"""Repository wrapper that returns wrapped pkgs with deps wiped."""
- default_bdepend = default_depend = default_rdepend = default_pdepend = default_idepend = DepSet()
+ default_bdepend = (
+ default_depend
+ ) = default_rdepend = default_pdepend = default_idepend = DepSet()
def __init__(self, repo):
"""
@@ -21,14 +23,19 @@ class nodeps_repo:
self.raw_repo = repo
def itermatch(self, *a, **kwds):
- return (MutatedPkg(
- x, overrides={"bdepend": self.default_bdepend,
- "depend": self.default_depend,
- "rdepend": self.default_rdepend,
- "pdepend": self.default_pdepend,
- "idepend": self.default_idepend}
+ return (
+ MutatedPkg(
+ x,
+ overrides={
+ "bdepend": self.default_bdepend,
+ "depend": self.default_depend,
+ "rdepend": self.default_rdepend,
+ "pdepend": self.default_pdepend,
+ "idepend": self.default_idepend,
+ },
)
- for x in self.raw_repo.itermatch(*a, **kwds))
+ for x in self.raw_repo.itermatch(*a, **kwds)
+ )
def match(self, *a, **kwds):
return list(self.itermatch(*a, **kwds))
@@ -53,8 +60,8 @@ class restrict_repo:
def itermatch(self, *a, **kwds):
return (
- x for x in self.raw_repo.itermatch(*a, **kwds)
- if not self.restrict.match(x))
+ x for x in self.raw_repo.itermatch(*a, **kwds) if not self.restrict.match(x)
+ )
def match(self, *a, **kwds):
return list(self.itermatch(*a, **kwds))
@@ -102,9 +109,9 @@ class caching_repo:
def match(self, restrict):
v = self.__cache__.get(restrict)
if v is None:
- v = self.__cache__[restrict] = \
- caching_iter(
- self.__db__.itermatch(restrict, sorter=self.__strategy__))
+ v = self.__cache__[restrict] = caching_iter(
+ self.__db__.itermatch(restrict, sorter=self.__strategy__)
+ )
return v
def itermatch(self, restrict):
@@ -118,7 +125,6 @@ class caching_repo:
class multiplex_sorting_repo:
-
def __init__(self, sorter, repos):
self.__repos__ = tuple(repos)
self.__sorter__ = sorter
diff --git a/src/pkgcore/repository/multiplex.py b/src/pkgcore/repository/multiplex.py
index 1e7969d83..02e754c14 100644
--- a/src/pkgcore/repository/multiplex.py
+++ b/src/pkgcore/repository/multiplex.py
@@ -21,18 +21,23 @@ from . import errors, prototype
class operations(repo_interface.operations_proxy):
- ops_stop_after_first_supported = frozenset(
- ["install", "uninstall", "replace"])
+ ops_stop_after_first_supported = frozenset(["install", "uninstall", "replace"])
@klass.cached_property
def raw_operations(self):
- return frozenset(chain.from_iterable(
- tree.operations.raw_operations for tree in self.repo.trees))
+ return frozenset(
+ chain.from_iterable(
+ tree.operations.raw_operations for tree in self.repo.trees
+ )
+ )
@klass.cached_property
def enabled_operations(self):
- s = set(chain.from_iterable(
- tree.operations.enabled_operations for tree in self.repo.trees))
+ s = set(
+ chain.from_iterable(
+ tree.operations.enabled_operations for tree in self.repo.trees
+ )
+ )
return frozenset(self._apply_overrides(s))
def _setup_api(self):
@@ -58,7 +63,7 @@ class operations(repo_interface.operations_proxy):
return ret
-@configurable({'repos': 'refs:repo'}, typename='repo')
+@configurable({"repos": "refs:repo"}, typename="repo")
def config_tree(repos):
return tree(*repos)
@@ -83,9 +88,10 @@ class tree(prototype.tree):
def __init__(self, *trees):
super().__init__()
for x in trees:
- if not hasattr(x, 'itermatch'):
+ if not hasattr(x, "itermatch"):
raise errors.InitializationError(
- f'{x} is not a repository tree derivative')
+ f"{x} is not a repository tree derivative"
+ )
self.trees = trees
def _get_categories(self, *optional_category):
@@ -106,8 +112,7 @@ class tree(prototype.tree):
failures += 1
if failures == len(self.trees):
if optional_category:
- raise KeyError("category base '%s' not found" %
- str(optional_category))
+ raise KeyError("category base '%s' not found" % str(optional_category))
raise KeyError("failed getting categories")
return tuple(d)
@@ -120,7 +125,7 @@ class tree(prototype.tree):
except (errors.RepoError, KeyError):
failures += 1
if failures == len(self.trees):
- raise KeyError(f'category {category!r} not found')
+ raise KeyError(f"category {category!r} not found")
return tuple(d)
def _get_versions(self, package):
@@ -133,7 +138,7 @@ class tree(prototype.tree):
failures += 1
if failures == len(self.trees):
- raise KeyError(f'category {package!r} not found')
+ raise KeyError(f"category {package!r} not found")
return tuple(d)
def path_restrict(self, path):
@@ -156,13 +161,16 @@ class tree(prototype.tree):
return repo.path_restrict(path)
except ValueError:
raise
- raise ValueError(f'no repo contains: {path!r}')
+ raise ValueError(f"no repo contains: {path!r}")
def itermatch(self, restrict, **kwds):
sorter = kwds.get("sorter", iter)
if sorter is iter:
- return (match for repo in self.trees
- for match in repo.itermatch(restrict, **kwds))
+ return (
+ match
+ for repo in self.trees
+ for match in repo.itermatch(restrict, **kwds)
+ )
# ugly, and a bit slow, but works.
def f(x, y):
@@ -170,12 +178,13 @@ class tree(prototype.tree):
if l[0] == y:
return 1
return -1
+
f = post_curry(sorted_cmp, f, key=itemgetter(0))
- return iter_sort(
- f, *[repo.itermatch(restrict, **kwds) for repo in self.trees])
+ return iter_sort(f, *[repo.itermatch(restrict, **kwds) for repo in self.trees])
itermatch.__doc__ = prototype.tree.itermatch.__doc__.replace(
- "@param", "@keyword").replace(":keyword restrict:", ":param restrict:")
+ "@param", "@keyword"
+ ).replace(":keyword restrict:", ":param restrict:")
def __iter__(self):
return (pkg for repo in self.trees for pkg in repo)
@@ -214,7 +223,7 @@ class tree(prototype.tree):
except KeyError:
pass
# made it here, no match.
- raise KeyError(f'package {key} not found')
+ raise KeyError(f"package {key} not found")
def __add__(self, other):
if isinstance(other, prototype.tree):
@@ -225,7 +234,8 @@ class tree(prototype.tree):
return tree(*(self.trees + other.trees))
raise TypeError(
"cannot add '%s' and '%s' objects"
- % (self.__class__.__name__, other.__class__.__name__))
+ % (self.__class__.__name__, other.__class__.__name__)
+ )
def __radd__(self, other):
if isinstance(other, prototype.tree):
@@ -236,13 +246,16 @@ class tree(prototype.tree):
return tree(*(other.trees + self.trees))
raise TypeError(
"cannot add '%s' and '%s' objects"
- % (other.__class__.__name__, self.__class__.__name__))
+ % (other.__class__.__name__, self.__class__.__name__)
+ )
def __repr__(self):
- return '<%s.%s trees=%r @%#8x>' % (
- self.__class__.__module__, self.__class__.__name__,
- getattr(self, 'trees', 'unset'),
- id(self))
+ return "<%s.%s trees=%r @%#8x>" % (
+ self.__class__.__module__,
+ self.__class__.__name__,
+ getattr(self, "trees", "unset"),
+ id(self),
+ )
@property
def pkg_masks(self):
diff --git a/src/pkgcore/repository/prototype.py b/src/pkgcore/repository/prototype.py
index 9c615dba8..65d926147 100644
--- a/src/pkgcore/repository/prototype.py
+++ b/src/pkgcore/repository/prototype.py
@@ -2,9 +2,7 @@
base repository template
"""
-__all__ = (
- "CategoryIterValLazyDict", "PackageMapping", "VersionMapping", "tree"
-)
+__all__ = ("CategoryIterValLazyDict", "PackageMapping", "VersionMapping", "tree")
from pathlib import Path
@@ -55,7 +53,6 @@ class CategoryIterValLazyDict(IterValLazyDict):
class PackageMapping(DictMixin):
-
def __init__(self, parent_mapping, pull_vals):
self._cache = {}
self._parent = parent_mapping
@@ -84,7 +81,6 @@ class PackageMapping(DictMixin):
class VersionMapping(DictMixin):
-
def __init__(self, parent_mapping, pull_vals):
self._cache = {}
self._parent = parent_mapping
@@ -146,7 +142,8 @@ class tree:
def __init__(self, frozen=False):
self.categories = CategoryIterValLazyDict(
- self._get_categories, self._get_categories)
+ self._get_categories, self._get_categories
+ )
self.packages = PackageMapping(self.categories, self._get_packages)
self.versions = VersionMapping(self.packages, self._get_versions)
@@ -197,7 +194,7 @@ class tree:
if isinstance(obj, str):
path = Path(obj)
try:
- repo_path = Path(getattr(self, 'location')).resolve()
+ repo_path = Path(getattr(self, "location")).resolve()
except AttributeError:
return False
@@ -229,8 +226,17 @@ class tree:
def match(self, atom, **kwds):
return list(self.itermatch(atom, **kwds))
- def itermatch(self, restrict, sorter=None, pkg_filter=None, versioned=True,
- raw_pkg_cls=None, pkg_cls=None, force=None, yield_none=False):
+ def itermatch(
+ self,
+ restrict,
+ sorter=None,
+ pkg_filter=None,
+ versioned=True,
+ raw_pkg_cls=None,
+ pkg_cls=None,
+ force=None,
+ yield_none=False,
+ ):
"""Generator that yields packages match a restriction.
:type restrict: :obj:`pkgcore.restrictions.packages.PackageRestriction`
@@ -254,7 +260,8 @@ class tree:
if not isinstance(restrict, restriction.base):
raise TypeError(
f"restrict must be a pkgcore.restriction.restrictions.base instance: "
- f"got {restrict!r}")
+ f"got {restrict!r}"
+ )
if sorter is None:
sorter = iter
@@ -278,14 +285,24 @@ class tree:
else:
match = restrict.force_False
return self._internal_match(
- candidates, match, raw_pkg_cls=raw_pkg_cls, pkg_cls=pkg_cls,
- yield_none=yield_none, sorter=sorter, pkg_filter=pkg_filter,
- versioned=versioned)
-
- def _internal_gen_candidates(self, candidates, sorter, raw_pkg_cls, pkg_filter, versioned):
+ candidates,
+ match,
+ raw_pkg_cls=raw_pkg_cls,
+ pkg_cls=pkg_cls,
+ yield_none=yield_none,
+ sorter=sorter,
+ pkg_filter=pkg_filter,
+ versioned=versioned,
+ )
+
+ def _internal_gen_candidates(
+ self, candidates, sorter, raw_pkg_cls, pkg_filter, versioned
+ ):
for cp in sorter(candidates):
if versioned:
- pkgs = (raw_pkg_cls(cp[0], cp[1], ver) for ver in self.versions.get(cp, ()))
+ pkgs = (
+ raw_pkg_cls(cp[0], cp[1], ver) for ver in self.versions.get(cp, ())
+ )
else:
if self.versions.get(cp, ()):
pkgs = (raw_pkg_cls(cp[0], cp[1]),)
@@ -294,7 +311,9 @@ class tree:
pkgs = iter(pkgs)
yield from sorter(pkg_filter(pkgs))
- def _internal_match(self, candidates, match_func, pkg_cls, yield_none=False, **kwargs):
+ def _internal_match(
+ self, candidates, match_func, pkg_cls, yield_none=False, **kwargs
+ ):
for pkg in self._internal_gen_candidates(candidates, **kwargs):
if pkg_cls is not None:
pkg = pkg_cls(pkg)
@@ -309,11 +328,12 @@ class tree:
if not isinstance(restrict, boolean.base) or isinstance(restrict, atom):
return self._fast_identify_candidates(restrict, sorter)
dsolutions = [
- ([c.restriction
- for c in collect_package_restrictions(x, ("category",))],
- [p.restriction
- for p in collect_package_restrictions(x, ("package",))])
- for x in restrict.iter_dnf_solutions(True)]
+ (
+ [c.restriction for c in collect_package_restrictions(x, ("category",))],
+ [p.restriction for p in collect_package_restrictions(x, ("package",))],
+ )
+ for x in restrict.iter_dnf_solutions(True)
+ ]
# see if any solution state isn't dependent on cat/pkg in anyway.
# if so, search whole search space.
@@ -324,7 +344,8 @@ class tree:
return (
(c, p)
for c in sorter(self.categories)
- for p in sorter(self.packages.get(c, ())))
+ for p in sorter(self.packages.get(c, ()))
+ )
# simple cases first.
# if one specifies categories, and one doesn't
@@ -340,22 +361,24 @@ class tree:
# ok. so... one doesn't specify a category, but they all
# specify packages (or don't)
pr = values.OrRestriction(
- *tuple(iflatten_instance(
- (x[1] for x in dsolutions if x[1]), values.base)))
+ *tuple(
+ iflatten_instance((x[1] for x in dsolutions if x[1]), values.base)
+ )
+ )
return (
(c, p)
for c in sorter(self.categories)
- for p in sorter(pgetter(c, [])) if pr.match(p))
+ for p in sorter(pgetter(c, []))
+ if pr.match(p)
+ )
elif any(True for x in dsolutions[1:] if bool(x[1]) != pkg_specified):
# one (or more) don't specify pkgs, but they all specify cats.
cr = values.OrRestriction(
- *tuple(iflatten_instance(
- (x[0] for x in dsolutions), values.base)))
+ *tuple(iflatten_instance((x[0] for x in dsolutions), values.base))
+ )
cats_iter = (c for c in sorter(self.categories) if cr.match(c))
- return (
- (c, p)
- for c in cats_iter for p in sorter(pgetter(c, [])))
+ return ((c, p) for c in cats_iter for p in sorter(pgetter(c, [])))
return self._fast_identify_candidates(restrict, sorter)
@@ -365,16 +388,20 @@ class tree:
cat_exact = set()
pkg_exact = set()
- for x in collect_package_restrictions(restrict,
- ("category", "package",)):
+ for x in collect_package_restrictions(
+ restrict,
+ (
+ "category",
+ "package",
+ ),
+ ):
if x.attr == "category":
cat_restrict.add(x.restriction)
elif x.attr == "package":
pkg_restrict.add(x.restriction)
for e, s in ((pkg_exact, pkg_restrict), (cat_exact, cat_restrict)):
- l = [x for x in s
- if isinstance(x, values.StrExactMatch) and not x.negate]
+ l = [x for x in s if isinstance(x, values.StrExactMatch) and not x.negate]
s.difference_update(l)
e.update(x.exact for x in l)
del l
@@ -396,8 +423,7 @@ class tree:
cat_restrict.add(values.ContainmentMatch(frozenset(cat_exact)))
cats_iter = sorter(self._cat_filter(cat_restrict))
elif cat_restrict:
- cats_iter = self._cat_filter(
- cat_restrict, negate=restrict.negate)
+ cats_iter = self._cat_filter(cat_restrict, negate=restrict.negate)
else:
cats_iter = sorter(self.categories)
@@ -407,25 +433,20 @@ class tree:
pkg_exact = tuple(pkg_exact)
else:
pkg_exact = sorter(pkg_exact)
- return (
- (c, p)
- for c in cats_iter for p in pkg_exact)
+ return ((c, p) for c in cats_iter for p in pkg_exact)
else:
pkg_restrict.add(values.ContainmentMatch(frozenset(pkg_exact)))
if pkg_restrict:
- return self._package_filter(
- cats_iter, pkg_restrict, negate=restrict.negate)
+ return self._package_filter(cats_iter, pkg_restrict, negate=restrict.negate)
elif not cat_restrict:
if sorter is iter and not cat_exact:
return self.versions
else:
return (
- (c, p) for c in
- cats_iter for p in sorter(self.packages.get(c, ())))
- return (
- (c, p)
- for c in cats_iter for p in sorter(self.packages.get(c, ())))
+ (c, p) for c in cats_iter for p in sorter(self.packages.get(c, ()))
+ )
+ return ((c, p) for c in cats_iter for p in sorter(self.packages.get(c, ())))
def _cat_filter(self, cat_restricts, negate=False):
sentinel = not negate
@@ -496,7 +517,7 @@ class tree:
@property
def aliases(self):
- potentials = (getattr(self, key, None) for key in ('repo_id', 'location'))
+ potentials = (getattr(self, key, None) for key in ("repo_id", "location"))
return tuple(x for x in potentials if x is not None)
@jit_attr
diff --git a/src/pkgcore/repository/syncable.py b/src/pkgcore/repository/syncable.py
index 574c2af46..8bdc56236 100644
--- a/src/pkgcore/repository/syncable.py
+++ b/src/pkgcore/repository/syncable.py
@@ -8,7 +8,7 @@ class tree:
operations_kls = sync_operations
def __init__(self, sync=None):
- object.__setattr__(self, '_syncer', sync)
+ object.__setattr__(self, "_syncer", sync)
@property
def operations(self):
diff --git a/src/pkgcore/repository/util.py b/src/pkgcore/repository/util.py
index 9273b29b8..013c16bd3 100644
--- a/src/pkgcore/repository/util.py
+++ b/src/pkgcore/repository/util.py
@@ -1,6 +1,8 @@
__all__ = (
- "SimpleTree", "RepositoryGroup",
- "get_raw_repos", "get_virtual_repos",
+ "SimpleTree",
+ "RepositoryGroup",
+ "get_raw_repos",
+ "get_virtual_repos",
)
from snakeoil import klass
@@ -21,8 +23,9 @@ class SimpleTree(prototype.tree):
repo_id (str): repo ID
"""
- def __init__(self, cpv_dict, pkg_klass=None, livefs=False, frozen=True,
- repo_id=None):
+ def __init__(
+ self, cpv_dict, pkg_klass=None, livefs=False, frozen=True, repo_id=None
+ ):
self.cpv_dict = cpv_dict
if pkg_klass is None:
pkg_klass = VersionedCPV
@@ -54,8 +57,9 @@ class SimpleTree(prototype.tree):
super().notify_remove_package(pkg)
def notify_add_package(self, pkg):
- self.cpv_dict.setdefault(
- pkg.category, {}).setdefault(pkg.package, []).append(pkg.fullver)
+ self.cpv_dict.setdefault(pkg.category, {}).setdefault(pkg.package, []).append(
+ pkg.fullver
+ )
super().notify_add_package(pkg)
@@ -119,7 +123,8 @@ class RepositoryGroup(DictMixin):
return RepositoryGroup(self.repos + tuple(other))
raise TypeError(
"cannot add '%s' and '%s' objects"
- % (self.__class__.__name__, other.__class__.__name__))
+ % (self.__class__.__name__, other.__class__.__name__)
+ )
def __radd__(self, other):
if isinstance(other, prototype.tree):
@@ -133,7 +138,8 @@ class RepositoryGroup(DictMixin):
return RepositoryGroup(tuple(other) + self.repos)
raise TypeError(
"cannot add '%s' and '%s' objects"
- % (other.__class__.__name__, self.__class__.__name__))
+ % (other.__class__.__name__, self.__class__.__name__)
+ )
@classmethod
def change_repos(cls, repos):
diff --git a/src/pkgcore/repository/virtual.py b/src/pkgcore/repository/virtual.py
index 610cee464..42f43fa7a 100644
--- a/src/pkgcore/repository/virtual.py
+++ b/src/pkgcore/repository/virtual.py
@@ -36,9 +36,10 @@ class tree(prototype.tree):
pkls = self.package_class
for cp in candidates:
for pkg in sorter(
- pkls(provider, cp[0], cp[1], ver)
- for ver in self.versions.get(cp, ())
- for provider in self._expand_vers(cp, ver)):
+ pkls(provider, cp[0], cp[1], ver)
+ for ver in self.versions.get(cp, ())
+ for provider in self._expand_vers(cp, ver)
+ ):
yield pkg
def _get_categories(self, *optional_category):
@@ -51,8 +52,8 @@ class tree(prototype.tree):
raise NotImplementedError(self, "_load_data")
def _get_packages(self, category):
- if category != 'virtual':
- raise KeyError(f'no {category} category for this repository')
+ if category != "virtual":
+ raise KeyError(f"no {category} category for this repository")
self._load_data()
return self.packages[category]
@@ -60,10 +61,21 @@ class tree(prototype.tree):
class InjectedPkg(pkg_base.wrapper):
__slots__ = (
- "bdepend", "depend", "rdepend", "pdepend", "idepend",
- "repo", "repo_id", "built", "versioned_atom", "unversioned_atom", "data",
+ "bdepend",
+ "depend",
+ "rdepend",
+ "pdepend",
+ "idepend",
+ "repo",
+ "repo_id",
+ "built",
+ "versioned_atom",
+ "unversioned_atom",
+ "data",
)
- default_bdepend = default_depend = default_rdepend = default_pdepend = default_idepend = DepSet()
+ default_bdepend = (
+ default_depend
+ ) = default_rdepend = default_pdepend = default_idepend = DepSet()
package_is_real = False
is_supported = True
@@ -111,7 +123,7 @@ class InjectedPkg(pkg_base.wrapper):
return not self._raw_pkg.intersects(other)
def __str__(self):
- return f'injected restriction pkg: {self._raw_pkg}'
+ return f"injected restriction pkg: {self._raw_pkg}"
def __repr__(self):
return "<%s cpv=%r @%#8x>" % (self.__class__, self.cpvstr, id(self))
diff --git a/src/pkgcore/repository/wrapper.py b/src/pkgcore/repository/wrapper.py
index eebc75402..5bfd0131f 100644
--- a/src/pkgcore/repository/wrapper.py
+++ b/src/pkgcore/repository/wrapper.py
@@ -26,7 +26,8 @@ class tree(prototype.tree):
self.raw_repo = repo
if not isinstance(self.raw_repo, prototype.tree):
raise errors.InitializationError(
- f'{self.raw_repo!r} is not a repository tree derivative')
+ f"{self.raw_repo!r} is not a repository tree derivative"
+ )
self.package_class = package_class
self.raw_repo = repo
diff --git a/src/pkgcore/resolver/choice_point.py b/src/pkgcore/resolver/choice_point.py
index ea336c9a2..02bcf6cba 100644
--- a/src/pkgcore/resolver/choice_point.py
+++ b/src/pkgcore/resolver/choice_point.py
@@ -7,8 +7,17 @@ from snakeoil.sequences import iter_stable_unique
class choice_point:
__slots__ = (
- "__weakref__", "atom", "matches", "matches_cur", "solution_filters",
- "_prdeps", "_rdeps", "_deps", "_bdeps", "_ideps")
+ "__weakref__",
+ "atom",
+ "matches",
+ "matches_cur",
+ "solution_filters",
+ "_prdeps",
+ "_rdeps",
+ "_deps",
+ "_bdeps",
+ "_ideps",
+ )
def __init__(self, a, matches):
self.atom = a
@@ -31,13 +40,16 @@ class choice_point:
and post merge deps.
"""
m = self.matches_cur
- return (len(self.solution_filters),
- m.repo, m,
+ return (
+ len(self.solution_filters),
+ m.repo,
+ m,
self.matches,
self._bdeps,
self._deps,
self._rdeps,
- self._prdeps)
+ self._prdeps,
+ )
@staticmethod
def _filter_choices(cnf_reqs, filterset):
@@ -182,5 +194,4 @@ class choice_point:
return True
def __str__(self):
- return "%s: (%s, %s)" % (self.__class__.__name__,
- self.atom, self.matches_cur)
+ return "%s: (%s, %s)" % (self.__class__.__name__, self.atom, self.matches_cur)
diff --git a/src/pkgcore/resolver/pigeonholes.py b/src/pkgcore/resolver/pigeonholes.py
index b0f085d86..24314ddea 100644
--- a/src/pkgcore/resolver/pigeonholes.py
+++ b/src/pkgcore/resolver/pigeonholes.py
@@ -46,7 +46,8 @@ class PigeonHoledSlots:
"""add a limiter, returning any conflicting objs"""
if not isinstance(atom, restriction.base):
raise TypeError(
- f"atom must be a restriction.base derivative: got {atom!r}, key={key!r}")
+ f"atom must be a restriction.base derivative: got {atom!r}, key={key!r}"
+ )
# debug.
if key is None:
diff --git a/src/pkgcore/resolver/plan.py b/src/pkgcore/resolver/plan.py
index 21d44c72f..548643854 100644
--- a/src/pkgcore/resolver/plan.py
+++ b/src/pkgcore/resolver/plan.py
@@ -41,6 +41,7 @@ def highest_iter_sort(l, pkg_grabber=pkg_grabber):
:param pkg_grabber: function to use as an attrgetter
:return: sorted list of packages
"""
+
def f(x, y):
c = cmp(x, y)
if c:
@@ -52,6 +53,7 @@ def highest_iter_sort(l, pkg_grabber=pkg_grabber):
elif y.repo.livefs:
return -1
return 0
+
sort_cmp(l, f, key=pkg_grabber, reverse=True)
return l
@@ -63,6 +65,7 @@ def downgrade_iter_sort(restrict, l, pkg_grabber=pkg_grabber):
:param pkg_grabber: function to use as an attrgetter
:return: sorted list of packages
"""
+
def f(x, y):
c = cmp(x, y)
if x.repo.livefs:
@@ -78,6 +81,7 @@ def downgrade_iter_sort(restrict, l, pkg_grabber=pkg_grabber):
elif restrict.match(y):
return 1
return c
+
sort_cmp(l, f, key=pkg_grabber, reverse=True)
return l
@@ -89,6 +93,7 @@ def lowest_iter_sort(l, pkg_grabber=pkg_grabber):
:param pkg_grabber: function to use as an attrgetter
:return: sorted list of packages
"""
+
def f(x, y):
c = cmp(x, y)
if c:
@@ -100,29 +105,53 @@ def lowest_iter_sort(l, pkg_grabber=pkg_grabber):
elif y.repo.livefs:
return 1
return 0
+
sort_cmp(l, f, key=pkg_grabber)
return l
class MutableContainmentRestriction(values.base):
- __slots__ = ('_blacklist', 'match')
+ __slots__ = ("_blacklist", "match")
def __init__(self, blacklist):
sf = object.__setattr__
- sf(self, '_blacklist', blacklist)
- sf(self, 'match', self._blacklist.__contains__)
+ sf(self, "_blacklist", blacklist)
+ sf(self, "match", self._blacklist.__contains__)
class resolver_frame:
- __slots__ = ("parent", "atom", "choices", "mode", "start_point", "dbs",
- "depth", "drop_cycles", "__weakref__", "ignored", "vdb_limited",
- "events", "succeeded")
-
- def __init__(self, parent, mode, atom, choices, dbs, start_point, depth,
- drop_cycles, ignored=False, vdb_limited=False):
- assert hasattr(dbs, 'itermatch')
+ __slots__ = (
+ "parent",
+ "atom",
+ "choices",
+ "mode",
+ "start_point",
+ "dbs",
+ "depth",
+ "drop_cycles",
+ "__weakref__",
+ "ignored",
+ "vdb_limited",
+ "events",
+ "succeeded",
+ )
+
+ def __init__(
+ self,
+ parent,
+ mode,
+ atom,
+ choices,
+ dbs,
+ start_point,
+ depth,
+ drop_cycles,
+ ignored=False,
+ vdb_limited=False,
+ ):
+ assert hasattr(dbs, "itermatch")
self.parent = parent
self.atom = atom
self.choices = choices
@@ -149,7 +178,7 @@ class resolver_frame:
pkg = "exhausted"
else:
cpv = pkg.cpvstr
- pkg = getattr(pkg.repo, 'repo_id', None)
+ pkg = getattr(pkg.repo, "repo_id", None)
if pkg is not None:
pkg = f"{cpv}::{pkg}"
else:
@@ -158,11 +187,15 @@ class resolver_frame:
result = ": %s" % (self.succeeded and "succeeded" or "failed")
else:
result = ""
- return "frame%s: mode %r: atom %s: current %s%s%s%s" % \
- (result, self.mode, self.atom, pkg,
- self.drop_cycles and ": cycle dropping" or '',
- self.ignored and ": ignored" or '',
- self.vdb_limited and ": vdb limited" or '')
+ return "frame%s: mode %r: atom %s: current %s%s%s%s" % (
+ result,
+ self.mode,
+ self.atom,
+ pkg,
+ self.drop_cycles and ": cycle dropping" or "",
+ self.ignored and ": ignored" or "",
+ self.vdb_limited and ": vdb limited" or "",
+ )
@property
def current_pkg(self):
@@ -178,29 +211,38 @@ class resolver_stack(deque):
frame_klass = resolver_frame
depth = property(len)
current_frame = property(operator.itemgetter(-1))
- _filter_ignored = staticmethod(
- partial(filterfalse, operator.attrgetter("ignored")))
+ _filter_ignored = staticmethod(partial(filterfalse, operator.attrgetter("ignored")))
# this *has* to be a property, else it creates a cycle.
- parent = property(lambda s:s)
+ parent = property(lambda s: s)
def __init__(self):
self.events = []
def __str__(self):
- return 'resolver stack:\n %s' % '\n '.join(str(x) for x in self)
+ return "resolver stack:\n %s" % "\n ".join(str(x) for x in self)
def __repr__(self):
- return '<%s: %r>' % (self.__class__.__name__,
- tuple(repr(x) for x in self))
+ return "<%s: %r>" % (self.__class__.__name__, tuple(repr(x) for x in self))
- def add_frame(self, mode, atom, choices, dbs, start_point, drop_cycles, vdb_limited=False):
+ def add_frame(
+ self, mode, atom, choices, dbs, start_point, drop_cycles, vdb_limited=False
+ ):
if not self:
parent = self
else:
parent = self[-1]
- frame = self.frame_klass(parent, mode, atom, choices, dbs, start_point,
- self.depth + 1, drop_cycles, vdb_limited=vdb_limited)
+ frame = self.frame_klass(
+ parent,
+ mode,
+ atom,
+ choices,
+ dbs,
+ start_point,
+ self.depth + 1,
+ drop_cycles,
+ vdb_limited=vdb_limited,
+ )
self.append(frame)
return frame
@@ -219,9 +261,12 @@ class resolver_stack(deque):
pkg = trg_frame.current_pkg
slot = pkg.slot
key = pkg.key
- kwds['skip_trg_frame'] = True
- return (frame for frame in self._cycles(trg_frame, **kwds)
- if key == frame.current_pkg.key and slot == frame.current_pkg.slot)
+ kwds["skip_trg_frame"] = True
+ return (
+ frame
+ for frame in self._cycles(trg_frame, **kwds)
+ if key == frame.current_pkg.key and slot == frame.current_pkg.slot
+ )
def _cycles(self, trg_frame, start=0, reverse=False, skip_trg_frame=True):
if reverse:
@@ -247,12 +292,22 @@ class resolver_stack(deque):
class merge_plan:
- vdb_restrict = packages.PackageRestriction("repo.livefs", values.EqualityMatch(True))
-
- def __init__(self, dbs, per_repo_strategy, global_strategy=None,
- depset_reorder_strategy=None, process_built_depends=False,
- drop_cycles=False, debug=False, debug_handle=None,
- pdb_intercept=None):
+ vdb_restrict = packages.PackageRestriction(
+ "repo.livefs", values.EqualityMatch(True)
+ )
+
+ def __init__(
+ self,
+ dbs,
+ per_repo_strategy,
+ global_strategy=None,
+ depset_reorder_strategy=None,
+ process_built_depends=False,
+ drop_cycles=False,
+ debug=False,
+ debug_handle=None,
+ pdb_intercept=None,
+ ):
if debug:
if debug_handle is None:
debug_handle = sys.stdout
@@ -284,19 +339,23 @@ class merge_plan:
self.state = state.plan_state()
vdb_state_filter_restrict = MutableContainmentRestriction(self.state.vdb_filter)
self.livefs_dbs = multiplex.tree(
- *[filtered.tree(x, vdb_state_filter_restrict)
- for x in self.all_raw_dbs if x.livefs])
+ *[
+ filtered.tree(x, vdb_state_filter_restrict)
+ for x in self.all_raw_dbs
+ if x.livefs
+ ]
+ )
self.insoluble = set()
self.vdb_preloaded = False
- self._ensure_livefs_is_loaded = \
- self._ensure_livefs_is_loaded_nonpreloaded
+ self._ensure_livefs_is_loaded = self._ensure_livefs_is_loaded_nonpreloaded
self.drop_cycles = drop_cycles
self.process_built_depends = process_built_depends
self._debugging = debug
if debug:
- self._rec_add_atom = partial(self._stack_debugging_rec_add_atom,
- self._rec_add_atom)
+ self._rec_add_atom = partial(
+ self._stack_debugging_rec_add_atom, self._rec_add_atom
+ )
self._debugging_depth = 0
self._debugging_drop_cycles = False
@@ -309,47 +368,54 @@ class merge_plan:
def notify_starting_mode(self, mode, stack):
if mode == "pdepend":
- mode = 'prdepends'
+ mode = "prdepends"
self._dprint(
"%s:%s%s: started: %s",
- (mode, ' ' * ((stack.current_frame.depth * 2) + 12 - len(mode)),
+ (
+ mode,
+ " " * ((stack.current_frame.depth * 2) + 12 - len(mode)),
stack.current_frame.atom,
- stack.current_frame.choices.current_pkg)
- )
+ stack.current_frame.choices.current_pkg,
+ ),
+ )
def notify_trying_choice(self, stack, atom, choices):
self._dprint(
- "choose for %s%s, %s",
- (stack.depth *2*" ", atom, choices.current_pkg))
- stack.add_event(('inspecting', choices.current_pkg))
+ "choose for %s%s, %s", (stack.depth * 2 * " ", atom, choices.current_pkg)
+ )
+ stack.add_event(("inspecting", choices.current_pkg))
def notify_choice_failed(self, stack, atom, choices, msg, msg_args=()):
- stack[-1].events.append(("choice", str(choices.current_pkg), False, msg % msg_args))
+ stack[-1].events.append(
+ ("choice", str(choices.current_pkg), False, msg % msg_args)
+ )
if msg:
- msg = ': %s' % (msg % msg_args)
+ msg = ": %s" % (msg % msg_args)
self._dprint(
"choice for %s%s, %s failed%s",
- (stack.depth * 2 * ' ', atom, choices.current_pkg, msg))
+ (stack.depth * 2 * " ", atom, choices.current_pkg, msg),
+ )
- def notify_choice_succeeded(self, stack, atom, choices, msg='', msg_args=()):
+ def notify_choice_succeeded(self, stack, atom, choices, msg="", msg_args=()):
stack[-1].events.append(("choice", str(choices.current_pkg), True, msg))
if msg:
- msg = ': %s' % (msg % msg_args)
+ msg = ": %s" % (msg % msg_args)
self._dprint(
"choice for %s%s, %s succeeded%s",
- (stack.depth * 2 * ' ', atom, choices.current_pkg, msg))
+ (stack.depth * 2 * " ", atom, choices.current_pkg, msg),
+ )
- def notify_viable(self, stack, atom, viable, msg='', pre_solved=False):
+ def notify_viable(self, stack, atom, viable, msg="", pre_solved=False):
t_viable = viable and "processing" or "not viable"
if pre_solved and viable:
t_viable = "pre-solved"
- t_msg = msg and (" "+msg) or ''
- s=''
+ t_msg = msg and (" " + msg) or ""
+ s = ""
if stack:
s = " for %s " % (stack[-1].atom)
self._dprint(
- "%s%s%s%s%s",
- (t_viable.ljust(13), " "*stack.depth, atom, s, t_msg))
+ "%s%s%s%s%s", (t_viable.ljust(13), " " * stack.depth, atom, s, t_msg)
+ )
stack.add_event(("viable", viable, pre_solved, atom, msg))
def load_vdb_state(self):
@@ -359,11 +425,10 @@ class merge_plan:
self._dprint("insertion of %s: %s", (pkg, ret), "vdb")
if ret:
raise Exception(
- "couldn't load vdb state, %s %s" %
- (pkg.versioned_atom, ret))
+ "couldn't load vdb state, %s %s" % (pkg.versioned_atom, ret)
+ )
self.vdb_preloaded = True
- self._ensure_livefs_is_loaded = \
- self._ensure_livefs_is_loaded_preloaded
+ self._ensure_livefs_is_loaded = self._ensure_livefs_is_loaded_preloaded
def add_atoms(self, restricts, finalize=False):
if restricts:
@@ -401,17 +466,17 @@ class merge_plan:
def _stack_debugging_rec_add_atom(self, func, atom, stack, dbs, **kwds):
current = len(stack)
- cycles = kwds.get('drop_cycles', False)
+ cycles = kwds.get("drop_cycles", False)
reset_cycles = False
if cycles and not self._debugging_drop_cycles:
self._debugging_drop_cycles = reset_cycles = True
if not reset_cycles:
self._debugging_depth += 1
- assert current == self._debugging_depth -1
+ assert current == self._debugging_depth - 1
ret = func(atom, stack, dbs, **kwds)
assert current == len(stack)
- assert current == self._debugging_depth -1
+ assert current == self._debugging_depth - 1
if not reset_cycles:
self._debugging_depth -= 1
else:
@@ -424,7 +489,7 @@ class merge_plan:
:return: False on no issues (inserted successfully),
else a list of the stack that screwed it up.
"""
- assert hasattr(dbs, 'itermatch')
+ assert hasattr(dbs, "itermatch")
limit_to_vdb = dbs == self.livefs_dbs
matches = self._viable(stack, mode, atom, dbs, drop_cycles, limit_to_vdb)
@@ -442,13 +507,15 @@ class merge_plan:
if limit_to_vdb:
self._dprint(
"processing %s%s [%s]; mode %s vdb bound",
- (depth*2*" ", atom, stack[-1].atom, mode))
+ (depth * 2 * " ", atom, stack[-1].atom, mode),
+ )
else:
self._dprint(
"processing %s%s [%s]; mode %s",
- (depth*2*" ", atom, stack[-1].atom, mode))
+ (depth * 2 * " ", atom, stack[-1].atom, mode),
+ )
else:
- self._dprint("processing %s%s", (depth*2*" ", atom))
+ self._dprint("processing %s%s", (depth * 2 * " ", atom))
ret = self.check_for_cycles(stack, stack.current_frame)
if ret is not True:
@@ -467,9 +534,16 @@ class merge_plan:
"no state change detected, "
"old %r != new %r\nchoices(%r)\ncurrent(%r)\n"
"bdepend(%r)\ndepend(%r)\nrdepend(%r)\npdepend(%r)\n"
- "idepend(%r)" % (
- last_state, new_state, tuple(choices.matches), choices.current_pkg,
- choices.bdepend, choices.depend, choices.rdepend, choices.pdepend,
+ "idepend(%r)"
+ % (
+ last_state,
+ new_state,
+ tuple(choices.matches),
+ choices.current_pkg,
+ choices.bdepend,
+ choices.depend,
+ choices.rdepend,
+ choices.pdepend,
choices.idepend,
)
)
@@ -480,26 +554,30 @@ class merge_plan:
if not choices.current_pkg.built or self.process_built_depends:
new_additions, failures = self.process_dependencies_and_blocks(
- stack, choices, 'depend', atom, depth)
+ stack, choices, "depend", atom, depth
+ )
if failures:
continue
additions += new_additions
new_additions, failures = self.process_dependencies_and_blocks(
- stack, choices, 'bdepend', atom, depth)
+ stack, choices, "bdepend", atom, depth
+ )
if failures:
continue
additions += new_additions
new_additions, failures = self.process_dependencies_and_blocks(
- stack, choices, 'rdepend', atom, depth)
+ stack, choices, "rdepend", atom, depth
+ )
if failures:
continue
additions += new_additions
# TODO: do we need a conditional for merging a pkg here?
new_additions, failures = self.process_dependencies_and_blocks(
- stack, choices, 'idepend', atom, depth)
+ stack, choices, "idepend", atom, depth
+ )
if failures:
continue
additions += new_additions
@@ -509,21 +587,22 @@ class merge_plan:
# this means somehow the node already slipped in.
# so we exit now, we are satisfied
self.notify_choice_succeeded(
- stack, atom, choices,
- "already exists in the state plan")
+ stack, atom, choices, "already exists in the state plan"
+ )
stack.pop_frame(True)
return None
elif l is not None:
# failure.
self.notify_choice_failed(
- stack, atom, choices,
- "failed inserting: %s", l)
+ stack, atom, choices, "failed inserting: %s", l
+ )
self.state.backtrack(stack.current_frame.start_point)
choices.force_next_pkg()
continue
new_additions, failures = self.process_dependencies_and_blocks(
- stack, choices, 'pdepend', atom, depth)
+ stack, choices, "pdepend", atom, depth
+ )
if failures:
continue
additions += new_additions
@@ -532,21 +611,25 @@ class merge_plan:
stack.pop_frame(True)
return None
- self._dprint("no solution %s%s", (depth*2*" ", atom))
- stack.add_event(("debug", "ran out of choices",))
+ self._dprint("no solution %s%s", (depth * 2 * " ", atom))
+ stack.add_event(
+ (
+ "debug",
+ "ran out of choices",
+ )
+ )
self.state.backtrack(stack.current_frame.start_point)
# saving roll. if we're allowed to drop cycles, try it again.
# this needs to be *far* more fine grained also. it'll try
# regardless of if it's a cycle issue
if not drop_cycles and self.drop_cycles:
- stack.add_event(("cycle", stack.current_frame, "trying to drop any cycles"),)
- self._dprint(
- "trying saving throw for %s ignoring cycles",
- atom, "cycle")
+ stack.add_event(
+ ("cycle", stack.current_frame, "trying to drop any cycles"),
+ )
+ self._dprint("trying saving throw for %s ignoring cycles", atom, "cycle")
# note everything is restored to a pristine state prior also.
stack[-1].ignored = True
- l = self._rec_add_atom(atom, stack, dbs,
- mode=mode, drop_cycles=True)
+ l = self._rec_add_atom(atom, stack, dbs, mode=mode, drop_cycles=True)
if not l:
stack.pop_frame(True)
return None
@@ -571,15 +654,17 @@ class merge_plan:
:obj:`caching_iter` (not solved, but viable), :obj:`choice_point`
"""
if self.pdb_intercept.match(atom):
- import pdb;pdb.set_trace()
+ import pdb
+
+ pdb.set_trace()
choices = ret = None
if atom in self.insoluble:
- ret = ((False, "globally insoluble"),{})
+ ret = ((False, "globally insoluble"), {})
matches = ()
else:
matches = self.state.match_atom(atom)
if matches:
- ret = ((True,), {"pre_solved":True})
+ ret = ((True,), {"pre_solved": True})
else:
# not in the plan thus far.
matches = caching_iter(dbs.itermatch(atom))
@@ -590,16 +675,25 @@ class merge_plan:
if not choices:
# and was intractable because it has a hard dep on an
# unsolvable atom.
- ret = ((False, "pruning of insoluble deps "
- "left no choices"), {})
+ ret = (
+ (False, "pruning of insoluble deps " "left no choices"),
+ {},
+ )
else:
ret = ((False, "no matches"), {})
if choices is None:
choices = choice_point(atom, matches)
- stack.add_frame(mode, atom, choices, dbs,
- self.state.current_state, drop_cycles, vdb_limited=limit_to_vdb)
+ stack.add_frame(
+ mode,
+ atom,
+ choices,
+ dbs,
+ self.state.current_state,
+ drop_cycles,
+ vdb_limited=limit_to_vdb,
+ )
if not limit_to_vdb and not matches:
self.insoluble.add(atom)
@@ -621,10 +715,12 @@ class merge_plan:
"""
force_vdb = False
for frame in stack.slot_cycles(cur_frame, reverse=True):
- if not any(f.mode == 'pdepend' for f in
- islice(stack, stack.index(frame), stack.index(cur_frame))):
+ if not any(
+ f.mode == "pdepend"
+ for f in islice(stack, stack.index(frame), stack.index(cur_frame))
+ ):
# exact same pkg.
- if frame.mode in ('bdepend', 'depend'):
+ if frame.mode in ("bdepend", "depend"):
# ok, we *must* go vdb if not already.
if frame.current_pkg.repo.livefs:
if cur_frame.current_pkg.repo.livefs:
@@ -632,8 +728,10 @@ class merge_plan:
# force it to vdb.
if cur_frame.current_pkg.repo.livefs:
return True
- elif cur_frame.current_pkg == frame.current_pkg and \
- cur_frame.mode == 'pdepend':
+ elif (
+ cur_frame.current_pkg == frame.current_pkg
+ and cur_frame.mode == "pdepend"
+ ):
# if non vdb and it's a post_rdeps cycle for the cur
# node, exempt it; assuming the stack succeeds,
# it's satisfied
@@ -655,12 +753,17 @@ class merge_plan:
# sidestep this.
cur_frame.parent.events.append(("cycle", cur_frame, "limiting to vdb"))
cur_frame.ignored = True
- return self._rec_add_atom(cur_frame.atom, stack,
- self.livefs_dbs, mode=cur_frame.mode,
- drop_cycles = cur_frame.drop_cycles)
+ return self._rec_add_atom(
+ cur_frame.atom,
+ stack,
+ self.livefs_dbs,
+ mode=cur_frame.mode,
+ drop_cycles=cur_frame.drop_cycles,
+ )
- def process_dependencies_and_blocks(self, stack, choices, attr,
- atom=None, depth=None):
+ def process_dependencies_and_blocks(
+ self, stack, choices, attr, atom=None, depth=None
+ ):
if atom is None:
atom = stack.current_frame.atom
if depth is None:
@@ -670,7 +773,8 @@ class merge_plan:
if len(l) == 1:
self._dprint(
"resetting for %s%s because of %s: %s",
- (depth*2*" ", atom, attr, l[0]))
+ (depth * 2 * " ", atom, attr, l[0]),
+ )
self.state.backtrack(stack.current_frame.start_point)
return [], l[0]
@@ -679,7 +783,10 @@ class merge_plan:
def process_dependencies(self, stack, choices, mode, depset, atom):
failure = []
- additions, blocks, = [], []
+ additions, blocks, = (
+ [],
+ [],
+ )
cur_frame = stack.current_frame
self.notify_starting_mode(mode, stack)
for potentials in depset:
@@ -691,9 +798,13 @@ class merge_plan:
blocks.append(or_node)
break
else:
- failure = self._rec_add_atom(or_node, stack,
- cur_frame.dbs, mode=mode,
- drop_cycles=cur_frame.drop_cycles)
+ failure = self._rec_add_atom(
+ or_node,
+ stack,
+ cur_frame.dbs,
+ mode=mode,
+ drop_cycles=cur_frame.drop_cycles,
+ )
if not failure:
additions.append(or_node)
break
@@ -701,10 +812,10 @@ class merge_plan:
# XXX kill it; purpose seems... questionable.
if cur_frame.drop_cycles:
self._dprint(
- "%s level cycle: %s: "
- "dropping cycle for %s from %s",
+ "%s level cycle: %s: " "dropping cycle for %s from %s",
(mode, cur_frame.atom, or_node, cur_frame.current_pkg),
- "cycle")
+ "cycle",
+ )
failure = None
break
@@ -712,10 +823,10 @@ class merge_plan:
# pkg changed.
return [failure]
continue
- else: # didn't find any solutions to this or block.
+ else: # didn't find any solutions to this or block.
cur_frame.reduce_solutions(potentials)
return [potentials]
- else: # all potentials were usable.
+ else: # all potentials were usable.
return additions, blocks
def process_blocker(self, stack, choices, blocker, mode, atom):
@@ -723,8 +834,12 @@ class merge_plan:
if ret is None:
return []
self.notify_choice_failed(
- stack, atom, choices,
- "%s blocker: %s conflicts w/ %s", (mode, ret[0], ret[1]))
+ stack,
+ atom,
+ choices,
+ "%s blocker: %s conflicts w/ %s",
+ (mode, ret[0], ret[1]),
+ )
return [ret[0]]
def _ensure_livefs_is_loaded_preloaded(self, restrict):
@@ -759,18 +874,26 @@ class merge_plan:
# Note that virtual pkg conflicts are skipped since it's assumed
# they are injected.
- virtual = (any(not getattr(x, 'package_is_real', True) for x in conflicts)
- or not choices.current_pkg.package_is_real)
- if (virtual or (len(conflicts) == 1 and conflicts[0] == choices.current_pkg and
- (conflicts[0].repo.livefs == choices.current_pkg.repo.livefs and
- atom.match(conflicts[0])))):
+ virtual = (
+ any(not getattr(x, "package_is_real", True) for x in conflicts)
+ or not choices.current_pkg.package_is_real
+ )
+ if virtual or (
+ len(conflicts) == 1
+ and conflicts[0] == choices.current_pkg
+ and (
+ conflicts[0].repo.livefs == choices.current_pkg.repo.livefs
+ and atom.match(conflicts[0])
+ )
+ ):
# early exit. means that a cycle came about, but exact
# same result slipped through.
return False
self._dprint(
"was trying to insert atom '%s' pkg '%s',\nbut '[%s]' exists already",
- (atom, choices.current_pkg, ", ".join(map(str, conflicts))))
+ (atom, choices.current_pkg, ", ".join(map(str, conflicts))),
+ )
try_rematch = False
if any(True for x in conflicts if isinstance(x, restriction.base)):
@@ -780,15 +903,22 @@ class merge_plan:
# vdb entry, replace.
if self.vdb_restrict.match(choices.current_pkg):
# we're replacing a vdb entry with a vdb entry? wtf.
- print("internal weirdness spotted- vdb restrict matches, "
- "but current doesn't, bailing")
- raise Exception("internal weirdness- vdb restrict matches ",
- "but current doesn't. bailing- run w/ --debug")
- conflicts = state.replace_op(choices, choices.current_pkg).apply(self.state)
+ print(
+ "internal weirdness spotted- vdb restrict matches, "
+ "but current doesn't, bailing"
+ )
+ raise Exception(
+ "internal weirdness- vdb restrict matches ",
+ "but current doesn't. bailing- run w/ --debug",
+ )
+ conflicts = state.replace_op(choices, choices.current_pkg).apply(
+ self.state
+ )
if not conflicts:
self._dprint(
"replacing vdb entry for '%s' with pkg '%s'",
- (atom, choices.current_pkg))
+ (atom, choices.current_pkg),
+ )
else:
try_rematch = True
@@ -810,12 +940,17 @@ class merge_plan:
# note the second Or clause is a bit loose; allows any version to
# slip through instead of blocking everything that isn't the
# parent pkg
- if blocker.category != 'virtual':
+ if blocker.category != "virtual":
return blocker
- return packages.AndRestriction(blocker,
- packages.PackageRestriction("provider.key",
+ return packages.AndRestriction(
+ blocker,
+ packages.PackageRestriction(
+ "provider.key",
values.StrExactMatch(choices.current_pkg.key),
- negate=True, ignore_missing=True))
+ negate=True,
+ ignore_missing=True,
+ ),
+ )
def insert_blockers(self, stack, choices, blocks):
# level blockers.
@@ -830,14 +965,20 @@ class merge_plan:
# blocker caught something. yay.
self._dprint(
"%s blocker %s hit %s for atom %s pkg %s",
- (stack[-1].mode, x, l, stack[-1].atom, choices.current_pkg))
+ (stack[-1].mode, x, l, stack[-1].atom, choices.current_pkg),
+ )
if x.weak_blocker:
# note that we use the top frame of the stacks' dbs; this
# is to allow us to upgrade as needed.
# For this to match, it's *only* possible if the blocker is resolved
# since the limiter is already in place.
- result = self._rec_add_atom(packages.KeyedAndRestriction(
- restriction.Negate(x), _atom.atom(x.key), key=x.key), stack, stack[0].dbs)
+ result = self._rec_add_atom(
+ packages.KeyedAndRestriction(
+ restriction.Negate(x), _atom.atom(x.key), key=x.key
+ ),
+ stack,
+ stack[0].dbs,
+ )
if not result:
# ok, inserted a new version. did it take care of the conflict?
# it /may/ not have, via filling a different slot...
@@ -910,7 +1051,8 @@ class merge_plan:
@classmethod
def prefer_highest_version_strategy(cls, dbs):
return misc.multiplex_sorting_repo(
- highest_iter_sort, cls.prefer_livefs_dbs(dbs))
+ highest_iter_sort, cls.prefer_livefs_dbs(dbs)
+ )
@staticmethod
def prefer_lowest_version_strategy(dbs):
@@ -919,14 +1061,12 @@ class merge_plan:
@classmethod
def prefer_downgrade_version_strategy(cls, restrict, dbs):
return misc.multiplex_sorting_repo(
- partial(downgrade_iter_sort, restrict),
- cls.prefer_nonlivefs_dbs(dbs))
+ partial(downgrade_iter_sort, restrict), cls.prefer_nonlivefs_dbs(dbs)
+ )
@classmethod
def prefer_reuse_strategy(cls, dbs):
return multiplex.tree(
- misc.multiplex_sorting_repo(
- highest_iter_sort, cls.just_livefs_dbs(dbs)),
- misc.multiplex_sorting_repo(
- highest_iter_sort, cls.just_nonlivefs_dbs(dbs)),
+ misc.multiplex_sorting_repo(highest_iter_sort, cls.just_livefs_dbs(dbs)),
+ misc.multiplex_sorting_repo(highest_iter_sort, cls.just_nonlivefs_dbs(dbs)),
)
diff --git a/src/pkgcore/resolver/state.py b/src/pkgcore/resolver/state.py
index c9a7c988d..991da29d1 100644
--- a/src/pkgcore/resolver/state.py
+++ b/src/pkgcore/resolver/state.py
@@ -1,7 +1,14 @@
__all__ = (
- "plan_state", "base_op_state", "add_op", "add_hardref_op",
- "add_backref_op", "remove_op", "replace_op", "blocker_base_op",
- "incref_forward_block_op", "decref_forward_block_op",
+ "plan_state",
+ "base_op_state",
+ "add_op",
+ "add_hardref_op",
+ "add_backref_op",
+ "remove_op",
+ "replace_op",
+ "blocker_base_op",
+ "incref_forward_block_op",
+ "decref_forward_block_op",
)
from snakeoil.containers import RefCountingSet
@@ -10,7 +17,6 @@ from .pigeonholes import PigeonHoledSlots
class plan_state:
-
def __init__(self):
self.state = PigeonHoledSlots()
self.plan = []
@@ -64,8 +70,7 @@ class plan_state:
iterable = (x for x in self.plan if not x.internal)
if return_livefs:
return iterable
- return (y for y in iterable
- if not y.pkg.repo.livefs or y.desc == 'remove')
+ return (y for y in iterable if not y.pkg.repo.livefs or y.desc == "remove")
def ops(self, livefs=False, only_real=False):
i = self.iter_ops(livefs)
@@ -82,7 +87,6 @@ class plan_state:
class ops_sequence:
-
def __init__(self, sequence, is_livefs=True):
self._ops = tuple(sequence)
self.is_livefs = is_livefs
@@ -111,21 +115,25 @@ class base_op_state:
self.force = force
def __str__(self):
- s = ''
+ s = ""
if self.force:
- s = ' forced'
+ s = " forced"
return "%s: %s%s" % (self.desc, self.pkg, s)
def __repr__(self):
- return '<%s choices=%r pkg=%r force=%s @#%x>' % (
- self.__class__.__name__, self.choices, self.pkg, self.force,
- id(self))
+ return "<%s choices=%r pkg=%r force=%s @#%x>" % (
+ self.__class__.__name__,
+ self.choices,
+ self.pkg,
+ self.force,
+ id(self),
+ )
def apply(self, plan):
- raise NotImplemented(self, 'apply')
+ raise NotImplemented(self, "apply")
def revert(self, plan):
- raise NotImplemented(self, 'revert')
+ raise NotImplemented(self, "revert")
class add_op(base_op_state):
@@ -147,7 +155,7 @@ class add_op(base_op_state):
class add_hardref_op(base_op_state):
- __slots__ = ('restriction',)
+ __slots__ = ("restriction",)
desc = "hardref"
internal = True
force = True
@@ -244,21 +252,32 @@ class replace_op(base_op_state):
if bool(l) != self.force_old:
raise AssertionError(
"Internal error detected, unable to revert %s; got %s, "
- "force_old=%s " % (self, l, self.force_old))
+ "force_old=%s " % (self, l, self.force_old)
+ )
del plan.pkg_choices[self.pkg]
plan.pkg_choices[self.old_pkg] = self.old_choices
plan.vdb_filter.remove(self.old_pkg)
def __str__(self):
- s = ''
+ s = ""
if self.force:
- s = ' forced'
+ s = " forced"
return "replace: %s with %s%s" % (self.old_pkg, self.pkg, s)
def __repr__(self):
- return '<%s old choices=%r new choices=%r old_pkg=%r new_pkg=%r ' \
- 'force=%s @#%x>' % (self.__class__.__name__, self.old_choices,
- self.choices, self.old_pkg, self.pkg, self.force, id(self))
+ return (
+ "<%s old choices=%r new choices=%r old_pkg=%r new_pkg=%r "
+ "force=%s @#%x>"
+ % (
+ self.__class__.__name__,
+ self.old_choices,
+ self.choices,
+ self.old_pkg,
+ self.pkg,
+ self.force,
+ id(self),
+ )
+ )
class blocker_base_op:
@@ -276,19 +295,27 @@ class blocker_base_op:
self.blocker = blocker
def __str__(self):
- return "%s: key %s, %s from %s" % (self.__class__.__name__, self.key,
- self.blocker, self.choices)
+ return "%s: key %s, %s from %s" % (
+ self.__class__.__name__,
+ self.key,
+ self.blocker,
+ self.choices,
+ )
def __repr__(self):
- return '<%s choices=%r blocker=%r key=%r @#%x>' % (
- self.__class__.__name__, self.choices, self.blocker, self.key,
- id(self))
+ return "<%s choices=%r blocker=%r key=%r @#%x>" % (
+ self.__class__.__name__,
+ self.choices,
+ self.blocker,
+ self.key,
+ id(self),
+ )
def apply(self, plan):
- raise NotImplementedError(self, 'apply')
+ raise NotImplementedError(self, "apply")
def revert(self, plan):
- raise NotImplementedError(self, 'revert')
+ raise NotImplementedError(self, "revert")
class incref_forward_block_op(blocker_base_op):
@@ -301,8 +328,7 @@ class incref_forward_block_op(blocker_base_op):
l = plan.state.add_limiter(self.blocker, self.key)
else:
l = []
- plan.rev_blockers.setdefault(self.choices, []).append(
- (self.blocker, self.key))
+ plan.rev_blockers.setdefault(self.choices, []).append((self.blocker, self.key))
plan.blockers_refcnt.add(self.blocker)
return l
@@ -330,8 +356,7 @@ class decref_forward_block_op(blocker_base_op):
del plan.rev_blockers[self.choices]
def revert(self, plan):
- plan.rev_blockers.setdefault(self.choices, []).append(
- (self.blocker, self.key))
+ plan.rev_blockers.setdefault(self.choices, []).append((self.blocker, self.key))
if self.blocker not in plan.blockers_refcnt:
plan.state.add_limiter(self.blocker, self.key)
plan.blockers_refcnt.add(self.blocker)
diff --git a/src/pkgcore/resolver/util.py b/src/pkgcore/resolver/util.py
index 790049373..37583da7a 100644
--- a/src/pkgcore/resolver/util.py
+++ b/src/pkgcore/resolver/util.py
@@ -1,25 +1,30 @@
-__all__ = ("group_attempts", "fails_filter", "reduce_to_failures",)
+__all__ = (
+ "group_attempts",
+ "fails_filter",
+ "reduce_to_failures",
+)
def group_attempts(sequence, filter_func=None):
if filter_func is None:
- filter_func = lambda x:True
+ filter_func = lambda x: True
last, l = None, []
for x in sequence:
- if isinstance(x, tuple) and x[0] == 'inspecting':
+ if isinstance(x, tuple) and x[0] == "inspecting":
if l:
yield last, l
last, l = x[1], []
elif last is not None:
if filter_func(x):
# inline ignored frames
- if getattr(x, 'ignored', False):
+ if getattr(x, "ignored", False):
l.extend(y for y in x.events if filter_func(y))
else:
l.append(x)
if l:
yield last, l
+
def fails_filter(x):
if not isinstance(x, tuple):
return not x.succeeded
@@ -27,6 +32,7 @@ def fails_filter(x):
return not x[1]
return x[0] != "inspecting"
+
def reduce_to_failures(frame):
if frame.succeeded:
return []
diff --git a/src/pkgcore/restrictions/boolean.py b/src/pkgcore/restrictions/boolean.py
index e15cf39af..02195dc84 100644
--- a/src/pkgcore/restrictions/boolean.py
+++ b/src/pkgcore/restrictions/boolean.py
@@ -16,8 +16,9 @@ from . import restriction
class base(restriction.base, metaclass=generic_equality):
"""base template for boolean restrictions"""
- __attr_comparison__ = ('negate', 'type', 'restrictions')
- __slots__ = ('restrictions', 'type', 'negate', '_hash')
+
+ __attr_comparison__ = ("negate", "type", "restrictions")
+ __slots__ = ("restrictions", "type", "negate", "_hash")
_evaluate_collapsible = False
_evaluate_wipe_empty = True
@@ -54,12 +55,13 @@ class base(restriction.base, metaclass=generic_equality):
if r.type is not None and r.type != node_type:
raise TypeError(
"instance '%s' is restriction type '%s', "
- "must be '%s'" % (r, r.type, node_type))
+ "must be '%s'" % (r, r.type, node_type)
+ )
except AttributeError:
raise TypeError(
"type '%s' instance '%s' has no restriction type, "
- "'%s' required" % (
- r.__class__, r, node_type))
+ "'%s' required" % (r.__class__, r, node_type)
+ )
if kwds.pop("finalize", True):
if not isinstance(restrictions, tuple):
@@ -72,15 +74,15 @@ class base(restriction.base, metaclass=generic_equality):
if kwds:
kwds.pop("disable_inst_caching", None)
if kwds:
- raise TypeError(
- "unknown keywords to %s: %s" %
- (self.__class__, kwds))
+ raise TypeError("unknown keywords to %s: %s" % (self.__class__, kwds))
def change_restrictions(self, *restrictions, **kwds):
"""return a new instance of self.__class__, using supplied restrictions"""
if self.type is not None:
- if self.__class__.type not in restriction.valid_types or \
- self.__class__.type != self.type:
+ if (
+ self.__class__.type not in restriction.valid_types
+ or self.__class__.type != self.type
+ ):
kwds["node_type"] = self.type
kwds.setdefault("negate", self.negate)
return self.__class__(*restrictions, **kwds)
@@ -88,9 +90,10 @@ class base(restriction.base, metaclass=generic_equality):
def remove_restriction(self, restriction_types=(), *restrictions):
"""return a new instance of self.__class__, dropping supplied restrictions or types"""
new_restrictions = tuple(
- r for r in self.restrictions if
- not isinstance(r, tuple(restriction_types))
- and r not in restrictions)
+ r
+ for r in self.restrictions
+ if not isinstance(r, tuple(restriction_types)) and r not in restrictions
+ )
if new_restrictions != self.restrictions:
return self.change_restrictions(*new_restrictions)
return self
@@ -109,12 +112,13 @@ class base(restriction.base, metaclass=generic_equality):
if r.type is not None and r.type != self.type:
raise TypeError(
"instance '%s' is restriction type '%s', "
- "must be '%s'" % (r, r.type, self.type))
+ "must be '%s'" % (r, r.type, self.type)
+ )
except AttributeError:
raise TypeError(
"type '%s' instance '%s' has no restriction type, "
- "'%s' required" % (
- r.__class__, r, getattr(self, "type", "unset")))
+ "'%s' required" % (r.__class__, r, getattr(self, "type", "unset"))
+ )
try:
self.restrictions.extend(new_restrictions)
@@ -126,10 +130,14 @@ class base(restriction.base, metaclass=generic_equality):
object.__setattr__(self, "restrictions", tuple(self.restrictions))
def __repr__(self):
- return '<%s negate=%r type=%r finalized=%r restrictions=%r @%#8x>' % (
- self.__class__.__name__, self.negate, getattr(self, 'type', None),
- isinstance(self.restrictions, tuple), self.restrictions,
- id(self))
+ return "<%s negate=%r type=%r finalized=%r restrictions=%r @%#8x>" % (
+ self.__class__.__name__,
+ self.negate,
+ getattr(self, "type", None),
+ isinstance(self.restrictions, tuple),
+ self.restrictions,
+ id(self),
+ )
def __len__(self):
return len(self.restrictions)
@@ -158,11 +166,17 @@ class base(restriction.base, metaclass=generic_equality):
def __getitem__(self, key):
return self.restrictions[key]
- def evaluate_conditionals(self, parent_cls, parent_seq, enabled,
- tristate_locked=None, force_collapse=False):
+ def evaluate_conditionals(
+ self,
+ parent_cls,
+ parent_seq,
+ enabled,
+ tristate_locked=None,
+ force_collapse=False,
+ ):
l = []
for restrict in self:
- f = getattr(restrict, 'evaluate_conditionals', None)
+ f = getattr(restrict, "evaluate_conditionals", None)
if f is None:
l.append(restrict)
else:
@@ -170,17 +184,27 @@ class base(restriction.base, metaclass=generic_equality):
if not self._evaluate_wipe_empty or l:
if force_collapse or (
- (issubclass(parent_cls, self.__class__) and self._evaluate_collapsible) or
- len(l) <= 1):
+ (issubclass(parent_cls, self.__class__) and self._evaluate_collapsible)
+ or len(l) <= 1
+ ):
parent_seq.extend(l)
else:
parent_seq.append(self.__class__(*l))
# this beast, handles N^2 permutations. convert to stack based.
-def iterative_quad_toggling(pkg, pvals, restrictions, starting, end, truths,
- filter_func, desired_false=None, desired_true=None,
- kill_switch=None):
+def iterative_quad_toggling(
+ pkg,
+ pvals,
+ restrictions,
+ starting,
+ end,
+ truths,
+ filter_func,
+ desired_false=None,
+ desired_true=None,
+ kill_switch=None,
+):
if desired_false is None:
desired_false = lambda r, a: r.force_False(*a)
if desired_true is None:
@@ -202,9 +226,17 @@ def iterative_quad_toggling(pkg, pvals, restrictions, starting, end, truths,
if filter_func(t):
yield True
for i in iterative_quad_toggling(
- pkg, pvals, restrictions, index + 1, end, t, filter_func,
- desired_false=desired_false, desired_true=desired_true,
- kill_switch=kill_switch):
+ pkg,
+ pvals,
+ restrictions,
+ index + 1,
+ end,
+ t,
+ filter_func,
+ desired_false=desired_false,
+ desired_true=desired_true,
+ kill_switch=kill_switch,
+ ):
yield True
reset = True
else:
@@ -218,8 +250,16 @@ def iterative_quad_toggling(pkg, pvals, restrictions, starting, end, truths,
if filter_func(t):
yield True
for x in iterative_quad_toggling(
- pkg, pvals, restrictions, index + 1, end, t, filter_func,
- desired_false=desired_false, desired_true=desired_true):
+ pkg,
+ pvals,
+ restrictions,
+ index + 1,
+ end,
+ t,
+ filter_func,
+ desired_false=desired_false,
+ desired_true=desired_true,
+ ):
yield True
reset = True
elif index == end:
@@ -235,6 +275,7 @@ def iterative_quad_toggling(pkg, pvals, restrictions, starting, end, truths,
class AndRestriction(base):
"""Boolean AND grouping of restrictions. negation is a NAND"""
+
__slots__ = ()
_evaluate_collapsible = True
@@ -267,9 +308,15 @@ class AndRestriction(base):
def filter_func(truths):
return False in truths
- for i in iterative_quad_toggling(pkg, pvals, self.restrictions, 0,
- len(self.restrictions), truths,
- filter_func):
+ for i in iterative_quad_toggling(
+ pkg,
+ pvals,
+ self.restrictions,
+ 0,
+ len(self.restrictions),
+ truths,
+ filter_func,
+ ):
return True
return False
@@ -294,9 +341,16 @@ class AndRestriction(base):
def filter_func(truths):
return False in truths
- for i in iterative_quad_toggling(pkg, pvals, self.restrictions, 0,
- len(self.restrictions), truths,
- filter_func):
+
+ for i in iterative_quad_toggling(
+ pkg,
+ pvals,
+ self.restrictions,
+ 0,
+ len(self.restrictions),
+ truths,
+ filter_func,
+ ):
return True
return False
@@ -307,12 +361,12 @@ class AndRestriction(base):
(break apart atoms for example); this isn't likely what you want
"""
if self.negate:
-# raise NotImplementedError("negation for dnf_solutions on "
-# "AndRestriction isn't implemented yet")
+ # raise NotImplementedError("negation for dnf_solutions on "
+ # "AndRestriction isn't implemented yet")
# hack- this is an experiment
for r in OrRestriction(
- node_type=self.type, *[restriction.Negate(x)
- for x in self.restrictions]).iter_dnf_solutions():
+ node_type=self.type, *[restriction.Negate(x) for x in self.restrictions]
+ ).iter_dnf_solutions():
yield r
return
if not self.restrictions:
@@ -321,7 +375,7 @@ class AndRestriction(base):
hardreqs = []
optionals = []
for x in self.restrictions:
- method = getattr(x, 'dnf_solutions', None)
+ method = getattr(x, "dnf_solutions", None)
if method is None:
hardreqs.append(x)
else:
@@ -357,10 +411,11 @@ class AndRestriction(base):
"""
if self.negate:
- raise NotImplementedError("negation for solutions on "
- "AndRestriction isn't implemented yet")
+ raise NotImplementedError(
+ "negation for solutions on " "AndRestriction isn't implemented yet"
+ )
for x in self.restrictions:
- method = getattr(x, 'iter_cnf_solutions', None)
+ method = getattr(x, "iter_cnf_solutions", None)
if method is None:
yield [x]
else:
@@ -375,11 +430,12 @@ class AndRestriction(base):
"""
if self.negate:
- raise NotImplementedError("negation for solutions on "
- "AndRestriction isn't implemented yet")
+ raise NotImplementedError(
+ "negation for solutions on " "AndRestriction isn't implemented yet"
+ )
andreqs = []
for x in self.restrictions:
- method = getattr(x, 'iter_cnf_solutions', None)
+ method = getattr(x, "iter_cnf_solutions", None)
if method is None:
andreqs.append([x])
else:
@@ -388,12 +444,13 @@ class AndRestriction(base):
def __str__(self):
restricts_str = " && ".join(map(str, self.restrictions))
- negate = 'not ' if self.negate else ''
- return f'{negate}( {restricts_str} )'
+ negate = "not " if self.negate else ""
+ return f"{negate}( {restricts_str} )"
class OrRestriction(base):
"""Boolean OR grouping of restrictions."""
+
__slots__ = ()
_evaluate_collapsible = True
@@ -412,7 +469,8 @@ class OrRestriction(base):
"""
if self.negate:
raise NotImplementedError(
- "OrRestriction.solutions doesn't yet support self.negate")
+ "OrRestriction.solutions doesn't yet support self.negate"
+ )
if not self.restrictions:
return []
@@ -420,7 +478,7 @@ class OrRestriction(base):
dcnf = []
cnf = []
for x in self.restrictions:
- method = getattr(x, 'dnf_solutions', None)
+ method = getattr(x, "dnf_solutions", None)
if method is None:
dcnf.append(x)
else:
@@ -450,15 +508,14 @@ class OrRestriction(base):
if self.negate:
# hack- this is an experiment
for x in AndRestriction(
- node_type=self.type,
- *[restriction.Negate(x)
- for x in self.restrictions]).iter_dnf_solutions():
+ node_type=self.type, *[restriction.Negate(x) for x in self.restrictions]
+ ).iter_dnf_solutions():
yield x
if not self.restrictions:
yield []
return
for x in self.restrictions:
- method = getattr(x, 'iter_dnf_solutions', None)
+ method = getattr(x, "iter_dnf_solutions", None)
if method is None:
yield [x]
else:
@@ -490,9 +547,16 @@ class OrRestriction(base):
def filter_func(truths):
return True in truths
- for i in iterative_quad_toggling(pkg, pvals, self.restrictions, 0,
- len(self.restrictions), truths,
- filter_func):
+
+ for i in iterative_quad_toggling(
+ pkg,
+ pvals,
+ self.restrictions,
+ 0,
+ len(self.restrictions),
+ truths,
+ filter_func,
+ ):
return True
return False
@@ -518,15 +582,22 @@ class OrRestriction(base):
def filter_func(truths):
return True in truths
- for i in iterative_quad_toggling(pkg, pvals, self.restrictions, 0,
- len(self.restrictions), truths,
- filter_func):
+
+ for i in iterative_quad_toggling(
+ pkg,
+ pvals,
+ self.restrictions,
+ 0,
+ len(self.restrictions),
+ truths,
+ filter_func,
+ ):
yield True
def __str__(self):
restricts_str = " || ".join(map(str, self.restrictions))
- negate = 'not ' if self.negate else ''
- return f'{negate}( {restricts_str} )'
+ negate = "not " if self.negate else ""
+ return f"{negate}( {restricts_str} )"
class JustOneRestriction(base):
@@ -554,8 +625,8 @@ class JustOneRestriction(base):
def __str__(self):
restricts_str = " ".join(map(str, self.restrictions))
- negate = 'not ' if self.negate else ''
- return f'{negate}exactly-one-of ( {restricts_str} )'
+ negate = "not " if self.negate else ""
+ return f"{negate}exactly-one-of ( {restricts_str} )"
class AtMostOneOfRestriction(base):
@@ -578,5 +649,5 @@ class AtMostOneOfRestriction(base):
def __str__(self):
restricts_str = " ".join(map(str, self.restrictions))
- negate = 'not ' if self.negate else ''
- return f'{negate}at-most-one-of ( {restricts_str} )'
+ negate = "not " if self.negate else ""
+ return f"{negate}at-most-one-of ( {restricts_str} )"
diff --git a/src/pkgcore/restrictions/delegated.py b/src/pkgcore/restrictions/delegated.py
index b82e9052b..ecb6f7d0e 100644
--- a/src/pkgcore/restrictions/delegated.py
+++ b/src/pkgcore/restrictions/delegated.py
@@ -16,7 +16,7 @@ class delegate(restriction.base):
:obj:`pkgcore.ebuild.domain`.
"""
- __slots__ = ('_transform', 'negate')
+ __slots__ = ("_transform", "negate")
type = restriction.package_type
inst_caching = False
diff --git a/src/pkgcore/restrictions/packages.py b/src/pkgcore/restrictions/packages.py
index e623c2d36..ff6abddde 100644
--- a/src/pkgcore/restrictions/packages.py
+++ b/src/pkgcore/restrictions/packages.py
@@ -13,7 +13,13 @@ from . import boolean, restriction
class PackageRestriction(restriction.base, metaclass=generic_equality):
"""Package data restriction."""
- __slots__ = ('_pull_attr_func', '_attr_split', 'restriction', 'ignore_missing', 'negate')
+ __slots__ = (
+ "_pull_attr_func",
+ "_attr_split",
+ "restriction",
+ "ignore_missing",
+ "negate",
+ )
__attr_comparison__ = ("__class__", "negate", "_attr_split", "restriction")
__inst_caching__ = True
@@ -47,7 +53,7 @@ class PackageRestriction(restriction.base, metaclass=generic_equality):
def _parse_attr(self, attr):
object.__setattr__(self, "_pull_attr_func", static_attrgetter(attr))
- object.__setattr__(self, "_attr_split", attr.split('.'))
+ object.__setattr__(self, "_attr_split", attr.split("."))
def _pull_attr(self, pkg):
try:
@@ -69,8 +75,11 @@ class PackageRestriction(restriction.base, metaclass=generic_equality):
if isinstance(exc, AttributeError):
if not self.ignore_missing:
logger.exception(
- "failed getting attribute %s from %s, "
- "exception %s", '.'.join(attr_split), str(pkg), str(exc))
+ "failed getting attribute %s from %s, " "exception %s",
+ ".".join(attr_split),
+ str(pkg),
+ str(exc),
+ )
eargs = [x for x in exc.args if isinstance(x, str)]
if any(x in attr_split for x in eargs):
@@ -83,8 +92,11 @@ class PackageRestriction(restriction.base, metaclass=generic_equality):
# if it doesn't match, exception is thrown.
return False
logger.exception(
- "caught unexpected exception accessing %s from %s, "
- "exception %s", '.'.join(attr_split), str(pkg), str(exc))
+ "caught unexpected exception accessing %s from %s, " "exception %s",
+ ".".join(attr_split),
+ str(pkg),
+ str(exc),
+ )
return True
def force_False(self, pkg):
@@ -112,22 +124,21 @@ class PackageRestriction(restriction.base, metaclass=generic_equality):
return hash((self.negate, self.attrs, self.restriction))
def __str__(self):
- s = f'{self.attrs} '
+ s = f"{self.attrs} "
if self.negate:
s += "not "
return s + str(self.restriction)
def __repr__(self):
if self.negate:
- string = '<%s attr=%r restriction=%r negated @%#8x>'
+ string = "<%s attr=%r restriction=%r negated @%#8x>"
else:
- string = '<%s attr=%r restriction=%r @%#8x>'
- return string % (
- self.__class__.__name__, self.attr, self.restriction, id(self))
+ string = "<%s attr=%r restriction=%r @%#8x>"
+ return string % (self.__class__.__name__, self.attr, self.restriction, id(self))
@property
def attr(self):
- return '.'.join(self._attr_split)
+ return ".".join(self._attr_split)
@property
def attrs(self):
@@ -158,11 +169,13 @@ class PackageRestrictionMulti(PackageRestriction):
@property
def attrs(self):
- return tuple('.'.join(x) for x in self._attr_split)
+ return tuple(".".join(x) for x in self._attr_split)
def _parse_attr(self, attrs):
- object.__setattr__(self, '_pull_attr_func', tuple(map(static_attrgetter, attrs)))
- object.__setattr__(self, '_attr_split', tuple(x.split('.') for x in attrs))
+ object.__setattr__(
+ self, "_pull_attr_func", tuple(map(static_attrgetter, attrs))
+ )
+ object.__setattr__(self, "_attr_split", tuple(x.split(".") for x in attrs))
def _pull_attr(self, pkg):
val = []
@@ -187,7 +200,7 @@ class Conditional(PackageRestriction, metaclass=generic_equality):
Used to control whether a payload of restrictions are accessible or not.
"""
- __slots__ = ('payload',)
+ __slots__ = ("payload",)
__attr_comparison__ = ("__class__", "negate", "attr", "restriction", "payload")
conditional = True
@@ -208,17 +221,21 @@ class Conditional(PackageRestriction, metaclass=generic_equality):
def __str__(self):
s = PackageRestriction.__str__(self)
- payload = ', '.join(str(x) for x in self.payload)
- return f'( Conditional: {s} payload: [ {payload} ] )'
+ payload = ", ".join(str(x) for x in self.payload)
+ return f"( Conditional: {s} payload: [ {payload} ] )"
def __repr__(self):
if self.negate:
- string = '<%s attr=%r restriction=%r payload=%r negated @%#8x>'
+ string = "<%s attr=%r restriction=%r payload=%r negated @%#8x>"
else:
- string = '<%s attr=%r restriction=%r payload=%r @%#8x>'
+ string = "<%s attr=%r restriction=%r payload=%r @%#8x>"
return string % (
- self.__class__.__name__, self.attr, self.restriction, self.payload,
- id(self))
+ self.__class__.__name__,
+ self.attr,
+ self.restriction,
+ self.payload,
+ id(self),
+ )
def __iter__(self):
return iter(self.payload)
@@ -226,7 +243,9 @@ class Conditional(PackageRestriction, metaclass=generic_equality):
def __hash__(self):
return hash((self.attr, self.negate, self.restriction, self.payload))
- def evaluate_conditionals(self, parent_cls, parent_seq, enabled, tristate_locked=None):
+ def evaluate_conditionals(
+ self, parent_cls, parent_seq, enabled, tristate_locked=None
+ ):
if tristate_locked is not None:
assert len(self.restriction.vals) == 1
val = list(self.restriction.vals)[0]
@@ -241,19 +260,23 @@ class Conditional(PackageRestriction, metaclass=generic_equality):
if self.payload:
boolean.AndRestriction(*self.payload).evaluate_conditionals(
- parent_cls, parent_seq, enabled, tristate_locked)
+ parent_cls, parent_seq, enabled, tristate_locked
+ )
# "Invalid name" (pylint uses the module const regexp, not the class regexp)
# pylint: disable-msg=C0103
-AndRestriction = restriction.curry_node_type(boolean.AndRestriction,
- restriction.package_type)
-OrRestriction = restriction.curry_node_type(boolean.OrRestriction,
- restriction.package_type)
+AndRestriction = restriction.curry_node_type(
+ boolean.AndRestriction, restriction.package_type
+)
+OrRestriction = restriction.curry_node_type(
+ boolean.OrRestriction, restriction.package_type
+)
-AlwaysBool = restriction.curry_node_type(restriction.AlwaysBool,
- restriction.package_type)
+AlwaysBool = restriction.curry_node_type(
+ restriction.AlwaysBool, restriction.package_type
+)
class KeyedAndRestriction(boolean.AndRestriction):
@@ -273,7 +296,7 @@ class KeyedAndRestriction(boolean.AndRestriction):
boolean_str = boolean.AndRestriction.__str__(self)
if self.tag is None:
return boolean_str
- return f'{self.tag} {boolean_str}'
+ return f"{self.tag} {boolean_str}"
AlwaysTrue = AlwaysBool(negate=True)
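
A hedged usage sketch of PackageRestriction (assuming the usual attr/value-restriction constructor; the package object here is a plain stand-in, not a pkgcore package):

    from types import SimpleNamespace

    from pkgcore.restrictions import packages, values

    # restrict on a package attribute: category must equal "dev-libs"
    restrict = packages.PackageRestriction("category", values.StrExactMatch("dev-libs"))

    pkg = SimpleNamespace(category="dev-libs", package="foo")
    print(restrict.match(pkg))  # True
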
diff --git a/src/pkgcore/restrictions/required_use.py b/src/pkgcore/restrictions/required_use.py
index a1906d4b0..ec41c2b28 100644
--- a/src/pkgcore/restrictions/required_use.py
+++ b/src/pkgcore/restrictions/required_use.py
@@ -6,75 +6,111 @@ from . import restriction, boolean, packages, values
class _use_constraint(Protocol):
def __call__(self, on: frozenset[str]) -> bool:
- raise NotImplementedError('Constraint', '__call__')
+ raise NotImplementedError("Constraint", "__call__")
def __use_flags_state_any(negate: bool, vals: frozenset[str]) -> _use_constraint:
def check(on: frozenset[str]):
return vals.isdisjoint(on) == negate
+
return check
-def __condition(negate: bool, vals: frozenset[str], *children: _use_constraint) -> _use_constraint:
+def __condition(
+ negate: bool, vals: frozenset[str], *children: _use_constraint
+) -> _use_constraint:
def check(on: frozenset[str]):
return vals.issubset(on) == negate or all(c(on) for c in children)
+
return check
def __or_constraint(negate: bool, *children: _use_constraint) -> _use_constraint:
def check(on: frozenset[str]):
return any(c(on) for c in children) != negate
+
return check
def __and_constraint(negate: bool, *children: _use_constraint) -> _use_constraint:
def check(on: frozenset[str]):
return all(c(on) for c in children) != negate
+
return check
def __just_one_constraint(negate: bool, *children: _use_constraint) -> _use_constraint:
def check(on: frozenset[str]):
return (1 == sum(c(on) for c in children)) != negate
+
return check
-def __at_most_one_constraint(negate: bool, *children: _use_constraint) -> _use_constraint:
+def __at_most_one_constraint(
+ negate: bool, *children: _use_constraint
+) -> _use_constraint:
def check(on: frozenset[str]):
return (1 >= sum(c(on) for c in children)) != negate
+
return check
def __to_single_constraint(restrict) -> tuple[_use_constraint, frozenset[str]]:
if isinstance(restrict, values.ContainmentMatch):
assert not restrict.all
- return __use_flags_state_any(restrict.negate, frozenset(restrict.vals)), frozenset(restrict.vals)
+ return __use_flags_state_any(
+ restrict.negate, frozenset(restrict.vals)
+ ), frozenset(restrict.vals)
elif isinstance(restrict, packages.Conditional):
assert isinstance(x := restrict.restriction, values.ContainmentMatch)
- children, variables = zip(*(__to_single_constraint(c) for c in restrict.payload))
- return __condition(x.negate, frozenset(x.vals), *children), frozenset(x.vals).union(*variables)
+ children, variables = zip(
+ *(__to_single_constraint(c) for c in restrict.payload)
+ )
+ return __condition(x.negate, frozenset(x.vals), *children), frozenset(
+ x.vals
+ ).union(*variables)
elif isinstance(restrict, boolean.OrRestriction):
- children, variables = zip(*(__to_single_constraint(c) for c in restrict.restrictions))
- return __or_constraint(restrict.negate, *children), frozenset().union(*variables)
+ children, variables = zip(
+ *(__to_single_constraint(c) for c in restrict.restrictions)
+ )
+ return __or_constraint(restrict.negate, *children), frozenset().union(
+ *variables
+ )
elif isinstance(restrict, boolean.AndRestriction):
- children, variables = zip(*(__to_single_constraint(c) for c in restrict.restrictions))
- return __and_constraint(restrict.negate, *children), frozenset().union(*variables)
+ children, variables = zip(
+ *(__to_single_constraint(c) for c in restrict.restrictions)
+ )
+ return __and_constraint(restrict.negate, *children), frozenset().union(
+ *variables
+ )
elif isinstance(restrict, boolean.JustOneRestriction):
- children, variables = zip(*(__to_single_constraint(c) for c in restrict.restrictions))
- return __just_one_constraint(restrict.negate, *children), frozenset().union(*variables)
+ children, variables = zip(
+ *(__to_single_constraint(c) for c in restrict.restrictions)
+ )
+ return __just_one_constraint(restrict.negate, *children), frozenset().union(
+ *variables
+ )
elif isinstance(restrict, boolean.AtMostOneOfRestriction):
- children, variables = zip(*(__to_single_constraint(c) for c in restrict.restrictions))
- return __at_most_one_constraint(restrict.negate, *children), frozenset().union(*variables)
+ children, variables = zip(
+ *(__to_single_constraint(c) for c in restrict.restrictions)
+ )
+ return __at_most_one_constraint(restrict.negate, *children), frozenset().union(
+ *variables
+ )
else:
- raise NotImplementedError('build_constraint', type(restrict))
+ raise NotImplementedError("build_constraint", type(restrict))
-def __to_multiple_constraint(restrict) -> Iterator[tuple[_use_constraint, frozenset[str]]]:
+def __to_multiple_constraint(
+ restrict,
+) -> Iterator[tuple[_use_constraint, frozenset[str]]]:
if isinstance(restrict, packages.Conditional):
assert isinstance(x := restrict.restriction, values.ContainmentMatch)
for rule in restrict.payload:
for func, variables in __to_multiple_constraint(rule):
- yield __condition(x.negate, frozenset(x.vals), func), frozenset(x.vals).union(variables)
+ yield __condition(x.negate, frozenset(x.vals), func), frozenset(
+ x.vals
+ ).union(variables)
elif isinstance(restrict, boolean.AndRestriction):
assert not restrict.negate
for rule in restrict.restrictions:
@@ -86,10 +122,17 @@ def __to_multiple_constraint(restrict) -> Iterator[tuple[_use_constraint, frozen
def __wrapper(constraint_func: _use_constraint) -> Constraint:
def check(**kwargs):
return constraint_func(frozenset(k for k, v in kwargs.items() if v))
+
return check
-def find_constraint_satisfaction(restricts: restriction.base, iuse: set[str], force_true=(), force_false=(), prefer_true=()) -> Iterator[dict[str, bool]]:
+def find_constraint_satisfaction(
+ restricts: restriction.base,
+ iuse: set[str],
+ force_true=(),
+ force_false=(),
+ prefer_true=(),
+) -> Iterator[dict[str, bool]]:
"""Return iterator for use flags combination satisfying REQUIRED_USE
:param restricts: Parsed restricts of REQUIRED_USE
@@ -106,13 +149,16 @@ def find_constraint_satisfaction(restricts: restriction.base, iuse: set[str], fo
prefer_false = iuse.difference(force_true, force_false, prefer_true)
problem.add_variable((True, False), *prefer_false)
- problem.add_variable((False, True), *iuse.intersection(prefer_true).difference(force_false, force_true))
- problem.add_variable((False, ), *iuse.intersection(force_false))
- problem.add_variable((True, ), *iuse.intersection(force_true))
+ problem.add_variable(
+ (False, True),
+ *iuse.intersection(prefer_true).difference(force_false, force_true),
+ )
+ problem.add_variable((False,), *iuse.intersection(force_false))
+ problem.add_variable((True,), *iuse.intersection(force_true))
for rule in restricts:
for constraint_func, variables in __to_multiple_constraint(rule):
if missing_vars := variables - problem.variables.keys():
- problem.add_variable((False, ), *missing_vars)
+ problem.add_variable((False,), *missing_vars)
problem.add_constraint(__wrapper(constraint_func), variables)
return iter(problem)
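
An illustrative re-implementation of the closure pattern used by the constraint builders above (plain Python, not the pkgcore API): each REQUIRED_USE clause becomes a callable over the set of enabled flags.

    from typing import Callable

    Constraint = Callable[[frozenset], bool]

    def flag(name: str) -> Constraint:
        # true when the flag is enabled
        return lambda on: name in on

    def just_one(*children: Constraint) -> Constraint:
        # exactly-one-of: precisely one child constraint may hold
        return lambda on: sum(c(on) for c in children) == 1

    exactly_one_gui = just_one(flag("gtk"), flag("qt5"))
    print(exactly_one_gui(frozenset({"gtk"})))         # True
    print(exactly_one_gui(frozenset({"gtk", "qt5"})))  # False
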
diff --git a/src/pkgcore/restrictions/restriction.py b/src/pkgcore/restrictions/restriction.py
index 7f2624ab8..b77856be0 100644
--- a/src/pkgcore/restrictions/restriction.py
+++ b/src/pkgcore/restrictions/restriction.py
@@ -14,6 +14,7 @@ class base(klass.SlotsPicklingMixin, metaclass=caching.WeakInstMeta):
all derivatives *should* be __slots__ based (lot of instances may
wind up in memory).
"""
+
__inst_caching__ = True
# __weakref__ here is implicit via the metaclass
@@ -68,8 +69,7 @@ class AlwaysBool(base):
return f"always '{self.negate}'"
def __repr__(self):
- return '<%s always %r @%#8x>' % (
- self.__class__.__name__, self.negate, id(self))
+ return "<%s always %r @%#8x>" % (self.__class__.__name__, self.negate, id(self))
def __getstate__(self):
return self.negate, self.type
@@ -128,7 +128,7 @@ class FakeType(base):
class AnyMatch(base):
"""Apply a nested restriction to every item in a sequence."""
- __slots__ = ('restriction', 'type', 'negate')
+ __slots__ = ("restriction", "type", "negate")
def __init__(self, childrestriction, node_type, negate=False):
"""Initialize.
@@ -153,8 +153,11 @@ class AnyMatch(base):
return "any: %s match" % (self.restriction,)
def __repr__(self):
- return '<%s restriction=%r @%#8x>' % (
- self.__class__.__name__, self.restriction, id(self))
+ return "<%s restriction=%r @%#8x>" % (
+ self.__class__.__name__,
+ self.restriction,
+ id(self),
+ )
def curry_node_type(cls, node_type, extradoc=None):
@@ -175,7 +178,7 @@ def curry_node_type(cls, node_type, extradoc=None):
doc = cls.__doc__
result = partial(cls, node_type=node_type)
if doc is None:
- doc = ''
+ doc = ""
else:
# do this so indentation on pydoc __doc__ is sane
doc = "\n".join(line.lstrip() for line in doc.split("\n")) + "\n"
diff --git a/src/pkgcore/restrictions/util.py b/src/pkgcore/restrictions/util.py
index 735464b5e..40674dda4 100644
--- a/src/pkgcore/restrictions/util.py
+++ b/src/pkgcore/restrictions/util.py
@@ -8,8 +8,9 @@ from . import boolean, packages, restriction
def _is_package_instance(inst):
- return (getattr(inst, "type", None) == restriction.package_type and not
- isinstance(inst, boolean.base))
+ return getattr(inst, "type", None) == restriction.package_type and not isinstance(
+ inst, boolean.base
+ )
def collect_package_restrictions(restrict, attrs=None, invert=False):
@@ -24,14 +25,11 @@ def collect_package_restrictions(restrict, attrs=None, invert=False):
for r in restrict:
if not isinstance(r, restriction.base):
raise TypeError(
- 'restrict must be of a restriction.base, '
- f'not {r.__class__.__class__}: {r!r}'
+ "restrict must be of a restriction.base, "
+ f"not {r.__class__.__class__}: {r!r}"
)
i = iflatten_func(restrict, _is_package_instance)
if attrs is None:
return i
attrs = frozenset(attrs)
- return (
- r for r in i
- if invert == attrs.isdisjoint(getattr(r, 'attrs', ()))
- )
+ return (r for r in i if invert == attrs.isdisjoint(getattr(r, "attrs", ())))
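
A hedged sketch of collect_package_restrictions (assuming the curried packages.AndRestriction constructor accepts leaf restrictions directly):

    from pkgcore.restrictions import packages, values
    from pkgcore.restrictions.util import collect_package_restrictions

    restrict = packages.AndRestriction(
        packages.PackageRestriction("category", values.StrExactMatch("dev-libs")),
        packages.PackageRestriction("package", values.StrExactMatch("foo")),
    )
    # keep only the leaf restrictions that touch the "category" attribute
    for r in collect_package_restrictions(restrict, attrs=["category"]):
        print(r)
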
diff --git a/src/pkgcore/restrictions/values.py b/src/pkgcore/restrictions/values.py
index 62b7c84b6..a444e1426 100644
--- a/src/pkgcore/restrictions/values.py
+++ b/src/pkgcore/restrictions/values.py
@@ -33,7 +33,7 @@ class base(restriction.base):
def hashed_base(name, bases, scope):
- scope.setdefault("__hash__", reflective_hash('_hash'))
+ scope.setdefault("__hash__", reflective_hash("_hash"))
slots = scope.get("__slots__", None)
if slots is not None:
if "_hash" not in slots:
@@ -70,13 +70,14 @@ class VersionRestriction(base):
Gives a clue to what the restriction does.
"""
+
__slots__ = ()
class StrRegex(base, metaclass=hashed_base):
"""regex based matching"""
- __slots__ = ('_hash', 'flags', 'regex', '_matchfunc', 'ismatch', 'negate')
+ __slots__ = ("_hash", "flags", "regex", "_matchfunc", "ismatch", "negate")
__inst_caching__ = True
def __init__(self, regex, case_sensitive=True, match=False, negate=False):
@@ -109,7 +110,7 @@ class StrRegex(base, metaclass=hashed_base):
if not isinstance(value, str):
# Be too clever for our own good --marienz
if value is None:
- value = ''
+ value = ""
else:
value = str(value)
return (self._matchfunc(value) is not None) != self.negate
@@ -117,30 +118,30 @@ class StrRegex(base, metaclass=hashed_base):
def __repr__(self):
result = [self.__class__.__name__, repr(self.regex)]
if self.negate:
- result.append('negated')
+ result.append("negated")
if self.ismatch:
- result.append('match')
+ result.append("match")
else:
- result.append('search')
- result.append('@%#8x' % (id(self),))
- result = ' '.join(result)
- return f'<{result}>'
+ result.append("search")
+ result.append("@%#8x" % (id(self),))
+ result = " ".join(result)
+ return f"<{result}>"
def __str__(self):
if self.ismatch:
- result = 'match '
+ result = "match "
else:
- result = 'search '
+ result = "search "
result += self.regex
if self.negate:
- return f'not {result}'
+ return f"not {result}"
return result
class StrExactMatch(base, metaclass=generic_equality):
"""exact string comparison match"""
- __slots__ = __attr_comparison__ = ('_hash', 'exact', 'case_sensitive', 'negate')
+ __slots__ = __attr_comparison__ = ("_hash", "exact", "case_sensitive", "negate")
__inst_caching__ = True
def __init__(self, exact, case_sensitive=True, negate=False):
@@ -180,23 +181,23 @@ class StrExactMatch(base, metaclass=generic_equality):
def __repr__(self):
if self.negate:
- string = '<%s %r negated @%#8x>'
+ string = "<%s %r negated @%#8x>"
else:
- string = '<%s %r @%#8x>'
+ string = "<%s %r @%#8x>"
return string % (self.__class__.__name__, self.exact, id(self))
def __str__(self):
if self.negate:
- return f'!= {self.exact}'
- return f'== {self.exact}'
+ return f"!= {self.exact}"
+ return f"== {self.exact}"
- __hash__ = reflective_hash('_hash')
+ __hash__ = reflective_hash("_hash")
class StrGlobMatch(base, metaclass=hashed_base):
"""globbing matches; essentially startswith and endswith matches"""
- __slots__ = ('_hash', 'glob', 'prefix', 'negate', 'flags')
+ __slots__ = ("_hash", "glob", "prefix", "negate", "flags")
__inst_caching__ = True
def __init__(self, glob, case_sensitive=True, prefix=True, negate=False):
@@ -231,30 +232,32 @@ class StrGlobMatch(base, metaclass=hashed_base):
def __repr__(self):
if self.negate:
- string = '<%s %r case_sensitive=%r negated @%#8x>'
+ string = "<%s %r case_sensitive=%r negated @%#8x>"
else:
- string = '<%s %r case_sensitive=%r @%#8x>'
+ string = "<%s %r case_sensitive=%r @%#8x>"
if self.prefix:
- g = f'{self.glob}.*'
+ g = f"{self.glob}.*"
else:
- g = f'.*{self.glob}'
+ g = f".*{self.glob}"
return string % (
- self.__class__.__name__, g,
+ self.__class__.__name__,
+ g,
self.flags == re.I and True or False,
- id(self))
+ id(self),
+ )
def __str__(self):
- s = ''
+ s = ""
if self.negate:
- s = 'not '
+ s = "not "
if self.prefix:
- return f'{s}{self.glob}*'
- return '{s}*{self.glob}'
+ return f"{s}{self.glob}*"
+ return "{s}*{self.glob}"
class EqualityMatch(base, metaclass=generic_equality):
- __slots__ = ('negate', 'data')
+ __slots__ = ("negate", "data")
__attr_comparison__ = __slots__
def __init__(self, data, negate=False):
@@ -264,7 +267,7 @@ class EqualityMatch(base, metaclass=generic_equality):
"""
sf = object.__setattr__
- sf(self, 'negate', negate)
+ sf(self, "negate", negate)
sf(self, "data", data)
def __hash__(self):
@@ -274,13 +277,17 @@ class EqualityMatch(base, metaclass=generic_equality):
return (self.data == actual_val) != self.negate
def __repr__(self):
- return '<%s %r negate=%r @%#8x>' % (
- self.__class__.__name__, self.data, self.negate, id(self))
+ return "<%s %r negate=%r @%#8x>" % (
+ self.__class__.__name__,
+ self.data,
+ self.negate,
+ id(self),
+ )
def __str__(self):
if self.negate:
- return f'EqualityMatch: !={self.data}'
- return f'EqualityMatch: ={self.data}'
+ return f"EqualityMatch: !={self.data}"
+ return f"EqualityMatch: ={self.data}"
class ContainmentMatch(base, metaclass=hashed_base):
@@ -290,7 +297,7 @@ class ContainmentMatch(base, metaclass=hashed_base):
not result in a true NAND when all is on.
"""
- __slots__ = ('_hash', 'vals', 'all', 'negate')
+ __slots__ = ("_hash", "vals", "all", "negate")
__inst_caching__ = True
def __init__(self, vals, match_all=False, negate=False):
@@ -349,39 +356,65 @@ class ContainmentMatch(base, metaclass=hashed_base):
vals = self.vals
# XXX pretty much positive this isn't working.
- if isinstance(val, str) or not getattr(pkg, 'configurable', False):
+ if isinstance(val, str) or not getattr(pkg, "configurable", False):
# unchangable
return not self.match(val)
if self.negate:
if self.all:
+
def filter(truths):
return False in truths
+
def true(r, pvals):
return pkg.request_enable(attr, r)
+
def false(r, pvals):
return pkg.request_disable(attr, r)
truths = [x in val for x in vals]
for x in boolean.iterative_quad_toggling(
- pkg, None, list(vals), 0, len(vals), truths,
- filter, desired_false=false, desired_true=true):
+ pkg,
+ None,
+ list(vals),
+ 0,
+ len(vals),
+ truths,
+ filter,
+ desired_false=false,
+ desired_true=true,
+ ):
return True
elif pkg.request_disable(attr, *vals):
- return True
+ return True
return False
if not self.all:
return pkg.request_disable(attr, *vals)
l = len(vals)
- def filter(truths): return truths.count(True) < l
- def true(r, pvals): return pkg.request_enable(attr, r)
- def false(r, pvals): return pkg.request_disable(attr, r)
+
+ def filter(truths):
+ return truths.count(True) < l
+
+ def true(r, pvals):
+ return pkg.request_enable(attr, r)
+
+ def false(r, pvals):
+ return pkg.request_disable(attr, r)
+
truths = [x in val for x in vals]
for x in boolean.iterative_quad_toggling(
- pkg, None, list(vals), 0, l, truths, filter,
- desired_false=false, desired_true=true):
+ pkg,
+ None,
+ list(vals),
+ 0,
+ l,
+ truths,
+ filter,
+ desired_false=false,
+ desired_true=true,
+ ):
return True
return False
@@ -396,24 +429,35 @@ class ContainmentMatch(base, metaclass=hashed_base):
if _values_override is None:
vals = self.vals
- if isinstance(val, str) or not getattr(pkg, 'configurable', False):
+ if isinstance(val, str) or not getattr(pkg, "configurable", False):
# unchangable
return self.match(val)
if not self.negate:
if not self.all:
+
def filter(truths):
return True in truths
+
def true(r, pvals):
return pkg.request_enable(attr, r)
+
def false(r, pvals):
return pkg.request_disable(attr, r)
truths = [x in val for x in vals]
for x in boolean.iterative_quad_toggling(
- pkg, None, list(vals), 0, len(vals), truths,
- filter, desired_false=false, desired_true=true):
+ pkg,
+ None,
+ list(vals),
+ 0,
+ len(vals),
+ truths,
+ filter,
+ desired_false=false,
+ desired_true=true,
+ ):
return True
else:
if pkg.request_enable(attr, *vals):
@@ -425,28 +469,43 @@ class ContainmentMatch(base, metaclass=hashed_base):
if pkg.request_disable(attr, *vals):
return True
else:
- def filter(truths): return True not in truths
- def true(r, pvals): return pkg.request_enable(attr, r)
- def false(r, pvals): return pkg.request_disable(attr, r)
+
+ def filter(truths):
+ return True not in truths
+
+ def true(r, pvals):
+ return pkg.request_enable(attr, r)
+
+ def false(r, pvals):
+ return pkg.request_disable(attr, r)
+
truths = [x in val for x in vals]
for x in boolean.iterative_quad_toggling(
- pkg, None, list(vals), 0, len(vals), truths, filter,
- desired_false=false, desired_true=true):
+ pkg,
+ None,
+ list(vals),
+ 0,
+ len(vals),
+ truths,
+ filter,
+ desired_false=false,
+ desired_true=true,
+ ):
return True
return False
def __repr__(self):
if self.negate:
- string = '<%s %r all=%s negated @%#8x>'
+ string = "<%s %r all=%s negated @%#8x>"
else:
- string = '<%s %r all=%s @%#8x>'
- return string % (
- self.__class__.__name__, tuple(self.vals), self.all, id(self))
+ string = "<%s %r all=%s @%#8x>"
+ return string % (self.__class__.__name__, tuple(self.vals), self.all, id(self))
def __str__(self):
- restricts_str = ', '.join(map(str, self.vals))
- negate = '!' if self.negate else ''
- return f'{negate}{restricts_str}'
+ restricts_str = ", ".join(map(str, self.vals))
+ negate = "!" if self.negate else ""
+ return f"{negate}{restricts_str}"
+
# ContainmentMatch2 was added in f1d3c6f to deprecate ContainmentMatch;
# cleanup took a while (2021). This ContainmentMatch2 can be removed
@@ -457,7 +516,7 @@ ContainmentMatch2 = ContainmentMatch
class FlatteningRestriction(base, metaclass=generic_equality):
"""Flatten the values passed in and apply the nested restriction."""
- __slots__ = __attr_comparison__ = ('dont_iter', 'restriction', 'negate')
+ __slots__ = __attr_comparison__ = ("dont_iter", "restriction", "negate")
__hash__ = object.__hash__
def __init__(self, dont_iter, childrestriction, negate=False):
@@ -473,26 +532,31 @@ class FlatteningRestriction(base, metaclass=generic_equality):
object.__setattr__(self, "restriction", childrestriction)
def match(self, val):
- return self.restriction.match(
- iflatten_instance(val, self.dont_iter)) != self.negate
+ return (
+ self.restriction.match(iflatten_instance(val, self.dont_iter))
+ != self.negate
+ )
def __str__(self):
return (
- 'flattening_restriction: '
- f'dont_iter = {self.dont_iter}, restriction = {self.restriction}'
+ "flattening_restriction: "
+ f"dont_iter = {self.dont_iter}, restriction = {self.restriction}"
)
def __repr__(self):
- return '<%s restriction=%r dont_iter=%r negate=%r @%#8x>' % (
+ return "<%s restriction=%r dont_iter=%r negate=%r @%#8x>" % (
self.__class__.__name__,
- self.restriction, self.dont_iter, self.negate,
- id(self))
+ self.restriction,
+ self.dont_iter,
+ self.negate,
+ id(self),
+ )
class FunctionRestriction(base, metaclass=generic_equality):
"""Convenience class for creating special restrictions."""
- __attr_comparison__ = __slots__ = ('func', 'negate')
+ __attr_comparison__ = __slots__ = ("func", "negate")
__hash__ = object.__hash__
def __init__(self, func, negate=False):
@@ -504,22 +568,26 @@ class FunctionRestriction(base, metaclass=generic_equality):
restriction using this class you should only use it if it is
very unlikely backend-specific optimizations will be possible.
"""
- object.__setattr__(self, 'negate', negate)
- object.__setattr__(self, 'func', func)
+ object.__setattr__(self, "negate", negate)
+ object.__setattr__(self, "func", func)
def match(self, val):
return self.func(val) != self.negate
def __repr__(self):
- return '<%s func=%r negate=%r @%#8x>' % (
- self.__class__.__name__, self.func, self.negate, id(self))
+ return "<%s func=%r negate=%r @%#8x>" % (
+ self.__class__.__name__,
+ self.func,
+ self.negate,
+ id(self),
+ )
class StrConversion(base, metaclass=generic_equality):
"""convert passed in data to a str object"""
__hash__ = object.__hash__
- __attr_comparison__ = __slots__ = ('restrict',)
+ __attr_comparison__ = __slots__ = ("restrict",)
def __init__(self, restrict):
object.__setattr__(self, "restrict", restrict)
@@ -547,7 +615,8 @@ class AnyMatch(restriction.AnyMatch):
# Hack: skip calling base.__init__. Doing this would make
# restriction.base.__init__ run twice.
restriction.AnyMatch.__init__(
- self, childrestriction, restriction.value_type, negate=negate)
+ self, childrestriction, restriction.value_type, negate=negate
+ )
def force_True(self, pkg, attr, val):
return self.match(val)
@@ -559,13 +628,14 @@ class AnyMatch(restriction.AnyMatch):
# "Invalid name" (pylint uses the module const regexp, not the class regexp)
# pylint: disable-msg=C0103
-AndRestriction = restriction.curry_node_type(boolean.AndRestriction,
- restriction.value_type)
-OrRestriction = restriction.curry_node_type(boolean.OrRestriction,
- restriction.value_type)
+AndRestriction = restriction.curry_node_type(
+ boolean.AndRestriction, restriction.value_type
+)
+OrRestriction = restriction.curry_node_type(
+ boolean.OrRestriction, restriction.value_type
+)
-AlwaysBool = restriction.curry_node_type(restriction.AlwaysBool,
- restriction.value_type)
+AlwaysBool = restriction.curry_node_type(restriction.AlwaysBool, restriction.value_type)
AlwaysTrue = AlwaysBool(negate=True)
AlwaysFalse = AlwaysBool(negate=False)
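
A minimal sketch of the value restrictions reformatted above, assuming their usual match() semantics (exact string, prefix glob, containment):

    from pkgcore.restrictions import values

    print(values.StrExactMatch("dev-libs").match("dev-libs"))       # True
    print(values.StrGlobMatch("py", prefix=True).match("pytest"))   # True: startswith
    print(values.ContainmentMatch(["doc"]).match(["doc", "test"]))  # True: "doc" present
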
diff --git a/src/pkgcore/scripts/__init__.py b/src/pkgcore/scripts/__init__.py
index a2c9414d4..a80285b7b 100755
--- a/src/pkgcore/scripts/__init__.py
+++ b/src/pkgcore/scripts/__init__.py
@@ -11,21 +11,24 @@ def run(script_name):
"""Run a given script module."""
try:
from pkgcore.util.commandline import Tool
- script_module = '.'.join(
- os.path.realpath(__file__).split(os.path.sep)[-3:-1] +
- [script_name.replace('-', '_')])
+
+ script_module = ".".join(
+ os.path.realpath(__file__).split(os.path.sep)[-3:-1]
+ + [script_name.replace("-", "_")]
+ )
script = import_module(script_module)
except ImportError as e:
- sys.stderr.write(f'Failed importing: {e}!\n')
- py_version = '.'.join(map(str, sys.version_info[:3]))
+ sys.stderr.write(f"Failed importing: {e}!\n")
+ py_version = ".".join(map(str, sys.version_info[:3]))
sys.stderr.write(
- 'Verify that pkgcore and its deps are properly installed '
- f'and/or PYTHONPATH is set correctly for python {py_version}.\n')
+ "Verify that pkgcore and its deps are properly installed "
+ f"and/or PYTHONPATH is set correctly for python {py_version}.\n"
+ )
# show traceback in debug mode or for unhandled exceptions
- if '--debug' in sys.argv[1:] or not all((e.__cause__, e.__context__)):
- sys.stderr.write('\n')
+ if "--debug" in sys.argv[1:] or not all((e.__cause__, e.__context__)):
+ sys.stderr.write("\n")
raise
- sys.stderr.write('Add --debug to the commandline for a traceback.\n')
+ sys.stderr.write("Add --debug to the commandline for a traceback.\n")
sys.exit(1)
tool = Tool(script.argparser)
@@ -40,5 +43,5 @@ def main():
run(os.path.basename(sys.argv[0]))
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
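
A worked example of the module-path construction in run() above; the install path and script name are illustrative:

    import os

    # pretend this file lives under .../site-packages/pkgcore/scripts/__init__.py
    fake_file = "/usr/lib/python3.11/site-packages/pkgcore/scripts/__init__.py"
    script_name = "pmerge"
    script_module = ".".join(
        os.path.realpath(fake_file).split(os.path.sep)[-3:-1]
        + [script_name.replace("-", "_")]
    )
    print(script_module)  # pkgcore.scripts.pmerge
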
diff --git a/src/pkgcore/scripts/patom.py b/src/pkgcore/scripts/patom.py
index b1de47e18..ac7a2ef23 100644
--- a/src/pkgcore/scripts/patom.py
+++ b/src/pkgcore/scripts/patom.py
@@ -14,14 +14,24 @@ def atom(value: str) -> atom_cls:
except MalformedAtom as exc:
# try to add an operator in case we got a version without op
try:
- return atom_cls('=' + value)
+ return atom_cls("=" + value)
except MalformedAtom:
raise exc
-argparser = ArgumentParser(description=__doc__, prog=__name__, script=(__file__, __name__),
- config=False, domain=False, )
+
+argparser = ArgumentParser(
+ description=__doc__,
+ prog=__name__,
+ script=(__file__, __name__),
+ config=False,
+ domain=False,
+)
group = argparser.add_mutually_exclusive_group()
-group.add_argument("-F", "--format", nargs='+', metavar=("FORMAT", "ATOM"),
+group.add_argument(
+ "-F",
+ "--format",
+ nargs="+",
+ metavar=("FORMAT", "ATOM"),
help="Custom output format",
docs="""
Specify a custom output format.
@@ -63,16 +73,19 @@ group.add_argument("-F", "--format", nargs='+', metavar=("FORMAT", "ATOM"),
OP
The package prefixes, that is version specifiers.
- """
+ """,
+)
+group.add_argument(
+ "-c", "--compare", nargs=2, metavar="ATOM", type=atom, help="Compare two atoms"
)
-group.add_argument("-c", "--compare", nargs=2, metavar="ATOM", type=atom,
- help="Compare two atoms")
+
def _transform_format(atom: atom_cls, match: re.Match):
if res := getattr(atom, match.group(0)[2:-1].lower()):
return str(res)
return "<unset>" if match.group(0)[1] == "{" else ""
+
@argparser.bind_main_func
def main(options, out, err):
if options.format:
@@ -90,7 +103,7 @@ def main(options, out, err):
err.write(f"bad format: {fmt!r}")
return 1
# TODO: check implementation and add tests
- elif options.compare: # pragma: no cover
+ elif options.compare: # pragma: no cover
atom1, atom2 = options.compare
if atom1.unversioned_atom != atom2.unversioned_atom or atom1.slot != atom2.slot:
op = "!="
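
A hedged sketch of the fallback in patom's atom() helper above: a bare versioned dependency is retried with a leading "=" so it parses as a valid atom.

    from pkgcore.ebuild.atom import atom as atom_cls

    # "dev-libs/foo-1.2.3" alone is malformed; "=dev-libs/foo-1.2.3" is valid
    a = atom_cls("=dev-libs/foo-1.2.3")
    print(a.key, a.version)  # dev-libs/foo 1.2.3
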
diff --git a/src/pkgcore/scripts/pclean.py b/src/pkgcore/scripts/pclean.py
index 5d8bbd3fc..c319b9b95 100644
--- a/src/pkgcore/scripts/pclean.py
+++ b/src/pkgcore/scripts/pclean.py
@@ -26,29 +26,46 @@ from ..util import parserestrict
from ..util.commandline import ArgumentParser, StoreRepoObject, convert_to_restrict
argparser = ArgumentParser(description=__doc__, script=(__file__, __name__))
-subparsers = argparser.add_subparsers(description='cleaning applets')
+subparsers = argparser.add_subparsers(description="cleaning applets")
+
+
@argparser.bind_parse_priority(10)
def _initialize_opts(namespace):
namespace.restrict = []
namespace.file_filters = Filters()
+
shared_opts = ArgumentParser(suppress=True)
-cleaning_opts = shared_opts.add_argument_group('generic cleaning options')
+cleaning_opts = shared_opts.add_argument_group("generic cleaning options")
cleaning_opts.add_argument(
- nargs='*', dest='targets', metavar='TARGET',
- help="packages to target for cleaning")
+ nargs="*", dest="targets", metavar="TARGET", help="packages to target for cleaning"
+)
cleaning_opts.add_argument(
- '-p', '--pretend', action='store_true',
- help='dry run without performing any changes')
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="dry run without performing any changes",
+)
cleaning_opts.add_argument(
- '-x', '--exclude', action='csv', dest='excludes', metavar='EXCLUDE',
- help='list of packages to exclude from removal')
+ "-x",
+ "--exclude",
+ action="csv",
+ dest="excludes",
+ metavar="EXCLUDE",
+ help="list of packages to exclude from removal",
+)
cleaning_opts.add_argument(
- '-X', '--exclude-file', type=argparse.FileType('r'),
- help='path to exclusion file')
+ "-X", "--exclude-file", type=argparse.FileType("r"), help="path to exclusion file"
+)
cleaning_opts.add_argument(
- '-S', '--pkgsets', action='csv_negations', metavar='PKGSET',
- help='list of pkgsets to include or exclude from removal')
+ "-S",
+ "--pkgsets",
+ action="csv_negations",
+ metavar="PKGSET",
+ help="list of pkgsets to include or exclude from removal",
+)
+
+
@shared_opts.bind_parse_priority(20)
def _setup_shared_opts(namespace):
namespace.exclude_restrict = None
@@ -58,71 +75,83 @@ def _setup_shared_opts(namespace):
disabled, enabled = namespace.pkgsets
unknown_sets = set(disabled + enabled).difference(namespace.config.pkgset)
if unknown_sets:
- argparser.error("unknown set%s: %s (available sets: %s)" % (
- pluralism(unknown_sets),
- ', '.join(sorted(map(repr, unknown_sets))),
- ', '.join(sorted(namespace.config.pkgset))))
+ argparser.error(
+ "unknown set%s: %s (available sets: %s)"
+ % (
+ pluralism(unknown_sets),
+ ", ".join(sorted(map(repr, unknown_sets))),
+ ", ".join(sorted(namespace.config.pkgset)),
+ )
+ )
for s in set(disabled):
exclude_restrictions.extend(namespace.config.pkgset[s])
for s in set(enabled):
- namespace.restrict.append(boolean.OrRestriction(*namespace.config.pkgset[s]))
+ namespace.restrict.append(
+ boolean.OrRestriction(*namespace.config.pkgset[s])
+ )
# handle command line and file excludes
excludes = namespace.excludes if namespace.excludes is not None else []
if namespace.exclude_file is not None:
- excludes.extend(namespace.exclude_file.read().split('\n'))
+ excludes.extend(namespace.exclude_file.read().split("\n"))
if excludes:
exclude_restrictions.extend(convert_to_restrict(excludes, default=None))
if exclude_restrictions:
namespace.restrict.append(
- boolean.OrRestriction(negate=True, *exclude_restrictions))
+ boolean.OrRestriction(negate=True, *exclude_restrictions)
+ )
namespace.exclude_restrict = boolean.OrRestriction(*exclude_restrictions)
def parse_time(s):
# simple approximations, could use dateutil for exact deltas
- units = {'s': 1}
- units['min'] = units['s'] * 60
- units['h'] = units['min'] * 60
- units['d'] = units['h'] * 24
- units['w'] = units['d'] * 7
- units['m'] = units['d'] * 30
- units['y'] = units['d'] * 365
-
- date = re.match(r'^(\d+)(%s)$' % '|'.join(units.keys()), s)
+ units = {"s": 1}
+ units["min"] = units["s"] * 60
+ units["h"] = units["min"] * 60
+ units["d"] = units["h"] * 24
+ units["w"] = units["d"] * 7
+ units["m"] = units["d"] * 30
+ units["y"] = units["d"] * 365
+
+ date = re.match(r"^(\d+)(%s)$" % "|".join(units.keys()), s)
if date:
value = int(date.group(1))
unit = date.group(2)
else:
raise argparse.ArgumentTypeError(
- f"invalid date: {s!r} (valid units: {' ,'.join(units.keys())})")
+ f"invalid date: {s!r} (valid units: {' ,'.join(units.keys())})"
+ )
return time.time() - (value * units[unit])
def parse_size(s):
units = {
- 'B': 1,
- 'K': 1024,
- 'M': 1024**2,
- 'G': 1024**3,
+ "B": 1,
+ "K": 1024,
+ "M": 1024**2,
+ "G": 1024**3,
}
- size = re.match(r'^(\d+)([%s])$' % ''.join(units.keys()), s)
+ size = re.match(r"^(\d+)([%s])$" % "".join(units.keys()), s)
if size:
value = int(size.group(1))
unit = size.group(2)
else:
raise argparse.ArgumentTypeError(
- f"invalid size: {s!r} (valid units: {' ,'.join(units.keys())})")
+ f"invalid size: {s!r} (valid units: {' ,'.join(units.keys())})"
+ )
return value * units[unit]
file_opts = ArgumentParser(suppress=True)
-file_cleaning_opts = file_opts.add_argument_group('file cleaning options')
+file_cleaning_opts = file_opts.add_argument_group("file cleaning options")
file_cleaning_opts.add_argument(
- '-m', '--modified', metavar='TIME', type=parse_time,
- help='skip files that have been modified since a given time',
+ "-m",
+ "--modified",
+ metavar="TIME",
+ type=parse_time,
+ help="skip files that have been modified since a given time",
docs="""
Don't remove files that have been modified since a given time. For
example, to skip files newer than a year use "1y" as an argument to this
@@ -130,10 +159,14 @@ file_cleaning_opts.add_argument(
Supported units are y, m, w, and d, and s representing years, months,
weeks, days, and seconds, respectively.
- """)
+ """,
+)
file_cleaning_opts.add_argument(
- '-s', '--size', metavar='SIZE', type=parse_size,
- help='skip files bigger than a given size',
+ "-s",
+ "--size",
+ metavar="SIZE",
+ type=parse_size,
+ help="skip files bigger than a given size",
docs="""
Don't remove files bigger than a given size. For example, to skip
files larger than 100 megabytes use "100M" as an argument to this
@@ -141,33 +174,53 @@ file_cleaning_opts.add_argument(
Supported units are B, K, M, and G representing bytes, kilobytes,
megabytes, and gigabytes, respectively.
- """)
+ """,
+)
+
+
@file_opts.bind_parse_priority(20)
def _setup_file_opts(namespace):
if namespace.modified is not None:
- namespace.file_filters.append(lambda x: os.stat(x).st_mtime < namespace.modified)
+ namespace.file_filters.append(
+ lambda x: os.stat(x).st_mtime < namespace.modified
+ )
if namespace.size is not None:
namespace.file_filters.append(lambda x: os.stat(x).st_size < namespace.size)
repo_opts = ArgumentParser(suppress=True)
-repo_cleaning_opts = repo_opts.add_argument_group('repo cleaning options')
+repo_cleaning_opts = repo_opts.add_argument_group("repo cleaning options")
repo_cleaning_opts.add_argument(
- '-I', '--installed', action='store_true', dest='exclude_installed',
- help='skip files for packages that are currently installed')
+ "-I",
+ "--installed",
+ action="store_true",
+ dest="exclude_installed",
+ help="skip files for packages that are currently installed",
+)
repo_cleaning_opts.add_argument(
- '-E', '--exists', action='store_true', dest='exclude_exists',
- help='skip files for packages that relate to ebuilds in the tree')
+ "-E",
+ "--exists",
+ action="store_true",
+ dest="exclude_exists",
+ help="skip files for packages that relate to ebuilds in the tree",
+)
repo_cleaning_opts.add_argument(
- '-f', '--fetch-restricted', action='store_true', dest='exclude_fetch_restricted',
- help='skip fetch-restricted files')
+ "-f",
+ "--fetch-restricted",
+ action="store_true",
+ dest="exclude_fetch_restricted",
+ help="skip fetch-restricted files",
+)
repo_cleaning_opts.add_argument(
- "-r", "--repo", help="target repository",
+ "-r",
+ "--repo",
+ help="target repository",
action=StoreRepoObject,
docs="""
Target repository to search for matches. If no repo is specified all
relevant repos are used.
- """)
+ """,
+)
@argparser.bind_parse_priority(30)
@@ -205,7 +258,10 @@ class _UnfilteredRepos(DictMixin):
"""Generate custom, unfiltered repos on demand."""
_supported_attrs = {
- 'pkg_masks', 'pkg_unmasks', 'pkg_accept_keywords', 'pkg_keywords',
+ "pkg_masks",
+ "pkg_unmasks",
+ "pkg_accept_keywords",
+ "pkg_keywords",
}
def __init__(self, domain):
@@ -238,8 +294,10 @@ class _UnfilteredRepos(DictMixin):
config = subparsers.add_parser(
- 'config', parents=(shared_opts,),
- description='remove config file settings')
+ "config", parents=(shared_opts,), description="remove config file settings"
+)
+
+
@config.bind_main_func
def config_main(options, out, err):
domain = options.domain
@@ -253,13 +311,19 @@ def config_main(options, out, err):
def iter_restrict(iterable):
for x in iterable:
restrict = x[0]
- if (options.exclude_restrict is None or
- not options.exclude_restrict.match(restrict)):
+ if options.exclude_restrict is None or not options.exclude_restrict.match(
+ restrict
+ ):
yield restrict, list(x)
domain_attrs = (
- 'pkg_masks', 'pkg_unmasks', 'pkg_keywords', 'pkg_accept_keywords',
- 'pkg_licenses', 'pkg_use', 'pkg_env',
+ "pkg_masks",
+ "pkg_unmasks",
+ "pkg_keywords",
+ "pkg_accept_keywords",
+ "pkg_licenses",
+ "pkg_use",
+ "pkg_env",
)
attrs = {}
@@ -274,18 +338,20 @@ def config_main(options, out, err):
for restrict, item in iterable:
path, lineno, line = item.pop(), item.pop(), item.pop()
if not all_repos_raw.match(restrict):
- changes['unavailable'][path].append((line, lineno, str(restrict)))
+ changes["unavailable"][path].append((line, lineno, str(restrict)))
continue
if not installed_repos.match(restrict):
- changes['uninstalled'][path].append((line, lineno, str(restrict)))
+ changes["uninstalled"][path].append((line, lineno, str(restrict)))
if name in unfiltered_repos:
filtered_pkgs = all_ebuild_repos.match(restrict)
unfiltered_pkgs = unfiltered_repos[name].match(restrict)
if filtered_pkgs == unfiltered_pkgs:
- changes[f'unnecessary_{name}'][path].append((line, lineno, str(restrict)))
- elif name == 'pkg_use':
+ changes[f"unnecessary_{name}"][path].append(
+ (line, lineno, str(restrict))
+ )
+ elif name == "pkg_use":
atom, use = item
# find duplicates
@@ -298,33 +364,36 @@ def config_main(options, out, err):
duplicates.add(u)
use_sets[i].add(u)
if duplicates:
- changes['duplicate_use'][path].append(
- (line, lineno, ', '.join(duplicates)))
+ changes["duplicate_use"][path].append(
+ (line, lineno, ", ".join(duplicates))
+ )
# find conflicts
conflicting = enabled & disabled
if conflicting:
- changes['conflicting_use'][path].append(
- (line, lineno, ', '.join(conflicting)))
+ changes["conflicting_use"][path].append(
+ (line, lineno, ", ".join(conflicting))
+ )
# find unknowns
pkgs = all_repos_raw.match(atom)
available = {u for pkg in pkgs for u in pkg.iuse_stripped}
unknown = (disabled - available) | (enabled - available)
if unknown:
- changes['unknown_use'][path].append(
- (line, lineno, ', '.join(unknown)))
+ changes["unknown_use"][path].append(
+ (line, lineno, ", ".join(unknown))
+ )
type_mapping = {
- 'unavailable': 'Unavailable package(s)',
- 'uninstalled': 'Uninstalled package(s)',
- 'unnecessary_pkg_masks': 'Unnecessary mask(s)',
- 'unnecessary_pkg_unmasks': 'Unnecessary unmask(s)',
- 'unnecessary_pkg_accept_keywords': 'Unnecessary accept keywords(s)',
- 'unnecessary_pkg_keywords': 'Unnecessary keywords(s)',
- 'duplicate_use': 'Duplicate use flag(s)',
- 'conflicting_use': 'Conflicting use flag(s)',
- 'unknown_use': 'Nonexistent use flag(s)',
+ "unavailable": "Unavailable package(s)",
+ "uninstalled": "Uninstalled package(s)",
+ "unnecessary_pkg_masks": "Unnecessary mask(s)",
+ "unnecessary_pkg_unmasks": "Unnecessary unmask(s)",
+ "unnecessary_pkg_accept_keywords": "Unnecessary accept keywords(s)",
+ "unnecessary_pkg_keywords": "Unnecessary keywords(s)",
+ "duplicate_use": "Duplicate use flag(s)",
+ "conflicting_use": "Conflicting use flag(s)",
+ "unknown_use": "Nonexistent use flag(s)",
}
for t, paths in changes.items():
@@ -337,9 +406,11 @@ def config_main(options, out, err):
dist = subparsers.add_parser(
- 'dist', parents=(shared_opts, file_opts, repo_opts),
- description='remove distfiles')
-dist_opts = dist.add_argument_group('distfile options')
+ "dist", parents=(shared_opts, file_opts, repo_opts), description="remove distfiles"
+)
+dist_opts = dist.add_argument_group("distfile options")
+
+
@dist.bind_final_check
def _dist_validate_args(parser, namespace):
distdir = namespace.domain.distdir
@@ -361,22 +432,30 @@ def _dist_validate_args(parser, namespace):
installed_dist.update(iflatten_instance(pkg.distfiles))
# exclude distfiles for existing ebuilds or fetch restrictions
- if namespace.exclude_fetch_restricted or (namespace.exclude_exists and not namespace.restrict):
+ if namespace.exclude_fetch_restricted or (
+ namespace.exclude_exists and not namespace.restrict
+ ):
for pkg in repo:
- exists_dist.update(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
- if 'fetch' in pkg.restrict:
- restricted_dist.update(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
+ exists_dist.update(
+ iflatten_instance(getattr(pkg, "_raw_pkg", pkg).distfiles)
+ )
+ if "fetch" in pkg.restrict:
+ restricted_dist.update(
+ iflatten_instance(getattr(pkg, "_raw_pkg", pkg).distfiles)
+ )
# exclude distfiles from specified restrictions
if namespace.exclude_restrict:
for pkg in repo.itermatch(namespace.exclude_restrict, sorter=sorted):
- excludes_dist.update(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
+ excludes_dist.update(
+ iflatten_instance(getattr(pkg, "_raw_pkg", pkg).distfiles)
+ )
# determine dist files for custom restrict targets
if namespace.restrict:
target_dist = defaultdict(lambda: defaultdict(set))
for pkg in repo.itermatch(namespace.restrict, sorter=sorted):
- s = set(iflatten_instance(getattr(pkg, '_raw_pkg', pkg).distfiles))
+ s = set(iflatten_instance(getattr(pkg, "_raw_pkg", pkg).distfiles))
target_dist[pkg.unversioned_atom][pkg].update(s)
if namespace.exclude_exists:
exists_dist.update(s)
@@ -384,20 +463,26 @@ def _dist_validate_args(parser, namespace):
extra_regex_prefixes = defaultdict(set)
pkg_regex_prefixes = set()
for catpn, pkgs in target_dist.items():
- pn_regex = r'\W'.join(re.split(r'\W', catpn.package))
- pkg_regex = re.compile(r'(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*' % pn_regex,
- re.IGNORECASE)
+ pn_regex = r"\W".join(re.split(r"\W", catpn.package))
+ pkg_regex = re.compile(
+ r"(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*" % pn_regex, re.IGNORECASE
+ )
pkg_regex_prefixes.add(pn_regex)
for pkg, files in pkgs.items():
files = sorted(files)
for f in files:
- if (pkg_regex.match(f) or (
- extra_regex_prefixes and
- re.match(r'(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+' % '|'.join(extra_regex_prefixes[catpn]), f))):
+ if pkg_regex.match(f) or (
+ extra_regex_prefixes
+ and re.match(
+ r"(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+"
+ % "|".join(extra_regex_prefixes[catpn]),
+ f,
+ )
+ ):
continue
else:
- pieces = re.split(r'([\W?(0-9)+])+(\W\w+)*(\.\w+)+', f)
- if pieces[-1] == '':
+ pieces = re.split(r"([\W?(0-9)+])+(\W\w+)*(\.\w+)+", f)
+ if pieces[-1] == "":
pieces.pop()
if len(pieces) > 1:
extra_regex_prefixes[catpn].add(pieces[0])
@@ -406,14 +491,25 @@ def _dist_validate_args(parser, namespace):
regexes = []
# build regexes to match distfiles for older ebuilds no longer in the tree
if pkg_regex_prefixes:
- pkg_regex_prefixes_str = '|'.join(sorted(pkg_regex_prefixes))
- regexes.append(re.compile(r'(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*' % (
- pkg_regex_prefixes_str,)))
+ pkg_regex_prefixes_str = "|".join(sorted(pkg_regex_prefixes))
+ regexes.append(
+ re.compile(
+ r"(%s)(\W\w+)+([\W?(0-9)+])*(\W\w+)*(\.\w+)*"
+ % (pkg_regex_prefixes_str,)
+ )
+ )
if extra_regex_prefixes:
- extra_regex_prefixes_str = '|'.join(sorted(chain.from_iterable(
- v for k, v in extra_regex_prefixes.items())))
- regexes.append(re.compile(r'(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+' % (
- extra_regex_prefixes_str,)))
+ extra_regex_prefixes_str = "|".join(
+ sorted(
+ chain.from_iterable(v for k, v in extra_regex_prefixes.items())
+ )
+ )
+ regexes.append(
+ re.compile(
+ r"(%s)([\W?(0-9)+])+(\W\w+)*(\.\w+)+"
+ % (extra_regex_prefixes_str,)
+ )
+ )
if regexes:
for f in all_dist_files:
@@ -426,11 +522,13 @@ def _dist_validate_args(parser, namespace):
saving_files = installed_dist | exists_dist | excludes_dist | restricted_dist
target_files.difference_update(saving_files)
- targets = (pjoin(distdir, f) for f in sorted(all_dist_files.intersection(target_files)))
+ targets = (
+ pjoin(distdir, f) for f in sorted(all_dist_files.intersection(target_files))
+ )
removal_func = partial(os.remove)
namespace.remove = (
- (removal_func, f) for f in
- filter(namespace.file_filters.run, targets))
+ (removal_func, f) for f in filter(namespace.file_filters.run, targets)
+ )
def pkg_changed(pkg, domain, attrs):
@@ -446,7 +544,7 @@ def pkg_changed(pkg, domain, attrs):
ebuild_attr = getattr(ebuild_pkg, attr)
binpkg_attr = getattr(pkg, attr)
except AttributeError:
- raise argparser.error(f'nonexistent attribute: {attr!r}')
+ raise argparser.error(f"nonexistent attribute: {attr!r}")
if attr.upper() in pkg.eapi.dep_keys:
ebuild_attr = ebuild_attr.evaluate_depset(pkg.use)
if ebuild_attr != binpkg_attr:
@@ -455,19 +553,29 @@ def pkg_changed(pkg, domain, attrs):
pkg_opts = ArgumentParser(suppress=True)
-pkg_cleaning_opts = pkg_opts.add_argument_group('binpkg cleaning options')
+pkg_cleaning_opts = pkg_opts.add_argument_group("binpkg cleaning options")
pkg_cleaning_opts.add_argument(
- '--source-repo', metavar='REPO',
- help='remove binpkgs with matching source repo')
+ "--source-repo", metavar="REPO", help="remove binpkgs with matching source repo"
+)
pkg_cleaning_opts.add_argument(
- '-b', '--bindist', action='store_true',
- help='only remove binpkgs that restrict distribution')
+ "-b",
+ "--bindist",
+ action="store_true",
+ help="only remove binpkgs that restrict distribution",
+)
pkg_cleaning_opts.add_argument(
- '-c', '--changed', action='csv',
- help='comma separated list of package attributes to check for ebuild changes')
+ "-c",
+ "--changed",
+ action="csv",
+ help="comma separated list of package attributes to check for ebuild changes",
+)
pkg = subparsers.add_parser(
- 'pkg', parents=(shared_opts, file_opts, repo_opts, pkg_opts),
- description='remove binpkgs')
+ "pkg",
+ parents=(shared_opts, file_opts, repo_opts, pkg_opts),
+ description="remove binpkgs",
+)
+
+
@pkg.bind_final_check
def _pkg_validate_args(parser, namespace):
repo = namespace.repo
@@ -481,34 +589,50 @@ def _pkg_validate_args(parser, namespace):
pkgs = (pkg for pkg in repo.itermatch(namespace.restrict))
pkg_filters = Filters()
if namespace.bindist:
- pkg_filters.append(lambda pkg: 'bindist' in pkg.restrict)
+ pkg_filters.append(lambda pkg: "bindist" in pkg.restrict)
if namespace.changed:
- pkg_filters.append(lambda pkg: pkg_changed(pkg, namespace.domain, namespace.changed))
+ pkg_filters.append(
+ lambda pkg: pkg_changed(pkg, namespace.domain, namespace.changed)
+ )
if namespace.exclude_installed:
- pkg_filters.append(lambda pkg: pkg.versioned_atom not in namespace.domain.all_installed_repos)
+ pkg_filters.append(
+ lambda pkg: pkg.versioned_atom not in namespace.domain.all_installed_repos
+ )
if namespace.exclude_exists:
- pkg_filters.append(lambda pkg: pkg.versioned_atom not in namespace.domain.all_ebuild_repos_raw)
+ pkg_filters.append(
+ lambda pkg: pkg.versioned_atom not in namespace.domain.all_ebuild_repos_raw
+ )
if namespace.exclude_fetch_restricted:
- pkg_filters.append(lambda pkg: 'fetch' not in pkg.restrict)
+ pkg_filters.append(lambda pkg: "fetch" not in pkg.restrict)
if namespace.source_repo is not None:
pkg_filters.append(lambda pkg: namespace.source_repo == pkg.source_repository)
pkgs = list(filter(pkg_filters.run, pkgs))
removal_func = partial(os.remove)
namespace.remove = (
- (removal_func, binpkg) for binpkg in
- sorted(filter(namespace.file_filters.run, (pkg.path for pkg in pkgs))))
+ (removal_func, binpkg)
+ for binpkg in sorted(
+ filter(namespace.file_filters.run, (pkg.path for pkg in pkgs))
+ )
+ )
+
tmp = subparsers.add_parser(
- 'tmp', parents=(shared_opts,),
- description='remove tmpdir entries')
-tmp_opts = tmp.add_argument_group('tmpfile options')
+ "tmp", parents=(shared_opts,), description="remove tmpdir entries"
+)
+tmp_opts = tmp.add_argument_group("tmpfile options")
tmp_opts.add_argument(
- '-a', '--all', dest='wipe_all', action='store_true',
- help='wipe the entire tmpdir',
+ "-a",
+ "--all",
+ dest="wipe_all",
+ action="store_true",
+ help="wipe the entire tmpdir",
docs="""
Force the entire tmpdir to be wiped. Note that this overrides any
restrictions that have been specified.
- """)
+ """,
+)
+
+
@tmp.bind_final_check
def _tmp_validate_args(parser, namespace):
tmpdir = namespace.domain.pm_tmpdir
@@ -518,12 +642,16 @@ def _tmp_validate_args(parser, namespace):
if namespace.restrict and not namespace.wipe_all:
# create a fake repo from tmpdir entries and pull matches from it
pkg_map = {}
- for pkg_build_dir in glob.glob(pjoin(tmpdir, '*', '*')):
+ for pkg_build_dir in glob.glob(pjoin(tmpdir, "*", "*")):
try:
- pkg = atom_mod.atom('=' + pkg_build_dir[len(tmpdir):].lstrip(os.path.sep))
+ pkg = atom_mod.atom(
+ "=" + pkg_build_dir[len(tmpdir) :].lstrip(os.path.sep)
+ )
except atom_mod.MalformedAtom:
continue
- pkg_map.setdefault(pkg.category, {}).setdefault(pkg.package, []).append(pkg.fullver)
+ pkg_map.setdefault(pkg.category, {}).setdefault(pkg.package, []).append(
+ pkg.fullver
+ )
repo = SimpleTree(pkg_map)
def _remove_dir_and_empty_parent(d):
@@ -538,8 +666,10 @@ def _tmp_validate_args(parser, namespace):
raise
removal_func = partial(_remove_dir_and_empty_parent)
- dirs = ((removal_func, pjoin(tmpdir, pkg.cpvstr))
- for pkg in repo.itermatch(namespace.restrict))
+ dirs = (
+ (removal_func, pjoin(tmpdir, pkg.cpvstr))
+ for pkg in repo.itermatch(namespace.restrict)
+ )
else:
# not in a configured repo dir, remove all tmpdir entries
dir_removal_func = partial(shutil.rmtree)
@@ -568,11 +698,13 @@ def _remove(options, out, err):
func(target)
except OSError as e:
if options.verbosity >= 0:
- err.write(f"{options.prog}: failed to remove {target!r}: {e.strerror}")
+ err.write(
+ f"{options.prog}: failed to remove {target!r}: {e.strerror}"
+ )
ret = 1
continue
else:
- out.write('\n'.join(target for _, target in options.remove))
+ out.write("\n".join(target for _, target in options.remove))
return ret
diff --git a/src/pkgcore/scripts/pclonecache.py b/src/pkgcore/scripts/pclonecache.py
index 9ee60a74b..905a46434 100644
--- a/src/pkgcore/scripts/pclonecache.py
+++ b/src/pkgcore/scripts/pclonecache.py
@@ -5,23 +5,31 @@ import time
from ..util import commandline
argparser = commandline.ArgumentParser(
- domain=False, description=__doc__, script=(__file__, __name__))
+ domain=False, description=__doc__, script=(__file__, __name__)
+)
argparser.add_argument(
- "source", config_type='cache', priority=20,
+ "source",
+ config_type="cache",
+ priority=20,
action=commandline.StoreConfigObject,
- help="source cache to copy data from")
+ help="source cache to copy data from",
+)
argparser.add_argument(
- "target", config_type='cache', priority=21,
- action=commandline.StoreConfigObject, writable=True,
- help="target cache to update. Must be writable.")
+ "target",
+ config_type="cache",
+ priority=21,
+ action=commandline.StoreConfigObject,
+ writable=True,
+ help="target cache to update. Must be writable.",
+)
@argparser.bind_main_func
def main(options, out, err):
if options.target.readonly:
argparser.error(
- "can't update cache label '%s', it's marked readonly." %
- (options.target,))
+ "can't update cache label '%s', it's marked readonly." % (options.target,)
+ )
source, target = options.source, options.target
if not target.autocommits:
diff --git a/src/pkgcore/scripts/pconfig.py b/src/pkgcore/scripts/pconfig.py
index 95f00bb36..ff5b06cde 100644
--- a/src/pkgcore/scripts/pconfig.py
+++ b/src/pkgcore/scripts/pconfig.py
@@ -13,56 +13,58 @@ from ..util import commandline
def dump_section(config, out):
- out.first_prefix.append(' ')
- out.write(f'# typename of this section: {config.type.name}')
- out.write(f'class {config.type.callable.__module__}.{config.type.callable.__name__};')
+ out.first_prefix.append(" ")
+ out.write(f"# typename of this section: {config.type.name}")
+ out.write(
+ f"class {config.type.callable.__module__}.{config.type.callable.__name__};"
+ )
if config.default:
- out.write('default true;')
+ out.write("default true;")
for key, val in sorted(config.config.items()):
typename = config.type.types.get(key)
if typename is None:
if config.type.allow_unknowns:
- typename = 'str'
+ typename = "str"
else:
- raise ValueError(f'no type set for {key} ({val!r})')
- out.write(f'# type: {typename}')
- if typename.startswith('lazy_refs'):
+ raise ValueError(f"no type set for {key} ({val!r})")
+ out.write(f"# type: {typename}")
+ if typename.startswith("lazy_refs"):
typename = typename[5:]
val = list(ref.collapse() for ref in val)
- elif typename.startswith('lazy_ref'):
+ elif typename.startswith("lazy_ref"):
typename = typename[5:]
val = val.collapse()
- if typename == 'str':
- out.write(f'{key} {val!r};')
- elif typename == 'bool':
- out.write(f'{key} {bool(val)};')
- elif typename == 'list':
+ if typename == "str":
+ out.write(f"{key} {val!r};")
+ elif typename == "bool":
+ out.write(f"{key} {bool(val)};")
+ elif typename == "list":
out.write(f"{key} {' '.join(map(repr, val))};")
- elif typename == 'callable':
- out.write(f'{key} {val.__module__}.{val.__name__};')
- elif typename.startswith('ref:'):
+ elif typename == "callable":
+ out.write(f"{key} {val.__module__}.{val.__name__};")
+ elif typename.startswith("ref:"):
if val.name is None:
- out.write(f'{key} {{')
+ out.write(f"{key} {{")
dump_section(val, out)
- out.write('};')
+ out.write("};")
else:
- out.write(f'{key} {val.name!r};')
- elif typename.startswith('refs:'):
+ out.write(f"{key} {val.name!r};")
+ elif typename.startswith("refs:"):
out.autoline = False
- out.write(f'{key}')
+ out.write(f"{key}")
for i, subconf in enumerate(val):
if subconf.name is None:
out.autoline = True
- out.write(' {')
+ out.write(" {")
dump_section(subconf, out)
out.autoline = False
- out.write('}')
+ out.write("}")
else:
- out.write(f' {subconf.name!r}')
+ out.write(f" {subconf.name!r}")
out.autoline = True
- out.write(';')
+ out.write(";")
else:
- out.write(f'# {key} = {val!r} of unknown type {typename}')
+ out.write(f"# {key} = {val!r} of unknown type {typename}")
out.first_prefix.pop()
@@ -70,36 +72,61 @@ def get_classes(configs):
# Not particularly efficient (doesn't memoize already visited configs)
classes = set()
for config in configs:
- classes.add(f'{config.type.callable.__module__}.{config.type.callable.__name__}')
+ classes.add(
+ f"{config.type.callable.__module__}.{config.type.callable.__name__}"
+ )
for key, val in config.config.items():
typename = config.type.types.get(key)
if typename is None:
continue
- if typename.startswith('ref:'):
+ if typename.startswith("ref:"):
classes.update(get_classes((val,)))
- elif typename.startswith('refs:'):
+ elif typename.startswith("refs:"):
classes.update(get_classes(val))
- elif typename.startswith('lazy_refs'):
+ elif typename.startswith("lazy_refs"):
classes.update(get_classes(c.collapse() for c in val))
- elif typename.startswith('lazy_ref'):
+ elif typename.startswith("lazy_ref"):
classes.update(get_classes((val.collapse(),)))
return classes
-shared_options = (commandline.ArgumentParser(
- config=False, color=False, debug=False, quiet=False, verbose=False,
- version=False, domain=False, add_help=False),)
-shared_options_domain = (commandline.ArgumentParser(
- config=False, color=False, debug=False, quiet=False, verbose=False,
- version=False, domain=True, add_help=False),)
+shared_options = (
+ commandline.ArgumentParser(
+ config=False,
+ color=False,
+ debug=False,
+ quiet=False,
+ verbose=False,
+ version=False,
+ domain=False,
+ add_help=False,
+ ),
+)
+shared_options_domain = (
+ commandline.ArgumentParser(
+ config=False,
+ color=False,
+ debug=False,
+ quiet=False,
+ verbose=False,
+ version=False,
+ domain=True,
+ add_help=False,
+ ),
+)
pkgcore_opts = commandline.ArgumentParser(domain=False, script=(__file__, __name__))
argparser = commandline.ArgumentParser(
- suppress=True, description=__doc__, parents=(pkgcore_opts,))
+ suppress=True, description=__doc__, parents=(pkgcore_opts,)
+)
subparsers = argparser.add_subparsers(description="configuration related subcommands")
classes = subparsers.add_parser(
- "classes", parents=shared_options,
- description="list all classes referenced by the config")
+ "classes",
+ parents=shared_options,
+ description="list all classes referenced by the config",
+)
+
+
@classes.bind_main_func
def classes_main(options, out, err):
"""List all classes referenced by the config."""
@@ -117,43 +144,55 @@ def classes_main(options, out, err):
describe_class = subparsers.add_parser(
- "describe_class", parents=shared_options,
- description="describe the arguments a class needs, how to use it in a config")
+ "describe_class",
+ parents=shared_options,
+ description="describe the arguments a class needs, how to use it in a config",
+)
describe_class.add_argument(
- "target_class", action='store',
+ "target_class",
+ action="store",
type=partial(commandline.python_namespace_type, attribute=True),
- help="The class to inspect and output details about")
+ help="The class to inspect and output details about",
+)
+
+
@describe_class.bind_main_func
def describe_class_main(options, out, err):
"""Describe the arguments a class needs."""
try:
type_obj = basics.ConfigType(options.target_class)
except errors.TypeDefinitionError:
- err.write('Not a valid type!')
+ err.write("Not a valid type!")
return 1
write_type(out, type_obj)
+
def write_type(out, type_obj):
- out.write(f'typename is {type_obj.name}')
+ out.write(f"typename is {type_obj.name}")
if type_obj.doc:
- for line in type_obj.doc.split('\n'):
+ for line in type_obj.doc.split("\n"):
out.write(line.strip(), wrap=True)
if type_obj.allow_unknowns:
- out.write('values not listed are handled as strings')
+ out.write("values not listed are handled as strings")
out.write()
for name, typename in sorted(type_obj.types.items()):
if typename.startswith("lazy_ref:"):
- typename = typename[len("lazy_ref:"):]
+ typename = typename[len("lazy_ref:") :]
elif typename.startswith("lazy_refs:"):
- typename = typename[len("lazy_refs:"):]
- out.write(f'{name}: {typename}', autoline=False)
+ typename = typename[len("lazy_refs:") :]
+ out.write(f"{name}: {typename}", autoline=False)
if name in type_obj.required:
- out.write(' (required)', autoline=False)
+ out.write(" (required)", autoline=False)
out.write()
+
uncollapsable = subparsers.add_parser(
- "uncollapsable", parents=shared_options,
- description="show configuration objects that could not be collapsed/instantiated")
+ "uncollapsable",
+ parents=shared_options,
+ description="show configuration objects that could not be collapsed/instantiated",
+)
+
+
@uncollapsable.bind_main_func
def uncollapsable_main(options, out, err):
"""Show things that could not be collapsed."""
@@ -173,18 +212,26 @@ def uncollapsable_main(options, out, err):
dump = subparsers.add_parser(
- "dump", parents=shared_options,
- description='dump the entire configuration',
+ "dump",
+ parents=shared_options,
+ description="dump the entire configuration",
docs="""
Dump the entire configuration in a format similar to the ini-like
default format; however, do not rely on this to always write a loadable
config. There may be quoting issues. With a typename argument only that
type is dumped.
- """)
+ """,
+)
dump.add_argument(
- "typename", nargs="?", action="store", default=None,
+ "typename",
+ nargs="?",
+ action="store",
+ default=None,
help="if specified, limit output to just config directives of this "
- "type (defaults to showing all types)")
+ "type (defaults to showing all types)",
+)
+
+
@dump.bind_main_func
def dump_main(options, out, err):
"""Dump the entire configuration."""
@@ -202,9 +249,9 @@ def dump_main(options, out, err):
continue
except errors.ConfigurationError:
continue
- out.write(f'{name!r} {{')
+ out.write(f"{name!r} {{")
dump_section(section, out)
- out.write('}')
+ out.write("}")
def all_configurables():
@@ -213,42 +260,52 @@ def all_configurables():
ignore_all_import_failures = True
def _default_module_blacklister(self, target):
- if target.startswith(("pkgcore.test.", 'pkgcore.plugins.')) \
- or 'pkgcore.test' == target:
+ if (
+ target.startswith(("pkgcore.test.", "pkgcore.plugins."))
+ or "pkgcore.test" == target
+ ):
return True
return super()._default_module_blacklister(target)
return (
obj
- for module in walker().walk_namespace('pkgcore')
+ for module in walker().walk_namespace("pkgcore")
for name in dir(module)
- if getattr(obj := getattr(module, name), 'pkgcore_config_type', None) is not None
- if not getattr(obj, 'disabled', False)
- if not getattr(obj, '_plugin_disabled_check', lambda: False)()
+ if getattr(obj := getattr(module, name), "pkgcore_config_type", None)
+ is not None
+ if not getattr(obj, "disabled", False)
+ if not getattr(obj, "_plugin_disabled_check", lambda: False)()
)
configurables = subparsers.add_parser(
- "configurables", parents=shared_options,
- description='list registered configurables (may not be complete)')
+ "configurables",
+ parents=shared_options,
+ description="list registered configurables (may not be complete)",
+)
configurables.add_argument(
- "typename", nargs='?', default=None, action='store',
+ "typename",
+ nargs="?",
+ default=None,
+ action="store",
help="if specified, only output configurables of that type; else output "
- "all configurables")
+ "all configurables",
+)
+
+
@configurables.bind_main_func
def configurables_main(options, out, err):
"""List registered configurables."""
# try and sort this beast.
def key_func(obj):
- return "%s.%s" % (getattr(obj, '__module__', ''),
- getattr(obj, '__name__', ''))
+ return "%s.%s" % (getattr(obj, "__module__", ""), getattr(obj, "__name__", ""))
for configurable in sorted(all_configurables(), key=key_func):
type_obj = basics.ConfigType(configurable)
if options.typename is not None and type_obj.name != options.typename:
continue
- out.write(out.bold, f'{configurable.__module__}.{configurable.__name__}')
+ out.write(out.bold, f"{configurable.__module__}.{configurable.__name__}")
write_type(out, type_obj)
out.write()
out.write()
@@ -257,67 +314,71 @@ def configurables_main(options, out, err):
def _dump_uncollapsed_section(config, out, err, section):
"""Write a single section."""
if isinstance(section, str):
- out.write(f'named section {section!r}')
+ out.write(f"named section {section!r}")
return
for key in sorted(section.keys()):
- kind, value = section.render_value(config, key, 'repr')
- out.write(f'# type: {kind}')
- if kind == 'list':
- for name, val in zip((
- key + '.prepend', key, key + '.append'), value):
+ kind, value = section.render_value(config, key, "repr")
+ out.write(f"# type: {kind}")
+ if kind == "list":
+ for name, val in zip((key + ".prepend", key, key + ".append"), value):
if val:
- out.write(
- repr(name), ' = ', ' '.join(repr(v) for v in val))
+ out.write(repr(name), " = ", " ".join(repr(v) for v in val))
continue
- if kind in ('refs', 'str'):
- for name, val in zip((
- key + '.prepend', key, key + '.append'), value):
+ if kind in ("refs", "str"):
+ for name, val in zip((key + ".prepend", key, key + ".append"), value):
if not val:
continue
- out.write(repr(name), ' = ', autoline=False)
- if kind == 'str':
+ out.write(repr(name), " = ", autoline=False)
+ if kind == "str":
out.write(repr(val))
else:
out.write()
- out.first_prefix.append(' ')
+ out.first_prefix.append(" ")
try:
for subnr, subsection in enumerate(val):
- subname = f'nested section {subnr + 1}'
+ subname = f"nested section {subnr + 1}"
out.write(subname)
- out.write('=' * len(subname))
+ out.write("=" * len(subname))
_dump_uncollapsed_section(config, out, err, subsection)
out.write()
finally:
out.first_prefix.pop()
continue
- out.write(f'{key!r} = ', autoline=False)
- if kind == 'callable':
+ out.write(f"{key!r} = ", autoline=False)
+ if kind == "callable":
out.write(value.__module__, value.__name__)
- elif kind == 'bool':
+ elif kind == "bool":
out.write(str(value))
- elif kind == 'ref':
- out.first_prefix.append(' ')
+ elif kind == "ref":
+ out.first_prefix.append(" ")
try:
out.write()
_dump_uncollapsed_section(config, out, err, value)
finally:
out.first_prefix.pop()
else:
- err.error(f'unsupported type {kind!r}')
+ err.error(f"unsupported type {kind!r}")
+
dump_uncollapsed = subparsers.add_parser(
- "dump-uncollapsed", parents=shared_options,
+ "dump-uncollapsed",
+ parents=shared_options,
description="dump the configuration in a raw, uncollapsed form",
docs="""
Dump the configuration in a raw, uncollapsed form not directly usable
as a configuration file, mainly used for inspection.
- """)
+ """,
+)
+
+
@dump_uncollapsed.bind_main_func
def dump_uncollapsed_main(options, out, err):
"""dump the configuration in a raw, uncollapsed form.
Not directly usable as a configuration file, mainly used for inspection
"""
- out.write(textwrap.dedent('''\
+ out.write(
+ textwrap.dedent(
+ """\
# Warning:
# Do not copy this output to a configuration file directly,
# because the types you see here are only guesses.
@@ -325,25 +386,35 @@ def dump_uncollapsed_main(options, out, err):
# show up as "string" here and may need to be converted
# (for example from space-separated to comma-separated)
# to work in a config file with a different format.
- '''))
+ """
+ )
+ )
for i, source in enumerate(options.config.configs):
- s = f'Source {i + 1}'
- out.write(out.bold, '*' * len(s))
+ s = f"Source {i + 1}"
+ out.write(out.bold, "*" * len(s))
out.write(out.bold, s)
- out.write(out.bold, '*' * len(s))
+ out.write(out.bold, "*" * len(s))
out.write()
for name, section in sorted(source.items()):
- out.write(f'{name}')
- out.write('=' * len(name))
+ out.write(f"{name}")
+ out.write("=" * len(name))
_dump_uncollapsed_section(options.config, out, err, section)
out.write()
+
package = subparsers.add_parser(
- "package", parents=shared_options_domain,
- description="invoke a packages custom configuration scripts")
+ "package",
+ parents=shared_options_domain,
+ description="invoke a packages custom configuration scripts",
+)
commandline.make_query(
- package, nargs='+', dest='query',
- help="restrictions/atoms; matching installed packages will be configured")
+ package,
+ nargs="+",
+ dest="query",
+ help="restrictions/atoms; matching installed packages will be configured",
+)
+
+
@package.bind_main_func
def package_func(options, out, err):
matched = True
@@ -362,8 +433,10 @@ def package_func(options, out, err):
world = subparsers.add_parser(
- "world", parents=shared_options_domain,
- description="inspect and modify the world file")
+ "world",
+ parents=shared_options_domain,
+ description="inspect and modify the world file",
+)
world_modes = world.add_argument_group(
"command modes",
description="""
@@ -372,19 +445,34 @@ world_modes = world.add_argument_group(
can have `--add x11-wm/fluxbox --remove gnome-base/gnome -l` to add
fluxbox, remove gnome, and list the world file contents all in one
call.
- """)
+ """,
+)
world_modes.add_argument(
- '-l', '--list', action='store_true',
- help="List the current world file contents for this domain.")
+ "-l",
+ "--list",
+ action="store_true",
+ help="List the current world file contents for this domain.",
+)
world_modes.add_argument(
- '-r', '--remove', action='append', type=atom.atom,
- help="Remove an entry from the world file. Can be specified multiple times.")
+ "-r",
+ "--remove",
+ action="append",
+ type=atom.atom,
+ help="Remove an entry from the world file. Can be specified multiple times.",
+)
world_modes.add_argument(
- '-a', '--add', action='append', type=atom.atom,
- help="Add an entry to the world file. Can be specified multiple times.")
+ "-a",
+ "--add",
+ action="append",
+ type=atom.atom,
+ help="Add an entry to the world file. Can be specified multiple times.",
+)
world.set_defaults(
- world=commandline.StoreConfigObject.lazy_load_object('pkgset', 'world', 99))
+ world=commandline.StoreConfigObject.lazy_load_object("pkgset", "world", 99)
+)
+
+
@world.bind_main_func
def world_func(options, out, err):
world_file = options.world
diff --git a/src/pkgcore/scripts/pebuild.py b/src/pkgcore/scripts/pebuild.py
index bd0c79553..0a3ce64be 100644
--- a/src/pkgcore/scripts/pebuild.py
+++ b/src/pkgcore/scripts/pebuild.py
@@ -11,15 +11,21 @@ from ..util.commandline import ArgumentParser, StoreTarget
argparser = ArgumentParser(description=__doc__, script=(__file__, __name__))
argparser.add_argument(
- 'target', action=StoreTarget,
- allow_ebuild_paths=True, allow_external_repos=True,
- help="atom or ebuild matching a pkg to execute phases from")
-argparser.add_argument('phase', nargs='+', help="phases to run")
+ "target",
+ action=StoreTarget,
+ allow_ebuild_paths=True,
+ allow_external_repos=True,
+ help="atom or ebuild matching a pkg to execute phases from",
+)
+argparser.add_argument("phase", nargs="+", help="phases to run")
phase_opts = argparser.add_argument_group("phase options")
phase_opts.add_argument(
- "--no-auto", action='store_true', default=False,
+ "--no-auto",
+ action="store_true",
+ default=False,
help="run just the specified phases; "
- "it's up to the invoker to get the order right")
+ "it's up to the invoker to get the order right",
+)
@argparser.bind_final_check
@@ -36,7 +42,7 @@ def main(options, out, err):
pkgs = options.repo.match(restriction, pkg_filter=None)
except MetadataException as e:
error = e.msg(verbosity=options.verbosity)
- argparser.error(f'{e.pkg.cpvstr}::{e.pkg.repo.repo_id}: {error}')
+ argparser.error(f"{e.pkg.cpvstr}::{e.pkg.repo.repo_id}: {error}")
if not pkgs:
argparser.error(f"no matches: {token!r}")
@@ -46,19 +52,19 @@ def main(options, out, err):
argparser.err.write(f"got multiple matches for {token!r}:")
if len(set((p.slot, p.repo) for p in pkgs)) != 1:
for p in pkgs:
- repo_id = getattr(p.repo, 'repo_id', 'unknown')
- argparser.err.write(f"{p.cpvstr}:{p.slot}::{repo_id}", prefix=' ')
+ repo_id = getattr(p.repo, "repo_id", "unknown")
+ argparser.err.write(f"{p.cpvstr}:{p.slot}::{repo_id}", prefix=" ")
argparser.err.write()
argparser.error("please refine your restriction to one match")
- repo_id = getattr(pkg.repo, 'repo_id', 'unknown')
- argparser.err.write(f"choosing {pkg.cpvstr}:{pkg.slot}::{repo_id}", prefix=' ')
+ repo_id = getattr(pkg.repo, "repo_id", "unknown")
+ argparser.err.write(f"choosing {pkg.cpvstr}:{pkg.slot}::{repo_id}", prefix=" ")
sys.stderr.flush()
kwds = {}
phase_obs = observer.phase_observer(observer.formatter_output(out), options.debug)
- phases = [x for x in options.phase if x != 'clean']
- clean = (len(phases) != len(options.phase))
+ phases = [x for x in options.phase if x != "clean"]
+ clean = len(phases) != len(options.phase)
if options.no_auto:
kwds["ignore_deps"] = True
@@ -66,14 +72,15 @@ def main(options, out, err):
phases.insert(0, "fetch")
# forcibly run test phase if selected
- force_test = 'test' in phases
- if force_test and 'test' in pkg.iuse:
- pkg.use.add('test')
+ force_test = "test" in phases
+ if force_test and "test" in pkg.iuse:
+ pkg.use.add("test")
# by default turn off startup cleans; we clean by ourselves if
# told to do so via an arg
build = domain.build_pkg(
- pkg, failed=True, clean=False, observer=phase_obs, force_test=force_test)
+ pkg, failed=True, clean=False, observer=phase_obs, force_test=force_test
+ )
if clean:
build.cleanup(force=True)
build._reload_state()
@@ -81,12 +88,14 @@ def main(options, out, err):
phase_funcs = [(p, getattr(build, p, None)) for p in phases]
unknown_phases = [p for p, func in phase_funcs if func is None]
if unknown_phases:
- argparser.error("unknown phase%s: %s" % (
- pluralism(unknown_phases), ', '.join(map(repr, unknown_phases))))
+ argparser.error(
+ "unknown phase%s: %s"
+ % (pluralism(unknown_phases), ", ".join(map(repr, unknown_phases)))
+ )
try:
for phase, func in phase_funcs:
- out.write(f'executing phase {phase}')
+ out.write(f"executing phase {phase}")
func(**kwds)
except OperationError as e:
raise ExitException(f"caught exception executing phase {phase}: {e}") from e
diff --git a/src/pkgcore/scripts/pinspect.py b/src/pkgcore/scripts/pinspect.py
index fbc40c196..49ab89e6b 100644
--- a/src/pkgcore/scripts/pinspect.py
+++ b/src/pkgcore/scripts/pinspect.py
@@ -31,27 +31,38 @@ from ..util import commandline
pkgcore_opts = commandline.ArgumentParser(domain=False, script=(__file__, __name__))
argparser = commandline.ArgumentParser(
- suppress=True, description=__doc__, parents=(pkgcore_opts,))
+ suppress=True, description=__doc__, parents=(pkgcore_opts,)
+)
subparsers = argparser.add_subparsers(description="report applets")
-pkgsets = subparsers.add_parser(
- "pkgsets", description="pkgset related introspection")
+pkgsets = subparsers.add_parser("pkgsets", description="pkgset related introspection")
mux = pkgsets.add_mutually_exclusive_group()
mux.add_argument(
- "--all", action='store_true', default=False,
- help="display info on all pkgsets")
+ "--all", action="store_true", default=False, help="display info on all pkgsets"
+)
mux.add_argument(
- "pkgsets", nargs="*", metavar="pkgset", default=[],
+ "pkgsets",
+ nargs="*",
+ metavar="pkgset",
+ default=[],
action=commandline.StoreConfigObject,
- config_type='pkgset', store_name=True,
- help="pkgset to inspect")
+ config_type="pkgset",
+ store_name=True,
+ help="pkgset to inspect",
+)
del mux
+
+
@pkgsets.bind_main_func
def pkgsets_run(opts, out, err):
if not opts.pkgsets:
if not opts.all:
- out.write(out.bold, 'available pkgset(s): ', out.reset,
- ', '.join(repr(x) for x in sorted(opts.config.pkgset)))
+ out.write(
+ out.bold,
+ "available pkgset(s): ",
+ out.reset,
+ ", ".join(repr(x) for x in sorted(opts.config.pkgset)),
+ )
return 0
else:
opts.pkgsets = sorted(opts.config.pkgset)
@@ -59,16 +70,17 @@ def pkgsets_run(opts, out, err):
for position, (set_name, pkgset) in enumerate(opts.pkgsets):
if position:
out.write()
- out.write(out.bold, 'pkgset ', repr(set_name), out.reset, ':')
- out.first_prefix.append(' ')
+ out.write(out.bold, "pkgset ", repr(set_name), out.reset, ":")
+ out.first_prefix.append(" ")
for restrict in sorted(pkgset):
out.write(restrict)
out.first_prefix.pop()
return 0
-def print_simple_histogram(data, out, format, total, sort_by_key=False,
- first=None, last=None):
+def print_simple_histogram(
+ data, out, format, total, sort_by_key=False, first=None, last=None
+):
# do the division up front...
total = float(total) / 100
@@ -83,9 +95,10 @@ def print_simple_histogram(data, out, format, total, sort_by_key=False,
data = list(data)[-last:]
for key, val in data:
- out.write(format %
- {'key': str(key), 'val': val,
- 'percent': "%2.2f%%" % (val/total,)})
+ out.write(
+ format
+ % {"key": str(key), "val": val, "percent": "%2.2f%%" % (val / total,)}
+ )
class histo_data(arghparse.ArgparseCommand):
@@ -96,41 +109,64 @@ class histo_data(arghparse.ArgparseCommand):
def bind_to_parser(self, parser):
mux = parser.add_mutually_exclusive_group()
mux.add_argument(
- "--no-final-summary", action='store_true', default=False,
- help="disable outputting a summary of data across all repos")
+ "--no-final-summary",
+ action="store_true",
+ default=False,
+ help="disable outputting a summary of data across all repos",
+ )
parser.set_defaults(repo_summary=bool(self.per_repo_summary))
if self.per_repo_summary:
mux.add_argument(
- "--no-repo-summary", dest='repo_summary',
- action='store_false',
- help="disable outputting repo summaries")
+ "--no-repo-summary",
+ dest="repo_summary",
+ action="store_false",
+ help="disable outputting repo summaries",
+ )
parser.set_defaults(no_detail=False)
if self.allow_no_detail:
mux.add_argument(
- "--no-detail", action='store_true', default=False,
- help="disable outputting a detail view of all repos")
+ "--no-detail",
+ action="store_true",
+ default=False,
+ help="disable outputting a detail view of all repos",
+ )
parser.add_argument(
- "--sort-by-name", action='store_true', default=False,
- help="sort output by name, rather then by frequency")
+ "--sort-by-name",
+ action="store_true",
+ default=False,
+ help="sort output by name, rather then by frequency",
+ )
mux = parser.add_mutually_exclusive_group()
mux.add_argument(
- "--first", action="store", type=int, default=0,
- help="show only the first N detail items")
+ "--first",
+ action="store",
+ type=int,
+ default=0,
+ help="show only the first N detail items",
+ )
mux.add_argument(
- "--last", action="store", type=int, default=0,
- help="show only the last N detail items")
+ "--last",
+ action="store",
+ type=int,
+ default=0,
+ help="show only the last N detail items",
+ )
parser.add_argument(
- "repos", metavar='repo', nargs='*',
- action=commandline.StoreRepoObject, store_name=True,
+ "repos",
+ metavar="repo",
+ nargs="*",
+ action=commandline.StoreRepoObject,
+ store_name=True,
default=commandline.CONFIG_ALL_DEFAULT,
- help="repo(s) to inspect")
+ help="repo(s) to inspect",
+ )
arghparse.ArgparseCommand.bind_to_parser(self, parser)
@@ -151,8 +187,7 @@ class histo_data(arghparse.ArgparseCommand):
if position:
out.write()
position += 1
- out.write(out.bold, "repository", out.reset, ' ',
- repr(repo_name), ':')
+ out.write(out.bold, "repository", out.reset, " ", repr(repo_name), ":")
data, repo_total = self.get_data(repo, opts)
detail_data = self.transform_data_to_detail(data)
if not opts.no_detail:
@@ -161,9 +196,14 @@ class histo_data(arghparse.ArgparseCommand):
out.write("no pkgs found")
else:
print_simple_histogram(
- detail_data, out, self.per_repo_format,
- repo_total, sort_by_key=opts.sort_by_name,
- first=opts.first, last=opts.last)
+ detail_data,
+ out,
+ self.per_repo_format,
+ repo_total,
+ sort_by_key=opts.sort_by_name,
+ first=opts.first,
+ last=opts.last,
+ )
out.first_prefix.pop()
for key, val in detail_data.items():
global_stats.setdefault(key, 0)
@@ -173,26 +213,33 @@ class histo_data(arghparse.ArgparseCommand):
if not opts.repo_summary:
continue
out.write(
- out.bold, 'summary', out.reset, ': ',
- self.per_repo_summary %
- self.transform_data_to_summary(data))
+ out.bold,
+ "summary",
+ out.reset,
+ ": ",
+ self.per_repo_summary % self.transform_data_to_summary(data),
+ )
if position > 1 and not opts.no_final_summary:
out.write()
- out.write(out.bold, 'summary', out.reset, ':')
- out.first_prefix.append(' ')
+ out.write(out.bold, "summary", out.reset, ":")
+ out.first_prefix.append(" ")
print_simple_histogram(
- global_stats, out, self.summary_format,
- total_pkgs, sort_by_key=opts.sort_by_name)
+ global_stats,
+ out,
+ self.summary_format,
+ total_pkgs,
+ sort_by_key=opts.sort_by_name,
+ )
out.first_prefix.pop()
return 0
class eapi_usage_kls(histo_data):
- per_repo_format = ("eapi: %(key)r %(val)s pkgs found, %(percent)s of the repo")
+ per_repo_format = "eapi: %(key)r %(val)s pkgs found, %(percent)s of the repo"
- summary_format = ("eapi: %(key)r %(val)s pkgs found, %(percent)s of all repos")
+ summary_format = "eapi: %(key)r %(val)s pkgs found, %(percent)s of all repos"
def get_data(self, repo, options):
eapis = {}
@@ -202,8 +249,10 @@ class eapi_usage_kls(histo_data):
eapis[str(pkg.eapi)] += 1
return eapis, pos + 1
+
eapi_usage = subparsers.add_parser(
- "eapi_usage", description="report of eapi usage for targeted repos")
+ "eapi_usage", description="report of eapi usage for targeted repos"
+)
eapi_usage.bind_class(eapi_usage_kls())
@@ -222,8 +271,10 @@ class license_usage_kls(histo_data):
data[license] += 1
return data, pos + 1
+
license_usage = subparsers.add_parser(
- "license_usage", description="report of license usage for targeted repos")
+ "license_usage", description="report of license usage for targeted repos"
+)
license_usage.bind_class(license_usage_kls())
@@ -234,14 +285,16 @@ class eclass_usage_kls(histo_data):
summary_format = "eclass: %(key)r %(val)s pkgs found, %(percent)s of all repos"
def get_data(self, repo, options):
- pos, data = 0, defaultdict(lambda:0)
+ pos, data = 0, defaultdict(lambda: 0)
for pos, pkg in enumerate(repo):
- for eclass in getattr(pkg, 'inherited', ()):
+ for eclass in getattr(pkg, "inherited", ()):
data[eclass] += 1
return data, pos + 1
+
eclass_usage = subparsers.add_parser(
- "eclass_usage", description="report of eclass usage for targeted repos")
+ "eclass_usage", description="report of eclass usage for targeted repos"
+)
eclass_usage.bind_class(eclass_usage_kls())
@@ -255,21 +308,27 @@ class mirror_usage_kls(histo_data):
data = {}
for pos, pkg in enumerate(repo):
for fetchable in iflatten_instance(pkg.fetchables, fetch.fetchable):
- for mirror in fetchable.uri.visit_mirrors(treat_default_as_mirror=False):
+ for mirror in fetchable.uri.visit_mirrors(
+ treat_default_as_mirror=False
+ ):
if isinstance(mirror, tuple):
mirror = mirror[0]
data.setdefault(mirror.mirror_name, 0)
data[mirror.mirror_name] += 1
return data, pos + 1
+
mirror_usage = subparsers.add_parser(
- "mirror_usage", description="report of SRC_URI mirror usage for targeted repos")
+ "mirror_usage", description="report of SRC_URI mirror usage for targeted repos"
+)
mirror_usage.bind_class(mirror_usage_kls())
class distfiles_usage_kls(histo_data):
- per_repo_format = "package: %(key)r %(val)s bytes, referencing %(percent)s of the unique total"
+ per_repo_format = (
+ "package: %(key)r %(val)s bytes, referencing %(percent)s of the unique total"
+ )
per_repo_summary = "unique total %(total)i bytes, sharing %(shared)i bytes"
@@ -280,11 +339,17 @@ class distfiles_usage_kls(histo_data):
def bind_to_parser(self, parser):
histo_data.bind_to_parser(self, parser)
parser.add_argument(
- "--include-nonmirrored", action='store_true', default=False,
- help="if set, nonmirrored distfiles will be included in the total")
+ "--include-nonmirrored",
+ action="store_true",
+ default=False,
+ help="if set, nonmirrored distfiles will be included in the total",
+ )
parser.add_argument(
- "--include-restricted", action='store_true', default=False,
- help="if set, fetch restricted distfiles will be included in the total")
+ "--include-restricted",
+ action="store_true",
+ default=False,
+ help="if set, fetch restricted distfiles will be included in the total",
+ )
def get_data(self, repo, options):
owners = defaultdict(set)
@@ -292,9 +357,9 @@ class distfiles_usage_kls(histo_data):
items = {}
for key, subiter in groupby(iterable, attrgetter("key")):
for pkg in subiter:
- if not options.include_restricted and 'fetch' in pkg.restrict:
+ if not options.include_restricted and "fetch" in pkg.restrict:
continue
- if not options.include_nonmirrored and 'mirror' in pkg.restrict:
+ if not options.include_nonmirrored and "mirror" in pkg.restrict:
continue
for fetchable in iflatten_instance(pkg.fetchables, fetch.fetchable):
owners[fetchable.filename].add(key)
@@ -317,26 +382,32 @@ class distfiles_usage_kls(histo_data):
distfiles_usage = subparsers.add_parser(
"distfiles_usage",
- description="report detailing distfiles space usage for targeted repos")
+ description="report detailing distfiles space usage for targeted repos",
+)
distfiles_usage.bind_class(distfiles_usage_kls())
query = subparsers.add_parser(
- "query",
- description="auxiliary access to ebuild/repo info via portageq akin api")
-_portageq.bind_parser(query, name='query')
+ "query", description="auxiliary access to ebuild/repo info via portageq akin api"
+)
+_portageq.bind_parser(query, name="query")
portageq = subparsers.add_parser(
- "portageq", description="portageq compatible interface to query commands")
+ "portageq", description="portageq compatible interface to query commands"
+)
_portageq.bind_parser(portageq, compat=True)
-profile = subparsers.add_parser(
- "profile", description="profile related querying")
+profile = subparsers.add_parser("profile", description="profile related querying")
# TODO: restrict to ebuild repos
-profile_opts = profile.add_argument_group('subcommand options')
+profile_opts = profile.add_argument_group("subcommand options")
profile_opts.add_argument(
- '-r', '--repo', metavar='REPO', help='target repo',
- action=commandline.StoreRepoObject, repo_type='config')
-inspect_profile.bind_parser(profile, 'profile')
+ "-r",
+ "--repo",
+ metavar="REPO",
+ help="target repo",
+ action=commandline.StoreRepoObject,
+ repo_type="config",
+)
+inspect_profile.bind_parser(profile, "profile")
def _bad_digest(pkg):
@@ -349,10 +420,18 @@ def _bad_digest(pkg):
digests = subparsers.add_parser(
- "digests", domain=True, description="identify what packages are missing digest info")
+ "digests", domain=True, description="identify what packages are missing digest info"
+)
digests.add_argument(
- 'repos', nargs='*', help="repo to inspect",
- action=commandline.StoreRepoObject, allow_external_repos=True, store_name=True)
+ "repos",
+ nargs="*",
+ help="repo to inspect",
+ action=commandline.StoreRepoObject,
+ allow_external_repos=True,
+ store_name=True,
+)
+
+
@digests.bind_main_func
def digest_manifest(options, out, err):
for name, repo in options.repos:
@@ -370,7 +449,7 @@ def digest_manifest(options, out, err):
if count:
if broken:
- out.write('Packages with broken digests:')
+ out.write("Packages with broken digests:")
out.first_prefix.append(" ")
out.later_prefix.append(" ")
for pkg in sorted(broken):
@@ -383,7 +462,7 @@ def digest_manifest(options, out, err):
"in the repo have bad checksum data"
)
else:
- out.write('repo has no broken digests')
+ out.write("repo has no broken digests")
else:
out.write("repo has no packages")
diff --git a/src/pkgcore/scripts/pmaint.py b/src/pkgcore/scripts/pmaint.py
index a0b4a9a11..bc930cc0a 100644
--- a/src/pkgcore/scripts/pmaint.py
+++ b/src/pkgcore/scripts/pmaint.py
@@ -29,25 +29,58 @@ from ..util import commandline
pkgcore_opts = commandline.ArgumentParser(domain=False, script=(__file__, __name__))
argparser = commandline.ArgumentParser(
- suppress=True, description=__doc__, parents=(pkgcore_opts,))
+ suppress=True, description=__doc__, parents=(pkgcore_opts,)
+)
subparsers = argparser.add_subparsers(description="general system maintenance")
-shared_options = (commandline.ArgumentParser(
- config=False, color=False, debug=False, quiet=False, verbose=False,
- version=False, domain=False, add_help=False),)
-shared_options_domain = (commandline.ArgumentParser(
- config=False, color=False, debug=False, quiet=False, verbose=False,
- version=False, domain=True, add_help=False),)
+shared_options = (
+ commandline.ArgumentParser(
+ config=False,
+ color=False,
+ debug=False,
+ quiet=False,
+ verbose=False,
+ version=False,
+ domain=False,
+ add_help=False,
+ ),
+)
+shared_options_domain = (
+ commandline.ArgumentParser(
+ config=False,
+ color=False,
+ debug=False,
+ quiet=False,
+ verbose=False,
+ version=False,
+ domain=True,
+ add_help=False,
+ ),
+)
sync = subparsers.add_parser(
- "sync", parents=shared_options,
- description="synchronize a local repository with its defined remote")
+ "sync",
+ parents=shared_options,
+ description="synchronize a local repository with its defined remote",
+)
sync.add_argument(
- 'repos', metavar='repo', nargs='*', help="repo(s) to sync",
- action=commandline.StoreRepoObject, store_name=True, repo_type='config')
+ "repos",
+ metavar="repo",
+ nargs="*",
+ help="repo(s) to sync",
+ action=commandline.StoreRepoObject,
+ store_name=True,
+ repo_type="config",
+)
sync.add_argument(
- '-f', '--force', action='store_true', default=False,
- help="force syncing to occur regardless of staleness checks")
+ "-f",
+ "--force",
+ action="store_true",
+ default=False,
+ help="force syncing to occur regardless of staleness checks",
+)
+
+
@sync.bind_main_func
def sync_main(options, out, err):
"""Update local repos to match their remotes."""
@@ -62,19 +95,18 @@ def sync_main(options, out, err):
continue
out.write(f"*** syncing {repo_name}")
ret = False
- err_msg = ''
+ err_msg = ""
# repo operations don't yet take an observer, thus flush
# output to keep lines consistent.
out.flush()
err.flush()
try:
- ret = repo.operations.sync(
- force=options.force, verbosity=options.verbosity)
+ ret = repo.operations.sync(force=options.force, verbosity=options.verbosity)
except OperationError as e:
- exc = getattr(e, '__cause__', e)
+ exc = getattr(e, "__cause__", e)
if not isinstance(exc, PkgcoreUserException):
raise
- err_msg = f': {exc}'
+ err_msg = f": {exc}"
if not ret:
out.write(f"!!! failed syncing {repo_name}{err_msg}")
failed.append(repo_name)
@@ -87,8 +119,8 @@ def sync_main(options, out, err):
total = len(succeeded) + len(failed)
if total > 1:
results = []
- succeeded = ', '.join(sorted(succeeded))
- failed = ', '.join(sorted(failed))
+ succeeded = ", ".join(sorted(succeeded))
+ failed = ", ".join(sorted(failed))
if succeeded:
results.append(f"*** synced: {succeeded}")
if failed:
@@ -100,25 +132,43 @@ def sync_main(options, out, err):
# TODO: restrict to required repo types
copy = subparsers.add_parser(
- "copy", parents=shared_options_domain,
+ "copy",
+ parents=shared_options_domain,
description="copy binpkgs between repos; primarily useful for "
- "quickpkging a livefs pkg")
+ "quickpkging a livefs pkg",
+)
copy.add_argument(
- 'target_repo', action=commandline.StoreRepoObject, repo_type='binary-raw',
- writable=True, help="repository to add packages to")
+ "target_repo",
+ action=commandline.StoreRepoObject,
+ repo_type="binary-raw",
+ writable=True,
+ help="repository to add packages to",
+)
commandline.make_query(
- copy, nargs='+', dest='query',
- help="packages matching any of these restrictions will be selected "
- "for copying")
+ copy,
+ nargs="+",
+ dest="query",
+ help="packages matching any of these restrictions will be selected " "for copying",
+)
copy_opts = copy.add_argument_group("subcommand options")
copy_opts.add_argument(
- '-s', '--source-repo', default=None, repo_type='installed',
+ "-s",
+ "--source-repo",
+ default=None,
+ repo_type="installed",
action=commandline.StoreRepoObject,
help="copy strictly from the supplied repository; else it copies from "
- "wherever a match is found")
+ "wherever a match is found",
+)
copy_opts.add_argument(
- '-i', '--ignore-existing', default=False, action='store_true',
- help="if a matching pkg already exists in the target, don't update it")
+ "-i",
+ "--ignore-existing",
+ default=False,
+ action="store_true",
+ help="if a matching pkg already exists in the target, don't update it",
+)
+
+
@copy.bind_main_func
def copy_main(options, out, err):
"""Copy pkgs between repos."""
@@ -134,12 +184,12 @@ def copy_main(options, out, err):
out.write(f"skipping existing pkg: {pkg.cpvstr}")
continue
# TODO: remove this once we limit src repos to non-virtual (pkg.provided) repos
- if not getattr(pkg, 'package_is_real', True):
+ if not getattr(pkg, "package_is_real", True):
out.write(f"skipping virtual pkg: {pkg.cpvstr}")
continue
out.write(f"copying {pkg}... ")
- if getattr(getattr(pkg, 'repo', None), 'livefs', False):
+ if getattr(getattr(pkg, "repo", None), "livefs", False):
out.write("forcing regen of contents due to src being livefs..")
new_contents = contents.contentsSet(mutable=True)
for fsobj in pkg.contents:
@@ -147,17 +197,19 @@ def copy_main(options, out, err):
new_contents.add(livefs.gen_obj(fsobj.location))
except FileNotFoundError:
err.write(
- f"warning: dropping fs obj {fsobj!r} since it doesn't exist")
+ f"warning: dropping fs obj {fsobj!r} since it doesn't exist"
+ )
except OSError as oe:
err.write(
f"failed accessing fs obj {fsobj!r}; {oe}\n"
- "aborting this copy")
+ "aborting this copy"
+ )
failures = True
new_contents = None
break
if new_contents is None:
continue
- pkg = mutated.MutatedPkg(pkg, {'contents': new_contents})
+ pkg = mutated.MutatedPkg(pkg, {"contents": new_contents})
target_repo.operations.install_or_replace(pkg).finish()
out.write("completed\n")
@@ -183,26 +235,31 @@ def update_use_local_desc(repo, observer):
f = None
def _raise_xml_error(exc):
- observer.error(f'{cat}/{pkg}: failed parsing metadata.xml: {str(exc)}')
+ observer.error(f"{cat}/{pkg}: failed parsing metadata.xml: {str(exc)}")
nonlocal ret
ret = 1
try:
f = AtomicWriteFile(use_local_desc)
- f.write(textwrap.dedent('''\
+ f.write(
+ textwrap.dedent(
+ """\
# This file is deprecated as per GLEP 56 in favor of metadata.xml.
# Please add your descriptions to your package's metadata.xml ONLY.
- # * generated automatically using pmaint *\n\n'''))
- with patch('pkgcore.log.logger.error', _raise_xml_error):
+ # * generated automatically using pmaint *\n\n"""
+ )
+ )
+ with patch("pkgcore.log.logger.error", _raise_xml_error):
for cat, pkgs in sorted(repo.packages.items()):
for pkg in sorted(pkgs):
metadata = repo._get_metadata_xml(cat, pkg)
for flag, desc in sorted(metadata.local_use.items()):
- f.write(f'{cat}/{pkg}:{flag} - {desc}\n')
+ f.write(f"{cat}/{pkg}:{flag} - {desc}\n")
f.close()
except IOError as e:
observer.error(
- f"Unable to update use.local.desc file {use_local_desc!r}: {e.strerror}")
+ f"Unable to update use.local.desc file {use_local_desc!r}: {e.strerror}"
+ )
ret = os.EX_IOERR
finally:
if f is not None:
@@ -225,7 +282,7 @@ def update_pkg_desc_index(repo, observer):
for cpv in reversed(cpvs):
try:
desc = repo[(cat, pkg, cpv.fullver)].description
- versions = ' '.join(x.fullver for x in cpvs)
+ versions = " ".join(x.fullver for x in cpvs)
f.write(f"{cat}/{pkg} {versions}: {desc}\n")
break
except MetadataException as e:
@@ -234,7 +291,8 @@ def update_pkg_desc_index(repo, observer):
f.close()
except IOError as e:
observer.error(
- f"Unable to update pkg_desc_index file {pkg_desc_index!r}: {e.strerror}")
+ f"Unable to update pkg_desc_index file {pkg_desc_index!r}: {e.strerror}"
+ )
ret = os.EX_IOERR
finally:
if f is not None:
@@ -244,45 +302,73 @@ def update_pkg_desc_index(repo, observer):
regen = subparsers.add_parser(
- "regen", parents=shared_options_domain,
- description="regenerate repository caches")
+ "regen", parents=shared_options_domain, description="regenerate repository caches"
+)
regen.add_argument(
- 'repos', metavar='repo', nargs='*',
- action=commandline.StoreRepoObject, repo_type='source-raw', allow_external_repos=True,
- help="repo(s) to regenerate caches for")
+ "repos",
+ metavar="repo",
+ nargs="*",
+ action=commandline.StoreRepoObject,
+ repo_type="source-raw",
+ allow_external_repos=True,
+ help="repo(s) to regenerate caches for",
+)
regen_opts = regen.add_argument_group("subcommand options")
regen_opts.add_argument(
- "--disable-eclass-caching", action='store_true', default=False,
+ "--disable-eclass-caching",
+ action="store_true",
+ default=False,
help="""
For regen operation, pkgcore internally turns on an optimization that
caches eclasses into individual functions thus parsing the eclass only
twice max per EBD processor. Disabling this optimization via this
option results in ~2x slower regeneration. Disable it only if you
suspect the optimization is somehow causing issues.
- """)
+ """,
+)
regen_opts.add_argument(
- "-t", "--threads", type=int,
+ "-t",
+ "--threads",
+ type=int,
default=arghparse.DelayedValue(_get_default_jobs, 100),
help="number of threads to use",
docs="""
Number of threads to use for regeneration, defaults to using all
available processors.
- """)
+ """,
+)
regen_opts.add_argument(
- "--force", action='store_true', default=False,
- help="force regeneration to occur regardless of staleness checks or repo settings")
+ "--force",
+ action="store_true",
+ default=False,
+ help="force regeneration to occur regardless of staleness checks or repo settings",
+)
regen_opts.add_argument(
- "--dir", dest='cache_dir', type=arghparse.create_dir,
- help="use separate directory to store repository caches")
+ "--dir",
+ dest="cache_dir",
+ type=arghparse.create_dir,
+ help="use separate directory to store repository caches",
+)
regen_opts.add_argument(
- "--rsync", action='store_true', default=False,
- help="perform actions necessary for rsync repos (update metadata/timestamp.chk)")
+ "--rsync",
+ action="store_true",
+ default=False,
+ help="perform actions necessary for rsync repos (update metadata/timestamp.chk)",
+)
regen_opts.add_argument(
- "--use-local-desc", action='store_true', default=False,
- help="update local USE flag description cache (profiles/use.local.desc)")
+ "--use-local-desc",
+ action="store_true",
+ default=False,
+ help="update local USE flag description cache (profiles/use.local.desc)",
+)
regen_opts.add_argument(
- "--pkg-desc-index", action='store_true', default=False,
- help="update package description cache (metadata/pkg_desc_index)")
+ "--pkg-desc-index",
+ action="store_true",
+ default=False,
+ help="update package description cache (metadata/pkg_desc_index)",
+)
+
+
@regen.bind_main_func
def regen_main(options, out, err):
"""Regenerate a repository cache."""
@@ -293,25 +379,29 @@ def regen_main(options, out, err):
if options.cache_dir is not None:
# recreate new repo object with cache dir override
cache = (md5_cache(pjoin(options.cache_dir.rstrip(os.sep), repo.repo_id)),)
- repo = ebuild_repo.tree(
- options.config, repo.config, cache=cache)
+ repo = ebuild_repo.tree(options.config, repo.config, cache=cache)
if not repo.operations.supports("regen_cache"):
out.write(f"repo {repo} doesn't support cache regeneration")
continue
- elif not getattr(repo, 'cache', False) and not options.force:
+ elif not getattr(repo, "cache", False) and not options.force:
out.write(f"skipping repo {repo}: cache disabled")
continue
start_time = time.time()
- ret.append(repo.operations.regen_cache(
- threads=options.threads, observer=observer, force=options.force,
- eclass_caching=(not options.disable_eclass_caching)))
+ ret.append(
+ repo.operations.regen_cache(
+ threads=options.threads,
+ observer=observer,
+ force=options.force,
+ eclass_caching=(not options.disable_eclass_caching),
+ )
+ )
end_time = time.time()
if options.verbosity > 0:
out.write(
- "finished %d nodes in %.2f seconds" %
- (len(repo), end_time - start_time))
+ "finished %d nodes in %.2f seconds" % (len(repo), end_time - start_time)
+ )
if options.rsync:
timestamp = pjoin(repo.location, "metadata", "timestamp.chk")
@@ -319,7 +409,9 @@ def regen_main(options, out, err):
with open(timestamp, "w") as f:
f.write(time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()))
except IOError as e:
- err.write(f"Unable to update timestamp file {timestamp!r}: {e.strerror}")
+ err.write(
+ f"Unable to update timestamp file {timestamp!r}: {e.strerror}"
+ )
ret.append(os.EX_IOERR)
if options.use_local_desc:
@@ -331,17 +423,24 @@ def regen_main(options, out, err):
env_update = subparsers.add_parser(
- "env-update", description="update env.d and ldconfig",
- parents=shared_options_domain)
+ "env-update", description="update env.d and ldconfig", parents=shared_options_domain
+)
env_update_opts = env_update.add_argument_group("subcommand options")
env_update_opts.add_argument(
- "--skip-ldconfig", action='store_true', default=False,
- help="do not update etc/ldso.conf and ld.so.cache")
+ "--skip-ldconfig",
+ action="store_true",
+ default=False,
+ help="do not update etc/ldso.conf and ld.so.cache",
+)
+
+
@env_update.bind_main_func
def env_update_main(options, out, err):
- root = getattr(options.domain, 'root', None)
+ root = getattr(options.domain, "root", None)
if root is None:
- env_update.error("domain specified lacks a root setting; is it a virtual or remote domain?")
+ env_update.error(
+ "domain specified lacks a root setting; is it a virtual or remote domain?"
+ )
out.write(f"updating env for {root!r}...")
try:
@@ -364,52 +463,67 @@ class EclassArgs(argparse.Action):
path = os.path.realpath(val)
if os.path.isdir(path):
eclasses.extend(os.listdir(path))
- elif val.endswith('.eclass'):
+ elif val.endswith(".eclass"):
eclasses.append(path)
else:
- raise argparse.ArgumentError(self, f'invalid eclass: {val!r}')
- eclasses = sorted(x for x in eclasses if x.endswith('.eclass'))
+ raise argparse.ArgumentError(self, f"invalid eclass: {val!r}")
+ eclasses = sorted(x for x in eclasses if x.endswith(".eclass"))
else:
- eclass_dir = pjoin(namespace.repo.location, 'eclass')
+ eclass_dir = pjoin(namespace.repo.location, "eclass")
try:
files = sorted(os.listdir(eclass_dir))
except FileNotFoundError:
files = []
- eclasses = [pjoin(eclass_dir, x) for x in files if x.endswith('.eclass')]
+ eclasses = [pjoin(eclass_dir, x) for x in files if x.endswith(".eclass")]
if not eclasses:
- parser.error(f'{namespace.repo.repo_id} repo: no eclasses found')
+ parser.error(f"{namespace.repo.repo_id} repo: no eclasses found")
setattr(namespace, self.dest, eclasses)
eclass = subparsers.add_parser(
- "eclass", parents=shared_options_domain,
- description="generate eclass docs")
+ "eclass", parents=shared_options_domain, description="generate eclass docs"
+)
eclass.add_argument(
- 'eclasses', nargs='*', help="eclasses to target",
- action=arghparse.Delayed, target=EclassArgs, priority=1001)
+ "eclasses",
+ nargs="*",
+ help="eclasses to target",
+ action=arghparse.Delayed,
+ target=EclassArgs,
+ priority=1001,
+)
eclass_opts = eclass.add_argument_group("subcommand options")
eclass_opts.add_argument(
- "--dir", dest='output_dir', type=arghparse.create_dir, help="output directory")
+ "--dir", dest="output_dir", type=arghparse.create_dir, help="output directory"
+)
eclass_opts.add_argument(
- "-f", "--format", help="output format",
- default='man', choices=('rst', 'man', 'html'))
+ "-f",
+ "--format",
+ help="output format",
+ default="man",
+ choices=("rst", "man", "html"),
+)
eclass_opts.add_argument(
- "-r", "--repo", help="target repository",
- action=commandline.StoreRepoObject, repo_type='ebuild-raw', allow_external_repos=True,
+ "-r",
+ "--repo",
+ help="target repository",
+ action=commandline.StoreRepoObject,
+ repo_type="ebuild-raw",
+ allow_external_repos=True,
docs="""
Target repository to search for eclasses. If no repo is specified the default repo is used.
- """)
+ """,
+)
-@eclass.bind_delayed_default(1000, 'repo')
+@eclass.bind_delayed_default(1000, "repo")
def _eclass_default_repo(namespace, attr):
"""Use default repo if none is selected."""
- repo = namespace.config.get_default('repo')
+ repo = namespace.config.get_default("repo")
setattr(namespace, attr, repo)
-@eclass.bind_delayed_default(1000, 'output_dir')
+@eclass.bind_delayed_default(1000, "output_dir")
def _eclass_default_output_dir(namespace, attr):
"""Use CWD as output dir if unset."""
setattr(namespace, attr, os.getcwd())
@@ -418,25 +532,27 @@ def _eclass_default_output_dir(namespace, attr):
@eclass.bind_main_func
def _eclass_main(options, out, err):
# suppress all eclassdoc parsing warnings
- logging.getLogger('pkgcore').setLevel(100)
+ logging.getLogger("pkgcore").setLevel(100)
failed = []
# determine output file extension
- ext_map = {'man': '5'}
+ ext_map = {"man": "5"}
ext = ext_map.get(options.format, options.format)
for path in options.eclasses:
try:
- with open(pjoin(options.output_dir, f'{os.path.basename(path)}.{ext}'), 'wt') as f:
+ with open(
+ pjoin(options.output_dir, f"{os.path.basename(path)}.{ext}"), "wt"
+ ) as f:
obj = EclassDoc(path)
- convert_func = getattr(obj, f'to_{options.format}')
+ convert_func = getattr(obj, f"to_{options.format}")
f.write(convert_func())
except ValueError as e:
# skip eclasses lacking eclassdoc support
- err.write(f'{eclass.prog}: skipping {path!r}: {e}')
+ err.write(f"{eclass.prog}: skipping {path!r}: {e}")
err.flush()
except IOError as e:
- err.write(f'{eclass.prog}: error: {path!r}: {e}')
+ err.write(f"{eclass.prog}: error: {path!r}: {e}")
err.flush()
failed.append(path)
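
The pmaint hunks above illustrate the two changes black-22.12.0 makes most often in this commit: single-quoted strings become double-quoted, and any call that no longer fits in 88 columns is exploded to one argument per line with a trailing comma. Below is a minimal sketch of that behaviour on a throwaway argparse parser; the option names echo the regen options above but carry none of pkgcore's behaviour.

import argparse

# A disposable parser purely for demonstrating black's wrapping rules.
parser = argparse.ArgumentParser(prog="example")

# Short calls stay on one line, which is why only the longer option
# definitions in this diff gained the multi-line form:
parser.add_argument("--force", action="store_true", help="force regeneration")

# Once a call exceeds 88 columns, black puts each argument on its own line,
# appends a trailing comma, and rewrites '...'-quoted strings as "...":
parser.add_argument(
    "--disable-eclass-caching",
    action="store_true",
    default=False,
    help="disable the eclass caching optimization used during cache regeneration",
)
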
diff --git a/src/pkgcore/scripts/pmerge.py b/src/pkgcore/scripts/pmerge.py
index 8ea24c8c6..30135a250 100644
--- a/src/pkgcore/scripts/pmerge.py
+++ b/src/pkgcore/scripts/pmerge.py
@@ -27,44 +27,67 @@ from ..restrictions import packages
from ..restrictions.boolean import OrRestriction
from ..util import commandline, parserestrict
-pmerge_config = ({
- 'basic': ConfigSectionFromStringDict({
- 'class': 'pkgcore.ebuild.formatter.BasicFormatter',
- }),
- 'pkgcore': ConfigSectionFromStringDict({
- 'class': 'pkgcore.ebuild.formatter.PkgcoreFormatter',
- }),
- 'portage': ConfigSectionFromStringDict({
- 'class': 'pkgcore.ebuild.formatter.PortageFormatter',
- 'default': 'True',
- }),
- 'portage-verbose': ConfigSectionFromStringDict({
- 'class': 'pkgcore.ebuild.formatter.PortageVerboseFormatter',
- }),
-},)
+pmerge_config = (
+ {
+ "basic": ConfigSectionFromStringDict(
+ {
+ "class": "pkgcore.ebuild.formatter.BasicFormatter",
+ }
+ ),
+ "pkgcore": ConfigSectionFromStringDict(
+ {
+ "class": "pkgcore.ebuild.formatter.PkgcoreFormatter",
+ }
+ ),
+ "portage": ConfigSectionFromStringDict(
+ {
+ "class": "pkgcore.ebuild.formatter.PortageFormatter",
+ "default": "True",
+ }
+ ),
+ "portage-verbose": ConfigSectionFromStringDict(
+ {
+ "class": "pkgcore.ebuild.formatter.PortageVerboseFormatter",
+ }
+ ),
+ },
+)
argparser = commandline.ArgumentParser(
- domain=True, description=__doc__, script=(__file__, __name__), global_config=pmerge_config)
+ domain=True,
+ description=__doc__,
+ script=(__file__, __name__),
+ global_config=pmerge_config,
+)
argparser.add_argument(
- nargs='*', dest='targets', metavar='TARGET',
- action=commandline.StoreTarget, use_sets='sets',
+ nargs="*",
+ dest="targets",
+ metavar="TARGET",
+ action=commandline.StoreTarget,
+ use_sets="sets",
help="extended package matching",
- docs=commandline.StoreTarget.__doc__.split('\n')[1:])
+ docs=commandline.StoreTarget.__doc__.split("\n")[1:],
+)
-operation_args = argparser.add_argument_group('operations')
+operation_args = argparser.add_argument_group("operations")
operation_options = operation_args.add_mutually_exclusive_group()
operation_options.add_argument(
- '-u', '--upgrade', action='store_true',
- help='try to upgrade installed pkgs/deps',
+ "-u",
+ "--upgrade",
+ action="store_true",
+ help="try to upgrade installed pkgs/deps",
docs="""
Try to upgrade specified targets to the latest visible version. Note
that altered package visibility due to keywording or masking can often
hide the latest versions of packages, especially for stable
configurations.
- """)
+ """,
+)
operation_options.add_argument(
- '-d', '--downgrade', action='store_true',
- help='try to downgrade installed pkgs/deps',
+ "-d",
+ "--downgrade",
+ action="store_true",
+ help="try to downgrade installed pkgs/deps",
docs="""
Try to downgrade specified targets to a lower visible version
compared to what's currently installed.
@@ -72,19 +95,24 @@ operation_options.add_argument(
Useful for reverting to the previously installed package version;
however, note that the -O/--nodeps option is generally used with this
otherwise lots of downgrades will be pulled into the resolved deptree.
- """)
+ """,
+)
operation_options.add_argument(
- '-C', '--unmerge', action='store_true',
- help='unmerge packages',
+ "-C",
+ "--unmerge",
+ action="store_true",
+ help="unmerge packages",
docs="""
Target packages for unmerging from the system.
WARNING: This does not ask for user confirmation for any targets so
it's possible to quickly break a system.
- """)
+ """,
+)
operation_options.add_argument(
- '--clean', action='store_true',
- help='remove installed packages not referenced by any target pkgs/sets',
+ "--clean",
+ action="store_true",
+ help="remove installed packages not referenced by any target pkgs/sets",
docs="""
Remove installed packages that aren't referenced by any target packages
or sets. This defaults to using the world and system sets if no targets
@@ -92,51 +120,71 @@ operation_options.add_argument(
Use with *caution*, this option used incorrectly can render your system
unusable. Note that this implies --deep.
- """)
+ """,
+)
operation_options.add_argument(
- '--list-sets', action='store_true',
- help='display the list of available package sets')
+ "--list-sets",
+ action="store_true",
+ help="display the list of available package sets",
+)
resolution_options = argparser.add_argument_group("resolver options")
resolution_options.add_argument(
- '-p', '--pretend', action='store_true',
+ "-p",
+ "--pretend",
+ action="store_true",
help="only perform the dep resolution",
docs="""
Resolve package dependencies and display the results without performing
any merges.
- """)
+ """,
+)
resolution_options.add_argument(
- '-a', '--ask', action='store_true',
+ "-a",
+ "--ask",
+ action="store_true",
help="ask for user confirmation after dep resolution",
docs="""
Perform the dependency resolution, but ask for user confirmation before
beginning the fetch/build/merge process. The choice defaults to yes so
pressing the "Enter" key will trigger acceptance.
- """)
+ """,
+)
resolution_options.add_argument(
- '-f', '--fetchonly', action='store_true',
+ "-f",
+ "--fetchonly",
+ action="store_true",
help="do only the fetch steps of the resolved plan",
docs="""
Only perform fetching of all targets from SRC_URI based on the current
USE configuration.
- """)
+ """,
+)
resolution_options.add_argument(
- '-1', '--oneshot', action='store_true',
+ "-1",
+ "--oneshot",
+ action="store_true",
help="do not record changes in the world file",
docs="""
Build and merge packages normally, but do not add any targets to the
world file. Note that this is forcibly enabled if a package set is
specified.
- """)
+ """,
+)
resolution_options.add_argument(
- '-D', '--deep', action='store_true',
- help='force the resolver to verify installed deps',
+ "-D",
+ "--deep",
+ action="store_true",
+ help="force the resolver to verify installed deps",
docs="""
Force dependency resolution across the entire dependency tree for all
specified targets.
- """)
+ """,
+)
resolution_options.add_argument(
- '-N', '--newuse', action='store_true',
+ "-N",
+ "--newuse",
+ action="store_true",
help="add installed pkgs with changed useflags to targets",
docs="""
Include installed packages with USE flag changes in the list of viable
@@ -148,100 +196,141 @@ resolution_options.add_argument(
modifications.
Note that this option implies -1/--oneshot.
- """)
+ """,
+)
resolution_options.add_argument(
- '-i', '--ignore-cycles', action='store_true',
+ "-i",
+ "--ignore-cycles",
+ action="store_true",
help="ignore unbreakable dep cycles",
docs="""
Ignore dependency cycles if they're found to be unbreakable; for
example: a depends on b, and b depends on a, with neither built.
- """)
+ """,
+)
resolution_options.add_argument(
- '--with-bdeps', action='store_true',
+ "--with-bdeps",
+ action="store_true",
help="process build deps for built packages",
docs="""
Pull in build time dependencies for built packages during dependency
resolution, by default they're ignored.
- """)
+ """,
+)
resolution_options.add_argument(
- '-O', '--nodeps', action='store_true',
- help='disable dependency resolution',
+ "-O",
+ "--nodeps",
+ action="store_true",
+ help="disable dependency resolution",
docs="""
Build and merge packages without resolving any dependencies.
- """)
+ """,
+)
resolution_options.add_argument(
- '-o', '--onlydeps', action='store_true',
- help='only merge the deps of the specified packages',
+ "-o",
+ "--onlydeps",
+ action="store_true",
+ help="only merge the deps of the specified packages",
docs="""
Build and merge only the dependencies for the packages specified.
- """)
+ """,
+)
resolution_options.add_argument(
- '-n', '--noreplace', action='store_false', dest='replace',
+ "-n",
+ "--noreplace",
+ action="store_false",
+ dest="replace",
help="don't reinstall target pkgs that are already installed",
docs="""
Skip packages that are already installed. By default when running
without this option, any specified target packages will be remerged
regardless of if they are already installed.
- """)
+ """,
+)
resolution_options.add_argument(
- '-b', '--buildpkg', action='store_true',
+ "-b",
+ "--buildpkg",
+ action="store_true",
help="build binpkgs",
docs="""
Force binary packages to be built for all merged packages.
- """)
+ """,
+)
resolution_options.add_argument(
- '-k', '--usepkg', action='store_true',
+ "-k",
+ "--usepkg",
+ action="store_true",
help="prefer to use binpkgs",
docs="""
Binary packages are preferred over ebuilds when performing dependency
resolution.
- """)
+ """,
+)
resolution_options.add_argument(
- '-K', '--usepkgonly', action='store_true',
+ "-K",
+ "--usepkgonly",
+ action="store_true",
help="use only binpkgs",
docs="""
Only binary packages are considered when performing dependency
resolution.
- """)
+ """,
+)
resolution_options.add_argument(
- '-S', '--source-only', action='store_true',
+ "-S",
+ "--source-only",
+ action="store_true",
help="use only ebuilds, no binpkgs",
docs="""
Only ebuilds are considered when performing dependency
resolution.
- """)
+ """,
+)
resolution_options.add_argument(
- '-e', '--empty', action='store_true',
+ "-e",
+ "--empty",
+ action="store_true",
help="force rebuilding of all involved packages",
docs="""
Force all targets and their dependencies to be rebuilt.
- """)
+ """,
+)
resolution_options.add_argument(
- '-x', '--exclude', dest='excludes', metavar='TARGET[,TARGET,...]',
- action=commandline.StoreTarget, separator=',',
- help='inject packages into the installed set',
+ "-x",
+ "--exclude",
+ dest="excludes",
+ metavar="TARGET[,TARGET,...]",
+ action=commandline.StoreTarget,
+ separator=",",
+ help="inject packages into the installed set",
docs="""
Comma-separated list of targets to pretend are installed.
This supports extended package globbing, e.g. ``'dev-python/*'``
equates to faking that the entire dev-python category is installed.
- """)
+ """,
+)
resolution_options.add_argument(
- '--ignore-failures', action='store_true',
- help='ignore failures while running all types of tasks',
+ "--ignore-failures",
+ action="store_true",
+ help="ignore failures while running all types of tasks",
docs="""
Skip failures during the following phases: sanity checks
(pkg_pretend), fetching, dep resolution, and (un)merging.
- """)
+ """,
+)
resolution_options.add_argument(
- '--force', action='store_true',
- dest='force',
+ "--force",
+ action="store_true",
+ dest="force",
help="force changes to a repo, regardless of if it's frozen",
docs="""
Force (un)merging on the livefs (vdb), regardless of if it's frozen.
- """)
+ """,
+)
resolution_options.add_argument(
- '--preload-vdb-state', action='store_true',
+ "--preload-vdb-state",
+ action="store_true",
help="enable preloading of the installed packages database",
docs="""
Preload the installed package database which causes the resolver to
@@ -249,21 +338,28 @@ resolution_options.add_argument(
installed packages. If disabled, it's possible for the requested action
to conflict with already installed dependencies that aren't involved in
the graph of the requested operation.
- """)
+ """,
+)
output_options = argparser.add_argument_group("output options")
output_options.add_argument(
- '--quiet-repo-display', action='store_true',
+ "--quiet-repo-display",
+ action="store_true",
help="use indexes instead of ::repo suffixes in dep resolution output",
docs="""
In the package merge list display, suppress ::repo output and instead
use index numbers to indicate which repos packages come from.
- """)
+ """,
+)
output_options.add_argument(
- '-F', '--formatter', priority=90, metavar='FORMATTER',
- action=commandline.StoreConfigObject, get_default=True,
- config_type='pmerge_formatter',
- help='output formatter to use',
+ "-F",
+ "--formatter",
+ priority=90,
+ metavar="FORMATTER",
+ action=commandline.StoreConfigObject,
+ get_default=True,
+ config_type="pmerge_formatter",
+ help="output formatter to use",
docs="""
Select an output formatter to use for text formatting of --pretend or
--ask output, currently available formatters include the following:
@@ -272,24 +368,30 @@ output_options.add_argument(
The basic formatter is the nearest to simple text output and is
intended for scripting while the portage/portage-verbose formatter
closely emulates portage output and is used by default.
- """)
+ """,
+)
debug_options = argparser.add_argument_group("resolver debugging options")
debug_options.add_argument(
- '--pdb-intercept', dest='pdb_intercept', metavar='TARGET[,TARGET,...]',
- action=commandline.StoreTarget, separator=',',
- help='trigger a pdb.set_trace() for any resolver decisions that match this restriction',
+ "--pdb-intercept",
+ dest="pdb_intercept",
+ metavar="TARGET[,TARGET,...]",
+ action=commandline.StoreTarget,
+ separator=",",
+ help="trigger a pdb.set_trace() for any resolver decisions that match this restriction",
docs="""
Comma-separated list of targets to trigger a pdb.set_trace() within resolver code for investigation.
This is primarily used for debugging the resolver implementation, being able to walk through
what it has decided and why.
- """
+ """,
)
debug_options.add_argument(
- '--disable-resolver-target-sorting', dest='force_stable_ordering_of_targets',
- action='store_false', default=True,
- help='disable stabilization of resolver graph processing',
+ "--disable-resolver-target-sorting",
+ dest="force_stable_ordering_of_targets",
+ action="store_false",
+ default=True,
+ help="disable stabilization of resolver graph processing",
docs="""
Resolution of package dependencies can grossly vary depending on which nodes you start from.
@@ -301,12 +403,13 @@ debug_options.add_argument(
be found. If a solution can't be found, then this flag should also result in no solution found.
Any deviation from this is a bug in the resolver and should be reported.
- """
+ """,
)
class AmbiguousQuery(parserestrict.ParseError):
"""Exception for multiple matches where a single match is required."""
+
def __init__(self, token, keys):
self.token = token
self.keys = keys
@@ -317,8 +420,9 @@ class AmbiguousQuery(parserestrict.ParseError):
class NoMatches(parserestrict.ParseError):
"""Exception for no matches where at least one match is required."""
+
def __init__(self, token):
- super().__init__(f'{token!r}: no matches')
+ super().__init__(f"{token!r}: no matches")
class Failure(ValueError):
@@ -354,37 +458,45 @@ def unmerge(out, err, installed_repos, targets, options, formatter, world_set=No
categories = set(pkg.category for pkg in installed)
if len(categories) > 1:
raise parserestrict.ParseError(
- "%r is in multiple categories (%s)" % (
- token, ', '.join(sorted(set(pkg.key for pkg in installed)))))
+ "%r is in multiple categories (%s)"
+ % (token, ", ".join(sorted(set(pkg.key for pkg in installed))))
+ )
matches.update(installed)
# fail out if no matches are found, otherwise just output a notification
if unknown:
- unknowns = ', '.join(map(repr, unknown))
+ unknowns = ", ".join(map(repr, unknown))
if matches:
err.write(f"Skipping unknown matches: {unknowns}\n")
else:
raise Failure(f"no matches found: {unknowns}")
if fake:
- err.write('Skipping virtual pkg%s: %s' % (
- pluralism(fake_pkgs),
- ', '.join(f'{x.versioned_atom}::{x.repo_id}' for x in fake)))
+ err.write(
+ "Skipping virtual pkg%s: %s"
+ % (
+ pluralism(fake_pkgs),
+ ", ".join(f"{x.versioned_atom}::{x.repo_id}" for x in fake),
+ )
+ )
if matches:
- out.write(out.bold, 'The following packages are to be unmerged:')
- out.prefix = [out.bold, ' * ', out.reset]
+ out.write(out.bold, "The following packages are to be unmerged:")
+ out.prefix = [out.bold, " * ", out.reset]
for pkg in matches:
out.write(pkg.cpvstr)
out.prefix = []
repo_obs = observer.repo_observer(
- observer.formatter_output(out), debug=options.debug)
+ observer.formatter_output(out), debug=options.debug
+ )
if options.pretend:
return
- if (options.ask and not formatter.ask("Would you like to unmerge these packages?")):
+ if options.ask and not formatter.ask(
+ "Would you like to unmerge these packages?"
+ ):
return
return do_unmerge(options, out, err, vdb, matches, world_set, repo_obs)
@@ -392,12 +504,10 @@ def unmerge(out, err, installed_repos, targets, options, formatter, world_set=No
def do_unmerge(options, out, err, vdb, matches, world_set, repo_obs):
if vdb.frozen:
if options.force:
- out.write(
- out.fg('red'), out.bold,
- 'warning: vdb is frozen, overriding')
+ out.write(out.fg("red"), out.bold, "warning: vdb is frozen, overriding")
vdb.frozen = False
else:
- raise Failure('vdb is frozen')
+ raise Failure("vdb is frozen")
for idx, match in enumerate(matches):
out.write(f"removing {idx + 1} of {len(matches)}: {match}")
@@ -406,8 +516,8 @@ def do_unmerge(options, out, err, vdb, matches, world_set, repo_obs):
ret = op.finish()
if not ret:
if not options.ignore_failures:
- raise Failure(f'failed unmerging {match}')
- out.write(out.fg('red'), 'failed unmerging ', match)
+ raise Failure(f"failed unmerging {match}")
+ out.write(out.fg("red"), "failed unmerging ", match)
pkg = slotatom_if_slotted(vdb, match.versioned_atom)
update_worldset(world_set, pkg, remove=True)
out.write(f"finished; removed {len(matches)} packages")
@@ -441,18 +551,23 @@ def display_failures(out, sequence, first_level=True, debug=False, _color_index=
for step in steps:
set_color("red")
if isinstance(step, list):
- display_failures(out, step, False, debug=debug, _color_index=_color_index)
- elif step[0] == 'reduce':
- out.write("removing choices involving %s" %
- ', '.join(str(x) for x in step[1]))
- elif step[0] == 'blocker':
- out.write("blocker %s failed due to %s existing" % (step[1],
- ', '.join(str(x) for x in step[2])))
- elif step[0] == 'cycle':
+ display_failures(
+ out, step, False, debug=debug, _color_index=_color_index
+ )
+ elif step[0] == "reduce":
+ out.write(
+ "removing choices involving %s" % ", ".join(str(x) for x in step[1])
+ )
+ elif step[0] == "blocker":
+ out.write(
+ "blocker %s failed due to %s existing"
+ % (step[1], ", ".join(str(x) for x in step[2]))
+ )
+ elif step[0] == "cycle":
out.write("%s cycle on %s: %s" % (step[1].mode, step[1].atom, step[2]))
- elif step[0] == 'viable' and not step[1]:
+ elif step[0] == "viable" and not step[1]:
out.write("%s: failed %s" % (step[3], step[4]))
- elif step[0] == 'choice':
+ elif step[0] == "choice":
if not step[2]:
out.write("failed due to %s" % (step[3],))
elif step[0] == "debug":
@@ -515,16 +630,18 @@ def _validate(parser, namespace):
if namespace.clean:
if namespace.sets or namespace.targets:
parser.error(
- "--clean currently cannot be used w/ any sets or targets given")
- namespace.sets = ('world', 'system')
+ "--clean currently cannot be used w/ any sets or targets given"
+ )
+ namespace.sets = ("world", "system")
namespace.deep = True
namespace.replace = False
if namespace.usepkgonly or namespace.usepkg or namespace.source_only:
parser.error(
- '--clean cannot be used with any of the following options: '
- '--usepkg --usepkgonly --source-only')
+ "--clean cannot be used with any of the following options: "
+ "--usepkg --usepkgonly --source-only"
+ )
elif namespace.usepkgonly and namespace.usepkg:
- parser.error('--usepkg is redundant when --usepkgonly is used')
+ parser.error("--usepkg is redundant when --usepkgonly is used")
elif (namespace.usepkgonly or namespace.usepkg) and namespace.source_only:
parser.error("--source-only cannot be used with --usepkg nor --usepkgonly")
elif namespace.nodeps and namespace.onlydeps:
@@ -533,15 +650,19 @@ def _validate(parser, namespace):
if namespace.sets:
unknown_sets = set(namespace.sets).difference(namespace.config.pkgset)
if unknown_sets:
- parser.error("unknown set%s: %s (available sets: %s)" % (
- pluralism(unknown_sets),
- ', '.join(sorted(map(repr, unknown_sets))),
- ', '.join(sorted(namespace.config.pkgset))))
+ parser.error(
+ "unknown set%s: %s (available sets: %s)"
+ % (
+ pluralism(unknown_sets),
+ ", ".join(sorted(map(repr, unknown_sets))),
+ ", ".join(sorted(namespace.config.pkgset)),
+ )
+ )
namespace.sets = [(x, namespace.config.pkgset[x]) for x in namespace.sets]
if namespace.upgrade or namespace.downgrade:
namespace.replace = False
if not namespace.targets and not namespace.sets:
- parser.error('please specify at least one atom or nonempty set')
+ parser.error("please specify at least one atom or nonempty set")
if namespace.newuse:
namespace.oneshot = True
@@ -578,17 +699,23 @@ def parse_target(restriction, repo, installed_repos, return_none=False):
return None
raise NoMatches(restriction)
elif len(key_matches) > 1:
- if any(isinstance(r, restricts.PackageDep) for r in iflatten_instance([restriction])):
+ if any(
+ isinstance(r, restricts.PackageDep)
+ for r in iflatten_instance([restriction])
+ ):
if len(restriction) > 1:
# drop repo specific restrictions, ebuild repos don't match installed pkgs
restriction = restriction.remove_restriction(
- restriction_types=(restricts.RepositoryDep,))
+ restriction_types=(restricts.RepositoryDep,)
+ )
# find installed package matches
- matches = {x.unversioned_atom for x in installed_repos.itermatch(restriction)}
+ matches = {
+ x.unversioned_atom for x in installed_repos.itermatch(restriction)
+ }
# try removing stub pkgs if there are multiple installed matches or none at all
- skip_categories = {'acct-group', 'acct-user', 'virtual'}
+ skip_categories = {"acct-group", "acct-user", "virtual"}
if not matches:
matches = {x for x in key_matches if x.category not in skip_categories}
elif len(matches) > 1:
@@ -609,9 +736,9 @@ def parse_target(restriction, repo, installed_repos, return_none=False):
return [packages.KeyedAndRestriction(restriction, key=key_matches.pop().key)]
-@argparser.bind_delayed_default(50, name='world')
+@argparser.bind_delayed_default(50, name="world")
def load_world(namespace, attr):
- value = namespace.config.pkgset['world']
+ value = namespace.config.pkgset["world"]
setattr(namespace, attr, value)
@@ -619,10 +746,10 @@ def display_pkgsets(out, options):
for name, kls in sorted(options.config.pkgset.items()):
if options.verbosity > 0:
out.write(name)
- out.write('\n'.join(' ' + dedent(x) for x in kls.__doc__.split('\n')))
+ out.write("\n".join(" " + dedent(x) for x in kls.__doc__.split("\n")))
out.write()
if options.verbosity > 1:
- out.write('\n'.join(' ' + str(pkg) for pkg in sorted(kls)))
+ out.write("\n".join(" " + str(pkg) for pkg in sorted(kls)))
out.write()
else:
out.write(name)
@@ -644,7 +771,8 @@ def main(options, out, err):
world_set = None
formatter = options.formatter(
- out=out, err=err,
+ out=out,
+ err=err,
unstable_arch=domain.unstable_arch,
use_expand=domain.profile.use_expand,
use_expand_hidden=domain.profile.use_expand_hidden,
@@ -653,32 +781,42 @@ def main(options, out, err):
verbosity=options.verbosity,
installed_repos=domain.all_installed_repos,
distdir=domain.distdir,
- quiet_repo_display=options.quiet_repo_display)
+ quiet_repo_display=options.quiet_repo_display,
+ )
# This mode does not care about sets and packages so bypass all that.
if options.unmerge:
# TODO: this logic should be updated to honor self.force_stable_ordering_of_targets
if not options.oneshot:
if world_set is None:
- argparser.error("disable world updating via --oneshot, "
- "or fix your configuration")
+ argparser.error(
+ "disable world updating via --oneshot, " "or fix your configuration"
+ )
try:
- unmerge(out, err, domain.installed_repos, options.targets, options, formatter, world_set)
+ unmerge(
+ out,
+ err,
+ domain.installed_repos,
+ options.targets,
+ options,
+ formatter,
+ world_set,
+ )
except (parserestrict.ParseError, Failure) as e:
argparser.error(e)
return
source_repos = domain.source_repos
installed_repos = domain.installed_repos
- pkg_type = 'ebuilds'
+ pkg_type = "ebuilds"
if options.usepkgonly:
source_repos = domain.binary_repos
- pkg_type = 'binpkgs'
+ pkg_type = "binpkgs"
elif options.usepkg:
# binary repos are checked for matches first before ebuild repos
source_repos = domain.binary_repos + domain.ebuild_repos
- pkg_type = 'ebuilds or binpkgs'
+ pkg_type = "ebuilds or binpkgs"
elif options.source_only:
source_repos = domain.ebuild_repos
@@ -695,7 +833,8 @@ def main(options, out, err):
for token, restriction in options.targets:
try:
matches = parse_target(
- restriction, source_repos.combined, installed_repos, return_none=True)
+ restriction, source_repos.combined, installed_repos, return_none=True
+ )
except parserestrict.ParseError as e:
e.token = token
argparser.error(e)
@@ -706,12 +845,17 @@ def main(options, out, err):
error_msg.append(f"use '@{token}' instead for the package set")
elif options.usepkgonly:
matches = parse_target(
- restriction, domain.ebuild_repos.combined,
- installed_repos, return_none=True)
+ restriction,
+ domain.ebuild_repos.combined,
+ installed_repos,
+ return_none=True,
+ )
if matches:
- error_msg.append("try re-running without -K/--usepkgonly "
- "enabled to rebuild from source")
- argparser.error(' -- '.join(error_msg))
+ error_msg.append(
+ "try re-running without -K/--usepkgonly "
+ "enabled to rebuild from source"
+ )
+ argparser.error(" -- ".join(error_msg))
else:
atoms.extend(matches)
@@ -725,18 +869,20 @@ def main(options, out, err):
if options.clean and not options.oneshot:
if world_set is None:
- argparser.error("disable world updating via --oneshot, or fix your configuration")
+ argparser.error(
+ "disable world updating via --oneshot, or fix your configuration"
+ )
extra_kwargs = {}
if options.empty:
- extra_kwargs['resolver_cls'] = resolver.empty_tree_merge_plan
+ extra_kwargs["resolver_cls"] = resolver.empty_tree_merge_plan
if options.debug:
- extra_kwargs['debug'] = True
+ extra_kwargs["debug"] = True
# XXX: This should recurse on deep
if options.newuse:
- out.write(out.bold, ' * ', out.reset, 'Scanning for changed USE...')
- out.title('Scanning for changed USE...')
+ out.write(out.bold, " * ", out.reset, "Scanning for changed USE...")
+ out.title("Scanning for changed USE...")
for inst_pkg in installed_repos.itermatch(OrRestriction(*atoms)):
src_pkgs = source_repos.match(inst_pkg.versioned_atom)
if src_pkgs:
@@ -745,8 +891,9 @@ def main(options, out, err):
src_iuse = src_pkg.iuse_stripped
inst_flags = inst_iuse.intersection(inst_pkg.use)
src_flags = src_iuse.intersection(src_pkg.use)
- if inst_flags.symmetric_difference(src_flags) or \
- inst_iuse.symmetric_difference(src_iuse):
+ if inst_flags.symmetric_difference(
+ src_flags
+ ) or inst_iuse.symmetric_difference(src_iuse):
atoms.append(src_pkg.unversioned_atom)
excludes = [restriction for token, restriction in options.excludes]
@@ -755,24 +902,30 @@ def main(options, out, err):
if excludes:
injected_repo = RestrictionRepo(
- repo_id='injected', restrictions=excludes, frozen=True, livefs=True)
+ repo_id="injected", restrictions=excludes, frozen=True, livefs=True
+ )
installed_repos = injected_repo + installed_repos
-# left intentionally in place for ease of debugging.
-# from guppy import hpy
-# hp = hpy()
-# hp.setrelheap()
+ # left intentionally in place for ease of debugging.
+ # from guppy import hpy
+ # hp = hpy()
+ # hp.setrelheap()
- extra_kwargs['pdb_intercept'] = tuple(x[1] for x in options.pdb_intercept)
+ extra_kwargs["pdb_intercept"] = tuple(x[1] for x in options.pdb_intercept)
resolver_inst = options.resolver_kls(
- vdbs=installed_repos, dbs=source_repos,
- verify_vdb=options.deep, nodeps=options.nodeps,
- drop_cycles=options.ignore_cycles, force_replace=options.replace,
- process_built_depends=options.with_bdeps, **extra_kwargs)
+ vdbs=installed_repos,
+ dbs=source_repos,
+ verify_vdb=options.deep,
+ nodeps=options.nodeps,
+ drop_cycles=options.ignore_cycles,
+ force_replace=options.replace,
+ process_built_depends=options.with_bdeps,
+ **extra_kwargs,
+ )
if options.preload_vdb_state:
- out.write(out.bold, ' * ', out.reset, 'Preloading vdb... ')
+ out.write(out.bold, " * ", out.reset, "Preloading vdb... ")
vdb_time = time()
resolver_inst.load_vdb_state()
vdb_time = time() - vdb_time
@@ -786,12 +939,12 @@ def main(options, out, err):
failures = []
resolve_time = time()
if sys.stdout.isatty():
- out.title('Resolving...')
- out.write(out.bold, ' * ', out.reset, 'Resolving...')
+ out.title("Resolving...")
+ out.write(out.bold, " * ", out.reset, "Resolving...")
out.flush()
ret = resolver_inst.add_atoms(atoms, finalize=True)
while ret:
- out.error('resolution failed')
+ out.error("resolution failed")
restrict = ret[0][0]
just_failures = reduce_to_failures(ret[1])
display_failures(out, just_failures, debug=options.debug)
@@ -806,10 +959,10 @@ def main(options, out, err):
if failures:
out.write()
- out.write('Failures encountered:')
+ out.write("Failures encountered:")
for restrict in failures:
out.error(f"failed '{restrict}'")
- out.write('potentials:')
+ out.write("potentials:")
match_count = 0
for r in get_raw_repos(source_repos):
l = r.match(restrict)
@@ -825,7 +978,7 @@ def main(options, out, err):
resolver_inst.free_caches()
if options.clean:
- out.write(out.bold, ' * ', out.reset, 'Packages to be removed:')
+ out.write(out.bold, " * ", out.reset, "Packages to be removed:")
vset = set(installed_repos.real.combined)
len_vset = len(vset)
vset.difference_update(x.pkg for x in resolver_inst.state.iter_ops(True))
@@ -834,8 +987,10 @@ def main(options, out, err):
out.write(f"Remove {x}")
out.write()
if wipes:
- out.write("removing %i packages of %i installed, %0.2f%%." %
- (len(wipes), len_vset, 100*(len(wipes)/float(len_vset))))
+ out.write(
+ "removing %i packages of %i installed, %0.2f%%."
+ % (len(wipes), len_vset, 100 * (len(wipes) / float(len_vset)))
+ )
else:
out.write("no packages to remove")
if options.pretend:
@@ -845,27 +1000,32 @@ def main(options, out, err):
return 1
out.write()
repo_obs = observer.repo_observer(
- observer.formatter_output(out), debug=options.debug)
- do_unmerge(options, out, err, installed_repos.real.combined, wipes, world_set, repo_obs)
+ observer.formatter_output(out), debug=options.debug
+ )
+ do_unmerge(
+ options, out, err, installed_repos.real.combined, wipes, world_set, repo_obs
+ )
return 0
if options.debug:
out.write()
- out.write(out.bold, ' * ', out.reset, 'debug: all ops')
+ out.write(out.bold, " * ", out.reset, "debug: all ops")
out.first_prefix.append(" ")
plan_len = len(str(len(resolver_inst.state.plan)))
for pos, op in enumerate(resolver_inst.state.plan):
- out.write(str(pos + 1).rjust(plan_len), ': ', str(op))
+ out.write(str(pos + 1).rjust(plan_len), ": ", str(op))
out.first_prefix.pop()
- out.write(out.bold, ' * ', out.reset, 'debug: end all ops')
+ out.write(out.bold, " * ", out.reset, "debug: end all ops")
out.write()
changes = resolver_inst.state.ops(only_real=True)
build_obs = observer.phase_observer(
- observer.formatter_output(out), debug=options.debug)
+ observer.formatter_output(out), debug=options.debug
+ )
repo_obs = observer.repo_observer(
- observer.formatter_output(out), debug=options.debug)
+ observer.formatter_output(out), debug=options.debug
+ )
# show pkgs to merge in selected format
if (options.ask or options.pretend) and changes:
@@ -874,8 +1034,12 @@ def main(options, out, err):
formatter.end()
if vdb_time:
- out.write(out.bold, 'Took %.2f' % (vdb_time,), out.reset,
- ' seconds to preload vdb state')
+ out.write(
+ out.bold,
+ "Took %.2f" % (vdb_time,),
+ out.reset,
+ " seconds to preload vdb state",
+ )
if changes:
if not options.fetchonly:
@@ -889,24 +1053,37 @@ def main(options, out, err):
sanity_failures = run_sanity_checks((x.pkg for x in changes), domain)
if sanity_failures:
for errors in sanity_failures.values():
- out.write('\n'.join(e.msg(verbosity=options.verbosity) for e in errors))
+ out.write(
+ "\n".join(e.msg(verbosity=options.verbosity) for e in errors)
+ )
if options.verbosity > 0:
out.write()
if options.ignore_failures:
out.write(
- out.fg('red'), out.bold, "!!! ",
- out.reset, "Skipping failed sanity checks...")
+ out.fg("red"),
+ out.bold,
+ "!!! ",
+ out.reset,
+ "Skipping failed sanity checks...",
+ )
else:
out.write(
- out.fg('red'), out.bold, "!!! ",
- out.reset, "Sanity checks failed, exiting...")
+ out.fg("red"),
+ out.bold,
+ "!!! ",
+ out.reset,
+ "Sanity checks failed, exiting...",
+ )
return 1
else:
out.write()
if options.debug:
out.write(
- out.bold, " * ", out.reset,
- "finished sanity checks in %.2f seconds" % (time() - start_time))
+ out.bold,
+ " * ",
+ out.reset,
+ "finished sanity checks in %.2f seconds" % (time() - start_time),
+ )
out.write()
elif options.verbosity > 0:
# show skipped virtuals
@@ -917,8 +1094,9 @@ def main(options, out, err):
virtual_pkgs.add(sorted(matches)[-1])
if virtual_pkgs:
out.write(
- "Skipping virtual pkgs:\n%s\n" % '\n'.join(
- str(x.versioned_atom) for x in virtual_pkgs))
+ "Skipping virtual pkgs:\n%s\n"
+ % "\n".join(str(x.versioned_atom) for x in virtual_pkgs)
+ )
out.write("Nothing to merge.")
return
@@ -926,15 +1104,18 @@ def main(options, out, err):
if options.pretend:
if options.verbosity > 0:
out.write(
- out.bold, ' * ', out.reset,
- "resolver plan required %i ops (%.2f seconds)" %
- (len(resolver_inst.state.plan), resolve_time))
+ out.bold,
+ " * ",
+ out.reset,
+ "resolver plan required %i ops (%.2f seconds)"
+ % (len(resolver_inst.state.plan), resolve_time),
+ )
return
- action = 'merge'
+ action = "merge"
if options.fetchonly:
- action = 'fetch'
- if (options.ask and not formatter.ask(f"Would you like to {action} these packages?")):
+ action = "fetch"
+ if options.ask and not formatter.ask(f"Would you like to {action} these packages?"):
return
change_count = len(changes)
@@ -948,8 +1129,10 @@ def main(options, out, err):
cleanup = []
- out.write(f"\nProcessing {count + 1} of {change_count}: "
- f"{op.pkg.cpvstr}::{op.pkg.repo}")
+ out.write(
+ f"\nProcessing {count + 1} of {change_count}: "
+ f"{op.pkg.cpvstr}::{op.pkg.repo}"
+ )
out.title(f"{count + 1}/{change_count}: {op.pkg.cpvstr}")
if op.desc != "remove":
cleanup.append(op.pkg.release_cached_data)
@@ -958,7 +1141,9 @@ def main(options, out, err):
out.write("Forcing a clean of workdir")
pkg_ops = domain.pkg_operations(op.pkg, observer=build_obs)
- out.write(f"\n{len(op.pkg.distfiles)} file{pluralism(op.pkg.distfiles)} required-")
+ out.write(
+ f"\n{len(op.pkg.distfiles)} file{pluralism(op.pkg.distfiles)} required-"
+ )
if not pkg_ops.run_if_supported("fetch", or_return=True):
out.error(f"fetching failed for {op.pkg.cpvstr}")
if not options.ignore_failures:
@@ -1002,7 +1187,9 @@ def main(options, out, err):
if op.old_pkg == pkg:
out.write(f">>> Reinstalling {pkg.cpvstr}")
else:
- out.write(f">>> Replacing {op.old_pkg.cpvstr} with {pkg.cpvstr}")
+ out.write(
+ f">>> Replacing {op.old_pkg.cpvstr} with {pkg.cpvstr}"
+ )
i = domain.replace_pkg(op.old_pkg, pkg, repo_obs)
cleanup.append(op.old_pkg.release_cached_data)
else:
@@ -1030,23 +1217,24 @@ def main(options, out, err):
if world_set is not None:
if op.desc == "remove":
- out.write(f'>>> Removing {op.pkg.cpvstr} from world file')
+ out.write(f">>> Removing {op.pkg.cpvstr} from world file")
removal_pkg = slotatom_if_slotted(
- source_repos.combined, op.pkg.versioned_atom)
+ source_repos.combined, op.pkg.versioned_atom
+ )
update_worldset(world_set, removal_pkg, remove=True)
elif not options.oneshot and any(x.match(op.pkg) for x in atoms):
if not (options.upgrade or options.downgrade):
- out.write(f'>>> Adding {op.pkg.cpvstr} to world file')
+ out.write(f">>> Adding {op.pkg.cpvstr} to world file")
add_pkg = slotatom_if_slotted(
- source_repos.combined, op.pkg.versioned_atom)
+ source_repos.combined, op.pkg.versioned_atom
+ )
update_worldset(world_set, add_pkg)
-
-# again... left in place for ease of debugging.
-# except KeyboardInterrupt:
-# import pdb;pdb.set_trace()
-# else:
-# import pdb;pdb.set_trace()
+ # again... left in place for ease of debugging.
+ # except KeyboardInterrupt:
+ # import pdb;pdb.set_trace()
+ # else:
+ # import pdb;pdb.set_trace()
finally:
pass
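
The pmerge_config hunk at the top of this file also shows black's "magic trailing comma": a trailing comma after the last element of a collection forces black to keep that collection exploded one element per line, which is why the nested ConfigSectionFromStringDict dicts and the enclosing one-element tuple all spread across multiple lines. A small sketch using plain dict literals rather than pkgcore's config objects:

# The trailing comma after the last key is black's "magic trailing comma";
# black keeps this literal one element per line even though it would fit
# within 88 columns on a single line.
config = {
    "basic": "BasicFormatter",
    "portage": "PortageFormatter",
}

# Without the trailing comma, black collapses the equivalent mapping:
defaults = {"basic": "BasicFormatter", "portage": "PortageFormatter"}
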
diff --git a/src/pkgcore/scripts/pplugincache.py b/src/pkgcore/scripts/pplugincache.py
index e649b1aa4..09f54a7b0 100644
--- a/src/pkgcore/scripts/pplugincache.py
+++ b/src/pkgcore/scripts/pplugincache.py
@@ -8,13 +8,21 @@ from .. import plugin, plugins
from ..util import commandline
argparser = commandline.ArgumentParser(
- config=False, domain=False, color=False,
- description=__doc__, script=(__file__, __name__))
+ config=False,
+ domain=False,
+ color=False,
+ description=__doc__,
+ script=(__file__, __name__),
+)
argparser.add_argument(
- "packages", nargs="*", action='store', default=[plugins],
+ "packages",
+ nargs="*",
+ action="store",
+ default=[plugins],
type=partial(commandline.python_namespace_type, module=True),
help="python namespace(s) to regenerate plugins for. If none are "
- "specified, pkgcore.plugins is updated")
+ "specified, pkgcore.plugins is updated",
+)
@argparser.bind_main_func
@@ -22,5 +30,5 @@ def main(options, out, err):
"""Update caches."""
for package in stable_unique(options.packages):
if options.verbosity >= 0:
- out.write(f'updating cache: {package.__name__}')
+ out.write(f"updating cache: {package.__name__}")
plugin.initialize_cache(package, force=True)
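
The pquery hunks below (around stringify_attr and format_attr) show a third recurring pattern: black inserts a blank line before and after a def nested inside an if/else branch. A minimal sketch of that rule follows; the enclosing describe() function and its item argument are invented for illustration, and only the _format shape mirrors the diff.

def describe(attr, node):
    # black-22.12.0 surrounds each nested ``def`` with blank lines, as in the
    # pquery hunks below.
    if attr == "files":

        def _format(item):
            return item.filename

    else:

        def _format(item):
            return " ".join(item.uri)

    return _format(node)
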
diff --git a/src/pkgcore/scripts/pquery.py b/src/pkgcore/scripts/pquery.py
index eaba242f3..7a8922bad 100644
--- a/src/pkgcore/scripts/pquery.py
+++ b/src/pkgcore/scripts/pquery.py
@@ -40,13 +40,13 @@ class DataSourceRestriction(values.base):
self.restriction = childrestriction
def __str__(self):
- return f'DataSourceRestriction: {self.restriction} negate={self.negate}'
+ return f"DataSourceRestriction: {self.restriction} negate={self.negate}"
def __repr__(self):
if self.negate:
- string = '<%s restriction=%r negate @%#8x>'
+ string = "<%s restriction=%r negate @%#8x>"
else:
- string = '<%s restriction=%r @%#8x>'
+ string = "<%s restriction=%r @%#8x>"
return string % (self.__class__.__name__, self.restriction, id(self))
def match(self, value):
@@ -55,55 +55,55 @@ class DataSourceRestriction(values.base):
__hash__ = object.__hash__
-dep_attrs = ['bdepend', 'depend', 'rdepend', 'pdepend', 'idepend']
+dep_attrs = ["bdepend", "depend", "rdepend", "pdepend", "idepend"]
metadata_attrs = dep_attrs
-dep_attrs += list(f'raw_{x}' for x in dep_attrs)
-dep_formatted_attrs = dep_attrs + ['restrict']
-dep_formatted_attrs = frozenset(dep_attrs + ['restrict'])
+dep_attrs += list(f"raw_{x}" for x in dep_attrs)
+dep_formatted_attrs = dep_attrs + ["restrict"]
+dep_formatted_attrs = frozenset(dep_attrs + ["restrict"])
dep_attrs = tuple(sorted(dep_attrs))
metadata_attrs += [
- 'defined_phases',
- 'description',
- 'eapi',
- 'fetchables',
- 'distfiles',
- 'homepage',
- 'inherited',
- 'iuse',
- 'keywords',
- 'license',
- 'properties',
- 'required_use',
- 'restrict',
- 'slot',
- 'subslot',
- 'use',
+ "defined_phases",
+ "description",
+ "eapi",
+ "fetchables",
+ "distfiles",
+ "homepage",
+ "inherited",
+ "iuse",
+ "keywords",
+ "license",
+ "properties",
+ "required_use",
+ "restrict",
+ "slot",
+ "subslot",
+ "use",
]
metadata_attrs = tuple(sorted(metadata_attrs))
printable_attrs = tuple(dep_formatted_attrs) + metadata_attrs
printable_attrs += (
- 'all',
- 'alldepends',
- 'allmetadata',
- 'category',
- 'cbuild',
- 'chost',
- 'ctarget',
- 'environment',
- 'files',
- 'fullver',
- 'longdescription',
- 'maintainers',
- 'package',
- 'path',
- 'raw_alldepends',
- 'repo',
- 'revision',
- 'source_repository',
- 'uris',
- 'version',
+ "all",
+ "alldepends",
+ "allmetadata",
+ "category",
+ "cbuild",
+ "chost",
+ "ctarget",
+ "environment",
+ "files",
+ "fullver",
+ "longdescription",
+ "maintainers",
+ "package",
+ "path",
+ "raw_alldepends",
+ "repo",
+ "revision",
+ "source_repository",
+ "uris",
+ "version",
)
printable_attrs = tuple(sorted(set(printable_attrs)))
@@ -111,47 +111,51 @@ printable_attrs = tuple(sorted(set(printable_attrs)))
def stringify_attr(config, pkg, attr):
"""Grab a package attr and convert it to a string."""
# config is currently unused but may affect display in the future.
- if attr in ('files', 'uris'):
- data = get_pkg_attr(pkg, 'fetchables')
+ if attr in ("files", "uris"):
+ data = get_pkg_attr(pkg, "fetchables")
if data is None:
- return 'MISSING'
- if attr == 'files':
+ return "MISSING"
+ if attr == "files":
+
def _format(node):
return node.filename
+
else:
+
def _format(node):
- return ' '.join(node.uri)
+ return " ".join(node.uri)
+
return conditionals.stringify_boolean(data, _format)
- if attr == 'use':
+ if attr == "use":
# Combine a list of all enabled (including irrelevant) and all
# available flags into a "enabled -disabled" style string.
- use = set(get_pkg_attr(pkg, 'use', ()))
- iuse = get_pkg_attr(pkg, 'iuse_stripped', ())
- result = sorted(iuse & use) + sorted('-' + val for val in (iuse - use))
- return ' '.join(result)
+ use = set(get_pkg_attr(pkg, "use", ()))
+ iuse = get_pkg_attr(pkg, "iuse_stripped", ())
+ result = sorted(iuse & use) + sorted("-" + val for val in (iuse - use))
+ return " ".join(result)
value = get_pkg_attr(pkg, attr)
if value is None:
- return 'MISSING'
+ return "MISSING"
- if attr in ('iuse', 'properties', 'defined_phases', 'inherited'):
- return ' '.join(sorted(str(v) for v in value))
- if attr in ('maintainers', 'homepage'):
- return ' '.join(str(v) for v in value)
- if attr == 'longdescription':
+ if attr in ("iuse", "properties", "defined_phases", "inherited"):
+ return " ".join(sorted(str(v) for v in value))
+ if attr in ("maintainers", "homepage"):
+ return " ".join(str(v) for v in value)
+ if attr == "longdescription":
return str(value)
- if attr == 'keywords':
- return ' '.join(sorted(value, key=lambda x: x.lstrip("~")))
- if attr == 'distfiles':
+ if attr == "keywords":
+ return " ".join(sorted(value, key=lambda x: x.lstrip("~")))
+ if attr == "distfiles":
# collapse depsets for raw repo pkgs -- no USE flags are enabled
if isinstance(value, conditionals.DepSet):
value = value.evaluate_depset([])
- return ' '.join(value)
- if attr == 'environment':
+ return " ".join(value)
+ if attr == "environment":
return value.text_fileobj().read()
- if attr == 'repo':
- return str(get_pkg_attr(value, 'repo_id', 'no repo id'))
+ if attr == "repo":
+ return str(get_pkg_attr(value, "repo_id", "no repo id"))
# hackish.
return str(value)
@@ -196,39 +200,40 @@ def format_depends(out, node, func=_default_formatter):
def _internal_format_depends(out, node, func):
prefix = None
if isinstance(node, boolean.OrRestriction):
- prefix = '|| ('
+ prefix = "|| ("
children = node.restrictions
- elif (isinstance(node, boolean.AndRestriction) and not
- isinstance(node, atom.atom)):
- prefix = '('
+ elif isinstance(node, boolean.AndRestriction) and not isinstance(node, atom.atom):
+ prefix = "("
children = node.restrictions
elif isinstance(node, packages.Conditional):
assert len(node.restriction.vals) == 1
- prefix = '%s%s? (' % (node.restriction.negate and '!' or '',
- list(node.restriction.vals)[0])
+ prefix = "%s%s? (" % (
+ node.restriction.negate and "!" or "",
+ list(node.restriction.vals)[0],
+ )
children = node.payload
if prefix:
children = list(children)
if len(children) == 1:
- out.write(prefix, ' ', autoline=False)
- out.first_prefix.append(' ')
+ out.write(prefix, " ", autoline=False)
+ out.first_prefix.append(" ")
newline = _internal_format_depends(out, children[0], func)
out.first_prefix.pop()
if newline:
out.write()
- out.write(')', autoline=False)
+ out.write(")", autoline=False)
return True
else:
- out.write(' )', autoline=False)
+ out.write(" )", autoline=False)
return False
else:
out.write(prefix)
- out.first_prefix.append(' ')
+ out.first_prefix.append(" ")
for child in children:
_internal_format_depends(out, child, func)
out.write()
out.first_prefix.pop()
- out.write(')', autoline=False)
+ out.write(")", autoline=False)
return True
else:
return func(out, node)
@@ -240,45 +245,50 @@ def format_attr(config, out, pkg, attr):
if attr in dep_formatted_attrs:
data = get_pkg_attr(pkg, attr)
if data is None:
- out.write('MISSING')
+ out.write("MISSING")
else:
- out.first_prefix.append(' ')
+ out.first_prefix.append(" ")
if config.highlight_dep:
+
def _format(out, node):
for highlight in config.highlight_dep:
if highlight.intersects(node):
- out.write(out.bold, out.fg('cyan'), node,
- autoline=False)
+ out.write(out.bold, out.fg("cyan"), node, autoline=False)
return
out.write(node, autoline=False)
+
format_depends(out, data, _format)
else:
format_depends(out, data)
out.first_prefix.pop()
out.write()
- elif attr in ('files', 'uris'):
- data = get_pkg_attr(pkg, 'fetchables')
+ elif attr in ("files", "uris"):
+ data = get_pkg_attr(pkg, "fetchables")
if data is None:
- out.write('MISSING')
+ out.write("MISSING")
return
- if attr == 'files':
+ if attr == "files":
+
def _format(out, node):
out.write(node.filename, autoline=False)
+
else:
+
def _format(out, node):
if not node.uri:
return False
if len(node.uri) == 1:
out.write(next(iter(node.uri)), autoline=False)
return False
- out.write('|| (')
- out.first_prefix.append(' ')
+ out.write("|| (")
+ out.first_prefix.append(" ")
for uri in node.uri:
out.write(uri)
out.first_prefix.pop()
- out.write(')', autoline=False)
+ out.write(")", autoline=False)
return True
- out.first_prefix.append(' ')
+
+ out.first_prefix.append(" ")
format_depends(out, data, _format)
out.first_prefix.pop()
out.write()
@@ -289,63 +299,74 @@ def format_attr(config, out, pkg, attr):
def print_package(options, out, err, pkg):
"""Print a package."""
if options.verbosity > 0:
- green = out.fg('green')
- out.write(out.bold, green, ' * ', out.fg(), pkg.cpvstr)
+ green = out.fg("green")
+ out.write(out.bold, green, " * ", out.fg(), pkg.cpvstr)
out.wrap = True
- out.later_prefix = [' ']
+ out.later_prefix = [" "]
for attr in options.attr:
- out.write(green, f' {attr}: ', out.fg(), autoline=False)
+ out.write(green, f" {attr}: ", out.fg(), autoline=False)
format_attr(options, out, pkg, attr)
for revdep in options.print_revdep:
for name in dep_attrs:
depset = get_pkg_attr(pkg, name)
- find_cond = getattr(depset, 'find_cond_nodes', None)
+ find_cond = getattr(depset, "find_cond_nodes", None)
if find_cond is None:
out.write(
- green, ' revdep: ', out.fg(), name, ' on ',
- str(revdep))
+ green, " revdep: ", out.fg(), name, " on ", str(revdep)
+ )
continue
for key, restricts in depset.find_cond_nodes(depset.restrictions, True):
if not restricts and key.intersects(revdep):
out.write(
- green, ' revdep: ', out.fg(), name, ' on ',
- autoline=False)
+ green,
+ " revdep: ",
+ out.fg(),
+ name,
+ " on ",
+ autoline=False,
+ )
if key == revdep:
# this is never reached...
out.write(out.bold, str(revdep))
else:
- out.write(
- str(revdep), ' through dep ', out.bold,
- str(key))
+ out.write(str(revdep), " through dep ", out.bold, str(key))
for key, restricts in depset.node_conds.items():
if key.intersects(revdep):
out.write(
- green, ' revdep: ', out.fg(), name, ' on ',
- autoline=False)
+ green,
+ " revdep: ",
+ out.fg(),
+ name,
+ " on ",
+ autoline=False,
+ )
if key == revdep:
- out.write(
- out.bold, str(revdep), out.reset,
- autoline=False)
+ out.write(out.bold, str(revdep), out.reset, autoline=False)
else:
out.write(
- str(revdep), ' through dep ', out.bold,
- str(key), out.reset, autoline=False)
- out.write(' if USE matches one of:')
+ str(revdep),
+ " through dep ",
+ out.bold,
+ str(key),
+ out.reset,
+ autoline=False,
+ )
+ out.write(" if USE matches one of:")
for r in restricts:
- out.write(' ', str(r))
+ out.write(" ", str(r))
out.write()
out.later_prefix = []
out.wrap = False
elif options.one_attr:
if options.atom:
- out.write('=', autoline=False)
+ out.write("=", autoline=False)
if options.atom or options.cpv:
out.write(pkg.cpvstr, autoline=False)
if options.display_slot:
- out.write(':', pkg.slot, autoline=False)
+ out.write(":", pkg.slot, autoline=False)
if options.display_repo:
- out.write('::', pkg.repo.repo_id, autoline=False)
- out.write('|', autoline=False)
+ out.write("::", pkg.repo.repo_id, autoline=False)
+ out.write("|", autoline=False)
out.write(stringify_attr(options, pkg, options.one_attr))
else:
printed_something = False
@@ -353,33 +374,35 @@ def print_package(options, out, err, pkg):
if (not options.contents) or options.cpv:
printed_something = True
if options.atom:
- out.write('=')
+ out.write("=")
out.write(pkg.cpvstr)
if options.display_slot:
- out.write(':', pkg.slot)
+ out.write(":", pkg.slot)
if options.display_repo:
- out.write('::', pkg.repo.repo_id)
+ out.write("::", pkg.repo.repo_id)
for attr in options.attr:
if printed_something:
- out.write(' ')
+ out.write(" ")
printed_something = True
attr_str = stringify_attr(options, pkg, attr)
out.write(f'{attr}="{attr_str}"')
for revdep in options.print_revdep:
for name in dep_attrs:
depset = get_pkg_attr(pkg, name)
- if getattr(depset, 'find_cond_nodes', None) is None:
+ if getattr(depset, "find_cond_nodes", None) is None:
# TODO maybe be smarter here? (this code is
# triggered by virtuals currently).
- out.write(f' {name} on {revdep}')
+ out.write(f" {name} on {revdep}")
continue
for key, restricts in depset.find_cond_nodes(depset.restrictions, True):
if not restricts and key.intersects(revdep):
- out.write(f' {name} on {revdep} through {key}')
+ out.write(f" {name} on {revdep} through {key}")
for key, restricts in depset.node_conds.items():
if key.intersects(revdep):
- restricts = ' or '.join(map(str, restricts))
- out.write(f' {name} on {revdep} through {key} if USE {restricts},')
+ restricts = " or ".join(map(str, restricts))
+ out.write(
+ f" {name} on {revdep} through {key} if USE {restricts},"
+ )
# If we printed anything at all print the newline now
out.autoline = True
if printed_something:
@@ -387,65 +410,71 @@ def print_package(options, out, err, pkg):
if options.contents:
color = {
- fs_module.fsDir: [out.bold, out.fg('blue')],
- fs_module.fsLink: [out.bold, out.fg('cyan')],
+ fs_module.fsDir: [out.bold, out.fg("blue")],
+ fs_module.fsLink: [out.bold, out.fg("cyan")],
}
- for obj in sorted(obj for obj in get_pkg_attr(pkg, 'contents', ())):
+ for obj in sorted(obj for obj in get_pkg_attr(pkg, "contents", ())):
if options.color:
out.write(*(color.get(obj.__class__, []) + [obj] + [out.reset]))
else:
- out.write(f'{obj!r}')
+ out.write(f"{obj!r}")
if options.size:
size = 0
files = 0
- for location in (obj.location for obj in get_pkg_attr(pkg, 'contents', ())):
+ for location in (obj.location for obj in get_pkg_attr(pkg, "contents", ())):
files += 1
size += os.lstat(location).st_size
- out.write(f'Total files: {files}')
- out.write(f'Total size: {sizeof_fmt(size)}')
+ out.write(f"Total files: {files}")
+ out.write(f"Total size: {sizeof_fmt(size)}")
def print_packages_noversion(options, out, err, pkgs):
"""Print a summary of all versions for a single package."""
if options.verbosity > 0:
- green = out.fg('green')
- out.write(out.bold, green, ' * ', out.fg(), pkgs[0].key)
+ green = out.fg("green")
+ out.write(out.bold, green, " * ", out.fg(), pkgs[0].key)
out.wrap = True
- out.later_prefix = [' ']
- versions = ' '.join(pkg.fullver for pkg in sorted(pkgs))
- out.write(green, ' versions: ', out.fg(), versions)
+ out.later_prefix = [" "]
+ versions = " ".join(pkg.fullver for pkg in sorted(pkgs))
+ out.write(green, " versions: ", out.fg(), versions)
# If we are already matching on all repos we do not need to duplicate.
if not options.all_repos:
versions = sorted(
- pkg.fullver for repo in options.domain.installed_repos
- for pkg in repo.itermatch(pkgs[0].unversioned_atom))
+ pkg.fullver
+ for repo in options.domain.installed_repos
+ for pkg in repo.itermatch(pkgs[0].unversioned_atom)
+ )
if versions:
- out.write(green, ' installed: ', out.fg(), ' '.join(versions))
+ out.write(green, " installed: ", out.fg(), " ".join(versions))
for attr in options.attr:
- out.write(green, f' {attr}: ', out.fg(),
- stringify_attr(options, pkgs[-1], attr))
+ out.write(
+ green,
+ f" {attr}: ",
+ out.fg(),
+ stringify_attr(options, pkgs[-1], attr),
+ )
out.write()
out.wrap = False
out.later_prefix = []
elif options.one_attr:
if options.atom:
- out.write('=', autoline=False)
+ out.write("=", autoline=False)
if options.atom or options.cpv:
out.write(pkgs[0].key, autoline=False)
if options.display_slot:
- out.write(':', pkgs[0].slot, autoline=False)
+ out.write(":", pkgs[0].slot, autoline=False)
if options.display_repo:
- out.write('::', pkgs[0].repo.repo_id, autoline=False)
- out.write('|', autoline=False)
+ out.write("::", pkgs[0].repo.repo_id, autoline=False)
+ out.write("|", autoline=False)
out.write(stringify_attr(options, pkgs[-1], options.one_attr))
else:
out.autoline = False
out.write(pkgs[0].key)
if options.display_slot:
- out.write(':', pkgs[0].slot, autoline=False)
+ out.write(":", pkgs[0].slot, autoline=False)
if options.display_repo:
- out.write('::', pkgs[0].repo.repo_id, autoline=False)
+ out.write("::", pkgs[0].repo.repo_id, autoline=False)
for attr in options.attr:
attr_str = stringify_attr(options, pkgs[-1], attr)
out.write(f' {attr}="{attr_str}"')
@@ -468,13 +497,17 @@ def print_packages_noversion(options, out, err, pkgs):
# default priority for DelayedValue; anything else is setup then.
argparser = commandline.ArgumentParser(
- domain=True, description=__doc__, script=(__file__, __name__))
+ domain=True, description=__doc__, script=(__file__, __name__)
+)
repo_group = argparser.add_argument_group(
- 'repository matching options',
- description='options controlling which repos to inspect')
+ "repository matching options",
+ description="options controlling which repos to inspect",
+)
repo_group.add_argument(
- '--raw', action='store_true', default=False,
+ "--raw",
+ action="store_true",
+ default=False,
help="disable configuration filtering",
docs="""
Disable configuration filtering that forces raw dependencies to be
@@ -482,24 +515,31 @@ repo_group.add_argument(
        Primarily useful for people who need to look under the hood: ebuild
        devs, PM tool authors, etc. Note this option ignores --domain if it
        is specified.
- """)
+ """,
+)
repo_group.add_argument(
- '--unfiltered', action='store_true', default=False,
+ "--unfiltered",
+ action="store_true",
+ default=False,
help="disable all license and visibility filtering",
docs="""
Disable all package filtering mechanisms such as ACCEPT_KEYWORDS,
ACCEPT_LICENSE, and package.mask.
- """)
+ """,
+)
repo_group.add_argument(
- '--virtuals', action='store', choices=('only', 'disable'),
- help='only match virtuals or disable virtuals matching entirely',
+ "--virtuals",
+ action="store",
+ choices=("only", "disable"),
+ help="only match virtuals or disable virtuals matching entirely",
docs="""
This option requires one of two arguments, either 'only' or 'disable',
which causes only virtuals to be matched or disables virtuals matching
entirely, respectively.
By default, virtuals are included during matching.
- """)
+ """,
+)
class RawAwareStoreRepoObject(commandline.StoreRepoObject):
@@ -507,19 +547,23 @@ class RawAwareStoreRepoObject(commandline.StoreRepoObject):
def _get_sections(self, config, namespace):
if namespace.raw:
- self.repo_key = 'repos_raw'
+ self.repo_key = "repos_raw"
elif namespace.unfiltered:
- self.repo_key = 'unfiltered_repos'
+ self.repo_key = "unfiltered_repos"
else:
- self.repo_key = 'repos'
+ self.repo_key = "repos"
return super()._get_sections(config, namespace)
+
repo_mux = repo_group.add_mutually_exclusive_group()
# TODO: update docs when binpkg/vdb repos are configured via repos.conf
repo_mux.add_argument(
- '-r', '--repo', action=RawAwareStoreRepoObject,
- priority=29, allow_external_repos=True,
- help='repo to search (default from domain if omitted)',
+ "-r",
+ "--repo",
+ action=RawAwareStoreRepoObject,
+ priority=29,
+ allow_external_repos=True,
+ help="repo to search (default from domain if omitted)",
docs="""
Select the repo to search in for matches. This includes all the
configured repos in repos.conf as well as the special keywords binpkg,
@@ -528,34 +572,52 @@ repo_mux.add_argument(
By default, all configured repos except the vdb will be searched when
this option isn't specified.
- """)
+ """,
+)
repo_mux.add_argument(
- '-E', '--ebuild-repos', action='store_true',
- help='search all ebuild repos',
- docs="Search within all ebuild repos, all non-ebuild repos are skipped.")
+ "-E",
+ "--ebuild-repos",
+ action="store_true",
+ help="search all ebuild repos",
+ docs="Search within all ebuild repos, all non-ebuild repos are skipped.",
+)
repo_mux.add_argument(
- '-B', '--binary-repos', action='store_true',
- help='search all binary repos',
- docs="Search within all binary repos, all non-binary repos are skipped.")
+ "-B",
+ "--binary-repos",
+ action="store_true",
+ help="search all binary repos",
+ docs="Search within all binary repos, all non-binary repos are skipped.",
+)
repo_mux.add_argument(
- '-I', '--installed', action='store_true',
- help='search installed packages',
- docs="Search within installed packages (alias for '--repo vdb').")
+ "-I",
+ "--installed",
+ action="store_true",
+ help="search installed packages",
+ docs="Search within installed packages (alias for '--repo vdb').",
+)
repo_mux.add_argument(
- '-A', '--all-repos', action='store_true',
- help='search all repos',
- docs="Search all available repos including the vdb.")
+ "-A",
+ "--all-repos",
+ action="store_true",
+ help="search all repos",
+ docs="Search all available repos including the vdb.",
+)
-@argparser.bind_delayed_default(30, 'repos')
+@argparser.bind_delayed_default(30, "repos")
def setup_repos(namespace, attr):
# Get repo(s) to operate on.
if namespace.repo:
# The store repo machinery handles --raw and --unfiltered for
# us, thus it being the first check.
repos = [namespace.repo]
- elif (namespace.contents or namespace.size or namespace._owns or
- namespace._owns_re or namespace.installed):
+ elif (
+ namespace.contents
+ or namespace.size
+ or namespace._owns
+ or namespace._owns_re
+ or namespace.installed
+ ):
repos = namespace.domain.installed_repos
elif namespace.unfiltered:
if namespace.all_repos:
@@ -579,47 +641,61 @@ def setup_repos(namespace, attr):
if namespace.raw or namespace.virtuals:
repos = get_raw_repos(repos)
if namespace.virtuals:
- repos = get_virtual_repos(
- repos, namespace.virtuals == 'only')
+ repos = get_virtual_repos(repos, namespace.virtuals == "only")
setattr(namespace, attr, repos)
+
query = argparser.add_argument_group(
- 'package matching options',
+ "package matching options",
docs="""
Each option specifies a restriction packages must match. Specifying
the same option twice means "or" unless stated otherwise. Specifying
multiple types of restrictions means "and" unless stated otherwise.
- """)
+ """,
+)
# for queries, use add_query always; this has the bookkeeping
# necessary to ensure the sub-query gets bound into the
# finalized query
_query_items = []
+
+
def add_query(*args, **kwds):
- if 'dest' not in kwds:
+ if "dest" not in kwds:
# auto-determine destination name from long option(s)
- dest = [x for x in args if x.startswith(argparser.prefix_chars * 2) and len(x) > 2]
+ dest = [
+ x for x in args if x.startswith(argparser.prefix_chars * 2) and len(x) > 2
+ ]
if not dest:
- raise ValueError(f"no valid options for query dest names: {', '.join(args)}")
+ raise ValueError(
+ f"no valid options for query dest names: {', '.join(args)}"
+ )
dest = dest[0].lstrip(argparser.prefix_chars)
- kwds['dest'] = dest.replace('-', '_')
- _query_items.append(kwds['dest'])
- kwds.setdefault('final_priority', 50)
- if kwds.get('action', None) == 'append':
- kwds.setdefault('default', [])
+ kwds["dest"] = dest.replace("-", "_")
+ _query_items.append(kwds["dest"])
+ kwds.setdefault("final_priority", 50)
+ if kwds.get("action", None) == "append":
+ kwds.setdefault("default", [])
commandline.make_query(query, *args, **kwds)
+
def bind_add_query(*args, **kwds):
def f(functor):
- kwds[kwds.pop('bind', 'type')] = functor
+ kwds[kwds.pop("bind", "type")] = functor
add_query(*args, **kwds)
return functor
+
return f
+
@bind_add_query(
- nargs='*', dest='matches', metavar='TARGET',
- bind='final_converter', type=None,
- help="extended atom matching of pkgs")
+ nargs="*",
+ dest="matches",
+ metavar="TARGET",
+ bind="final_converter",
+ type=None,
+ help="extended atom matching of pkgs",
+)
def matches_finalize(targets, namespace):
repos = multiplex.tree(*namespace.repos)
@@ -648,48 +724,61 @@ def matches_finalize(targets, namespace):
return packages.OrRestriction(*restrictions)
return []
+
add_query(
- '--all', action='append_const',
- const=packages.AlwaysTrue, type=None,
- help='match all packages',
+ "--all",
+ action="append_const",
+ const=packages.AlwaysTrue,
+ type=None,
+ help="match all packages",
docs="""
Match all packages which is equivalent to "pquery *". Note that if no
query options are specified, this option is enabled.
- """)
+ """,
+)
add_query(
- '--has-use', action='append',
- type=parserestrict.comma_separated_containment('iuse_stripped'),
- help='exact string match on a USE flag')
+ "--has-use",
+ action="append",
+ type=parserestrict.comma_separated_containment("iuse_stripped"),
+ help="exact string match on a USE flag",
+)
add_query(
- '--license', action='append',
- type=parserestrict.comma_separated_containment('license'),
- help='exact match on a license')
+ "--license",
+ action="append",
+ type=parserestrict.comma_separated_containment("license"),
+ help="exact match on a license",
+)
query.add_argument(
- '--revdep', nargs=1,
+ "--revdep",
+ nargs=1,
action=arghparse.Expansion,
- subst=(('--restrict-revdep', '%(0)s'), ('--print-revdep', '%(0)s')),
- help='shorthand for --restrict-revdep atom --print-revdep atom',
+ subst=(("--restrict-revdep", "%(0)s"), ("--print-revdep", "%(0)s")),
+ help="shorthand for --restrict-revdep atom --print-revdep atom",
docs="""
An alias for '--restrict-revdep atom --print-revdep atom', but note
that --print-revdep is slow so use --restrict-revdep if you just need a
list.
- """)
+ """,
+)
query.add_argument(
- '--revdep-pkgs', nargs=1,
+ "--revdep-pkgs",
+ nargs=1,
action=arghparse.Expansion,
- subst=(('--restrict-revdep-pkgs', '%(0)s'), ('--print-revdep', '%(0)s')),
- help='shorthand for --restrict-revdep-pkgs atom --print-revdep atom',
+ subst=(("--restrict-revdep-pkgs", "%(0)s"), ("--print-revdep", "%(0)s")),
+ help="shorthand for --restrict-revdep-pkgs atom --print-revdep atom",
docs="""
An alias for '--restrict-revdep-pkgs atom --print-revdep atom', but
note that --print-revdep is slow so use --restrict-revdep if you just
need a list.
- """)
+ """,
+)
+
@bind_add_query(
- '--restrict-revdep', action='append', default=[],
- help='dependency on an atom')
+ "--restrict-revdep", action="append", default=[], help="dependency on an atom"
+)
def parse_revdep(value):
"""Value should be an atom, packages with deps intersecting that match."""
try:
@@ -697,19 +786,25 @@ def parse_revdep(value):
except atom.MalformedAtom as e:
raise argparser.error(e)
val_restrict = values.FlatteningRestriction(
- atom.atom,
- values.AnyMatch(values.FunctionRestriction(targetatom.intersects)))
- return packages.OrRestriction(*list(
- packages.PackageRestriction(dep, val_restrict)
- for dep in dep_attrs))
+ atom.atom, values.AnyMatch(values.FunctionRestriction(targetatom.intersects))
+ )
+ return packages.OrRestriction(
+ *list(packages.PackageRestriction(dep, val_restrict) for dep in dep_attrs)
+ )
+
def _revdep_pkgs_match(pkgs, value):
return any(value.match(pkg) for pkg in pkgs)
+
@bind_add_query(
- '--restrict-revdep-pkgs', action='append', type=atom.atom,
- default=[], bind='final_converter',
- help='dependency on pkgs that match a specific atom')
+ "--restrict-revdep-pkgs",
+ action="append",
+ type=atom.atom,
+ default=[],
+ bind="final_converter",
+ help="dependency on pkgs that match a specific atom",
+)
def revdep_pkgs_finalize(sequence, namespace):
if not sequence:
return []
@@ -719,42 +814,48 @@ def revdep_pkgs_finalize(sequence, namespace):
l.extend(repo.itermatch(atom_inst))
# have our pkgs; now build the restrict.
any_restrict = values.AnyMatch(
- values.FunctionRestriction(partial(_revdep_pkgs_match, tuple(l))))
+ values.FunctionRestriction(partial(_revdep_pkgs_match, tuple(l)))
+ )
r = values.FlatteningRestriction(atom.atom, any_restrict)
- return list(packages.PackageRestriction(dep, r)
- for dep in dep_attrs)
+ return list(packages.PackageRestriction(dep, r) for dep in dep_attrs)
+
@bind_add_query(
- '-S', '--description', action='append',
- help='regexp search on description and longdescription')
+ "-S",
+ "--description",
+ action="append",
+ help="regexp search on description and longdescription",
+)
def parse_description(value):
"""Value is used as a regexp matching description or longdescription."""
matcher = values.StrRegex(value, case_sensitive=False)
- return packages.OrRestriction(*list(
- packages.PackageRestriction(attr, matcher)
- for attr in ('description', 'longdescription')))
+ return packages.OrRestriction(
+ *list(
+ packages.PackageRestriction(attr, matcher)
+ for attr in ("description", "longdescription")
+ )
+ )
-@bind_add_query(
- '--eapi', action='append',
- help='match packages using a given EAPI')
+
+@bind_add_query("--eapi", action="append", help="match packages using a given EAPI")
def parse_eapi(value):
"""Value is matched against package EAPI versions."""
- return packages.PackageRestriction(
- 'eapi',
- values.StrExactMatch(value))
+ return packages.PackageRestriction("eapi", values.StrExactMatch(value))
-@bind_add_query(
- '--owns', action='append',
- help='exact match on an owned file/dir')
+
+@bind_add_query("--owns", action="append", help="exact match on an owned file/dir")
def parse_owns(value):
return packages.PackageRestriction(
- 'contents',
- values.AnyMatch(values.GetAttrRestriction(
- 'location', values.StrExactMatch(value))))
+ "contents",
+ values.AnyMatch(
+ values.GetAttrRestriction("location", values.StrExactMatch(value))
+ ),
+ )
+
@bind_add_query(
- '--owns-re', action='append',
- help='like "owns" but using a regexp for matching')
+ "--owns-re", action="append", help='like "owns" but using a regexp for matching'
+)
def parse_ownsre(value):
"""Value is a regexp matched against the string form of an fs object.
@@ -762,85 +863,115 @@ def parse_ownsre(value):
to match.
"""
return packages.PackageRestriction(
- 'contents',
- values.AnyMatch(values.GetAttrRestriction(
- 'location', values.StrRegex(value))))
+ "contents",
+ values.AnyMatch(values.GetAttrRestriction("location", values.StrRegex(value))),
+ )
-@bind_add_query(
- '--maintainer', action='append',
- help='regex to search for maintainers')
+
+@bind_add_query("--maintainer", action="append", help="regex to search for maintainers")
def parse_maintainer(value):
"""
Case insensitive Regex match on the combined 'name <email>' bit of
metadata.xml's maintainer data.
"""
- if value and value != 'maintainer-needed':
+ if value and value != "maintainer-needed":
return packages.PackageRestriction(
- 'maintainers',
- values.AnyMatch(values.UnicodeConversion(
- values.StrRegex(value.lower(), case_sensitive=False))))
+ "maintainers",
+ values.AnyMatch(
+ values.UnicodeConversion(
+ values.StrRegex(value.lower(), case_sensitive=False)
+ )
+ ),
+ )
else:
# empty string matches packages without a maintainer
- return packages.PackageRestriction(
- 'maintainers',
- values.EqualityMatch(()))
+ return packages.PackageRestriction("maintainers", values.EqualityMatch(()))
+
@bind_add_query(
- '--maintainer-name', action='append',
- help='comma-separated list of maintainer name regexes to search for')
+ "--maintainer-name",
+ action="append",
+ help="comma-separated list of maintainer name regexes to search for",
+)
def parse_maintainer_name(value):
"""
Case insensitive Regex match on the name bit of metadata.xml's
maintainer data.
"""
return packages.PackageRestriction(
- 'maintainers',
- values.AnyMatch(values.GetAttrRestriction(
- 'name', values.StrRegex(value.lower(), case_sensitive=False))))
+ "maintainers",
+ values.AnyMatch(
+ values.GetAttrRestriction(
+ "name", values.StrRegex(value.lower(), case_sensitive=False)
+ )
+ ),
+ )
+
@bind_add_query(
- '--maintainer-email', action='append',
- help='comma-separated list of maintainer email regexes to search for')
+ "--maintainer-email",
+ action="append",
+ help="comma-separated list of maintainer email regexes to search for",
+)
def parse_maintainer_email(value):
"""
Case insensitive Regex match on the email bit of metadata.xml's
maintainer data.
"""
return packages.PackageRestriction(
- 'maintainers',
- values.AnyMatch(values.GetAttrRestriction(
- 'email', values.StrRegex(value.lower(), case_sensitive=False))))
+ "maintainers",
+ values.AnyMatch(
+ values.GetAttrRestriction(
+ "email", values.StrRegex(value.lower(), case_sensitive=False)
+ )
+ ),
+ )
+
@bind_add_query(
- '--environment', action='append',
- help='regexp search in environment.bz2')
+ "--environment", action="append", help="regexp search in environment.bz2"
+)
def parse_envmatch(value):
"""Apply a regexp to the environment."""
return packages.PackageRestriction(
- 'environment', DataSourceRestriction(values.AnyMatch(
- values.StrRegex(value))))
+ "environment", DataSourceRestriction(values.AnyMatch(values.StrRegex(value)))
+ )
+
# note the type=str; this is to suppress the default
# fallback of using match parsing.
add_query(
- '--pkgset', action=commandline.StoreConfigObject,
- nargs=1, type=str, priority=35, config_type='pkgset',
- help='find packages that match the given package set (world for example)')
+ "--pkgset",
+ action=commandline.StoreConfigObject,
+ nargs=1,
+ type=str,
+ priority=35,
+ config_type="pkgset",
+ help="find packages that match the given package set (world for example)",
+)
# add a fallback if no restrictions are specified.
-_query_items.append('_fallback_all')
+_query_items.append("_fallback_all")
+
+
def _add_all_if_needed(namespace, attr):
val = [packages.AlwaysTrue]
for query_attr in _query_items:
- if getattr(namespace, f'_{query_attr}', None):
+ if getattr(namespace, f"_{query_attr}", None):
val = None
break
setattr(namespace, attr, val)
+
@bind_add_query(
- '-u', '--upgrade', action='store_true',
- metavar=None, type=None, bind='final_converter',
- help='match installed packages without best slotted version')
+ "-u",
+ "--upgrade",
+ action="store_true",
+ metavar=None,
+ type=None,
+ bind="final_converter",
+ help="match installed packages without best slotted version",
+)
def pkg_upgrade(_value, namespace):
pkgs = []
for pkg in namespace.domain.all_installed_repos:
@@ -849,105 +980,138 @@ def pkg_upgrade(_value, namespace):
pkgs.append(matches[-1].versioned_atom)
return packages.OrRestriction(*pkgs)
+
argparser.set_defaults(
- _fallback_all=arghparse.DelayedValue(_add_all_if_needed, priority=89))
+ _fallback_all=arghparse.DelayedValue(_add_all_if_needed, priority=89)
+)
argparser.set_defaults(
- query=commandline.BooleanQuery(_query_items, klass_type='and', priority=90))
+ query=commandline.BooleanQuery(_query_items, klass_type="and", priority=90)
+)
-output = argparser.add_argument_group('output options')
+output = argparser.add_argument_group("output options")
output.add_argument(
- '-1', '--first', action='store_true',
- help='stop when first match is found')
+ "-1", "--first", action="store_true", help="stop when first match is found"
+)
output.add_argument(
- '-a', '--atom', action=arghparse.Expansion,
- subst=(('--cpv',),),
- help='print =cat/pkg-3 instead of cat/pkg-3.',
+ "-a",
+ "--atom",
+ action=arghparse.Expansion,
+ subst=(("--cpv",),),
+ help="print =cat/pkg-3 instead of cat/pkg-3.",
docs="""
Output valid package atoms, e.g. =cat/pkg-3 instead of cat/pkg-3.
Note that this option implies --cpv and has no effect if used with
--no-version.
- """)
+ """,
+)
output.add_argument(
- '--cpv', action='store_true',
- help='print the category/package-version',
+ "--cpv",
+ action="store_true",
+ help="print the category/package-version",
docs="""
        Display output in the format of 'category/package-version', which is
        done by default; this option forces the output format if another output
        option (such as --contents) alters it.
- """)
+ """,
+)
output.add_argument(
- '-R', action='store_true', dest='display_repo',
- help='print the repo of the package')
+ "-R", action="store_true", dest="display_repo", help="print the repo of the package"
+)
output.add_argument(
- '--slot', action='store_true', dest='display_slot',
- help='print the slot of the package')
+ "--slot",
+ action="store_true",
+ dest="display_slot",
+ help="print the slot of the package",
+)
output_mux = output.add_mutually_exclusive_group()
output_mux.add_argument(
- '-n', '--no-version', action='store_true',
- dest='noversion',
- help='collapse multiple matching versions together')
+ "-n",
+ "--no-version",
+ action="store_true",
+ dest="noversion",
+ help="collapse multiple matching versions together",
+)
output_mux.add_argument(
- '--min', action='store_true',
- help='show only the lowest version for each package')
+ "--min", action="store_true", help="show only the lowest version for each package"
+)
output_mux.add_argument(
- '--max', action='store_true',
- help='show only the highest version for each package')
+ "--max", action="store_true", help="show only the highest version for each package"
+)
del output_mux
output.add_argument(
- '--blame', action=arghparse.Expansion,
+ "--blame",
+ action=arghparse.Expansion,
subst=(("--attr", "maintainers"),),
- help='shorthand for --attr maintainers')
+ help="shorthand for --attr maintainers",
+)
output.add_argument(
- '--size', action='store_true',
- help='display size of all files owned by the package')
+ "--size", action="store_true", help="display size of all files owned by the package"
+)
output.add_argument(
- '--contents', action='store_true',
- help='list files owned by the package')
+ "--contents", action="store_true", help="list files owned by the package"
+)
output.add_argument(
- '--highlight-dep', action='append',
- type=atom.atom, default=[],
- help='highlight dependencies matching this atom')
+ "--highlight-dep",
+ action="append",
+ type=atom.atom,
+ default=[],
+ help="highlight dependencies matching this atom",
+)
output.add_argument(
- '--print-revdep', action='append',
- type=atom.atom, default=[],
- help='print what condition(s) trigger a dep')
+ "--print-revdep",
+ action="append",
+ type=atom.atom,
+ default=[],
+ help="print what condition(s) trigger a dep",
+)
output.add_argument(
- '--attr', action='append', choices=printable_attrs,
- metavar='attribute', default=[],
+ "--attr",
+ action="append",
+ choices=printable_attrs,
+ metavar="attribute",
+ default=[],
help="print this attribute's value (can be specified more than once)",
docs=f"""
Print the given attribute's value. This option can be specified
multiple times.
Valid attributes: {', '.join(printable_attrs)}
- """)
+ """,
+)
output.add_argument(
- '--force-attr', action='append', dest='attr',
- metavar='attribute', default=[],
- help='like --attr but accepts any string as '
- 'attribute name instead of only explicitly '
- 'supported names')
+ "--force-attr",
+ action="append",
+ dest="attr",
+ metavar="attribute",
+ default=[],
+ help="like --attr but accepts any string as "
+ "attribute name instead of only explicitly "
+ "supported names",
+)
one_attr_mux = output.add_mutually_exclusive_group()
one_attr_mux.add_argument(
- '--one-attr', choices=printable_attrs,
- metavar='attribute',
- help="print one attribute, suppresses other output")
+ "--one-attr",
+ choices=printable_attrs,
+ metavar="attribute",
+ help="print one attribute, suppresses other output",
+)
one_attr_mux.add_argument(
- '--force-one-attr',
- metavar='attribute',
- help='like --one-attr but accepts any string as '
- 'attribute name instead of only explicitly '
- 'supported names')
+ "--force-one-attr",
+ metavar="attribute",
+ help="like --one-attr but accepts any string as "
+ "attribute name instead of only explicitly "
+ "supported names",
+)
del one_attr_mux
def get_pkg_attr(pkg, attr, fallback=None):
- if attr[0:4] == 'raw_':
- pkg = getattr(pkg, '_raw_pkg', pkg)
+ if attr[0:4] == "raw_":
+ pkg = getattr(pkg, "_raw_pkg", pkg)
attr = attr[4:]
return getattr(pkg, attr, fallback)
@@ -956,31 +1120,37 @@ def get_pkg_attr(pkg, attr, fallback=None):
def _validate_args(parser, namespace):
if namespace.noversion:
if namespace.contents:
- parser.error('both --no-version and --contents does not make sense')
+ parser.error("both --no-version and --contents does not make sense")
if namespace.min or namespace.max:
- parser.error('--no-version with --min or --max does not make sense')
+ parser.error("--no-version with --min or --max does not make sense")
if namespace.print_revdep:
- parser.error('--print-revdep with --no-version does not make sense')
+ parser.error("--print-revdep with --no-version does not make sense")
if namespace.one_attr and namespace.print_revdep:
- parser.error('--print-revdep with --force-one-attr or --one-attr does not make sense')
+ parser.error(
+ "--print-revdep with --force-one-attr or --one-attr does not make sense"
+ )
def process_attrs(sequence):
for attr in sequence:
- if attr == 'all':
- i = [x for x in printable_attrs if x != 'all']
- elif attr == 'allmetadata':
+ if attr == "all":
+ i = [x for x in printable_attrs if x != "all"]
+ elif attr == "allmetadata":
i = process_attrs(metadata_attrs)
- elif attr == 'alldepends':
+ elif attr == "alldepends":
i = dep_attrs
- elif attr == 'raw_alldepends':
- i = [f'raw_{x}' for x in dep_attrs]
+ elif attr == "raw_alldepends":
+ i = [f"raw_{x}" for x in dep_attrs]
else:
i = [attr]
for attr in i:
yield attr
- attrs = ['repo', 'description', 'homepage', 'license'] if namespace.verbosity > 0 else []
+ attrs = (
+ ["repo", "description", "homepage", "license"]
+ if namespace.verbosity > 0
+ else []
+ )
attrs.extend(process_attrs(namespace.attr))
# finally, uniquify the attrs.
@@ -992,15 +1162,17 @@ def main(options, out, err):
"""Run a query."""
if options.debug:
for repo in options.repos:
- out.write(f'repo: {repo.repo_id}')
- out.write(f'restrict: {options.query}')
+ out.write(f"repo: {repo.repo_id}")
+ out.write(f"restrict: {options.query}")
out.write()
if options.query is None:
return 0
for repo in options.repos:
try:
- for pkgs in pkgutils.groupby_pkg(repo.itermatch(options.query, sorter=sorted)):
+ for pkgs in pkgutils.groupby_pkg(
+ repo.itermatch(options.query, sorter=sorted)
+ ):
pkgs = list(pkgs)
if options.noversion:
print_packages_noversion(options, out, err, pkgs)
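
For context on the pquery hunks above: every query option registered through add_query/bind_add_query builds a snakeoil restriction object, and the registered restrictions are later ANDed into a single query that each selected repository is matched against. A rough, illustrative sketch using only the builders defined in this module (the import path is assumed and the atoms are placeholders; none of this is part of the commit):

    # illustrative only; assumes the script module lives at pkgcore.scripts.pquery
    from pkgcore.scripts.pquery import parse_description, parse_revdep

    # packages whose *DEPEND attributes intersect dev-libs/openssl
    revdep_restrict = parse_revdep("dev-libs/openssl")
    # packages whose description or longdescription matches "toolkit"
    desc_restrict = parse_description("toolkit")

    # pquery itself combines all registered restrictions with a boolean AND
    # and feeds the result to repo.itermatch(...) for every selected repo
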
diff --git a/src/pkgcore/sync/base.py b/src/pkgcore/sync/base.py
index d60edeef8..2a63545e3 100644
--- a/src/pkgcore/sync/base.py
+++ b/src/pkgcore/sync/base.py
@@ -1,7 +1,14 @@
__all__ = (
- "SyncError", "UriError", "MissingLocalUser", "MissingBinary",
- "Syncer", "ExternalSyncer", "VcsSyncer",
- "GenericSyncer", "DisabledSyncer", "AutodetectSyncer",
+ "SyncError",
+ "UriError",
+ "MissingLocalUser",
+ "MissingBinary",
+ "Syncer",
+ "ExternalSyncer",
+ "VcsSyncer",
+ "GenericSyncer",
+ "DisabledSyncer",
+ "AutodetectSyncer",
)
import os
@@ -21,7 +28,6 @@ class SyncError(PkgcoreUserException):
class UriError(SyncError):
-
def __init__(self, uri, msg):
self.uri = uri
self.msg = msg
@@ -29,7 +35,6 @@ class UriError(SyncError):
class PathError(SyncError):
-
def __init__(self, path, msg):
self.path = path.rstrip(os.path.sep)
self.msg = msg
@@ -37,7 +42,6 @@ class PathError(SyncError):
class MissingLocalUser(SyncError):
-
def __init__(self, uri, msg):
self.uri = uri
self.msg = msg
@@ -45,7 +49,6 @@ class MissingLocalUser(SyncError):
class MissingBinary(SyncError):
-
def __init__(self, binary, msg):
self.binary = binary
self.msg = msg
@@ -64,10 +67,11 @@ class Syncer:
disabled = False
pkgcore_config_type = ConfigHint(
- {'path': 'str', 'uri': 'str', 'opts': 'str', 'usersync': 'bool'},
- typename='syncer')
+ {"path": "str", "uri": "str", "opts": "str", "usersync": "bool"},
+ typename="syncer",
+ )
- def __init__(self, path, uri, default_verbosity=0, usersync=False, opts=''):
+ def __init__(self, path, uri, default_verbosity=0, usersync=False, opts=""):
self.verbosity = default_verbosity
self.usersync = usersync
self.basedir = path.rstrip(os.path.sep) + os.path.sep
@@ -109,7 +113,7 @@ class Syncer:
try:
if uri[1].startswith("@"):
uri[1] = uri[1][1:]
- if '/' in uri[0] or ':' in uri[0]:
+ if "/" in uri[0] or ":" in uri[0]:
proto = uri[0].split("/", 1)
proto[1] = proto[1].lstrip("/")
uri[0] = proto[1]
@@ -153,15 +157,13 @@ class ExternalSyncer(Syncer):
binary = None
# external env settings passed through to syncing commands
- env_whitelist = (
- 'SSH_AUTH_SOCK',
- )
+ env_whitelist = ("SSH_AUTH_SOCK",)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.env = {v: os.environ[v] for v in self.env_whitelist if v in os.environ}
- if not hasattr(self, 'binary_path'):
+ if not hasattr(self, "binary_path"):
self.binary_path = self.require_binary(self.binary)
@staticmethod
@@ -175,22 +177,24 @@ class ExternalSyncer(Syncer):
@classmethod
def _plugin_disabled_check(cls):
- disabled = getattr(cls, '_disabled', None)
+ disabled = getattr(cls, "_disabled", None)
if disabled is None:
- path = getattr(cls, 'binary_path', None)
+ path = getattr(cls, "binary_path", None)
if path is None:
if cls.binary is None:
disabled = cls._disabled = True
else:
disabled = cls._disabled = (
- cls.require_binary(cls.binary, fatal=False) is None)
+ cls.require_binary(cls.binary, fatal=False) is None
+ )
else:
disabled = cls._disabled = os.path.exists(path)
return disabled
def _spawn(self, command, pipes, **kwargs):
return process.spawn.spawn(
- command, fd_pipes=pipes, uid=self.uid, gid=self.gid, env=self.env, **kwargs)
+ command, fd_pipes=pipes, uid=self.uid, gid=self.gid, env=self.env, **kwargs
+ )
@staticmethod
def _rewrite_uri_from_stat(path, uri):
@@ -205,7 +209,6 @@ class ExternalSyncer(Syncer):
class VcsSyncer(ExternalSyncer):
-
def _sync(self, verbosity, output_fd):
try:
st = os.stat(self.basedir)
@@ -222,12 +225,11 @@ class VcsSyncer(ExternalSyncer):
# we assume syncers support -v and -q for verbose and quiet output
if verbosity < 0:
- command.append('-q')
+ command.append("-q")
elif verbosity > 0:
- command.append('-' + 'v' * verbosity)
+ command.append("-" + "v" * verbosity)
- ret = self._spawn(command, pipes={1: output_fd, 2: output_fd, 0: 0},
- cwd=chdir)
+ ret = self._spawn(command, pipes={1: output_fd, 2: output_fd, 0: 0}, cwd=chdir)
return ret == 0
def _initial_pull(self):
@@ -238,27 +240,30 @@ class VcsSyncer(ExternalSyncer):
def _load_syncers():
- syncers = ('bzr', 'cvs', 'darcs', 'git', 'git_svn', 'hg', 'sqfs', 'svn', 'tar')
+ syncers = ("bzr", "cvs", "darcs", "git", "git_svn", "hg", "sqfs", "svn", "tar")
for syncer in syncers:
try:
- syncer_cls: type[Syncer] = getattr(import_module(f'pkgcore.sync.{syncer}'), f'{syncer}_syncer')
+ syncer_cls: type[Syncer] = getattr(
+ import_module(f"pkgcore.sync.{syncer}"), f"{syncer}_syncer"
+ )
except (ImportError, AttributeError):
continue
if syncer_cls.disabled:
continue
- if (f := getattr(syncer_cls, '_plugin_disabled_check', None)) is not None and f():
+ if (
+ f := getattr(syncer_cls, "_plugin_disabled_check", None)
+ ) is not None and f():
continue
yield syncer_cls
@configurable(
- {'basedir': 'str', 'uri': 'str', 'usersync': 'bool', 'opts': 'str'},
- typename='syncer')
+ {"basedir": "str", "uri": "str", "usersync": "bool", "opts": "str"},
+ typename="syncer",
+)
def GenericSyncer(basedir, uri, **kwargs):
"""Syncer using the plugin system to find a syncer based on uri."""
- plugins = [
- (plug.supports_uri(uri), plug)
- for plug in _load_syncers()]
+ plugins = [(plug.supports_uri(uri), plug) for plug in _load_syncers()]
plugins.sort(key=lambda x: x[0])
if not plugins or plugins[-1][0] <= 0:
raise UriError(uri, "no known syncer support")
@@ -271,15 +276,15 @@ class DisabledSyncer(Syncer):
disabled = True
def __init__(self, path, *args, **kwargs):
- super().__init__(path, uri='')
+ super().__init__(path, uri="")
-@configurable({'basedir': 'str', 'usersync': 'bool'}, typename='syncer')
+@configurable({"basedir": "str", "usersync": "bool"}, typename="syncer")
def DisabledSync(basedir, *args, **kwargs):
return DisabledSyncer(basedir)
-@configurable({'basedir': 'str', 'usersync': 'bool'}, typename='syncer')
+@configurable({"basedir": "str", "usersync": "bool"}, typename="syncer")
def AutodetectSyncer(basedir, **kwargs):
for syncer_cls in _load_syncers():
if args := syncer_cls.is_usable_on_filepath(basedir):
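
GenericSyncer above is the dispatch point: it scores every loadable syncer plugin via supports_uri() and instantiates the highest-scoring one, raising UriError when nothing claims the URI. A hedged sketch of that selection (path and URI are placeholders, not part of the commit):

    from pkgcore.sync.base import GenericSyncer

    # an rsync:// URI scores highest for rsync_syncer, so that class is
    # instantiated; UriError is raised if no plugin supports the URI
    syncer = GenericSyncer(
        "/var/db/repos/gentoo",                      # basedir (placeholder)
        "rsync://rsync.example.org/gentoo-portage",  # uri (placeholder)
    )
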
diff --git a/src/pkgcore/sync/bzr.py b/src/pkgcore/sync/bzr.py
index bc6c703f5..02fa3ecd1 100644
--- a/src/pkgcore/sync/bzr.py
+++ b/src/pkgcore/sync/bzr.py
@@ -11,13 +11,11 @@ class bzr_syncer(base.VcsSyncer):
binary = "bzr"
- supported_uris = (
- ('bzr+', 5),
- )
+ supported_uris = (("bzr+", 5),)
@classmethod
def is_usable_on_filepath(cls, path):
- bzr_path = os.path.join(path, '.bzr')
+ bzr_path = os.path.join(path, ".bzr")
if cls.disabled or not os.path.isdir(bzr_path):
return None
code, data = spawn_get_output([cls.binary, "info", path])
@@ -28,7 +26,7 @@ class bzr_syncer(base.VcsSyncer):
line = line.strip().split(":", 1)
if len(line) != 2:
continue
- if line[0] == 'parent branch':
+ if line[0] == "parent branch":
uri = f"bzr+{line[1].strip()}"
return (cls._rewrite_uri_from_stat(bzr_path, uri),)
return None
diff --git a/src/pkgcore/sync/cvs.py b/src/pkgcore/sync/cvs.py
index 1a6776fd9..c6f3513aa 100644
--- a/src/pkgcore/sync/cvs.py
+++ b/src/pkgcore/sync/cvs.py
@@ -10,8 +10,8 @@ class cvs_syncer(base.VcsSyncer):
binary = "cvs"
supported_uris = (
- ('cvs+', 5),
- ('cvs://', 5),
+ ("cvs+", 5),
+ ("cvs://", 5),
)
@classmethod
@@ -19,27 +19,26 @@ class cvs_syncer(base.VcsSyncer):
cvs_path = os.path.join(path, "CVS")
if cls.disabled or not os.path.isdir(cvs_path):
return None
- return (cls._rewrite_uri_from_stat(cvs_path, 'cvs://'),)
+ return (cls._rewrite_uri_from_stat(cvs_path, "cvs://"),)
@classmethod
def _parse_uri(cls, raw_uri):
- if not raw_uri.startswith("cvs") and \
- not raw_uri.startswith("cvs+"):
+ if not raw_uri.startswith("cvs") and not raw_uri.startswith("cvs+"):
raise base.UriError(raw_uri, "must be cvs:// or cvs+${RSH}")
if raw_uri.startswith("cvs://"):
- return None, raw_uri[len("cvs://"):]
- proto = raw_uri[len("cvs+"):].split(":", 1)
+ return None, raw_uri[len("cvs://") :]
+ proto = raw_uri[len("cvs+") :].split(":", 1)
if not proto[0]:
raise base.UriError(
- raw_uri, "cvs+ requires the rsh alternative to be specified")
+ raw_uri, "cvs+ requires the rsh alternative to be specified"
+ )
if proto[0] == "anon":
proto[0] = None
elif proto[0] != "pserver":
try:
proto[0] = cls.require_binary(proto[0])
except base.MissingBinary:
- raise base.UriError(
- raw_uri, f"missing rsh binary: {proto[0]!r}")
+ raise base.UriError(raw_uri, f"missing rsh binary: {proto[0]!r}")
return proto[0], proto[1].lstrip("/")
def __init__(self, basedir, raw_uri, **kwargs):
@@ -55,9 +54,9 @@ class cvs_syncer(base.VcsSyncer):
host, self.module = uri.rsplit(":", 1)
super().__init__(basedir, host, **kwargs)
- self.env['CVSROOT'] = self.uri
+ self.env["CVSROOT"] = self.uri
if self.rsh is not None:
- self.env['CVS_RSH'] = self.rsh
+ self.env["CVS_RSH"] = self.rsh
def _update_existing(self):
return [self.binary_path, "up"]
diff --git a/src/pkgcore/sync/darcs.py b/src/pkgcore/sync/darcs.py
index c81db211b..8ef108fe6 100644
--- a/src/pkgcore/sync/darcs.py
+++ b/src/pkgcore/sync/darcs.py
@@ -7,9 +7,7 @@ class darcs_syncer(base.VcsSyncer):
binary = "darcs"
- supported_uris = (
- ('darcs+', 5),
- )
+ supported_uris = (("darcs+", 5),)
@staticmethod
def parse_uri(raw_uri):
diff --git a/src/pkgcore/sync/git.py b/src/pkgcore/sync/git.py
index 7e57cdcc2..c2bfa34bd 100644
--- a/src/pkgcore/sync/git.py
+++ b/src/pkgcore/sync/git.py
@@ -10,34 +10,36 @@ class git_syncer(base.VcsSyncer):
binary = "git"
supported_uris = (
- ('git://', 5),
- ('git+', 5),
+ ("git://", 5),
+ ("git+", 5),
)
- supported_protocols = ('http://', 'https://', 'git://')
- supported_exts = ('.git',)
+ supported_protocols = ("http://", "https://", "git://")
+ supported_exts = (".git",)
@classmethod
def is_usable_on_filepath(cls, path):
- git_path = os.path.join(path, '.git')
+ git_path = os.path.join(path, ".git")
if cls.disabled or not os.path.isdir(git_path):
return None
# defer to git-svn plugin
- if os.path.isdir(os.path.join(git_path, 'svn')):
+ if os.path.isdir(os.path.join(git_path, "svn")):
return None
return (cls._rewrite_uri_from_stat(git_path, "git://"),)
@classmethod
def parse_uri(cls, raw_uri):
if not raw_uri.startswith("git+") and not raw_uri.startswith("git://"):
- if raw_uri.startswith(cls.supported_protocols) and raw_uri.endswith(cls.supported_exts):
+ if raw_uri.startswith(cls.supported_protocols) and raw_uri.endswith(
+ cls.supported_exts
+ ):
return raw_uri
- raise base.UriError(
- raw_uri, "doesn't start with git+ or git://")
+ raise base.UriError(raw_uri, "doesn't start with git+ or git://")
if raw_uri.startswith("git+"):
if raw_uri.startswith("git+:"):
raise base.UriError(
- raw_uri, "need to specify the sub protocol if using git+")
+ raw_uri, "need to specify the sub protocol if using git+"
+ )
return raw_uri[4:]
return raw_uri
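
The git_syncer.parse_uri hunk above only reflows the code; the accepted forms are unchanged: a git+ prefix is stripped to expose the underlying protocol, git:// URIs pass through, and bare http(s) URIs are accepted only when they end in .git. A small sketch of that behavior (URIs are examples only):

    from pkgcore.sync.git import git_syncer

    # "git+" is stripped, leaving the underlying protocol
    assert git_syncer.parse_uri("git+https://example.org/repo.git") == "https://example.org/repo.git"
    # native git:// URIs are returned unchanged
    assert git_syncer.parse_uri("git://example.org/repo.git") == "git://example.org/repo.git"
    # plain https URIs must end in a supported extension (.git)
    assert git_syncer.parse_uri("https://example.org/repo.git") == "https://example.org/repo.git"
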
diff --git a/src/pkgcore/sync/git_svn.py b/src/pkgcore/sync/git_svn.py
index e06e2df4a..3a34c03e4 100644
--- a/src/pkgcore/sync/git_svn.py
+++ b/src/pkgcore/sync/git_svn.py
@@ -12,13 +12,13 @@ class git_svn_syncer(base.VcsSyncer):
binary = "git"
supported_uris = (
- ('git+svn://', 10),
- ('git+svn+', 10),
+ ("git+svn://", 10),
+ ("git+svn+", 10),
)
@classmethod
def is_usable_on_filepath(cls, path):
- git_svn_path = os.path.join(path, '.git', 'svn')
+ git_svn_path = os.path.join(path, ".git", "svn")
if cls.disabled or not os.path.isdir(git_svn_path):
return None
return (cls._rewrite_uri_from_stat(git_svn_path, "git+svn://"),)
@@ -26,12 +26,12 @@ class git_svn_syncer(base.VcsSyncer):
@staticmethod
def parse_uri(raw_uri):
if not raw_uri.startswith("git+svn+") and not raw_uri.startswith("git+svn://"):
- raise base.UriError(
- raw_uri, "doesn't start with git+svn+ nor git+svn://")
+ raise base.UriError(raw_uri, "doesn't start with git+svn+ nor git+svn://")
if raw_uri.startswith("git+svn+"):
if raw_uri.startswith("git+svn+:"):
raise base.UriError(
- raw_uri, "need to specify the sub protocol if using git+svn+")
+ raw_uri, "need to specify the sub protocol if using git+svn+"
+ )
return raw_uri[8:]
return raw_uri[4:]
diff --git a/src/pkgcore/sync/hg.py b/src/pkgcore/sync/hg.py
index 089291366..1c698ded7 100644
--- a/src/pkgcore/sync/hg.py
+++ b/src/pkgcore/sync/hg.py
@@ -10,25 +10,24 @@ class hg_syncer(base.VcsSyncer):
binary = "hg"
supported_uris = (
- ('hg+', 5),
- ('mercurial+', 5),
+ ("hg+", 5),
+ ("mercurial+", 5),
)
@classmethod
def is_usable_on_filepath(cls, path):
- hg_path = os.path.join(path, '.hg')
+ hg_path = os.path.join(path, ".hg")
if cls.disabled or not os.path.isdir(hg_path):
return None
- return (cls._rewrite_uri_from_stat(hg_path, 'hg+//'),)
+ return (cls._rewrite_uri_from_stat(hg_path, "hg+//"),)
@staticmethod
def parse_uri(raw_uri):
if raw_uri.startswith("hg+"):
return raw_uri[3:]
elif raw_uri.startswith("mercurial+"):
- return raw_uri[len("mercurial+"):]
- raise base.UriError(
- raw_uri, "doesn't start with hg+ nor mercurial+")
+ return raw_uri[len("mercurial+") :]
+ raise base.UriError(raw_uri, "doesn't start with hg+ nor mercurial+")
def _initial_pull(self):
return [self.binary_path, "clone", self.uri, self.basedir]
diff --git a/src/pkgcore/sync/http.py b/src/pkgcore/sync/http.py
index fa9d5406d..b422d94ad 100644
--- a/src/pkgcore/sync/http.py
+++ b/src/pkgcore/sync/http.py
@@ -25,43 +25,43 @@ class http_syncer(base.Syncer):
def _sync(self, verbosity, output_fd, force=False, **kwargs):
dest = self._pre_download()
- if self.uri.lower().startswith('https://'):
+ if self.uri.lower().startswith("https://"):
# default to using system ssl certs
context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
else:
context = None
headers = {}
- etag_path = pjoin(self.basedir, '.etag')
- modified_path = pjoin(self.basedir, '.modified')
+ etag_path = pjoin(self.basedir, ".etag")
+ modified_path = pjoin(self.basedir, ".modified")
if not force:
# use cached ETag to check if updates exist
previous_etag = readfile_ascii(etag_path, none_on_missing=True)
if previous_etag:
- headers['If-None-Match'] = previous_etag
+ headers["If-None-Match"] = previous_etag
# use cached modification timestamp to check if updates exist
previous_modified = readfile_ascii(modified_path, none_on_missing=True)
if previous_modified:
- headers['If-Modified-Since'] = previous_modified
+ headers["If-Modified-Since"] = previous_modified
- req = urllib.request.Request(self.uri, headers=headers, method='GET')
+ req = urllib.request.Request(self.uri, headers=headers, method="GET")
# TODO: add customizable timeout
try:
resp = urllib.request.urlopen(req, context=context)
except urllib.error.URLError as e:
- if e.getcode() == 304: # Not Modified
+ if e.getcode() == 304: # Not Modified
logger.debug("content is unchanged")
return True
- raise base.SyncError(f'failed fetching {self.uri!r}: {e.reason}') from e
+ raise base.SyncError(f"failed fetching {self.uri!r}: {e.reason}") from e
# Manually check cached values ourselves since some servers appear to
# ignore If-None-Match or If-Modified-Since headers.
convert = lambda x: x.strip() if x else None
- etag = resp.getheader('ETag')
- modified = resp.getheader('Last-Modified')
+ etag = resp.getheader("ETag")
+ modified = resp.getheader("Last-Modified")
if not force:
if etag is not None and convert(etag) == convert(previous_etag):
logger.debug(f"etag {etag} is equal, no update available")
@@ -74,9 +74,10 @@ class http_syncer(base.Syncer):
os.makedirs(self.basedir, exist_ok=True)
except OSError as e:
raise base.SyncError(
- f'failed creating repo dir {self.basedir!r}: {e.strerror}') from e
+ f"failed creating repo dir {self.basedir!r}: {e.strerror}"
+ ) from e
- length = resp.getheader('content-length')
+ length = resp.getheader("content-length")
if length:
length = int(length)
blocksize = max(4096, length // 100)
@@ -94,13 +95,13 @@ class http_syncer(base.Syncer):
buf = resp.read(blocksize)
if not buf:
if length:
- sys.stdout.write('\n')
+ sys.stdout.write("\n")
break
self._download.write(buf)
size += len(buf)
if length:
- sys.stdout.write('\r')
- progress = '=' * int(size / length * 50)
+ sys.stdout.write("\r")
+ progress = "=" * int(size / length * 50)
percent = int(size / length * 100)
sys.stdout.write("[%-50s] %d%%" % (progress, percent))
sys.stdout.flush()
@@ -110,10 +111,10 @@ class http_syncer(base.Syncer):
# TODO: store this in pkgcore cache dir instead?
# update cached ETag/Last-Modified values
if etag:
- with open(etag_path, 'w') as f:
+ with open(etag_path, "w") as f:
f.write(etag)
if modified:
- with open(modified_path, 'w') as f:
+ with open(modified_path, "w") as f:
f.write(modified)
return True
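
The http_syncer changes above are purely quoting; the sync is still a conditional GET: cached ETag / Last-Modified values are replayed as If-None-Match / If-Modified-Since, and a 304 response means the local copy is current. A standalone sketch of that pattern with urllib (the URL is a placeholder):

    import urllib.error
    import urllib.request

    url = "https://example.org/snapshot.tar.xz"  # placeholder
    headers = {}
    cached_etag = None  # in pkgcore this is read from the repo's .etag file
    if cached_etag:
        headers["If-None-Match"] = cached_etag

    req = urllib.request.Request(url, headers=headers, method="GET")
    try:
        resp = urllib.request.urlopen(req)
        new_etag = resp.getheader("ETag")  # persist for the next sync
    except urllib.error.HTTPError as e:
        if e.code != 304:  # 304 Not Modified: nothing to download
            raise
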
diff --git a/src/pkgcore/sync/rsync.py b/src/pkgcore/sync/rsync.py
index b0ac4b48d..c6b95c70f 100644
--- a/src/pkgcore/sync/rsync.py
+++ b/src/pkgcore/sync/rsync.py
@@ -1,4 +1,7 @@
-__all__ = ("rsync_syncer", "rsync_timestamp_syncer",)
+__all__ = (
+ "rsync_syncer",
+ "rsync_timestamp_syncer",
+)
import os
import socket
@@ -13,23 +16,23 @@ from . import base
class rsync_syncer(base.ExternalSyncer):
- default_excludes = ['/distfiles', '/local', '/packages']
+ default_excludes = ["/distfiles", "/local", "/packages"]
default_includes = []
default_conn_timeout = 15
default_opts = [
- '--recursive',
- '--delete',
- '--delete-delay',
- '--perms',
- '--times',
- '--compress',
- '--force',
- '--links',
- '--safe-links',
- '--stats',
- '--human-readable',
- '--timeout=180',
- '--whole-file', # this one probably shouldn't be a default
+ "--recursive",
+ "--delete",
+ "--delete-delay",
+ "--perms",
+ "--times",
+ "--compress",
+ "--force",
+ "--links",
+ "--safe-links",
+ "--stats",
+ "--human-readable",
+ "--timeout=180",
+ "--whole-file", # this one probably shouldn't be a default
]
default_retries = 5
@@ -37,8 +40,7 @@ class rsync_syncer(base.ExternalSyncer):
@classmethod
def _parse_uri(cls, raw_uri):
- if not raw_uri.startswith("rsync://") and \
- not raw_uri.startswith("rsync+"):
+ if not raw_uri.startswith("rsync://") and not raw_uri.startswith("rsync+"):
raise base.UriError(raw_uri, "doesn't start with rsync:// nor rsync+")
if raw_uri.startswith("rsync://"):
@@ -49,16 +51,37 @@ class rsync_syncer(base.ExternalSyncer):
cls.require_binary(proto[0])
return proto[0], f"rsync:{proto[1]}"
- pkgcore_config_type = ConfigHint({
- 'basedir': 'str', 'uri': 'str', 'conn_timeout': 'str', 'usersync': 'bool',
- 'compress': 'bool', 'excludes': 'list', 'includes': 'list',
- 'retries': 'str', 'opts': 'list', 'extra_opts': 'list', 'proxy': 'str'},
- typename='syncer')
-
- def __init__(self, basedir, uri, conn_timeout=default_conn_timeout,
- usersync=False, compress=False, excludes=(), includes=(),
- retries=default_retries, proxy=None,
- opts=(), extra_opts=()):
+ pkgcore_config_type = ConfigHint(
+ {
+ "basedir": "str",
+ "uri": "str",
+ "conn_timeout": "str",
+ "usersync": "bool",
+ "compress": "bool",
+ "excludes": "list",
+ "includes": "list",
+ "retries": "str",
+ "opts": "list",
+ "extra_opts": "list",
+ "proxy": "str",
+ },
+ typename="syncer",
+ )
+
+ def __init__(
+ self,
+ basedir,
+ uri,
+ conn_timeout=default_conn_timeout,
+ usersync=False,
+ compress=False,
+ excludes=(),
+ includes=(),
+ retries=default_retries,
+ proxy=None,
+ opts=(),
+ extra_opts=(),
+ ):
uri = uri.rstrip(os.path.sep) + os.path.sep
self.rsh, uri = self._parse_uri(uri)
super().__init__(basedir, uri, default_verbosity=1, usersync=usersync)
@@ -75,13 +98,13 @@ class rsync_syncer(base.ExternalSyncer):
self.retries = int(retries)
self.use_proxy = proxy is not None
if self.use_proxy:
- self.env['RSYNC_PROXY'] = proxy
+ self.env["RSYNC_PROXY"] = proxy
self.is_ipv6 = "--ipv6" in self.opts or "-6" in self.opts
self.is_ipv6 = self.is_ipv6 and socket.has_ipv6
@staticmethod
def parse_hostname(uri):
- return uri[len("rsync://"):].split("@", 1)[-1].split("/", 1)[0]
+ return uri[len("rsync://") :].split("@", 1)[-1].split("/", 1)[0]
def _get_ips(self):
if self.use_proxy:
@@ -94,14 +117,16 @@ class rsync_syncer(base.ExternalSyncer):
af_fam = socket.AF_INET6
try:
for ipaddr in socket.getaddrinfo(
- self.hostname, None, af_fam, socket.SOCK_STREAM):
+ self.hostname, None, af_fam, socket.SOCK_STREAM
+ ):
if ipaddr[0] == socket.AF_INET6:
yield f"[{ipaddr[4][0]}]"
else:
yield ipaddr[4][0]
except OSError as e:
raise base.SyncError(
- f"DNS resolution failed for {self.hostname!r}: {e.strerror}")
+ f"DNS resolution failed for {self.hostname!r}: {e.strerror}"
+ )
def _sync(self, verbosity, output_fd):
fd_pipes = {1: output_fd, 2: output_fd}
@@ -114,14 +139,16 @@ class rsync_syncer(base.ExternalSyncer):
if verbosity < 0:
opts.append("--quiet")
elif verbosity > 0:
- opts.extend('-v' for x in range(verbosity))
+ opts.extend("-v" for x in range(verbosity))
# zip limits to the shortest iterable
ret = None
for count, ip in zip(range(self.retries), self._get_ips()):
- cmd = [self.binary_path,
- self.uri.replace(self.hostname, ip, 1),
- self.basedir] + opts
+ cmd = [
+ self.binary_path,
+ self.uri.replace(self.hostname, ip, 1),
+ self.basedir,
+ ] + opts
ret = self._spawn(cmd, fd_pipes)
if ret == 0:
@@ -130,9 +157,9 @@ class rsync_syncer(base.ExternalSyncer):
            raise base.SyncError(f"rsync command syntax error: {' '.join(cmd)}")
elif ret == 11:
raise base.SyncError("rsync ran out of disk space")
- # need to do something here instead of just restarting...
- # else:
- # print(ret)
+ # need to do something here instead of just restarting...
+ # else:
+ # print(ret)
raise base.SyncError("all attempts failed")
@@ -149,8 +176,8 @@ class _RsyncFileSyncer(rsync_syncer):
class rsync_timestamp_syncer(rsync_syncer):
forcable = True
- forward_sync_delay = 25 * 60 # 25 minutes
- negative_sync_delay = 60 * 60 # 60 minutes
+ forward_sync_delay = 25 * 60 # 25 minutes
+ negative_sync_delay = 60 * 60 # 60 minutes
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -165,7 +192,7 @@ class rsync_timestamp_syncer(rsync_syncer):
path = pjoin(self.basedir, "metadata", "timestamp.chk")
try:
with open(path) as f:
- date, offset = f.read().strip().rsplit('+', 1)
+ date, offset = f.read().strip().rsplit("+", 1)
date = time.mktime(time.strptime(date, "%a, %d %b %Y %H:%M:%S "))
# add the hour/minute offset
                date += int(offset[:2]) * 60 + int(offset[2:])
@@ -190,7 +217,9 @@ class rsync_timestamp_syncer(rsync_syncer):
if not ret:
doit = True
else:
- delta = self.current_timestamp(timestamp_path) - self.last_timestamp
+ delta = (
+ self.current_timestamp(timestamp_path) - self.last_timestamp
+ )
if delta >= 0:
doit = delta > self.forward_sync_delay
else:
@@ -210,8 +239,12 @@ class rsync_timestamp_syncer(rsync_syncer):
os.remove(timestamp_path)
else:
with open(timestamp_path, "w") as f:
- f.write(time.strftime("%a, %d %b %Y %H:%M:%S +0000",
- time.gmtime(self.last_timestamp)))
+ f.write(
+ time.strftime(
+ "%a, %d %b %Y %H:%M:%S +0000",
+ time.gmtime(self.last_timestamp),
+ )
+ )
except EnvironmentError:
# don't care...
pass
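
In the rsync hunks, parse_hostname is untouched apart from slice spacing: it drops the rsync:// prefix, any user@ part, and the module path, leaving only the host used for DNS resolution and retries. A quick sketch (hostnames are examples only):

    from pkgcore.sync.rsync import rsync_syncer

    host = rsync_syncer.parse_hostname("rsync://user@rsync.example.org/gentoo-portage")
    assert host == "rsync.example.org"
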
diff --git a/src/pkgcore/sync/sqfs.py b/src/pkgcore/sync/sqfs.py
index a9fdc676f..1e4ff41e0 100644
--- a/src/pkgcore/sync/sqfs.py
+++ b/src/pkgcore/sync/sqfs.py
@@ -7,8 +7,8 @@ from .http import http_syncer
class sqfs_syncer(http_syncer):
supported_uris = (
- ('sqfs+http://', 5),
- ('sqfs+https://', 5),
+ ("sqfs+http://", 5),
+ ("sqfs+https://", 5),
)
@staticmethod
diff --git a/src/pkgcore/sync/svn.py b/src/pkgcore/sync/svn.py
index 1764668fd..e8cd60593 100644
--- a/src/pkgcore/sync/svn.py
+++ b/src/pkgcore/sync/svn.py
@@ -12,13 +12,13 @@ class svn_syncer(base.ExternalSyncer):
binary = "svn"
supported_uris = (
- ('svn://', 5),
- ('svn+', 5),
+ ("svn://", 5),
+ ("svn+", 5),
)
@classmethod
def is_usable_on_filepath(cls, path):
- svn_path = os.path.join(path, '.svn')
+ svn_path = os.path.join(path, ".svn")
if cls.disabled or not os.path.isdir(svn_path):
return None
code, data = spawn_get_output([cls.binary, "info", path])
@@ -29,7 +29,7 @@ class svn_syncer(base.ExternalSyncer):
line = line.strip().split(":", 1)
if len(line) != 2:
continue
- if line[0] == 'URL':
+ if line[0] == "URL":
uri = f"svn+{line[1].strip()}"
return (cls._rewrite_uri_from_stat(svn_path, uri),)
return None
@@ -52,14 +52,17 @@ class svn_syncer(base.ExternalSyncer):
def _sync(self, verbosity, output_fd):
uri = self.uri
- if uri.startswith('svn+http://'):
- uri = uri.replace('svn+http://', 'http://')
- elif uri.startswith('svn+https://'):
- uri = uri.replace('svn+https://', 'https://')
+ if uri.startswith("svn+http://"):
+ uri = uri.replace("svn+http://", "http://")
+ elif uri.startswith("svn+https://"):
+ uri = uri.replace("svn+https://", "https://")
if not os.path.exists(self.basedir):
return 0 == self._spawn(
[self.binary_path, "co", uri, self.basedir],
- {1: output_fd, 2: output_fd, 0: 0})
+ {1: output_fd, 2: output_fd, 0: 0},
+ )
return 0 == self._spawn(
[self.binary_path, "update"],
- {1: output_fd, 2: output_fd, 0: 0}, cwd=self.basedir)
+ {1: output_fd, 2: output_fd, 0: 0},
+ cwd=self.basedir,
+ )
diff --git a/src/pkgcore/sync/tar.py b/src/pkgcore/sync/tar.py
index 5562bb99c..a1ecdd27a 100644
--- a/src/pkgcore/sync/tar.py
+++ b/src/pkgcore/sync/tar.py
@@ -13,26 +13,29 @@ from .http import http_syncer
class tar_syncer(http_syncer, base.ExternalSyncer):
- binary = 'tar'
+ binary = "tar"
supported_uris = (
- ('tar+http://', 5),
- ('tar+https://', 5),
+ ("tar+http://", 5),
+ ("tar+https://", 5),
)
# TODO: support more of the less used file extensions
- supported_protocols = ('http://', 'https://')
- supported_exts = ('.tar.gz', '.tar.bz2', '.tar.xz')
+ supported_protocols = ("http://", "https://")
+ supported_exts = (".tar.gz", ".tar.bz2", ".tar.xz")
@classmethod
def parse_uri(cls, raw_uri):
if raw_uri.startswith(("tar+http://", "tar+https://")):
raw_uri = raw_uri[4:]
- if raw_uri.startswith(cls.supported_protocols) and raw_uri.endswith(cls.supported_exts):
+ if raw_uri.startswith(cls.supported_protocols) and raw_uri.endswith(
+ cls.supported_exts
+ ):
return raw_uri
else:
raise base.UriError(
- raw_uri, "unsupported compression format for tarball archive")
+ raw_uri, "unsupported compression format for tarball archive"
+ )
raise base.UriError(raw_uri, "unsupported URI")
def _pre_download(self):
@@ -45,8 +48,8 @@ class tar_syncer(http_syncer, base.ExternalSyncer):
basedir = self.basedir.rstrip(os.path.sep)
repos_dir = os.path.dirname(basedir)
repo_name = os.path.basename(basedir)
- self.tempdir = os.path.join(repos_dir, f'.{repo_name}.update')
- self.tempdir_old = os.path.join(repos_dir, f'.{repo_name}.old')
+ self.tempdir = os.path.join(repos_dir, f".{repo_name}.update")
+ self.tempdir_old = os.path.join(repos_dir, f".{repo_name}.old")
# remove tempdirs on exit
atexit.register(partial(shutil.rmtree, self.tempdir, ignore_errors=True))
atexit.register(partial(shutil.rmtree, self.tempdir_old, ignore_errors=True))
@@ -60,22 +63,29 @@ class tar_syncer(http_syncer, base.ExternalSyncer):
os.makedirs(self.tempdir)
os.makedirs(self.tempdir_old)
except OSError as e:
- raise base.SyncError(f'failed creating repo update dirs: {e}')
+ raise base.SyncError(f"failed creating repo update dirs: {e}")
- exts = {'gz': 'gzip', 'bz2': 'bzip2', 'xz': 'xz'}
- compression = exts[self.uri.rsplit('.', 1)[1]]
+ exts = {"gz": "gzip", "bz2": "bzip2", "xz": "xz"}
+ compression = exts[self.uri.rsplit(".", 1)[1]]
# use tar instead of tarfile so we can easily strip leading path components
# TODO: programmatically determine how many components to strip?
cmd = [
- 'tar', '--extract', f'--{compression}', '-f', self.tarball.name,
- '--strip-components=1', '--no-same-owner', '-C', self.tempdir
+ "tar",
+ "--extract",
+ f"--{compression}",
+ "-f",
+ self.tarball.name,
+ "--strip-components=1",
+ "--no-same-owner",
+ "-C",
+ self.tempdir,
]
try:
- subprocess.run(cmd, stderr=subprocess.PIPE, check=True, encoding='utf8')
+ subprocess.run(cmd, stderr=subprocess.PIPE, check=True, encoding="utf8")
except subprocess.CalledProcessError as e:
error = e.stderr.splitlines()[0]
- raise base.SyncError(f'failed to unpack tarball: {error}')
+ raise base.SyncError(f"failed to unpack tarball: {error}")
# TODO: verify gpg data if it exists
@@ -86,4 +96,4 @@ class tar_syncer(http_syncer, base.ExternalSyncer):
# move new, unpacked repo into place
os.rename(self.tempdir, self.basedir)
except OSError as e:
- raise base.SyncError(f'failed to update repo: {e.strerror}') from e
+ raise base.SyncError(f"failed to update repo: {e.strerror}") from e
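
The tar_syncer hunk keeps the same unpack step: the archive extension selects the tar compression flag and the tarball is extracted into a staging directory with the leading path component stripped. A sketch of the equivalent invocation (all paths are placeholders):

    import subprocess

    exts = {"gz": "gzip", "bz2": "bzip2", "xz": "xz"}
    uri = "https://example.org/repo-latest.tar.xz"  # placeholder
    compression = exts[uri.rsplit(".", 1)[1]]       # -> "xz"

    subprocess.run(
        [
            "tar", "--extract", f"--{compression}",
            "-f", "/tmp/repo-latest.tar.xz",   # downloaded tarball (placeholder)
            "--strip-components=1", "--no-same-owner",
            "-C", "/tmp/.repo.update",         # staging dir (placeholder)
        ],
        check=True,
    )
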
diff --git a/src/pkgcore/system/libtool.py b/src/pkgcore/system/libtool.py
index 55b4d56e2..241d4414b 100644
--- a/src/pkgcore/system/libtool.py
+++ b/src/pkgcore/system/libtool.py
@@ -10,19 +10,17 @@ from snakeoil.sequences import stable_unique
from ..exceptions import PkgcoreException
from ..merge import triggers
-x11_sub = post_curry(partial(
- re.compile(r"X11R6/+lib").sub, "lib"), 1)
-local_sub = post_curry(partial(
- re.compile(r"local/+lib").sub, "lib"), 1)
-pkgconfig1_sub = post_curry(partial(
- re.compile(r"usr/+lib[^/]*/+pkgconfig/+\.\./\.\.").sub,
- "usr"), 1)
-pkgconfig2_sub = post_curry(partial(
- re.compile(r"usr/+lib[^/]*/+pkgconfig/+\.\.").sub,
- "usr"), 1)
+x11_sub = post_curry(partial(re.compile(r"X11R6/+lib").sub, "lib"), 1)
+local_sub = post_curry(partial(re.compile(r"local/+lib").sub, "lib"), 1)
+pkgconfig1_sub = post_curry(
+ partial(re.compile(r"usr/+lib[^/]*/+pkgconfig/+\.\./\.\.").sub, "usr"), 1
+)
+pkgconfig2_sub = post_curry(
+ partial(re.compile(r"usr/+lib[^/]*/+pkgconfig/+\.\.").sub, "usr"), 1
+)
flags_match = re.compile(
- r"-(?:mt|mthreads|kthread|Kthread|pthread"
- r"|pthreads|-thread-safe|threads)").match
+ r"-(?:mt|mthreads|kthread|Kthread|pthread" r"|pthreads|-thread-safe|threads)"
+).match
template = """# %(file)s - a libtool library file
# Generated by ltmain.sh - GNU libtool 1.5.10 (1.1220.2.130 2004/09/19 12:13:49)
@@ -31,7 +29,6 @@ template = """# %(file)s - a libtool library file
class UnknownData(PkgcoreException):
-
def __init__(self, line, token=None):
self.token, self.line = token, line
@@ -41,6 +38,7 @@ class UnknownData(PkgcoreException):
s += f"specifically token {self.token!r}"
return s
+
# libtiff.la - a libtool library file
# Generated by ltmain.sh - GNU libtool 1.5.10 (1.1220.2.130 2004/09/19 12:13:49)
def parse_lafile(handle):
@@ -82,12 +80,12 @@ def rewrite_lafile(handle, filename):
else:
libs.append(item)
elif item.startswith("-L"):
- # this is heinous, but is what the script did.
- item = x11_sub(item)
- item = local_sub(item)
- item = pkgconfig1_sub(item)
- item = pkgconfig2_sub(item)
- libladirs.append(item)
+ # this is heinous, but is what the script did.
+ item = x11_sub(item)
+ item = local_sub(item)
+ item = pkgconfig1_sub(item)
+ item = pkgconfig2_sub(item)
+ libladirs.append(item)
elif item.startswith("-R"):
rpaths.append(item)
elif flags_match(item):
@@ -103,31 +101,33 @@ def rewrite_lafile(handle, filename):
return False, None
# must be prefixed with a space
- data["dependency_libs"] = ' ' + (' '.join(libs))
+ data["dependency_libs"] = " " + (" ".join(libs))
if inherited_flags:
# must be prefixed with a space
- data["inherited_flags"] = ' ' + (' '.join(inherited_flags))
- content = "\n".join(f"{k}='{v}'" for k,v in sorted(data.items()))
- return True, template % {"content":content, "file":filename}
+ data["inherited_flags"] = " " + (" ".join(inherited_flags))
+ content = "\n".join(f"{k}='{v}'" for k, v in sorted(data.items()))
+ return True, template % {"content": content, "file": filename}
+
def fix_fsobject(location):
from ..fs import fs, livefs
+
for obj in livefs.iter_scan(location):
if not fs.isreg(obj) or not obj.basename.endswith(".la"):
continue
- with open(obj.location, 'r') as f:
+ with open(obj.location, "r") as f:
updated, content = rewrite_lafile(f, obj.basename)
if updated:
- with open(obj.location, 'w') as f:
+ with open(obj.location, "w") as f:
f.write(content)
class FixLibtoolArchivesTrigger(triggers.base):
- required_csets = ('install',)
+ required_csets = ("install",)
_engine_types = triggers.INSTALLING_MODES
- _hooks = ('pre_merge',)
+ _hooks = ("pre_merge",)
def trigger(self, engine, cset):
updates = []
@@ -143,6 +143,5 @@ class FixLibtoolArchivesTrigger(triggers.base):
source = engine.get_writable_fsobj(obj, empty=True)
source.text_fileobj(True).write(content)
# force chksums to be regenerated
- updates.append(obj.change_attributes(data=source,
- chksums=None))
+ updates.append(obj.change_attributes(data=source, chksums=None))
cset.update(updates)
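For context on what the reformatted helpers at the top of libtool.py do: each *_sub is a compiled regex sub with its replacement pre-bound through partial, and post_curry(..., 1) from snakeoil appends its bound argument after the call-time ones, so each helper performs at most one substitution per path (assuming that post_curry semantics). A rough equivalent using plain re, with example inputs invented for illustration:

import re

# x11_sub("-L/usr/X11R6/lib") behaves like:
re.compile(r"X11R6/+lib").sub("lib", "-L/usr/X11R6/lib", 1)   # '-L/usr/lib'
# local_sub("-L/usr/local/lib") behaves like:
re.compile(r"local/+lib").sub("lib", "-L/usr/local/lib", 1)   # '-L/usr/lib'

This is the rewrite that FixLibtoolArchivesTrigger applies to the -L entries of every installed .la file.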
diff --git a/src/pkgcore/test/misc.py b/src/pkgcore/test/misc.py
index d73da12ce..43cc257c1 100644
--- a/src/pkgcore/test/misc.py
+++ b/src/pkgcore/test/misc.py
@@ -29,7 +29,7 @@ class FakePkgBase(package):
for x in ("DEPEND", "RDEPEND", "PDEPEND", "IUSE", "LICENSE"):
data.setdefault(x, "")
- data.setdefault("KEYWORDS", ' '.join(default_arches))
+ data.setdefault("KEYWORDS", " ".join(default_arches))
cpv = CPV(cpvstr, versioned=True)
super().__init__(shared, repo, cpv.category, cpv.package, cpv.fullver)
@@ -37,9 +37,16 @@ class FakePkgBase(package):
class FakeProfile:
-
- def __init__(self, masked_use={}, forced_use={},
- provides={}, masks=[], virtuals={}, arch='x86', name='none'):
+ def __init__(
+ self,
+ masked_use={},
+ forced_use={},
+ provides={},
+ masks=[],
+ virtuals={},
+ arch="x86",
+ name="none",
+ ):
self.provides_repo = SimpleTree(provides)
self.masked_use = {atom(k): v for k, v in masked_use.items()}
self.forced_use = {atom(k): v for k, v in forced_use.items()}
@@ -49,20 +56,19 @@ class FakeProfile:
self.name = name
self.forced_data = collapsed_restrict_to_data(
- [(packages.AlwaysTrue, (self.arch,))],
- self.forced_use.items())
+ [(packages.AlwaysTrue, (self.arch,))], self.forced_use.items()
+ )
self.masked_data = collapsed_restrict_to_data(
- [(packages.AlwaysTrue, default_arches)],
- self.masked_use.items())
+ [(packages.AlwaysTrue, default_arches)], self.masked_use.items()
+ )
def make_virtuals_repo(self, repo):
return self.virtuals
class FakeRepo:
-
- def __init__(self, pkgs=(), repo_id='', location='', masks=(), **kwds):
+ def __init__(self, pkgs=(), repo_id="", location="", masks=(), **kwds):
self.pkgs = pkgs
self.repo_id = repo_id or location
self.location = location
@@ -97,16 +103,26 @@ class FakeRepo:
class FakeEbuildRepo(FakeRepo):
-
def __init__(self, *args, **kwds):
- self.config = kwds.pop('config', RepoConfig('nonexistent'))
+ self.config = kwds.pop("config", RepoConfig("nonexistent"))
self.trees = (self,)
super().__init__(*args, **kwds)
class FakePkg(FakePkgBase):
- def __init__(self, cpv, eapi="0", slot="0", subslot=None, iuse=None, use=(),
- repo=FakeRepo(), restrict='', keywords=None, **kwargs):
+ def __init__(
+ self,
+ cpv,
+ eapi="0",
+ slot="0",
+ subslot=None,
+ iuse=None,
+ use=(),
+ repo=FakeRepo(),
+ restrict="",
+ keywords=None,
+ **kwargs,
+ ):
if isinstance(repo, str):
repo = FakeRepo(repo)
elif isinstance(repo, (tuple, list)) and len(repo) < 3:
@@ -121,15 +137,14 @@ class FakePkg(FakePkgBase):
object.__setattr__(self, "restrict", DepSet.parse(restrict, str))
object.__setattr__(self, "fetchables", [])
object.__setattr__(self, "use", set(use))
- object.__setattr__(self, 'eapi', get_eapi(eapi, False))
+ object.__setattr__(self, "eapi", get_eapi(eapi, False))
if iuse is not None:
object.__setattr__(self, "iuse", set(iuse))
# misc setup code for generating glsas for testing
-glsa_template = \
-"""<?xml version="1.0" encoding="UTF-8"?>
+glsa_template = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE glsa SYSTEM "http://www.gentoo.org/dtd/glsa.dtd">
<?xml-stylesheet href="/xsl/glsa.xsl" type="text/xsl"?>
<?xml-stylesheet href="/xsl/guide.xsl" type="text/xsl"?>
@@ -168,10 +183,10 @@ glsa_template = \
</glsa>
"""
-ops = {'>': 'gt', '<': 'lt'}
-ops.update((k + '=', v[0] + 'e') for k, v in list(ops.items()))
-ops.update(('~' + k, 'r' + v) for k, v in list(ops.items()))
-ops['='] = 'eq'
+ops = {">": "gt", "<": "lt"}
+ops.update((k + "=", v[0] + "e") for k, v in list(ops.items()))
+ops.update(("~" + k, "r" + v) for k, v in list(ops.items()))
+ops["="] = "eq"
def convert_range(text, tag, slot):
@@ -181,7 +196,7 @@ def convert_range(text, tag, slot):
op = text[:i]
text = text[i:]
range = ops[op]
- slot = f' slot="{slot}"' if slot else ''
+ slot = f' slot="{slot}"' if slot else ""
return f'<{tag} range="{range}"{slot}>{text}</{tag}>'
@@ -190,19 +205,21 @@ def mk_glsa(*pkgs, **kwds):
if kwds:
raise TypeError("id is the only allowed kwds; got %r" % kwds)
id = str(id)
- horked = ''
+ horked = ""
for data in pkgs:
if len(data) == 4:
pkg, slot, ranges, arch = data
elif len(data) == 3:
pkg, ranges, arch = data
- slot = ''
+ slot = ""
else:
pkg, ranges = data
- slot = ''
- arch = '*'
- horked += '<package name="%s" auto="yes" arch="%s">%s%s\n</package>' \
- % (pkg, arch,
- '\n'.join(convert_range(x, 'unaffected', slot) for x in ranges[0]),
- '\n'.join(convert_range(x, 'vulnerable', slot) for x in ranges[1]))
+ slot = ""
+ arch = "*"
+ horked += '<package name="%s" auto="yes" arch="%s">%s%s\n</package>' % (
+ pkg,
+ arch,
+ "\n".join(convert_range(x, "unaffected", slot) for x in ranges[0]),
+ "\n".join(convert_range(x, "vulnerable", slot) for x in ranges[1]),
+ )
return glsa_template % (id, id, horked)
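Traced out, the two ops.update(...) comprehensions above expand the operator table that convert_range indexes into when emitting GLSA range attributes; the fully expanded mapping is:

# equivalent to the incremental construction above:
ops = {
    ">": "gt", "<": "lt",
    ">=": "ge", "<=": "le",
    "~>": "rgt", "~<": "rlt", "~>=": "rge", "~<=": "rle",
    "=": "eq",
}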
diff --git a/src/pkgcore/test/scripts/helpers.py b/src/pkgcore/test/scripts/helpers.py
index 61944b647..d5318ad2f 100644
--- a/src/pkgcore/test/scripts/helpers.py
+++ b/src/pkgcore/test/scripts/helpers.py
@@ -8,15 +8,18 @@ from ...config.hint import ConfigHint
class fake_domain:
- pkgcore_config_type = ConfigHint(typename='domain')
+ pkgcore_config_type = ConfigHint(typename="domain")
def __init__(self):
pass
-default_domain = basics.HardCodedConfigSection({
- 'class': fake_domain,
- 'default': True,
- })
+
+default_domain = basics.HardCodedConfigSection(
+ {
+ "class": fake_domain,
+ "default": True,
+ }
+)
class ArgParseMixin(argparse_helpers.ArgParseMixin):
@@ -41,8 +44,8 @@ class ArgParseMixin(argparse_helpers.ArgParseMixin):
args are passed to parse_args, keyword args are used as config keys.
"""
- ns_kwargs = kwargs.pop('ns_kwargs', {})
- namespace = kwargs.get('namespace', arghparse.Namespace(**ns_kwargs))
+ ns_kwargs = kwargs.pop("ns_kwargs", {})
+ namespace = kwargs.get("namespace", arghparse.Namespace(**ns_kwargs))
if self.has_config:
if kwargs.pop("suppress_domain", self.suppress_domain):
kwargs["default_domain"] = default_domain
diff --git a/src/pkgcore/util/commandline.py b/src/pkgcore/util/commandline.py
index 6457b3704..bb6921d9b 100644
--- a/src/pkgcore/util/commandline.py
+++ b/src/pkgcore/util/commandline.py
@@ -53,11 +53,11 @@ class StoreTarget(argparse._AppendAction):
"""
def __init__(self, *args, **kwargs):
- self.use_sets = kwargs.pop('use_sets', False)
- self.allow_ebuild_paths = kwargs.pop('allow_ebuild_paths', False)
- self.allow_external_repos = kwargs.pop('allow_external_repos', False)
- self.separator = kwargs.pop('separator', None)
- kwargs.setdefault('default', ())
+ self.use_sets = kwargs.pop("use_sets", False)
+ self.allow_ebuild_paths = kwargs.pop("allow_ebuild_paths", False)
+ self.allow_external_repos = kwargs.pop("allow_external_repos", False)
+ self.separator = kwargs.pop("separator", None)
+ kwargs.setdefault("default", ())
super().__init__(*args, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
@@ -68,39 +68,48 @@ class StoreTarget(argparse._AppendAction):
if isinstance(values, str):
values = [values]
- elif values is not None and len(values) == 1 and values[0] == '-':
+ elif values is not None and len(values) == 1 and values[0] == "-":
if not sys.stdin.isatty():
- values = [x.strip() for x in sys.stdin.readlines() if x.strip() != '']
+ values = [x.strip() for x in sys.stdin.readlines() if x.strip() != ""]
# reassign stdin to allow interactivity (currently only works for unix)
- sys.stdin = open('/dev/tty')
+ sys.stdin = open("/dev/tty")
else:
- raise argparse.ArgumentError(self, "'-' is only valid when piping data in")
+ raise argparse.ArgumentError(
+ self, "'-' is only valid when piping data in"
+ )
# override default empty tuple value to appendable list
if values:
setattr(namespace, self.dest, [])
for token in values:
- if self.use_sets and token.startswith('@'):
+ if self.use_sets and token.startswith("@"):
namespace.sets.append(token[1:])
else:
- if self.allow_ebuild_paths and token.endswith('.ebuild'):
+ if self.allow_ebuild_paths and token.endswith(".ebuild"):
try:
- repo = getattr(namespace, 'repo', namespace.domain.ebuild_repos_raw)
+ repo = getattr(
+ namespace, "repo", namespace.domain.ebuild_repos_raw
+ )
except AttributeError:
raise argparse.ArgumentTypeError(
- 'repo or domain must be defined in the namespace')
+ "repo or domain must be defined in the namespace"
+ )
if not os.path.exists(token):
- raise argparse.ArgumentError(self, f"nonexistent ebuild: {token!r}")
+ raise argparse.ArgumentError(
+ self, f"nonexistent ebuild: {token!r}"
+ )
elif not os.path.isfile(token):
raise argparse.ArgumentError(self, f"invalid ebuild: {token!r}")
if self.allow_external_repos and token not in repo:
repo_root_dir = os.path.abspath(
- pjoin(token, os.pardir, os.pardir, os.pardir))
+ pjoin(token, os.pardir, os.pardir, os.pardir)
+ )
try:
with suppress_logging():
repo = namespace.domain.add_repo(
- repo_root_dir, config=namespace.config)
+ repo_root_dir, config=namespace.config
+ )
except repo_errors.RepoError as e:
raise argparse.ArgumentError(self, f"{token!r} -- {e}")
try:
@@ -113,8 +122,8 @@ class StoreTarget(argparse._AppendAction):
except parserestrict.ParseError as e:
parser.error(e)
super().__call__(
- parser, namespace,
- (token, restriction), option_string=option_string)
+ parser, namespace, (token, restriction), option_string=option_string
+ )
CONFIG_ALL_DEFAULT = object()
@@ -136,9 +145,13 @@ class StoreConfigObject(argparse._StoreAction):
if kwargs.pop("get_default", False):
kwargs["default"] = arghparse.DelayedValue(
- partial(self.store_default, self.config_type,
- option_string=kwargs.get('option_strings', [None])[0]),
- self.priority)
+ partial(
+ self.store_default,
+ self.config_type,
+ option_string=kwargs.get("option_strings", [None])[0],
+ ),
+ self.priority,
+ )
self.store_name = kwargs.pop("store_name", False)
self.writable = kwargs.pop("writable", None)
@@ -154,36 +167,41 @@ class StoreConfigObject(argparse._StoreAction):
def _load_obj(self, sections, name):
obj_type = self.metavar if self.metavar is not None else self.config_type
- obj_type = obj_type.lower() + ' ' if obj_type is not None else ''
+ obj_type = obj_type.lower() + " " if obj_type is not None else ""
try:
val = sections[name]
except KeyError:
- choices = ', '.join(self._choices(sections))
+ choices = ", ".join(self._choices(sections))
if choices:
choices = f" (available: {choices})"
raise argparse.ArgumentError(
- self, f"couldn't find {obj_type}{name!r}{choices}")
+ self, f"couldn't find {obj_type}{name!r}{choices}"
+ )
- if self.writable and getattr(val, 'frozen', False):
- raise argparse.ArgumentError(
- self, f"{obj_type}{name!r} is readonly")
+ if self.writable and getattr(val, "frozen", False):
+ raise argparse.ArgumentError(self, f"{obj_type}{name!r} is readonly")
if self.store_name:
return name, val
return val
def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, arghparse.DelayedParse(
- partial(self._real_call, parser, namespace, values, option_string),
- self.priority))
+ setattr(
+ namespace,
+ self.dest,
+ arghparse.DelayedParse(
+ partial(self._real_call, parser, namespace, values, option_string),
+ self.priority,
+ ),
+ )
def _get_sections(self, config, namespace):
return getattr(config, self.config_type)
def _real_call(self, parser, namespace, values, option_string=None):
- config = getattr(namespace, 'config', None)
+ config = getattr(namespace, "config", None)
if config is None:
raise ValueError("no config found, internal bug")
@@ -202,10 +220,11 @@ class StoreConfigObject(argparse._StoreAction):
@staticmethod
def store_default(config_type, namespace, attr, option_string=None):
- config = getattr(namespace, 'config', None)
+ config = getattr(namespace, "config", None)
if config is None:
raise argparse.ArgumentTypeError(
- "no config found -- internal bug, or broken on disk configuration")
+ "no config found -- internal bug, or broken on disk configuration"
+ )
obj = config.get_default(config_type)
if obj is None:
known_objs = sorted(getattr(config, config_type).keys())
@@ -215,7 +234,8 @@ class StoreConfigObject(argparse._StoreAction):
else:
msg += (
"Please either fix your configuration, or set the "
- f"{config_type} via the {option_string} option.")
+ f"{config_type} via the {option_string} option."
+ )
if known_objs:
msg += f"Known {config_type}s: {', '.join(map(repr, known_objs))}"
raise NoDefaultConfigError(None, msg)
@@ -223,7 +243,7 @@ class StoreConfigObject(argparse._StoreAction):
@staticmethod
def store_all_default(config_type, namespace, attr):
- config = getattr(namespace, 'config', None)
+ config = getattr(namespace, "config", None)
if config is None:
raise ValueError("no config found -- internal bug")
obj = [(k, v) for k, v in getattr(config, config_type).items()]
@@ -234,16 +254,15 @@ class StoreConfigObject(argparse._StoreAction):
if priority is None:
priority = cls.default_priority
return arghparse.DelayedValue(
- partial(cls._lazy_load_object, config_type, key),
- priority)
+ partial(cls._lazy_load_object, config_type, key), priority
+ )
@staticmethod
def _lazy_load_object(config_type, key, namespace, attr):
try:
obj = getattr(namespace.config, config_type)[key]
except KeyError:
- raise argparse.ArgumentError(
- None, f"couldn't find {config_type} {attr!r}")
+ raise argparse.ArgumentError(None, f"couldn't find {config_type} {attr!r}")
setattr(namespace, attr, obj)
@@ -253,29 +272,30 @@ class StoreRepoObject(StoreConfigObject):
# mapping between supported repo type requests and the related attr on
# domain objects to pull the requested repos from
valid_repo_types = {
- 'config': 'repo_configs',
- 'all': 'repos',
- 'all-raw': 'repos_raw',
- 'source': 'source_repos',
- 'source-raw': 'source_repos_raw',
- 'installed': 'installed_repos',
- 'installed-raw': 'installed_repos_raw',
- 'unfiltered': 'unfiltered_repos',
- 'ebuild': 'ebuild_repos',
- 'ebuild-unfiltered': 'ebuild_repos_unfiltered',
- 'ebuild-raw': 'ebuild_repos_raw',
- 'binary': 'binary_repos',
- 'binary-unfiltered': 'binary_repos_unfiltered',
- 'binary-raw': 'binary_repos_raw',
+ "config": "repo_configs",
+ "all": "repos",
+ "all-raw": "repos_raw",
+ "source": "source_repos",
+ "source-raw": "source_repos_raw",
+ "installed": "installed_repos",
+ "installed-raw": "installed_repos_raw",
+ "unfiltered": "unfiltered_repos",
+ "ebuild": "ebuild_repos",
+ "ebuild-unfiltered": "ebuild_repos_unfiltered",
+ "ebuild-raw": "ebuild_repos_raw",
+ "binary": "binary_repos",
+ "binary-unfiltered": "binary_repos_unfiltered",
+ "binary-raw": "binary_repos_raw",
}
def __init__(self, *args, **kwargs):
- if 'config_type' in kwargs:
+ if "config_type" in kwargs:
raise ValueError(
"StoreRepoObject: config_type keyword is redundant: got %s"
- % (kwargs['config_type'],))
+ % (kwargs["config_type"],)
+ )
- self.repo_type = kwargs.pop('repo_type', 'all')
+ self.repo_type = kwargs.pop("repo_type", "all")
if self.repo_type not in self.valid_repo_types:
raise argparse.ArgumentTypeError(f"unknown repo type: {self.repo_type!r}")
self.repo_key = self.valid_repo_types[self.repo_type]
@@ -285,22 +305,26 @@ class StoreRepoObject(StoreConfigObject):
unknown_aliases = self.allow_aliases.difference(self.valid_repo_types)
if unknown_aliases:
raise argparse.ArgumentTypeError(
- 'unknown repo alias%s: %s' % (
- pluralism(unknown_aliases, plural='es'), ', '.join(unknown_aliases)))
-
- if self.repo_type == 'config':
- kwargs['config_type'] = 'repo_config'
+ "unknown repo alias%s: %s"
+ % (
+ pluralism(unknown_aliases, plural="es"),
+ ", ".join(unknown_aliases),
+ )
+ )
+
+ if self.repo_type == "config":
+ kwargs["config_type"] = "repo_config"
else:
- kwargs['config_type'] = 'repo'
+ kwargs["config_type"] = "repo"
self.allow_name_lookup = kwargs.pop("allow_name_lookup", True)
self.allow_external_repos = kwargs.pop("allow_external_repos", False)
super().__init__(*args, **kwargs)
def _get_sections(self, config, namespace):
- domain = getattr(namespace, 'domain', None)
+ domain = getattr(namespace, "domain", None)
# return repo config objects
- if domain is None or self.repo_type == 'config':
+ if domain is None or self.repo_type == "config":
return StoreConfigObject._get_sections(self, config, namespace)
self.config = config
@@ -316,8 +340,8 @@ class StoreRepoObject(StoreConfigObject):
If a repo doesn't have a proper location just the name is returned.
"""
for repo_name, repo in sorted(unstable_unique(sections.items())):
- repo_name = getattr(repo, 'repo_id', repo_name)
- if hasattr(repo, 'location'):
+ repo_name = getattr(repo, "repo_id", repo_name)
+ if hasattr(repo, "location"):
yield f"{repo_name}:{repo.location}"
else:
yield repo_name
@@ -340,24 +364,24 @@ class StoreRepoObject(StoreConfigObject):
# try to add it as an external repo
if self.allow_external_repos and os.path.exists(repo):
try:
- configure = not self.repo_type.endswith('-raw')
+ configure = not self.repo_type.endswith("-raw")
with suppress_logging():
repo_obj = self.domain.add_repo(
- repo, config=self.config, configure=configure)
+ repo, config=self.config, configure=configure
+ )
repo = repo_obj.location
except repo_errors.RepoError as e:
raise argparse.ArgumentError(self, e)
- if hasattr(self.domain, '_' + self.repo_key):
+ if hasattr(self.domain, "_" + self.repo_key):
# force JIT-ed attr refresh to include newly added repo
- setattr(self.domain, '_' + self.repo_key, None)
+ setattr(self.domain, "_" + self.repo_key, None)
sections = getattr(self.domain, self.repo_key)
return StoreConfigObject._load_obj(self, sections, repo)
class DomainFromPath(StoreConfigObject):
-
def __init__(self, *args, **kwargs):
- kwargs['config_type'] = 'domain'
+ kwargs["config_type"] = "domain"
super().__init__(*args, **kwargs)
def _load_obj(self, sections, requested_path):
@@ -366,15 +390,16 @@ class DomainFromPath(StoreConfigObject):
raise ValueError(f"couldn't find domain at path {requested_path!r}")
elif len(targets) != 1:
raise ValueError(
- "multiple domains claim root %r: domains %s" %
- (requested_path, ', '.join(repr(x[0]) for x in targets)))
+ "multiple domains claim root %r: domains %s"
+ % (requested_path, ", ".join(repr(x[0]) for x in targets))
+ )
return targets[0][1]
def find_domains_from_path(sections, path):
path = normpath(abspath(path))
for name, domain in sections.items():
- root = getattr(domain, 'root', None)
+ root = getattr(domain, "root", None)
if root is None:
continue
root = normpath(abspath(root))
@@ -383,23 +408,24 @@ def find_domains_from_path(sections, path):
class BooleanQuery(arghparse.DelayedValue):
-
def __init__(self, attrs, klass_type=None, priority=100, converter=None):
- if klass_type == 'and':
+ if klass_type == "and":
self.klass = packages.AndRestriction
- elif klass_type == 'or':
+ elif klass_type == "or":
self.klass = packages.OrRestriction
elif callable(klass_type):
self.klass = klass_type
else:
raise ValueError(
"klass_type either needs to be 'or', 'and', "
- f"or a callable. Got {klass_type!r}")
+ f"or a callable. Got {klass_type!r}"
+ )
if converter is not None and not callable(converter):
raise ValueError(
"converter either needs to be None, or a callable;"
- f" got {converter!r}")
+ f" got {converter!r}"
+ )
self.converter = converter
self.priority = int(priority)
@@ -442,22 +468,24 @@ def make_query(parser, *args, **kwargs):
attrs = kwargs.pop("attrs", [])
subattr = f"_{dest}"
kwargs["dest"] = subattr
- if kwargs.get('type', False) is None:
- del kwargs['type']
+ if kwargs.get("type", False) is None:
+ del kwargs["type"]
else:
+
def query(value):
return parserestrict.parse_match(value)
+
kwargs.setdefault("type", query)
- if kwargs.get('metavar', False) is None:
- del kwargs['metavar']
+ if kwargs.get("metavar", False) is None:
+ del kwargs["metavar"]
else:
kwargs.setdefault("metavar", dest)
final_priority = kwargs.pop("final_priority", None)
final_converter = kwargs.pop("final_converter", None)
parser.add_argument(*args, **kwargs)
- bool_kwargs = {'converter': final_converter}
+ bool_kwargs = {"converter": final_converter}
if final_priority is not None:
- bool_kwargs['priority'] = final_priority
+ bool_kwargs["priority"] = final_priority
obj = BooleanQuery(list(attrs) + [subattr], klass_type=klass_type, **bool_kwargs)
# note that dict expansion has to be used here; dest=obj would just set a
# default named 'dest'
@@ -488,6 +516,7 @@ def register_command(commands, real_type=type):
o = real_type(name, bases, scope)
commands.append(o)
return o
+
return f
@@ -495,19 +524,24 @@ def store_config(namespace, attr, global_config=()):
config = load_config(
prepend_sources=tuple(global_config),
location=namespace.config_path,
- **vars(namespace))
+ **vars(namespace),
+ )
setattr(namespace, attr, config)
def _mk_domain(parser, help=True):
parser.add_argument(
- '--domain', get_default=True, config_type='domain',
+ "--domain",
+ get_default=True,
+ config_type="domain",
action=StoreConfigObject,
- help='custom pkgcore domain to use for this operation' if help else argparse.SUPPRESS)
+ help="custom pkgcore domain to use for this operation"
+ if help
+ else argparse.SUPPRESS,
+ )
class _SubParser(arghparse._SubParser):
-
def add_parser(self, name, config=False, domain=False, **kwds):
"""Suppress config and domain options in subparsers by default.
@@ -520,26 +554,36 @@ class _ConfigArg(argparse._StoreAction):
"""Store given config path location or use the stub config when disabled."""
def __call__(self, parser, namespace, value, option_string=None):
- if value.lower() in {'false', 'no', 'n'}:
- path = pjoin(const.DATA_PATH, 'stubconfig')
+ if value.lower() in {"false", "no", "n"}:
+ path = pjoin(const.DATA_PATH, "stubconfig")
else:
path = arghparse.existent_path(value)
setattr(namespace, self.dest, path)
class ArgumentParser(arghparse.ArgumentParser):
-
- def __init__(self, suppress=False, help=True, config=True,
- domain=True, global_config=(), **kwds):
+ def __init__(
+ self,
+ suppress=False,
+ help=True,
+ config=True,
+ domain=True,
+ global_config=(),
+ **kwds,
+ ):
super().__init__(suppress=suppress, **kwds)
- self.register('action', 'parsers', _SubParser)
+ self.register("action", "parsers", _SubParser)
if not suppress:
- config_opts = self.add_argument_group('config options')
+ config_opts = self.add_argument_group("config options")
if config:
config_opts.add_argument(
- '--config', action=_ConfigArg, dest='config_path',
- help='use custom config or skip loading system config' if help else argparse.SUPPRESS,
+ "--config",
+ action=_ConfigArg,
+ dest="config_path",
+ help="use custom config or skip loading system config"
+ if help
+ else argparse.SUPPRESS,
docs="""
The path to a custom pkgcore config file or portage
config directory can be given to override loading the
@@ -547,10 +591,14 @@ class ArgumentParser(arghparse.ArgumentParser):
Alternatively, an argument of 'false' or 'no' will skip
loading the system config entirely if one exists.
- """)
+ """,
+ )
- self.set_defaults(config=arghparse.DelayedValue(
- partial(store_config, global_config=global_config)))
+ self.set_defaults(
+ config=arghparse.DelayedValue(
+ partial(store_config, global_config=global_config)
+ )
+ )
if domain:
_mk_domain(config_opts, help)
@@ -574,4 +622,4 @@ class Tool(tool.Tool):
"""Pass down pkgcore-specific settings to the bash side."""
# pass down verbosity level to affect debug output
if self.parser.debug:
- os.environ['PKGCORE_DEBUG'] = str(self.parser.verbosity)
+ os.environ["PKGCORE_DEBUG"] = str(self.parser.verbosity)
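A standalone sketch of the '-' target convention StoreTarget implements above (plain argparse, not pkgcore's action class, and the argument name is made up): when the sole target is '-' and stdin is a pipe, one target per line is read from stdin, and stdin is then re-pointed at the controlling terminal so later prompts can still be interactive.

import argparse, sys

parser = argparse.ArgumentParser()
parser.add_argument("targets", nargs="*")
args = parser.parse_args()

if args.targets == ["-"]:
    if sys.stdin.isatty():
        parser.error("'-' is only valid when piping data in")
    args.targets = [line.strip() for line in sys.stdin if line.strip()]
    sys.stdin = open("/dev/tty")  # unix only, mirrors the code above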
diff --git a/src/pkgcore/util/file_type.py b/src/pkgcore/util/file_type.py
index 431e71d32..7f5ea1211 100644
--- a/src/pkgcore/util/file_type.py
+++ b/src/pkgcore/util/file_type.py
@@ -6,7 +6,6 @@ from snakeoil.process.spawn import spawn_get_output
class file_identifier:
-
def __init__(self, force_binary=False):
if force_binary:
self.func = self._fallback_file
@@ -22,7 +21,7 @@ class file_identifier:
import magic
except ImportError:
return self._fallback_file
- if hasattr(magic, 'MAGIC_NONE'):
+ if hasattr(magic, "MAGIC_NONE"):
# <5.05 of file
magic_const = magic.MAGIC_NONE
else:
@@ -35,7 +34,7 @@ class file_identifier:
except IGNORED_EXCEPTIONS:
raise
except Exception:
- pass # POS of library.
+ pass # POS of library.
return self._fallback_file
@staticmethod
@@ -43,9 +42,9 @@ class file_identifier:
ret, out = spawn_get_output(["file", path])
if ret != 0:
raise ValueError(f"file output was non zero- ret:{ret!r} out:{out!r}")
- out = ''.join(out)
+ out = "".join(out)
if out.startswith(path):
- out = out[len(path):]
+ out = out[len(path) :]
if out.startswith(":"):
out = out[1:]
return out
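The fallback above shells out to file(1) and then strips the echoed path prefix; for example (the file output text is illustrative):

out = "/bin/ls: ELF 64-bit LSB pie executable, x86-64 ..."
path = "/bin/ls"
if out.startswith(path):
    out = out[len(path):]
if out.startswith(":"):
    out = out[1:]
# out is now " ELF 64-bit LSB pie executable, x86-64 ..." (leading space preserved)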
diff --git a/src/pkgcore/util/packages.py b/src/pkgcore/util/packages.py
index b93a7255b..b41660b09 100644
--- a/src/pkgcore/util/packages.py
+++ b/src/pkgcore/util/packages.py
@@ -12,5 +12,5 @@ def get_raw_pkg(pkg):
def groupby_pkg(iterable):
- for key, pkgs in itertools.groupby(iterable, attrgetter('key')):
+ for key, pkgs in itertools.groupby(iterable, attrgetter("key")):
yield pkgs
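Worth keeping in mind when reading groupby_pkg: itertools.groupby only merges consecutive items with equal keys, so the incoming iterable has to already be ordered by package key for each package to come out as a single group. For illustration:

import itertools
from operator import attrgetter
from collections import namedtuple

Pkg = namedtuple("Pkg", "key version")
pkgs = [Pkg("dev-lang/python", "3.10"), Pkg("dev-lang/python", "3.11"), Pkg("sys-apps/sed", "4.8")]

groups = [[p.version for p in g] for _, g in itertools.groupby(pkgs, attrgetter("key"))]
# groups == [['3.10', '3.11'], ['4.8']]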
diff --git a/src/pkgcore/util/parserestrict.py b/src/pkgcore/util/parserestrict.py
index 4cdb986be..5fc022f43 100644
--- a/src/pkgcore/util/parserestrict.py
+++ b/src/pkgcore/util/parserestrict.py
@@ -26,25 +26,29 @@ def comma_separated_containment(attr, values_kls=frozenset, token_kls=str):
returns a :obj:`packages.PackageRestriction` matching packages that
have any of those values in the attribute passed to this function.
"""
+
def _parse(value):
return packages.PackageRestriction(
- attr, values.ContainmentMatch(
- values_kls(token_kls(piece.strip()) for piece in value.split(','))
- )
+ attr,
+ values.ContainmentMatch(
+ values_kls(token_kls(piece.strip()) for piece in value.split(","))
+ ),
)
+
return _parse
def convert_glob(token):
- if token in ('*', ''):
+ if token in ("*", ""):
return None
- elif '*' not in token:
+ elif "*" not in token:
return values.StrExactMatch(token)
elif not valid_globbing(token):
raise ParseError(
"globs must be composed of [\\w-.+], with optional "
- f"'*'- {token!r} is disallowed however")
- pattern = re.escape(token).replace('\\*', '.*')
+ f"'*'- {token!r} is disallowed however"
+ )
+ pattern = re.escape(token).replace("\\*", ".*")
pattern = f"^{pattern}$"
return values.StrRegex(pattern, match=True)
@@ -88,23 +92,24 @@ def parse_match(text):
orig_text = text = text.strip()
if "!" in text:
raise ParseError(
- f"'!' or any form of blockers make no sense in this usage: {text!r}")
+ f"'!' or any form of blockers make no sense in this usage: {text!r}"
+ )
restrictions = []
- if '::' in text:
- text, repo_id = text.rsplit('::', 1)
+ if "::" in text:
+ text, repo_id = text.rsplit("::", 1)
restrictions.append(restricts.RepositoryDep(repo_id))
- if ':' in text:
- text, slot = text.rsplit(':', 1)
- slot, _sep, subslot = slot.partition('/')
+ if ":" in text:
+ text, slot = text.rsplit(":", 1)
+ slot, _sep, subslot = slot.partition("/")
if slot:
- if '*' in slot:
+ if "*" in slot:
if r := convert_glob(slot):
restrictions.append(packages.PackageRestriction("slot", r))
else:
restrictions.append(restricts.SlotDep(slot))
if subslot:
- if '*' in subslot:
+ if "*" in subslot:
if r := convert_glob(subslot):
restrictions.append(packages.PackageRestriction("subslot", r))
else:
@@ -124,12 +129,17 @@ def parse_match(text):
return packages.AndRestriction(*restrictions)
elif text.startswith("*"):
raise ParseError(
- f"cannot do prefix glob matches with version ops: {orig_text}")
+ f"cannot do prefix glob matches with version ops: {orig_text}"
+ )
# ok... fake category. whee.
try:
- r = list(collect_package_restrictions(
- atom.atom(f"{ops}category/{text}").restrictions,
- attrs=("category",), invert=True))
+ r = list(
+ collect_package_restrictions(
+ atom.atom(f"{ops}category/{text}").restrictions,
+ attrs=("category",),
+ invert=True,
+ )
+ )
except errors.MalformedAtom as e:
e.atom = orig_text
raise ParseError(str(e)) from e
@@ -137,12 +147,12 @@ def parse_match(text):
return r[0]
restrictions.extend(r)
return packages.AndRestriction(*restrictions)
- elif text[0] in atom.valid_ops or '*' not in text:
+ elif text[0] in atom.valid_ops or "*" not in text:
# possibly a valid atom object
try:
return atom.atom(orig_text)
except errors.MalformedAtom as e:
- if '*' not in text:
+ if "*" not in text:
raise ParseError(str(e)) from e
# support globbed targets with version restrictions
return packages.AndRestriction(*parse_globbed_version(text, orig_text))
@@ -155,10 +165,12 @@ def parse_match(text):
elif not r[1]:
restrictions.append(packages.PackageRestriction("category", r[0]))
else:
- restrictions.extend((
- packages.PackageRestriction("category", r[0]),
- packages.PackageRestriction("package", r[1]),
- ))
+ restrictions.extend(
+ (
+ packages.PackageRestriction("category", r[0]),
+ packages.PackageRestriction("package", r[1]),
+ )
+ )
if len(restrictions) == 1:
return restrictions[0]
return packages.AndRestriction(*restrictions)
@@ -173,18 +185,19 @@ def parse_globbed_version(text, orig_text):
restrictions = []
# find longest matching op
op = max(x for x in atom.valid_ops if text.startswith(x))
- text = text[len(op):]
+ text = text[len(op) :]
# determine pkg version
- chunks = text.rsplit('-', 1)
+ chunks = text.rsplit("-", 1)
if len(chunks) == 1:
- raise ParseError(f'missing valid package version: {orig_text!r}')
+ raise ParseError(f"missing valid package version: {orig_text!r}")
version_txt = chunks[-1]
version = cpv.isvalid_version_re.match(version_txt)
if not version:
- if '*' in version_txt:
+ if "*" in version_txt:
raise ParseError(
- f'operator {op!r} invalid with globbed version: {version_txt!r}')
- raise ParseError(f'missing valid package version: {orig_text!r}')
+ f"operator {op!r} invalid with globbed version: {version_txt!r}"
+ )
+ raise ParseError(f"missing valid package version: {orig_text!r}")
restrictions.append(restricts.VersionMatch(op, version.group(0)))
# parse the remaining chunk
restrictions.append(parse_match(chunks[0]))
@@ -204,7 +217,8 @@ def parse_pv(repo, text):
for match in repo.itermatch(restrict):
if result is not None:
raise ParseError(
- f"multiple matches for {text} ({result.cpvstr}, {match.cpvstr})")
+ f"multiple matches for {text} ({result.cpvstr}, {match.cpvstr})"
+ )
result = match
if result is None:
raise ParseError(f"no matches for {text}")
@@ -212,5 +226,5 @@ def parse_pv(repo, text):
parse_funcs = {
- 'match': parse_match,
+ "match": parse_match,
}
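To make the reformatted parsing above concrete: parse_match peels ::repo and then :slot/subslot off the right-hand side before looking at the name part, and convert_glob turns a shell-style glob into an anchored regex. A worked trace (not additional API):

import re

# convert_glob("pyth*") builds the pattern handed to values.StrRegex:
pattern = re.escape("pyth*").replace("\\*", ".*")
assert f"^{pattern}$" == "^pyth.*$"

So, roughly, parse_match("dev-lang/pyth*::gentoo") collects a RepositoryDep for "gentoo", an exact match on the "dev-lang" category, and a regex match of package names against ^pyth.*$, returning them combined in an AndRestriction; a plain non-globbed token such as "dev-lang/python:3.11::gentoo" instead falls through to atom.atom() and is returned as an ordinary atom.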
diff --git a/src/pkgcore/util/thread_pool.py b/src/pkgcore/util/thread_pool.py
index 5c2aab6c0..6141e02c6 100644
--- a/src/pkgcore/util/thread_pool.py
+++ b/src/pkgcore/util/thread_pool.py
@@ -26,7 +26,7 @@ def map_async(iterable, functor, *args, **kwds):
if parallelism is None:
parallelism = cpu_count()
- if hasattr(iterable, '__len__'):
+ if hasattr(iterable, "__len__"):
# if there are less items than parallelism, don't
# spawn pointless threads.
parallelism = max(min(len(iterable), parallelism), 0)
diff --git a/src/pkgcore/vdb/contents.py b/src/pkgcore/vdb/contents.py
index ac10dff28..4590d1999 100644
--- a/src/pkgcore/vdb/contents.py
+++ b/src/pkgcore/vdb/contents.py
@@ -22,8 +22,9 @@ class LookupFsDev(fs.fsDev):
st = os.lstat(path)
except FileNotFoundError:
st = None
- if st is None or any(f(st.st_mode) for f in
- (stat.S_ISREG, stat.S_ISDIR, stat.S_ISFIFO)):
+ if st is None or any(
+ f(st.st_mode) for f in (stat.S_ISREG, stat.S_ISDIR, stat.S_ISFIFO)
+ ):
kwds["strict"] = True
else:
major, minor = fs.get_major_minor(st)
@@ -67,8 +68,11 @@ class ContentsFile(contentsSet):
if isinstance(self._source, str):
if write:
return AtomicWriteFile(
- self._source, uid=os_data.root_uid,
- gid=os_data.root_gid, perms=0o644)
+ self._source,
+ uid=os_data.root_uid,
+ gid=os_data.root_gid,
+ perms=0o644,
+ )
return readlines_utf8(self._source, True)
fobj = self._source.text_fileobj(writable=write)
if write:
@@ -86,23 +90,30 @@ class ContentsFile(contentsSet):
continue
s = line.split(" ")
if s[0] in ("dir", "dev", "fif"):
- path = ' '.join(s[1:])
- if s[0] == 'dir':
+ path = " ".join(s[1:])
+ if s[0] == "dir":
obj = fs.fsDir(path, strict=False)
- elif s[0] == 'dev':
+ elif s[0] == "dev":
obj = LookupFsDev(path, strict=False)
else:
obj = fs.fsFifo(path, strict=False)
elif s[0] == "obj":
- path = ' '.join(s[1:-2])
+ path = " ".join(s[1:-2])
obj = fs.fsFile(
- path, chksums={"md5":int(s[-2], 16)},
- mtime=int(s[-1]), strict=False)
+ path,
+ chksums={"md5": int(s[-2], 16)},
+ mtime=int(s[-1]),
+ strict=False,
+ )
elif s[0] == "sym":
try:
p = s.index("->")
- obj = fs.fsLink(' '.join(s[1:p]), ' '.join(s[p+1:-1]),
- mtime=int(s[-1]), strict=False)
+ obj = fs.fsLink(
+ " ".join(s[1:p]),
+ " ".join(s[p + 1 : -1]),
+ mtime=int(s[-1]),
+ strict=False,
+ )
except ValueError:
# XXX throw a corruption error
@@ -113,7 +124,7 @@ class ContentsFile(contentsSet):
yield obj
def _write(self):
- md5_handler = get_handler('md5')
+ md5_handler = get_handler("md5")
outfile = None
try:
outfile = self._get_fd(True)
@@ -121,13 +132,19 @@ class ContentsFile(contentsSet):
for obj in sorted(self):
if obj.is_reg:
- s = " ".join(("obj", obj.location,
- md5_handler.long2str(obj.chksums["md5"]),
- str(int(obj.mtime))))
+ s = " ".join(
+ (
+ "obj",
+ obj.location,
+ md5_handler.long2str(obj.chksums["md5"]),
+ str(int(obj.mtime)),
+ )
+ )
elif obj.is_sym:
- s = " ".join(("sym", obj.location, "->",
- obj.target, str(int(obj.mtime))))
+ s = " ".join(
+ ("sym", obj.location, "->", obj.target, str(int(obj.mtime)))
+ )
elif obj.is_dir:
s = "dir " + obj.location
diff --git a/src/pkgcore/vdb/ondisk.py b/src/pkgcore/vdb/ondisk.py
index 05a13f20f..721d03495 100644
--- a/src/pkgcore/vdb/ondisk.py
+++ b/src/pkgcore/vdb/ondisk.py
@@ -31,13 +31,18 @@ class tree(prototype.tree):
operations_kls = repo_ops.operations
pkgcore_config_type = ConfigHint(
- {'location': 'str',
- 'cache_location': 'str', 'repo_id': 'str',
- 'disable_cache': 'bool'},
- typename='repo')
-
- def __init__(self, location, cache_location=None, repo_id='vdb',
- disable_cache=False):
+ {
+ "location": "str",
+ "cache_location": "str",
+ "repo_id": "str",
+ "disable_cache": "bool",
+ },
+ typename="repo",
+ )
+
+ def __init__(
+ self, location, cache_location=None, repo_id="vdb", disable_cache=False
+ ):
super().__init__(frozen=False)
self.repo_id = repo_id
self.location = location
@@ -50,15 +55,17 @@ class tree(prototype.tree):
try:
st = os.stat(self.location)
if not stat.S_ISDIR(st.st_mode):
- raise errors.InitializationError(
- f"base not a dir: {self.location!r}")
+ raise errors.InitializationError(f"base not a dir: {self.location!r}")
elif not st.st_mode & (os.X_OK | os.R_OK):
raise errors.InitializationError(
- f"base lacks read/executable: {self.location!r}")
+ f"base lacks read/executable: {self.location!r}"
+ )
except FileNotFoundError:
pass
except OSError as e:
- raise errors.InitializationError(f'lstat failed on base: {self.location!r}') from e
+ raise errors.InitializationError(
+ f"lstat failed on base: {self.location!r}"
+ ) from e
self.package_class = self.package_factory(self)
@@ -71,8 +78,9 @@ class tree(prototype.tree):
return {}
try:
try:
- return tuple(x for x in listdir_dirs(self.location) if not
- x.startswith('.'))
+ return tuple(
+ x for x in listdir_dirs(self.location) if not x.startswith(".")
+ )
except EnvironmentError as e:
raise KeyError(f"failed fetching categories: {e}") from e
finally:
@@ -85,36 +93,43 @@ class tree(prototype.tree):
bad = False
try:
for x in listdir_dirs(cpath):
- if x.startswith(".tmp.") or x.endswith(".lockfile") \
- or x.startswith("-MERGING-"):
+ if (
+ x.startswith(".tmp.")
+ or x.endswith(".lockfile")
+ or x.startswith("-MERGING-")
+ ):
continue
try:
- pkg = VersionedCPV(f'{category}/{x}')
+ pkg = VersionedCPV(f"{category}/{x}")
except InvalidCPV:
bad = True
if bad or not pkg.fullver:
- if '-scm' in x:
- bad = 'scm'
- elif '-try' in x:
- bad = 'try'
+ if "-scm" in x:
+ bad = "scm"
+ elif "-try" in x:
+ bad = "try"
else:
- raise InvalidCPV(f'{category}/{x}', 'no version component')
+ raise InvalidCPV(f"{category}/{x}", "no version component")
logger.error(
- f'merged -{bad} pkg detected: {category}/{x}. '
- f'throwing exception due to -{bad} not being a valid'
- ' version component. Silently ignoring that '
- 'specific version is not viable either since it '
- 'would result in pkgcore stomping whatever it was '
- f'that -{bad} version merged. '
- 'Use the offending pkg manager that merged it to '
- 'unmerge it.')
+ f"merged -{bad} pkg detected: {category}/{x}. "
+ f"throwing exception due to -{bad} not being a valid"
+ " version component. Silently ignoring that "
+ "specific version is not viable either since it "
+ "would result in pkgcore stomping whatever it was "
+ f"that -{bad} version merged. "
+ "Use the offending pkg manager that merged it to "
+ "unmerge it."
+ )
raise InvalidCPV(
- f'{category}/{x}', f'{bad} version component is not standard.')
+ f"{category}/{x}", f"{bad} version component is not standard."
+ )
l.add(pkg.package)
d.setdefault((category, pkg.package), []).append(pkg.fullver)
except EnvironmentError as e:
category = pjoin(self.location, category.lstrip(os.path.sep))
- raise KeyError(f'failed fetching packages for category {category}: {e}') from e
+ raise KeyError(
+ f"failed fetching packages for category {category}: {e}"
+ ) from e
self._versions_tmp_cache.update(d)
return tuple(l)
@@ -131,17 +146,25 @@ class tree(prototype.tree):
return pjoin(self.location, pkg.category, s)
_metadata_rewrites = {
- "bdepend": "BDEPEND", "depend": "DEPEND", "rdepend": "RDEPEND", "pdepend": "PDEPEND",
+ "bdepend": "BDEPEND",
+ "depend": "DEPEND",
+ "rdepend": "RDEPEND",
+ "pdepend": "PDEPEND",
"idepend": "IDEPEND",
- "use": "USE", "eapi": "EAPI", "CONTENTS": "contents",
- "source_repository": "repository", "fullslot": "SLOT",
+ "use": "USE",
+ "eapi": "EAPI",
+ "CONTENTS": "contents",
+ "source_repository": "repository",
+ "fullslot": "SLOT",
}
def _get_metadata(self, pkg):
return IndeterminantDict(
- partial(self._internal_load_key, pjoin(
- self.location, pkg.category,
- f"{pkg.package}-{pkg.fullver}")))
+ partial(
+ self._internal_load_key,
+ pjoin(self.location, pkg.category, f"{pkg.package}-{pkg.fullver}"),
+ )
+ )
def _internal_load_key(self, path, key):
key = self._metadata_rewrites.get(key, key)
@@ -149,28 +172,28 @@ class tree(prototype.tree):
data = ContentsFile(pjoin(path, "CONTENTS"), mutable=True)
elif key == "environment":
fp = pjoin(path, key)
- if not os.path.exists(f'{fp}.bz2'):
+ if not os.path.exists(f"{fp}.bz2"):
if not os.path.exists(fp):
# icky.
raise KeyError("environment: no environment file found")
data = data_source.local_source(fp)
else:
- data = data_source.bz2_source(f'{fp}.bz2')
- elif key == 'ebuild':
- fp = pjoin(path, os.path.basename(path.rstrip(os.path.sep)) + '.ebuild')
+ data = data_source.bz2_source(f"{fp}.bz2")
+ elif key == "ebuild":
+ fp = pjoin(path, os.path.basename(path.rstrip(os.path.sep)) + ".ebuild")
data = data_source.local_source(fp)
- elif key == 'repo':
+ elif key == "repo":
# try both, for portage/paludis compatibility.
- data = readfile(pjoin(path, 'repository'), True)
+ data = readfile(pjoin(path, "repository"), True)
if data is None:
- data = readfile(pjoin(path, 'REPOSITORY'), True)
+ data = readfile(pjoin(path, "REPOSITORY"), True)
if data is None:
raise KeyError(key)
else:
data = readfile(pjoin(path, key), True)
if data is None:
raise KeyError((path, key))
- data = data.rstrip('\n')
+ data = data.rstrip("\n")
return data
def notify_remove_package(self, pkg):
@@ -200,8 +223,8 @@ class _WrappedInstalledPkg(pkg_base.wrapper):
def __str__(self):
return (
- f'installed pkg: {self.cpvstr}::{self.repo.repo_id}, '
- f'source repo {self.source_repository!r}'
+ f"installed pkg: {self.cpvstr}::{self.repo.repo_id}, "
+ f"source repo {self.source_repository!r}"
)
@@ -221,4 +244,5 @@ class ConfiguredTree(wrapper.tree, tree):
def _generate_operations(self, domain, pkg, **kwargs):
pkg = pkg._raw_pkg
return ebd.built_operations(
- domain, pkg, initial_env=self.domain_settings, **kwargs)
+ domain, pkg, initial_env=self.domain_settings, **kwargs
+ )
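For orientation, the tree above walks a vdb laid out as one directory per installed package under location (conventionally /var/db/pkg), for example:

<location>/dev-lang/python-3.11.1/
    CONTENTS              # parsed via ContentsFile
    environment.bz2       # preferred over a plain 'environment' file
    python-3.11.1.ebuild  # served for the 'ebuild' key
    repository            # or REPOSITORY, for portage/paludis compatibility
    SLOT EAPI USE RDEPEND ...   # plain-text keys, names mapped via _metadata_rewrites

Temporary entries (".tmp.*", "*.lockfile", "-MERGING-*") are skipped during listing, and leftover "-scm"/"-try" versions are reported as InvalidCPV rather than silently ignored.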
diff --git a/src/pkgcore/vdb/repo_ops.py b/src/pkgcore/vdb/repo_ops.py
index 45bfd7077..6042ad486 100644
--- a/src/pkgcore/vdb/repo_ops.py
+++ b/src/pkgcore/vdb/repo_ops.py
@@ -28,7 +28,6 @@ def update_mtime(path, timestamp=None):
class install(repo_ops.install):
-
def __init__(self, repo, newpkg, observer):
base = pjoin(repo.location, newpkg.category)
dirname = f"{newpkg.package}-{newpkg.fullver}"
@@ -44,32 +43,32 @@ class install(repo_ops.install):
rewrite = self.repo._metadata_rewrites
for k in self.new_pkg.tracked_attributes:
if k == "contents":
- v = ContentsFile(pjoin(dirpath, "CONTENTS"),
- mutable=True, create=True)
+ v = ContentsFile(pjoin(dirpath, "CONTENTS"), mutable=True, create=True)
v.update(self.new_pkg.contents)
v.flush()
elif k == "environment":
- data = compression.compress_data('bzip2',
- self.new_pkg.environment.bytes_fileobj().read())
+ data = compression.compress_data(
+ "bzip2", self.new_pkg.environment.bytes_fileobj().read()
+ )
with open(pjoin(dirpath, "environment.bz2"), "wb") as f:
f.write(data)
del data
else:
v = getattr(self.new_pkg, k)
- if k in ('bdepend', 'depend', 'rdepend', 'idepend'):
+ if k in ("bdepend", "depend", "rdepend", "idepend"):
s = v.slotdep_str(domain)
- elif k == 'user_patches':
- s = '\n'.join(chain.from_iterable(files for _, files in v))
+ elif k == "user_patches":
+ s = "\n".join(chain.from_iterable(files for _, files in v))
elif not isinstance(v, str):
try:
- s = ' '.join(v)
+ s = " ".join(v)
except TypeError:
s = str(v)
else:
s = v
with open(pjoin(dirpath, rewrite.get(k, k.upper())), "w", 32768) as f:
if s:
- s += '\n'
+ s += "\n"
f.write(s)
# ebuild_data is the actual ebuild- no point in holding onto
@@ -79,8 +78,9 @@ class install(repo_ops.install):
logger.warning(
"doing install/replace op, "
"but source package doesn't provide the actual ebuild data. "
- "Creating an empty file")
- o = ''
+ "Creating an empty file"
+ )
+ o = ""
else:
o = o.bytes_fileobj().read()
# XXX lil hackish accessing PF
@@ -88,9 +88,10 @@ class install(repo_ops.install):
f.write(o)
# install NEEDED and NEEDED.ELF.2 files from tmpdir if they exist
- pkg_tmpdir = normpath(pjoin(domain.pm_tmpdir, self.new_pkg.category,
- self.new_pkg.PF, 'temp'))
- for f in ['NEEDED', 'NEEDED.ELF.2']:
+ pkg_tmpdir = normpath(
+ pjoin(domain.pm_tmpdir, self.new_pkg.category, self.new_pkg.PF, "temp")
+ )
+ for f in ["NEEDED", "NEEDED.ELF.2"]:
fp = pjoin(pkg_tmpdir, f)
if os.path.exists(fp):
local_source(fp).transfer_to_path(pjoin(dirpath, f))
@@ -118,10 +119,10 @@ class install(repo_ops.install):
class uninstall(repo_ops.uninstall):
-
def __init__(self, repo, pkg, observer):
self.remove_path = pjoin(
- repo.location, pkg.category, pkg.package+"-"+pkg.fullver)
+ repo.location, pkg.category, pkg.package + "-" + pkg.fullver
+ )
super().__init__(repo, pkg, observer)
def remove_data(self):
@@ -136,7 +137,6 @@ class uninstall(repo_ops.uninstall):
# should convert these to mixins.
class replace(repo_ops.replace, install, uninstall):
-
def __init__(self, repo, pkg, newpkg, observer):
uninstall.__init__(self, repo, pkg, observer)
install.__init__(self, repo, newpkg, observer)
@@ -158,7 +158,6 @@ class replace(repo_ops.replace, install, uninstall):
class operations(repo_ops.operations):
-
def _cmd_implementation_install(self, pkg, observer):
return install(self.repo, pkg, observer)
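The install op above stores the saved build environment bzip2-compressed; for this purpose compression.compress_data("bzip2", data) is roughly the stdlib call below (snakeoil may prefer an external bzip2 binary, so treat this as an approximation):

import bz2

env_bytes = b'declare -x CFLAGS="-O2"\n'   # illustrative content
with open("environment.bz2", "wb") as f:
    f.write(bz2.compress(env_bytes))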
diff --git a/tests/cache/test_base.py b/tests/cache/test_base.py
index 6b3fdc22f..a613bebb6 100644
--- a/tests/cache/test_base.py
+++ b/tests/cache/test_base.py
@@ -5,8 +5,10 @@ from snakeoil.chksum import LazilyHashedPath
def _mk_chf_obj(**kwargs):
- kwargs.setdefault('mtime', 100)
- return LazilyHashedPath('/nonexistent/path', **kwargs)
+ kwargs.setdefault("mtime", 100)
+ return LazilyHashedPath("/nonexistent/path", **kwargs)
+
+
_chf_obj = _mk_chf_obj()
@@ -29,11 +31,11 @@ class DictCache(base):
# pita to deal with for this code- thus we convert back.
# Additionally, we drop any chksum info in the process.
d = dict(base.__getitem__(self, cpv).items())
- d.pop(f'_{self.chf_type}_', None)
+ d.pop(f"_{self.chf_type}_", None)
return d
def __setitem__(self, cpv, data):
- data['_chf_'] = _chf_obj
+ data["_chf_"] = _chf_obj
return base.__setitem__(self, cpv, data)
def _setitem(self, cpv, values):
@@ -68,11 +70,11 @@ class DictCacheBulk(bulk):
def __getitem__(self, cpv):
d = bulk.__getitem__(self, cpv)
- d.pop(f'_{self.chf_type}_', None)
+ d.pop(f"_{self.chf_type}_", None)
return d
def __setitem__(self, cpv, data):
- data['_chf_'] = _chf_obj
+ data["_chf_"] = _chf_obj
return bulk.__setitem__(self, cpv, data)
def keys(self):
@@ -84,66 +86,68 @@ class TestBase:
cache_keys = ("foo", "_eclasses_")
def get_db(self, readonly=False):
- return DictCache(auxdbkeys=self.cache_keys,
- readonly=readonly)
+ return DictCache(auxdbkeys=self.cache_keys, readonly=readonly)
def test_basics(self):
cache = self.get_db()
- cache['spork'] = {'foo':'bar'}
- assert {'foo': 'bar'} == cache['spork']
+ cache["spork"] = {"foo": "bar"}
+ assert {"foo": "bar"} == cache["spork"]
with pytest.raises(KeyError):
- cache['notaspork']
+ cache["notaspork"]
- cache['spork'] = {'foo': 42}
- cache['foon'] = {'foo': 42}
- assert {'foo': 42} == cache['spork']
- assert {'foo': 42} == cache['foon']
+ cache["spork"] = {"foo": 42}
+ cache["foon"] = {"foo": 42}
+ assert {"foo": 42} == cache["spork"]
+ assert {"foo": 42} == cache["foon"]
- assert {'foon', 'spork'} == set(cache.keys())
- assert [('foon', {'foo': 42}), ('spork', {'foo': 42})] == sorted(cache.items())
- del cache['foon']
+ assert {"foon", "spork"} == set(cache.keys())
+ assert [("foon", {"foo": 42}), ("spork", {"foo": 42})] == sorted(cache.items())
+ del cache["foon"]
with pytest.raises(KeyError):
- cache['foon']
+ cache["foon"]
- assert 'spork' in cache
- assert 'foon' not in cache
+ assert "spork" in cache
+ assert "foon" not in cache
- cache['empty'] = {'foo': ''}
- assert not cache['empty']
+ cache["empty"] = {"foo": ""}
+ assert not cache["empty"]
def test_eclasses(self):
cache = self.get_db()
- cache['spork'] = {'foo':'bar'}
- cache['spork'] = {'_eclasses_': {'spork': _chf_obj,
- 'foon': _chf_obj}}
- assert len(cache['spork']['_eclasses_']) == 2
-
- cache['spork'] = {'_eclasses_': {'spork': _mk_chf_obj(mtime=1),
- 'foon': _mk_chf_obj(mtime=2)}}
- assert cache._data['spork']['_eclasses_'] in ['spork\t1\tfoon\t2', 'foon\t2\tspork\t1']
- assert (
- {('foon', (('mtime', 2),)), ('spork', (('mtime', 1),))} ==
- set(cache['spork']['_eclasses_']))
+ cache["spork"] = {"foo": "bar"}
+ cache["spork"] = {"_eclasses_": {"spork": _chf_obj, "foon": _chf_obj}}
+ assert len(cache["spork"]["_eclasses_"]) == 2
+
+ cache["spork"] = {
+ "_eclasses_": {"spork": _mk_chf_obj(mtime=1), "foon": _mk_chf_obj(mtime=2)}
+ }
+ assert cache._data["spork"]["_eclasses_"] in [
+ "spork\t1\tfoon\t2",
+ "foon\t2\tspork\t1",
+ ]
+ assert {("foon", (("mtime", 2),)), ("spork", (("mtime", 1),))} == set(
+ cache["spork"]["_eclasses_"]
+ )
def test_readonly(self):
cache = self.get_db()
- cache['spork'] = {'foo':'bar'}
+ cache["spork"] = {"foo": "bar"}
cache2 = self.get_db(True)
cache2._data = cache._data
with pytest.raises(errors.ReadOnly):
- del cache2['spork']
+ del cache2["spork"]
with pytest.raises(errors.ReadOnly):
- cache2['spork'] = {'foo': 42}
- assert {'foo': 'bar'} == cache2['spork']
+ cache2["spork"] = {"foo": 42}
+ assert {"foo": "bar"} == cache2["spork"]
def test_clear(self):
cache = self.get_db()
- cache['spork'] = {'foo': 'bar'}
- assert {'foo':'bar'} == cache['spork']
- assert list(cache) == ['spork']
- cache['dork'] = {'foo': 'bar2'}
- cache['dork2'] = {'foo': 'bar2'}
- assert set(cache) == {'dork', 'dork2', 'spork'}
+ cache["spork"] = {"foo": "bar"}
+ assert {"foo": "bar"} == cache["spork"]
+ assert list(cache) == ["spork"]
+ cache["dork"] = {"foo": "bar2"}
+ cache["dork2"] = {"foo": "bar2"}
+ assert set(cache) == {"dork", "dork2", "spork"}
cache.clear()
assert not list(cache)
@@ -179,10 +183,8 @@ class TestBase:
class TestBulk(TestBase):
-
def get_db(self, readonly=False):
- return DictCacheBulk(auxdbkeys=self.cache_keys,
- readonly=readonly)
+ return DictCacheBulk(auxdbkeys=self.cache_keys, readonly=readonly)
def test_filtering(self):
db = self.get_db()
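The assertion on cache._data["spork"]["_eclasses_"] above pins down the flattened form the cache stores: eclass name and mtime pairs joined with tabs, in whichever order the dict yields them (hence the two accepted strings). A sketch that reproduces the accepted value, with the chf objects reduced to bare mtimes and a made-up helper name:

from itertools import chain

def flatten_eclasses(eclasses):
    return "\t".join(chain.from_iterable((name, str(mtime)) for name, mtime in eclasses.items()))

flatten_eclasses({"spork": 1, "foon": 2})   # 'spork\t1\tfoon\t2'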
diff --git a/tests/cache/test_flat_hash.py b/tests/cache/test_flat_hash.py
index 96f6ad83d..b71b87fbf 100644
--- a/tests/cache/test_flat_hash.py
+++ b/tests/cache/test_flat_hash.py
@@ -7,71 +7,91 @@ from . import test_base
class db(flat_hash.database):
-
def __setitem__(self, cpv, data):
- data['_chf_'] = test_base._chf_obj
+ data["_chf_"] = test_base._chf_obj
return flat_hash.database.__setitem__(self, cpv, data)
def __getitem__(self, cpv):
d = dict(flat_hash.database.__getitem__(self, cpv).items())
- d.pop(f'_{self.chf_type}_', None)
+ d.pop(f"_{self.chf_type}_", None)
return d
-generic_data = \
- ("sys-libs/libtrash-2.4",
- (('DEPEND', 'virtual/libc dev-lang/perl'),
- ('DESCRIPTION', 'provides a trash can by intercepting calls...'),
- ('EAPI', ''),
- ('HOMEPAGE', 'http://pages.stern.nyu.edu/~marriaga/software/libtrash/'),
- ('IUSE', ''),
- ('KEYWORDS', '~amd64 ~ppc ~x86'),
- ('LICENSE', 'GPL-2'),
- ('PDEPEND', ''),
- ('RDEPEND', 'virtual/libc dev-lang/perl'),
- ('RESTRICT', ''),
- ('SLOT', '0'),
- ('SRC_URI', 'http://pages.stern.nyu.edu/~marriaga/software/blah.tgz'),
- ('_eclasses_',
+generic_data = (
+ "sys-libs/libtrash-2.4",
+ (
+ ("DEPEND", "virtual/libc dev-lang/perl"),
+ ("DESCRIPTION", "provides a trash can by intercepting calls..."),
+ ("EAPI", ""),
+ ("HOMEPAGE", "http://pages.stern.nyu.edu/~marriaga/software/libtrash/"),
+ ("IUSE", ""),
+ ("KEYWORDS", "~amd64 ~ppc ~x86"),
+ ("LICENSE", "GPL-2"),
+ ("PDEPEND", ""),
+ ("RDEPEND", "virtual/libc dev-lang/perl"),
+ ("RESTRICT", ""),
+ ("SLOT", "0"),
+ ("SRC_URI", "http://pages.stern.nyu.edu/~marriaga/software/blah.tgz"),
+ (
+ "_eclasses_",
{
- 'toolchain-funcs': LazilyHashedPath('/var/gentoo/repos/gentoo/eclass', mtime=1155996352),
- 'multilib': LazilyHashedPath('/var/gentoo/repos/gentoo/eclass', mtime=1156014349),
- 'eutils': LazilyHashedPath('/var/gentoo/repos/gentoo/eclass', mtime=1155996352),
- 'portability': LazilyHashedPath('/var/gentoo/repos/gentoo/eclass', mtime=1141850196)
- }
+ "toolchain-funcs": LazilyHashedPath(
+ "/var/gentoo/repos/gentoo/eclass", mtime=1155996352
+ ),
+ "multilib": LazilyHashedPath(
+ "/var/gentoo/repos/gentoo/eclass", mtime=1156014349
+ ),
+ "eutils": LazilyHashedPath(
+ "/var/gentoo/repos/gentoo/eclass", mtime=1155996352
+ ),
+ "portability": LazilyHashedPath(
+ "/var/gentoo/repos/gentoo/eclass", mtime=1141850196
+ ),
+ },
),
- ('_mtime_', 1000),
+ ("_mtime_", 1000),
),
)
-class TestFlatHash:
+class TestFlatHash:
@pytest.fixture
def db(self, tmp_path, request):
return db(str(tmp_path), auxdbkeys=self.cache_keys, readonly=request.param)
- cache_keys = ("DEPENDS", "RDEPEND", "EAPI", "HOMEPAGE", "KEYWORDS",
- "LICENSE", "PDEPEND", "RESTRICT", "SLOT", "SRC_URI",
- "_eclasses_", "_mtime_")
+ cache_keys = (
+ "DEPENDS",
+ "RDEPEND",
+ "EAPI",
+ "HOMEPAGE",
+ "KEYWORDS",
+ "LICENSE",
+ "PDEPEND",
+ "RESTRICT",
+ "SLOT",
+ "SRC_URI",
+ "_eclasses_",
+ "_mtime_",
+ )
# truncating the original metadata we grabbed for 80 char...
test_data = (generic_data,)
- @pytest.mark.parametrize("db", (False, ), indirect=True)
+ @pytest.mark.parametrize("db", (False,), indirect=True)
def test_readwrite(self, db):
for key, raw_data in self.test_data:
d = dict(raw_data)
db[key] = d
db.commit()
- @pytest.mark.parametrize("db", (True, ), indirect=True)
+ @pytest.mark.parametrize("db", (True,), indirect=True)
def test_readonly(self, db):
for key, raw_data in self.test_data:
d = dict(raw_data)
with pytest.raises(errors.ReadOnly):
db[key] = d
- @pytest.mark.parametrize("db", (False, ), indirect=True)
+ @pytest.mark.parametrize("db", (False,), indirect=True)
def test_setitem(self, db):
for key, raw_data in self.test_data:
d = dict(raw_data)
diff --git a/tests/config/test_basics.py b/tests/config/test_basics.py
index 825c1a0cd..374fc1352 100644
--- a/tests/config/test_basics.py
+++ b/tests/config/test_basics.py
@@ -9,15 +9,19 @@ def passthrough(*args, **kwargs):
def test_invalid_config_types():
- for var in ('class', 'inherit'):
- @configurable({var: 'str'})
+ for var in ("class", "inherit"):
+
+ @configurable({var: "str"})
def testtype():
pass
+
with pytest.raises(errors.TypeDefinitionError):
basics.ConfigType(testtype)
- @configurable(positional=['foo'])
+
+ @configurable(positional=["foo"])
def test(*args):
pass
+
with pytest.raises(errors.TypeDefinitionError):
basics.ConfigType(test)
@@ -25,9 +29,11 @@ def test_invalid_config_types():
# the docstrings aren't part of the test, but using 'pass' instead
# makes trial's --coverage complain about them.
+
def argsfunc(*args):
"""Function taking a variable number of arguments."""
+
def kwargsfunc(**kwargs):
"""Function taking keyword arguments."""
@@ -36,14 +42,12 @@ def nonopt(one, two):
"""Function taking two non-optional args."""
-def alltypes(alist=(), astr='astr', abool=True, aref=object(), anint=3,
- along=int(3)):
+def alltypes(alist=(), astr="astr", abool=True, aref=object(), anint=3, along=int(3)):
"""Function taking lots of kinds of args."""
class NewStyleStrClass:
-
- def __init__(self, one, two='two'):
+ def __init__(self, one, two="two"):
"""Newstyle testclass."""
def test_member(self, one):
@@ -51,7 +55,6 @@ class NewStyleStrClass:
class NewStyleClass:
-
def __init__(self, one, two=object()):
"""Newstyle testclass."""
@@ -60,7 +63,6 @@ class NewStyleClass:
class OldStyleClass:
-
def __init__(self, one, two=object()):
"""Newstyle testclass."""
@@ -69,7 +71,6 @@ class OldStyleClass:
class TestConfigTypeFromFunction:
-
def test_invalid(self):
with pytest.raises(TypeError):
basics.ConfigType(argsfunc)
@@ -78,62 +79,68 @@ class TestConfigTypeFromFunction:
def test_basic(self):
nonopt_type = basics.ConfigType(nonopt)
- assert nonopt_type.name == 'nonopt'
- assert nonopt_type.types == {'one': 'str', 'two': 'str'}
- assert nonopt_type.required == ('one', 'two')
- assert nonopt_type.positional == ('one', 'two')
+ assert nonopt_type.name == "nonopt"
+ assert nonopt_type.types == {"one": "str", "two": "str"}
+ assert nonopt_type.required == ("one", "two")
+ assert nonopt_type.positional == ("one", "two")
def test_default_types(self):
test_type = basics.ConfigType(alltypes)
assert test_type.types == {
- 'alist': 'list', 'astr': 'str', 'abool': 'bool',
- 'anint': 'int', 'along': 'int'}
+ "alist": "list",
+ "astr": "str",
+ "abool": "bool",
+ "anint": "int",
+ "along": "int",
+ }
assert not test_type.required
- @pytest.mark.parametrize('func', (
- pytest.param(NewStyleClass(1).member, id='newstyle_instance'),
- pytest.param(OldStyleClass(1).member, id='oldstyle_instance'),
- pytest.param(NewStyleClass.member, id='newstyle_class'),
- pytest.param(OldStyleClass.member, id='oldstyle_class'),
- ))
+ @pytest.mark.parametrize(
+ "func",
+ (
+ pytest.param(NewStyleClass(1).member, id="newstyle_instance"),
+ pytest.param(OldStyleClass(1).member, id="oldstyle_instance"),
+ pytest.param(NewStyleClass.member, id="newstyle_class"),
+ pytest.param(OldStyleClass.member, id="oldstyle_class"),
+ ),
+ )
def test_class_member(self, func):
test_type = basics.ConfigType(func)
- assert test_type.name == 'member'
- assert test_type.required == ('one',)
+ assert test_type.name == "member"
+ assert test_type.required == ("one",)
class TestConfigTypeFromClass:
-
def _test_basics(self, klass, name, two_override=None):
test_type = basics.ConfigType(klass)
assert test_type.name == name
- assert set(test_type.required) == {'one'}
- target_types = {'one': 'str'}
+ assert set(test_type.required) == {"one"}
+ target_types = {"one": "str"}
if two_override is not None:
- target_types['two'] = two_override
+ target_types["two"] = two_override
assert target_types == test_type.types
assert test_type.name == name
def test_oldstyle(self):
- self._test_basics(OldStyleClass, 'OldStyleClass')
+ self._test_basics(OldStyleClass, "OldStyleClass")
def test_newstyle(self):
- self._test_basics(NewStyleClass, 'NewStyleClass')
+ self._test_basics(NewStyleClass, "NewStyleClass")
def test_defaults_str(self):
- self._test_basics(NewStyleStrClass, 'NewStyleStrClass',
- two_override='str')
+ self._test_basics(NewStyleStrClass, "NewStyleStrClass", two_override="str")
def test_config_hint(self):
class Class(NewStyleClass):
- pkgcore_config_type = ConfigHint(
- types={'two':'bool'}, doc='interesting')
- self._test_basics(Class, 'Class', two_override='bool')
- assert 'interesting' == basics.ConfigType(Class).doc
+ pkgcore_config_type = ConfigHint(types={"two": "bool"}, doc="interesting")
+
+ self._test_basics(Class, "Class", two_override="bool")
+ assert "interesting" == basics.ConfigType(Class).doc
def test_object_init(self):
class kls:
pass
+
conf = basics.ConfigType(kls)
assert not conf.types
assert not conf.required
@@ -151,12 +158,16 @@ class TestConfigTypeFromClass:
# have to be accurate however
class cls(dict):
__slots__ = ()
+
with pytest.raises(TypeError):
basics.ConfigType(cls)
- raw_hint = ConfigHint(types={"filename":"str", "mode":"r",
- "buffering":"int"}, typename='file',
- required=['filename'], positional=['filename'])
+ raw_hint = ConfigHint(
+ types={"filename": "str", "mode": "r", "buffering": "int"},
+ typename="file",
+ required=["filename"],
+ positional=["filename"],
+ )
# make sure it still tries to introspect, and throws typeerror.
# introspection is generally wanted- if it must be skipped, the
@@ -166,41 +177,45 @@ class TestConfigTypeFromClass:
basics.ConfigType(cls)
cls.pkgcore_config_type = raw_hint.clone(authorative=True)
conf = basics.ConfigType(cls)
- assert conf.name == 'file'
- assert list(conf.required) == ['filename']
- assert list(conf.positional) == ['filename']
- assert set(conf.types) == {'buffering', 'filename', 'mode'}
+ assert conf.name == "file"
+ assert list(conf.required) == ["filename"]
+ assert list(conf.positional) == ["filename"]
+ assert set(conf.types) == {"buffering", "filename", "mode"}
-class TestConfigHint:
+class TestConfigHint:
def test_configurable_decorator(self):
- @configurable(typename='spork', types={'foon': 'str'})
+ @configurable(typename="spork", types={"foon": "str"})
def stuff(*args, **kwargs):
return args, kwargs
- assert 'spork' == stuff.pkgcore_config_type.typename
- assert 'str' == basics.ConfigType(stuff).types['foon']
- assert (('spork',), {}) == stuff('spork')
-
+ assert "spork" == stuff.pkgcore_config_type.typename
+ assert "str" == basics.ConfigType(stuff).types["foon"]
+ assert (("spork",), {}) == stuff("spork")
def test_clone(self):
- c = ConfigHint(types={'foo': 'list', 'one': 'str'},
- positional=['one'], required=['one'],
- typename='barn', doc='orig doc')
- c2 = c.clone(types={'foo': 'list', 'one': 'str', 'two': 'str'},
- required=['one', 'two'])
- assert c2.types == {'foo': 'list', 'one': 'str', 'two': 'str'}
+ c = ConfigHint(
+ types={"foo": "list", "one": "str"},
+ positional=["one"],
+ required=["one"],
+ typename="barn",
+ doc="orig doc",
+ )
+ c2 = c.clone(
+ types={"foo": "list", "one": "str", "two": "str"}, required=["one", "two"]
+ )
+ assert c2.types == {"foo": "list", "one": "str", "two": "str"}
assert c2.positional == c.positional
- assert c2.required == ['one', 'two']
+ assert c2.required == ["one", "two"]
assert c2.typename == c.typename
assert c2.allow_unknowns == c.allow_unknowns
assert c2.doc == c.doc
-class TestConfigSection:
+class TestConfigSection:
def test_section_ref_collapse(self):
# Silly testcase just to make something drop off the --coverage radar.
- ref = basics.LazySectionRef(None, 'ref:foon')
+ ref = basics.LazySectionRef(None, "ref:foon")
pytest.raises(NotImplementedError, ref._collapse)
pytest.raises(NotImplementedError, ref.collapse)
pytest.raises(NotImplementedError, ref.instantiate)
@@ -209,108 +224,123 @@ class TestConfigSection:
section = basics.ConfigSection()
pytest.raises(NotImplementedError, section.__contains__, 42)
pytest.raises(NotImplementedError, section.keys)
- pytest.raises(NotImplementedError, section.render_value, None, 'a', 'str')
+ pytest.raises(NotImplementedError, section.render_value, None, "a", "str")
class TestDictConfigSection:
-
def test_misc(self):
def convert(central, value, arg_type):
return central, value, arg_type
- section = basics.DictConfigSection(convert, {'list': [1, 2]})
- assert 'foo' not in section
- assert 'list' in section
- assert ['list'] == list(section.keys())
- assert (None, [1, 2], 'spoon') == section.render_value(None, 'list', 'spoon')
+
+ section = basics.DictConfigSection(convert, {"list": [1, 2]})
+ assert "foo" not in section
+ assert "list" in section
+ assert ["list"] == list(section.keys())
+ assert (None, [1, 2], "spoon") == section.render_value(None, "list", "spoon")
def test_failure(self):
def fail(central, value, arg_type):
- raise errors.ConfigurationError('fail')
- section = basics.DictConfigSection(fail, {'list': [1, 2]})
+ raise errors.ConfigurationError("fail")
+
+ section = basics.DictConfigSection(fail, {"list": [1, 2]})
with pytest.raises(errors.ConfigurationError):
- section.render_value(None, 'list', 'spoon')
+ section.render_value(None, "list", "spoon")
class TestFakeIncrementalDictConfigSection:
-
@staticmethod
def _convert(central, value, arg_type):
return central, value, arg_type
@staticmethod
def _fail(central, value, arg_type):
- raise errors.ConfigurationError('fail')
+ raise errors.ConfigurationError("fail")
def test_misc(self):
section = basics.FakeIncrementalDictConfigSection(
- self._convert, {'list': [1, 2]})
- assert 'foo' not in section
- assert 'list' in section
- assert ['list'] == list(section.keys())
+ self._convert, {"list": [1, 2]}
+ )
+ assert "foo" not in section
+ assert "list" in section
+ assert ["list"] == list(section.keys())
with pytest.raises(errors.ConfigurationError):
- obj = basics.FakeIncrementalDictConfigSection(self._fail, {'a': 'b'})
- obj.render_value(None, 'a', 'str')
+ obj = basics.FakeIncrementalDictConfigSection(self._fail, {"a": "b"})
+ obj.render_value(None, "a", "str")
def test_fake_incrementals(self):
section = basics.FakeIncrementalDictConfigSection(
- self._convert, {'seq.append': [1, 2]})
+ self._convert, {"seq.append": [1, 2]}
+ )
manager = object()
- assert [None, None, (manager, [1, 2], 'list')] == section.render_value(manager, 'seq', 'list')
+ assert [None, None, (manager, [1, 2], "list")] == section.render_value(
+ manager, "seq", "list"
+ )
+
def _repr(central, value, arg_type):
- return 'list', ['thing']
- section = basics.FakeIncrementalDictConfigSection(
- _repr, {'foo': None})
- assert ('list', (None, ['thing'], None)) == section.render_value(manager, 'foo', 'repr')
+ return "list", ["thing"]
+
+ section = basics.FakeIncrementalDictConfigSection(_repr, {"foo": None})
+ assert ("list", (None, ["thing"], None)) == section.render_value(
+ manager, "foo", "repr"
+ )
with pytest.raises(errors.ConfigurationError):
- obj = basics.FakeIncrementalDictConfigSection(self._fail, {'a.prepend': 'b'})
- obj.render_value(None, 'a', 'list')
+ obj = basics.FakeIncrementalDictConfigSection(
+ self._fail, {"a.prepend": "b"}
+ )
+ obj.render_value(None, "a", "list")
def test_repr(self):
def asis(central, value, arg_type):
- assert arg_type == 'repr', arg_type
+ assert arg_type == "repr", arg_type
return value
+
source_dict = {
- 'seq.append': ('list', [1, 2]),
- 'simple': ('bool', True),
- 'multistr': ('str', 'body'),
- 'multistr.prepend': ('str', 'head'),
- 'refs': ('str', 'lost'),
- 'refs.append': ('ref', 'main'),
- 'refs.prepend': ('refs', ['a', 'b']),
- 'strlist': ('callable', asis),
- 'strlist.prepend': ('str', 'whatever'),
- 'wrong.prepend': ('wrong', 'wrong'),
+ "seq.append": ("list", [1, 2]),
+ "simple": ("bool", True),
+ "multistr": ("str", "body"),
+ "multistr.prepend": ("str", "head"),
+ "refs": ("str", "lost"),
+ "refs.append": ("ref", "main"),
+ "refs.prepend": ("refs", ["a", "b"]),
+ "strlist": ("callable", asis),
+ "strlist.prepend": ("str", "whatever"),
+ "wrong.prepend": ("wrong", "wrong"),
}
section = basics.FakeIncrementalDictConfigSection(asis, source_dict)
manager = object()
with pytest.raises(KeyError):
- section.render_value(manager, 'spoon', 'repr')
- assert ('list', [None, None, [1, 2]]) == section.render_value(manager, 'seq', 'repr')
- assert ('bool', True) == section.render_value(manager, 'simple', 'repr')
- assert ('str', ['head', 'body', None]) == section.render_value(manager, 'multistr', 'repr')
- assert ('refs', [['a', 'b'], ['lost'], ['main']]) == section.render_value(manager, 'refs', 'repr')
- assert ('list', [
- ['whatever'],
- ['tests.config.test_basics.asis'],
- None]) == section.render_value(manager, 'strlist', 'repr')
+ section.render_value(manager, "spoon", "repr")
+ assert ("list", [None, None, [1, 2]]) == section.render_value(
+ manager, "seq", "repr"
+ )
+ assert ("bool", True) == section.render_value(manager, "simple", "repr")
+ assert ("str", ["head", "body", None]) == section.render_value(
+ manager, "multistr", "repr"
+ )
+ assert ("refs", [["a", "b"], ["lost"], ["main"]]) == section.render_value(
+ manager, "refs", "repr"
+ )
+ assert (
+ "list",
+ [["whatever"], ["tests.config.test_basics.asis"], None],
+ ) == section.render_value(manager, "strlist", "repr")
with pytest.raises(errors.ConfigurationError):
- section.render_value(manager, 'wrong', 'repr')
+ section.render_value(manager, "wrong", "repr")
class TestConvertString:
-
def test_render_value(self):
source = {
- 'str': 'tests',
- 'bool': 'yes',
- 'list': '0 1 2',
- 'callable': 'tests.config.test_basics.passthrough',
+ "str": "tests",
+ "bool": "yes",
+ "list": "0 1 2",
+ "callable": "tests.config.test_basics.passthrough",
}
destination = {
- 'str': 'tests',
- 'bool': True,
- 'list': ['0', '1', '2'],
- 'callable': passthrough,
+ "str": "tests",
+ "bool": True,
+ "list": ["0", "1", "2"],
+ "callable": passthrough,
}
# valid gets
@@ -319,49 +349,59 @@ class TestConvertString:
# reprs
for typename, value in source.items():
- assert ('str', value) == basics.convert_string(None, source[typename], 'repr')
+ assert ("str", value) == basics.convert_string(
+ None, source[typename], "repr"
+ )
# invalid gets
# not callable
with pytest.raises(errors.ConfigurationError):
- basics.convert_string(None, source['str'], 'callable')
+ basics.convert_string(None, source["str"], "callable")
# not importable
with pytest.raises(errors.ConfigurationError):
- basics.convert_string(None, source['bool'], 'callable')
+ basics.convert_string(None, source["bool"], "callable")
# Bogus type.
with pytest.raises(errors.ConfigurationError):
- basics.convert_string(None, source['bool'], 'frob')
+ basics.convert_string(None, source["bool"], "frob")
def test_section_ref(self):
def spoon():
"""Noop."""
- target_config = central.CollapsedConfig(
- basics.ConfigType(spoon), {}, None)
+
+ target_config = central.CollapsedConfig(basics.ConfigType(spoon), {}, None)
+
class TestCentral:
def collapse_named_section(self, section):
try:
- return {'target': target_config}[section]
+ return {"target": target_config}[section]
except KeyError:
raise errors.ConfigurationError(section)
- assert basics.convert_string(TestCentral(), 'target', 'ref:spoon').collapse() == target_config
+
+ assert (
+ basics.convert_string(TestCentral(), "target", "ref:spoon").collapse()
+ == target_config
+ )
with pytest.raises(errors.ConfigurationError):
- basics.convert_string(TestCentral(), 'missing', 'ref:spoon').instantiate()
+ basics.convert_string(TestCentral(), "missing", "ref:spoon").instantiate()
def test_section_refs(self):
def spoon():
"""Noop."""
- config1 = central.CollapsedConfig(
- basics.ConfigType(spoon), {}, None)
- config2 = central.CollapsedConfig(
- basics.ConfigType(spoon), {}, None)
+
+ config1 = central.CollapsedConfig(basics.ConfigType(spoon), {}, None)
+ config2 = central.CollapsedConfig(basics.ConfigType(spoon), {}, None)
+
class TestCentral:
def collapse_named_section(self, section):
try:
- return {'1': config1, '2': config2}[section]
+ return {"1": config1, "2": config2}[section]
except KeyError:
raise errors.ConfigurationError(section)
- assert [config1, config2] == list(ref.collapse() for ref in basics.convert_string(
- TestCentral(), '1 2', 'refs:spoon'))
- lazy_refs = basics.convert_string(TestCentral(), '2 3', 'refs:spoon')
+
+ assert [config1, config2] == list(
+ ref.collapse()
+ for ref in basics.convert_string(TestCentral(), "1 2", "refs:spoon")
+ )
+ lazy_refs = basics.convert_string(TestCentral(), "2 3", "refs:spoon")
assert len(lazy_refs) == 2
with pytest.raises(errors.ConfigurationError):
lazy_refs[1].collapse()
@@ -370,10 +410,10 @@ class TestConvertString:
class TestConvertAsIs:
source = {
- 'str': 'tests',
- 'bool': True,
- 'list': ['0', '1', '2'],
- 'callable': passthrough,
+ "str": "tests",
+ "bool": True,
+ "list": ["0", "1", "2"],
+ "callable": passthrough,
}
def test_render_value(self):
@@ -388,91 +428,89 @@ class TestConvertAsIs:
def test_repr(self):
for typename, value in self.source.items():
- assert (typename, value) == basics.convert_asis(None, value, 'repr')
+ assert (typename, value) == basics.convert_asis(None, value, "repr")
with pytest.raises(errors.ConfigurationError):
- basics.convert_asis(None, object(), 'repr')
+ basics.convert_asis(None, object(), "repr")
def test_section_ref(self):
ref = basics.HardCodedConfigSection({})
with pytest.raises(errors.ConfigurationError):
- basics.convert_asis(None, 42, 'ref:spoon')
- assert ref is basics.convert_asis(None, ref, 'ref:spoon').section
- assert ('ref', ref) == basics.convert_asis(None, ref, 'repr')
+ basics.convert_asis(None, 42, "ref:spoon")
+ assert ref is basics.convert_asis(None, ref, "ref:spoon").section
+ assert ("ref", ref) == basics.convert_asis(None, ref, "repr")
def test_section_refs(self):
ref = basics.HardCodedConfigSection({})
with pytest.raises(errors.ConfigurationError):
- basics.convert_asis(None, [1, 2], 'refs:spoon')
- assert ref is basics.convert_asis(None, [ref], 'refs:spoon')[0].section
- assert ('refs', [ref]) == basics.convert_asis(None, [ref], 'repr')
+ basics.convert_asis(None, [1, 2], "refs:spoon")
+ assert ref is basics.convert_asis(None, [ref], "refs:spoon")[0].section
+ assert ("refs", [ref]) == basics.convert_asis(None, [ref], "repr")
def test_alias():
def spoon():
"""Noop."""
+
foon = central.CollapsedConfig(basics.ConfigType(spoon), {}, None)
+
class MockManager:
def collapse_named_section(self, name):
- if name == 'foon':
+ if name == "foon":
return foon
return object()
+
manager = MockManager()
- alias = basics.section_alias('foon', 'spoon')
- type_obj = basics.ConfigType(alias.render_value(manager, 'class',
- 'callable'))
- assert 'spoon' == type_obj.name
- assert foon is alias.render_value(manager, 'target', 'ref:spoon').collapse()
+ alias = basics.section_alias("foon", "spoon")
+ type_obj = basics.ConfigType(alias.render_value(manager, "class", "callable"))
+ assert "spoon" == type_obj.name
+ assert foon is alias.render_value(manager, "target", "ref:spoon").collapse()
class TestParsers:
-
def test_str_to_bool(self):
# abuse assert is to make sure we get actual booleans, not some
# weird object that happens to be True or False when converted
# to a bool
for string, output in [
- ('True', True),
- ('yes', True),
- ('1', True),
- ('False', False),
- ('no', False),
- ('0', False),
- ]:
+ ("True", True),
+ ("yes", True),
+ ("1", True),
+ ("False", False),
+ ("no", False),
+ ("0", False),
+ ]:
assert basics.str_to_bool(string) is output
def test_str_to_int(self):
- for string, output in [
- ('\t 1', 1),
- ('1', 1),
- ('-100', -100)]:
+ for string, output in [("\t 1", 1), ("1", 1), ("-100", -100)]:
assert basics.str_to_int(string) == output
with pytest.raises(errors.ConfigurationError):
- basics.str_to_int('f')
+ basics.str_to_int("f")
def test_str_to_str(self):
for string, output in [
- ('\t ', ''),
- (' foo ', 'foo'),
- (' " foo " ', ' foo '),
+ ("\t ", ""),
+ (" foo ", "foo"),
+ (' " foo " ', " foo "),
('\t"', '"'),
- ('\nfoo\t\n bar\t', 'foo bar'),
- ('"a"', 'a'),
+ ("\nfoo\t\n bar\t", "foo bar"),
+ ('"a"', "a"),
("'a'", "a"),
("'a", "'a"),
('"a', '"a'),
- ]:
+ ]:
assert basics.str_to_str(string) == output
def test_str_to_list(self):
for string, output in [
- ('foo', ['foo']),
- ('"f\'oo" \'b"ar\'', ["f'oo", 'b"ar']),
- ('', []),
- (' ', []),
+ ("foo", ["foo"]),
+ ("\"f'oo\" 'b\"ar'", ["f'oo", 'b"ar']),
+ ("", []),
+ (" ", []),
('\\"hi ', ['"hi']),
- ('\'"hi\'', ['"hi']),
+ ("'\"hi'", ['"hi']),
('"\\"hi"', ['"hi']),
- ]:
+ ]:
assert basics.str_to_list(string) == output
for string in ['"', "'foo", 'ba"r', 'baz"']:
with pytest.raises(errors.QuoteInterpretationError):
@@ -480,13 +518,15 @@ class TestParsers:
# make sure this explodes instead of returning something
# confusing so we explode much later
with pytest.raises(TypeError):
- basics.str_to_list(['no', 'string'])
+ basics.str_to_list(["no", "string"])
def test_parse_config_file(tmp_path):
- (fp := tmp_path / 'file').write_text('foon')
+ (fp := tmp_path / "file").write_text("foon")
with pytest.raises(errors.ConfigurationError):
- basics.parse_config_file('/spork', None)
+ basics.parse_config_file("/spork", None)
+
def parser(f):
return f.read()
- assert 'foon' == basics.parse_config_file(fp, parser)
+
+ assert "foon" == basics.parse_config_file(fp, parser)
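
The test_basics.py changes above are purely mechanical: black 22.12.0 normalizes quoting, adds magic trailing commas, and re-wraps calls that exceed the line length. A rough way to reproduce one such rewrite locally, assuming black's Python API (black.format_str and black.Mode) from the 22.x series is available:

    import black

    # Original ConfigHint call from test_clone above, pre-reformat.
    src = (
        "c = ConfigHint(types={'foo': 'list', 'one': 'str'},\n"
        "               positional=['one'], required=['one'],\n"
        "               typename='barn', doc='orig doc')\n"
    )
    # format_str applies the same rules as the CLI; the output should match the
    # "+" side of the corresponding hunk (double quotes, one argument per line,
    # trailing commas).
    print(black.format_str(src, mode=black.Mode(line_length=88)))
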
diff --git a/tests/config/test_central.py b/tests/config/test_central.py
index 692cea047..f3fc1c702 100644
--- a/tests/config/test_central.py
+++ b/tests/config/test_central.py
@@ -8,10 +8,13 @@ from snakeoil.errors import walk_exception_chain
# A bunch of functions used from various tests below.
def repo(cache):
return cache
-@configurable({'content': 'ref:drawer', 'contents': 'refs:drawer'})
+
+
+@configurable({"content": "ref:drawer", "contents": "refs:drawer"})
def drawer(content=None, contents=None):
return content, contents
+
# The exception checks here also check if the str value of the
# exception is what we expect. This does not mean the wording of the
# error messages used here is strictly required. It just makes sure
@@ -22,12 +25,13 @@ def drawer(content=None, contents=None):
# This makes sure the types are not unnecessarily queried (since
# querying object() will blow up).
+
class RemoteSource:
"""Use this one for tests that do need the names but nothing more."""
def __iter__(self):
- return iter(('remote',))
+ return iter(("remote",))
def __getitem__(self, key):
raise NotImplementedError()
@@ -36,496 +40,748 @@ class RemoteSource:
def _str_exc(exc):
return ":\n".join(str(x) for x in walk_exception_chain(exc))
+
def check_error(message, func, *args, **kwargs):
"""Like assertRaises but checks for the message string too."""
- klass = kwargs.pop('klass', errors.ConfigurationError)
+ klass = kwargs.pop("klass", errors.ConfigurationError)
try:
func(*args, **kwargs)
except klass as exc:
- assert message == _str_exc(exc), \
- f'\nGot:\n{_str_exc(exc)!r}\nExpected:\n{message!r}\n'
+ assert message == _str_exc(
+ exc
+ ), f"\nGot:\n{_str_exc(exc)!r}\nExpected:\n{message!r}\n"
else:
- pytest.fail('no exception raised')
+ pytest.fail("no exception raised")
+
def get_config_obj(manager, obj_type, obj_name):
types = getattr(manager.objects, obj_type)
return types[obj_name]
+
def test_sections():
- manager = central.ConfigManager([{
- 'fooinst': basics.HardCodedConfigSection({'class': repo}),
- 'barinst': basics.HardCodedConfigSection({'class': drawer}),
- }])
- assert ['barinst', 'fooinst'] == sorted(manager.sections())
- assert list(manager.objects.drawer.keys()) == ['barinst']
- assert manager.objects.drawer == {'barinst': (None, None)}
+ manager = central.ConfigManager(
+ [
+ {
+ "fooinst": basics.HardCodedConfigSection({"class": repo}),
+ "barinst": basics.HardCodedConfigSection({"class": drawer}),
+ }
+ ]
+ )
+ assert ["barinst", "fooinst"] == sorted(manager.sections())
+ assert list(manager.objects.drawer.keys()) == ["barinst"]
+ assert manager.objects.drawer == {"barinst": (None, None)}
+
def test_contains():
manager = central.ConfigManager(
- [{'spork': basics.HardCodedConfigSection({'class': drawer})}],
- [RemoteSource()])
- assert 'spork' in manager.objects.drawer
- assert 'foon' not in manager.objects.drawer
+ [{"spork": basics.HardCodedConfigSection({"class": drawer})}], [RemoteSource()]
+ )
+ assert "spork" in manager.objects.drawer
+ assert "foon" not in manager.objects.drawer
+
def test_no_class():
- manager = central.ConfigManager(
- [{'foo': basics.HardCodedConfigSection({})}])
+ manager = central.ConfigManager([{"foo": basics.HardCodedConfigSection({})}])
check_error(
- "Collapsing section named 'foo':\n"
- 'no class specified',
- manager.collapse_named_section, 'foo')
+ "Collapsing section named 'foo':\n" "no class specified",
+ manager.collapse_named_section,
+ "foo",
+ )
+
def test_missing_section_ref():
- manager = central.ConfigManager([{
- 'rsync repo': basics.HardCodedConfigSection({'class': repo}),
- }])
+ manager = central.ConfigManager(
+ [
+ {
+ "rsync repo": basics.HardCodedConfigSection({"class": repo}),
+ }
+ ]
+ )
check_error(
"Collapsing section named 'rsync repo':\n"
"type tests.config.test_central.repo needs settings for "
"'cache'",
- get_config_obj, manager, 'repo', 'rsync repo')
+ get_config_obj,
+ manager,
+ "repo",
+ "rsync repo",
+ )
+
def test_unknown_type():
manager = central.ConfigManager(
- [{'spork': basics.HardCodedConfigSection({'class': drawer,
- 'foon': None})}])
+ [{"spork": basics.HardCodedConfigSection({"class": drawer, "foon": None})}]
+ )
check_error(
- "Collapsing section named 'spork':\n"
- "Type of 'foon' unknown",
- manager.collapse_named_section, 'spork')
+ "Collapsing section named 'spork':\n" "Type of 'foon' unknown",
+ manager.collapse_named_section,
+ "spork",
+ )
+
def test_missing_inherit_target():
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({
- 'class': repo,
- 'inherit': ['baserepo'],
- }),
- }], [RemoteSource()])
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection(
+ {
+ "class": repo,
+ "inherit": ["baserepo"],
+ }
+ ),
+ }
+ ],
+ [RemoteSource()],
+ )
check_error(
"Collapsing section named 'myrepo':\n"
"Inherit target 'baserepo' cannot be found",
- get_config_obj, manager, 'repo', 'myrepo')
+ get_config_obj,
+ manager,
+ "repo",
+ "myrepo",
+ )
+
def test_inherit_unknown_type():
- manager = central.ConfigManager([{
- 'baserepo': basics.HardCodedConfigSection({
- 'cache': 'available',
- }),
- 'actual repo': basics.HardCodedConfigSection({
- 'class': drawer,
- 'inherit': ['baserepo'],
- }),
- }])
+ manager = central.ConfigManager(
+ [
+ {
+ "baserepo": basics.HardCodedConfigSection(
+ {
+ "cache": "available",
+ }
+ ),
+ "actual repo": basics.HardCodedConfigSection(
+ {
+ "class": drawer,
+ "inherit": ["baserepo"],
+ }
+ ),
+ }
+ ]
+ )
check_error(
- "Collapsing section named 'actual repo':\n"
- "Type of 'cache' unknown",
- get_config_obj, manager, 'repo', 'actual repo')
+ "Collapsing section named 'actual repo':\n" "Type of 'cache' unknown",
+ get_config_obj,
+ manager,
+ "repo",
+ "actual repo",
+ )
+
def test_inherit():
- manager = central.ConfigManager([{
- 'baserepo': basics.HardCodedConfigSection({
- 'cache': 'available',
- 'inherit': ['unneeded'],
- }),
- 'unneeded': basics.HardCodedConfigSection({
- 'cache': 'unavailable'}),
- 'actual repo': basics.HardCodedConfigSection({
- 'class': repo,
- 'inherit': ['baserepo'],
- }),
- }])
-
- assert 'available' == manager.objects.repo['actual repo']
+ manager = central.ConfigManager(
+ [
+ {
+ "baserepo": basics.HardCodedConfigSection(
+ {
+ "cache": "available",
+ "inherit": ["unneeded"],
+ }
+ ),
+ "unneeded": basics.HardCodedConfigSection({"cache": "unavailable"}),
+ "actual repo": basics.HardCodedConfigSection(
+ {
+ "class": repo,
+ "inherit": ["baserepo"],
+ }
+ ),
+ }
+ ]
+ )
+
+ assert "available" == manager.objects.repo["actual repo"]
+
def test_no_object_returned():
def noop():
"""Do not do anything."""
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': noop}),
- }])
+
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection({"class": noop}),
+ }
+ ]
+ )
check_error(
"Failed instantiating section 'myrepo':\n"
"'No object returned' instantiating "
"tests.config.test_central.noop",
- manager.collapse_named_section('myrepo').instantiate)
+ manager.collapse_named_section("myrepo").instantiate,
+ )
+
def test_not_callable():
class myrepo:
def __repr__(self):
- return 'useless'
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo()}),
- }])
+ return "useless"
+
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection({"class": myrepo()}),
+ }
+ ]
+ )
check_error(
"Collapsing section named 'myrepo':\n"
"Failed converting argument 'class' to callable:\n"
"useless is not callable",
- get_config_obj, manager, 'myrepo', 'myrepo')
+ get_config_obj,
+ manager,
+ "myrepo",
+ "myrepo",
+ )
+
def test_raises_instantiationerror():
def myrepo():
- raise Exception('I raised')
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo}),
- }])
+ raise Exception("I raised")
+
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection({"class": myrepo}),
+ }
+ ]
+ )
check_error(
"Failed instantiating section 'myrepo':\n"
"Failed instantiating section 'myrepo': exception caught from 'tests.config.test_central.myrepo':\n"
"I raised",
- get_config_obj, manager, 'myrepo', 'myrepo')
+ get_config_obj,
+ manager,
+ "myrepo",
+ "myrepo",
+ )
+
def test_raises():
def myrepo():
- raise ValueError('I raised')
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo})
- }])
+ raise ValueError("I raised")
+
+ manager = central.ConfigManager(
+ [{"myrepo": basics.HardCodedConfigSection({"class": myrepo})}]
+ )
check_error(
"Failed instantiating section 'myrepo':\n"
"Failed instantiating section 'myrepo': exception caught from 'tests.config.test_central.myrepo':\n"
"I raised",
- get_config_obj, manager, 'myrepo', 'myrepo')
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo})
- }], debug=True)
+ get_config_obj,
+ manager,
+ "myrepo",
+ "myrepo",
+ )
+ manager = central.ConfigManager(
+ [{"myrepo": basics.HardCodedConfigSection({"class": myrepo})}], debug=True
+ )
check_error(
"Failed instantiating section 'myrepo':\n"
"Failed instantiating section 'myrepo': exception caught from 'tests.config.test_central.myrepo':\n"
"I raised",
- get_config_obj, manager, 'myrepo', 'myrepo',
- klass=errors.ConfigurationError)
+ get_config_obj,
+ manager,
+ "myrepo",
+ "myrepo",
+ klass=errors.ConfigurationError,
+ )
+
def test_pargs():
- @configurable(types={'p': 'str', 'notp': 'str'},
- positional=['p'], required=['p'])
+ @configurable(types={"p": "str", "notp": "str"}, positional=["p"], required=["p"])
def myrepo(*args, **kwargs):
return args, kwargs
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({
- 'class': myrepo,
- 'p': 'pos',
- 'notp': 'notpos',
- }),
- }])
- assert manager.objects.myrepo['myrepo'] == (('pos',), {'notp': 'notpos'})
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection(
+ {
+ "class": myrepo,
+ "p": "pos",
+ "notp": "notpos",
+ }
+ ),
+ }
+ ]
+ )
+
+ assert manager.objects.myrepo["myrepo"] == (("pos",), {"notp": "notpos"})
+
def test_autoexec():
- @configurable(typename='configsection')
+ @configurable(typename="configsection")
def autoloader():
- return {'spork': basics.HardCodedConfigSection({'class': repo,
- 'cache': 'test'})}
+ return {
+ "spork": basics.HardCodedConfigSection({"class": repo, "cache": "test"})
+ }
+
+ manager = central.ConfigManager(
+ [
+ {
+ "autoload-sub": basics.HardCodedConfigSection(
+ {
+ "class": autoloader,
+ }
+ )
+ }
+ ]
+ )
+ assert {"autoload-sub", "spork"} == set(manager.sections())
+ assert ["spork"] == list(manager.objects.repo.keys())
+ assert "test" == manager.collapse_named_section("spork").instantiate()
- manager = central.ConfigManager([{
- 'autoload-sub': basics.HardCodedConfigSection({
- 'class': autoloader,
- })}])
- assert {'autoload-sub', 'spork'} == set(manager.sections())
- assert ['spork'] == list(manager.objects.repo.keys())
- assert 'test' == manager.collapse_named_section('spork').instantiate()
def test_reload():
- mod_dict = {'class': repo, 'cache': 'test'}
+ mod_dict = {"class": repo, "cache": "test"}
- @configurable(typename='configsection')
+ @configurable(typename="configsection")
def autoloader():
- return {'spork': basics.HardCodedConfigSection(mod_dict)}
-
- manager = central.ConfigManager([{
- 'autoload-sub': basics.HardCodedConfigSection({
- 'class': autoloader})}])
-
- assert {'autoload-sub', 'spork'} == set(manager.sections())
- assert ['spork'] == list(manager.objects.repo.keys())
- collapsedspork = manager.collapse_named_section('spork')
- assert 'test' == collapsedspork.instantiate()
- mod_dict['cache'] = 'modded'
- assert collapsedspork is manager.collapse_named_section('spork')
- assert 'test' == collapsedspork.instantiate()
+ return {"spork": basics.HardCodedConfigSection(mod_dict)}
+
+ manager = central.ConfigManager(
+ [{"autoload-sub": basics.HardCodedConfigSection({"class": autoloader})}]
+ )
+
+ assert {"autoload-sub", "spork"} == set(manager.sections())
+ assert ["spork"] == list(manager.objects.repo.keys())
+ collapsedspork = manager.collapse_named_section("spork")
+ assert "test" == collapsedspork.instantiate()
+ mod_dict["cache"] = "modded"
+ assert collapsedspork is manager.collapse_named_section("spork")
+ assert "test" == collapsedspork.instantiate()
types = manager.types
manager.reload()
- newspork = manager.collapse_named_section('spork')
+ newspork = manager.collapse_named_section("spork")
assert collapsedspork is not newspork
- assert 'modded' == newspork.instantiate(), \
- 'it did not throw away the cached instance'
+ assert (
+ "modded" == newspork.instantiate()
+ ), "it did not throw away the cached instance"
assert types is not manager.types
+
def test_instantiate_default_ref():
- manager = central.ConfigManager([{
- 'spork': basics.HardCodedConfigSection({'class': drawer}),
- }])
- assert (None, None) == manager.collapse_named_section('spork').instantiate()
+ manager = central.ConfigManager(
+ [
+ {
+ "spork": basics.HardCodedConfigSection({"class": drawer}),
+ }
+ ]
+ )
+ assert (None, None) == manager.collapse_named_section("spork").instantiate()
+
def test_allow_unknowns():
@configurable(allow_unknowns=True)
def myrepo(**kwargs):
return kwargs
- manager = central.ConfigManager([{
- 'spork': basics.HardCodedConfigSection({
- 'class': myrepo, 'spork': 'foon'})}])
+ manager = central.ConfigManager(
+ [{"spork": basics.HardCodedConfigSection({"class": myrepo, "spork": "foon"})}]
+ )
+
+ assert {"spork": "foon"} == manager.collapse_named_section("spork").instantiate()
- assert {'spork': 'foon'} == manager.collapse_named_section('spork').instantiate()
def test_reinstantiate_after_raise():
# The most likely bug this tests for is attempting to
# reprocess already processed section_ref args.
spork = object()
- @configurable({'thing': 'ref:spork'})
+
+ @configurable({"thing": "ref:spork"})
def myrepo(thing):
assert thing is spork
- raise errors.ComplexInstantiationError('I suck')
- @configurable(typename='spork')
+ raise errors.ComplexInstantiationError("I suck")
+
+ @configurable(typename="spork")
def spork_producer():
return spork
- manager = central.ConfigManager([{
- 'spork': basics.HardCodedConfigSection({
- 'class': myrepo,
- 'thing': basics.HardCodedConfigSection({
- 'class': spork_producer,
- }),
- })}])
- spork = manager.collapse_named_section('spork')
+
+ manager = central.ConfigManager(
+ [
+ {
+ "spork": basics.HardCodedConfigSection(
+ {
+ "class": myrepo,
+ "thing": basics.HardCodedConfigSection(
+ {
+ "class": spork_producer,
+ }
+ ),
+ }
+ )
+ }
+ ]
+ )
+ spork = manager.collapse_named_section("spork")
for i in range(3):
check_error(
"Failed instantiating section 'spork':\n"
"Failed instantiating section 'spork': exception caught from 'tests.config.test_central.myrepo':\n"
"'I suck', callable unset!",
- spork.instantiate)
+ spork.instantiate,
+ )
for i in range(3):
check_error(
"Failed instantiating section 'spork':\n"
"Failed instantiating section 'spork': exception caught from 'tests.config.test_central.myrepo':\n"
"'I suck', callable unset!",
- manager.collapse_named_section('spork').instantiate)
+ manager.collapse_named_section("spork").instantiate,
+ )
+
def test_instantiation_caching():
- @configurable(typename='drawer')
+ @configurable(typename="drawer")
def myrepo():
return object()
- manager = central.ConfigManager([{
- 'spork': basics.HardCodedConfigSection({'class': myrepo}),
- 'drawer': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'content': 'spork',
- }),
- }])
-
- config = manager.collapse_named_section('spork')
+ manager = central.ConfigManager(
+ [
+ {
+ "spork": basics.HardCodedConfigSection({"class": myrepo}),
+ "drawer": basics.ConfigSectionFromStringDict(
+ {
+ "class": "tests.config.test_central.drawer",
+ "content": "spork",
+ }
+ ),
+ }
+ ]
+ )
+
+ config = manager.collapse_named_section("spork")
assert config.instantiate() is config.instantiate()
- assert config.instantiate() is manager.collapse_named_section('drawer').instantiate()[0]
+ assert (
+ config.instantiate()
+ is manager.collapse_named_section("drawer").instantiate()[0]
+ )
+
def test_collapse_named_errors():
- manager = central.ConfigManager([{
- 'spork': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'content': 'ref'})
- }], [RemoteSource()])
+ manager = central.ConfigManager(
+ [
+ {
+ "spork": basics.ConfigSectionFromStringDict(
+ {"class": "tests.config.test_central.drawer", "content": "ref"}
+ )
+ }
+ ],
+ [RemoteSource()],
+ )
with pytest.raises(KeyError):
- get_config_obj(manager, 'repo', 'foon')
+ get_config_obj(manager, "repo", "foon")
check_error(
"Collapsing section named 'spork':\n"
"Failed collapsing section key 'content':\n"
"no section called 'ref'",
- get_config_obj, manager, 'repo', 'spork')
+ get_config_obj,
+ manager,
+ "repo",
+ "spork",
+ )
+
def test_recursive_autoload():
- @configurable(typename='configsection')
+ @configurable(typename="configsection")
def autoloader():
- return {'autoload-sub': basics.HardCodedConfigSection(
- {'class': autoloader}),
- 'spork': basics.HardCodedConfigSection({'class': repo,
- 'cache': 'test'})}
+ return {
+ "autoload-sub": basics.HardCodedConfigSection({"class": autoloader}),
+ "spork": basics.HardCodedConfigSection({"class": repo, "cache": "test"}),
+ }
check_error(
"New config is trying to modify existing section(s) 'autoload-sub' "
"that was already instantiated.",
central.ConfigManager,
- [{'autoload-sub': basics.HardCodedConfigSection({
- 'class': autoloader,
- })}])
+ [
+ {
+ "autoload-sub": basics.HardCodedConfigSection(
+ {
+ "class": autoloader,
+ }
+ )
+ }
+ ],
+ )
+
def test_recursive_section_ref():
- manager = central.ConfigManager([{
- 'spork': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'content': 'foon'}),
- 'foon': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'content': 'spork'}),
- 'self': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'content': 'self'}),
- }])
+ manager = central.ConfigManager(
+ [
+ {
+ "spork": basics.ConfigSectionFromStringDict(
+ {"class": "tests.config.test_central.drawer", "content": "foon"}
+ ),
+ "foon": basics.ConfigSectionFromStringDict(
+ {"class": "tests.config.test_central.drawer", "content": "spork"}
+ ),
+ "self": basics.ConfigSectionFromStringDict(
+ {"class": "tests.config.test_central.drawer", "content": "self"}
+ ),
+ }
+ ]
+ )
check_error(
"Collapsing section named 'self':\n"
"Failed collapsing section key 'content':\n"
"Reference to 'self' is recursive",
- get_config_obj, manager, 'drawer', 'self')
+ get_config_obj,
+ manager,
+ "drawer",
+ "self",
+ )
check_error(
"Collapsing section named 'spork':\n"
"Failed collapsing section key 'content':\n"
"Collapsing section named 'foon':\n"
"Failed collapsing section key 'content':\n"
"Reference to 'spork' is recursive",
- get_config_obj, manager, 'drawer', 'spork')
+ get_config_obj,
+ manager,
+ "drawer",
+ "spork",
+ )
+
def test_recursive_inherit():
- manager = central.ConfigManager([{
- 'spork': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'inherit': 'foon'}),
- 'foon': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'inherit': 'spork'}),
- }])
+ manager = central.ConfigManager(
+ [
+ {
+ "spork": basics.ConfigSectionFromStringDict(
+ {"class": "tests.config.test_central.drawer", "inherit": "foon"}
+ ),
+ "foon": basics.ConfigSectionFromStringDict(
+ {"class": "tests.config.test_central.drawer", "inherit": "spork"}
+ ),
+ }
+ ]
+ )
check_error(
- "Collapsing section named 'spork':\n"
- "Inherit 'spork' is recursive",
- get_config_obj, manager, 'drawer', 'spork')
+ "Collapsing section named 'spork':\n" "Inherit 'spork' is recursive",
+ get_config_obj,
+ manager,
+ "drawer",
+ "spork",
+ )
+
def test_alias():
def myspork():
return object
- manager = central.ConfigManager([{
- 'spork': basics.HardCodedConfigSection({'class': myspork}),
- 'foon': basics.section_alias('spork', 'myspork'),
- }])
+
+ manager = central.ConfigManager(
+ [
+ {
+ "spork": basics.HardCodedConfigSection({"class": myspork}),
+ "foon": basics.section_alias("spork", "myspork"),
+ }
+ ]
+ )
# This tests both the detected typename of foon and the caching.
- assert manager.objects.myspork['spork'] is manager.objects.myspork['foon']
+ assert manager.objects.myspork["spork"] is manager.objects.myspork["foon"]
+
def test_typecheck():
- @configurable({'myrepo': 'ref:repo'}, typename='repo')
+ @configurable({"myrepo": "ref:repo"}, typename="repo")
def reporef(myrepo=None):
return myrepo
- @configurable({'myrepo': 'refs:repo'}, typename='repo')
+
+ @configurable({"myrepo": "refs:repo"}, typename="repo")
def reporefs(myrepo=None):
return myrepo
- @configurable(typename='repo')
+
+ @configurable(typename="repo")
def myrepo():
- return 'repo!'
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo}),
- 'drawer': basics.HardCodedConfigSection({'class': drawer}),
- 'right': basics.AutoConfigSection({'class': reporef,
- 'myrepo': 'myrepo'}),
- 'wrong': basics.AutoConfigSection({'class': reporef,
- 'myrepo': 'drawer'}),
- }])
+ return "repo!"
+
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection({"class": myrepo}),
+ "drawer": basics.HardCodedConfigSection({"class": drawer}),
+ "right": basics.AutoConfigSection(
+ {"class": reporef, "myrepo": "myrepo"}
+ ),
+ "wrong": basics.AutoConfigSection(
+ {"class": reporef, "myrepo": "drawer"}
+ ),
+ }
+ ]
+ )
check_error(
"Collapsing section named 'wrong':\n"
"Failed collapsing section key 'myrepo':\n"
"reference 'drawer' should be of type 'repo', got 'drawer'",
- get_config_obj, manager, 'repo', 'wrong')
- assert 'repo!' == manager.objects.repo['right']
-
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo}),
- 'drawer': basics.HardCodedConfigSection({'class': drawer}),
- 'right': basics.AutoConfigSection({'class': reporefs,
- 'myrepo': 'myrepo'}),
- 'wrong': basics.AutoConfigSection({'class': reporefs,
- 'myrepo': 'drawer'}),
- }])
+ get_config_obj,
+ manager,
+ "repo",
+ "wrong",
+ )
+ assert "repo!" == manager.objects.repo["right"]
+
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection({"class": myrepo}),
+ "drawer": basics.HardCodedConfigSection({"class": drawer}),
+ "right": basics.AutoConfigSection(
+ {"class": reporefs, "myrepo": "myrepo"}
+ ),
+ "wrong": basics.AutoConfigSection(
+ {"class": reporefs, "myrepo": "drawer"}
+ ),
+ }
+ ]
+ )
check_error(
"Collapsing section named 'wrong':\n"
"Failed collapsing section key 'myrepo':\n"
"reference 'drawer' should be of type 'repo', got 'drawer'",
- get_config_obj, manager, 'repo', 'wrong')
- assert ['repo!'] == manager.objects.repo['right']
+ get_config_obj,
+ manager,
+ "repo",
+ "wrong",
+ )
+ assert ["repo!"] == manager.objects.repo["right"]
+
def test_default():
- manager = central.ConfigManager([{
- 'thing': basics.HardCodedConfigSection({'class': drawer,
- 'default': True}),
- 'bug': basics.HardCodedConfigSection({'class': None,
- 'inherit-only':True,
- 'default': True}),
- 'ignore': basics.HardCodedConfigSection({'class': drawer}),
- }])
- assert (None, None) == manager.get_default('drawer')
- assert manager.collapse_named_section('thing').default
-
- manager = central.ConfigManager([{
- 'thing': basics.HardCodedConfigSection({'class': drawer,
- 'default': True}),
- 'thing2': basics.HardCodedConfigSection({'class': drawer,
- 'default': True}),
- }])
+ manager = central.ConfigManager(
+ [
+ {
+ "thing": basics.HardCodedConfigSection(
+ {"class": drawer, "default": True}
+ ),
+ "bug": basics.HardCodedConfigSection(
+ {"class": None, "inherit-only": True, "default": True}
+ ),
+ "ignore": basics.HardCodedConfigSection({"class": drawer}),
+ }
+ ]
+ )
+ assert (None, None) == manager.get_default("drawer")
+ assert manager.collapse_named_section("thing").default
+
+ manager = central.ConfigManager(
+ [
+ {
+ "thing": basics.HardCodedConfigSection(
+ {"class": drawer, "default": True}
+ ),
+ "thing2": basics.HardCodedConfigSection(
+ {"class": drawer, "default": True}
+ ),
+ }
+ ]
+ )
check_error(
"type drawer incorrectly has multiple default sections: 'thing', 'thing2'",
- manager.get_default, 'drawer')
+ manager.get_default,
+ "drawer",
+ )
manager = central.ConfigManager([])
- assert manager.get_default('drawer') is None
+ assert manager.get_default("drawer") is None
+
def test_broken_default():
def broken():
- raise errors.ComplexInstantiationError('broken')
- manager = central.ConfigManager([{
- 'thing': basics.HardCodedConfigSection({
- 'class': drawer, 'default': True,
- 'content': basics.HardCodedConfigSection({
- 'class': 'spork'})}),
- 'thing2': basics.HardCodedConfigSection({
- 'class': broken, 'default': True})}])
+ raise errors.ComplexInstantiationError("broken")
+
+ manager = central.ConfigManager(
+ [
+ {
+ "thing": basics.HardCodedConfigSection(
+ {
+ "class": drawer,
+ "default": True,
+ "content": basics.HardCodedConfigSection({"class": "spork"}),
+ }
+ ),
+ "thing2": basics.HardCodedConfigSection(
+ {"class": broken, "default": True}
+ ),
+ }
+ ]
+ )
check_error(
"Collapsing defaults for 'drawer':\n"
"Collapsing section named 'thing':\n"
"Failed collapsing section key 'content':\n"
"Failed converting argument 'class' to callable:\n"
"'spork' is not callable",
- manager.get_default, 'drawer')
+ manager.get_default,
+ "drawer",
+ )
check_error(
"Collapsing defaults for 'broken':\n"
"Collapsing section named 'thing':\n"
"Failed collapsing section key 'content':\n"
"Failed converting argument 'class' to callable:\n"
"'spork' is not callable",
- manager.get_default, 'broken')
+ manager.get_default,
+ "broken",
+ )
+
def test_instantiate_broken_ref():
- @configurable(typename='drawer')
+ @configurable(typename="drawer")
def broken():
- raise errors.ComplexInstantiationError('broken')
- manager = central.ConfigManager([{
- 'one': basics.HardCodedConfigSection({
- 'class': drawer,
- 'content': basics.HardCodedConfigSection({
- 'class': broken})}),
- 'multi': basics.HardCodedConfigSection({
- 'class': drawer,
- 'contents': [basics.HardCodedConfigSection({
- 'class': broken})]}),
- }])
+ raise errors.ComplexInstantiationError("broken")
+
+ manager = central.ConfigManager(
+ [
+ {
+ "one": basics.HardCodedConfigSection(
+ {
+ "class": drawer,
+ "content": basics.HardCodedConfigSection({"class": broken}),
+ }
+ ),
+ "multi": basics.HardCodedConfigSection(
+ {
+ "class": drawer,
+ "contents": [basics.HardCodedConfigSection({"class": broken})],
+ }
+ ),
+ }
+ ]
+ )
check_error(
"Failed instantiating section 'one':\n"
"Instantiating reference 'content' pointing at None:\n"
"Failed instantiating section None:\n"
"Failed instantiating section None: exception caught from 'tests.config.test_central.broken':\n"
"'broken', callable unset!",
- manager.collapse_named_section('one').instantiate)
+ manager.collapse_named_section("one").instantiate,
+ )
check_error(
"Failed instantiating section 'multi':\n"
"Instantiating reference 'contents' pointing at None:\n"
"Failed instantiating section None:\n"
"Failed instantiating section None: exception caught from 'tests.config.test_central.broken':\n"
"'broken', callable unset!",
- manager.collapse_named_section('multi').instantiate)
+ manager.collapse_named_section("multi").instantiate,
+ )
+
def test_autoload_instantiationerror():
- @configurable(typename='configsection')
+ @configurable(typename="configsection")
def broken():
- raise errors.ComplexInstantiationError('broken')
+ raise errors.ComplexInstantiationError("broken")
+
check_error(
"Failed loading autoload section 'autoload_broken':\n"
"Failed instantiating section 'autoload_broken':\n"
"Failed instantiating section 'autoload_broken': exception caught from 'tests.config.test_central.broken':\n"
"'broken', callable unset!",
- central.ConfigManager, [{
- 'autoload_broken': basics.HardCodedConfigSection({
- 'class': broken})}])
+ central.ConfigManager,
+ [{"autoload_broken": basics.HardCodedConfigSection({"class": broken})}],
+ )
+
def test_autoload_uncollapsable():
check_error(
@@ -533,172 +789,266 @@ def test_autoload_uncollapsable():
"Collapsing section named 'autoload_broken':\n"
"Failed converting argument 'class' to callable:\n"
"'spork' is not callable",
- central.ConfigManager, [{
- 'autoload_broken': basics.HardCodedConfigSection({
- 'class': 'spork'})}])
+ central.ConfigManager,
+ [{"autoload_broken": basics.HardCodedConfigSection({"class": "spork"})}],
+ )
+
def test_autoload_wrong_type():
check_error(
"Section 'autoload_wrong' is marked as autoload but type is "
- 'drawer, not configsection',
- central.ConfigManager, [{
- 'autoload_wrong': basics.HardCodedConfigSection({
- 'class': drawer})}])
+ "drawer, not configsection",
+ central.ConfigManager,
+ [{"autoload_wrong": basics.HardCodedConfigSection({"class": drawer})}],
+ )
+
def test_lazy_refs():
- @configurable({'myrepo': 'lazy_ref:repo', 'thing': 'lazy_ref'},
- typename='repo')
+ @configurable({"myrepo": "lazy_ref:repo", "thing": "lazy_ref"}, typename="repo")
def reporef(myrepo=None, thing=None):
return myrepo, thing
- @configurable({'myrepo': 'lazy_refs:repo', 'thing': 'lazy_refs'},
- typename='repo')
+
+ @configurable({"myrepo": "lazy_refs:repo", "thing": "lazy_refs"}, typename="repo")
def reporefs(myrepo=None, thing=None):
return myrepo, thing
- @configurable(typename='repo')
+
+ @configurable(typename="repo")
def myrepo():
- return 'repo!'
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo}),
- 'drawer': basics.HardCodedConfigSection({'class': drawer}),
- 'right': basics.AutoConfigSection({'class': reporef,
- 'myrepo': 'myrepo'}),
- 'wrong': basics.AutoConfigSection({'class': reporef,
- 'myrepo': 'drawer'}),
- }])
+ return "repo!"
+
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection({"class": myrepo}),
+ "drawer": basics.HardCodedConfigSection({"class": drawer}),
+ "right": basics.AutoConfigSection(
+ {"class": reporef, "myrepo": "myrepo"}
+ ),
+ "wrong": basics.AutoConfigSection(
+ {"class": reporef, "myrepo": "drawer"}
+ ),
+ }
+ ]
+ )
check_error(
"reference 'drawer' should be of type 'repo', got 'drawer'",
- manager.objects.repo['wrong'][0].collapse)
- assert 'repo!' == manager.objects.repo['right'][0].instantiate()
-
- manager = central.ConfigManager([{
- 'myrepo': basics.HardCodedConfigSection({'class': myrepo}),
- 'drawer': basics.HardCodedConfigSection({'class': drawer}),
- 'right': basics.AutoConfigSection({'class': reporefs,
- 'myrepo': 'myrepo'}),
- 'wrong': basics.AutoConfigSection({'class': reporefs,
- 'myrepo': 'drawer'}),
- }])
+ manager.objects.repo["wrong"][0].collapse,
+ )
+ assert "repo!" == manager.objects.repo["right"][0].instantiate()
+
+ manager = central.ConfigManager(
+ [
+ {
+ "myrepo": basics.HardCodedConfigSection({"class": myrepo}),
+ "drawer": basics.HardCodedConfigSection({"class": drawer}),
+ "right": basics.AutoConfigSection(
+ {"class": reporefs, "myrepo": "myrepo"}
+ ),
+ "wrong": basics.AutoConfigSection(
+ {"class": reporefs, "myrepo": "drawer"}
+ ),
+ }
+ ]
+ )
check_error(
"reference 'drawer' should be of type 'repo', got 'drawer'",
- manager.objects.repo['wrong'][0][0].collapse)
- assert ['repo!'] == [c.instantiate() for c in manager.objects.repo['right'][0]]
+ manager.objects.repo["wrong"][0][0].collapse,
+ )
+ assert ["repo!"] == [c.instantiate() for c in manager.objects.repo["right"][0]]
+
def test_inherited_default():
- manager = central.ConfigManager([{
- 'default': basics.HardCodedConfigSection({
- 'default': True,
- 'inherit': ['basic'],
- }),
- 'uncollapsable': basics.HardCodedConfigSection({
- 'default': True,
- 'inherit': ['spork'],
- 'inherit-only': True,
- }),
- 'basic': basics.HardCodedConfigSection({'class': drawer}),
- }], [RemoteSource()])
- assert manager.get_default('drawer')
+ manager = central.ConfigManager(
+ [
+ {
+ "default": basics.HardCodedConfigSection(
+ {
+ "default": True,
+ "inherit": ["basic"],
+ }
+ ),
+ "uncollapsable": basics.HardCodedConfigSection(
+ {
+ "default": True,
+ "inherit": ["spork"],
+ "inherit-only": True,
+ }
+ ),
+ "basic": basics.HardCodedConfigSection({"class": drawer}),
+ }
+ ],
+ [RemoteSource()],
+ )
+ assert manager.get_default("drawer")
+
def test_section_names():
- manager = central.ConfigManager([{
- 'thing': basics.HardCodedConfigSection({'class': drawer}),
- }], [RemoteSource()])
- collapsed = manager.collapse_named_section('thing')
- assert 'thing' == collapsed.name
+ manager = central.ConfigManager(
+ [
+ {
+ "thing": basics.HardCodedConfigSection({"class": drawer}),
+ }
+ ],
+ [RemoteSource()],
+ )
+ collapsed = manager.collapse_named_section("thing")
+ assert "thing" == collapsed.name
+
def test_inherit_only():
- manager = central.ConfigManager([{
- 'source': basics.HardCodedConfigSection({
- 'class': drawer,
- 'inherit-only': True,
- }),
- 'target': basics.HardCodedConfigSection({
- 'inherit': ['source'],
- })
- }], [RemoteSource()])
+ manager = central.ConfigManager(
+ [
+ {
+ "source": basics.HardCodedConfigSection(
+ {
+ "class": drawer,
+ "inherit-only": True,
+ }
+ ),
+ "target": basics.HardCodedConfigSection(
+ {
+ "inherit": ["source"],
+ }
+ ),
+ }
+ ],
+ [RemoteSource()],
+ )
check_error(
- "Collapsing section named 'source':\n"
- 'cannot collapse inherit-only section',
- manager.collapse_named_section, 'source')
- assert manager.collapse_named_section('target')
+ "Collapsing section named 'source':\n" "cannot collapse inherit-only section",
+ manager.collapse_named_section,
+ "source",
+ )
+ assert manager.collapse_named_section("target")
+
def test_self_inherit():
- section = basics.HardCodedConfigSection({'inherit': ['self']})
- manager = central.ConfigManager([{
- 'self': basics.ConfigSectionFromStringDict({
- 'class': 'tests.config.test_central.drawer',
- 'inherit': 'self'}),
- }], [RemoteSource()])
+ section = basics.HardCodedConfigSection({"inherit": ["self"]})
+ manager = central.ConfigManager(
+ [
+ {
+ "self": basics.ConfigSectionFromStringDict(
+ {"class": "tests.config.test_central.drawer", "inherit": "self"}
+ ),
+ }
+ ],
+ [RemoteSource()],
+ )
check_error(
- "Collapsing section named 'self':\n"
- "Self-inherit 'self' cannot be found",
- get_config_obj, manager, 'drawer', 'self')
+ "Collapsing section named 'self':\n" "Self-inherit 'self' cannot be found",
+ get_config_obj,
+ manager,
+ "drawer",
+ "self",
+ )
check_error(
- "Self-inherit 'self' cannot be found",
- manager.collapse_section, [section])
-
- manager = central.ConfigManager([{
- 'self': basics.HardCodedConfigSection({
- 'inherit': ['self'],
- })}, {
- 'self': basics.HardCodedConfigSection({
- 'inherit': ['self'],
- })}, {
- 'self': basics.HardCodedConfigSection({
- 'class': drawer})}])
- assert manager.collapse_named_section('self')
+ "Self-inherit 'self' cannot be found", manager.collapse_section, [section]
+ )
+
+ manager = central.ConfigManager(
+ [
+ {
+ "self": basics.HardCodedConfigSection(
+ {
+ "inherit": ["self"],
+ }
+ )
+ },
+ {
+ "self": basics.HardCodedConfigSection(
+ {
+ "inherit": ["self"],
+ }
+ )
+ },
+ {"self": basics.HardCodedConfigSection({"class": drawer})},
+ ]
+ )
+ assert manager.collapse_named_section("self")
assert manager.collapse_section([section])
+
def test_prepend_inherit():
- manager = central.ConfigManager([{
- 'sect': basics.HardCodedConfigSection({
- 'inherit.prepend': ['self']})}])
+ manager = central.ConfigManager(
+ [{"sect": basics.HardCodedConfigSection({"inherit.prepend": ["self"]})}]
+ )
check_error(
"Collapsing section named 'sect':\n"
- 'Prepending or appending to the inherit list makes no sense',
- manager.collapse_named_section, 'sect')
+ "Prepending or appending to the inherit list makes no sense",
+ manager.collapse_named_section,
+ "sect",
+ )
+
def test_list_prepend():
- @configurable({'seq': 'list'})
+ @configurable({"seq": "list"})
def seq(seq):
return seq
- manager = central.ConfigManager([{
- 'inh': basics.HardCodedConfigSection({
- 'inherit': ['sect'],
- 'seq.prepend': ['pre'],
- }),
- 'sect': basics.HardCodedConfigSection({
- 'inherit': ['base'],
- 'seq': ['1', '2'],
- })}, {
- 'base': basics.HardCodedConfigSection({
- 'class': seq,
- 'seq.prepend': ['-1'],
- 'seq.append': ['post'],
- })}])
- assert ['-1', 'post'] == manager.objects.seq['base']
- assert ['1', '2'] == manager.objects.seq['sect']
- assert ['pre', '1', '2'] == manager.objects.seq['inh']
+
+ manager = central.ConfigManager(
+ [
+ {
+ "inh": basics.HardCodedConfigSection(
+ {
+ "inherit": ["sect"],
+ "seq.prepend": ["pre"],
+ }
+ ),
+ "sect": basics.HardCodedConfigSection(
+ {
+ "inherit": ["base"],
+ "seq": ["1", "2"],
+ }
+ ),
+ },
+ {
+ "base": basics.HardCodedConfigSection(
+ {
+ "class": seq,
+ "seq.prepend": ["-1"],
+ "seq.append": ["post"],
+ }
+ )
+ },
+ ]
+ )
+ assert ["-1", "post"] == manager.objects.seq["base"]
+ assert ["1", "2"] == manager.objects.seq["sect"]
+ assert ["pre", "1", "2"] == manager.objects.seq["inh"]
+
def test_str_prepend():
- @configurable({'string': 'str'})
+ @configurable({"string": "str"})
def sect(string):
return string
- manager = central.ConfigManager([{
- 'inh': basics.HardCodedConfigSection({
- 'inherit': ['sect'],
- 'string.prepend': 'pre',
- }),
- 'sect': basics.HardCodedConfigSection({
- 'inherit': ['base'],
- 'string': 'b',
- })
- }, {
- 'base': basics.HardCodedConfigSection({
- 'class': sect,
- 'string.prepend': 'a',
- 'string.append': 'c',
- })
- }])
- assert 'a c' == manager.objects.sect['base']
- assert 'b' == manager.objects.sect['sect']
- assert 'pre b' == manager.objects.sect['inh']
+
+ manager = central.ConfigManager(
+ [
+ {
+ "inh": basics.HardCodedConfigSection(
+ {
+ "inherit": ["sect"],
+ "string.prepend": "pre",
+ }
+ ),
+ "sect": basics.HardCodedConfigSection(
+ {
+ "inherit": ["base"],
+ "string": "b",
+ }
+ ),
+ },
+ {
+ "base": basics.HardCodedConfigSection(
+ {
+ "class": sect,
+ "string.prepend": "a",
+ "string.append": "c",
+ }
+ )
+ },
+ ]
+ )
+ assert "a c" == manager.objects.sect["base"]
+ assert "b" == manager.objects.sect["sect"]
+ assert "pre b" == manager.objects.sect["inh"]
diff --git a/tests/config/test_cparser.py b/tests/config/test_cparser.py
index 648097328..e7d0b0ea5 100644
--- a/tests/config/test_cparser.py
+++ b/tests/config/test_cparser.py
@@ -7,23 +7,29 @@ from pkgcore.config import central, cparser, errors
def test_case_sensitive_config_parser():
cp = cparser.CaseSensitiveConfigParser()
- config = StringIO('\n'.join((
- '[header]',
- 'foo=bar',
- 'FOO=BAR',
- '[HEADER]',
- 'foo=notbar',
- )))
+ config = StringIO(
+ "\n".join(
+ (
+ "[header]",
+ "foo=bar",
+ "FOO=BAR",
+ "[HEADER]",
+ "foo=notbar",
+ )
+ )
+ )
cp.read_file(config)
- assert cp.get('header', 'foo') == 'bar'
- assert cp.get('header', 'FOO') == 'BAR'
- assert cp.get('HEADER', 'foo') == 'notbar'
+ assert cp.get("header", "foo") == "bar"
+ assert cp.get("header", "FOO") == "BAR"
+ assert cp.get("HEADER", "foo") == "notbar"
class TestConfigFromIni:
-
def test_config_from_ini(self):
- config = cparser.config_from_file(StringIO(textwrap.dedent('''\
+ config = cparser.config_from_file(
+ StringIO(
+ textwrap.dedent(
+ """\
[test]
string = 'hi I am a string'
list = foo bar baz
@@ -31,23 +37,37 @@ class TestConfigFromIni:
list.append = post bits
true = yes
false = no
- ''')))
- assert list(config.keys()) == ['test']
- section = config['test']
+ """
+ )
+ )
+ )
+ assert list(config.keys()) == ["test"]
+ section = config["test"]
for key, arg_type, value in (
- ('string', 'str', [None, 'hi I am a string', None]),
- ('list', 'list', [
- ['pre', 'bits'], ['foo', 'bar', 'baz'], ['post', 'bits']]),
- ('true', 'bool', True),
- ('false', 'bool', False),
- ):
+ ("string", "str", [None, "hi I am a string", None]),
+ (
+ "list",
+ "list",
+ [["pre", "bits"], ["foo", "bar", "baz"], ["post", "bits"]],
+ ),
+ ("true", "bool", True),
+ ("false", "bool", False),
+ ):
assert section.render_value(None, key, arg_type) == value
def test_missing_section_ref(self):
- config = cparser.config_from_file(StringIO(textwrap.dedent('''\
+ config = cparser.config_from_file(
+ StringIO(
+ textwrap.dedent(
+ """\
[test]
ref = 'missing'
- ''')))
- section = config['test']
+ """
+ )
+ )
+ )
+ section = config["test"]
with pytest.raises(errors.ConfigurationError):
- section.render_value(central.ConfigManager([]), 'ref', 'ref:drawer').collapse()
+ section.render_value(
+ central.ConfigManager([]), "ref", "ref:drawer"
+ ).collapse()
diff --git a/tests/config/test_init.py b/tests/config/test_init.py
index 1e2cc9cf3..1ac35e8eb 100644
--- a/tests/config/test_init.py
+++ b/tests/config/test_init.py
@@ -6,51 +6,44 @@ from pkgcore.config import basics, load_config
from pkgcore.config.hint import configurable
-@configurable(typename='foo')
+@configurable(typename="foo")
def passthrough(*args, **kwargs):
return args, kwargs
class TestConfigLoading:
-
@pytest.fixture
def user_config(self, tmp_path):
- user_config = tmp_path / 'user.conf'
- user_config.write_text(
- '[foo]\n'
- 'class = tests.config.test_init.passthrough\n'
- )
+ user_config = tmp_path / "user.conf"
+ user_config.write_text("[foo]\n" "class = tests.config.test_init.passthrough\n")
return str(user_config)
@pytest.fixture
def system_config(self, tmp_path):
- system_config = tmp_path / 'system.conf'
- system_config.write_text(
- '[foo]\n'
- 'class = also invalid\n'
- )
+ system_config = tmp_path / "system.conf"
+ system_config.write_text("[foo]\n" "class = also invalid\n")
return str(system_config)
def test_load_config(self, user_config):
manager = load_config(user_conf_file=user_config)
- assert manager.foo['foo'] == ((), {})
+ assert manager.foo["foo"] == ((), {})
def test_user_config_override_system(self, user_config, system_config):
manager = load_config(
- user_conf_file=user_config,
- system_conf_file=system_config)
- assert manager.foo['foo'] == ((), {})
+ user_conf_file=user_config, system_conf_file=system_config
+ )
+ assert manager.foo["foo"] == ((), {})
def test_prepends(self, user_config):
manager = load_config(
user_conf_file=user_config,
- prepend_sources=[{'myfoo': basics.HardCodedConfigSection({
- 'inherit': ['foo']})}])
- assert manager.foo['myfoo'] == ((), {})
+ prepend_sources=[
+ {"myfoo": basics.HardCodedConfigSection({"inherit": ["foo"]})}
+ ],
+ )
+ assert manager.foo["myfoo"] == ((), {})
def test_disabling_loading(self, user_config):
- manager = load_config(
- user_conf_file=user_config,
- skip_config_files=True)
+ manager = load_config(user_conf_file=user_config, skip_config_files=True)
with pytest.raises(KeyError):
- manager.foo['foo']
+ manager.foo["foo"]
diff --git a/tests/conftest.py b/tests/conftest.py
index c8f632b85..527754c8c 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,15 +5,19 @@ import pytest
def pytest_addoption(parser):
parser.addoption(
- '--network', action='store_true', dest="network",
- default=False, help="allow network related tests to run")
+ "--network",
+ action="store_true",
+ dest="network",
+ default=False,
+ help="allow network related tests to run",
+ )
def mark_network(config, func):
"""Decorator to add a 'net' mark and skip the test unless --network is passed."""
skip_func = pytest.mark.skipif(
- not config.option.network,
- reason="needs --network option to run")
+ not config.option.network, reason="needs --network option to run"
+ )
return skip_func(pytest.mark.net(func))
diff --git a/tests/ebuild/test_atom.py b/tests/ebuild/test_atom.py
index 0893b817a..d13efa83c 100644
--- a/tests/ebuild/test_atom.py
+++ b/tests/ebuild/test_atom.py
@@ -20,6 +20,7 @@ def assert_equal_bidirectional(o1, o2):
assert o2 == o1
assert cmp(o2, o1) == 0
+
def assert_not_equal_bidirectional(o1, o2):
# is why we test the comparison *both* ways.
assert o1 != o2
@@ -27,8 +28,8 @@ def assert_not_equal_bidirectional(o1, o2):
assert o2 != o1
assert cmp(o2, o1) != 0
-class TestAtom(TestRestriction):
+class TestAtom(TestRestriction):
class kls(atom.atom):
__inst_caching__ = True
__slots__ = ()
@@ -52,37 +53,48 @@ class TestAtom(TestRestriction):
assert d.dnf_solutions(True) == bd.dnf_solutions()
assert d.cnf_solutions(True) == bd.cnf_solutions()
- @pytest.mark.parametrize("atom", (
- "dev-util/diffball", "=dev-util/diffball-0.7.1",
- ">foon/bar-1:2[-4,3]", "=foon/bar-2*", "~foon/bar-2.3",
- "cat/pkg:0", "cat/pkg:5", "cat/pkg:0/5", "cat/pkg:5/5",
- "cat/pkg:=", "cat/pkg:0=", "cat/pkg:*",
- "!dev-util/diffball", "!=dev-util/diffball-0.7*",
- "foon/bar::gentoo", ">=foon/bar-10_alpha1:1::gentoo[-not,use]",
- "!!dev-util/diffball[use]",
- ))
+ @pytest.mark.parametrize(
+ "atom",
+ (
+ "dev-util/diffball",
+ "=dev-util/diffball-0.7.1",
+ ">foon/bar-1:2[-4,3]",
+ "=foon/bar-2*",
+ "~foon/bar-2.3",
+ "cat/pkg:0",
+ "cat/pkg:5",
+ "cat/pkg:0/5",
+ "cat/pkg:5/5",
+ "cat/pkg:=",
+ "cat/pkg:0=",
+ "cat/pkg:*",
+ "!dev-util/diffball",
+ "!=dev-util/diffball-0.7*",
+ "foon/bar::gentoo",
+ ">=foon/bar-10_alpha1:1::gentoo[-not,use]",
+ "!!dev-util/diffball[use]",
+ ),
+ )
def test_str_hash(self, atom):
assert str(self.kls(atom)) == atom
- assert hash(self.kls(atom, disable_inst_caching=True)) == hash(self.kls(atom, disable_inst_caching=True))
+ assert hash(self.kls(atom, disable_inst_caching=True)) == hash(
+ self.kls(atom, disable_inst_caching=True)
+ )
def test_blockers(self):
- pytest.raises(errors.MalformedAtom, self.kls,
- "!!dev-util/diffball", eapi='0')
- pytest.raises(errors.MalformedAtom, self.kls,
- "!!dev-util/diffball", eapi='1')
- pytest.raises(errors.MalformedAtom, self.kls,
- "!!!dev-util/diffball", eapi='2')
+ pytest.raises(errors.MalformedAtom, self.kls, "!!dev-util/diffball", eapi="0")
+ pytest.raises(errors.MalformedAtom, self.kls, "!!dev-util/diffball", eapi="1")
+ pytest.raises(errors.MalformedAtom, self.kls, "!!!dev-util/diffball", eapi="2")
for x in range(0, 2):
obj = self.kls("!dev-util/diffball", eapi=str(x))
assert obj.blocks
assert obj.blocks_temp_ignorable
assert not obj.blocks_strongly
- obj = self.kls("!!dev-util/diffball", eapi='2')
+ obj = self.kls("!!dev-util/diffball", eapi="2")
assert obj.blocks
assert not obj.blocks_temp_ignorable
assert obj.blocks_strongly
-
def test_iter(self):
d = self.kls("!>=dev-util/diffball-0.7:1::gentoo[use,x]")
assert list(d) == list(d.restrictions)
@@ -94,14 +106,10 @@ class TestAtom(TestRestriction):
assert a == loads(dumps(a))
def test_glob(self):
- pytest.raises(errors.MalformedAtom, self.kls,
- "dev-util/diffball-1*")
- pytest.raises(errors.MalformedAtom, self.kls,
- "dev-util/diffball-1.*")
- pytest.raises(errors.MalformedAtom, self.kls,
- "~dev-util/diffball-1*")
- pytest.raises(errors.MalformedAtom, self.kls,
- "~dev-util/diffball-1.*")
+ pytest.raises(errors.MalformedAtom, self.kls, "dev-util/diffball-1*")
+ pytest.raises(errors.MalformedAtom, self.kls, "dev-util/diffball-1.*")
+ pytest.raises(errors.MalformedAtom, self.kls, "~dev-util/diffball-1*")
+ pytest.raises(errors.MalformedAtom, self.kls, "~dev-util/diffball-1.*")
a = self.kls("=dev-util/diffball-1.2*")
self.assertMatch(a, FakePkg("dev-util/diffball-1.2"))
@@ -134,8 +142,13 @@ class TestAtom(TestRestriction):
# <, =, >
ops = (-1, 0, 1)
- for ops, ver in ((-1, "1.0"), (-1, "1.1"),
- (0, "1.1-r2"), (1, "1.1-r3"), (1, "1.2")):
+ for ops, ver in (
+ (-1, "1.0"),
+ (-1, "1.1"),
+ (0, "1.1-r2"),
+ (1, "1.1-r3"),
+ (1, "1.2"),
+ ):
if not isinstance(ops, (list, tuple)):
ops = (ops,)
a = self.make_atom(astr, ops, ver)
@@ -166,10 +179,18 @@ class TestAtom(TestRestriction):
# with ~
pytest.raises(errors.MalformedAtom, self.kls, f"~{astr}-1-r0")
- @pytest.mark.parametrize(("eapi", "defaults"), (
- (2, False), (3, False),
- (4, True), (5, True), (6, True), (7, True), (8, True),
- ))
+ @pytest.mark.parametrize(
+ ("eapi", "defaults"),
+ (
+ (2, False),
+ (3, False),
+ (4, True),
+ (5, True),
+ (6, True),
+ (7, True),
+ (8, True),
+ ),
+ )
def test_eapi_use(self, eapi, defaults):
astr = "dev-util/bsdiff"
c = FakePkg(f"{astr}-1", use=("debug",), iuse=("debug", "foon"), slot=1)
@@ -178,18 +199,26 @@ class TestAtom(TestRestriction):
# Valid chars: [a-zA-Z0-9_@+-]
for use_text in (
- '[zZaA09]', '[x@y]', '[x+y]', '[x-y]', '[x_y]',
- '[-x_y]', '[x?]', '[!x?]', '[x=]', '[!x=]',
+ "[zZaA09]",
+ "[x@y]",
+ "[x+y]",
+ "[x-y]",
+ "[x_y]",
+ "[-x_y]",
+ "[x?]",
+ "[!x?]",
+ "[x=]",
+ "[!x=]",
):
- kls(f'{astr}{use_text}')
+ kls(f"{astr}{use_text}")
if defaults:
- kls(f'{astr}[x(+)]')
- kls(f'{astr}[x(-)]')
+ kls(f"{astr}[x(+)]")
+ kls(f"{astr}[x(-)]")
with pytest.raises(errors.MalformedAtom):
- kls(f'{astr}[x(+-)]')
+ kls(f"{astr}[x(+-)]")
with pytest.raises(errors.MalformedAtom):
- kls(f'{astr}[x(@)]')
+ kls(f"{astr}[x(@)]")
self.assertMatch(kls(f"{astr}[debug(+)]"), c)
self.assertMatch(kls(f"{astr}[debug(-)]"), c)
self.assertMatch(kls(f"{astr}[missing(+)]"), c)
@@ -203,15 +232,21 @@ class TestAtom(TestRestriction):
self.assertMatch(kls(f"{astr}[missing(+),debug(+)]"), c)
else:
with pytest.raises(errors.MalformedAtom):
- kls(f'{astr}[x(+)]')
+ kls(f"{astr}[x(+)]")
with pytest.raises(errors.MalformedAtom):
- kls(f'{astr}[x(-)]')
+ kls(f"{astr}[x(-)]")
for use_text in (
# '.' not a valid char in use deps
"[x.y]",
# Use deps start with an alphanumeric char (non-transitive)
- "[@x]", "[_x]", "[+x]", "[-@x]", "[-_x]", "[-+x]", "[--x]",
+ "[@x]",
+ "[_x]",
+ "[+x]",
+ "[-@x]",
+ "[-_x]",
+ "[-+x]",
+ "[--x]",
):
with pytest.raises(errors.MalformedAtom):
kls(f"{astr}{use_text}")
@@ -251,54 +286,66 @@ class TestAtom(TestRestriction):
self.assertNotMatch(self.kls(f"{astr}:2"), c)
# note the above isn't compliant with eapi2/3; thus this test
with pytest.raises(errors.MalformedAtom):
- self.kls("dev-util/foo:0", eapi='0')
+ self.kls("dev-util/foo:0", eapi="0")
# shouldn't puke, but has, thus checking"
self.kls("sys-libs/db:4.4")
self.kls(f"{astr}:azAZ.-+_09")
- self.kls(f"{astr}:_bar") # According to PMS, underscore and plus-sign are
- self.kls(f"{astr}:+bar") # not invalid first chars in a slot dep
-
- @pytest.mark.parametrize("atom", (
- "dev-util/foo:",
- "dev-util/foo:1,,0",
- "dev-util/foo:1:",
- "dev-util/foo:-1",
- "dev-util/foo:.1",
- "dev-util/foo:1@2",
- "dev-util/foo[bar]:1",
- ))
+ self.kls(f"{astr}:_bar") # According to PMS, underscore and plus-sign are
+ self.kls(f"{astr}:+bar") # not invalid first chars in a slot dep
+
+ @pytest.mark.parametrize(
+ "atom",
+ (
+ "dev-util/foo:",
+ "dev-util/foo:1,,0",
+ "dev-util/foo:1:",
+ "dev-util/foo:-1",
+ "dev-util/foo:.1",
+ "dev-util/foo:1@2",
+ "dev-util/foo[bar]:1",
+ ),
+ )
def test_slot_malformed_atom(self, atom):
with pytest.raises(errors.MalformedAtom):
self.kls(atom)
def test_slot_operators_and_subslots(self):
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:*", eapi='4')
- self.kls("sys-libs/db:*", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:=", eapi='4')
- self.kls("sys-libs/db:=", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:==", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:1=", eapi='4')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:2/3.0=", eapi='4')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:2/3.0", eapi='1')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:/=", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:/1=", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:1/=", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:*1/=", eapi='5')
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:*", eapi="4")
+ self.kls("sys-libs/db:*", eapi="5")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:=", eapi="4")
+ self.kls("sys-libs/db:=", eapi="5")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:==", eapi="5")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:1=", eapi="4")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:2/3.0=", eapi="4")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:2/3.0", eapi="1")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:/=", eapi="5")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:/1=", eapi="5")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:1/=", eapi="5")
+ pytest.raises(errors.MalformedAtom, self.kls, "sys-libs/db:*1/=", eapi="5")
for subslot in ("/1.0", ""):
- pytest.raises(errors.MalformedAtom, self.kls, f"sys-libs/db:*4{subslot}", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, f"sys-libs/db:4{subslot}*", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, f"sys-libs/db:=4{subslot}", eapi='5')
- self.kls(f"sys-libs/db:4{subslot}=", eapi='5')
- self.kls(f"sys-libs/db:3.2{subslot}=", eapi='5')
- pytest.raises(errors.MalformedAtom, self.kls, f"sys-libs/db:4{subslot}==", eapi='5')
+ pytest.raises(
+ errors.MalformedAtom, self.kls, f"sys-libs/db:*4{subslot}", eapi="5"
+ )
+ pytest.raises(
+ errors.MalformedAtom, self.kls, f"sys-libs/db:4{subslot}*", eapi="5"
+ )
+ pytest.raises(
+ errors.MalformedAtom, self.kls, f"sys-libs/db:=4{subslot}", eapi="5"
+ )
+ self.kls(f"sys-libs/db:4{subslot}=", eapi="5")
+ self.kls(f"sys-libs/db:3.2{subslot}=", eapi="5")
+ pytest.raises(
+ errors.MalformedAtom, self.kls, f"sys-libs/db:4{subslot}==", eapi="5"
+ )
def check_it(text, slot, subslot, operator):
obj = self.kls(f"sys-libs/db{text}")
assert obj.slot == slot
assert obj.subslot == subslot
assert obj.slot_operator == operator
+
check_it(":4", "4", None, None)
check_it(":=", None, None, "=")
check_it(":4=", "4", None, "=")
@@ -306,16 +353,22 @@ class TestAtom(TestRestriction):
check_it(":*", None, None, "*")
# Verify restrictions.
- self.assertMatch(self.kls("sys-libs/db:1="),
- FakePkg("sys-libs/db-1", slot="1"))
- self.assertMatch(self.kls("sys-libs/db:1/2="),
- FakePkg("sys-libs/db-1", slot="1", subslot="2"))
- self.assertNotMatch(self.kls("sys-libs/db:1/2.3="),
- FakePkg("sys-libs/db-1", slot="1", subslot="2"))
- self.assertNotMatch(self.kls("sys-libs/db:1/2.3="),
- FakePkg("sys-libs/db-1", slot="1"))
- self.assertMatch(self.kls("sys-libs/db:1a.2/2.3"),
- FakePkg("sys-libs/db-1", slot="1a.2", subslot="2.3"))
+ self.assertMatch(self.kls("sys-libs/db:1="), FakePkg("sys-libs/db-1", slot="1"))
+ self.assertMatch(
+ self.kls("sys-libs/db:1/2="),
+ FakePkg("sys-libs/db-1", slot="1", subslot="2"),
+ )
+ self.assertNotMatch(
+ self.kls("sys-libs/db:1/2.3="),
+ FakePkg("sys-libs/db-1", slot="1", subslot="2"),
+ )
+ self.assertNotMatch(
+ self.kls("sys-libs/db:1/2.3="), FakePkg("sys-libs/db-1", slot="1")
+ )
+ self.assertMatch(
+ self.kls("sys-libs/db:1a.2/2.3"),
+ FakePkg("sys-libs/db-1", slot="1a.2", subslot="2.3"),
+ )
def test_getattr(self):
# assert it explodes for bad attr access.
@@ -327,15 +380,17 @@ class TestAtom(TestRestriction):
def assertAttr(attr):
assert restrictions[pos].attr == attr, (
f"expected attr {attr!r} at {pos} for ver({ver}), repo({repo}) use({use}), "
- f"slot({slot}): got {restrictions[pos].attr!r} from {restrictions!r}")
+ f"slot({slot}): got {restrictions[pos].attr!r} from {restrictions!r}"
+ )
return pos + 1
- slot = ''
+ slot = ""
+
def f():
- for pref, ver in (('', ''), ('=', '-0.1')):
- for repo in ('', '::gentoo'):
- for slot in ('', ':1'):
- for use in ('', '[x]'):
+ for pref, ver in (("", ""), ("=", "-0.1")):
+ for repo in ("", "::gentoo"):
+ for slot in ("", ":1"):
+ for use in ("", "[x]"):
yield pref, ver, repo, slot, use
for pref, ver, repo, slot, use in f():
@@ -348,46 +403,66 @@ class TestAtom(TestRestriction):
restrictions = o.restrictions
assert len(restrictions) == count
- assert [getattr(x, 'type', None) for x in restrictions] == ['package'] * count
+ assert [getattr(x, "type", None) for x in restrictions] == [
+ "package"
+ ] * count
if repo:
- pos = assertAttr('repo.repo_id')
- pos = assertAttr('package')
- pos = assertAttr('category')
+ pos = assertAttr("repo.repo_id")
+ pos = assertAttr("package")
+ pos = assertAttr("category")
if ver:
assert isinstance(restrictions[pos], restricts.VersionMatch)
pos += 1
if slot:
- pos = assertAttr('slot')
+ pos = assertAttr("slot")
if use:
- pos = assertAttr('use')
+ pos = assertAttr("use")
def test_eapi0(self):
- for postfix in (':1', ':1,2', ':asdf', '::asdf', '::asdf-x86', '[x]',
- '[x,y]', ':1[x,y]', '[x,y]:1', ':1::repo'):
+ for postfix in (
+ ":1",
+ ":1,2",
+ ":asdf",
+ "::asdf",
+ "::asdf-x86",
+ "[x]",
+ "[x,y]",
+ ":1[x,y]",
+ "[x,y]:1",
+ ":1::repo",
+ ):
with pytest.raises(errors.MalformedAtom):
# "dev-util/foon{postfix} must be invalid in EAPI 0",
- self.kls(f"dev-util/foon{postfix}", eapi='0')
+ self.kls(f"dev-util/foon{postfix}", eapi="0")
def test_eapi1(self):
- for postfix in (':1,2', '::asdf', '::asdf-x86', '[x]',
- '[x,y]', ':1[x,y]', '[x,y]:1', ':1:repo'):
+ for postfix in (
+ ":1,2",
+ "::asdf",
+ "::asdf-x86",
+ "[x]",
+ "[x,y]",
+ ":1[x,y]",
+ "[x,y]:1",
+ ":1:repo",
+ ):
with pytest.raises(errors.MalformedAtom):
# "dev-util/foon{postfix} must be invalid in EAPI 1"
- self.kls(f"dev-util/foon{postfix}", eapi='1')
- self.kls("dev-util/foon:1", eapi='1')
- self.kls("dev-util/foon:12", eapi='1')
+ self.kls(f"dev-util/foon{postfix}", eapi="1")
+ self.kls("dev-util/foon:1", eapi="1")
+ self.kls("dev-util/foon:12", eapi="1")
with pytest.raises(errors.MalformedAtom):
# "dev-util/foon[dar] must be invalid in EAPI 1"
- self.kls("dev-util/foon:1,2", eapi='1')
+ self.kls("dev-util/foon:1,2", eapi="1")
def test_eapi3(self):
- self.kls("dev-util/foon:1", eapi='3')
- self.kls("dev-util/foon:2", eapi='3')
- self.kls("!dev-util/foon:1", eapi='3')
- self.kls("dev-util/foon:1[x]", eapi='3')
- self.kls("dev-util/foon:1[x?]", eapi='3')
+ self.kls("dev-util/foon:1", eapi="3")
+ self.kls("dev-util/foon:2", eapi="3")
+ self.kls("!dev-util/foon:1", eapi="3")
+ self.kls("dev-util/foon:1[x]", eapi="3")
+ self.kls("dev-util/foon:1[x?]", eapi="3")
with pytest.raises(errors.MalformedAtom):
- self.kls("dev-util/foon:1::dar", eapi='3')
+ self.kls("dev-util/foon:1::dar", eapi="3")
def test_repo_id(self):
astr = "dev-util/bsdiff"
@@ -408,115 +483,140 @@ class TestAtom(TestRestriction):
with pytest.raises(errors.MalformedAtom):
self.kls("dev-util/foon::gentoo-x86", eapi=str(x))
- @pytest.mark.parametrize("atom", (
- '~dev-util/spork', '>dev-util/spork', 'dev-util/spork-3', 'spork'
- ))
+ @pytest.mark.parametrize(
+ "atom", ("~dev-util/spork", ">dev-util/spork", "dev-util/spork-3", "spork")
+ )
def test_invalid_atom(self, atom):
with pytest.raises(errors.MalformedAtom):
self.kls(atom)
- @pytest.mark.parametrize(("this", "that", "result"), (
- ('cat/pkg', 'pkg/cat', False),
- ('cat/pkg', 'cat/pkg', True),
- ('cat/pkg:1', 'cat/pkg:2', False),
- ('cat/pkg:1', 'cat/pkg:1', True),
- ('cat/pkg:1', 'cat/pkg[foo]', True),
- ('cat/pkg:0/0', 'cat/pkg:0/1', False),
- ('cat/pkg:0/0', 'cat/pkg:0/0', True),
- ('cat/pkg:0/0', 'cat/pkg:0', True),
- ('cat/pkg:0/0', 'cat/pkg', True),
- ('cat/pkg[foo]', 'cat/pkg[-bar]', True),
- ('cat/pkg[foo]', 'cat/pkg[-foo]', False),
- ('>cat/pkg-3', '>cat/pkg-1', True),
- ('>cat/pkg-3', '<cat/pkg-3', False),
- ('>=cat/pkg-3', '<cat/pkg-3', False),
- ('>cat/pkg-2', '=cat/pkg-2*', True),
- ('<cat/pkg-2_alpha1', '=cat/pkg-2*', True),
- ('=cat/pkg-2', '=cat/pkg-2', True),
- ('=cat/pkg-3', '=cat/pkg-2', False),
- ('=cat/pkg-2', '>cat/pkg-2', False),
- ('=cat/pkg-2', '>=cat/pkg-2', True),
- ('~cat/pkg-2', '~cat/pkg-2', True),
- ('~cat/pkg-2', '~cat/pkg-2.1', False),
- ('=cat/pkg-2*', '=cat/pkg-2.3*', True),
- ('>cat/pkg-2.4', '=cat/pkg-2*', True),
- ('<cat/pkg-2.4', '=cat/pkg-2*', True),
- ('<cat/pkg-1', '=cat/pkg-2*', False),
- ('~cat/pkg-2', '>cat/pkg-2-r1', True),
- ('~cat/pkg-2', '<=cat/pkg-2', True),
- ('=cat/pkg-2-r2*', '<=cat/pkg-2-r20', True),
- ('=cat/pkg-2-r2*', '<cat/pkg-2-r20', True),
- ('=cat/pkg-2-r2*', '<=cat/pkg-2-r2', True),
- ('~cat/pkg-2', '<cat/pkg-2', False),
- ('=cat/pkg-1-r10*', '~cat/pkg-1', True),
- ('=cat/pkg-1-r1*', '<cat/pkg-1-r1', False),
- ('=cat/pkg-1*', '>cat/pkg-2', False),
- ('>=cat/pkg-8.4', '=cat/pkg-8.3.4*', False),
- ('cat/pkg::gentoo', 'cat/pkg', True),
- ('cat/pkg::gentoo', 'cat/pkg::foo', False),
- # known to cause an assplosion, thus redundant test.
- ('=sys-devel/gcc-4.1.1-r3', '=sys-devel/gcc-3.3*', False),
- ('=sys-libs/db-4*', '~sys-libs/db-4.3.29', True),
- ))
+ @pytest.mark.parametrize(
+ ("this", "that", "result"),
+ (
+ ("cat/pkg", "pkg/cat", False),
+ ("cat/pkg", "cat/pkg", True),
+ ("cat/pkg:1", "cat/pkg:2", False),
+ ("cat/pkg:1", "cat/pkg:1", True),
+ ("cat/pkg:1", "cat/pkg[foo]", True),
+ ("cat/pkg:0/0", "cat/pkg:0/1", False),
+ ("cat/pkg:0/0", "cat/pkg:0/0", True),
+ ("cat/pkg:0/0", "cat/pkg:0", True),
+ ("cat/pkg:0/0", "cat/pkg", True),
+ ("cat/pkg[foo]", "cat/pkg[-bar]", True),
+ ("cat/pkg[foo]", "cat/pkg[-foo]", False),
+ (">cat/pkg-3", ">cat/pkg-1", True),
+ (">cat/pkg-3", "<cat/pkg-3", False),
+ (">=cat/pkg-3", "<cat/pkg-3", False),
+ (">cat/pkg-2", "=cat/pkg-2*", True),
+ ("<cat/pkg-2_alpha1", "=cat/pkg-2*", True),
+ ("=cat/pkg-2", "=cat/pkg-2", True),
+ ("=cat/pkg-3", "=cat/pkg-2", False),
+ ("=cat/pkg-2", ">cat/pkg-2", False),
+ ("=cat/pkg-2", ">=cat/pkg-2", True),
+ ("~cat/pkg-2", "~cat/pkg-2", True),
+ ("~cat/pkg-2", "~cat/pkg-2.1", False),
+ ("=cat/pkg-2*", "=cat/pkg-2.3*", True),
+ (">cat/pkg-2.4", "=cat/pkg-2*", True),
+ ("<cat/pkg-2.4", "=cat/pkg-2*", True),
+ ("<cat/pkg-1", "=cat/pkg-2*", False),
+ ("~cat/pkg-2", ">cat/pkg-2-r1", True),
+ ("~cat/pkg-2", "<=cat/pkg-2", True),
+ ("=cat/pkg-2-r2*", "<=cat/pkg-2-r20", True),
+ ("=cat/pkg-2-r2*", "<cat/pkg-2-r20", True),
+ ("=cat/pkg-2-r2*", "<=cat/pkg-2-r2", True),
+ ("~cat/pkg-2", "<cat/pkg-2", False),
+ ("=cat/pkg-1-r10*", "~cat/pkg-1", True),
+ ("=cat/pkg-1-r1*", "<cat/pkg-1-r1", False),
+ ("=cat/pkg-1*", ">cat/pkg-2", False),
+ (">=cat/pkg-8.4", "=cat/pkg-8.3.4*", False),
+ ("cat/pkg::gentoo", "cat/pkg", True),
+ ("cat/pkg::gentoo", "cat/pkg::foo", False),
+ # known to cause an assplosion, thus redundant test.
+ ("=sys-devel/gcc-4.1.1-r3", "=sys-devel/gcc-3.3*", False),
+ ("=sys-libs/db-4*", "~sys-libs/db-4.3.29", True),
+ ),
+ )
def test_intersects(self, this, that, result):
this_atom = self.kls(this)
that_atom = self.kls(that)
- assert result == this_atom.intersects(that_atom), f'{this} intersecting {that} should be {result}'
- assert result == that_atom.intersects(this_atom), f'{that} intersecting {this} should be {result}'
-
+ assert result == this_atom.intersects(
+ that_atom
+ ), f"{this} intersecting {that} should be {result}"
+ assert result == that_atom.intersects(
+ this_atom
+ ), f"{that} intersecting {this} should be {result}"
def test_comparison(self):
- assert_equal_bidirectional(self.kls('cat/pkg'), self.kls('cat/pkg'))
- assert_not_equal_bidirectional(self.kls('cat/pkg'), self.kls('cat/pkgb'))
- assert_not_equal_bidirectional(self.kls('cata/pkg'), self.kls('cat/pkg'))
- assert_not_equal_bidirectional(self.kls('cat/pkg'), self.kls('!cat/pkg'))
- assert_equal_bidirectional(self.kls('!cat/pkg'), self.kls('!cat/pkg'))
- assert_not_equal_bidirectional(self.kls('=cat/pkg-0.1:0'), self.kls('=cat/pkg-0.1'))
- assert_not_equal_bidirectional(self.kls('=cat/pkg-1[foon]'), self.kls('=cat/pkg-1'))
- assert_equal_bidirectional(self.kls('=cat/pkg-0'), self.kls('=cat/pkg-0'))
- assert_not_equal_bidirectional(self.kls('<cat/pkg-2'), self.kls('>cat/pkg-2'))
- assert_not_equal_bidirectional(self.kls('=cat/pkg-2*'), self.kls('=cat/pkg-2'))
- assert_not_equal_bidirectional(self.kls('=cat/pkg-2', True), self.kls('=cat/pkg-2'))
+ assert_equal_bidirectional(self.kls("cat/pkg"), self.kls("cat/pkg"))
+ assert_not_equal_bidirectional(self.kls("cat/pkg"), self.kls("cat/pkgb"))
+ assert_not_equal_bidirectional(self.kls("cata/pkg"), self.kls("cat/pkg"))
+ assert_not_equal_bidirectional(self.kls("cat/pkg"), self.kls("!cat/pkg"))
+ assert_equal_bidirectional(self.kls("!cat/pkg"), self.kls("!cat/pkg"))
+ assert_not_equal_bidirectional(
+ self.kls("=cat/pkg-0.1:0"), self.kls("=cat/pkg-0.1")
+ )
+ assert_not_equal_bidirectional(
+ self.kls("=cat/pkg-1[foon]"), self.kls("=cat/pkg-1")
+ )
+ assert_equal_bidirectional(self.kls("=cat/pkg-0"), self.kls("=cat/pkg-0"))
+ assert_not_equal_bidirectional(self.kls("<cat/pkg-2"), self.kls(">cat/pkg-2"))
+ assert_not_equal_bidirectional(self.kls("=cat/pkg-2*"), self.kls("=cat/pkg-2"))
+ assert_not_equal_bidirectional(
+ self.kls("=cat/pkg-2", True), self.kls("=cat/pkg-2")
+ )
# use...
- assert_not_equal_bidirectional(self.kls('cat/pkg[foo]'), self.kls('cat/pkg'))
- assert_not_equal_bidirectional(self.kls('cat/pkg[foo]'), self.kls('cat/pkg[-foo]'))
- assert_equal_bidirectional(self.kls('cat/pkg[foo,-bar]'), self.kls('cat/pkg[-bar,foo]'))
+ assert_not_equal_bidirectional(self.kls("cat/pkg[foo]"), self.kls("cat/pkg"))
+ assert_not_equal_bidirectional(
+ self.kls("cat/pkg[foo]"), self.kls("cat/pkg[-foo]")
+ )
+ assert_equal_bidirectional(
+ self.kls("cat/pkg[foo,-bar]"), self.kls("cat/pkg[-bar,foo]")
+ )
# repo_id
- assert_equal_bidirectional(self.kls('cat/pkg::a'), self.kls('cat/pkg::a'))
- assert_not_equal_bidirectional(self.kls('cat/pkg::a'), self.kls('cat/pkg::b'))
- assert_not_equal_bidirectional(self.kls('cat/pkg::a'), self.kls('cat/pkg'))
+ assert_equal_bidirectional(self.kls("cat/pkg::a"), self.kls("cat/pkg::a"))
+ assert_not_equal_bidirectional(self.kls("cat/pkg::a"), self.kls("cat/pkg::b"))
+ assert_not_equal_bidirectional(self.kls("cat/pkg::a"), self.kls("cat/pkg"))
# slots.
- assert_not_equal_bidirectional(self.kls('cat/pkg:1'), self.kls('cat/pkg'))
- assert_equal_bidirectional(self.kls('cat/pkg:2'), self.kls('cat/pkg:2'))
- for lesser, greater in (('0.1', '1'), ('1', '1-r1'), ('1.1', '1.2')):
- assert self.kls(f'=d/b-{lesser}') < self.kls(f'=d/b-{greater}'), \
- f"d/b-{lesser} < d/b-{greater}"
- assert not (self.kls(f'=d/b-{lesser}') > self.kls(f'=d/b-{greater}')), \
- f"!: d/b-{lesser} < d/b-{greater}"
- assert self.kls(f'=d/b-{greater}') > self.kls(f'=d/b-{lesser}'), \
- f"d/b-{greater} > d/b-{lesser}"
- assert not (self.kls(f'=d/b-{greater}') < self.kls(f'=d/b-{lesser}')), \
- f"!: d/b-{greater} > d/b-{lesser}"
-
- assert self.kls("!!=d/b-1", eapi='2') > self.kls("!=d/b-1")
+ assert_not_equal_bidirectional(self.kls("cat/pkg:1"), self.kls("cat/pkg"))
+ assert_equal_bidirectional(self.kls("cat/pkg:2"), self.kls("cat/pkg:2"))
+ for lesser, greater in (("0.1", "1"), ("1", "1-r1"), ("1.1", "1.2")):
+ assert self.kls(f"=d/b-{lesser}") < self.kls(
+ f"=d/b-{greater}"
+ ), f"d/b-{lesser} < d/b-{greater}"
+ assert not (
+ self.kls(f"=d/b-{lesser}") > self.kls(f"=d/b-{greater}")
+ ), f"!: d/b-{lesser} < d/b-{greater}"
+ assert self.kls(f"=d/b-{greater}") > self.kls(
+ f"=d/b-{lesser}"
+ ), f"d/b-{greater} > d/b-{lesser}"
+ assert not (
+ self.kls(f"=d/b-{greater}") < self.kls(f"=d/b-{lesser}")
+ ), f"!: d/b-{greater} > d/b-{lesser}"
+
+ assert self.kls("!!=d/b-1", eapi="2") > self.kls("!=d/b-1")
assert self.kls("!=d/b-1") < self.kls("!!=d/b-1")
assert self.kls("!=d/b-1") == self.kls("!=d/b-1")
def test_compatibility(self):
- self.assertNotMatch(self.kls('=dev-util/diffball-0.7'),
- FakePkg('dev-util/diffball-0.7.0'))
+ self.assertNotMatch(
+ self.kls("=dev-util/diffball-0.7"), FakePkg("dev-util/diffball-0.7.0")
+ )
# see bug http://bugs.gentoo.org/152127
- self.assertNotMatch(self.kls('>=sys-apps/portage-2.1.0_pre3-r5'),
- FakePkg('sys-apps/portage-2.1_pre3-r5'))
+ self.assertNotMatch(
+ self.kls(">=sys-apps/portage-2.1.0_pre3-r5"),
+ FakePkg("sys-apps/portage-2.1_pre3-r5"),
+ )
def test_combined(self):
- p = FakePkg('dev-util/diffball-0.7', repo=FakeRepo(repo_id='gentoo'))
- self.assertMatch(self.kls('=dev-util/diffball-0.7::gentoo'), p)
- self.assertMatch(self.kls('dev-util/diffball::gentoo'), p)
- self.assertNotMatch(self.kls('=dev-util/diffball-0.7:1::gentoo'),
- FakePkg('dev-util/diffball-0.7', slot='2'))
+ p = FakePkg("dev-util/diffball-0.7", repo=FakeRepo(repo_id="gentoo"))
+ self.assertMatch(self.kls("=dev-util/diffball-0.7::gentoo"), p)
+ self.assertMatch(self.kls("dev-util/diffball::gentoo"), p)
+ self.assertNotMatch(
+ self.kls("=dev-util/diffball-0.7:1::gentoo"),
+ FakePkg("dev-util/diffball-0.7", slot="2"),
+ )
def test_unversioned(self):
assert self.kls("dev-util/diffball").is_simple
@@ -529,42 +629,50 @@ class TestAtom(TestRestriction):
assert not self.kls("dev-util/diffball[x]").is_simple
assert not self.kls("dev-util/diffball[x?]").is_simple
- @pytest.mark.parametrize(("original", "wanted"), (
- ("<dev-util/diffball-2", "<dev-util/diffball-2"),
- ("<dev-util/diffball-2[debug=,test=]", "<dev-util/diffball-2"),
- ("=dev-util/diffball-2", "=dev-util/diffball-2"),
- ("=dev-util/diffball-2[debug=,test=]", "=dev-util/diffball-2"),
- ("=dev-util/diffball-2*", "=dev-util/diffball-2*"),
- ("=dev-util/diffball-2*[debug=,test=]", "=dev-util/diffball-2*"),
- ("dev-util/diffball:0", "dev-util/diffball:0"),
- ("dev-util/diffball:0[debug=,test=]", "dev-util/diffball:0"),
- ("dev-util/diffball:0/1.12", "dev-util/diffball:0/1.12"),
- ("dev-util/diffball:0/1.12[debug=,test=]", "dev-util/diffball:0/1.12"),
- ("!dev-util/diffball", "!dev-util/diffball"),
- ("!dev-util/diffball[debug=,test=]", "!dev-util/diffball"),
- ("!!dev-util/diffball", "!!dev-util/diffball"),
- ("!!dev-util/diffball[debug=,test=]", "!!dev-util/diffball"),
- ))
+ @pytest.mark.parametrize(
+ ("original", "wanted"),
+ (
+ ("<dev-util/diffball-2", "<dev-util/diffball-2"),
+ ("<dev-util/diffball-2[debug=,test=]", "<dev-util/diffball-2"),
+ ("=dev-util/diffball-2", "=dev-util/diffball-2"),
+ ("=dev-util/diffball-2[debug=,test=]", "=dev-util/diffball-2"),
+ ("=dev-util/diffball-2*", "=dev-util/diffball-2*"),
+ ("=dev-util/diffball-2*[debug=,test=]", "=dev-util/diffball-2*"),
+ ("dev-util/diffball:0", "dev-util/diffball:0"),
+ ("dev-util/diffball:0[debug=,test=]", "dev-util/diffball:0"),
+ ("dev-util/diffball:0/1.12", "dev-util/diffball:0/1.12"),
+ ("dev-util/diffball:0/1.12[debug=,test=]", "dev-util/diffball:0/1.12"),
+ ("!dev-util/diffball", "!dev-util/diffball"),
+ ("!dev-util/diffball[debug=,test=]", "!dev-util/diffball"),
+ ("!!dev-util/diffball", "!!dev-util/diffball"),
+ ("!!dev-util/diffball[debug=,test=]", "!!dev-util/diffball"),
+ ),
+ )
def test_get_atom_without_use_deps(self, original, wanted):
orig_atom = self.kls(original)
assert str(orig_atom.get_atom_without_use_deps) == wanted
- @pytest.mark.parametrize(('dep', 'iuse', 'use', 'wanted', 'eapi'), (
- ("x(-)", {'x'}, {'x'}, True, '5'),
- ("x(-)", {'x'}, (), False, '5'),
- ("x(+)", (), (), True, '5'),
- ("x(-)", (), (), False, '5'),
- ("x(-),y(-)", (), (), False, '5'),
- ("x(-),y(-)", {'x', 'y'}, ("x", "y"), True, '5'),
- ("x(+),y(-)", (), (), False, '5'),
- ("x(+),y(-)", {"y"}, (), False, '5'),
- ("x(+),y(-)", {'y'}, {"y"}, True, '5'),
- # verify that it's not sensitive to iuse defaults
- ("x(-)", {"+x"}, {"x"}, True, '5'),
- ("x(+)", {"-x"}, {"x"}, True, '5'),
- ))
+ @pytest.mark.parametrize(
+ ("dep", "iuse", "use", "wanted", "eapi"),
+ (
+ ("x(-)", {"x"}, {"x"}, True, "5"),
+ ("x(-)", {"x"}, (), False, "5"),
+ ("x(+)", (), (), True, "5"),
+ ("x(-)", (), (), False, "5"),
+ ("x(-),y(-)", (), (), False, "5"),
+ ("x(-),y(-)", {"x", "y"}, ("x", "y"), True, "5"),
+ ("x(+),y(-)", (), (), False, "5"),
+ ("x(+),y(-)", {"y"}, (), False, "5"),
+ ("x(+),y(-)", {"y"}, {"y"}, True, "5"),
+ # verify that it's not sensitive to iuse defaults
+ ("x(-)", {"+x"}, {"x"}, True, "5"),
+ ("x(+)", {"-x"}, {"x"}, True, "5"),
+ ),
+ )
def test_use_dep_defaults(self, dep, iuse, use, wanted, eapi):
- pkg = FakePkg("dev-util/diffball-1", eapi=eapi, iuse=frozenset(iuse), use=frozenset(use))
- a = self.kls(f'dev-util/diffball[{dep}]')
- #import pdb;pdb.set_trace()
+ pkg = FakePkg(
+ "dev-util/diffball-1", eapi=eapi, iuse=frozenset(iuse), use=frozenset(use)
+ )
+ a = self.kls(f"dev-util/diffball[{dep}]")
+ # import pdb;pdb.set_trace()
assert a.match(pkg) == wanted
diff --git a/tests/ebuild/test_conditionals.py b/tests/ebuild/test_conditionals.py
index 8c0e985bd..a0101dc33 100644
--- a/tests/ebuild/test_conditionals.py
+++ b/tests/ebuild/test_conditionals.py
@@ -9,13 +9,13 @@ from snakeoil.sequences import iflatten_instance
class base:
-
class kls(conditionals.DepSet):
__slots__ = ()
parse_depset = None
- def gen_depset(self, string, operators=None, element_kls=str,
- element_func=None, **kwds):
+ def gen_depset(
+ self, string, operators=None, element_kls=str, element_func=None, **kwds
+ ):
if element_func is not None:
kwds["element_func"] = element_func
if operators is None:
@@ -24,18 +24,40 @@ class base:
class TestDepSetParsing(base):
-
- @pytest.mark.parametrize("depset", (
- "( )", "( a b c", "(a b c )",
- "( a b c)", "x?( a )",
- "x? (a )", "x? (a)", "x? ( a b)",
- "x? ( x? () )", "x? ( x? (a)", "(", ")", "x?",
- "||(", "||()", "||( )", "|| ()",
- "|| (", "|| )", "||)", "|| ( x? ( )",
- "|| (x )", "|| ( x)",
- "a|", "a?", "a||b",
- "x? y", "( x )?", "||?"
- ))
+ @pytest.mark.parametrize(
+ "depset",
+ (
+ "( )",
+ "( a b c",
+ "(a b c )",
+ "( a b c)",
+ "x?( a )",
+ "x? (a )",
+ "x? (a)",
+ "x? ( a b)",
+ "x? ( x? () )",
+ "x? ( x? (a)",
+ "(",
+ ")",
+ "x?",
+ "||(",
+ "||()",
+ "||( )",
+ "|| ()",
+ "|| (",
+ "|| )",
+ "||)",
+ "|| ( x? ( )",
+ "|| (x )",
+ "|| ( x)",
+ "a|",
+ "a?",
+ "a||b",
+ "x? y",
+ "( x )?",
+ "||?",
+ ),
+ )
def test_DepsetParseError(self, depset):
with pytest.raises(DepsetParseError):
self.gen_depset(depset)
@@ -57,8 +79,7 @@ class TestDepSetParsing(base):
depth = 0
conditionals = []
for x in i:
- for t, s in ((boolean.OrRestriction, "||"),
- (boolean.AndRestriction, "&&")):
+ for t, s in ((boolean.OrRestriction, "||"), (boolean.AndRestriction, "&&")):
if isinstance(x, t):
yield s
yield "("
@@ -70,8 +91,9 @@ class TestDepSetParsing(base):
if isinstance(x, packages.Conditional):
assert x.attr == "use"
conditionals.insert(
- depth, list(self.mangle_cond_payload(x.restriction)))
- yield set(iflatten_instance(conditionals[:depth + 1]))
+ depth, list(self.mangle_cond_payload(x.restriction))
+ )
+ yield set(iflatten_instance(conditionals[: depth + 1]))
yield "("
i.appendleft(")")
i.appendleft(x.payload)
@@ -88,16 +110,37 @@ class TestDepSetParsing(base):
("", []),
("( a b )", ("&&", "(", "a", "b", ")")),
"|| ( a b )",
- ("a || ( a ( b ) c || ( d ) )",
- ["a", "||", "(", "a", "b", "c", "d", ")"]),
+ ("a || ( a ( b ) c || ( d ) )", ["a", "||", "(", "a", "b", "c", "d", ")"]),
(" x? ( a b )", (["x"], "(", "a", "b", ")")),
("x? ( y? ( a ) )", (["x"], "(", ["x", "y"], "(", "a", ")", ")")),
("|| ( || ( a b ) )", ["||", "(", "a", "b", ")"]),
"|| ( || ( a b ) c )",
- ("x? ( a !y? ( || ( b c ) d ) e ) f1 f? ( g h ) i", (
- ["x"], "(", "a", ["x", "!y"], "(", "||", "(", "b",
- "c", ")", "d", ")", "e", ")", "f1",
- ["f"], "(", "g", "h", ")", "i"))
+ (
+ "x? ( a !y? ( || ( b c ) d ) e ) f1 f? ( g h ) i",
+ (
+ ["x"],
+ "(",
+ "a",
+ ["x", "!y"],
+ "(",
+ "||",
+ "(",
+ "b",
+ "c",
+ ")",
+ "d",
+ ")",
+ "e",
+ ")",
+ "f1",
+ ["f"],
+ "(",
+ "g",
+ "h",
+ ")",
+ "i",
+ ),
+ ),
)
@pytest.mark.parametrize("depset", depsets)
@@ -126,41 +169,49 @@ class TestDepSetParsing(base):
if isinstance(x, str):
v2.append(x)
else:
- v2.append(x[-1] + '?')
- v = ' '.join(v2)
+ v2.append(x[-1] + "?")
+ v = " ".join(v2)
else:
- v = ' '.join(depset.split())
- v = ' '.join(v.replace("&&", "").split())
+ v = " ".join(depset.split())
+ v = " ".join(v.replace("&&", "").split())
assert str(base.gen_depset(self, depset)) == v
def check_known_conditionals(self, text, conditionals, **kwds):
d = self.gen_depset(text, **kwds)
assert sorted(d.known_conditionals) == sorted(conditionals.split())
# ensure it does the lookup *once*
- object.__setattr__(d, 'restrictions', ())
+ object.__setattr__(d, "restrictions", ())
assert not d.restrictions
assert sorted(d.known_conditionals) == sorted(conditionals.split())
- @pytest.mark.parametrize(("text", "conditionals"), (
- ("a? ( b )", "a"),
- ("a? ( b a? ( c ) )", "a"),
- ("a b c d e ( f )", ""),
- ("!a? ( b? ( c ) )", "a b"),
- ))
+ @pytest.mark.parametrize(
+ ("text", "conditionals"),
+ (
+ ("a? ( b )", "a"),
+ ("a? ( b a? ( c ) )", "a"),
+ ("a b c d e ( f )", ""),
+ ("!a? ( b? ( c ) )", "a b"),
+ ),
+ )
def test_known_conditionals(self, text, conditionals):
self.check_known_conditionals(text, conditionals)
def test_known_conditionals_transitive_use(self):
self.check_known_conditionals(
- "a/b[c=] a/b[!d=] b/a[e?] b/a[!f?]", "c d e f", element_func=atom,
- transitive_use_atoms=True)
+ "a/b[c=] a/b[!d=] b/a[e?] b/a[!f?]",
+ "c d e f",
+ element_func=atom,
+ transitive_use_atoms=True,
+ )
self.check_known_conditionals(
- "|| ( b/a[e?] a/c )", "e", element_func=atom,
- transitive_use_atoms=True)
+ "|| ( b/a[e?] a/c )", "e", element_func=atom, transitive_use_atoms=True
+ )
def test_element_func(self):
- assert self.gen_depset("asdf fdas", element_func=str).element_class == "".__class__
+ assert (
+ self.gen_depset("asdf fdas", element_func=str).element_class == "".__class__
+ )
def test_disabling_or(self):
with pytest.raises(DepsetParseError):
@@ -171,14 +222,16 @@ class TestDepSetParsing(base):
class TestDepSetConditionalsInspection(base):
-
def test_sanity_has_conditionals(self):
assert not bool(self.gen_depset("a b").has_conditionals)
assert not bool(self.gen_depset("( a b ) || ( c d )").has_conditionals)
assert bool(self.gen_depset("x? ( a )").has_conditionals)
assert bool(self.gen_depset("( x? ( a ) )").has_conditionals)
- assert bool(self.gen_depset("|| ( a/b[c=] b/d )", element_kls=atom,
- transitive_use_atoms=True).has_conditionals)
+ assert bool(
+ self.gen_depset(
+ "|| ( a/b[c=] b/d )", element_kls=atom, transitive_use_atoms=True
+ ).has_conditionals
+ )
def flatten_cond(self, c):
l = set()
@@ -198,8 +251,12 @@ class TestDepSetConditionalsInspection(base):
return l
def check_conds(self, s, r, element_kls=str, **kwds):
- nc = {k: self.flatten_cond(v) for k, v in
- self.gen_depset(s, element_kls=element_kls, **kwds).node_conds.items()}
+ nc = {
+ k: self.flatten_cond(v)
+ for k, v in self.gen_depset(
+ s, element_kls=element_kls, **kwds
+ ).node_conds.items()
+ }
d = {element_kls(k): v for k, v in r.items()}
for k, v in d.items():
if isinstance(v, str):
@@ -209,30 +266,38 @@ class TestDepSetConditionalsInspection(base):
assert nc == d
- @pytest.mark.parametrize(("text", "result"), (
- ("x? ( y )", {"y":"x"}),
- ("x? ( y ) z? ( y )", {"y":["z", "x"]}),
- ("x? ( z? ( w? ( y ) ) )", {"y":"w z x"}),
- ("!x? ( y )", {"y":"!x"}),
- ("!x? ( z? ( y a ) )", {"y":"!x z", "a":"!x z"}),
- ("x ( y )", {}),
- ("x ( y? ( z ) )", {"z":"y"}), # needs to dig down as deep as required
- ("x y? ( x )", {}), # x isn't controlled by a conditional, shouldn't be in the list
- ("|| ( y? ( x ) x )", {}), # x cannot be filtered down since x is accessible via non conditional path
- ("|| ( y? ( x ) z )", {"x":"y"}),
- ))
+ @pytest.mark.parametrize(
+ ("text", "result"),
+ (
+ ("x? ( y )", {"y": "x"}),
+ ("x? ( y ) z? ( y )", {"y": ["z", "x"]}),
+ ("x? ( z? ( w? ( y ) ) )", {"y": "w z x"}),
+ ("!x? ( y )", {"y": "!x"}),
+ ("!x? ( z? ( y a ) )", {"y": "!x z", "a": "!x z"}),
+ ("x ( y )", {}),
+ ("x ( y? ( z ) )", {"z": "y"}), # needs to dig down as deep as required
+ (
+ "x y? ( x )",
+ {},
+ ), # x isn't controlled by a conditional, shouldn't be in the list
+ (
+ "|| ( y? ( x ) x )",
+ {},
+ ), # x cannot be filtered down since x is accessible via non conditional path
+ ("|| ( y? ( x ) z )", {"x": "y"}),
+ ),
+ )
def test_node_conds(self, text, result):
self.check_conds(text, result)
- @pytest.mark.parametrize(("text", "result"), (
- ("a/b[c=]", {"a/b[c]":"c", "a/b[-c]":"!c"}),
- ))
+ @pytest.mark.parametrize(
+ ("text", "result"), (("a/b[c=]", {"a/b[c]": "c", "a/b[-c]": "!c"}),)
+ )
def test_node_conds_atom(self, text, result):
self.check_conds(text, result, element_kls=atom, transitive_use_atoms=True)
class TestDepSetEvaluate(base):
-
def test_evaluation(self):
flag_set = list(sorted(f"x{x}" for x in range(2000)))
for vals in (
@@ -259,14 +324,25 @@ class TestDepSetEvaluate(base):
# we assert it in the tests to make sure some 'special' ebuild dev doesn't trigger
# it on a user's machine, thus the abuse leveled here.
("a/b", "a/b[!c?,%s]" % (",".join(x + "?" for x in flag_set)), "c"),
- ("a/b", "a/b[%s]" % (",".join("%s?" % (x,) for x in flag_set)), "",
- " ".join(flag_set)),
- ("a/b[c,x0]", "a/b[c?,%s]" % (",".join(x + "?" for x in flag_set)), "c",
- " ".join(flag_set[1:])),
- ("a/b[c,%s]" % (','.join(flag_set),),
- "a/b[c?,%s]" % (",".join(x + "?" for x in flag_set)), "c",
- ""),
- ):
+ (
+ "a/b",
+ "a/b[%s]" % (",".join("%s?" % (x,) for x in flag_set)),
+ "",
+ " ".join(flag_set),
+ ),
+ (
+ "a/b[c,x0]",
+ "a/b[c?,%s]" % (",".join(x + "?" for x in flag_set)),
+ "c",
+ " ".join(flag_set[1:]),
+ ),
+ (
+ "a/b[c,%s]" % (",".join(flag_set),),
+ "a/b[c?,%s]" % (",".join(x + "?" for x in flag_set)),
+ "c",
+ "",
+ ),
+ ):
result = vals[0]
src = vals[1]
@@ -276,17 +352,17 @@ class TestDepSetEvaluate(base):
if len(vals) > 3:
tristate = vals[3].split()
kwds = {}
- if '/' in src:
+ if "/" in src:
kls = atom
flags = src.split("[", 1)[-1]
if "?" in flags or "=" in flags:
- kwds['transitive_use_atoms'] = True
+ kwds["transitive_use_atoms"] = True
else:
kls = str
orig = self.gen_depset(src, element_kls=kls, **kwds)
- collapsed = orig.evaluate_depset(use,
- tristate_filter=tristate)
- assert str(collapsed) == result, \
- f"expected {result!r} got {collapsed!r}\nraw depset: {src!r}\nuse: {use!r}, tristate: {tristate!r}"
- if not ('?' in src or kwds.get("transitive_use_atoms")):
+ collapsed = orig.evaluate_depset(use, tristate_filter=tristate)
+ assert (
+ str(collapsed) == result
+ ), f"expected {result!r} got {collapsed!r}\nraw depset: {src!r}\nuse: {use!r}, tristate: {tristate!r}"
+ if not ("?" in src or kwds.get("transitive_use_atoms")):
assert orig is collapsed
diff --git a/tests/ebuild/test_cpv.py b/tests/ebuild/test_cpv.py
index 72f545bb8..e33fe20bf 100644
--- a/tests/ebuild/test_cpv.py
+++ b/tests/ebuild/test_cpv.py
@@ -9,39 +9,68 @@ def generate_misc_sufs():
suf_nums = list(range(100))
shuffle(suf_nums)
- good_sufs = (simple_good_sufs + [f"{x}{suf_nums.pop()}" for x in simple_good_sufs])
+ good_sufs = simple_good_sufs + [f"{x}{suf_nums.pop()}" for x in simple_good_sufs]
l = len(good_sufs)
- good_sufs = good_sufs + [
- good_sufs[x] + good_sufs[l - x - 1] for x in range(l)]
+ good_sufs = good_sufs + [good_sufs[x] + good_sufs[l - x - 1] for x in range(l)]
- bad_sufs = ["_a", "_9", "_"] + [x+" " for x in simple_good_sufs]
+ bad_sufs = ["_a", "_9", "_"] + [x + " " for x in simple_good_sufs]
return good_sufs, bad_sufs
class TestCPV:
good_cats = (
- "dev-util", "dev+", "dev-util+", "DEV-UTIL", "aaa0",
- "aaa-0", "multi/depth", "cross-dev_idiot.hacks-suck", "a",
- "foo---", "multi--hyphen")
- bad_cats = (".util", "_dev", "", "dev-util ", "multi//depth")
- good_pkgs = ("diffball", "a9", "a9+", "a-100dpi", "diff-mode-",
- "multi--hyphen", "timidity--", "frob---", "diffball-9-")
- bad_pkgs = ("diffball ", "diffball-9", "a-3D", "-df", "+dfa",
- "timidity--9f", "ormaybe---13_beta")
-
- good_cp = (
- "bbb-9/foon", "dev-util/diffball", "dev-util/diffball-a9",
- "dev-ut-asdf/emacs-cvs", "xfce-base/xfce4", "bah/f-100dpi",
- "dev-util/diffball-blah-monkeys", "virtual/7z")
+ "dev-util",
+ "dev+",
+ "dev-util+",
+ "DEV-UTIL",
+ "aaa0",
+ "aaa-0",
+ "multi/depth",
+ "cross-dev_idiot.hacks-suck",
+ "a",
+ "foo---",
+ "multi--hyphen",
+ )
+ bad_cats = (".util", "_dev", "", "dev-util ", "multi//depth")
+ good_pkgs = (
+ "diffball",
+ "a9",
+ "a9+",
+ "a-100dpi",
+ "diff-mode-",
+ "multi--hyphen",
+ "timidity--",
+ "frob---",
+ "diffball-9-",
+ )
+ bad_pkgs = (
+ "diffball ",
+ "diffball-9",
+ "a-3D",
+ "-df",
+ "+dfa",
+ "timidity--9f",
+ "ormaybe---13_beta",
+ )
+
+ good_cp = (
+ "bbb-9/foon",
+ "dev-util/diffball",
+ "dev-util/diffball-a9",
+ "dev-ut-asdf/emacs-cvs",
+ "xfce-base/xfce4",
+ "bah/f-100dpi",
+ "dev-util/diffball-blah-monkeys",
+ "virtual/7z",
+ )
good_vers = ("1", "2.3.4", "2.3.4a", "02.3", "2.03", "3d", "3D")
- bad_vers = ("2.3a.4", "2.a.3", "2.3_", "2.3 ", "2.3.", "cvs.2")
+ bad_vers = ("2.3a.4", "2.a.3", "2.3_", "2.3 ", "2.3.", "cvs.2")
good_sufs, bad_sufs = generate_misc_sufs()
- good_revs = ("-r1", "-r300", "-r0", "",
- "-r1000000000000000000")
+ good_revs = ("-r1", "-r300", "-r0", "", "-r1000000000000000000")
bad_revs = ("-r", "-ra", "-r", "-R1")
testing_secondary_args = False
@@ -56,12 +85,14 @@ class TestCPV:
def test_simple_key(self):
with pytest.raises(cpv.InvalidCPV):
self.make_inst("da", "ba-3", "3.3")
- for src in [[("dev-util", "diffball", "0.7.1"), "dev-util/diffball"],
+ for src in [
+ [("dev-util", "diffball", "0.7.1"), "dev-util/diffball"],
["dev-util/diffball"],
["dev-perl/mod_perl"],
["dev-perl/mod_p"],
[("dev-perl", "mod-p", ""), "dev-perl/mod-p"],
- ["dev-perl/mod-p-1", "dev-perl/mod-p"],]:
+ ["dev-perl/mod-p-1", "dev-perl/mod-p"],
+ ]:
if len(src) == 1:
key = src[0]
else:
@@ -71,7 +102,7 @@ class TestCPV:
vals = pkgver.rsplit("-", 1)
if len(vals) == 1:
pkg = pkgver
- ver = ''
+ ver = ""
else:
pkg, ver = vals
else:
@@ -88,32 +119,32 @@ class TestCPV:
def test_parsing(self):
# check for gentoo bug 263787
- self.process_pkg(False, 'app-text', 'foo-123-bar')
- self.process_ver(False, 'app-text', 'foo-123-bar', '2.0017a_p', '-r5')
+ self.process_pkg(False, "app-text", "foo-123-bar")
+ self.process_ver(False, "app-text", "foo-123-bar", "2.0017a_p", "-r5")
with pytest.raises(cpv.InvalidCPV):
- cpv.UnversionedCPV('app-text/foo-123')
+ cpv.UnversionedCPV("app-text/foo-123")
for cat_ret, cats in [[False, self.good_cats], [True, self.bad_cats]]:
for cat in cats:
- for pkg_ret, pkgs in [[False, self.good_pkgs],
- [True, self.bad_pkgs]]:
+ for pkg_ret, pkgs in [[False, self.good_pkgs], [True, self.bad_pkgs]]:
for pkg in pkgs:
self.process_pkg(cat_ret or pkg_ret, cat, pkg)
for cp in self.good_cp:
cat, pkg = cp.rsplit("/", 1)
- for rev_ret, revs in [[False, self.good_revs],
- [True, self.bad_revs]]:
+ for rev_ret, revs in [[False, self.good_revs], [True, self.bad_revs]]:
for rev in revs:
- for ver_ret, vers in [[False, self.good_vers],
- [True, self.bad_vers]]:
+ for ver_ret, vers in [
+ [False, self.good_vers],
+ [True, self.bad_vers],
+ ]:
for ver in vers:
- self.process_ver(ver_ret or rev_ret, cat, pkg,
- ver, rev)
+ self.process_ver(ver_ret or rev_ret, cat, pkg, ver, rev)
for x in (10, 18, 19, 36, 100):
assert cpv.CPV("da", "ba", f"1-r0{'0' * x}").revision == 0
- assert \
- int(cpv.CPV("da", "ba", f"1-r1{'0' * x}1").revision) == int(f"1{'0' * x}1")
+ assert int(cpv.CPV("da", "ba", f"1-r1{'0' * x}1").revision) == int(
+ f"1{'0' * x}1"
+ )
def process_pkg(self, ret, cat, pkg):
if ret:
@@ -161,16 +192,16 @@ class TestCPV:
for suf in self.bad_sufs:
# check standalone.
- self.process_suf(True, cat, pkg, ver+suf, rev)
+ self.process_suf(True, cat, pkg, ver + suf, rev)
def process_suf(self, ret, cat, pkg, ver, rev):
if ret:
with pytest.raises(cpv.InvalidCPV):
- self.make_inst(cat, pkg, ver+rev)
+ self.make_inst(cat, pkg, ver + rev)
else:
# redundant in light of process_ver... combine these somehow.
c = self.make_inst(cat, pkg, ver + rev)
- if rev == '' or rev == '-r0':
+ if rev == "" or rev == "-r0":
assert c.cpvstr == f"{cat}/{pkg}-{ver}"
assert c.revision == 0
if rev:
@@ -198,10 +229,10 @@ class TestCPV:
if suf == "":
sufs = [suf]
else:
- sufs = [suf, f'{suf}4']
+ sufs = [suf, f"{suf}4"]
for x in sufs:
- cur = vkls(f'{base}{x}{rev}')
- assert cur == vkls(f'{base}{x}{rev}')
+ cur = vkls(f"{base}{x}{rev}")
+ assert cur == vkls(f"{base}{x}{rev}")
if last is not None:
assert cur > last
@@ -234,9 +265,11 @@ class TestCPV:
assert vkls("da/ba-6.01.0-r0") == vkls("da/ba-6.01.0-r00")
assert vkls("da/ba-6.01.0-r1") == vkls("da/ba-6.01.0-r001")
- for v1, v2 in (("1.001000000000000000001", "1.001000000000000000002"),
+ for v1, v2 in (
+ ("1.001000000000000000001", "1.001000000000000000002"),
("1.00100000000", "1.0010000000000000001"),
- ("1.01", "1.1")):
+ ("1.01", "1.1"),
+ ):
assert vkls(f"da/ba-{v2}") > vkls(f"da/ba-{v1}")
for x in (18, 36, 100):
@@ -247,14 +280,17 @@ class TestCPV:
s = "0" * x
assert vkls(f"da/ba-1-r10{s}1") > vkls(f"da/ba-1-r1{s}1")
- assert vkls('sys-apps/net-tools-1.60_p2010081516093') > \
- vkls('sys-apps/net-tools-1.60_p2009072801401')
+ assert vkls("sys-apps/net-tools-1.60_p2010081516093") > vkls(
+ "sys-apps/net-tools-1.60_p2009072801401"
+ )
- assert vkls('sys-apps/net-tools-1.60_p20100815160931') > \
- vkls('sys-apps/net-tools-1.60_p20090728014017')
+ assert vkls("sys-apps/net-tools-1.60_p20100815160931") > vkls(
+ "sys-apps/net-tools-1.60_p20090728014017"
+ )
- assert vkls('sys-apps/net-tools-1.60_p20100815160931') > \
- vkls('sys-apps/net-tools-1.60_p20090728014017-r1')
+ assert vkls("sys-apps/net-tools-1.60_p20100815160931") > vkls(
+ "sys-apps/net-tools-1.60_p20090728014017-r1"
+ )
# Regression test: python does comparison slightly differently
# if the classes do not match exactly (it prefers rich
@@ -262,12 +298,17 @@ class TestCPV:
class DummySubclass(cpv.CPV):
pass
- assert DummySubclass("da/ba-6.0_alpha0_p1", versioned=True) != vkls("da/ba-6.0_alpha")
- assert DummySubclass("da/ba-6.0_alpha0", versioned=True) == vkls("da/ba-6.0_alpha")
+ assert DummySubclass("da/ba-6.0_alpha0_p1", versioned=True) != vkls(
+ "da/ba-6.0_alpha"
+ )
+ assert DummySubclass("da/ba-6.0_alpha0", versioned=True) == vkls(
+ "da/ba-6.0_alpha"
+ )
assert DummySubclass("da/ba-6.0", versioned=True) != "foon"
- assert DummySubclass("da/ba-6.0", versioned=True) == \
- DummySubclass("da/ba-6.0-r0", versioned=True)
+ assert DummySubclass("da/ba-6.0", versioned=True) == DummySubclass(
+ "da/ba-6.0-r0", versioned=True
+ )
def test_no_init(self):
"""Test if the cpv is in a somewhat sane state if __init__ fails.
@@ -279,10 +320,10 @@ class TestCPV:
uninited = cpv.CPV.__new__(cpv.CPV)
broken = cpv.CPV.__new__(cpv.CPV)
with pytest.raises(cpv.InvalidCPV):
- broken.__init__('broken', versioned=True)
+ broken.__init__("broken", versioned=True)
for thing in (uninited, broken):
# the c version returns None, the py version does not have the attr
- getattr(thing, 'cpvstr', None)
+ getattr(thing, "cpvstr", None)
repr(thing)
str(thing)
# The c version returns a constant, the py version raises
diff --git a/tests/ebuild/test_digest.py b/tests/ebuild/test_digest.py
index 86d32e471..be4985b24 100644
--- a/tests/ebuild/test_digest.py
+++ b/tests/ebuild/test_digest.py
@@ -8,8 +8,7 @@ from snakeoil.data_source import local_source
# "Line too long" (and our custom more aggressive version of that)
# pylint: disable-msg=C0301,CPC01
-digest_contents = \
-"""MD5 98db1465629693fc434d4dc52db93838 Python-2.4.2.tar.bz2 7853169
+digest_contents = """MD5 98db1465629693fc434d4dc52db93838 Python-2.4.2.tar.bz2 7853169
RMD160 c511d2b76b5394742d285e71570a2bcd3c1fa871 Python-2.4.2.tar.bz2 7853169
SHA256 e163b95ee56819c0f3c58ef9278c30b9e49302c2f1a1917680ca894d33929f7e Python-2.4.2.tar.bz2 7853169
MD5 2fa54dd51b6a8f1c46e5baf741e90f7e python-2.4-patches-1.tar.bz2 7820
@@ -19,12 +18,14 @@ digest_chksum = (
("size", int(7853169)),
("md5", int("98db1465629693fc434d4dc52db93838", 16)),
("rmd160", int("c511d2b76b5394742d285e71570a2bcd3c1fa871", 16)),
- ("sha256", int("e163b95ee56819c0f3c58ef9278c30b9e49302c2f1a1917680ca894d33929f7e", 16))
+ (
+ "sha256",
+ int("e163b95ee56819c0f3c58ef9278c30b9e49302c2f1a1917680ca894d33929f7e", 16),
+ ),
)
# ripped straight from the glep
-pure_manifest2 = \
-"""AUX ldif-buffer-overflow-fix.diff 5007 RMD160 1354a6bd2687430b628b78aaf43f5c793d2f0704 SHA1 424e1dfca06488f605b9611160020227ecdd03ac
+pure_manifest2 = """AUX ldif-buffer-overflow-fix.diff 5007 RMD160 1354a6bd2687430b628b78aaf43f5c793d2f0704 SHA1 424e1dfca06488f605b9611160020227ecdd03ac
AUX procmime.patch 977 RMD160 39a51a4d654759b15d1644a79fb6e8921130df3c SHA1 d76929f6dfc2179281f7ccee5789aab4e970ba9e
EBUILD sylpheed-claws-1.0.5-r1.ebuild 3906 RMD160 cdd546c128db2dea7044437de01ec96e12b4f5bf SHA1 a84b49e76961d7a9100852b64c2bfbf9b053d45e
EBUILD sylpheed-claws-1.9.100.ebuild 4444 RMD160 89326038bfc694dafd22f10400a08d3f930fb2bd SHA1 8895342f3f0cc6fcbdd0fdada2ad8e23ce539d23
@@ -50,7 +51,7 @@ for x in pure_manifest2.split("\n"):
class TestManifest:
- convert_source = staticmethod(lambda x:x)
+ convert_source = staticmethod(lambda x: x)
def get_manifest(self, data):
fd, fn = tempfile.mkstemp()
@@ -71,11 +72,14 @@ class TestManifest:
(dist, aux, ebuild, misc) = self.get_manifest(s)
def test_manifest2(self):
- (dist, aux, ebuild, misc) = \
- self.get_manifest(pure_manifest2)
+ (dist, aux, ebuild, misc) = self.get_manifest(pure_manifest2)
- for dtype, d in (("DIST", dist), ("AUX", aux),
- ("EBUILD", ebuild), ("MISC", misc)):
+ for dtype, d in (
+ ("DIST", dist),
+ ("AUX", aux),
+ ("EBUILD", ebuild),
+ ("MISC", misc),
+ ):
req_d = pure_manifest2_chksums[dtype]
assert set(req_d) == set(d)
for k, v in req_d.items():
diff --git a/tests/ebuild/test_eapi.py b/tests/ebuild/test_eapi.py
index 48ae0429d..cbe0e3a52 100644
--- a/tests/ebuild/test_eapi.py
+++ b/tests/ebuild/test_eapi.py
@@ -20,36 +20,40 @@ def test_get_eapi():
class TestEAPI:
-
def test_register(self, tmp_path):
# re-register known EAPI
with pytest.raises(ValueError):
EAPI.register(magic="0")
- mock_ebd_temp = str(shutil.copytree(EBD_PATH, tmp_path / 'ebd'))
- with mock.patch('pkgcore.ebuild.eapi.bash_version') as bash_version, \
- mock.patch.dict(eapi.EAPI.known_eapis), \
- mock.patch('pkgcore.ebuild.eapi.const.EBD_PATH', mock_ebd_temp):
+ mock_ebd_temp = str(shutil.copytree(EBD_PATH, tmp_path / "ebd"))
+ with mock.patch(
+ "pkgcore.ebuild.eapi.bash_version"
+ ) as bash_version, mock.patch.dict(eapi.EAPI.known_eapis), mock.patch(
+ "pkgcore.ebuild.eapi.const.EBD_PATH", mock_ebd_temp
+ ):
# inadequate bash version
- bash_version.return_value = '3.1'
+ bash_version.return_value = "3.1"
with pytest.raises(SystemExit) as excinfo:
- new_eapi = EAPI.register(magic='new', optionals={'bash_compat': '3.2'})
- assert "EAPI 'new' requires >=bash-3.2, system version: 3.1" == excinfo.value.args[0]
+ new_eapi = EAPI.register(magic="new", optionals={"bash_compat": "3.2"})
+ assert (
+ "EAPI 'new' requires >=bash-3.2, system version: 3.1"
+ == excinfo.value.args[0]
+ )
# adequate system bash versions
- bash_version.return_value = '3.2'
- test_eapi = EAPI.register(magic='test', optionals={'bash_compat': '3.2'})
- assert test_eapi._magic == 'test'
- bash_version.return_value = '4.2'
- test_eapi = EAPI.register(magic='test1', optionals={'bash_compat': '4.1'})
- assert test_eapi._magic == 'test1'
+ bash_version.return_value = "3.2"
+ test_eapi = EAPI.register(magic="test", optionals={"bash_compat": "3.2"})
+ assert test_eapi._magic == "test"
+ bash_version.return_value = "4.2"
+ test_eapi = EAPI.register(magic="test1", optionals={"bash_compat": "4.1"})
+ assert test_eapi._magic == "test1"
def test_is_supported(self, caplog):
assert eapi6.is_supported
with mock.patch.dict(eapi.EAPI.known_eapis):
# partially supported EAPI is flagged as such
- test_eapi = EAPI.register("test", optionals={'is_supported': False})
+ test_eapi = EAPI.register("test", optionals={"is_supported": False})
assert test_eapi.is_supported
assert caplog.text.endswith("EAPI 'test' isn't fully supported\n")
@@ -64,4 +68,4 @@ class TestEAPI:
def test_ebd_env(self):
for eapi_str, eapi_obj in EAPI.known_eapis.items():
- assert eapi_obj.ebd_env['EAPI'] == eapi_str
+ assert eapi_obj.ebd_env["EAPI"] == eapi_str
diff --git a/tests/ebuild/test_ebuild_src.py b/tests/ebuild/test_ebuild_src.py
index c05167c5f..9d07d55f4 100644
--- a/tests/ebuild/test_ebuild_src.py
+++ b/tests/ebuild/test_ebuild_src.py
@@ -19,39 +19,51 @@ class TestBase:
kls = ebuild_src.base
- def get_pkg(self, data=None, cpv='dev-util/diffball-0.1-r1', repo=None,
- pre_args=(), suppress_unsupported=True):
+ def get_pkg(
+ self,
+ data=None,
+ cpv="dev-util/diffball-0.1-r1",
+ repo=None,
+ pre_args=(),
+ suppress_unsupported=True,
+ ):
o = self.kls(*(list(pre_args) + [repo, cpv]))
if data is not None:
- eapi_data = data.pop('EAPI', 0)
+ eapi_data = data.pop("EAPI", 0)
if eapi_data is not None:
- object.__setattr__(o, 'eapi', get_eapi(
- str(eapi_data), suppress_unsupported=suppress_unsupported))
- object.__setattr__(o, 'data', data)
+ object.__setattr__(
+ o,
+ "eapi",
+ get_eapi(str(eapi_data), suppress_unsupported=suppress_unsupported),
+ )
+ object.__setattr__(o, "data", data)
return o
def make_parent(self, **methods):
class kls:
locals().update(methods)
+
return kls()
def test_init(self):
- o = self.get_pkg({}, cpv='dev-util/diffball-0.1-r1')
- assert o.category == 'dev-util'
- assert o.package == 'diffball'
- assert o.fullver == '0.1-r1'
- assert o.PN == 'diffball'
- assert o.P == 'diffball-0.1'
- assert o.PF == 'diffball-0.1-r1'
- assert o.PR == 'r1'
- assert self.get_pkg({}, 'dev-util/diffball-0.1').PR == 'r0'
+ o = self.get_pkg({}, cpv="dev-util/diffball-0.1-r1")
+ assert o.category == "dev-util"
+ assert o.package == "diffball"
+ assert o.fullver == "0.1-r1"
+ assert o.PN == "diffball"
+ assert o.P == "diffball-0.1"
+ assert o.PF == "diffball-0.1-r1"
+ assert o.PR == "r1"
+ assert self.get_pkg({}, "dev-util/diffball-0.1").PR == "r0"
def test_path(self):
l = []
- path = '/random/path/to/foo-0.ebuild'
+ path = "/random/path/to/foo-0.ebuild"
+
def f(self, cpv):
l.append(cpv)
return path
+
c = self.make_parent(_get_ebuild_path=f)
o = self.get_pkg({}, repo=c)
assert o.path == path
@@ -59,9 +71,11 @@ class TestBase:
def test_ebuild(self):
l = []
+
def f(self, cpv):
l.append(cpv)
return 1
+
c = self.make_parent(get_ebuild_src=f)
o = self.get_pkg({}, repo=c)
assert o.ebuild == 1
@@ -69,69 +83,70 @@ class TestBase:
def test_fetch_metadata(self):
def f(self, cpv, **options):
- return {'1': '2'}
+ return {"1": "2"}
+
o = self.get_pkg(repo=self.make_parent(_get_metadata=f))
- assert o.data == {'1': '2'}
+ assert o.data == {"1": "2"}
def test_license(self):
- o = self.get_pkg({'LICENSE': 'GPL2 FOON'})
- assert list(o.license) == ['GPL2', 'FOON']
+ o = self.get_pkg({"LICENSE": "GPL2 FOON"})
+ assert list(o.license) == ["GPL2", "FOON"]
def test_description(self):
- o = self.get_pkg({'DESCRIPTION': ' foon\n asdf '})
- assert o.description == 'foon\n asdf'
+ o = self.get_pkg({"DESCRIPTION": " foon\n asdf "})
+ assert o.description == "foon\n asdf"
def test_iuse(self):
o = self.get_pkg({})
assert o.iuse == frozenset()
- o = self.get_pkg({'IUSE': 'build pkg foon'})
- assert o.iuse == frozenset(['build', 'foon', 'pkg'])
+ o = self.get_pkg({"IUSE": "build pkg foon"})
+ assert o.iuse == frozenset(["build", "foon", "pkg"])
def test_iuse_stripped(self):
o = self.get_pkg({})
assert o.iuse_stripped == frozenset()
- o = self.get_pkg({'IUSE': 'build pkg foon'})
- assert o.iuse_stripped == frozenset(['build', 'foon', 'pkg'])
- o = self.get_pkg({'EAPI': '1', 'IUSE': '+build -pkg foon'})
- assert o.iuse_stripped == frozenset(['build', 'foon', 'pkg'])
+ o = self.get_pkg({"IUSE": "build pkg foon"})
+ assert o.iuse_stripped == frozenset(["build", "foon", "pkg"])
+ o = self.get_pkg({"EAPI": "1", "IUSE": "+build -pkg foon"})
+ assert o.iuse_stripped == frozenset(["build", "foon", "pkg"])
def test_iuse_effective(self):
o = self.get_pkg({})
assert o.iuse_effective == frozenset()
- o = self.get_pkg({'IUSE': 'build pkg foon'})
- assert o.iuse_effective == frozenset(['build', 'foon', 'pkg'])
- o = self.get_pkg({'EAPI': '1', 'IUSE': '+build -pkg foon'})
- assert o.iuse_effective == frozenset(['build', 'foon', 'pkg'])
+ o = self.get_pkg({"IUSE": "build pkg foon"})
+ assert o.iuse_effective == frozenset(["build", "foon", "pkg"])
+ o = self.get_pkg({"EAPI": "1", "IUSE": "+build -pkg foon"})
+ assert o.iuse_effective == frozenset(["build", "foon", "pkg"])
def test_properties(self):
o = self.get_pkg({})
assert sorted(o.properties.evaluate_depset([])) == []
- o = self.get_pkg({'PROPERTIES': ''})
+ o = self.get_pkg({"PROPERTIES": ""})
assert sorted(o.properties.evaluate_depset([])) == []
- o = self.get_pkg({'PROPERTIES': 'interactive'})
- assert sorted(o.properties.evaluate_depset([])) == ['interactive']
+ o = self.get_pkg({"PROPERTIES": "interactive"})
+ assert sorted(o.properties.evaluate_depset([])) == ["interactive"]
def test_homepage(self):
- o = self.get_pkg({'HOMEPAGE': ' http://slashdot/ '})
- assert o.homepage == ('http://slashdot/',)
- o = self.get_pkg({'HOMEPAGE': 'http://foozball.org https://foobar.com'})
- assert o.homepage == ('http://foozball.org', 'https://foobar.com')
+ o = self.get_pkg({"HOMEPAGE": " http://slashdot/ "})
+ assert o.homepage == ("http://slashdot/",)
+ o = self.get_pkg({"HOMEPAGE": "http://foozball.org https://foobar.com"})
+ assert o.homepage == ("http://foozball.org", "https://foobar.com")
def test_fullslot(self):
- o = self.get_pkg({'SLOT': '0'})
- assert o.fullslot == '0'
+ o = self.get_pkg({"SLOT": "0"})
+ assert o.fullslot == "0"
# subslot support
for eapi_str, eapi in EAPI.known_eapis.items():
if eapi.options.sub_slotting:
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '0/0'})
- assert o.fullslot == '0/0'
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/2'})
- assert o.fullslot == '1/2'
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/foo-1'})
- assert o.fullslot == '1/foo-1'
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "0/0"})
+ assert o.fullslot == "0/0"
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/2"})
+ assert o.fullslot == "1/2"
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/foo-1"})
+ assert o.fullslot == "1/foo-1"
else:
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '0/0'})
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "0/0"})
with pytest.raises(errors.MetadataException):
o.fullslot
@@ -140,21 +155,21 @@ class TestBase:
self.get_pkg({}).fullslot
# empty SLOT variable
with pytest.raises(errors.MetadataException):
- self.get_pkg({'SLOT': ''}).fullslot
+ self.get_pkg({"SLOT": ""}).fullslot
def test_slot(self):
- o = self.get_pkg({'SLOT': '0'})
- assert o.slot == '0'
+ o = self.get_pkg({"SLOT": "0"})
+ assert o.slot == "0"
# subslot support
for eapi_str, eapi in EAPI.known_eapis.items():
if eapi.options.sub_slotting:
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/2'})
- assert o.slot == '1'
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/foo-1'})
- assert o.slot == '1'
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/2"})
+ assert o.slot == "1"
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/foo-1"})
+ assert o.slot == "1"
else:
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/2'})
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/2"})
with pytest.raises(errors.MetadataException):
o.slot
@@ -163,23 +178,23 @@ class TestBase:
self.get_pkg({}).slot
# empty SLOT variable
with pytest.raises(errors.MetadataException):
- self.get_pkg({'SLOT': ''}).slot
+ self.get_pkg({"SLOT": ""}).slot
def test_subslot(self):
- o = self.get_pkg({'SLOT': '0'})
- assert o.subslot == '0'
- o = self.get_pkg({'SLOT': '1'})
- assert o.subslot == '1'
+ o = self.get_pkg({"SLOT": "0"})
+ assert o.subslot == "0"
+ o = self.get_pkg({"SLOT": "1"})
+ assert o.subslot == "1"
# subslot support
for eapi_str, eapi in EAPI.known_eapis.items():
if eapi.options.sub_slotting:
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/2'})
- assert o.subslot == '2'
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/foo-1'})
- assert o.subslot == 'foo-1'
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/2"})
+ assert o.subslot == "2"
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/foo-1"})
+ assert o.subslot == "foo-1"
else:
- o = self.get_pkg({'EAPI': eapi_str, 'SLOT': '1/2'})
+ o = self.get_pkg({"EAPI": eapi_str, "SLOT": "1/2"})
with pytest.raises(errors.MetadataException):
o.subslot
@@ -188,269 +203,376 @@ class TestBase:
self.get_pkg({}).subslot
# empty SLOT variable
with pytest.raises(errors.MetadataException):
- self.get_pkg({'SLOT': ''}).subslot
+ self.get_pkg({"SLOT": ""}).subslot
def test_restrict(self):
- o = self.get_pkg({'RESTRICT': 'strip fetch strip'})
- assert sorted(o.restrict) == ['fetch', 'strip', 'strip']
- o = self.get_pkg({'RESTRICT': 'x? ( foo ) !x? ( dar )'})
- assert sorted(o.restrict.evaluate_depset([])) == ['dar']
+ o = self.get_pkg({"RESTRICT": "strip fetch strip"})
+ assert sorted(o.restrict) == ["fetch", "strip", "strip"]
+ o = self.get_pkg({"RESTRICT": "x? ( foo ) !x? ( dar )"})
+ assert sorted(o.restrict.evaluate_depset([])) == ["dar"]
# ensure restrict doesn't have || () in it
with pytest.raises(errors.MetadataException):
- getattr(self.get_pkg({'RESTRICT': '|| ( foon dar )'}), 'restrict')
+ getattr(self.get_pkg({"RESTRICT": "|| ( foon dar )"}), "restrict")
def test_eapi(self):
- assert str(self.get_pkg({'EAPI': '0'}).eapi) == '0'
- assert self.get_pkg({'EAPI': '0'}).eapi.is_supported
- assert not self.get_pkg({'EAPI': '0.1'}).eapi.is_supported
- assert self.get_pkg({'EAPI': 'foon'}, suppress_unsupported=False).eapi is None
+ assert str(self.get_pkg({"EAPI": "0"}).eapi) == "0"
+ assert self.get_pkg({"EAPI": "0"}).eapi.is_supported
+ assert not self.get_pkg({"EAPI": "0.1"}).eapi.is_supported
+ assert self.get_pkg({"EAPI": "foon"}, suppress_unsupported=False).eapi is None
with pytest.raises(errors.MetadataException):
- getattr(self.get_pkg({'EAPI': 0, 'DEPEND': "d/b:0"}), 'depend')
+ getattr(self.get_pkg({"EAPI": 0, "DEPEND": "d/b:0"}), "depend")
with pytest.raises(errors.MetadataException):
- getattr(self.get_pkg({'EAPI': 0, 'RDEPEND': "d/b:0"}), 'rdepend')
+ getattr(self.get_pkg({"EAPI": 0, "RDEPEND": "d/b:0"}), "rdepend")
with pytest.raises(errors.MetadataException):
- getattr(self.get_pkg({'EAPI': 1, 'DEPEND': "d/b[x,y]"}), 'depend')
+ getattr(self.get_pkg({"EAPI": 1, "DEPEND": "d/b[x,y]"}), "depend")
with pytest.raises(errors.MetadataException):
- getattr(self.get_pkg({'EAPI': 1, 'DEPEND': "d/b::foon"}), 'depend')
- assert self.get_pkg({'EAPI': 2, 'DEPEND': 'a/b[x=]'}).depend.node_conds
- pkg = self.get_pkg({'EAPI': 1, 'DEPEND': 'a/b[x=]'})
+ getattr(self.get_pkg({"EAPI": 1, "DEPEND": "d/b::foon"}), "depend")
+ assert self.get_pkg({"EAPI": 2, "DEPEND": "a/b[x=]"}).depend.node_conds
+ pkg = self.get_pkg({"EAPI": 1, "DEPEND": "a/b[x=]"})
with pytest.raises(errors.MetadataException):
- getattr(pkg, 'depend')
+ getattr(pkg, "depend")
def test_get_parsed_eapi(self, tmpdir):
# ebuild has a real path on the fs
def _path(self, cpv, eapi_str):
ebuild = pjoin(str(tmpdir), "temp-0.ebuild")
- with open(ebuild, 'w') as f:
- f.write(textwrap.dedent(f'''\
+ with open(ebuild, "w") as f:
+ f.write(
+ textwrap.dedent(
+ f"""\
# Copyright
# License
- EAPI={eapi_str}'''))
+ EAPI={eapi_str}"""
+ )
+ )
return local_source(str(ebuild))
# ebuild is a faked obj
def _src(self, cpv, eapi_str):
- return data_source(f'EAPI={eapi_str}')
+ return data_source(f"EAPI={eapi_str}")
for func in (_path, _src):
# verify parsing known EAPIs
for eapi_str in EAPI.known_eapis.keys():
c = self.make_parent(get_ebuild_src=post_curry(func, eapi_str))
- o = self.get_pkg({'EAPI': None}, repo=c)
+ o = self.get_pkg({"EAPI": None}, repo=c)
assert str(o.eapi) == eapi_str
# check explicitly unsetting EAPI equates to EAPI=0
- for eapi_str in ('', '""', "''"):
+ for eapi_str in ("", '""', "''"):
c = self.make_parent(get_ebuild_src=post_curry(func, eapi_str))
- o = self.get_pkg({'EAPI': None}, repo=c)
- assert str(o.eapi) == '0'
+ o = self.get_pkg({"EAPI": None}, repo=c)
+ assert str(o.eapi) == "0"
def test_keywords(self):
- assert list(self.get_pkg({'KEYWORDS': ''}).keywords) == []
- assert sorted(self.get_pkg({'KEYWORDS': 'x86 amd64'}).keywords) == sorted(['x86', 'amd64'])
+ assert list(self.get_pkg({"KEYWORDS": ""}).keywords) == []
+ assert sorted(self.get_pkg({"KEYWORDS": "x86 amd64"}).keywords) == sorted(
+ ["x86", "amd64"]
+ )
def test_sorted_keywords(self):
- assert self.get_pkg({'KEYWORDS': ''}).sorted_keywords == ()
- assert self.get_pkg({'KEYWORDS': 'amd64 x86'}).sorted_keywords == ('amd64', 'x86')
- assert self.get_pkg({'KEYWORDS': 'x86 amd64'}).sorted_keywords == ('amd64', 'x86')
- assert (
- self.get_pkg({'KEYWORDS': '~amd64 ~amd64-fbsd ~x86'}).sorted_keywords ==
- ('~amd64', '~x86', '~amd64-fbsd'))
- assert (
- self.get_pkg({'KEYWORDS': '~amd64 ~x86 ~amd64-fbsd'}).sorted_keywords ==
- ('~amd64', '~x86', '~amd64-fbsd'))
-
- @pytest.mark.parametrize(('depset', 'attr', 'expected', 'eapi'), (
- pytest.param('dev-util/diffball || ( dev-util/foo x86? ( dev-util/bsdiff ) )', 'depend', None, '0', id='depend'),
- pytest.param('dev-util/diffball || ( dev-util/foo x86? ( dev-util/bsdiff ) )', 'rdepend', None, '0', id='rdepend'),
- pytest.param('dev-util/diffball x86? ( virtual/boo )', 'pdepend', None, '0', id='pdepend'),
- # BDEPEND in EAPI 7
- pytest.param('dev-util/diffball x86? ( virtual/boo )', 'bdepend', None, '7', id='bdepend'),
- # BDEPEND is ignored in EAPIs <= 6
- pytest.param('dev-util/diffball x86? ( virtual/boo )', 'bdepend', '', '0', id='no_bdepend'),
- # IDEPEND in EAPI 8
- pytest.param('dev-util/diffball x86? ( virtual/boo )', 'idepend', None, '8', id='idepend'),
- # IDEPEND is ignored in EAPIs <= 7
- pytest.param('dev-util/diffball x86? ( virtual/boo )', 'idepend', '', '0', id='no_idepend'),
- ))
+ assert self.get_pkg({"KEYWORDS": ""}).sorted_keywords == ()
+ assert self.get_pkg({"KEYWORDS": "amd64 x86"}).sorted_keywords == (
+ "amd64",
+ "x86",
+ )
+ assert self.get_pkg({"KEYWORDS": "x86 amd64"}).sorted_keywords == (
+ "amd64",
+ "x86",
+ )
+ assert self.get_pkg(
+ {"KEYWORDS": "~amd64 ~amd64-fbsd ~x86"}
+ ).sorted_keywords == ("~amd64", "~x86", "~amd64-fbsd")
+ assert self.get_pkg(
+ {"KEYWORDS": "~amd64 ~x86 ~amd64-fbsd"}
+ ).sorted_keywords == ("~amd64", "~x86", "~amd64-fbsd")
+
+ @pytest.mark.parametrize(
+ ("depset", "attr", "expected", "eapi"),
+ (
+ pytest.param(
+ "dev-util/diffball || ( dev-util/foo x86? ( dev-util/bsdiff ) )",
+ "depend",
+ None,
+ "0",
+ id="depend",
+ ),
+ pytest.param(
+ "dev-util/diffball || ( dev-util/foo x86? ( dev-util/bsdiff ) )",
+ "rdepend",
+ None,
+ "0",
+ id="rdepend",
+ ),
+ pytest.param(
+ "dev-util/diffball x86? ( virtual/boo )",
+ "pdepend",
+ None,
+ "0",
+ id="pdepend",
+ ),
+ # BDEPEND in EAPI 7
+ pytest.param(
+ "dev-util/diffball x86? ( virtual/boo )",
+ "bdepend",
+ None,
+ "7",
+ id="bdepend",
+ ),
+ # BDEPEND is ignored in EAPIs <= 6
+ pytest.param(
+ "dev-util/diffball x86? ( virtual/boo )",
+ "bdepend",
+ "",
+ "0",
+ id="no_bdepend",
+ ),
+ # IDEPEND in EAPI 8
+ pytest.param(
+ "dev-util/diffball x86? ( virtual/boo )",
+ "idepend",
+ None,
+ "8",
+ id="idepend",
+ ),
+ # IDEPEND is ignored in EAPIs <= 7
+ pytest.param(
+ "dev-util/diffball x86? ( virtual/boo )",
+ "idepend",
+ "",
+ "0",
+ id="no_idepend",
+ ),
+ ),
+ )
def test_check_depends(self, depset, attr, expected, eapi):
if expected is None:
expected = depset
data_name = attr.upper()
- o = self.get_pkg({data_name: depset, 'EAPI': eapi})
+ o = self.get_pkg({data_name: depset, "EAPI": eapi})
assert str(getattr(o, attr)) == expected
- o = self.get_pkg({data_name: '', 'EAPI': eapi})
- assert str(getattr(o, attr)) == ''
+ o = self.get_pkg({data_name: "", "EAPI": eapi})
+ assert str(getattr(o, attr)) == ""
if expected:
with pytest.raises(errors.MetadataException):
- getattr(self.get_pkg({data_name: '|| ( ', 'EAPI': eapi}), attr)
+ getattr(self.get_pkg({data_name: "|| ( ", "EAPI": eapi}), attr)
def test_fetchables(self):
l = []
+
def f(self, cpv, allow_missing=False):
l.append(cpv)
- return allow_missing, {'monkey.tgz': {}, 'boon.tgz': {}, 'foon.tar.gz': {}}
+ return allow_missing, {"monkey.tgz": {}, "boon.tgz": {}, "foon.tar.gz": {}}
+
repo = self.make_parent(_get_digests=f)
parent = self.make_parent(_parent_repo=repo)
# verify it does digest lookups...
- o = self.get_pkg({'SRC_URI': 'http://foo.com/bar.tgz'}, repo=parent)
+ o = self.get_pkg({"SRC_URI": "http://foo.com/bar.tgz"}, repo=parent)
with pytest.raises(errors.MetadataException):
- getattr(o, 'fetchables')
+ getattr(o, "fetchables")
assert l == [o]
# basic tests;
for x in range(0, 3):
- f = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz',
- 'EAPI': str(x)},
- repo=parent).fetchables
- assert list(f[0].uri) == ['http://foo.com/monkey.tgz']
- assert f[0].filename == 'monkey.tgz'
-
- f = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz '
- 'http://dar/boon.tgz', 'EAPI': '2'},
- repo=parent).fetchables
- assert [list(x.uri) for x in f] == [['http://foo.com/monkey.tgz'], ['http://dar/boon.tgz']]
- assert [x.filename for x in f] == ['monkey.tgz', 'boon.tgz']
-
- f = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz -> foon.tar.gz',
- 'EAPI': '2'},
- repo=parent).fetchables
- assert list(f[0].uri) == ['http://foo.com/monkey.tgz']
- assert f[0].filename == 'foon.tar.gz'
-
- o = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz -> ',
- 'EAPI': '2'}, repo=parent)
+ f = self.get_pkg(
+ {"SRC_URI": "http://foo.com/monkey.tgz", "EAPI": str(x)}, repo=parent
+ ).fetchables
+ assert list(f[0].uri) == ["http://foo.com/monkey.tgz"]
+ assert f[0].filename == "monkey.tgz"
+
+ f = self.get_pkg(
+ {
+ "SRC_URI": "http://foo.com/monkey.tgz " "http://dar/boon.tgz",
+ "EAPI": "2",
+ },
+ repo=parent,
+ ).fetchables
+ assert [list(x.uri) for x in f] == [
+ ["http://foo.com/monkey.tgz"],
+ ["http://dar/boon.tgz"],
+ ]
+ assert [x.filename for x in f] == ["monkey.tgz", "boon.tgz"]
+
+ f = self.get_pkg(
+ {"SRC_URI": "http://foo.com/monkey.tgz -> foon.tar.gz", "EAPI": "2"},
+ repo=parent,
+ ).fetchables
+ assert list(f[0].uri) == ["http://foo.com/monkey.tgz"]
+ assert f[0].filename == "foon.tar.gz"
+
+ o = self.get_pkg(
+ {"SRC_URI": "http://foo.com/monkey.tgz -> ", "EAPI": "2"}, repo=parent
+ )
with pytest.raises(errors.MetadataException):
- getattr(o, 'fetchables')
+ getattr(o, "fetchables")
# verify it collapses multiple basenames down to the same.
- f = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz '
- 'http://foo.com2/monkey.tgz'}, repo=parent).fetchables
- assert list(f[0].uri) == ['http://foo.com/monkey.tgz', 'http://foo.com2/monkey.tgz']
+ f = self.get_pkg(
+ {"SRC_URI": "http://foo.com/monkey.tgz " "http://foo.com2/monkey.tgz"},
+ repo=parent,
+ ).fetchables
+ assert list(f[0].uri) == [
+ "http://foo.com/monkey.tgz",
+ "http://foo.com2/monkey.tgz",
+ ]
- mirror = fetch.mirror(['http://boon.com/'], 'mirror1')
- parent = self.make_parent(_parent_repo=repo, mirrors={'mirror1': mirror})
+ mirror = fetch.mirror(["http://boon.com/"], "mirror1")
+ parent = self.make_parent(_parent_repo=repo, mirrors={"mirror1": mirror})
f = self.get_pkg(
- {'SRC_URI': 'mirror://mirror1/foon/monkey.tgz'}, repo=parent).fetchables
- assert list(f[0].uri) == ['http://boon.com/foon/monkey.tgz']
+ {"SRC_URI": "mirror://mirror1/foon/monkey.tgz"}, repo=parent
+ ).fetchables
+ assert list(f[0].uri) == ["http://boon.com/foon/monkey.tgz"]
# unknown mirrors get ignored (and flagged by pkgcheck)
- pkg = self.get_pkg({'SRC_URI': 'mirror://mirror2/foon/monkey.tgz'}, repo=parent)
+ pkg = self.get_pkg({"SRC_URI": "mirror://mirror2/foon/monkey.tgz"}, repo=parent)
assert pkg.fetchables
- assert (
- [list(x.uri) for x in self.get_pkg(
- {'EAPI': '2', 'SRC_URI': 'mirror://mirror1/monkey.tgz -> foon.tar.gz'},
- repo=parent).fetchables] ==
- [['http://boon.com/monkey.tgz']])
-
- parent = self.make_parent(_parent_repo=repo,
- mirrors={'mirror1': mirror}, default_mirrors=fetch.default_mirror(
- ['http://default.com/dist/', 'http://default2.com/'],
- 'default'))
-
- assert (
- [list(x.uri) for x in self.get_pkg(
- {'EAPI': '2', 'SRC_URI': 'mirror://mirror1/monkey.tgz -> foon.tar.gz'},
- repo=parent).fetchables] ==
- [[
- 'http://default.com/dist/foon.tar.gz',
- 'http://default2.com/foon.tar.gz',
- 'http://boon.com/monkey.tgz']])
+ assert [
+ list(x.uri)
+ for x in self.get_pkg(
+ {"EAPI": "2", "SRC_URI": "mirror://mirror1/monkey.tgz -> foon.tar.gz"},
+ repo=parent,
+ ).fetchables
+ ] == [["http://boon.com/monkey.tgz"]]
+
+ parent = self.make_parent(
+ _parent_repo=repo,
+ mirrors={"mirror1": mirror},
+ default_mirrors=fetch.default_mirror(
+ ["http://default.com/dist/", "http://default2.com/"], "default"
+ ),
+ )
+
+ assert [
+ list(x.uri)
+ for x in self.get_pkg(
+ {"EAPI": "2", "SRC_URI": "mirror://mirror1/monkey.tgz -> foon.tar.gz"},
+ repo=parent,
+ ).fetchables
+ ] == [
+ [
+ "http://default.com/dist/foon.tar.gz",
+ "http://default2.com/foon.tar.gz",
+ "http://boon.com/monkey.tgz",
+ ]
+ ]
parent = self.make_parent(_parent_repo=repo, default_mirrors=mirror)
- f = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz'},
- repo=parent).fetchables
- assert list(f[0].uri) == ['http://boon.com/monkey.tgz', 'http://foo.com/monkey.tgz']
+ f = self.get_pkg(
+ {"SRC_URI": "http://foo.com/monkey.tgz"}, repo=parent
+ ).fetchables
+ assert list(f[0].uri) == [
+ "http://boon.com/monkey.tgz",
+ "http://foo.com/monkey.tgz",
+ ]
# skip default mirrors
- pkg = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz'}, repo=parent)
+ pkg = self.get_pkg({"SRC_URI": "http://foo.com/monkey.tgz"}, repo=parent)
f = pkg.generate_fetchables(skip_default_mirrors=True)
- assert list(f[0].uri) == ['http://foo.com/monkey.tgz']
+ assert list(f[0].uri) == ["http://foo.com/monkey.tgz"]
# test primaryuri...
- mirror2 = fetch.mirror(['http://boon2.com/'], 'default')
- parent = self.make_parent(_parent_repo=repo, default_mirrors=mirror,
- mirrors={'mirror1': mirror2})
- f = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz '
- 'mirror://mirror1/boon.tgz', 'RESTRICT': 'primaryuri'},
- repo=parent).fetchables
- assert list(f[0].uri) == ['http://foo.com/monkey.tgz', 'http://boon.com/monkey.tgz']
- assert list(f[1].uri) == ['http://boon2.com/boon.tgz', 'http://boon.com/boon.tgz']
+ mirror2 = fetch.mirror(["http://boon2.com/"], "default")
+ parent = self.make_parent(
+ _parent_repo=repo, default_mirrors=mirror, mirrors={"mirror1": mirror2}
+ )
+ f = self.get_pkg(
+ {
+ "SRC_URI": "http://foo.com/monkey.tgz " "mirror://mirror1/boon.tgz",
+ "RESTRICT": "primaryuri",
+ },
+ repo=parent,
+ ).fetchables
+ assert list(f[0].uri) == [
+ "http://foo.com/monkey.tgz",
+ "http://boon.com/monkey.tgz",
+ ]
+ assert list(f[1].uri) == [
+ "http://boon2.com/boon.tgz",
+ "http://boon.com/boon.tgz",
+ ]
assert len(f) == 2
# restrict=mirror..
- f = self.get_pkg({'SRC_URI': 'http://foo.com/monkey.tgz',
- 'RESTRICT': 'mirror'}, repo=parent).fetchables
- assert list(f[0].uri) == ['http://foo.com/monkey.tgz']
+ f = self.get_pkg(
+ {"SRC_URI": "http://foo.com/monkey.tgz", "RESTRICT": "mirror"}, repo=parent
+ ).fetchables
+ assert list(f[0].uri) == ["http://foo.com/monkey.tgz"]
assert len(f) == 1
# test uri for when there *is* no uri
- f = self.get_pkg({'SRC_URI': 'monkey.tgz'}, repo=parent).fetchables
+ f = self.get_pkg({"SRC_URI": "monkey.tgz"}, repo=parent).fetchables
assert list(f[0].uri) == []
def test_required_use(self):
for eapi_str, eapi in EAPI.known_eapis.items():
# Check all EAPIs for REQUIRED_USE parsing, EAPIs that don't support it
# should return depsets that evaluate to False.
- pkg = self.get_pkg({'EAPI': eapi_str, 'REQUIRED_USE': 'test? ( foo )'})
- assert bool(pkg.required_use) == eapi.options.has_required_use, \
- f"failure parsing REQUIRED_USE for EAPI '{eapi}'"
+ pkg = self.get_pkg({"EAPI": eapi_str, "REQUIRED_USE": "test? ( foo )"})
+ assert (
+ bool(pkg.required_use) == eapi.options.has_required_use
+ ), f"failure parsing REQUIRED_USE for EAPI '{eapi}'"
# Render various REQUIRED_USE deps with set USE flag states and
# check for satisfiability.
if eapi.options.has_required_use:
required_use_data = (
- ('foo', '', 'foo', True),
- ('foo', '', '', False),
- ('!foo', 'foo', '', True),
- ('!foo', 'foo', 'foo', False),
- ('foo bar', '', 'foo bar', True),
- ('foo bar', '', 'bar foo', True),
- ('( foo bar )', '', 'foo', False),
- ('( foo bar )', '', 'foo bar', True),
- ('test? ( foo )', 'test', 'foo', True),
- ('test? ( foo )', 'test', '', False),
- ('test? ( !foo )', 'test', 'foo', False),
- ('test? ( !foo )', 'test', '', True),
- ('test? ( foo bar )', 'test', 'foo bar', True),
- ('!test? ( foo )', 'test', '', True),
- ('!test? ( foo )', '', 'foo', True),
- ('|| ( test foo )', 'test', 'test', True),
- ('|| ( test foo )', 'test', 'test foo', True),
- ('|| ( test foo ) bar? ( foo )', 'test', 'test', True),
- ('|| ( test foo ) bar? ( foo )', 'bar', 'foo', True),
- ('^^ ( bar foo )', '', 'bar', True),
- ('^^ ( bar foo )', '', 'foo', True),
- ('^^ ( bar foo )', '', '', False),
+ ("foo", "", "foo", True),
+ ("foo", "", "", False),
+ ("!foo", "foo", "", True),
+ ("!foo", "foo", "foo", False),
+ ("foo bar", "", "foo bar", True),
+ ("foo bar", "", "bar foo", True),
+ ("( foo bar )", "", "foo", False),
+ ("( foo bar )", "", "foo bar", True),
+ ("test? ( foo )", "test", "foo", True),
+ ("test? ( foo )", "test", "", False),
+ ("test? ( !foo )", "test", "foo", False),
+ ("test? ( !foo )", "test", "", True),
+ ("test? ( foo bar )", "test", "foo bar", True),
+ ("!test? ( foo )", "test", "", True),
+ ("!test? ( foo )", "", "foo", True),
+ ("|| ( test foo )", "test", "test", True),
+ ("|| ( test foo )", "test", "test foo", True),
+ ("|| ( test foo ) bar? ( foo )", "test", "test", True),
+ ("|| ( test foo ) bar? ( foo )", "bar", "foo", True),
+ ("^^ ( bar foo )", "", "bar", True),
+ ("^^ ( bar foo )", "", "foo", True),
+ ("^^ ( bar foo )", "", "", False),
)
if eapi.options.required_use_one_of:
required_use_data += (
- ('?? ( bar foo )', '', 'bar', True),
- ('?? ( bar foo )', '', 'foo', True),
- ('?? ( bar foo )', '', '', True),
+ ("?? ( bar foo )", "", "bar", True),
+ ("?? ( bar foo )", "", "foo", True),
+ ("?? ( bar foo )", "", "", True),
)
else:
# EAPIs that don't support the ?? operator raise metadata exceptions if used.
- pkg = self.get_pkg({'EAPI': eapi_str, 'REQUIRED_USE': '?? ( bar foo )'})
+ pkg = self.get_pkg(
+ {"EAPI": eapi_str, "REQUIRED_USE": "?? ( bar foo )"}
+ )
with pytest.raises(errors.MetadataException) as cm:
- getattr(pkg, 'required_use')
- assert f"EAPI '{eapi_str}' doesn't support '??' operator" in cm.value.error
+ getattr(pkg, "required_use")
+ assert (
+ f"EAPI '{eapi_str}' doesn't support '??' operator"
+ in cm.value.error
+ )
for required_use, iuse, use, satisfied in required_use_data:
- pkg = self.get_pkg({'EAPI': eapi_str, 'REQUIRED_USE': required_use})
+ pkg = self.get_pkg({"EAPI": eapi_str, "REQUIRED_USE": required_use})
required_use_deps = pkg.required_use.evaluate_depset(iuse.split())
for node in required_use_deps:
- assert node.match(use.split()) is satisfied, \
- f'REQUIRED_USE="{required_use}", IUSE="{iuse}", ' \
+ assert node.match(use.split()) is satisfied, (
+ f'REQUIRED_USE="{required_use}", IUSE="{iuse}", '
f'USE="{use}", satisfied="{satisfied}"'
+ )
def test_live(self):
o = self.get_pkg({})
assert not o.live
- o = self.get_pkg({'PROPERTIES': 'live'})
+ o = self.get_pkg({"PROPERTIES": "live"})
assert o.live
@@ -469,6 +591,7 @@ class TestPackage(TestBase):
def test_mtime_(self):
l = []
+
def f(self, cpv):
l.append(cpv)
return 100
@@ -479,13 +602,12 @@ class TestPackage(TestBase):
assert l == [o]
def make_shared_pkg_data(self, manifest=None, metadata_xml=None):
- return self.get_pkg(
- pre_args=(repo_objs.SharedPkgData(metadata_xml, manifest),))
+ return self.get_pkg(pre_args=(repo_objs.SharedPkgData(metadata_xml, manifest),))
@pytest.mark.parametrize("attr", ("longdescription", "maintainers"))
def test_metadata_xml(self, attr):
m = repo_objs.MetadataXml(None)
- object.__setattr__(m, "_"+attr, "foon")
+ object.__setattr__(m, "_" + attr, "foon")
object.__setattr__(m, "_source", None)
o = self.make_shared_pkg_data(metadata_xml=m)
assert getattr(o, attr) == "foon"
@@ -500,23 +622,30 @@ class TestPackageFactory:
kls = ebuild_src.package_factory
- def mkinst(self, repo=None, cache=(), eclasses=None, mirrors={},
- default_mirrors={}, **overrides):
+ def mkinst(
+ self,
+ repo=None,
+ cache=(),
+ eclasses=None,
+ mirrors={},
+ default_mirrors={},
+ **overrides,
+ ):
o = self.kls(repo, cache, eclasses, mirrors, default_mirrors)
for k, v in overrides.items():
object.__setattr__(o, k, v)
return o
def test_mirrors(self):
- mirrors_d = {'gentoo': ['http://bar/', 'http://far/']}
+ mirrors_d = {"gentoo": ["http://bar/", "http://far/"]}
mirrors = {k: fetch.mirror(v, k) for k, v in mirrors_d.items()}
pf = self.mkinst(mirrors=mirrors_d)
assert len(pf._cache) == 0
assert sorted(pf.mirrors) == sorted(mirrors)
- assert pf.mirrors['gentoo'] == mirrors['gentoo']
+ assert pf.mirrors["gentoo"] == mirrors["gentoo"]
assert pf.default_mirrors == None
- def_mirrors = ['http://def1/', 'http://blah1/']
+ def_mirrors = ["http://def1/", "http://blah1/"]
pf = self.mkinst(default_mirrors=def_mirrors)
assert pf.mirrors == {}
assert list(pf.default_mirrors) == def_mirrors
@@ -524,42 +653,53 @@ class TestPackageFactory:
def test_get_ebuild_src(self):
assert (
self.mkinst(
- repo=SimpleNamespace(_get_ebuild_src=lambda s: f"lincoln haunts me: {s}")
- ).get_ebuild_src("1") ==
- "lincoln haunts me: 1")
+ repo=SimpleNamespace(
+ _get_ebuild_src=lambda s: f"lincoln haunts me: {s}"
+ )
+ ).get_ebuild_src("1")
+ == "lincoln haunts me: 1"
+ )
def test_get_ebuild_mtime(self, tmpdir):
f = pjoin(str(tmpdir), "temp-0.ebuild")
- open(f, 'w').close()
+ open(f, "w").close()
mtime = self.mkinst(
- repo=SimpleNamespace(_get_ebuild_path=lambda s: f))._get_ebuild_mtime(None)
+ repo=SimpleNamespace(_get_ebuild_path=lambda s: f)
+ )._get_ebuild_mtime(None)
assert mtime == os.stat(f).st_mtime
def test_get_metadata(self):
- ec = FakeEclassCache('/nonexistent/path')
- pkg = SimpleNamespace(_mtime_=100, cpvstr='dev-util/diffball-0.71', path='bollocks')
+ ec = FakeEclassCache("/nonexistent/path")
+ pkg = SimpleNamespace(
+ _mtime_=100, cpvstr="dev-util/diffball-0.71", path="bollocks"
+ )
class fake_cache(dict):
readonly = False
validate_result = False
+
def validate_entry(self, *args):
return self.validate_result
- cache1 = fake_cache({pkg.cpvstr: {'_mtime_': 100, 'marker': 1}})
+ cache1 = fake_cache({pkg.cpvstr: {"_mtime_": 100, "marker": 1}})
cache2 = fake_cache({})
- class explode_kls(AssertionError): pass
+ class explode_kls(AssertionError):
+ pass
def explode(name, *args, **kwargs):
raise explode_kls(
- f"{name} was called with {args!r} and {kwargs!r}, shouldn't be invoked.")
+ f"{name} was called with {args!r} and {kwargs!r}, shouldn't be invoked."
+ )
pf = self.mkinst(
- cache=(cache2, cache1), eclasses=ec,
- _update_metadata=partial(explode, '_update_metadata'))
+ cache=(cache2, cache1),
+ eclasses=ec,
+ _update_metadata=partial(explode, "_update_metadata"),
+ )
cache1.validate_result = True
- assert pf._get_metadata(pkg) == {'marker': 1, '_mtime_': 100}
+ assert pf._get_metadata(pkg) == {"marker": 1, "_mtime_": 100}
assert list(cache1.keys()) == [pkg.cpvstr]
assert not cache2
@@ -574,14 +714,23 @@ class TestPackageFactory:
# Note that this is known crap eclass data; partially lazyness, partially
# to validate the eclass validation is left to ec cache only.
- cache2.update({pkg.cpvstr:
- {'_mtime_': 200, '_eclasses_': {'eclass1': (None, 100)}, 'marker': 2}
- })
+ cache2.update(
+ {
+ pkg.cpvstr: {
+ "_mtime_": 200,
+ "_eclasses_": {"eclass1": (None, 100)},
+ "marker": 2,
+ }
+ }
+ )
cache2.readonly = True
with pytest.raises(explode_kls):
pf._get_metadata(pkg)
assert list(cache2.keys()) == [pkg.cpvstr]
# keep in mind the backend assumes it gets its own copy of the data.
# thus, modifying (popping _mtime_) _is_ valid
- assert cache2[pkg.cpvstr] == \
- {'_eclasses_': {'eclass1': (None, 100)}, 'marker': 2, '_mtime_': 200}
+ assert cache2[pkg.cpvstr] == {
+ "_eclasses_": {"eclass1": (None, 100)},
+ "marker": 2,
+ "_mtime_": 200,
+ }
diff --git a/tests/ebuild/test_eclass.py b/tests/ebuild/test_eclass.py
index 52890aac0..56bf2ec39 100644
--- a/tests/ebuild/test_eclass.py
+++ b/tests/ebuild/test_eclass.py
@@ -5,14 +5,15 @@ from snakeoil.contexts import chdir
class FakeEclass:
def __init__(self, path, contents):
self.path = path
- with open(path, 'w') as f:
+ with open(path, "w") as f:
f.write(contents)
class FakeEclassCache:
def __init__(self, temp_dir, eclasses):
- self.eclasses = dict((name, FakeEclass(name, contents))
- for name, contents in eclasses.items())
+ self.eclasses = dict(
+ (name, FakeEclass(name, contents)) for name, contents in eclasses.items()
+ )
def get_eclass(self, name):
return self.eclasses.get(name)
@@ -24,13 +25,13 @@ class FakeEclassRepo:
def make_eclass(name, provides=None):
- eclass = f'# @ECLASS: {name}.eclass\n'
+ eclass = f"# @ECLASS: {name}.eclass\n"
if provides is not None:
- eclass += f'# @PROVIDES: {provides}\n'
+ eclass += f"# @PROVIDES: {provides}\n"
return eclass
-FOO_ECLASS = '''
+FOO_ECLASS = """
# @ECLASS: foo.eclass
# @MAINTAINER:
# Random Person <maintainer@random.email>
@@ -106,115 +107,148 @@ _foo_internal_func() { :; }
# @DESCRIPTION:
# Public stub function.
foo_public_func() { :; }
-'''
+"""
class TestEclassDoc:
def test_foo_eclass(self, tmp_path):
- (tmp_path / 'foo.eclass').write_text(FOO_ECLASS)
- doc = eclass.EclassDoc(str(tmp_path / 'foo.eclass'))
- assert doc.name == 'foo.eclass'
- assert doc.vcsurl == 'https://example.com/foo.eclass'
- assert doc.blurb == 'Test eclass.'
- assert doc.deprecated == 'bar or frobnicate'
- assert doc.raw_provides == ('bar',)
- assert doc.maintainers == ('Random Person <maintainer@random.email>',)
- assert doc.authors == ('Another Person <another@random.email>',
- 'Random Person <maintainer@random.email>',)
- assert doc.bugreports == '::\n\n Report bugs somewhere.'
- assert doc.description == ('::\n\n'
- ' Yadda yadda yadda.\n'
- ' Lots to say here.\n\n'
- ' Really, very interesting eclass.\n\n'
- 'How to use it\n'
- '~~~~~~~~~~~~~\n'
- '::\n\n'
- ' Somehow.')
- assert doc.example == ('::\n\n'
- ' inherit foo\n\n'
- ' src_prepare() {\n'
- ' foo_public_func\n'
- ' }')
+ (tmp_path / "foo.eclass").write_text(FOO_ECLASS)
+ doc = eclass.EclassDoc(str(tmp_path / "foo.eclass"))
+ assert doc.name == "foo.eclass"
+ assert doc.vcsurl == "https://example.com/foo.eclass"
+ assert doc.blurb == "Test eclass."
+ assert doc.deprecated == "bar or frobnicate"
+ assert doc.raw_provides == ("bar",)
+ assert doc.maintainers == ("Random Person <maintainer@random.email>",)
+ assert doc.authors == (
+ "Another Person <another@random.email>",
+ "Random Person <maintainer@random.email>",
+ )
+ assert doc.bugreports == "::\n\n Report bugs somewhere."
+ assert doc.description == (
+ "::\n\n"
+ " Yadda yadda yadda.\n"
+ " Lots to say here.\n\n"
+ " Really, very interesting eclass.\n\n"
+ "How to use it\n"
+ "~~~~~~~~~~~~~\n"
+ "::\n\n"
+ " Somehow."
+ )
+ assert doc.example == (
+ "::\n\n"
+ " inherit foo\n\n"
+ " src_prepare() {\n"
+ " foo_public_func\n"
+ " }"
+ )
assert doc.supported_eapis == frozenset(map(str, range(8)))
- assert doc.function_names == frozenset(('_foo_internal_func', 'foo_public_func'))
- assert doc.internal_function_names == frozenset(('_foo_internal_func',))
+ assert doc.function_names == frozenset(
+ ("_foo_internal_func", "foo_public_func")
+ )
+ assert doc.internal_function_names == frozenset(("_foo_internal_func",))
- assert doc.function_variable_names == frozenset(('FOO_PUBLIC_VAR', '_FOO_INTERNAL_VAR'))
+ assert doc.function_variable_names == frozenset(
+ ("FOO_PUBLIC_VAR", "_FOO_INTERNAL_VAR")
+ )
- assert doc.variable_names == frozenset(('FOO_PUBLIC_ECLASS_VAR',
- '_FOO_INTERNAL_ECLASS_VAR',
- 'FOO_ANOTHER_ECLASS_VAR'))
- assert doc.internal_variable_names == frozenset(('_FOO_INTERNAL_ECLASS_VAR',))
+ assert doc.variable_names == frozenset(
+ (
+ "FOO_PUBLIC_ECLASS_VAR",
+ "_FOO_INTERNAL_ECLASS_VAR",
+ "FOO_ANOTHER_ECLASS_VAR",
+ )
+ )
+ assert doc.internal_variable_names == frozenset(("_FOO_INTERNAL_ECLASS_VAR",))
assert len(doc.functions) == 2
- assert doc.functions[0] == {'name': '_foo_internal_func',
- 'returns': 'nothing special',
- 'deprecated': False,
- 'internal': True,
- 'maintainers': ('Some Person <someone@random.email>',),
- 'description': '::\n\n Internal stub function.',
- 'usage': '<bar> [<baz>]'}
-
- assert doc.functions[1] == {'name': 'foo_public_func',
- 'returns': None,
- 'deprecated': 'bar_public_func',
- 'internal': False,
- 'maintainers': None,
- 'description': '::\n\n Public stub function.',
- 'usage': None}
+ assert doc.functions[0] == {
+ "name": "_foo_internal_func",
+ "returns": "nothing special",
+ "deprecated": False,
+ "internal": True,
+ "maintainers": ("Some Person <someone@random.email>",),
+ "description": "::\n\n Internal stub function.",
+ "usage": "<bar> [<baz>]",
+ }
+
+ assert doc.functions[1] == {
+ "name": "foo_public_func",
+ "returns": None,
+ "deprecated": "bar_public_func",
+ "internal": False,
+ "maintainers": None,
+ "description": "::\n\n Public stub function.",
+ "usage": None,
+ }
assert len(doc.function_variables) == 2
- assert doc.function_variables[0] == {'name': '_FOO_INTERNAL_VAR',
- 'deprecated': False,
- 'default_unset': True,
- 'internal': True,
- 'required': False,
- 'description': '::\n\n Internal variable for foo_public_func.'}
- assert doc.function_variables[1] == {'name': 'FOO_PUBLIC_VAR',
- 'deprecated': 'BAR_PUBLIC_VAR',
- 'default_unset': False,
- 'internal': False,
- 'required': True,
- 'description': '::\n\n Public variable for foo_public_func.'}
+ assert doc.function_variables[0] == {
+ "name": "_FOO_INTERNAL_VAR",
+ "deprecated": False,
+ "default_unset": True,
+ "internal": True,
+ "required": False,
+ "description": "::\n\n Internal variable for foo_public_func.",
+ }
+ assert doc.function_variables[1] == {
+ "name": "FOO_PUBLIC_VAR",
+ "deprecated": "BAR_PUBLIC_VAR",
+ "default_unset": False,
+ "internal": False,
+ "required": True,
+ "description": "::\n\n Public variable for foo_public_func.",
+ }
assert len(doc.variables) == 3
- assert doc.variables[0] == {'name': '_FOO_INTERNAL_ECLASS_VAR',
- 'deprecated': False,
- 'default_unset': True,
- 'internal': True,
- 'required': False,
- 'pre_inherit': False,
- 'user_variable': False,
- 'output_variable': False,
- 'description': '::\n\n Internal variable.'}
- assert doc.variables[1] == {'name': 'FOO_PUBLIC_ECLASS_VAR',
- 'deprecated': 'BAR_PUBLIC_ECLASS_VAR',
- 'default_unset': False,
- 'internal': False,
- 'required': True,
- 'pre_inherit': True,
- 'user_variable': False,
- 'output_variable': False,
- 'description': '::\n\n Public variable.'}
- assert doc.variables[2] == {'name': 'FOO_ANOTHER_ECLASS_VAR',
- 'deprecated': False,
- 'default_unset': False,
- 'internal': False,
- 'required': False,
- 'pre_inherit': False,
- 'user_variable': False,
- 'output_variable': False,
- 'description': '::\n\n Yet another variable.'}
+ assert doc.variables[0] == {
+ "name": "_FOO_INTERNAL_ECLASS_VAR",
+ "deprecated": False,
+ "default_unset": True,
+ "internal": True,
+ "required": False,
+ "pre_inherit": False,
+ "user_variable": False,
+ "output_variable": False,
+ "description": "::\n\n Internal variable.",
+ }
+ assert doc.variables[1] == {
+ "name": "FOO_PUBLIC_ECLASS_VAR",
+ "deprecated": "BAR_PUBLIC_ECLASS_VAR",
+ "default_unset": False,
+ "internal": False,
+ "required": True,
+ "pre_inherit": True,
+ "user_variable": False,
+ "output_variable": False,
+ "description": "::\n\n Public variable.",
+ }
+ assert doc.variables[2] == {
+ "name": "FOO_ANOTHER_ECLASS_VAR",
+ "deprecated": False,
+ "default_unset": False,
+ "internal": False,
+ "required": False,
+ "pre_inherit": False,
+ "user_variable": False,
+ "output_variable": False,
+ "description": "::\n\n Yet another variable.",
+ }
def test_recursive_provides(self, tmp_path):
with chdir(tmp_path):
- repo = FakeEclassRepo(str(tmp_path), {
- 'foo': FOO_ECLASS,
- 'bar': make_eclass('bar', provides='deep1 deep2'),
- 'deep1': make_eclass('deep1 deep2'),
- 'deep2': make_eclass('deep2 foo'),
- })
- assert (sorted(eclass.EclassDoc(repo.eclass_cache.get_eclass('foo').path,
- repo=repo).provides) ==
- ['bar', 'deep1', 'deep2'])
+ repo = FakeEclassRepo(
+ str(tmp_path),
+ {
+ "foo": FOO_ECLASS,
+ "bar": make_eclass("bar", provides="deep1 deep2"),
+ "deep1": make_eclass("deep1 deep2"),
+ "deep2": make_eclass("deep2 foo"),
+ },
+ )
+ assert sorted(
+ eclass.EclassDoc(
+ repo.eclass_cache.get_eclass("foo").path, repo=repo
+ ).provides
+ ) == ["bar", "deep1", "deep2"]
diff --git a/tests/ebuild/test_eclass_cache.py b/tests/ebuild/test_eclass_cache.py
index 279afbe82..813d8944e 100644
--- a/tests/ebuild/test_eclass_cache.py
+++ b/tests/ebuild/test_eclass_cache.py
@@ -9,29 +9,31 @@ from snakeoil.osutils import pjoin
class FakeEclassCache(eclass_cache.base):
-
def __init__(self, path):
eclass_cache.base.__init__(self, location=path, eclassdir=path)
self.eclasses = {
- "eclass1":LazilyHashedPath(path, mtime=100),
- "eclass2":LazilyHashedPath(path, mtime=200)}
+ "eclass1": LazilyHashedPath(path, mtime=100),
+ "eclass2": LazilyHashedPath(path, mtime=200),
+ }
class TestBase:
-
@pytest.fixture(autouse=True)
def _setup(self):
- path = '/nonexistent/path/'
+ path = "/nonexistent/path/"
self.ec = FakeEclassCache(path)
self.ec_locs = {"eclass1": path, "eclass2": path}
- @pytest.mark.parametrize(('result', 'ec', 'mtime'), (
- (False, 'eclass3', 100),
- (True, 'eclass1', 100),
- (False, 'eclass1', 200),
- ))
+ @pytest.mark.parametrize(
+ ("result", "ec", "mtime"),
+ (
+ (False, "eclass3", 100),
+ (True, "eclass1", 100),
+ (False, "eclass1", 200),
+ ),
+ )
def test_rebuild_eclass_entry(self, result, ec, mtime):
- data = [(ec, [('mtime', mtime)])]
+ data = [(ec, [("mtime", mtime)])]
got = self.ec.rebuild_cache_entry(data)
assert bool(got) == result
@@ -43,11 +45,10 @@ class TestBase:
assert data is self.ec.get_eclass_data(keys)
assert set(keys) == set(data)
data = self.ec.get_eclass_data(["eclass1"])
- assert data == {'eclass1': self.ec.eclasses['eclass1']}
+ assert data == {"eclass1": self.ec.eclasses["eclass1"]}
class TestEclassCache(TestBase):
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
for x, mtime in (("eclass1", 100), ("eclass2", 200)):
@@ -70,19 +71,18 @@ class TestEclassCache(TestBase):
class TestStackedCaches(TestEclassCache):
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
(loc1 := tmp_path / "stack1").mkdir()
- (loc1 / 'eclass1.eclass').touch()
- os.utime(loc1 / 'eclass1.eclass', (100, 100))
+ (loc1 / "eclass1.eclass").touch()
+ os.utime(loc1 / "eclass1.eclass", (100, 100))
ec1 = eclass_cache.cache(str(loc1))
(loc2 := tmp_path / "stack2").mkdir()
- (loc2 / 'eclass2.eclass').touch()
- os.utime(loc2 / 'eclass2.eclass', (100, 100))
+ (loc2 / "eclass2.eclass").touch()
+ os.utime(loc2 / "eclass2.eclass", (100, 100))
ec2 = eclass_cache.cache(str(loc2))
self.ec = eclass_cache.StackedCaches([ec1, ec2])
self.ec_locs = {"eclass1": str(loc1), "eclass2": str(loc2)}
# make a shadowed file to verify it's not seen
- (loc2 / 'eclass1.eclass').touch()
+ (loc2 / "eclass1.eclass").touch()
diff --git a/tests/ebuild/test_filter_env.py b/tests/ebuild/test_filter_env.py
index 89d91f5c2..c79223e72 100644
--- a/tests/ebuild/test_filter_env.py
+++ b/tests/ebuild/test_filter_env.py
@@ -6,26 +6,41 @@ from pkgcore.ebuild.filter_env import main_run
class TestFilterEnv:
-
- def get_output(self, raw_data, funcs=None, vars=None, preserve_funcs=False,
- preserve_vars=False, debug=False, global_envvar_callback=None):
+ def get_output(
+ self,
+ raw_data,
+ funcs=None,
+ vars=None,
+ preserve_funcs=False,
+ preserve_vars=False,
+ debug=False,
+ global_envvar_callback=None,
+ ):
out = io.BytesIO()
if funcs:
- funcs = funcs.split(',')
+ funcs = funcs.split(",")
if vars:
- vars = vars.split(',')
- main_run(out, raw_data, vars, funcs, preserve_vars, preserve_funcs,
- global_envvar_callback=global_envvar_callback)
- return out.getvalue().decode('utf-8')
+ vars = vars.split(",")
+ main_run(
+ out,
+ raw_data,
+ vars,
+ funcs,
+ preserve_vars,
+ preserve_funcs,
+ global_envvar_callback=global_envvar_callback,
+ )
+ return out.getvalue().decode("utf-8")
def test_function_foo(self):
- ret = ''.join(self.get_output("function foo() {:;}", funcs="foo"))
- assert ret == ''
- ret = ''.join(self.get_output("functionfoo() {:;}", funcs="foo"))
- assert ret == 'functionfoo() {:;}'
+ ret = "".join(self.get_output("function foo() {:;}", funcs="foo"))
+ assert ret == ""
+ ret = "".join(self.get_output("functionfoo() {:;}", funcs="foo"))
+ assert ret == "functionfoo() {:;}"
def test_simple(self):
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
foo() {
:
}
@@ -33,39 +48,43 @@ class TestFilterEnv:
bar() {
:
}
- """)
- ret = ''.join(self.get_output(data))
- assert 'foo' in ret
- assert 'bar' in ret
- ret = ''.join(self.get_output(data, funcs='foo'))
- assert 'foo' not in ret
- assert 'bar' in ret
- ret = ''.join(self.get_output(data, funcs='bar'))
- assert 'foo' in ret
- assert 'bar' not in ret
- ret = ''.join(self.get_output(data, funcs='bar,foo'))
- assert 'foo' not in ret
- assert 'bar' not in ret
+ """
+ )
+ ret = "".join(self.get_output(data))
+ assert "foo" in ret
+ assert "bar" in ret
+ ret = "".join(self.get_output(data, funcs="foo"))
+ assert "foo" not in ret
+ assert "bar" in ret
+ ret = "".join(self.get_output(data, funcs="bar"))
+ assert "foo" in ret
+ assert "bar" not in ret
+ ret = "".join(self.get_output(data, funcs="bar,foo"))
+ assert "foo" not in ret
+ assert "bar" not in ret
def test1(self):
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
MODULE_NAMES=${MODULE_NAMES//${i}(*};
tc-arch ()
{
tc-ninja_magic_to_arch portage $@
}
- """)
- ret = ''.join(self.get_output(data, vars='MODULE_NAMES'))
- assert 'MODULE_NAMES' not in ret
- assert 'tc-arch' in ret
+ """
+ )
+ ret = "".join(self.get_output(data, vars="MODULE_NAMES"))
+ assert "MODULE_NAMES" not in ret
+ assert "tc-arch" in ret
def test_comments(self):
data = "dar=${yar##.%}\nfoo() {\n:\n}\n"
- ret = ''.join(self.get_output(data, vars='dar'))
- assert 'dar' not in ret
- assert 'foo' in ret
+ ret = "".join(self.get_output(data, vars="dar"))
+ assert "dar" not in ret
+ assert "foo" in ret
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
src_unpack() {
use idn && {
# BIND 9.4.0 doesn't have this patch
@@ -76,12 +95,14 @@ class TestFilterEnv:
src_compile() {
:
}
- """)
- assert 'src_unpack' in ''.join(self.get_output(data, funcs='src_compile'))
- ret = ''.join(self.get_output(data, funcs='src_unpack'))
- assert 'src_compile' in ret
- assert 'src_unpack' not in ret
- data = textwrap.dedent("""\
+ """
+ )
+ assert "src_unpack" in "".join(self.get_output(data, funcs="src_compile"))
+ ret = "".join(self.get_output(data, funcs="src_unpack"))
+ assert "src_compile" in ret
+ assert "src_unpack" not in ret
+ data = textwrap.dedent(
+ """\
src_install() {
local -f ${f##*=}
}
@@ -89,19 +110,25 @@ class TestFilterEnv:
pkg_postinst() {
:
}
- """)
- assert 'pkg_postinst' not in ''.join(self.get_output(data, funcs='pkg_postinst'))
- data = textwrap.dedent("""\
+ """
+ )
+ assert "pkg_postinst" not in "".join(
+ self.get_output(data, funcs="pkg_postinst")
+ )
+ data = textwrap.dedent(
+ """\
src_unpack() {
fnames=$(scanelf -pyqs__uClibc_start_main -F%F#s)
}
src_compile() {
:
}
- """)
- assert 'src_compile' in ''.join(self.get_output(data, funcs='src_unpack'))
+ """
+ )
+ assert "src_compile" in "".join(self.get_output(data, funcs="src_unpack"))
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
findtclver() {
[ "$(#i)" = "3" ]
}
@@ -109,11 +136,13 @@ class TestFilterEnv:
pkg_setup() {
:
}
- """)
- assert 'pkg_setup' in ''.join(self.get_output(data, funcs='findtclver'))
+ """
+ )
+ assert "pkg_setup" in "".join(self.get_output(data, funcs="findtclver"))
def test_here(self):
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
src_install() {
cat >${D}/etc/modules.d/davfs2 <<EOF
alias char-major-67 coda
@@ -124,10 +153,12 @@ class TestFilterEnv:
pkg_setup() {
:
}
- """)
- assert 'pkg_setup' not in ''.join(self.get_output(data, funcs='pkg_setup'))
+ """
+ )
+ assert "pkg_setup" not in "".join(self.get_output(data, funcs="pkg_setup"))
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
pkg_setup() {
while read line; do elog "${line}"; done <<EOF
The default behaviour of tcsh has significantly changed starting from
@@ -147,11 +178,13 @@ class TestFilterEnv:
pkg_foo() {
:
}
- """)
- assert 'pkg_foo' not in ''.join(self.get_output(data, funcs='pkg_foo'))
+ """
+ )
+ assert "pkg_foo" not in "".join(self.get_output(data, funcs="pkg_foo"))
def test_vars(self):
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
f() {
x=$y
}
@@ -159,10 +192,12 @@ class TestFilterEnv:
z() {
:
}
- """)
- assert 'z' in ''.join(self.get_output(data, funcs='f'))
+ """
+ )
+ assert "z" in "".join(self.get_output(data, funcs="f"))
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
f() {
x="${y}"
}
@@ -170,10 +205,12 @@ class TestFilterEnv:
z() {
:
}
- """)
- assert 'z' in ''.join(self.get_output(data, funcs='f'))
+ """
+ )
+ assert "z" in "".join(self.get_output(data, funcs="f"))
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
src_compile() {
$(ABI=foo get_libdir)
}
@@ -181,11 +218,13 @@ class TestFilterEnv:
pkg_setup() {
:
}
- """)
- assert 'pkg_setup' in ''.join(self.get_output(data, funcs='src_compile'))
+ """
+ )
+ assert "pkg_setup" in "".join(self.get_output(data, funcs="src_compile"))
def test_quoting(self):
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
pkg_postinst() {
einfo " /bin/ls ${ROOT}etc/init.d/net.* | grep -v '/net.lo$' | xargs -n1 ln -sfvn net.lo"
}
@@ -193,10 +232,12 @@ class TestFilterEnv:
pkg_setup() {
:
}
- """)
- assert 'pkg_setup' in ''.join(self.get_output(data, funcs='pkg_postinst'))
+ """
+ )
+ assert "pkg_setup" in "".join(self.get_output(data, funcs="pkg_postinst"))
- data = textwrap.dedent("""\
+ data = textwrap.dedent(
+ """\
src_unpack() {
testExp=$'\177\105\114\106\001\001\001'
}
@@ -204,20 +245,24 @@ class TestFilterEnv:
src_install() {
:
}
- """)
- assert 'src_install' in ''.join(self.get_output(data, funcs='src_unpack'))
+ """
+ )
+ assert "src_install" in "".join(self.get_output(data, funcs="src_unpack"))
def test_arg_awareness(self):
data = "f() {\n x \\{}\n}\n"
- assert '}' not in ''.join(self.get_output(data, 'f'))
-
- @pytest.mark.parametrize(("data", "var_list"), (
- ("f(){\nX=dar\n}", set()),
- ("f(){\nX=dar\n}\nY=a", {'Y'}),
- ("f(){\nX=dar\n}\nmy command\nY=a\nf=$(dar)", {'Y', 'f'}),
- ("f(){\nX=dar\n}\nmy command\nY=a\nf=$(dar) foon\n", {'Y', 'f'}),
- ("f(){\nX=dar foon\n}\nY=dar\nf2(){Z=dar;}\n", {'Y'})
- ))
+ assert "}" not in "".join(self.get_output(data, "f"))
+
+ @pytest.mark.parametrize(
+ ("data", "var_list"),
+ (
+ ("f(){\nX=dar\n}", set()),
+ ("f(){\nX=dar\n}\nY=a", {"Y"}),
+ ("f(){\nX=dar\n}\nmy command\nY=a\nf=$(dar)", {"Y", "f"}),
+ ("f(){\nX=dar\n}\nmy command\nY=a\nf=$(dar) foon\n", {"Y", "f"}),
+ ("f(){\nX=dar foon\n}\nY=dar\nf2(){Z=dar;}\n", {"Y"}),
+ ),
+ )
def test_print_vars(self, data, var_list):
l = set()
self.get_output(data, global_envvar_callback=l.add)
diff --git a/tests/ebuild/test_formatter.py b/tests/ebuild/test_formatter.py
index ec897b2ce..4f5cf424a 100644
--- a/tests/ebuild/test_formatter.py
+++ b/tests/ebuild/test_formatter.py
@@ -11,7 +11,10 @@ class FakeMutatedPkg(FakePkg):
def __str__(self):
# Yes this should be less hackish (and hardcoded values suck),
# but we can't really subclass MutatedPkg so this will have to do
- return f"MutatedPkg(built ebuild: {self.cpvstr}, overrides=('depend', 'rdepend'))"
+ return (
+ f"MutatedPkg(built ebuild: {self.cpvstr}, overrides=('depend', 'rdepend'))"
+ )
+
class FakeEbuildSrc(FakePkg):
def __str__(self):
@@ -21,17 +24,18 @@ class FakeEbuildSrc(FakePkg):
class FakeOp:
- def __init__(self, package, oldpackage=None, desc='add'):
+ def __init__(self, package, oldpackage=None, desc="add"):
self.pkg = package
if oldpackage:
self.old_pkg = oldpackage
- self.desc = 'replace'
+ self.desc = "replace"
else:
self.desc = desc
+
class BaseFormatterTest:
prefix = ()
- suffix = ('\n',)
+ suffix = ("\n",)
def setup_method(self):
self.fakeout = FakeStreamFormatter()
@@ -46,12 +50,15 @@ class BaseFormatterTest:
try:
ret = self.formatterClass.format(internal_self, *args, **kwds)
except Exception as exc:
- assert autoline == self.fakeout.autoline, \
- f"exception thrown {exc}, autoline was {autoline}, now is {self.fakeout.autoline}"
+ assert (
+ autoline == self.fakeout.autoline
+ ), f"exception thrown {exc}, autoline was {autoline}, now is {self.fakeout.autoline}"
raise
- assert autoline == self.fakeout.autoline, \
- f"autoline was {autoline}, now is {self.fakeout.autoline}"
+ assert (
+ autoline == self.fakeout.autoline
+ ), f"autoline was {autoline}, now is {self.fakeout.autoline}"
return ret
+
return state_verifying_class
def newFormatter(self, **kwargs):
@@ -89,17 +96,17 @@ class BaseFormatterTest:
for arg in args:
if isinstance(arg, str):
- strings.append(arg.encode('utf-8'))
+ strings.append(arg.encode("utf-8"))
elif isinstance(arg, bytes):
strings.append(arg)
else:
- objects.append(b''.join(strings))
+ objects.append(b"".join(strings))
strings = []
objects.append(arg)
- objects.append(b''.join(strings))
+ objects.append(b"".join(strings))
# Hack because a list with an empty string in is True
- if objects == [b'']:
+ if objects == [b""]:
objects = []
assert self.fakeout.stream == objects
@@ -107,7 +114,7 @@ class BaseFormatterTest:
def test_end(self):
# Sub-classes should override this if they print something in end()
- self.formatter.format(FakeOp(FakeMutatedPkg('dev-util/diffball-1.1')))
+ self.formatter.format(FakeOp(FakeMutatedPkg("dev-util/diffball-1.1")))
self.fakeout.resetstream()
self.formatter.end()
self.assertOut(suffix=())
@@ -119,14 +126,17 @@ class TestBasicFormatter(BaseFormatterTest):
def test_install(self):
# Make sure we ignore versions...
- self.formatter.format(FakeOp(FakeMutatedPkg('dev-util/diffball-1.1')))
- self.assertOut('dev-util/diffball')
+ self.formatter.format(FakeOp(FakeMutatedPkg("dev-util/diffball-1.1")))
+ self.assertOut("dev-util/diffball")
def test_reinstall(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
- FakeMutatedPkg('app-arch/bzip2-1.0.4')))
- self.assertOut('app-arch/bzip2')
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.4"),
+ )
+ )
+ self.assertOut("app-arch/bzip2")
class TestPkgcoreFormatter(BaseFormatterTest):
@@ -134,30 +144,46 @@ class TestPkgcoreFormatter(BaseFormatterTest):
formatterClass = PkgcoreFormatter
def test_install(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('dev-util/diffball-1.0')))
+ self.formatter.format(FakeOp(FakeEbuildSrc("dev-util/diffball-1.0")))
self.assertOut("add dev-util/diffball-1.0")
def test_install_repo(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('dev-util/diffball-1.0',
- repo=FakeRepo(repo_id='gentoo', location='/var/gentoo/repos/gentoo'))))
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc(
+ "dev-util/diffball-1.0",
+ repo=FakeRepo(
+ repo_id="gentoo", location="/var/gentoo/repos/gentoo"
+ ),
+ )
+ )
+ )
self.assertOut("add dev-util/diffball-1.0::gentoo")
def test_reinstall(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('dev-util/diffball-1.2'),
- FakeMutatedPkg('dev-util/diffball-1.1')))
- self.assertOut(
- "replace dev-util/diffball-1.1, "
- "dev-util/diffball-1.2")
+ FakeOp(
+ FakeEbuildSrc("dev-util/diffball-1.2"),
+ FakeMutatedPkg("dev-util/diffball-1.1"),
+ )
+ )
+ self.assertOut("replace dev-util/diffball-1.1, " "dev-util/diffball-1.2")
def test_reinstall_repo(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('dev-util/diffball-1.2',
- repo=FakeRepo(repo_id='gentoo', location='/var/gentoo/repos/gentoo')),
- FakeMutatedPkg('dev-util/diffball-1.1')))
+ FakeOp(
+ FakeEbuildSrc(
+ "dev-util/diffball-1.2",
+ repo=FakeRepo(
+ repo_id="gentoo", location="/var/gentoo/repos/gentoo"
+ ),
+ ),
+ FakeMutatedPkg("dev-util/diffball-1.1"),
+ )
+ )
self.assertOut(
- "replace dev-util/diffball-1.1, "
- "dev-util/diffball-1.2::gentoo")
+ "replace dev-util/diffball-1.1, " "dev-util/diffball-1.2::gentoo"
+ )
class CountingFormatterTest(BaseFormatterTest):
@@ -166,125 +192,161 @@ class CountingFormatterTest(BaseFormatterTest):
endsuffix = "\n"
def newFormatter(self, **kwargs):
- kwargs.setdefault('verbosity', 1)
+ kwargs.setdefault("verbosity", 1)
return super().newFormatter(**kwargs)
def assertEnd(self, *args, **kwargs):
- kwargs.setdefault('prefix', self.endprefix)
- kwargs.setdefault('suffix', self.endsuffix)
+ kwargs.setdefault("prefix", self.endprefix)
+ kwargs.setdefault("suffix", self.endsuffix)
super().assertOut(*args, **kwargs)
def test_end(self):
self.formatter.end()
- self.assertEnd('\nTotal: 0 packages')
+ self.assertEnd("\nTotal: 0 packages")
def test_end_new(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6')))
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6")))
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 1 package (1 new)')
+ self.assertEnd("\nTotal: 1 package (1 new)")
def test_end_new_multiple(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6')))
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/gzip-1.6')))
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6")))
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/gzip-1.6")))
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 2 packages (2 new)')
+ self.assertEnd("\nTotal: 2 packages (2 new)")
def test_end_newslot(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='1')))
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="1"))
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 1 package (1 in new slot)')
+ self.assertEnd("\nTotal: 1 package (1 in new slot)")
def test_end_newslot_multiple(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='1')))
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/gzip-1.6', slot='2')))
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="1"))
+ )
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/gzip-1.6", slot="2")))
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 2 packages (2 in new slots)')
+ self.assertEnd("\nTotal: 2 packages (2 in new slots)")
def test_end_downgrade(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
- FakeMutatedPkg('app-arch/bzip2-1.0.4')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.4"),
+ )
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 1 package (1 downgrade)')
+ self.assertEnd("\nTotal: 1 package (1 downgrade)")
def test_end_downgrade_multiple(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
- FakeMutatedPkg('app-arch/bzip2-1.0.4')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.4"),
+ )
+ )
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/gzip-1.5'),
- FakeMutatedPkg('app-arch/gzip-1.6')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/gzip-1.5"), FakeMutatedPkg("app-arch/gzip-1.6")
+ )
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 2 packages (2 downgrades)')
+ self.assertEnd("\nTotal: 2 packages (2 downgrades)")
def test_end_upgrade(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 1 package (1 upgrade)')
+ self.assertEnd("\nTotal: 1 package (1 upgrade)")
def test_end_upgrade_multiple(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/gzip-1.6'),
- FakeMutatedPkg('app-arch/gzip-1.5')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/gzip-1.6"), FakeMutatedPkg("app-arch/gzip-1.5")
+ )
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 2 packages (2 upgrades)')
+ self.assertEnd("\nTotal: 2 packages (2 upgrades)")
def test_end_reinstall(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
- FakeMutatedPkg('app-arch/bzip2-1.0.4')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.4"),
+ )
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 1 package (1 reinstall)')
+ self.assertEnd("\nTotal: 1 package (1 reinstall)")
def test_end_reinstall_multiple(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
- FakeMutatedPkg('app-arch/bzip2-1.0.4')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.4"),
+ )
+ )
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/gzip-1.6'),
- FakeMutatedPkg('app-arch/gzip-1.6')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/gzip-1.6"), FakeMutatedPkg("app-arch/gzip-1.6")
+ )
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertEnd('\nTotal: 2 packages (2 reinstalls)')
+ self.assertEnd("\nTotal: 2 packages (2 reinstalls)")
def test_end_all_ops_order(self):
# new
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/pkga-1.0.3-r6')))
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/pkga-1.0.3-r6")))
# new slot
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/pkgb-1.0.3-r6', slot='1')))
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/pkgb-1.0.3-r6", slot="1")))
# downgrade
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/pkgc-1.0.3-r6'),
- FakeMutatedPkg('app-arch/pkgc-1.0.4')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/pkgc-1.0.3-r6"),
+ FakeMutatedPkg("app-arch/pkgc-1.0.4"),
+ )
+ )
# upgrade
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/pkgd-1.0.4'),
- FakeMutatedPkg('app-arch/pkgd-1.0.3-r6')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/pkgd-1.0.4"),
+ FakeMutatedPkg("app-arch/pkgd-1.0.3-r6"),
+ )
+ )
# reinstall
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/pkge-1.0.4'),
- FakeMutatedPkg('app-arch/pkge-1.0.4')))
+ FakeOp(
+ FakeEbuildSrc("app-arch/pkge-1.0.4"),
+ FakeMutatedPkg("app-arch/pkge-1.0.4"),
+ )
+ )
self.fakeout.resetstream()
self.formatter.end()
self.assertEnd(
- '\nTotal: 5 packages (1 new, 1 upgrade, 1 downgrade, 1 in new slot, 1 reinstall)')
+ "\nTotal: 5 packages (1 new, 1 upgrade, 1 downgrade, 1 in new slot, 1 reinstall)"
+ )
class TestPortageFormatter(BaseFormatterTest):
@@ -292,156 +354,454 @@ class TestPortageFormatter(BaseFormatterTest):
formatterClass = PortageFormatter
def setup_method(self):
- pkg = FakeMutatedPkg('app-arch/bzip2-1.0.1-r1', slot='0')
- masked_atom = atom('>=app-arch/bzip2-2.0')
+ pkg = FakeMutatedPkg("app-arch/bzip2-1.0.1-r1", slot="0")
+ masked_atom = atom(">=app-arch/bzip2-2.0")
self.domain_settings = {"ACCEPT_KEYWORDS": ("amd64",)}
self.repo1 = FakeRepo(
- repo_id='gentoo', location='/var/gentoo/repos/gentoo',
- masks=(masked_atom,), domain_settings=self.domain_settings)
+ repo_id="gentoo",
+ location="/var/gentoo/repos/gentoo",
+ masks=(masked_atom,),
+ domain_settings=self.domain_settings,
+ )
self.repo2 = FakeRepo(
- repo_id='repo2', location='/var/gentoo/repos/repo2',
- domain_settings=self.domain_settings)
- self.vdb = FakeRepo(repo_id='vdb', pkgs=[pkg])
+ repo_id="repo2",
+ location="/var/gentoo/repos/repo2",
+ domain_settings=self.domain_settings,
+ )
+ self.vdb = FakeRepo(repo_id="vdb", pkgs=[pkg])
super().setup_method()
def newFormatter(self, **kwargs):
- kwargs.setdefault('quiet_repo_display', False)
- kwargs.setdefault('installed_repos', self.vdb)
+ kwargs.setdefault("quiet_repo_display", False)
+ kwargs.setdefault("installed_repos", self.vdb)
return super().newFormatter(**kwargs)
def repo_id(self, repo):
- if getattr(self.formatter, 'verbosity', 0):
- return '::' + repo.repo_id
- return ''
+ if getattr(self.formatter, "verbosity", 0):
+ return "::" + repo.repo_id
+ return ""
def test_new(self):
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.4', Reset())
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.4")))
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.4",
+ Reset(),
+ )
def test_remove(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'), desc='remove'))
- self.assertOut('[', Color('fg', 'red'), 'uninstall', Reset(),
- ' ] ', Color('fg', 'red'), 'app-arch/bzip2-1.0.4', Reset())
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.4"), desc="remove")
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "red"),
+ "uninstall",
+ Reset(),
+ " ] ",
+ Color("fg", "red"),
+ "app-arch/bzip2-1.0.4",
+ Reset(),
+ )
def test_upgrade(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'cyan'), Bold(), 'U', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.4', Reset(), ' ',
- Color('fg', 'blue'), Bold(), '[1.0.3-r6]', Reset())
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "cyan"),
+ Bold(),
+ "U",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.4",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.3-r6]",
+ Reset(),
+ )
def test_downgrade(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
- FakeMutatedPkg('app-arch/bzip2-1.0.4')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'cyan'), Bold(), 'U', Reset(),
- Color('fg', 'blue'), Bold(), 'D' , Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(), ' ',
- Color('fg', 'blue'), Bold(), '[1.0.4]', Reset())
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.4"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "cyan"),
+ Bold(),
+ "U",
+ Reset(),
+ Color("fg", "blue"),
+ Bold(),
+ "D",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.4]",
+ Reset(),
+ )
def test_reinstall(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', repo=self.repo1)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), f'app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}', Reset())
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo1),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", repo=self.repo1),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ f"app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}",
+ Reset(),
+ )
def test_reinstall_from_new_repo(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', repo=self.repo2)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), f'app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}', Reset(),
- ' ', Color('fg', 'blue'), Bold(), f'[1.0.3-r6{self.repo_id(self.repo2)}]', Reset())
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo1),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", repo=self.repo2),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ f"app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ f"[1.0.3-r6{self.repo_id(self.repo2)}]",
+ Reset(),
+ )
def test_new_use(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'red'), Bold(), 'static', Reset(), '"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ )
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "red"),
+ Bold(),
+ "static",
+ Reset(),
+ '"',
+ )
def test_new_nouse(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'blue'), Bold(), '-static', Reset(), '"')
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", iuse=["static"]))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "blue"),
+ Bold(),
+ "-static",
+ Reset(),
+ '"',
+ )
def test_nouse(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'yellow'), Bold(), '-static', Reset(), '%"')
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", iuse=["static"]),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "yellow"),
+ Bold(),
+ "-static",
+ Reset(),
+ '%"',
+ )
def test_iuse_defaults(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', eapi='1', iuse=['+static', '-junk'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'yellow'), Bold(), 'static', Reset(), "%* ",
- Color('fg', 'yellow'), Bold(), '-junk', Reset(), '%"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6",
+ eapi="1",
+ iuse=["+static", "-junk"],
+ use=["static"],
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "yellow"),
+ Bold(),
+ "static",
+ Reset(),
+ "%* ",
+ Color("fg", "yellow"),
+ Bold(),
+ "-junk",
+ Reset(),
+ '%"',
+ )
def test_use_enabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static']),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'green'), Bold(), 'static', Reset(), '*"')
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", iuse=["static"]),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "green"),
+ Bold(),
+ "static",
+ Reset(),
+ '*"',
+ )
def test_use_disabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'green'), Bold(), '-static', Reset(), '*"')
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", iuse=["static"]),
+ FakeMutatedPkg(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "green"),
+ Bold(),
+ "-static",
+ Reset(),
+ '*"',
+ )
def test_multiuse(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static', 'bootstrap'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'yellow'), Bold(), 'static', Reset(), '%* ',
- Color('fg', 'yellow'), Bold(), '-bootstrap', Reset(), '%"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6",
+ iuse=["static", "bootstrap"],
+ use=["static"],
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "yellow"),
+ Bold(),
+ "static",
+ Reset(),
+ "%* ",
+ Color("fg", "yellow"),
+ Bold(),
+ "-bootstrap",
+ Reset(),
+ '%"',
+ )
def test_misc(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='1')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), Color('fg', 'green'), Bold(),
- 'S', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' ', Color('fg', 'blue'), Bold(), '[1.0.1-r1]', Reset())
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="1"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ Color("fg", "green"),
+ Bold(),
+ "S",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.1-r1]",
+ Reset(),
+ )
def test_fetch_restrict_no_fetchables(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', restrict='fetch')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'f', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset())
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", restrict="fetch"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "f",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ )
# TODO
def test_fetch_restrict_missing_fetchables(self):
@@ -452,126 +812,356 @@ class TestPortageFormatter(BaseFormatterTest):
pass
def test_added_iuse_disabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['bootstrap']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6'),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'yellow'), Bold(), '-bootstrap', Reset(), '%"')
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", iuse=["bootstrap"]),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "yellow"),
+ Bold(),
+ "-bootstrap",
+ Reset(),
+ '%"',
+ )
def test_added_iuse_enabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6'),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'yellow'), Bold(), 'static', Reset(), '%*"')
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "yellow"),
+ Bold(),
+ "static",
+ Reset(),
+ '%*"',
+ )
def test_dropped_iuse_disabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['bootstrap']),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset())
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", iuse=["bootstrap"]),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ )
def test_dropped_iuse_enabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset())
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6"),
+ FakeMutatedPkg(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ )
def test_use_expand(self):
self.formatter = self.newFormatter(use_expand=set(["foo", "bar"]))
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6',
- iuse=['foo_static', 'foo_bootstrap', 'bar_baz'],
- use=['foo_static', 'bar_baz']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' BAR="', Color('fg', 'yellow'), Bold(), 'baz', Reset(), '%*"',
- ' FOO="', Color('fg', 'yellow'), Bold(), 'static', Reset(), '%* ',
- Color('fg', 'yellow'), Bold(), '-bootstrap', Reset(), '%"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6",
+ iuse=["foo_static", "foo_bootstrap", "bar_baz"],
+ use=["foo_static", "bar_baz"],
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' BAR="',
+ Color("fg", "yellow"),
+ Bold(),
+ "baz",
+ Reset(),
+ '%*"',
+ ' FOO="',
+ Color("fg", "yellow"),
+ Bold(),
+ "static",
+ Reset(),
+ "%* ",
+ Color("fg", "yellow"),
+ Bold(),
+ "-bootstrap",
+ Reset(),
+ '%"',
+ )
def test_disabled_use(self):
- self.formatter.pkg_get_use = lambda pkg: (set(), set(), set(['static']))
+ self.formatter.pkg_get_use = lambda pkg: (set(), set(), set(["static"]))
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static', 'bootstrap'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", iuse=["static", "bootstrap"])
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
' USE="',
- Color('fg', 'blue'), Bold(), '-bootstrap', Reset(), ' ',
- '(', Color('fg', 'blue'), Bold(), '-static', Reset(), ')"')
+ Color("fg", "blue"),
+ Bold(),
+ "-bootstrap",
+ Reset(),
+ " ",
+ "(",
+ Color("fg", "blue"),
+ Bold(),
+ "-static",
+ Reset(),
+ ')"',
+ )
def test_forced_use(self):
- self.formatter.pkg_get_use = lambda pkg: (set(['static']), set(), set())
+ self.formatter.pkg_get_use = lambda pkg: (set(["static"]), set(), set())
# new pkg: static use flag forced on
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="(', Color('fg', 'red'), Bold(), 'static', Reset(), ')"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ )
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="(',
+ Color("fg", "red"),
+ Bold(),
+ "static",
+ Reset(),
+ ')"',
+ )
# rebuilt pkg: toggled static use flag forced on
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="(', Color('fg', 'green'), Bold(), 'static', Reset(), '*)"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", iuse=["static"]),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="(',
+ Color("fg", "green"),
+ Bold(),
+ "static",
+ Reset(),
+ '*)"',
+ )
# rebuilt pkg: new static use flag forced on
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="(', Color('fg', 'yellow'), Bold(), 'static', Reset(), '%*)"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="(',
+ Color("fg", "yellow"),
+ Bold(),
+ "static",
+ Reset(),
+ '%*)"',
+ )
def test_forced_use_expand(self):
self.formatter = self.newFormatter(use_expand=set(["ABI_X86", "TARGETS"]))
- self.formatter.pkg_get_use = lambda pkg: (set(['targets_X86']), set(), set())
+ self.formatter.pkg_get_use = lambda pkg: (set(["targets_X86"]), set(), set())
# rebuilt pkg: new abi_x86_64 and targets_X86 USE flags,
# with abi_x86_64 disabled and targets_X86 forced on
self.formatter.format(
- FakeOp(FakeEbuildSrc(
- 'app-arch/bzip2-1.0.3-r6',
- iuse=['abi_x86_64', 'targets_X86'],
- use=['targets_X86']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' ABI_X86="', Color('fg', 'yellow'), Bold(), '-64', Reset(), '%"',
- ' TARGETS="(', Color('fg', 'yellow'), Bold(), 'X86', Reset(), '%*)"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6",
+ iuse=["abi_x86_64", "targets_X86"],
+ use=["targets_X86"],
+ ),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6"),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' ABI_X86="',
+ Color("fg", "yellow"),
+ Bold(),
+ "-64",
+ Reset(),
+ '%"',
+ ' TARGETS="(',
+ Color("fg", "yellow"),
+ Bold(),
+ "X86",
+ Reset(),
+ '%*)"',
+ )
def test_worldfile_atom(self):
- self.formatter.world_list = [atom('app-arch/bzip2')]
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6')))
- self.assertOut('[', Color('fg', 'green'), Bold(), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), Bold(), 'app-arch/bzip2-1.0.3-r6', Reset())
+ self.formatter.world_list = [atom("app-arch/bzip2")]
+ self.formatter.format(FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6")))
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ Bold(),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ Bold(),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ )
-class TestPortageVerboseFormatter(TestPortageFormatter):
+class TestPortageVerboseFormatter(TestPortageFormatter):
def newFormatter(self, **kwargs):
kwargs.setdefault("verbosity", 1)
kwargs.setdefault("unstable_arch", "~amd64")
@@ -579,196 +1169,646 @@ class TestPortageVerboseFormatter(TestPortageFormatter):
def test_install_symbol_unkeyworded(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1, keywords=())))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ',
- Color('fg', 'red'), Bold(), '*', Reset(), '] ',
- Color('fg', 'green'), f'app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}', Reset())
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo1, keywords=())
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ",
+ Color("fg", "red"),
+ Bold(),
+ "*",
+ Reset(),
+ "] ",
+ Color("fg", "green"),
+ f"app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}",
+ Reset(),
+ )
def test_install_symbol_unstable(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1, keywords=('~amd64',))))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ',
- Color('fg', 'yellow'), Bold(), '~', Reset(), '] ',
- Color('fg', 'green'), f'app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}', Reset())
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", repo=self.repo1, keywords=("~amd64",)
+ )
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "~",
+ Reset(),
+ "] ",
+ Color("fg", "green"),
+ f"app-arch/bzip2-1.0.3-r6{self.repo_id(self.repo1)}",
+ Reset(),
+ )
def test_install_symbol_masked(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-2.1', repo=self.repo1)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ',
- Color('fg', 'red'), Bold(), '#', Reset(), '] ',
- Color('fg', 'green'), f'app-arch/bzip2-2.1{self.repo_id(self.repo1)}', Reset())
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-2.1", repo=self.repo1))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ",
+ Color("fg", "red"),
+ Bold(),
+ "#",
+ Reset(),
+ "] ",
+ Color("fg", "green"),
+ f"app-arch/bzip2-2.1{self.repo_id(self.repo1)}",
+ Reset(),
+ )
def test_repo_id(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6::gentoo', Reset())
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo2)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6::repo2', Reset())
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4', repo=self.repo1),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', repo=self.repo1)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'cyan'), Bold(), 'U', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.4::gentoo', Reset(), ' ',
- Color('fg', 'blue'), Bold(), '[1.0.3-r6::gentoo]', Reset())
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4', repo=self.repo2),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', repo=self.repo1)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'cyan'), Bold(), 'U', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.4::repo2', Reset(), ' ',
- Color('fg', 'blue'), Bold(), '[1.0.3-r6::gentoo]', Reset())
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4', repo=self.repo1),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', repo=self.repo2)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'cyan'), Bold(), 'U', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.4::gentoo', Reset(), ' ',
- Color('fg', 'blue'), Bold(), '[1.0.3-r6::repo2]', Reset())
- self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4', repo=self.repo2),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', repo=self.repo2)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'cyan'), Bold(), 'U', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.4::repo2', Reset(), ' ',
- Color('fg', 'blue'), Bold(), '[1.0.3-r6::repo2]', Reset())
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo1))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6::gentoo",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo2))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6::repo2",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4", repo=self.repo1),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", repo=self.repo1),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "cyan"),
+ Bold(),
+ "U",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.4::gentoo",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.3-r6::gentoo]",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4", repo=self.repo2),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", repo=self.repo1),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "cyan"),
+ Bold(),
+ "U",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.4::repo2",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.3-r6::gentoo]",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4", repo=self.repo1),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", repo=self.repo2),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "cyan"),
+ Bold(),
+ "U",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.4::gentoo",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.3-r6::repo2]",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.4", repo=self.repo2),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", repo=self.repo2),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "cyan"),
+ Bold(),
+ "U",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.4::repo2",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.3-r6::repo2]",
+ Reset(),
+ )
def test_misc(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='0')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset())
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='0', subslot='0')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset())
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='0', subslot='2')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6:0/2', Reset())
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='foo')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), Color('fg', 'green'), Bold(),
- 'S', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6:foo', Reset(),
- ' ', Color('fg', 'blue'), Bold(), '[1.0.1-r1:0]', Reset())
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='1', subslot='0')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), Color('fg', 'green'), Bold(),
- 'S', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6:1/0', Reset(),
- ' ', Color('fg', 'blue'), Bold(), '[1.0.1-r1:0]', Reset())
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='2', subslot='foo')))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), Color('fg', 'green'), Bold(),
- 'S', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6:2/foo', Reset(),
- ' ', Color('fg', 'blue'), Bold(), '[1.0.1-r1:0]', Reset())
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="0"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="0", subslot="0"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="0", subslot="2"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6:0/2",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="foo"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ Color("fg", "green"),
+ Bold(),
+ "S",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6:foo",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.1-r1:0]",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="1", subslot="0"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ Color("fg", "green"),
+ Bold(),
+ "S",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6:1/0",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.1-r1:0]",
+ Reset(),
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", slot="2", subslot="foo"))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ Color("fg", "green"),
+ Bold(),
+ "S",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6:2/foo",
+ Reset(),
+ " ",
+ Color("fg", "blue"),
+ Bold(),
+ "[1.0.1-r1:0]",
+ Reset(),
+ )
def test_dropped_iuse_disabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['bootstrap']),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="(', Color('fg', 'yellow'), Bold(), '-bootstrap', Reset(), '%)"')
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6"),
+ FakeMutatedPkg("app-arch/bzip2-1.0.3-r6", iuse=["bootstrap"]),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="(',
+ Color("fg", "yellow"),
+ Bold(),
+ "-bootstrap",
+ Reset(),
+ '%)"',
+ )
def test_dropped_iuse_enabled(self):
- self.formatter.format(FakeOp(
- FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- ))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="(', Color('fg', 'yellow'), Bold(), '-static', Reset(), '%*)"')
+ self.formatter.format(
+ FakeOp(
+ FakeEbuildSrc("app-arch/bzip2-1.0.3-r6"),
+ FakeMutatedPkg(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="(',
+ Color("fg", "yellow"),
+ Bold(),
+ "-static",
+ Reset(),
+ '%*)"',
+ )
def test_changed_use(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6',
- iuse=['static', 'bootstrap', 'perl', 'foobar', 'rice'],
- use=['static', 'rice']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6',
- iuse=['bootstrap', 'foobar', 'rice', 'kazaam'],
- use=['foobar'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(), ' USE="',
- Color('fg', 'green'), Bold(), 'rice', Reset(), '* ',
- Color('fg', 'yellow'), Bold(), 'static', Reset(), '%* ',
- Color('fg', 'blue'), Bold(), '-bootstrap', Reset(), ' ',
- Color('fg', 'green'), Bold(), '-foobar', Reset(), '* ',
- Color('fg', 'yellow'), Bold(), '-perl', Reset(), '% ',
- '(', Color('fg', 'yellow'), Bold(), '-kazaam', Reset(), '%)"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6",
+ iuse=["static", "bootstrap", "perl", "foobar", "rice"],
+ use=["static", "rice"],
+ ),
+ FakeMutatedPkg(
+ "app-arch/bzip2-1.0.3-r6",
+ iuse=["bootstrap", "foobar", "rice", "kazaam"],
+ use=["foobar"],
+ ),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "green"),
+ Bold(),
+ "rice",
+ Reset(),
+ "* ",
+ Color("fg", "yellow"),
+ Bold(),
+ "static",
+ Reset(),
+ "%* ",
+ Color("fg", "blue"),
+ Bold(),
+ "-bootstrap",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "-foobar",
+ Reset(),
+ "* ",
+ Color("fg", "yellow"),
+ Bold(),
+ "-perl",
+ Reset(),
+ "% ",
+ "(",
+ Color("fg", "yellow"),
+ Bold(),
+ "-kazaam",
+ Reset(),
+ '%)"',
+ )
def test_forced_use_verbose(self):
- self.formatter.pkg_get_use = lambda pkg: (set(['static']), set(), set())
+ self.formatter.pkg_get_use = lambda pkg: (set(["static"]), set(), set())
# rebuilt pkg: unchanged static use flag forced on
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="(', Color('fg', 'red'), Bold(), 'static', Reset(), ')"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ FakeMutatedPkg(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="(',
+ Color("fg", "red"),
+ Bold(),
+ "static",
+ Reset(),
+ ')"',
+ )
def test_removed_use(self):
self.formatter.format(
- FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
- FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static', 'bootstrap', 'foo'], use=['static', 'foo'])))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
- ' USE="', Color('fg', 'red'), Bold(), 'static', Reset(), ' ',
- '(', Color('fg', 'yellow'), Bold(), '-bootstrap', Reset(), '%) ',
- '(', Color('fg', 'yellow'), Bold(), '-foo', Reset(), '%*)"')
+ FakeOp(
+ FakeEbuildSrc(
+ "app-arch/bzip2-1.0.3-r6", iuse=["static"], use=["static"]
+ ),
+ FakeMutatedPkg(
+ "app-arch/bzip2-1.0.3-r6",
+ iuse=["static", "bootstrap", "foo"],
+ use=["static", "foo"],
+ ),
+ )
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "yellow"),
+ Bold(),
+ "R",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ ' USE="',
+ Color("fg", "red"),
+ Bold(),
+ "static",
+ Reset(),
+ " ",
+ "(",
+ Color("fg", "yellow"),
+ Bold(),
+ "-bootstrap",
+ Reset(),
+ "%) ",
+ "(",
+ Color("fg", "yellow"),
+ Bold(),
+ "-foo",
+ Reset(),
+ '%*)"',
+ )
def test_end(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1)))
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo2)))
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo1))
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo2))
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertOut('\nTotal: 2 packages (2 new)\n\n',
- suffix=[''])
+ self.assertOut("\nTotal: 2 packages (2 new)\n\n", suffix=[""])
+
class TestPortageVerboseRepoIdFormatter(TestPortageVerboseFormatter):
- suffix = [Color("fg", "cyan"), ' [1]\n']
+ suffix = [Color("fg", "cyan"), " [1]\n"]
def setup_method(self):
super().setup_method()
self.repo3 = FakeRepo(
- location='/var/gentoo/repos/repo3', domain_settings=self.domain_settings)
+ location="/var/gentoo/repos/repo3", domain_settings=self.domain_settings
+ )
def newFormatter(self, **kwargs):
kwargs.setdefault("quiet_repo_display", True)
return super().newFormatter(**kwargs)
def repo_id(self, repo):
- return ''
+ return ""
def test_repo_id(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1)))
- self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(),
- ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
- Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset())
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo1))
+ )
+ self.assertOut(
+ "[",
+ Color("fg", "green"),
+ "ebuild",
+ Reset(),
+ " ",
+ Color("fg", "green"),
+ Bold(),
+ "N",
+ Reset(),
+ " ] ",
+ Color("fg", "green"),
+ "app-arch/bzip2-1.0.3-r6",
+ Reset(),
+ )
def test_end(self):
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo1)))
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo2)))
- self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo3)))
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo1))
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo2))
+ )
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc("app-arch/bzip2-1.0.3-r6", repo=self.repo3))
+ )
self.fakeout.resetstream()
self.formatter.end()
- self.assertOut('\nTotal: 3 packages (3 new)\n\n',
- ' ', Color('fg', 'cyan'), '[1]', Reset(),' gentoo (/var/gentoo/repos/gentoo)\n',
- ' ', Color('fg', 'cyan'), '[2]', Reset(),' repo2 (/var/gentoo/repos/repo2)\n',
- ' ', Color('fg', 'cyan'), '[3]', Reset(),' /var/gentoo/repos/repo3\n',
- suffix=[''])
+ self.assertOut(
+ "\nTotal: 3 packages (3 new)\n\n",
+ " ",
+ Color("fg", "cyan"),
+ "[1]",
+ Reset(),
+ " gentoo (/var/gentoo/repos/gentoo)\n",
+ " ",
+ Color("fg", "cyan"),
+ "[2]",
+ Reset(),
+ " repo2 (/var/gentoo/repos/repo2)\n",
+ " ",
+ Color("fg", "cyan"),
+ "[3]",
+ Reset(),
+ " /var/gentoo/repos/repo3\n",
+ suffix=[""],
+ )
diff --git a/tests/ebuild/test_misc.py b/tests/ebuild/test_misc.py
index 53a3e7bf0..978208902 100644
--- a/tests/ebuild/test_misc.py
+++ b/tests/ebuild/test_misc.py
@@ -26,9 +26,12 @@ class Test_collapsed_restrict_to_data:
# ensure AlwaysFalse is ignored.
self.assert_state(self.kls([(AlwaysFalse, srange)]))
# check always ordering.
- self.assert_state(self.kls([(AlwaysTrue, ['x'])],
- [(AlwaysTrue, ['x', 'y']), (AlwaysTrue, ['-x'])]),
- defaults=['y'])
+ self.assert_state(
+ self.kls(
+ [(AlwaysTrue, ["x"])], [(AlwaysTrue, ["x", "y"]), (AlwaysTrue, ["-x"])]
+ ),
+ defaults=["y"],
+ )
class TestIncrementalExpansion:
@@ -39,7 +42,7 @@ class TestIncrementalExpansion:
self.f(("-a", "b", "-b", "-b", "c"), orig=s)
assert set(s) == {"c"}
with pytest.raises(ValueError):
- self.f({'-'})
+ self.f({"-"})
def test_non_finalized(self):
s = set("ab")
@@ -47,9 +50,9 @@ class TestIncrementalExpansion:
assert set(s) == {"-a", "-b", "c"}
def test_starred(self):
- s = set('ab')
- self.f(('c', '-*', 'd'), orig=s)
- assert set(s) == {'d'}
+ s = set("ab")
+ self.f(("c", "-*", "d"), orig=s)
+ assert set(s) == {"d"}
def test_IncrementalsDict():
@@ -68,7 +71,7 @@ def test_IncrementalsDict():
del d["a1"]
del expected["a1"]
assert d == expected
- assert d['i1'] == "1 2"
+ assert d["i1"] == "1 2"
assert d
assert set(d) == {"i1"}
d.clear()
@@ -76,17 +79,24 @@ def test_IncrementalsDict():
assert len(d) == 0
-@pytest.mark.parametrize('expected,source,target',
- [('../../bin/foo', '/bin/foo', '/usr/bin/foo'),
- ('../../../doc/foo-1', '/usr/share/doc/foo-1', '/usr/share/texmf-site/doc/fonts/foo'),
- ('../../opt/bar/foo', '/opt/bar/foo', '/usr/bin/foo'),
- ('../c/d/e', '/a/b/c/d/e', 'a/b/f/g'),
- ('b/f', '/a/b///./c/d/../e/..//../f', '/a/././///g/../h'),
- ('../h', '/a/././///g/../h', '/a/b///./c/d/../e/..//../f'),
- ('.', '/foo', '/foo/bar'),
- ('..', '/foo', '/foo/bar/baz'),
- ('../../fo . o/b ar', '/fo . o/b ar', '/baz / qu .. ux/qu x'),
- (r'../../f"o\o/b$a[]r', r'/f"o\o/b$a[]r', r'/ba\z/qu$u"x/qux'),
- ])
+@pytest.mark.parametrize(
+ "expected,source,target",
+ [
+ ("../../bin/foo", "/bin/foo", "/usr/bin/foo"),
+ (
+ "../../../doc/foo-1",
+ "/usr/share/doc/foo-1",
+ "/usr/share/texmf-site/doc/fonts/foo",
+ ),
+ ("../../opt/bar/foo", "/opt/bar/foo", "/usr/bin/foo"),
+ ("../c/d/e", "/a/b/c/d/e", "a/b/f/g"),
+ ("b/f", "/a/b///./c/d/../e/..//../f", "/a/././///g/../h"),
+ ("../h", "/a/././///g/../h", "/a/b///./c/d/../e/..//../f"),
+ (".", "/foo", "/foo/bar"),
+ ("..", "/foo", "/foo/bar/baz"),
+ ("../../fo . o/b ar", "/fo . o/b ar", "/baz / qu .. ux/qu x"),
+ (r'../../f"o\o/b$a[]r', r'/f"o\o/b$a[]r', r'/ba\z/qu$u"x/qux'),
+ ],
+)
def test_get_relative_dosym_target(expected, source, target):
assert expected == misc.get_relative_dosym_target(source, target)
diff --git a/tests/ebuild/test_portage_conf.py b/tests/ebuild/test_portage_conf.py
index dcaaedceb..395373b85 100644
--- a/tests/ebuild/test_portage_conf.py
+++ b/tests/ebuild/test_portage_conf.py
@@ -17,101 +17,109 @@ load_repos_conf = PortageConfig.load_repos_conf
class TestMakeConf:
-
def test_load_defaults(self):
make_globals = {}
- load_make_conf(make_globals, pjoin(const.CONFIG_PATH, 'make.globals'))
- assert 'PORTAGE_TMPDIR' in make_globals
+ load_make_conf(make_globals, pjoin(const.CONFIG_PATH, "make.globals"))
+ assert "PORTAGE_TMPDIR" in make_globals
def test_nonexistent_file(self, tmp_path):
d = {}
# by default files are required
with pytest.raises(config_errors.ParsingError):
- load_make_conf(d, tmp_path / 'make.globals')
+ load_make_conf(d, tmp_path / "make.globals")
# should return empty dict when not required
- load_make_conf(d, tmp_path / 'make.conf', required=False)
+ load_make_conf(d, tmp_path / "make.conf", required=False)
assert not d
@pytest.mark.skipif(os.getuid() == 0, reason="need to be non root")
def test_unreadable_file(self, tmp_path):
d = {}
- (path := tmp_path / 'file').touch()
+ (path := tmp_path / "file").touch()
path.chmod(stat.S_IWUSR)
with pytest.raises(base_errors.PermissionDenied):
load_make_conf(d, path)
def test_overrides_incrementals(self, tmp_path):
- (path := tmp_path / 'file').write_bytes(b'DISTDIR=foo\n')
+ (path := tmp_path / "file").write_bytes(b"DISTDIR=foo\n")
d = {}
- load_make_conf(d, pjoin(const.CONFIG_PATH, 'make.globals'))
+ load_make_conf(d, pjoin(const.CONFIG_PATH, "make.globals"))
load_make_conf(d, path, allow_sourcing=True, incrementals=True)
- assert d['DISTDIR'] == 'foo'
+ assert d["DISTDIR"] == "foo"
def test_load_make_conf_dir(self, tmp_path):
# load files from dir and symlinked dir
- (make_conf_dir := tmp_path / 'make.conf').mkdir()
- (make_conf_dir / 'a').write_text('DISTDIR=foo\n')
- (make_conf_sym := tmp_path / 'make.conf.sym').symlink_to(make_conf_dir)
+ (make_conf_dir := tmp_path / "make.conf").mkdir()
+ (make_conf_dir / "a").write_text("DISTDIR=foo\n")
+ (make_conf_sym := tmp_path / "make.conf.sym").symlink_to(make_conf_dir)
d = {}
- load_make_conf(d, pjoin(const.CONFIG_PATH, 'make.globals'))
+ load_make_conf(d, pjoin(const.CONFIG_PATH, "make.globals"))
sym_d = d.copy()
load_make_conf(d, make_conf_dir)
load_make_conf(sym_d, make_conf_sym)
assert d == sym_d
- assert d['DISTDIR'] == 'foo'
+ assert d["DISTDIR"] == "foo"
class TestReposConf:
-
def test_load_defaults(self):
- _, global_repos_conf = load_repos_conf(pjoin(const.CONFIG_PATH, 'repos.conf'))
- assert 'gentoo' in global_repos_conf
+ _, global_repos_conf = load_repos_conf(pjoin(const.CONFIG_PATH, "repos.conf"))
+ assert "gentoo" in global_repos_conf
def test_nonexistent_file(self, tmp_path):
with pytest.raises(config_errors.ParsingError):
- load_repos_conf(tmp_path / 'repos.conf')
+ load_repos_conf(tmp_path / "repos.conf")
@pytest.mark.skipif(os.getuid() == 0, reason="need to be non root")
def test_unreadable_file(self, tmp_path):
- (path := tmp_path / 'file').touch()
+ (path := tmp_path / "file").touch()
path.chmod(stat.S_IWUSR)
with pytest.raises(base_errors.PermissionDenied):
load_repos_conf(path)
def test_blank_file(self, tmp_path, caplog):
- (path := tmp_path / 'file').touch()
+ (path := tmp_path / "file").touch()
load_repos_conf(path)
- assert 'file is empty' in caplog.text
+ assert "file is empty" in caplog.text
def test_garbage_file(self, tmp_path):
- (path := tmp_path / 'file').write_bytes(binascii.b2a_hex(os.urandom(10)))
+ (path := tmp_path / "file").write_bytes(binascii.b2a_hex(os.urandom(10)))
with pytest.raises(config_errors.ConfigurationError):
load_repos_conf(path)
def test_missing_location(self, tmp_path, caplog):
- (path := tmp_path / 'file').write_text(textwrap.dedent('''\
+ (path := tmp_path / "file").write_text(
+ textwrap.dedent(
+ """\
[foo]
- sync-uri = git://foo.git'''))
+ sync-uri = git://foo.git"""
+ )
+ )
load_repos_conf(path)
assert "'foo' repo missing location setting" in caplog.text
def test_bad_priority(self, tmp_path, caplog):
# bad priority value causes fallback to the default
- (path := tmp_path / 'file').write_text(textwrap.dedent('''\
+ (path := tmp_path / "file").write_text(
+ textwrap.dedent(
+ """\
[foo]
priority = foo
location = /var/gentoo/repos/foo
[gentoo]
- location = /var/gentoo/repos/gentoo'''))
+ location = /var/gentoo/repos/gentoo"""
+ )
+ )
defaults, repos = load_repos_conf(path)
- assert repos['foo']['priority'] == 0
+ assert repos["foo"]["priority"] == 0
assert "'foo' repo has invalid priority setting" in caplog.text
def test_overriding_defaults_same_file(self, tmp_path):
# overriding defaults in the same file throws an exception from configparser
- (path := tmp_path / 'file').write_text(textwrap.dedent('''\
+ (path := tmp_path / "file").write_text(
+ textwrap.dedent(
+ """\
[DEFAULT]
main-repo = gentoo
[DEFAULT]
@@ -121,61 +129,81 @@ class TestReposConf:
priority = foo
location = /var/gentoo/repos/foo
[gentoo]
- location = /var/gentoo/repos/gentoo'''))
+ location = /var/gentoo/repos/gentoo"""
+ )
+ )
with pytest.raises(config_errors.ConfigurationError):
load_repos_conf(path)
def test_undefined_main_repo(self, tmp_path):
# undefined main repo with 'gentoo' missing
- (path := tmp_path / 'file').write_text(textwrap.dedent('''\
+ (path := tmp_path / "file").write_text(
+ textwrap.dedent(
+ """\
[foo]
- location = /var/gentoo/repos/foo'''))
+ location = /var/gentoo/repos/foo"""
+ )
+ )
with pytest.raises(config_errors.UserConfigError):
load_repos_conf(path)
def test_optional_default_section(self, tmp_path, caplog):
# default section isn't required as long as gentoo repo exists
- (path := tmp_path / 'file').write_text(textwrap.dedent('''\
+ (path := tmp_path / "file").write_text(
+ textwrap.dedent(
+ """\
[foo]
location = /var/gentoo/repos/foo
[gentoo]
- location = /var/gentoo/repos/gentoo'''))
+ location = /var/gentoo/repos/gentoo"""
+ )
+ )
defaults, repos = load_repos_conf(path)
- assert defaults['main-repo'] == 'gentoo'
- assert list(repos.keys()) == ['foo', 'gentoo']
+ assert defaults["main-repo"] == "gentoo"
+ assert list(repos.keys()) == ["foo", "gentoo"]
assert not caplog.text
def test_overriding_sections_same_file(self, tmp_path):
# overriding sections in the same file throws an exception from configparser
- (path := tmp_path / 'file').write_text(textwrap.dedent('''\
+ (path := tmp_path / "file").write_text(
+ textwrap.dedent(
+ """\
[DEFAULT]
main-repo = foo
[foo]
priority = 3
location = /var/gentoo/repos/gentoo
[foo]
- location = /var/gentoo/repos/foo'''))
+ location = /var/gentoo/repos/foo"""
+ )
+ )
with pytest.raises(config_errors.ConfigurationError):
load_repos_conf(path)
def test_load_repos_conf_dir(self, tmp_path):
# repo priority sorting and dir/symlink scanning
- (repos_conf_dir := tmp_path / 'repos.conf').mkdir()
- shutil.copyfile(pjoin(const.CONFIG_PATH, 'repos.conf'), repos_conf_dir / 'repos.conf')
- (repos_conf_sym := tmp_path / 'repos.conf.sym').symlink_to(repos_conf_dir)
-
- (repos_conf_sym / 'file').write_text(textwrap.dedent('''\
+ (repos_conf_dir := tmp_path / "repos.conf").mkdir()
+ shutil.copyfile(
+ pjoin(const.CONFIG_PATH, "repos.conf"), repos_conf_dir / "repos.conf"
+ )
+ (repos_conf_sym := tmp_path / "repos.conf.sym").symlink_to(repos_conf_dir)
+
+ (repos_conf_sym / "file").write_text(
+ textwrap.dedent(
+ """\
[bar]
location = /var/gentoo/repos/bar
[foo]
location = /var/gentoo/repos/foo
- priority = 10'''))
+ priority = 10"""
+ )
+ )
defaults, repos = load_repos_conf(repos_conf_dir)
sym_defaults, sym_repos = load_repos_conf(repos_conf_sym)
assert defaults == sym_defaults
assert repos == sym_repos
- assert defaults['main-repo'] == 'gentoo'
- assert list(repos.keys()) == ['foo', 'bar', 'gentoo', 'binpkgs']
+ assert defaults["main-repo"] == "gentoo"
+ assert list(repos.keys()) == ["foo", "bar", "gentoo", "binpkgs"]
diff --git a/tests/ebuild/test_profiles.py b/tests/ebuild/test_profiles.py
index a7abfe9f2..7957c7c53 100644
--- a/tests/ebuild/test_profiles.py
+++ b/tests/ebuild/test_profiles.py
@@ -22,9 +22,8 @@ class ProfileNode(profiles.ProfileNode):
class profile_mixin:
-
def mk_profile(self, tmp_path, profile_name):
- return self.mk_profiles(tmp_path, {'name': profile_name})
+ return self.mk_profiles(tmp_path, {"name": profile_name})
def mk_profiles(self, tmp_path, *profiles, **kwds):
for x in tmp_path.iterdir():
@@ -48,9 +47,14 @@ class profile_mixin:
def assertEqualChunks(self, given_mapping, desired_mapping):
def f(chunk):
- return chunked_data(chunk.key, tuple(sorted(chunk.neg)), tuple(sorted(chunk.pos)))
+ return chunked_data(
+ chunk.key, tuple(sorted(chunk.neg)), tuple(sorted(chunk.pos))
+ )
+
given_mapping.optimize()
- return self._assertEqualPayload(given_mapping.render_to_dict(), desired_mapping, f, chunked_data)
+ return self._assertEqualPayload(
+ given_mapping.render_to_dict(), desired_mapping, f, chunked_data
+ )
def assertEqualPayload(self, given_mapping, desired_mapping):
def f(chunk):
@@ -67,20 +71,26 @@ class profile_mixin:
for key, desired in desired_mapping.items():
got = given_mapping[key]
# sanity check the desired data, occasionally screw this up
- assert not isinstance(desired, bare_kls), f"key {key!r}, bad test invocation; " \
+ assert not isinstance(desired, bare_kls), (
+ f"key {key!r}, bad test invocation; "
f"bare {bare_kls.__name__} instead of a tuple; val {got!r}"
+ )
assert isinstance(got, tuple), f"key {key!r}, non tuple: {got!r}"
- assert not isinstance(got, bare_kls), f"key {key!r}, bare {bare_kls.__name__}, " \
- f"rather than tuple: {got!r}"
- assert all(isinstance(x, bare_kls) for x in got), \
- f"non {bare_kls.__name__} instance: key {key!r}, got {got!r}; types {list(map(type, got))}"
- got2, desired2 = tuple(map(reformat_f, got)), tuple(map(reformat_f, desired))
+ assert not isinstance(got, bare_kls), (
+ f"key {key!r}, bare {bare_kls.__name__}, " f"rather than tuple: {got!r}"
+ )
+ assert all(
+ isinstance(x, bare_kls) for x in got
+ ), f"non {bare_kls.__name__} instance: key {key!r}, got {got!r}; types {list(map(type, got))}"
+ got2, desired2 = tuple(map(reformat_f, got)), tuple(
+ map(reformat_f, desired)
+ )
assert got2 == desired2
-
empty = ((), ())
+
class TestPmsProfileNode(profile_mixin):
klass = staticmethod(ProfileNode)
@@ -109,12 +119,17 @@ class TestPmsProfileNode(profile_mixin):
path = tmp_path / self.profile
self.write_file(tmp_path, filename, data)
getattr(self.klass(path), attr)
- self.write_file(tmp_path, filename, "-")
+ self.write_file(tmp_path, filename, "-")
self.wipe_path(path / filename)
- def simple_eapi_awareness_check(self, tmp_path, filename, attr,
- bad_data="dev-util/diffball\ndev-util/bsdiff:1",
- good_data="dev-util/diffball\ndev-util/bsdiff"):
+ def simple_eapi_awareness_check(
+ self,
+ tmp_path,
+ filename,
+ attr,
+ bad_data="dev-util/diffball\ndev-util/bsdiff:1",
+ good_data="dev-util/diffball\ndev-util/bsdiff",
+ ):
# validate unset eapi=0 prior
self.parsing_checks(tmp_path, filename, attr, data=good_data)
self.write_file(tmp_path, "eapi", "1")
@@ -125,9 +140,9 @@ class TestPmsProfileNode(profile_mixin):
def test_eapi(self, tmp_path):
path = tmp_path / self.profile
- assert str(self.klass(path).eapi) == '0'
+ assert str(self.klass(path).eapi) == "0"
self.write_file(tmp_path, "eapi", "1")
- assert str(self.klass(path).eapi) == '1'
+ assert str(self.klass(path).eapi) == "1"
def test_packages(self, tmp_path):
path = tmp_path / self.profile
@@ -138,10 +153,17 @@ class TestPmsProfileNode(profile_mixin):
self.write_file(tmp_path, "packages", "#foo\ndev-util/diffball\n")
assert self.klass(path).system == empty
- self.write_file(tmp_path, "packages", "-dev-util/diffball\ndev-foo/bar\n*dev-sys/atom\n"
- "-*dev-sys/atom2\nlock-foo/dar")
- assert self.klass(path).system == ((atom("dev-sys/atom2"),), (atom("dev-sys/atom"),))
- self.simple_eapi_awareness_check(tmp_path, 'packages', 'system')
+ self.write_file(
+ tmp_path,
+ "packages",
+ "-dev-util/diffball\ndev-foo/bar\n*dev-sys/atom\n"
+ "-*dev-sys/atom2\nlock-foo/dar",
+ )
+ assert self.klass(path).system == (
+ (atom("dev-sys/atom2"),),
+ (atom("dev-sys/atom"),),
+ )
+ self.simple_eapi_awareness_check(tmp_path, "packages", "system")
def test_deprecated(self, tmp_path):
path = tmp_path / self.profile
@@ -156,9 +178,15 @@ class TestPmsProfileNode(profile_mixin):
assert self.klass(path).pkg_provided == ((), ())
self.parsing_checks(tmp_path, "package.provided", "pkg_provided")
self.write_file(tmp_path, "package.provided", "-dev-util/diffball-1.0")
- assert self.klass(path).pkg_provided == ((CPV.versioned("dev-util/diffball-1.0"),), ())
+ assert self.klass(path).pkg_provided == (
+ (CPV.versioned("dev-util/diffball-1.0"),),
+ (),
+ )
self.write_file(tmp_path, "package.provided", "dev-util/diffball-1.0")
- assert self.klass(path).pkg_provided == ((), (CPV.versioned("dev-util/diffball-1.0"),))
+ assert self.klass(path).pkg_provided == (
+ (),
+ (CPV.versioned("dev-util/diffball-1.0"),),
+ )
def test_masks(self, tmp_path):
path = tmp_path / self.profile
@@ -168,7 +196,7 @@ class TestPmsProfileNode(profile_mixin):
assert self.klass(path).masks == ((), (atom("dev-util/diffball"),))
self.write_file(tmp_path, "package.mask", "-dev-util/diffball")
assert self.klass(path).masks == ((atom("dev-util/diffball"),), ())
- self.simple_eapi_awareness_check(tmp_path, 'package.mask', 'masks')
+ self.simple_eapi_awareness_check(tmp_path, "package.mask", "masks")
def test_unmasks(self, tmp_path):
path = tmp_path / self.profile
@@ -178,7 +206,7 @@ class TestPmsProfileNode(profile_mixin):
assert self.klass(path).unmasks == ((), (atom("dev-util/diffball"),))
self.write_file(tmp_path, "package.unmask", "-dev-util/diffball")
assert self.klass(path).unmasks == ((atom("dev-util/diffball"),), ())
- self.simple_eapi_awareness_check(tmp_path, 'package.unmask', 'unmasks')
+ self.simple_eapi_awareness_check(tmp_path, "package.unmask", "unmasks")
def test_pkg_deprecated(self, tmp_path):
path = tmp_path / self.profile
@@ -188,44 +216,64 @@ class TestPmsProfileNode(profile_mixin):
assert self.klass(path).pkg_deprecated == ((), (atom("dev-util/diffball"),))
self.write_file(tmp_path, "package.deprecated", "-dev-util/diffball")
assert self.klass(path).pkg_deprecated == ((atom("dev-util/diffball"),), ())
- self.simple_eapi_awareness_check(tmp_path, 'package.deprecated', 'pkg_deprecated')
+ self.simple_eapi_awareness_check(
+ tmp_path, "package.deprecated", "pkg_deprecated"
+ )
def _check_package_use_files(self, tmp_path, caplog, path, filename, attr):
self.write_file(tmp_path, filename, "dev-util/bar X")
- self.assertEqualChunks(getattr(self.klass(path), attr),
- {"dev-util/bar":(chunked_data(atom("dev-util/bar"), (), ('X',)),)})
+ self.assertEqualChunks(
+ getattr(self.klass(path), attr),
+ {"dev-util/bar": (chunked_data(atom("dev-util/bar"), (), ("X",)),)},
+ )
caplog.clear()
self.write_file(tmp_path, filename, "-dev-util/bar X")
- getattr(self.klass(path), attr) # illegal atom, but only a log is thrown
+ getattr(self.klass(path), attr) # illegal atom, but only a log is thrown
assert "invalid package atom: '-dev-util/bar'" in caplog.text
# verify collapsing optimizations
self.write_file(tmp_path, filename, "dev-util/foo X\ndev-util/foo X")
- self.assertEqualChunks(getattr(self.klass(path), attr),
- {"dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ('X',)),)})
+ self.assertEqualChunks(
+ getattr(self.klass(path), attr),
+ {"dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),)},
+ )
self.write_file(tmp_path, filename, "d-u/a X\n=d-u/a-1 X")
- self.assertEqualChunks(getattr(self.klass(path), attr),
- {"d-u/a":(chunked_data(atom("d-u/a"), (), ('X',)),)})
+ self.assertEqualChunks(
+ getattr(self.klass(path), attr),
+ {"d-u/a": (chunked_data(atom("d-u/a"), (), ("X",)),)},
+ )
self.write_file(tmp_path, filename, "d-u/a X\n=d-u/a-1 -X")
- self.assertEqualChunks(getattr(self.klass(path), attr),
- {"d-u/a":(chunked_data(atom("d-u/a"), (), ('X',)),
- chunked_data(atom("=d-u/a-1"), ('X',), ()),)})
+ self.assertEqualChunks(
+ getattr(self.klass(path), attr),
+ {
+ "d-u/a": (
+ chunked_data(atom("d-u/a"), (), ("X",)),
+ chunked_data(atom("=d-u/a-1"), ("X",), ()),
+ )
+ },
+ )
self.write_file(tmp_path, filename, "=d-u/a-1 X\nd-u/a X")
- self.assertEqualChunks(getattr(self.klass(path), attr),
- {"d-u/a":(chunked_data(atom("d-u/a"), (), ('X',)),)})
+ self.assertEqualChunks(
+ getattr(self.klass(path), attr),
+ {"d-u/a": (chunked_data(atom("d-u/a"), (), ("X",)),)},
+ )
self.write_file(tmp_path, filename, "dev-util/bar -X\ndev-util/foo X")
- self.assertEqualChunks(getattr(self.klass(path), attr),
- {"dev-util/bar":(chunked_data(atom("dev-util/bar"), ('X',), ()),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ('X',)),)})
+ self.assertEqualChunks(
+ getattr(self.klass(path), attr),
+ {
+ "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ()),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),),
+ },
+ )
caplog.clear()
self.write_file(tmp_path, filename, "dev-util/diffball")
- getattr(self.klass(path), attr) # missing use flag, but only a log is thrown
+ getattr(self.klass(path), attr) # missing use flag, but only a log is thrown
assert "missing USE flag(s): 'dev-util/diffball'" in caplog.text
def test_pkg_keywords(self, tmp_path):
@@ -240,7 +288,9 @@ class TestPmsProfileNode(profile_mixin):
assert self.klass(path).keywords == ()
self.write_file(tmp_path, "package.keywords", ">=dev-util/foo-2 -amd64 ~amd64")
- assert self.klass(path).keywords == ((atom(">=dev-util/foo-2"), ("-amd64", "~amd64")),)
+ assert self.klass(path).keywords == (
+ (atom(">=dev-util/foo-2"), ("-amd64", "~amd64")),
+ )
def test_pkg_accept_keywords(self, tmp_path):
path = tmp_path / self.profile
@@ -249,7 +299,9 @@ class TestPmsProfileNode(profile_mixin):
self.write_file(tmp_path, "package.accept_keywords", "mmx")
self.write_file(tmp_path, "package.accept_keywords", "dev-util/foo ~amd64")
- assert self.klass(path).accept_keywords == ((atom("dev-util/foo"), ("~amd64",)),)
+ assert self.klass(path).accept_keywords == (
+ (atom("dev-util/foo"), ("~amd64",)),
+ )
self.write_file(tmp_path, "package.accept_keywords", "")
assert self.klass(path).accept_keywords == ()
@@ -267,37 +319,67 @@ class TestPmsProfileNode(profile_mixin):
self.parsing_checks(tmp_path, "use.mask", "masked_use")
self.write_file(tmp_path, "use.mask", "")
- self._check_package_use_files(tmp_path, caplog, path, "package.use.mask", 'masked_use')
+ self._check_package_use_files(
+ tmp_path, caplog, path, "package.use.mask", "masked_use"
+ )
self.write_file(tmp_path, "package.use.mask", "dev-util/bar -X\ndev-util/foo X")
self.write_file(tmp_path, "use.mask", "mmx")
- self.assertEqualChunks(self.klass(path).masked_use, {
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('X',), ('mmx',)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ('X', 'mmx')),),
- atrue:(chunked_data(packages.AlwaysTrue, (), ("mmx",)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).masked_use,
+ {
+ "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ("mmx",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X", "mmx")),),
+ atrue: (chunked_data(packages.AlwaysTrue, (), ("mmx",)),),
+ },
+ )
self.write_file(tmp_path, "use.mask", "mmx\n-foon")
- self.assertEqualChunks(self.klass(path).masked_use, {
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('X', 'foon'), ('mmx',)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), ('foon',), ('X', 'mmx',)),),
- atrue: (chunked_data(packages.AlwaysTrue, ('foon',), ('mmx',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).masked_use,
+ {
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), ("X", "foon"), ("mmx",)),
+ ),
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ ("foon",),
+ (
+ "X",
+ "mmx",
+ ),
+ ),
+ ),
+ atrue: (chunked_data(packages.AlwaysTrue, ("foon",), ("mmx",)),),
+ },
+ )
# verify that use.mask is layered first, then package.use.mask
self.write_file(tmp_path, "package.use.mask", "dev-util/bar -mmx foon")
- self.assertEqualChunks(self.klass(path).masked_use, {
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),),
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('mmx',), ('foon',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).masked_use,
+ {
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), ("mmx",), ("foon",)),
+ ),
+ },
+ )
self.write_file(tmp_path, "package.use.mask", "")
- self.assertEqualChunks(self.klass(path).masked_use,
- {atrue:(chunked_data(atrue, ('foon',),('mmx',)),)})
- self.simple_eapi_awareness_check(tmp_path, 'package.use.mask', 'masked_use',
- bad_data='=de/bs-1:1 x\nda/bs y',
- good_data='=de/bs-1 x\nda/bs y')
+ self.assertEqualChunks(
+ self.klass(path).masked_use,
+ {atrue: (chunked_data(atrue, ("foon",), ("mmx",)),)},
+ )
+ self.simple_eapi_awareness_check(
+ tmp_path,
+ "package.use.mask",
+ "masked_use",
+ bad_data="=de/bs-1:1 x\nda/bs y",
+ good_data="=de/bs-1 x\nda/bs y",
+ )
def test_stable_masked_use(self, tmp_path, caplog):
path = tmp_path / self.profile
@@ -307,50 +389,78 @@ class TestPmsProfileNode(profile_mixin):
self.write_file(tmp_path, "use.stable.mask", "mmx")
self.write_file(tmp_path, "package.use.stable.mask", "dev-util/bar mmx")
self.assertEqualChunks(self.klass(path).stable_masked_use, {})
- self.wipe_path(path / 'use.stable.mask')
- self.wipe_path(path / 'package.use.stable.mask')
-
- self.simple_eapi_awareness_check(tmp_path, 'package.use.stable.mask', 'stable_masked_use',
- bad_data='=de/bs-1:1 x\nda/bs y',
- good_data='=de/bs-1 x\nda/bs y')
+ self.wipe_path(path / "use.stable.mask")
+ self.wipe_path(path / "package.use.stable.mask")
+
+ self.simple_eapi_awareness_check(
+ tmp_path,
+ "package.use.stable.mask",
+ "stable_masked_use",
+ bad_data="=de/bs-1:1 x\nda/bs y",
+ good_data="=de/bs-1 x\nda/bs y",
+ )
self.write_file(tmp_path, "eapi", "5")
self.assertEqualChunks(self.klass(path).stable_masked_use, {})
self.parsing_checks(tmp_path, "package.use.stable.mask", "stable_masked_use")
self.parsing_checks(tmp_path, "use.stable.mask", "stable_masked_use")
- self._check_package_use_files(tmp_path, caplog, path, "package.use.stable.mask", 'stable_masked_use')
+ self._check_package_use_files(
+ tmp_path, caplog, path, "package.use.stable.mask", "stable_masked_use"
+ )
- self.write_file(tmp_path, "package.use.stable.mask", "dev-util/bar -X\ndev-util/foo X")
+ self.write_file(
+ tmp_path, "package.use.stable.mask", "dev-util/bar -X\ndev-util/foo X"
+ )
self.write_file(tmp_path, "use.stable.mask", "mmx")
- self.assertEqualChunks(self.klass(path).stable_masked_use,
- {"dev-util/bar":
- (chunked_data(atom("dev-util/bar"), ('X',), ('mmx',)),),
- "dev-util/foo":
- (chunked_data(atom("dev-util/foo"), (), ('X', 'mmx')),),
- atrue:(chunked_data(packages.AlwaysTrue, (), ("mmx",)),)
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_masked_use,
+ {
+ "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ("mmx",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X", "mmx")),),
+ atrue: (chunked_data(packages.AlwaysTrue, (), ("mmx",)),),
+ },
+ )
self.write_file(tmp_path, "use.stable.mask", "mmx\n-foon")
- self.assertEqualChunks(self.klass(path).stable_masked_use,
- {"dev-util/bar":
- (chunked_data(atom("dev-util/bar"), ('X', 'foon'), ('mmx',)),),
- "dev-util/foo":
- (chunked_data(atom("dev-util/foo"), ('foon',), ('X', 'mmx',)),),
- atrue:(chunked_data(packages.AlwaysTrue, ('foon',), ('mmx',)),)
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_masked_use,
+ {
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), ("X", "foon"), ("mmx",)),
+ ),
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ ("foon",),
+ (
+ "X",
+ "mmx",
+ ),
+ ),
+ ),
+ atrue: (chunked_data(packages.AlwaysTrue, ("foon",), ("mmx",)),),
+ },
+ )
# verify that use.stable.mask is layered first, then package.use.stable.mask
self.write_file(tmp_path, "package.use.stable.mask", "dev-util/bar -mmx foon")
- self.assertEqualChunks(self.klass(path).stable_masked_use,
- {atrue:(chunked_data(atrue, ('foon',), ('mmx',)),),
- "dev-util/bar":(chunked_data(atom("dev-util/bar"), ('mmx',), ('foon',)),)
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_masked_use,
+ {
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), ("mmx",), ("foon",)),
+ ),
+ },
+ )
self.write_file(tmp_path, "package.use.stable.mask", "")
- self.assertEqualChunks(self.klass(path).stable_masked_use,
- {atrue:(chunked_data(atrue, ('foon',),('mmx',)),)})
+ self.assertEqualChunks(
+ self.klass(path).stable_masked_use,
+ {atrue: (chunked_data(atrue, ("foon",), ("mmx",)),)},
+ )
# verify that settings stack in the following order:
# use.mask -> use.stable.mask -> package.use.mask -> package.use.stable.mask
@@ -358,23 +468,32 @@ class TestPmsProfileNode(profile_mixin):
self.write_file(tmp_path, "use.stable.mask", "-foon")
self.write_file(tmp_path, "package.use.mask", "dev-util/foo -mmx")
self.write_file(tmp_path, "package.use.stable.mask", "dev-util/bar foon")
- self.assertEqualChunks(self.klass(path).stable_masked_use,
- {"dev-util/foo":
- (chunked_data(atom("dev-util/foo"), ('foon', 'mmx'), ()),),
- "dev-util/bar":
- (chunked_data(atom("dev-util/bar"), (), ('foon', 'mmx')),),
- atrue:(chunked_data(atrue, ('foon',), ('mmx',)),)
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_masked_use,
+ {
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("foon", "mmx"), ()),
+ ),
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), (), ("foon", "mmx")),
+ ),
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ },
+ )
self.write_file(tmp_path, "use.mask", "-mmx")
self.write_file(tmp_path, "use.stable.mask", "foon")
self.write_file(tmp_path, "package.use.mask", "dev-util/foo mmx")
self.write_file(tmp_path, "package.use.stable.mask", "dev-util/foo -foon")
- self.assertEqualChunks(self.klass(path).stable_masked_use,
- {"dev-util/foo":
- (chunked_data(atom("dev-util/foo"), ('foon',), ('mmx',)),),
- atrue:(chunked_data(atrue, ('mmx',), ('foon',)),)
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_masked_use,
+ {
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("foon",), ("mmx",)),
+ ),
+ atrue: (chunked_data(atrue, ("mmx",), ("foon",)),),
+ },
+ )
def test_forced_use(self, tmp_path, caplog):
path = tmp_path / self.profile
@@ -383,39 +502,72 @@ class TestPmsProfileNode(profile_mixin):
self.parsing_checks(tmp_path, "use.force", "forced_use")
self.write_file(tmp_path, "use.force", "")
- self._check_package_use_files(tmp_path, caplog, path, "package.use.force", 'forced_use')
+ self._check_package_use_files(
+ tmp_path, caplog, path, "package.use.force", "forced_use"
+ )
- self.write_file(tmp_path, "package.use.force", "dev-util/bar -X\ndev-util/foo X")
+ self.write_file(
+ tmp_path, "package.use.force", "dev-util/bar -X\ndev-util/foo X"
+ )
self.write_file(tmp_path, "use.force", "mmx")
- self.assertEqualChunks(self.klass(path).forced_use, {
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('X',), ('mmx',)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ('X', 'mmx')),),
- atrue: (chunked_data(atrue, (), ('mmx',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).forced_use,
+ {
+ "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ("mmx",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X", "mmx")),),
+ atrue: (chunked_data(atrue, (), ("mmx",)),),
+ },
+ )
self.write_file(tmp_path, "use.force", "mmx\n-foon")
- self.assertEqualChunks(self.klass(path).forced_use, {
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('X', 'foon',), ('mmx',)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), ('foon',), ('X', 'mmx')),),
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).forced_use,
+ {
+ "dev-util/bar": (
+ chunked_data(
+ atom("dev-util/bar"),
+ (
+ "X",
+ "foon",
+ ),
+ ("mmx",),
+ ),
+ ),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("foon",), ("X", "mmx")),
+ ),
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ },
+ )
# verify that use.force is layered first, then package.use.force
self.write_file(tmp_path, "package.use.force", "dev-util/bar -mmx foon")
p = self.klass(path)
- self.assertEqualChunks(self.klass(path).forced_use, {
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),),
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('mmx',), ('foon',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).forced_use,
+ {
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), ("mmx",), ("foon",)),
+ ),
+ },
+ )
self.write_file(tmp_path, "package.use.force", "")
- self.assertEqualChunks(self.klass(path).forced_use, {
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),),
- })
- self.simple_eapi_awareness_check(tmp_path, 'package.use.force', 'forced_use',
- bad_data='=de/bs-1:1 x\nda/bs y',
- good_data='=de/bs-1 x\nda/bs y')
+ self.assertEqualChunks(
+ self.klass(path).forced_use,
+ {
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ },
+ )
+ self.simple_eapi_awareness_check(
+ tmp_path,
+ "package.use.force",
+ "forced_use",
+ bad_data="=de/bs-1:1 x\nda/bs y",
+ good_data="=de/bs-1 x\nda/bs y",
+ )
def test_stable_forced_use(self, tmp_path, caplog):
path = tmp_path / self.profile
@@ -425,48 +577,81 @@ class TestPmsProfileNode(profile_mixin):
self.write_file(tmp_path, "use.stable.force", "mmx")
self.write_file(tmp_path, "package.use.stable.force", "dev-util/bar mmx")
self.assertEqualChunks(self.klass(path).stable_forced_use, {})
- self.wipe_path(path / 'use.stable.force')
- self.wipe_path(path / 'package.use.stable.force')
-
- self.simple_eapi_awareness_check(tmp_path, 'package.use.stable.force', 'stable_forced_use',
- bad_data='=de/bs-1:1 x\nda/bs y',
- good_data='=de/bs-1 x\nda/bs y')
+ self.wipe_path(path / "use.stable.force")
+ self.wipe_path(path / "package.use.stable.force")
+
+ self.simple_eapi_awareness_check(
+ tmp_path,
+ "package.use.stable.force",
+ "stable_forced_use",
+ bad_data="=de/bs-1:1 x\nda/bs y",
+ good_data="=de/bs-1 x\nda/bs y",
+ )
self.write_file(tmp_path, "eapi", "5")
self.assertEqualChunks(self.klass(path).stable_forced_use, {})
self.parsing_checks(tmp_path, "package.use.stable.force", "stable_forced_use")
self.parsing_checks(tmp_path, "use.stable.force", "stable_forced_use")
- self._check_package_use_files(tmp_path, caplog, path, "package.use.stable.force", 'stable_forced_use')
+ self._check_package_use_files(
+ tmp_path, caplog, path, "package.use.stable.force", "stable_forced_use"
+ )
- self.write_file(tmp_path, "package.use.stable.force", "dev-util/bar -X\ndev-util/foo X")
+ self.write_file(
+ tmp_path, "package.use.stable.force", "dev-util/bar -X\ndev-util/foo X"
+ )
self.write_file(tmp_path, "use.stable.force", "mmx")
- self.assertEqualChunks(self.klass(path).stable_forced_use, {
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('X',), ('mmx',)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ('X', 'mmx')),),
- atrue: (chunked_data(atrue, (), ('mmx',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_forced_use,
+ {
+ "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ("mmx",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X", "mmx")),),
+ atrue: (chunked_data(atrue, (), ("mmx",)),),
+ },
+ )
self.write_file(tmp_path, "use.stable.force", "mmx\n-foon")
- self.assertEqualChunks(self.klass(path).stable_forced_use, {
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('X', 'foon',), ('mmx',)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), ('foon',), ('X', 'mmx')),),
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_forced_use,
+ {
+ "dev-util/bar": (
+ chunked_data(
+ atom("dev-util/bar"),
+ (
+ "X",
+ "foon",
+ ),
+ ("mmx",),
+ ),
+ ),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("foon",), ("X", "mmx")),
+ ),
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ },
+ )
# verify that use.stable.force is layered first, then package.use.stable.force
self.write_file(tmp_path, "package.use.stable.force", "dev-util/bar -mmx foon")
p = self.klass(path)
- self.assertEqualChunks(self.klass(path).stable_forced_use, {
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),),
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('mmx',), ('foon',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), ("mmx",), ("foon",)),
+ ),
+ },
+ )
self.write_file(tmp_path, "package.use.stable.force", "")
- self.assertEqualChunks(self.klass(path).stable_forced_use, {
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ },
+ )
# verify that settings stack in the following order:
# use.force -> use.stable.force -> package.use.force -> package.use.stable.force
@@ -474,39 +659,59 @@ class TestPmsProfileNode(profile_mixin):
self.write_file(tmp_path, "use.stable.force", "-foon")
self.write_file(tmp_path, "package.use.force", "dev-util/foo -mmx")
self.write_file(tmp_path, "package.use.stable.force", "dev-util/bar foon")
- self.assertEqualChunks(self.klass(path).stable_forced_use, {
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), ('foon', 'mmx'), ()),),
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), (), ('foon', 'mmx')),),
- atrue: (chunked_data(atrue, ('foon',), ('mmx',)),)
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_forced_use,
+ {
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("foon", "mmx"), ()),
+ ),
+ "dev-util/bar": (
+ chunked_data(atom("dev-util/bar"), (), ("foon", "mmx")),
+ ),
+ atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
+ },
+ )
self.write_file(tmp_path, "use.force", "-mmx")
self.write_file(tmp_path, "use.stable.force", "foon")
self.write_file(tmp_path, "package.use.force", "dev-util/foo mmx")
self.write_file(tmp_path, "package.use.stable.force", "dev-util/foo -foon")
- self.assertEqualChunks(self.klass(path).stable_forced_use, {
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), ('foon',), ('mmx',)),),
- atrue: (chunked_data(atrue, ('mmx',), ('foon',)),),
- })
+ self.assertEqualChunks(
+ self.klass(path).stable_forced_use,
+ {
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("foon",), ("mmx",)),
+ ),
+ atrue: (chunked_data(atrue, ("mmx",), ("foon",)),),
+ },
+ )
def test_pkg_use(self, tmp_path, caplog):
path = tmp_path / self.profile
self.assertEqualChunks(self.klass(path).pkg_use, {})
self.parsing_checks(tmp_path, "package.use", "pkg_use")
- self._check_package_use_files(tmp_path, caplog, path, "package.use", 'pkg_use')
+ self._check_package_use_files(tmp_path, caplog, path, "package.use", "pkg_use")
self.write_file(tmp_path, "package.use", "dev-util/bar -X\ndev-util/foo X")
- self.assertEqualChunks(self.klass(path).pkg_use, {
- "dev-util/bar": (chunked_data(atom("dev-util/bar"), ('X',), ()),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ('X',)),)})
- self.simple_eapi_awareness_check(tmp_path, 'package.use', 'pkg_use',
- bad_data='=de/bs-1:1 x\nda/bs y',
- good_data='=de/bs-1 x\nda/bs y')
+ self.assertEqualChunks(
+ self.klass(path).pkg_use,
+ {
+ "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ()),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),),
+ },
+ )
+ self.simple_eapi_awareness_check(
+ tmp_path,
+ "package.use",
+ "pkg_use",
+ bad_data="=de/bs-1:1 x\nda/bs y",
+ good_data="=de/bs-1 x\nda/bs y",
+ )
def test_parents(self, tmp_path):
path = tmp_path / self.profile
- (path / 'child').mkdir()
+ (path / "child").mkdir()
self.write_file(tmp_path, "parent", "..", profile=f"{self.profile}/child")
p = self.klass(path / "child")
assert len(p.parents) == 1
@@ -516,24 +721,24 @@ class TestPmsProfileNode(profile_mixin):
path = tmp_path / self.profile
assert self.klass(path).default_env == {}
self.write_file(tmp_path, "make.defaults", "X=foo\n")
- assert self.klass(path).default_env == {'X':'foo'}
- self.write_file(tmp_path, 'make.defaults', 'y=narf\nx=${y}\n')
- assert self.klass(path).default_env == {'y':'narf', 'x':'narf'}
+ assert self.klass(path).default_env == {"X": "foo"}
+ self.write_file(tmp_path, "make.defaults", "y=narf\nx=${y}\n")
+ assert self.klass(path).default_env == {"y": "narf", "x": "narf"}
        # ensure make.defaults can access the preceding env.
- (child := tmp_path / self.profile / 'child').mkdir()
- self.write_file(tmp_path, 'make.defaults', 'x="${x} twice"', profile=child)
- self.write_file(tmp_path, 'parent', '..', profile=child)
- assert self.klass(child).default_env == {'y':'narf', 'x':'narf twice'}
+ (child := tmp_path / self.profile / "child").mkdir()
+ self.write_file(tmp_path, "make.defaults", 'x="${x} twice"', profile=child)
+ self.write_file(tmp_path, "parent", "..", profile=child)
+ assert self.klass(child).default_env == {"y": "narf", "x": "narf twice"}
def test_default_env_incrementals(self, tmp_path):
assert "USE" in const.incrementals
profile1 = tmp_path / self.profile
(profile2 := profile1 / "sub").mkdir()
(profile3 := profile2 / "sub").mkdir()
- self.write_file(tmp_path, "make.defaults", 'USE=foo', profile=profile1)
- self.write_file(tmp_path, "make.defaults", 'x=dar', profile=profile2)
+ self.write_file(tmp_path, "make.defaults", "USE=foo", profile=profile1)
+ self.write_file(tmp_path, "make.defaults", "x=dar", profile=profile2)
self.write_file(tmp_path, "parent", "..", profile=profile2)
- self.write_file(tmp_path, "make.defaults", 'USE=-foo', profile=profile3)
+ self.write_file(tmp_path, "make.defaults", "USE=-foo", profile=profile3)
self.write_file(tmp_path, "parent", "..", profile=profile3)
assert self.klass(profile1).default_env == dict(USE="foo")
assert self.klass(profile2).default_env == dict(USE="foo", x="dar")
@@ -542,7 +747,7 @@ class TestPmsProfileNode(profile_mixin):
def test_bashrc(self, tmp_path):
path = tmp_path / self.profile
assert self.klass(path).bashrc is None
- self.write_file(tmp_path, "profile.bashrc", '')
+ self.write_file(tmp_path, "profile.bashrc", "")
assert self.klass(path).bashrc is not None
def test_pkg_bashrc(self, tmp_path, caplog):
@@ -550,22 +755,40 @@ class TestPmsProfileNode(profile_mixin):
assert not self.klass(path).pkg_bashrc
self.write_file(tmp_path, "package.bashrc", "@dsfg", profile=self.profile)
assert not self.klass(path).pkg_bashrc
- self.write_file(tmp_path, "package.bashrc", "dev-util/foo", profile=self.profile)
+ self.write_file(
+ tmp_path, "package.bashrc", "dev-util/foo", profile=self.profile
+ )
assert not self.klass(path).pkg_bashrc
- self.write_file(tmp_path, "package.bashrc", "dev-util/foo file1 file2\ndev-util/bar file3", profile=self.profile)
+ self.write_file(
+ tmp_path,
+ "package.bashrc",
+ "dev-util/foo file1 file2\ndev-util/bar file3",
+ profile=self.profile,
+ )
assert not self.klass(path).pkg_bashrc
assert not caplog.text
class TestPortage1ProfileNode(TestPmsProfileNode):
- can_be_dirs = frozenset([
- "package.accept_keywords", "package.keywords",
- "package.mask", "package.provided", "package.unmask",
- "package.use", "package.use.force", "package.use.mask",
- "package.use.stable.force", "package.use.stable.mask",
- "use.force", "use.mask", "use.stable.mask", "use.stable.force"
- ])
+ can_be_dirs = frozenset(
+ [
+ "package.accept_keywords",
+ "package.keywords",
+ "package.mask",
+ "package.provided",
+ "package.unmask",
+ "package.use",
+ "package.use.force",
+ "package.use.mask",
+ "package.use.stable.force",
+ "package.use.stable.mask",
+ "use.force",
+ "use.mask",
+ "use.stable.mask",
+ "use.stable.force",
+ ]
+ )
klass = partial(TestPmsProfileNode.klass, pms_strict=False)
@@ -586,11 +809,11 @@ class TestPortage1ProfileNode(TestPmsProfileNode):
path = tmp_path / self.profile
self.write_file(tmp_path, "package.keywords", "dev-util/foo amd64")
- (path / "package.keywords" / ".test").write_text('dev-util/foo x86')
+ (path / "package.keywords" / ".test").write_text("dev-util/foo x86")
assert self.klass(path).keywords == ((atom("dev-util/foo"), ("amd64",)),)
self.write_file(tmp_path, "package.keywords", "")
- (path / "package.keywords" / ".test").write_text('dev-util/foo x86')
+ (path / "package.keywords" / ".test").write_text("dev-util/foo x86")
assert not self.klass(path).keywords
@@ -599,9 +822,13 @@ class TestPortage2ProfileNode(TestPortage1ProfileNode):
profile = os.path.join("profiles", "default")
def setup_repo(self, tmp_path):
- (tmp_path / "profiles" / "repo_name").write_bytes(binascii.b2a_hex(os.urandom(10)))
+ (tmp_path / "profiles" / "repo_name").write_bytes(
+ binascii.b2a_hex(os.urandom(10))
+ )
(tmp_path / "metadata").mkdir()
- (tmp_path / "metadata" / "layout.conf").write_text("masters = ''\nprofile-formats = portage-2")
+ (tmp_path / "metadata" / "layout.conf").write_text(
+ "masters = ''\nprofile-formats = portage-2"
+ )
class TestProfileBashrcProfileNode(TestPmsProfileNode):
@@ -609,15 +836,16 @@ class TestProfileBashrcProfileNode(TestPmsProfileNode):
profile = os.path.join("profiles", "default")
def assert_pkg_bashrc(self, actual, expected):
- assert expected == {
- str(k): [s.path for s in v]
- for k, v in actual
- }
+ assert expected == {str(k): [s.path for s in v] for k, v in actual}
def setup_repo(self, tmp_path):
- (tmp_path / "profiles" / "repo_name").write_bytes(binascii.b2a_hex(os.urandom(10)))
+ (tmp_path / "profiles" / "repo_name").write_bytes(
+ binascii.b2a_hex(os.urandom(10))
+ )
(tmp_path / "metadata").mkdir()
- (tmp_path / "metadata" / "layout.conf").write_text("masters = ''\nprofile-formats = profile-bashrcs")
+ (tmp_path / "metadata" / "layout.conf").write_text(
+ "masters = ''\nprofile-formats = profile-bashrcs"
+ )
def test_pkg_bashrc(self, tmp_path, caplog):
path = tmp_path / self.profile
@@ -631,21 +859,38 @@ class TestProfileBashrcProfileNode(TestPmsProfileNode):
assert "line 1: parsing error: invalid package atom: '@dsfg'" in caplog.text
caplog.clear()
- self.write_file(tmp_path, "package.bashrc", "dev-util/foo", profile=self.profile)
+ self.write_file(
+ tmp_path, "package.bashrc", "dev-util/foo", profile=self.profile
+ )
assert not self.klass(path).pkg_bashrc
assert "line 1: missing bashrc files: 'dev-util/foo'" in caplog.text
caplog.clear()
- self.write_file(tmp_path, "package.bashrc", "dev-util/foo file1", profile=self.profile)
- self.assert_pkg_bashrc(self.klass(path).pkg_bashrc, {"dev-util/foo": [str(path / "bashrc/file1")]})
+ self.write_file(
+ tmp_path, "package.bashrc", "dev-util/foo file1", profile=self.profile
+ )
+ self.assert_pkg_bashrc(
+ self.klass(path).pkg_bashrc, {"dev-util/foo": [str(path / "bashrc/file1")]}
+ )
assert not caplog.text
caplog.clear()
- self.write_file(tmp_path, "package.bashrc", "dev-util/foo file1 file2\ndev-util/bar file3", profile=self.profile)
- self.assert_pkg_bashrc(self.klass(path).pkg_bashrc, {
- "dev-util/foo": [str(path / "bashrc/file1"), str(path / "bashrc/file2")],
- "dev-util/bar": [str(path / "bashrc/file3")],
- })
+ self.write_file(
+ tmp_path,
+ "package.bashrc",
+ "dev-util/foo file1 file2\ndev-util/bar file3",
+ profile=self.profile,
+ )
+ self.assert_pkg_bashrc(
+ self.klass(path).pkg_bashrc,
+ {
+ "dev-util/foo": [
+ str(path / "bashrc/file1"),
+ str(path / "bashrc/file2"),
+ ],
+ "dev-util/bar": [str(path / "bashrc/file3")],
+ },
+ )
assert not caplog.text
@@ -654,9 +899,13 @@ class TestProfileSetProfileNode(TestPmsProfileNode):
profile = os.path.join("profiles", "default")
def setup_repo(self, tmp_path):
- (tmp_path / "profiles" / "repo_name").write_bytes(binascii.b2a_hex(os.urandom(10)))
+ (tmp_path / "profiles" / "repo_name").write_bytes(
+ binascii.b2a_hex(os.urandom(10))
+ )
(tmp_path / "metadata").mkdir()
- (tmp_path / "metadata" / "layout.conf").write_text("masters = ''\nprofile-formats = profile-set")
+ (tmp_path / "metadata" / "layout.conf").write_text(
+ "masters = ''\nprofile-formats = profile-set"
+ )
def test_packages(self, tmp_path):
self.write_file(tmp_path, "packages", "dev-sys/atom\n-dev-sys/atom2\n")
@@ -676,10 +925,7 @@ class TestOnDiskProfile(profile_mixin):
return self.kls(str(basepath or tmp_path), profile, config, **kwds)
def test_stacking(self, tmp_path):
- self.mk_profiles(tmp_path,
- {},
- {}
- )
+ self.mk_profiles(tmp_path, {}, {})
base = self.get_profile(tmp_path, "0")
assert [x.path for x in base.stack] == [str(tmp_path), str(tmp_path / "0")]
assert len(base.system) == 0
@@ -690,13 +936,14 @@ class TestOnDiskProfile(profile_mixin):
assert len(base.bashrc) == 0
def test_packages(self, tmp_path):
- self.mk_profiles(tmp_path,
- {"packages":"*dev-util/diffball\ndev-util/foo\ndev-util/foo2\n"},
- {"packages":"*dev-util/foo\n-*dev-util/diffball\n-dev-util/foo2\n"},
- {"packages":"*dev-util/foo\n", "parent":"0"},
- {"packages":"-*\n*dev-util/foo\n", "parent":"0"},
- {"packages":"*dev-util/foo\n-*\n", "parent":"0"},
- {"packages":"-*\n", "parent":"0"},
+ self.mk_profiles(
+ tmp_path,
+ {"packages": "*dev-util/diffball\ndev-util/foo\ndev-util/foo2\n"},
+ {"packages": "*dev-util/foo\n-*dev-util/diffball\n-dev-util/foo2\n"},
+ {"packages": "*dev-util/foo\n", "parent": "0"},
+ {"packages": "-*\n*dev-util/foo\n", "parent": "0"},
+ {"packages": "*dev-util/foo\n-*\n", "parent": "0"},
+ {"packages": "-*\n", "parent": "0"},
)
p = self.get_profile(tmp_path, "0")
assert sorted(p.system) == sorted([atom("dev-util/diffball")])
@@ -707,7 +954,9 @@ class TestOnDiskProfile(profile_mixin):
assert not sorted(p.masks)
p = self.get_profile(tmp_path, "2")
- assert sorted(p.system) == sorted([atom("dev-util/diffball"), atom("dev-util/foo")])
+ assert sorted(p.system) == sorted(
+ [atom("dev-util/diffball"), atom("dev-util/foo")]
+ )
p = self.get_profile(tmp_path, "3")
assert sorted(p.system) == sorted([atom("dev-util/foo")])
@@ -719,42 +968,57 @@ class TestOnDiskProfile(profile_mixin):
assert p.system == frozenset()
def test_masks(self, tmp_path):
- self.mk_profiles(tmp_path,
- {"package.mask":"dev-util/foo"},
+ self.mk_profiles(
+ tmp_path,
+ {"package.mask": "dev-util/foo"},
{},
- {"package.mask":"-dev-util/confcache\ndev-util/foo"},
- **{"package.mask":"dev-util/confcache"}
+ {"package.mask": "-dev-util/confcache\ndev-util/foo"},
+ **{"package.mask": "dev-util/confcache"},
+ )
+ assert sorted(self.get_profile(tmp_path, "0").masks) == sorted(
+ atom("dev-util/" + x) for x in ["confcache", "foo"]
+ )
+ assert sorted(self.get_profile(tmp_path, "1").masks) == sorted(
+ atom("dev-util/" + x) for x in ["confcache", "foo"]
)
- assert sorted(self.get_profile(tmp_path, "0").masks) == sorted(atom("dev-util/" + x) for x in ["confcache", "foo"])
- assert sorted(self.get_profile(tmp_path, "1").masks) == sorted(atom("dev-util/" + x) for x in ["confcache", "foo"])
assert sorted(self.get_profile(tmp_path, "2").masks) == [atom("dev-util/foo")]
def test_unmasks(self, tmp_path):
- self.mk_profiles(tmp_path,
- {"package.unmask":"dev-util/foo"},
+ self.mk_profiles(
+ tmp_path,
+ {"package.unmask": "dev-util/foo"},
{},
- {"package.unmask":"dev-util/confcache"}
+ {"package.unmask": "dev-util/confcache"},
+ )
+ assert self.get_profile(tmp_path, "0").unmasks == frozenset(
+ [atom("dev-util/foo")]
+ )
+ assert self.get_profile(tmp_path, "1").unmasks == frozenset(
+ [atom("dev-util/foo")]
+ )
+ assert self.get_profile(tmp_path, "2").unmasks == frozenset(
+ [atom("dev-util/" + x) for x in ("confcache", "foo")]
)
- assert self.get_profile(tmp_path, "0").unmasks == frozenset([atom("dev-util/foo")])
- assert self.get_profile(tmp_path, "1").unmasks == frozenset([atom("dev-util/foo")])
- assert self.get_profile(tmp_path, "2").unmasks == frozenset([atom("dev-util/" + x) for x in ("confcache", "foo")])
def test_pkg_deprecated(self, tmp_path):
- self.mk_profiles(tmp_path,
- {"package.deprecated":"dev-util/foo"},
+ self.mk_profiles(
+ tmp_path,
+ {"package.deprecated": "dev-util/foo"},
{},
- {"package.deprecated":"dev-util/confcache"}
+ {"package.deprecated": "dev-util/confcache"},
+ )
+ assert self.get_profile(tmp_path, "0").pkg_deprecated == frozenset(
+ [atom("dev-util/foo")]
+ )
+ assert self.get_profile(tmp_path, "1").pkg_deprecated == frozenset(
+ [atom("dev-util/foo")]
+ )
+ assert self.get_profile(tmp_path, "2").pkg_deprecated == frozenset(
+ [atom("dev-util/" + x) for x in ("confcache", "foo")]
)
- assert self.get_profile(tmp_path, "0").pkg_deprecated == frozenset([atom("dev-util/foo")])
- assert self.get_profile(tmp_path, "1").pkg_deprecated == frozenset([atom("dev-util/foo")])
- assert self.get_profile(tmp_path, "2").pkg_deprecated == frozenset([atom("dev-util/" + x) for x in ("confcache", "foo")])
def test_bashrc(self, tmp_path):
- self.mk_profiles(tmp_path,
- {"profile.bashrc":""},
- {},
- {"profile.bashrc":""}
- )
+ self.mk_profiles(tmp_path, {"profile.bashrc": ""}, {}, {"profile.bashrc": ""})
assert len(self.get_profile(tmp_path, "0").bashrc) == 1
assert len(self.get_profile(tmp_path, "1").bashrc) == 1
assert len(self.get_profile(tmp_path, "2").bashrc) == 2
@@ -763,384 +1027,698 @@ class TestOnDiskProfile(profile_mixin):
self.mk_profiles(tmp_path, {})
assert not self.get_profile(tmp_path, "0").keywords
- self.mk_profiles(tmp_path,
+ self.mk_profiles(
+ tmp_path,
{"package.keywords": "dev-util/foo amd64"},
{},
- {"package.keywords": ">=dev-util/foo-2 -amd64 ~amd64"}
+ {"package.keywords": ">=dev-util/foo-2 -amd64 ~amd64"},
+ )
+ assert self.get_profile(tmp_path, "0").keywords == (
+ (atom("dev-util/foo"), ("amd64",)),
+ )
+ assert self.get_profile(tmp_path, "1").keywords == (
+ (atom("dev-util/foo"), ("amd64",)),
+ )
+ assert self.get_profile(tmp_path, "2").keywords == (
+ (atom("dev-util/foo"), ("amd64",)),
+ (atom(">=dev-util/foo-2"), ("-amd64", "~amd64")),
)
- assert self.get_profile(tmp_path, "0").keywords == ((atom("dev-util/foo"), ("amd64",)),)
- assert self.get_profile(tmp_path, "1").keywords == ((atom("dev-util/foo"), ("amd64",)),)
- assert self.get_profile(tmp_path, "2").keywords == ((atom("dev-util/foo"), ("amd64",)),
- (atom(">=dev-util/foo-2"), ("-amd64", "~amd64")))
def test_pkg_accept_keywords(self, tmp_path):
self.mk_profiles(tmp_path, {})
assert not self.get_profile(tmp_path, "0").accept_keywords
- self.mk_profiles(tmp_path,
+ self.mk_profiles(
+ tmp_path,
{"package.accept_keywords": "dev-util/foo ~amd64"},
{},
{"package.accept_keywords": "dev-util/bar **"},
- {"package.accept_keywords": "dev-util/baz"}
+ {"package.accept_keywords": "dev-util/baz"},
)
- assert self.get_profile(tmp_path, "0").accept_keywords == ((atom("dev-util/foo"), ("~amd64",)),)
- assert self.get_profile(tmp_path, "1").accept_keywords == ((atom("dev-util/foo"), ("~amd64",)),)
- assert self.get_profile(tmp_path, "2").accept_keywords == ((atom("dev-util/foo"), ("~amd64",)),
- (atom("dev-util/bar"), ("**",)))
- assert self.get_profile(tmp_path, "3").accept_keywords == ((atom("dev-util/foo"), ("~amd64",)),
+ assert self.get_profile(tmp_path, "0").accept_keywords == (
+ (atom("dev-util/foo"), ("~amd64",)),
+ )
+ assert self.get_profile(tmp_path, "1").accept_keywords == (
+ (atom("dev-util/foo"), ("~amd64",)),
+ )
+ assert self.get_profile(tmp_path, "2").accept_keywords == (
+ (atom("dev-util/foo"), ("~amd64",)),
(atom("dev-util/bar"), ("**",)),
- (atom("dev-util/baz"), ()))
+ )
+ assert self.get_profile(tmp_path, "3").accept_keywords == (
+ (atom("dev-util/foo"), ("~amd64",)),
+ (atom("dev-util/bar"), ("**",)),
+ (atom("dev-util/baz"), ()),
+ )
def test_masked_use(self, tmp_path):
self.mk_profiles(tmp_path, {})
self.assertEqualPayload(self.get_profile(tmp_path, "0").masked_use, {})
- self.mk_profiles(tmp_path,
- {"use.mask":"X\nmmx\n"},
- {},
- {"use.mask":"-X"})
+ self.mk_profiles(tmp_path, {"use.mask": "X\nmmx\n"}, {}, {"use.mask": "-X"})
- self.assertEqualPayload(self.get_profile(tmp_path, "0").masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),)})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "1").masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx',)),)})
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").masked_use,
+ {atrue: (chunked_data(atrue, (), ("X", "mmx")),)},
+ )
- self.assertEqualPayload(self.get_profile(tmp_path, "2").masked_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),)})
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").masked_use,
+ {
+ atrue: (
+ chunked_data(
+ atrue,
+ (),
+ (
+ "X",
+ "mmx",
+ ),
+ ),
+ )
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").masked_use,
+ {atrue: (chunked_data(atrue, ("X",), ("mmx",)),)},
+ )
- self.mk_profiles(tmp_path,
- {"use.mask":"X\nmmx\n", "package.use.mask":"dev-util/foo cups"},
+ self.mk_profiles(
+ tmp_path,
+ {"use.mask": "X\nmmx\n", "package.use.mask": "dev-util/foo cups"},
{"package.use.mask": "dev-util/foo -cups"},
- {"use.mask":"-X", "package.use.mask": "dev-util/blah X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ("X", "cups", "mmx")),),
- })
-
- self.assertEqualPayload(self.get_profile(tmp_path, "1").masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('cups',), ("X", "mmx")),),
- })
-
- self.assertEqualPayload(self.get_profile(tmp_path, "2").masked_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('X', 'cups'), ("mmx",)),),
- "dev-util/blah":(chunked_data(atom("dev-util/blah"), (), ("X", "mmx",)),)
- })
-
- self.mk_profiles(tmp_path,
- {"use.mask":"X", "package.use.mask":"dev-util/foo -X"},
- {"use.mask":"X"},
- {"package.use.mask":"dev-util/foo -X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").masked_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), ('X',), ()),)
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "1").masked_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),)
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "2").masked_use,
- {atrue:(chunked_data(atrue, (), ("X")),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ("X",), (),),)
- })
+ {"use.mask": "-X", "package.use.mask": "dev-util/blah X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), (), ("X", "cups", "mmx")),
+ ),
+ },
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("cups",), ("X", "mmx")),
+ ),
+ },
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").masked_use,
+ {
+ atrue: (chunked_data(atrue, ("X",), ("mmx",)),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("X", "cups"), ("mmx",)),
+ ),
+ "dev-util/blah": (
+ chunked_data(
+ atom("dev-util/blah"),
+ (),
+ (
+ "X",
+ "mmx",
+ ),
+ ),
+ ),
+ },
+ )
+
+ self.mk_profiles(
+ tmp_path,
+ {"use.mask": "X", "package.use.mask": "dev-util/foo -X"},
+ {"use.mask": "X"},
+ {"package.use.mask": "dev-util/foo -X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), ("X",), ()),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X")),),
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ ("X",),
+ (),
+ ),
+ ),
+ },
+ )
        # pkgcore bug 237; per PMS, later profiles can punch holes in the
# ranges applicable.
- self.mk_profiles(tmp_path,
- {"package.use.mask":"dev-util/foo X"},
- {"package.use.mask":">=dev-util/foo-1 -X"},
- {"package.use.mask":">=dev-util/foo-2 X"},
- {"package.use.mask":"dev-util/foo X", "name":"collapse_p"},
- {"package.use.mask":"dev-util/foo -X", "parent":"2", "name":"collapse_n"},
- )
+ self.mk_profiles(
+ tmp_path,
+ {"package.use.mask": "dev-util/foo X"},
+ {"package.use.mask": ">=dev-util/foo-1 -X"},
+ {"package.use.mask": ">=dev-util/foo-2 X"},
+ {"package.use.mask": "dev-util/foo X", "name": "collapse_p"},
+ {
+ "package.use.mask": "dev-util/foo -X",
+ "parent": "2",
+ "name": "collapse_n",
+ },
+ )
- self.assertEqualPayload(self.get_profile(tmp_path, "collapse_p").masked_use,
- {"dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ("X",)),)
- })
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "collapse_p").masked_use,
+ {"dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),)},
+ )
- self.assertEqualPayload(self.get_profile(tmp_path, "collapse_n").masked_use,
- {"dev-util/foo":(chunked_data(atom("dev-util/foo"), ("X",), (),),),
- })
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "collapse_n").masked_use,
+ {
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ ("X",),
+ (),
+ ),
+ ),
+ },
+ )
def test_stable_masked_use(self, tmp_path):
self.mk_profiles(tmp_path, {})
self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_masked_use, {})
- self.mk_profiles(tmp_path,
- {"eapi":"5", "use.stable.mask":"X\nmmx\n"},
- {"eapi":"5"},
- {"eapi":"5", "use.stable.mask":"-X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),)})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "1").stable_masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx',)),)})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "2").stable_masked_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),)})
-
- self.mk_profiles(tmp_path,
- {"eapi":"5", "use.stable.mask":"X\nmmx\n", "package.use.stable.mask":"dev-util/foo cups"},
- {"eapi":"5", "package.use.stable.mask": "dev-util/foo -cups"},
- {"eapi":"5", "use.stable.mask":"-X", "package.use.stable.mask": "dev-util/blah X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ("X", "cups", "mmx")),),
- })
-
- self.assertEqualPayload(self.get_profile(tmp_path, "1").stable_masked_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('cups',), ("X", "mmx")),),
- })
-
- self.assertEqualPayload(self.get_profile(tmp_path, "2").stable_masked_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('X', 'cups'), ("mmx",)),),
- "dev-util/blah":(chunked_data(atom("dev-util/blah"), (), ("X", "mmx",)),)
- })
-
- self.mk_profiles(tmp_path,
- {"eapi":"5", "use.stable.mask":"X", "package.use.stable.mask":"dev-util/foo -X"},
- {"eapi":"5", "use.stable.mask":"X"},
- {"eapi":"5", "package.use.stable.mask":"dev-util/foo -X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_masked_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), ('X',), ()),)
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "1").stable_masked_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),)
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "2").stable_masked_use,
- {atrue:(chunked_data(atrue, (), ("X")),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ("X",), (),),)
- })
+ self.mk_profiles(
+ tmp_path,
+ {"eapi": "5", "use.stable.mask": "X\nmmx\n"},
+ {"eapi": "5"},
+ {"eapi": "5", "use.stable.mask": "-X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").stable_masked_use,
+ {atrue: (chunked_data(atrue, (), ("X", "mmx")),)},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").stable_masked_use,
+ {
+ atrue: (
+ chunked_data(
+ atrue,
+ (),
+ (
+ "X",
+ "mmx",
+ ),
+ ),
+ )
+ },
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").stable_masked_use,
+ {atrue: (chunked_data(atrue, ("X",), ("mmx",)),)},
+ )
+
+ self.mk_profiles(
+ tmp_path,
+ {
+ "eapi": "5",
+ "use.stable.mask": "X\nmmx\n",
+ "package.use.stable.mask": "dev-util/foo cups",
+ },
+ {"eapi": "5", "package.use.stable.mask": "dev-util/foo -cups"},
+ {
+ "eapi": "5",
+ "use.stable.mask": "-X",
+ "package.use.stable.mask": "dev-util/blah X",
+ },
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").stable_masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), (), ("X", "cups", "mmx")),
+ ),
+ },
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").stable_masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("cups",), ("X", "mmx")),
+ ),
+ },
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").stable_masked_use,
+ {
+ atrue: (chunked_data(atrue, ("X",), ("mmx",)),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("X", "cups"), ("mmx",)),
+ ),
+ "dev-util/blah": (
+ chunked_data(
+ atom("dev-util/blah"),
+ (),
+ (
+ "X",
+ "mmx",
+ ),
+ ),
+ ),
+ },
+ )
+
+ self.mk_profiles(
+ tmp_path,
+ {
+ "eapi": "5",
+ "use.stable.mask": "X",
+ "package.use.stable.mask": "dev-util/foo -X",
+ },
+ {"eapi": "5", "use.stable.mask": "X"},
+ {"eapi": "5", "package.use.stable.mask": "dev-util/foo -X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").stable_masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), ("X",), ()),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").stable_masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").stable_masked_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X")),),
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ ("X",),
+ (),
+ ),
+ ),
+ },
+ )
        # pkgcore bug 237; per PMS, later profiles can punch holes in the
        # applicable ranges.
- self.mk_profiles(tmp_path,
- {"eapi":"5", "package.use.stable.mask":"dev-util/foo X"},
- {"eapi":"5", "package.use.stable.mask":">=dev-util/foo-1 -X"},
- {"eapi":"5", "package.use.stable.mask":">=dev-util/foo-2 X"},
- {"eapi":"5", "package.use.stable.mask":"dev-util/foo X", "name":"collapse_p"},
- {"eapi":"5", "package.use.stable.mask":"dev-util/foo -X", "parent":"2", "name":"collapse_n"},
- )
+ self.mk_profiles(
+ tmp_path,
+ {"eapi": "5", "package.use.stable.mask": "dev-util/foo X"},
+ {"eapi": "5", "package.use.stable.mask": ">=dev-util/foo-1 -X"},
+ {"eapi": "5", "package.use.stable.mask": ">=dev-util/foo-2 X"},
+ {
+ "eapi": "5",
+ "package.use.stable.mask": "dev-util/foo X",
+ "name": "collapse_p",
+ },
+ {
+ "eapi": "5",
+ "package.use.stable.mask": "dev-util/foo -X",
+ "parent": "2",
+ "name": "collapse_n",
+ },
+ )
- self.assertEqualPayload(self.get_profile(tmp_path, "collapse_p").stable_masked_use,
- {"dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ("X",)),)
- })
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "collapse_p").stable_masked_use,
+ {"dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),)},
+ )
- self.assertEqualPayload(self.get_profile(tmp_path, "collapse_n").stable_masked_use,
- {"dev-util/foo":(chunked_data(atom("dev-util/foo"), ("X",), (),),),
- })
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "collapse_n").stable_masked_use,
+ {
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ ("X",),
+ (),
+ ),
+ ),
+ },
+ )
def test_forced_use(self, tmp_path):
self.mk_profiles(tmp_path, {})
self.assertEqualPayload(self.get_profile(tmp_path, "0").forced_use, {})
- self.mk_profiles(tmp_path,
- {"use.force":"X\nmmx\n"},
- {},
- {"use.force":"-X"})
+ self.mk_profiles(tmp_path, {"use.force": "X\nmmx\n"}, {}, {"use.force": "-X"})
- self.assertEqualPayload(self.get_profile(tmp_path, "0").forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),)})
- self.assertEqualPayload(self.get_profile(tmp_path, "1").forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),)})
- self.assertEqualPayload(self.get_profile(tmp_path, "2").forced_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),)})
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").forced_use,
+ {atrue: (chunked_data(atrue, (), ("X", "mmx")),)},
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").forced_use,
+ {atrue: (chunked_data(atrue, (), ("X", "mmx")),)},
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").forced_use,
+ {atrue: (chunked_data(atrue, ("X",), ("mmx",)),)},
+ )
- self.mk_profiles(tmp_path,
- {"use.force":"X\nmmx\n", "package.use.force":"dev-util/foo cups"},
+ self.mk_profiles(
+ tmp_path,
+ {"use.force": "X\nmmx\n", "package.use.force": "dev-util/foo cups"},
{"package.use.force": "dev-util/foo -cups"},
- {"use.force":"-X", "package.use.force": "dev-util/blah X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ("X", "mmx", "cups",)),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "1").forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('cups',), ("X", "mmx")),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "2").forced_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('cups', 'X'), ('mmx',)),),
- "dev-util/blah":(chunked_data(atom("dev-util/blah"), (), ('X', "mmx")),),
- })
-
- self.mk_profiles(tmp_path,
- {"use.force":"X", "package.use.force":"dev-util/foo -X"},
- {"use.force":"X"},
- {"package.use.force":"dev-util/foo -X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").forced_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('X',), ()),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "1").forced_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ('X',)),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "2").forced_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('X',), ()),),
- })
+ {"use.force": "-X", "package.use.force": "dev-util/blah X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ (),
+ (
+ "X",
+ "mmx",
+ "cups",
+ ),
+ ),
+ ),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("cups",), ("X", "mmx")),
+ ),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").forced_use,
+ {
+ atrue: (chunked_data(atrue, ("X",), ("mmx",)),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("cups", "X"), ("mmx",)),
+ ),
+ "dev-util/blah": (
+ chunked_data(atom("dev-util/blah"), (), ("X", "mmx")),
+ ),
+ },
+ )
+
+ self.mk_profiles(
+ tmp_path,
+ {"use.force": "X", "package.use.force": "dev-util/foo -X"},
+ {"use.force": "X"},
+ {"package.use.force": "dev-util/foo -X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), ("X",), ()),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), ("X",), ()),),
+ },
+ )
def test_stable_forced_use(self, tmp_path):
self.mk_profiles(tmp_path, {})
self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_forced_use, {})
- self.mk_profiles(tmp_path,
- {"eapi":"5", "use.stable.force":"X\nmmx\n"},
- {"eapi":"5"},
- {"eapi":"5", "use.stable.force":"-X"}
- )
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),)})
- self.assertEqualPayload(self.get_profile(tmp_path, "1").stable_forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),)})
- self.assertEqualPayload(self.get_profile(tmp_path, "2").stable_forced_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),)})
-
- self.mk_profiles(tmp_path,
- {"eapi":"5", "use.stable.force":"X\nmmx\n", "package.use.stable.force":"dev-util/foo cups"},
- {"eapi":"5", "package.use.stable.force":"dev-util/foo -cups"},
- {"eapi":"5", "use.stable.force":"-X", "package.use.stable.force":"dev-util/blah X"}
- )
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ("X", "mmx", "cups",)),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "1").stable_forced_use,
- {atrue:(chunked_data(atrue, (), ('X', 'mmx')),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('cups',), ("X", "mmx")),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "2").stable_forced_use,
- {atrue:(chunked_data(atrue, ('X',), ('mmx',)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('cups', 'X'), ('mmx',)),),
- "dev-util/blah":(chunked_data(atom("dev-util/blah"), (), ('X', "mmx")),),
- })
-
- self.mk_profiles(tmp_path,
- {"eapi":"5", "use.stable.force":"X", "package.use.stable.force":"dev-util/foo -X"},
- {"eapi":"5", "use.stable.force":"X"},
- {"eapi":"5", "package.use.stable.force":"dev-util/foo -X"})
-
- self.assertEqualPayload(self.get_profile(tmp_path, "0").stable_forced_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('X',), ()),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "1").stable_forced_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), (), ('X',)),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "2").stable_forced_use,
- {atrue:(chunked_data(atrue, (), ("X",)),),
- "dev-util/foo":(chunked_data(atom("dev-util/foo"), ('X',), ()),),
- })
+ self.mk_profiles(
+ tmp_path,
+ {"eapi": "5", "use.stable.force": "X\nmmx\n"},
+ {"eapi": "5"},
+ {"eapi": "5", "use.stable.force": "-X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").stable_forced_use,
+ {atrue: (chunked_data(atrue, (), ("X", "mmx")),)},
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").stable_forced_use,
+ {atrue: (chunked_data(atrue, (), ("X", "mmx")),)},
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").stable_forced_use,
+ {atrue: (chunked_data(atrue, ("X",), ("mmx",)),)},
+ )
+
+ self.mk_profiles(
+ tmp_path,
+ {
+ "eapi": "5",
+ "use.stable.force": "X\nmmx\n",
+ "package.use.stable.force": "dev-util/foo cups",
+ },
+ {"eapi": "5", "package.use.stable.force": "dev-util/foo -cups"},
+ {
+ "eapi": "5",
+ "use.stable.force": "-X",
+ "package.use.stable.force": "dev-util/blah X",
+ },
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(
+ atom("dev-util/foo"),
+ (),
+ (
+ "X",
+ "mmx",
+ "cups",
+ ),
+ ),
+ ),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X", "mmx")),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("cups",), ("X", "mmx")),
+ ),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, ("X",), ("mmx",)),),
+ "dev-util/foo": (
+ chunked_data(atom("dev-util/foo"), ("cups", "X"), ("mmx",)),
+ ),
+ "dev-util/blah": (
+ chunked_data(atom("dev-util/blah"), (), ("X", "mmx")),
+ ),
+ },
+ )
+
+ self.mk_profiles(
+ tmp_path,
+ {
+ "eapi": "5",
+ "use.stable.force": "X",
+ "package.use.stable.force": "dev-util/foo -X",
+ },
+ {"eapi": "5", "use.stable.force": "X"},
+ {"eapi": "5", "package.use.stable.force": "dev-util/foo -X"},
+ )
+
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), ("X",), ()),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").stable_forced_use,
+ {
+ atrue: (chunked_data(atrue, (), ("X",)),),
+ "dev-util/foo": (chunked_data(atom("dev-util/foo"), ("X",), ()),),
+ },
+ )
def test_pkg_use(self, tmp_path):
self.mk_profiles(tmp_path, {})
self.assertEqualPayload(self.get_profile(tmp_path, "0").pkg_use, {})
- self.mk_profiles(tmp_path,
- {"package.use":"dev-util/bsdiff X mmx\n"},
+ self.mk_profiles(
+ tmp_path,
+ {"package.use": "dev-util/bsdiff X mmx\n"},
{},
- {"package.use":"dev-util/bsdiff -X\n"},
- {"package.use":"dev-util/bsdiff -mmx\ndev-util/diffball X"},
- {"package.use":"dev-util/bsdiff X\ndev-util/diffball -X\n"}
- )
+ {"package.use": "dev-util/bsdiff -X\n"},
+ {"package.use": "dev-util/bsdiff -mmx\ndev-util/diffball X"},
+ {"package.use": "dev-util/bsdiff X\ndev-util/diffball -X\n"},
+ )
- self.assertEqualPayload(self.get_profile(tmp_path, "0").pkg_use,
- {'dev-util/bsdiff':
- (chunked_data(atom("dev-util/bsdiff"), (), ('X', 'mmx')),)
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "1").pkg_use,
- {'dev-util/bsdiff':
- (chunked_data(atom("dev-util/bsdiff"), (), ('X', 'mmx')),)
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "2").pkg_use,
- {'dev-util/bsdiff':
- (chunked_data(atom("dev-util/bsdiff"), ('X',), ('mmx',)),)
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "3").pkg_use,
- {'dev-util/diffball':
- (chunked_data(atom("dev-util/diffball"), (), ('X',)),),
- 'dev-util/bsdiff':
- (chunked_data(atom("dev-util/bsdiff"), ('X', 'mmx'), ()),),
- })
- self.assertEqualPayload(self.get_profile(tmp_path, "4").pkg_use,
- {'dev-util/diffball':
- (chunked_data(atom("dev-util/diffball"), ('X',), ()),),
- 'dev-util/bsdiff':
- (chunked_data(atom("dev-util/bsdiff"), ('mmx',), ('X',)),),
- })
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "0").pkg_use,
+ {
+ "dev-util/bsdiff": (
+ chunked_data(atom("dev-util/bsdiff"), (), ("X", "mmx")),
+ )
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "1").pkg_use,
+ {
+ "dev-util/bsdiff": (
+ chunked_data(atom("dev-util/bsdiff"), (), ("X", "mmx")),
+ )
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "2").pkg_use,
+ {
+ "dev-util/bsdiff": (
+ chunked_data(atom("dev-util/bsdiff"), ("X",), ("mmx",)),
+ )
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "3").pkg_use,
+ {
+ "dev-util/diffball": (
+ chunked_data(atom("dev-util/diffball"), (), ("X",)),
+ ),
+ "dev-util/bsdiff": (
+ chunked_data(atom("dev-util/bsdiff"), ("X", "mmx"), ()),
+ ),
+ },
+ )
+ self.assertEqualPayload(
+ self.get_profile(tmp_path, "4").pkg_use,
+ {
+ "dev-util/diffball": (
+ chunked_data(atom("dev-util/diffball"), ("X",), ()),
+ ),
+ "dev-util/bsdiff": (
+ chunked_data(atom("dev-util/bsdiff"), ("mmx",), ("X",)),
+ ),
+ },
+ )
def test_default_env(self, tmp_path):
- assert 'USE' in const.incrementals_unfinalized
- assert 'USE' in const.incrementals
- assert 'USE_EXPAND' in const.incrementals
+ assert "USE" in const.incrementals_unfinalized
+ assert "USE" in const.incrementals
+ assert "USE_EXPAND" in const.incrementals
# first, verify it behaves correctly for unfinalized incrementals.
self.mk_profiles(tmp_path, {})
assert not self.get_profile(tmp_path, "0").default_env
- self.mk_profiles(tmp_path,
- {"make.defaults":"USE=y\n"},
+ self.mk_profiles(
+ tmp_path,
+ {"make.defaults": "USE=y\n"},
{},
- {"make.defaults":"USE=-y\nY=foo\n"})
- assert self.get_profile(tmp_path, '0').default_env == {"USE": ('y', )}
- assert self.get_profile(tmp_path, '1').default_env == {"USE": ('y', )}
- assert self.get_profile(tmp_path, '2').default_env == {'Y': 'foo', "USE": ('y', '-y')}
+ {"make.defaults": "USE=-y\nY=foo\n"},
+ )
+ assert self.get_profile(tmp_path, "0").default_env == {"USE": ("y",)}
+ assert self.get_profile(tmp_path, "1").default_env == {"USE": ("y",)}
+ assert self.get_profile(tmp_path, "2").default_env == {
+ "Y": "foo",
+ "USE": ("y", "-y"),
+ }
# next, verify it optimizes for the finalized incrementals
self.mk_profiles(tmp_path, {})
assert not self.get_profile(tmp_path, "0").default_env
- self.mk_profiles(tmp_path,
- {"make.defaults":"USE_EXPAND=y\n"},
+ self.mk_profiles(
+ tmp_path,
+ {"make.defaults": "USE_EXPAND=y\n"},
{},
- {"make.defaults":"USE_EXPAND=-y\nY=foo\n"})
- assert self.get_profile(tmp_path, '0').default_env == {"USE_EXPAND": ('y', )}
- assert self.get_profile(tmp_path, '1').default_env == {"USE_EXPAND": ('y', )}
- assert self.get_profile(tmp_path, '2').default_env == {'Y': 'foo'}
+ {"make.defaults": "USE_EXPAND=-y\nY=foo\n"},
+ )
+ assert self.get_profile(tmp_path, "0").default_env == {"USE_EXPAND": ("y",)}
+ assert self.get_profile(tmp_path, "1").default_env == {"USE_EXPAND": ("y",)}
+ assert self.get_profile(tmp_path, "2").default_env == {"Y": "foo"}
def test_iuse_effective(self, tmp_path, tmp_path_factory):
# TODO: add subprofiles for testing incrementals
- self.mk_profiles(tmp_path,
+ self.mk_profiles(
+ tmp_path,
{},
- {'eapi': '0',
- 'make.defaults':
- 'IUSE_IMPLICIT="abi_x86_64 foo"\n'
+ {
+ "eapi": "0",
+ "make.defaults": 'IUSE_IMPLICIT="abi_x86_64 foo"\n'
'USE_EXPAND_IMPLICIT="ARCH ELIBC"\n'
'USE_EXPAND_UNPREFIXED="ARCH"\n'
'USE_EXPAND="ABI_X86 ELIBC"\n'
'USE_EXPAND_VALUES_ARCH="amd64 arm"\n'
- 'USE_EXPAND_VALUES_ELIBC="glibc uclibc"\n'},
- {'eapi': '5',
- 'make.defaults':
- 'IUSE_IMPLICIT="abi_x86_64 foo"\n'
+ 'USE_EXPAND_VALUES_ELIBC="glibc uclibc"\n',
+ },
+ {
+ "eapi": "5",
+ "make.defaults": 'IUSE_IMPLICIT="abi_x86_64 foo"\n'
'USE_EXPAND_IMPLICIT="ARCH ELIBC"\n'
'USE_EXPAND_UNPREFIXED="ARCH"\n'
'USE_EXPAND="ABI_X86 ELIBC"\n'
'USE_EXPAND_VALUES_ARCH="amd64 arm"\n'
- 'USE_EXPAND_VALUES_ELIBC="glibc uclibc"\n'})
+ 'USE_EXPAND_VALUES_ELIBC="glibc uclibc"\n',
+ },
+ )
# create repo dir and symlink profiles into it, necessary since the
# repoconfig attr is used for EAPI < 5 to retrieve known arches and
# doesn't work without a proper repo dir including a 'profiles' subdir
repo = tmp_path_factory.mktemp("repo")
- (repo / 'metadata').mkdir()
- (basepath := repo / 'profiles').symlink_to(tmp_path)
+ (repo / "metadata").mkdir()
+ (basepath := repo / "profiles").symlink_to(tmp_path)
# avoid RepoConfig warnings on initialization
- (repo / 'metadata' / 'layout.conf').write_text('repo-name = test\nmasters = gentoo\n')
+ (repo / "metadata" / "layout.conf").write_text(
+ "repo-name = test\nmasters = gentoo\n"
+ )
class RepoConfig(repo_objs.RepoConfig):
# re-inherited to disable inst-caching
@@ -1148,55 +1726,69 @@ class TestOnDiskProfile(profile_mixin):
# disable instance caching on RepoConfig otherwise the known arches
# value will be cached
- with mock.patch('pkgcore.ebuild.repo_objs.RepoConfig', RepoConfig):
- assert self.get_profile(tmp_path, '0', basepath).iuse_effective == frozenset()
- (basepath / 'arch.list').write_text('amd64\narm\n')
- assert self.get_profile(tmp_path, '0', basepath).iuse_effective == frozenset(['amd64', 'arm'])
- assert self.get_profile(tmp_path, '1', basepath).iuse_effective == frozenset(['amd64', 'arm', 'elibc_glibc', 'elibc_uclibc'])
- assert self.get_profile(tmp_path, '2', basepath).iuse_effective == frozenset(['abi_x86_64', 'foo', 'amd64', 'arm', 'abi_x86_64',
- 'elibc_glibc', 'elibc_uclibc'])
+ with mock.patch("pkgcore.ebuild.repo_objs.RepoConfig", RepoConfig):
+ assert (
+ self.get_profile(tmp_path, "0", basepath).iuse_effective == frozenset()
+ )
+ (basepath / "arch.list").write_text("amd64\narm\n")
+ assert self.get_profile(
+ tmp_path, "0", basepath
+ ).iuse_effective == frozenset(["amd64", "arm"])
+ assert self.get_profile(
+ tmp_path, "1", basepath
+ ).iuse_effective == frozenset(
+ ["amd64", "arm", "elibc_glibc", "elibc_uclibc"]
+ )
+ assert self.get_profile(
+ tmp_path, "2", basepath
+ ).iuse_effective == frozenset(
+ [
+ "abi_x86_64",
+ "foo",
+ "amd64",
+ "arm",
+ "abi_x86_64",
+ "elibc_glibc",
+ "elibc_uclibc",
+ ]
+ )
def test_provides_repo(self, tmp_path):
self.mk_profiles(tmp_path, {})
assert len(self.get_profile(tmp_path, "0").provides_repo) == 0
- self.mk_profiles(tmp_path,
- {"package.provided":"dev-util/diffball-0.7.1"})
- assert ["dev-util/diffball-0.7.1"] == [x.cpvstr for x in
- self.get_profile(tmp_path, "0").provides_repo]
+ self.mk_profiles(tmp_path, {"package.provided": "dev-util/diffball-0.7.1"})
+ assert ["dev-util/diffball-0.7.1"] == [
+ x.cpvstr for x in self.get_profile(tmp_path, "0").provides_repo
+ ]
- self.mk_profiles(tmp_path,
- {"package.provided":"dev-util/diffball-0.7.1"},
- {"package.provided":
- "-dev-util/diffball-0.7.1\ndev-util/bsdiff-0.4"}
+ self.mk_profiles(
+ tmp_path,
+ {"package.provided": "dev-util/diffball-0.7.1"},
+ {"package.provided": "-dev-util/diffball-0.7.1\ndev-util/bsdiff-0.4"},
)
- assert ["dev-util/bsdiff-0.4"] == [x.cpvstr for x in
- sorted(self.get_profile(tmp_path, "1").provides_repo)]
+ assert ["dev-util/bsdiff-0.4"] == [
+ x.cpvstr for x in sorted(self.get_profile(tmp_path, "1").provides_repo)
+ ]
def test_deprecated(self, tmp_path):
self.mk_profiles(tmp_path, {})
assert not self.get_profile(tmp_path, "0").deprecated
- self.mk_profiles(tmp_path,
- {"deprecated":"replacement\nfoon\n"},
- {}
- )
+ self.mk_profiles(tmp_path, {"deprecated": "replacement\nfoon\n"}, {})
assert not self.get_profile(tmp_path, "1").deprecated
- self.mk_profiles(tmp_path,
- {},
- {"deprecated":"replacement\nfoon\n"}
- )
+ self.mk_profiles(tmp_path, {}, {"deprecated": "replacement\nfoon\n"})
assert self.get_profile(tmp_path, "1").deprecated
def test_eapi(self, tmp_path):
self.mk_profiles(tmp_path, {})
- assert str(self.get_profile(tmp_path, "0").eapi) == '0'
+ assert str(self.get_profile(tmp_path, "0").eapi) == "0"
self.mk_profiles(tmp_path, {"eapi": "5\n"})
- assert str(self.get_profile(tmp_path, "0").eapi) == '5'
+ assert str(self.get_profile(tmp_path, "0").eapi) == "5"
def test_from_abspath(self, tmp_path):
- self.mk_profiles(tmp_path, {'name': 'profiles'}, {'name': 'profiles/1'})
- base = tmp_path / 'profiles'
- p = self.kls.from_abspath(str(base / '1'))
+ self.mk_profiles(tmp_path, {"name": "profiles"}, {"name": "profiles/1"})
+ base = tmp_path / "profiles"
+ p = self.kls.from_abspath(str(base / "1"))
assert p is not None
assert normpath(p.basepath) == normpath(str(base))
- assert normpath(p.profile) == normpath(str(base / '1'))
+ assert normpath(p.profile) == normpath(str(base / "1"))
diff --git a/tests/ebuild/test_repo_objs.py b/tests/ebuild/test_repo_objs.py
index aeaadc70c..3babe3a11 100644
--- a/tests/ebuild/test_repo_objs.py
+++ b/tests/ebuild/test_repo_objs.py
@@ -11,12 +11,17 @@ from snakeoil.mappings import ImmutableDict
class TestMetadataXml:
-
@staticmethod
- def get_metadata_xml(maintainers=(), comments=(), local_use={},
- longdescription=None, maint_type=None,
- proxied=None, stabilize_allarches=False):
- cs = '\n'.join(comments)
+ def get_metadata_xml(
+ maintainers=(),
+ comments=(),
+ local_use={},
+ longdescription=None,
+ maint_type=None,
+ proxied=None,
+ stabilize_allarches=False,
+ ):
+ cs = "\n".join(comments)
ms = us = ls = ""
if maintainers:
ms = []
@@ -27,32 +32,32 @@ class TestMetadataXml:
if len(x) > 2:
ms[-1] += f"\n<description>{x[2]}</description>"
if len(x) > 3:
- raise ValueError('maintainer data has too many fields')
- maint_type = f'type="{maint_type}"' if maint_type is not None else ''
- proxied = f'proxied="{proxied}"' if proxied is not None else ''
- ms = '\n'.join(f'<maintainer {maint_type} {proxied}>{x}</maintainer>'
- for x in ms)
+ raise ValueError("maintainer data has too many fields")
+ maint_type = f'type="{maint_type}"' if maint_type is not None else ""
+ proxied = f'proxied="{proxied}"' if proxied is not None else ""
+ ms = "\n".join(
+ f"<maintainer {maint_type} {proxied}>{x}</maintainer>" for x in ms
+ )
if local_use:
- us = ['<use>']
+ us = ["<use>"]
for flag, desc in local_use.items():
us.append(f'<flag name="{flag}">{desc}</flag>')
- us.append('</use>')
- us = '\n'.join(us)
+ us.append("</use>")
+ us = "\n".join(us)
if longdescription:
ls = f"<longdescription>{longdescription}</longdescription>\n"
sa = "<stabilize-allarches/>" if stabilize_allarches else ""
- s = \
-f"""<?xml version="1.0" encoding="UTF-8"?>
+ s = f"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">
<pkgmetadata>
{cs}{ms}{us}{ls}{sa}</pkgmetadata>"""
- return repo_objs.MetadataXml(data_source(s.encode('utf-8')))
+ return repo_objs.MetadataXml(data_source(s.encode("utf-8")))
def test_empty_maintainers(self):
assert () == self.get_metadata_xml().maintainers
def test_maintainer_needed(self):
- mx = self.get_metadata_xml(comments=('<!-- maintainer-needed -->',))
+ mx = self.get_metadata_xml(comments=("<!-- maintainer-needed -->",))
assert mx.maintainers == ()
def test_multiple_maintainers(self):
@@ -64,9 +69,11 @@ f"""<?xml version="1.0" encoding="UTF-8"?>
def test_maintainer_name_with_email(self):
mx = self.get_metadata_xml(
- maintainers=(("funkymonkey@gmail.com", "funky monkey \N{SNOWMAN}"),))
- assert ("funky monkey \N{SNOWMAN} <funkymonkey@gmail.com>",) == \
- tuple(map(str, mx.maintainers))
+ maintainers=(("funkymonkey@gmail.com", "funky monkey \N{SNOWMAN}"),)
+ )
+ assert ("funky monkey \N{SNOWMAN} <funkymonkey@gmail.com>",) == tuple(
+ map(str, mx.maintainers)
+ )
assert "funkymonkey@gmail.com" in mx.maintainers
assert "funky monkey \N{SNOWMAN}" in mx.maintainers
assert "funkymonkey@gmail.com" == mx.maintainers[0].email
@@ -76,8 +83,7 @@ f"""<?xml version="1.0" encoding="UTF-8"?>
assert mx.maintainers[0].proxied is None
def test_maintainer_with_desc(self):
- mx = self.get_metadata_xml(
- maintainers=(("foo@bar.com", "foobar", "Foobar"),))
+ mx = self.get_metadata_xml(maintainers=(("foo@bar.com", "foobar", "Foobar"),))
assert ("foobar <foo@bar.com> (Foobar)",) == tuple(map(str, mx.maintainers))
assert "foo@bar.com" in mx.maintainers
assert "foobar" in mx.maintainers
@@ -89,8 +95,8 @@ f"""<?xml version="1.0" encoding="UTF-8"?>
def test_maintainer_with_type(self):
mx = self.get_metadata_xml(
- maintainers=(("foo@bar.com", "foobar"),),
- maint_type='person')
+ maintainers=(("foo@bar.com", "foobar"),), maint_type="person"
+ )
assert ("foobar <foo@bar.com>",) == tuple(map(str, mx.maintainers))
assert "foo@bar.com" in mx.maintainers
assert "foobar" in mx.maintainers
@@ -102,9 +108,8 @@ f"""<?xml version="1.0" encoding="UTF-8"?>
def test_maintainer_with_proxied(self):
mx = self.get_metadata_xml(
- maintainers=(("foo@bar.com", "foobar"),),
- maint_type='person',
- proxied='yes')
+ maintainers=(("foo@bar.com", "foobar"),), maint_type="person", proxied="yes"
+ )
assert ("foobar <foo@bar.com>",) == tuple(map(str, mx.maintainers))
assert "foo@bar.com" in mx.maintainers
assert "foobar" in mx.maintainers
@@ -123,17 +128,14 @@ f"""<?xml version="1.0" encoding="UTF-8"?>
"bar": "description for bar (<pkg>app-foo/bar</pkg> required)",
}
metadata_xml = self.get_metadata_xml(local_use=local_use)
- pkg_tag_re = re.compile(r'</?pkg>')
- local_use = dict(
- (k, pkg_tag_re.sub('', v))
- for k, v in local_use.items())
+ pkg_tag_re = re.compile(r"</?pkg>")
+ local_use = dict((k, pkg_tag_re.sub("", v)) for k, v in local_use.items())
assert local_use == metadata_xml.local_use
def test_longdesc(self):
# empty...
assert None == self.get_metadata_xml().longdescription
- s = \
-"""
+ s = """
I saw the best minds of my generation destroyed by madness, starving
hysterical naked, dragging themselves throughout the negro streets at dawn
looking for an angry fix, angle-headed hipsters burning for the ancient
@@ -146,17 +148,21 @@ through universities with radiant cool eyes hallucinating Arkansas and
Blake-light tragedy among the scholars of war.
"""
- assert " ".join(s.split()) == self.get_metadata_xml(longdescription=s).longdescription
+ assert (
+ " ".join(s.split())
+ == self.get_metadata_xml(longdescription=s).longdescription
+ )
def test_stabilize_allarches(self):
# missing
assert False == self.get_metadata_xml().stabilize_allarches
# present
- assert True == self.get_metadata_xml(stabilize_allarches=True).stabilize_allarches
+ assert (
+ True == self.get_metadata_xml(stabilize_allarches=True).stabilize_allarches
+ )
class TestProjectsXml:
-
@staticmethod
def get_projects_xml(s=None):
default_s = """<?xml version="1.0" encoding="UTF-8"?>
@@ -254,16 +260,17 @@ class TestProjectsXml:
</projects>"""
if s is None:
s = default_s
- return repo_objs.ProjectsXml(data_source(s.encode('utf-8')))
+ return repo_objs.ProjectsXml(data_source(s.encode("utf-8")))
def test_empty(self):
- assert self.get_projects_xml('').projects == {}
+ assert self.get_projects_xml("").projects == {}
def test_invalid_xml(self):
- assert self.get_projects_xml('<foo><bar></foo>').projects == {}
+ assert self.get_projects_xml("<foo><bar></foo>").projects == {}
def test_project_member_without_email(self):
- p = self.get_projects_xml("""<?xml version="1.0" encoding="UTF-8"?>
+ p = self.get_projects_xml(
+ """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE projects SYSTEM "http://www.gentoo.org/dtd/projects.dtd">
<projects>
<project>
@@ -277,11 +284,13 @@ class TestProjectsXml:
</member>
</project>
</projects>
-""").projects['nolead@example.com']
+"""
+ ).projects["nolead@example.com"]
assert p.members == ()
def test_subproject_without_ref(self):
- p = self.get_projects_xml("""<?xml version="1.0" encoding="UTF-8"?>
+ p = self.get_projects_xml(
+ """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE projects SYSTEM "http://www.gentoo.org/dtd/projects.dtd">
<projects>
<project>
@@ -297,11 +306,13 @@ class TestProjectsXml:
<subproject inherit-members="1"/>
</project>
</projects>
-""").projects['nolead@example.com']
+"""
+ ).projects["nolead@example.com"]
assert p.subprojects == ()
def test_subproject_with_invalid_ref(self):
- p = self.get_projects_xml("""<?xml version="1.0" encoding="UTF-8"?>
+ p = self.get_projects_xml(
+ """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE projects SYSTEM "http://www.gentoo.org/dtd/projects.dtd">
<projects>
<project>
@@ -317,12 +328,14 @@ class TestProjectsXml:
<subproject ref="nonexist@example.com" inherit-members="1"/>
</project>
</projects>
-""").projects['nolead@example.com']
+"""
+ ).projects["nolead@example.com"]
assert p.subprojects[0].project is None
assert p.recursive_members == p.members
def test_deep_subproject_with_invalid_ref(self):
- p = self.get_projects_xml("""<?xml version="1.0" encoding="UTF-8"?>
+ p = self.get_projects_xml(
+ """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE projects SYSTEM "http://www.gentoo.org/dtd/projects.dtd">
<projects>
<project>
@@ -345,33 +358,37 @@ class TestProjectsXml:
<subproject ref="nolead@example.com" inherit-members="1"/>
</project>
</projects>
-""").projects
- assert (p['subprojects@example.com'].recursive_members ==
- p['nolead@example.com'].members)
+"""
+ ).projects
+ assert (
+ p["subprojects@example.com"].recursive_members
+ == p["nolead@example.com"].members
+ )
def test_basic_metadata(self):
projects = self.get_projects_xml().projects
for email, project in projects.items():
assert project.email == email
- assert project.name.startswith('Project with')
+ assert project.name.startswith("Project with")
assert project.url == (
- 'https://projects.example.com/' + email.split('@')[0])
- assert project.description == 'Here is the description'
+ "https://projects.example.com/" + email.split("@")[0]
+ )
+ assert project.description == "Here is the description"
def test_no_members(self):
- p = self.get_projects_xml().projects['nomembers@example.com']
+ p = self.get_projects_xml().projects["nomembers@example.com"]
assert p.members == ()
assert p.leads == ()
assert p.subprojects == ()
assert p.recursive_members == ()
def test_no_lead(self):
- p = self.get_projects_xml().projects['nolead@example.com']
- assert p.members[0].email == 'fulldev1@example.com'
- assert p.members[0].name == 'Full Dev'
- assert p.members[0].role == 'Somebody'
+ p = self.get_projects_xml().projects["nolead@example.com"]
+ assert p.members[0].email == "fulldev1@example.com"
+ assert p.members[0].name == "Full Dev"
+ assert p.members[0].role == "Somebody"
assert not p.members[0].is_lead
- assert p.members[1].email == 'dev1@example.com'
+ assert p.members[1].email == "dev1@example.com"
assert p.members[1].name is None
assert p.members[1].role is None
assert not p.members[1].is_lead
@@ -380,12 +397,12 @@ class TestProjectsXml:
assert p.recursive_members == p.members
def test_have_lead(self):
- p = self.get_projects_xml().projects['lead@example.com']
- assert p.members[0].email == 'fulldev2@example.com'
- assert p.members[0].name == 'Full Dev'
- assert p.members[0].role == 'Somebody'
+ p = self.get_projects_xml().projects["lead@example.com"]
+ assert p.members[0].email == "fulldev2@example.com"
+ assert p.members[0].name == "Full Dev"
+ assert p.members[0].role == "Somebody"
assert not p.members[0].is_lead
- assert p.members[1].email == 'dev2@example.com'
+ assert p.members[1].email == "dev2@example.com"
assert p.members[1].name is None
assert p.members[1].role is None
assert p.members[1].is_lead
@@ -394,12 +411,12 @@ class TestProjectsXml:
assert p.recursive_members == p.members
def test_subprojects(self):
- p = self.get_projects_xml().projects['subprojects@example.com']
- assert p.subprojects[0].email == 'nolead@example.com'
- assert p.subprojects[0].name == 'Project with no lead'
+ p = self.get_projects_xml().projects["subprojects@example.com"]
+ assert p.subprojects[0].email == "nolead@example.com"
+ assert p.subprojects[0].name == "Project with no lead"
assert not p.subprojects[0].inherit_members
- assert p.subprojects[1].email == 'lead@example.com'
- assert p.subprojects[1].name == 'Project with a lead'
+ assert p.subprojects[1].email == "lead@example.com"
+ assert p.subprojects[1].name == "Project with a lead"
assert not p.subprojects[1].inherit_members
assert p.recursive_members == p.members
@@ -407,118 +424,119 @@ class TestProjectsXml:
def unlead(members):
for m in members:
yield repo_objs.ProjectMember(
- email=m.email, name=m.name, role=m.role, is_lead=False)
+ email=m.email, name=m.name, role=m.role, is_lead=False
+ )
def test_recursive_subprojects(self):
projects = self.get_projects_xml().projects
- p = projects['recursive-subprojects@example.com']
- sp = projects['nolead@example.com']
- assert p.subprojects[0].email == 'nolead@example.com'
- assert p.subprojects[0].name == 'Project with no lead'
+ p = projects["recursive-subprojects@example.com"]
+ sp = projects["nolead@example.com"]
+ assert p.subprojects[0].email == "nolead@example.com"
+ assert p.subprojects[0].name == "Project with no lead"
assert p.subprojects[0].inherit_members
- assert p.subprojects[1].email == 'lead@example.com'
- assert p.subprojects[1].name == 'Project with a lead'
+ assert p.subprojects[1].email == "lead@example.com"
+ assert p.subprojects[1].name == "Project with a lead"
assert not p.subprojects[1].inherit_members
# one extra member should be inherited from sp
- assert p.recursive_members == (
- p.members + tuple(self.unlead(sp.members[:1])))
+ assert p.recursive_members == (p.members + tuple(self.unlead(sp.members[:1])))
# lead of sp should not be considered lead of p
assert not p.recursive_members[3].is_lead
def test_super_recursive_subprojects(self):
projects = self.get_projects_xml().projects
- p = projects['super-recursive-subprojects@example.com']
- sp = projects['recursive-subprojects@example.com']
- ssp = projects['nolead@example.com']
- assert p.subprojects[0].email == 'recursive-subprojects@example.com'
- assert p.subprojects[0].name == 'Project with recursive subprojects'
+ p = projects["super-recursive-subprojects@example.com"]
+ sp = projects["recursive-subprojects@example.com"]
+ ssp = projects["nolead@example.com"]
+ assert p.subprojects[0].email == "recursive-subprojects@example.com"
+ assert p.subprojects[0].name == "Project with recursive subprojects"
assert p.subprojects[0].inherit_members
# one extra member should be inherited from ssp
assert p.recursive_members == (
- p.members + tuple(self.unlead(sp.members + ssp.members[:1])))
+ p.members + tuple(self.unlead(sp.members + ssp.members[:1]))
+ )
# lead of sp should not be considered lead of p
assert not p.recursive_members[5].is_lead
class TestRepoConfig:
-
@pytest.fixture(autouse=True)
def _setup(self, tmpdir):
self.repo_path = str(tmpdir)
- self.profiles_base = os.path.join(self.repo_path, 'profiles')
+ self.profiles_base = os.path.join(self.repo_path, "profiles")
self.metadata_path = os.path.join(
- self.repo_path, repo_objs.RepoConfig.layout_offset)
- self.eapi_path = os.path.join(self.profiles_base, 'eapi')
+ self.repo_path, repo_objs.RepoConfig.layout_offset
+ )
+ self.eapi_path = os.path.join(self.profiles_base, "eapi")
def test_nonexistent_repo(self):
# Newly configured, nonexistent repos shouldn't cause issues.
- repo_config = repo_objs.RepoConfig('nonexistent')
- assert repo_config.location == 'nonexistent'
+ repo_config = repo_objs.RepoConfig("nonexistent")
+ assert repo_config.location == "nonexistent"
def test_default_eapi(self):
os.mkdir(self.profiles_base)
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert str(repo_config.eapi) == '0'
+ assert str(repo_config.eapi) == "0"
def test_empty_file_eapi(self):
os.mkdir(self.profiles_base)
touch(self.eapi_path)
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert str(repo_config.eapi) == '0'
+ assert str(repo_config.eapi) == "0"
def test_empty_content_eapi(self):
os.mkdir(self.profiles_base)
- with open(self.eapi_path, 'w+') as f:
- f.write(' \n')
+ with open(self.eapi_path, "w+") as f:
+ f.write(" \n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert str(repo_config.eapi) == '0'
+ assert str(repo_config.eapi) == "0"
def test_unknown_eapi(self):
os.mkdir(self.profiles_base)
- with open(self.eapi_path, 'w+') as f:
- f.write('unknown_eapi')
+ with open(self.eapi_path, "w+") as f:
+ f.write("unknown_eapi")
with pytest.raises(repo_errors.UnsupportedRepo) as excinfo:
repo_objs.RepoConfig(self.repo_path)
assert isinstance(excinfo.value.repo, repo_objs.RepoConfig)
def test_known_eapi(self):
os.mkdir(self.profiles_base)
- with open(self.eapi_path, 'w+') as f:
- f.write('6')
+ with open(self.eapi_path, "w+") as f:
+ f.write("6")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert str(repo_config.eapi) == '6'
+ assert str(repo_config.eapi) == "6"
def test_bad_data_known_eapi(self, caplog):
os.mkdir(self.profiles_base)
- with open(self.eapi_path, 'w+') as f:
- f.write('4\nfoo\nbar')
+ with open(self.eapi_path, "w+") as f:
+ f.write("4\nfoo\nbar")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert str(repo_config.eapi) == '4'
- assert 'multiple lines detected' in caplog.text
+ assert str(repo_config.eapi) == "4"
+ assert "multiple lines detected" in caplog.text
def test_bad_data_unknown_eapi(self, caplog):
os.mkdir(self.profiles_base)
- with open(self.eapi_path, 'w+') as f:
- f.write('eapi\nfoo\nbar')
+ with open(self.eapi_path, "w+") as f:
+ f.write("eapi\nfoo\nbar")
with pytest.raises(repo_errors.UnsupportedRepo) as excinfo:
repo_objs.RepoConfig(self.repo_path)
assert isinstance(excinfo.value.repo, repo_objs.RepoConfig)
- assert 'multiple lines detected' in caplog.text
+ assert "multiple lines detected" in caplog.text
def test_is_empty(self, caplog):
caplog.set_level(logging.DEBUG)
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
+ repo_config = repo_objs.RepoConfig("nonexistent")
assert repo_config.is_empty
- assert caplog.text == ''
+ assert caplog.text == ""
caplog.clear()
del repo_config
# empty repo
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.is_empty
- assert 'repo is empty:' in caplog.text
+ assert "repo is empty:" in caplog.text
caplog.clear()
del repo_config
@@ -542,79 +560,79 @@ class TestRepoConfig:
del repo_config
# bad data formatting
- with open(self.metadata_path, 'w') as f:
- f.write('repo-name repo')
+ with open(self.metadata_path, "w") as f:
+ f.write("repo-name repo")
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.repo_name is None
- assert 'bash parse error' in caplog.text
+ assert "bash parse error" in caplog.text
caplog.clear()
del repo_config
# bad data formatting + name
- with open(self.metadata_path, 'w') as f:
- f.write('foo bar\nrepo-name = repo0')
+ with open(self.metadata_path, "w") as f:
+ f.write("foo bar\nrepo-name = repo0")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.repo_name == 'repo0'
- assert 'bash parse error' in caplog.text
+ assert repo_config.repo_name == "repo0"
+ assert "bash parse error" in caplog.text
caplog.clear()
del repo_config
# unset
- with open(self.metadata_path, 'w') as f:
- f.write('repo-name =')
+ with open(self.metadata_path, "w") as f:
+ f.write("repo-name =")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.repo_name == ''
+ assert repo_config.repo_name == ""
del repo_config
# whitespace
- with open(self.metadata_path, 'w') as f:
- f.write('repo-name = \n')
+ with open(self.metadata_path, "w") as f:
+ f.write("repo-name = \n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.repo_name == ''
+ assert repo_config.repo_name == ""
del repo_config
# whitespace + name
- with open(self.metadata_path, 'w') as f:
- f.write('repo-name = repo \n')
+ with open(self.metadata_path, "w") as f:
+ f.write("repo-name = repo \n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.repo_name == 'repo'
+ assert repo_config.repo_name == "repo"
del repo_config
# regular name
- with open(self.metadata_path, 'w') as f:
- f.write('repo-name = repo1')
+ with open(self.metadata_path, "w") as f:
+ f.write("repo-name = repo1")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.repo_name == 'repo1'
+ assert repo_config.repo_name == "repo1"
del repo_config
def test_manifests(self):
# nonexistent file
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.manifests == {
- 'disabled': False,
- 'strict': True,
- 'thin': False,
- 'signed': True,
- 'hashes': repo_objs.RepoConfig.default_hashes,
- 'required_hashes': repo_objs.RepoConfig.default_required_hashes,
+ "disabled": False,
+ "strict": True,
+ "thin": False,
+ "signed": True,
+ "hashes": repo_objs.RepoConfig.default_hashes,
+ "required_hashes": repo_objs.RepoConfig.default_required_hashes,
}
del repo_config
# regular data
os.mkdir(os.path.dirname(self.metadata_path))
- with open(self.metadata_path, 'w') as f:
- f.write('manifest-hashes = foo\n')
- f.write('manifest-required-hashes = bar\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("manifest-hashes = foo\n")
+ f.write("manifest-required-hashes = bar\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.manifests.hashes == ('size', 'foo')
- assert repo_config.manifests.required_hashes == ('size', 'bar')
+ assert repo_config.manifests.hashes == ("size", "foo")
+ assert repo_config.manifests.required_hashes == ("size", "bar")
del repo_config
def test_masters(self, caplog):
# empty repo
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.masters == ()
- assert caplog.text == ''
+ assert caplog.text == ""
caplog.clear()
del repo_config
@@ -628,142 +646,142 @@ class TestRepoConfig:
# explicit empty masters for standalone repo
os.mkdir(os.path.dirname(self.metadata_path))
- with open(self.metadata_path, 'w') as f:
- f.write('masters =\n')
- repo_config = repo_objs.RepoConfig(self.repo_path, config_name='foo')
+ with open(self.metadata_path, "w") as f:
+ f.write("masters =\n")
+ repo_config = repo_objs.RepoConfig(self.repo_path, config_name="foo")
assert repo_config.masters == ()
- assert caplog.text == ''
+ assert caplog.text == ""
caplog.clear()
del repo_config
# overlay repo with masters
- with open(self.metadata_path, 'w') as f:
- f.write('masters = foo bar\n')
- repo_config = repo_objs.RepoConfig(self.repo_path, config_name='a')
- assert repo_config.masters == ('foo', 'bar')
+ with open(self.metadata_path, "w") as f:
+ f.write("masters = foo bar\n")
+ repo_config = repo_objs.RepoConfig(self.repo_path, config_name="a")
+ assert repo_config.masters == ("foo", "bar")
del repo_config
# overlay repo with duplicate masters
- with open(self.metadata_path, 'w') as f:
- f.write('masters = foo bar foo baz\n')
- repo_config = repo_objs.RepoConfig(self.repo_path, config_name='b')
- assert repo_config.masters == ('foo', 'bar', 'baz')
+ with open(self.metadata_path, "w") as f:
+ f.write("masters = foo bar foo baz\n")
+ repo_config = repo_objs.RepoConfig(self.repo_path, config_name="b")
+ assert repo_config.masters == ("foo", "bar", "baz")
del repo_config
def test_cache_format(self, caplog):
# empty repo
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.cache_format == 'md5-dict'
+ assert repo_config.cache_format == "md5-dict"
del repo_config
# explicit empty setting
os.mkdir(os.path.dirname(self.metadata_path))
- with open(self.metadata_path, 'w') as f:
- f.write('cache-formats =\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("cache-formats =\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.cache_format is None
del repo_config
# unknown formats
- with open(self.metadata_path, 'w') as f:
- f.write('cache-formats = foo bar\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("cache-formats = foo bar\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.cache_format == 'md5-dict'
- assert 'unknown cache format:' in caplog.text
+ assert repo_config.cache_format == "md5-dict"
+ assert "unknown cache format:" in caplog.text
caplog.clear()
del repo_config
# known format
- with open(self.metadata_path, 'w') as f:
- f.write('cache-formats = pms\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("cache-formats = pms\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.cache_format == 'pms'
+ assert repo_config.cache_format == "pms"
del repo_config
# multiple formats -- favored format is selected
- with open(self.metadata_path, 'w') as f:
- f.write('cache-formats = pms md5-dict\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("cache-formats = pms md5-dict\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.cache_format == 'md5-dict'
+ assert repo_config.cache_format == "md5-dict"
del repo_config
# unknown + known
- with open(self.metadata_path, 'w') as f:
- f.write('cache-formats = foo md5-dict\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("cache-formats = foo md5-dict\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.cache_format == 'md5-dict'
+ assert repo_config.cache_format == "md5-dict"
del repo_config
def test_profile_formats(self, caplog):
os.mkdir(self.profiles_base)
- with open(os.path.join(self.profiles_base, 'repo_name'), 'w') as f:
- f.write('pms_name')
+ with open(os.path.join(self.profiles_base, "repo_name"), "w") as f:
+ f.write("pms_name")
# empty repo
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.profile_formats == {'pms'}
+ assert repo_config.profile_formats == {"pms"}
del repo_config
caplog.clear()
# explicit empty setting
os.mkdir(os.path.dirname(self.metadata_path))
- with open(self.metadata_path, 'w') as f:
- f.write('masters =\nprofile-formats =\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("masters =\nprofile-formats =\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.profile_formats == {'pms'}
+ assert repo_config.profile_formats == {"pms"}
assert not caplog.text
caplog.clear()
del repo_config
# message shown at info log level
caplog.set_level(logging.INFO)
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert 'has explicitly unset profile-formats' in caplog.text
+ assert "has explicitly unset profile-formats" in caplog.text
caplog.clear()
del repo_config
# unknown formats
caplog.set_level(logging.WARNING)
- with open(self.metadata_path, 'w') as f:
- f.write('masters =\nprofile-formats = foo bar\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("masters =\nprofile-formats = foo bar\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.profile_formats == {'pms'}
+ assert repo_config.profile_formats == {"pms"}
assert not caplog.text
caplog.clear()
del repo_config
# message shown at info log level
caplog.set_level(logging.INFO)
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert 'has unsupported profile format' in caplog.text
+ assert "has unsupported profile format" in caplog.text
caplog.clear()
del repo_config
# unknown + known
caplog.set_level(logging.WARNING)
- with open(self.metadata_path, 'w') as f:
- f.write('masters =\nprofile-formats = foo portage-2\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("masters =\nprofile-formats = foo portage-2\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.profile_formats == {'pms', 'portage-2'}
+ assert repo_config.profile_formats == {"pms", "portage-2"}
assert not caplog.text
caplog.clear()
del repo_config
# message shown at info log level
caplog.set_level(logging.INFO)
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert 'has unsupported profile format' in caplog.text
+ assert "has unsupported profile format" in caplog.text
caplog.clear()
del repo_config
# known formats
caplog.set_level(logging.WARNING)
- with open(self.metadata_path, 'w') as f:
- f.write('profile-formats = portage-1 portage-2\n')
+ with open(self.metadata_path, "w") as f:
+ f.write("profile-formats = portage-1 portage-2\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.profile_formats == {'portage-1', 'portage-2'}
+ assert repo_config.profile_formats == {"portage-1", "portage-2"}
del repo_config
def test_pms_repo_name(self):
os.mkdir(self.profiles_base)
- repo_name_path = os.path.join(self.profiles_base, 'repo_name')
+ repo_name_path = os.path.join(self.profiles_base, "repo_name")
# nonexistent file
repo_config = repo_objs.RepoConfig(self.repo_path)
@@ -773,61 +791,61 @@ class TestRepoConfig:
# empty file
touch(repo_name_path)
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.pms_repo_name == ''
+ assert repo_config.pms_repo_name == ""
del repo_config
# whitespace
- with open(repo_name_path, 'w') as f:
- f.write(' \n')
+ with open(repo_name_path, "w") as f:
+ f.write(" \n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.pms_repo_name == ''
+ assert repo_config.pms_repo_name == ""
del repo_config
# whitespace + name
- with open(repo_name_path, 'w') as f:
- f.write(' repo \n')
+ with open(repo_name_path, "w") as f:
+ f.write(" repo \n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.pms_repo_name == 'repo'
+ assert repo_config.pms_repo_name == "repo"
del repo_config
# regular name
- with open(repo_name_path, 'w') as f:
- f.write('newrepo')
+ with open(repo_name_path, "w") as f:
+ f.write("newrepo")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.pms_repo_name == 'newrepo'
+ assert repo_config.pms_repo_name == "newrepo"
del repo_config
# regular name EOLed
- with open(repo_name_path, 'w') as f:
- f.write('newrepo2\n')
+ with open(repo_name_path, "w") as f:
+ f.write("newrepo2\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.pms_repo_name == 'newrepo2'
+ assert repo_config.pms_repo_name == "newrepo2"
del repo_config
# multi-line
- with open(repo_name_path, 'w') as f:
- f.write('newrepo3\nfoobar')
+ with open(repo_name_path, "w") as f:
+ f.write("newrepo3\nfoobar")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.pms_repo_name == 'newrepo3'
+ assert repo_config.pms_repo_name == "newrepo3"
del repo_config
# binary data
- with open(repo_name_path, 'wb') as f:
- f.write(b'\x6e\x65\x77\x72\x65\x70\x6f\x34')
+ with open(repo_name_path, "wb") as f:
+ f.write(b"\x6e\x65\x77\x72\x65\x70\x6f\x34")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.pms_repo_name == 'newrepo4'
+ assert repo_config.pms_repo_name == "newrepo4"
del repo_config
def test_repo_id(self, caplog):
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
- assert repo_config.repo_id == 'nonexistent'
+ repo_config = repo_objs.RepoConfig("nonexistent")
+ assert repo_config.repo_id == "nonexistent"
del repo_config
# empty repo
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.repo_id == self.repo_path
- assert caplog.text == ''
+ assert caplog.text == ""
caplog.clear()
del repo_config
@@ -835,53 +853,53 @@ class TestRepoConfig:
os.mkdir(self.profiles_base)
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.repo_id == self.repo_path
- assert 'repo lacks a defined name:' in caplog.text
+ assert "repo lacks a defined name:" in caplog.text
caplog.clear()
del repo_config
# pms repo name exists
- with open(os.path.join(self.profiles_base, 'repo_name'), 'w') as f:
- f.write('pms_name')
+ with open(os.path.join(self.profiles_base, "repo_name"), "w") as f:
+ f.write("pms_name")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.repo_id == 'pms_name'
+ assert repo_config.repo_id == "pms_name"
del repo_config
# layout.conf repo name exists
os.mkdir(os.path.dirname(self.metadata_path))
- with open(self.metadata_path, 'w') as f:
- f.write('repo-name = metadata_name')
+ with open(self.metadata_path, "w") as f:
+ f.write("repo-name = metadata_name")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.repo_id == 'metadata_name'
+ assert repo_config.repo_id == "metadata_name"
del repo_config
# config name exists
- repo_config = repo_objs.RepoConfig(self.repo_path, config_name='config_name')
- assert repo_config.repo_id == 'config_name'
+ repo_config = repo_objs.RepoConfig(self.repo_path, config_name="config_name")
+ assert repo_config.repo_id == "config_name"
del repo_config
def test_known_arches(self):
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
+ repo_config = repo_objs.RepoConfig("nonexistent")
assert repo_config.known_arches == frozenset()
del repo_config
# empty file
os.mkdir(self.profiles_base)
- arches_path = os.path.join(self.profiles_base, 'arch.list')
+ arches_path = os.path.join(self.profiles_base, "arch.list")
touch(arches_path)
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.known_arches == frozenset()
del repo_config
# single entry
- with open(arches_path, 'w') as f:
- f.write('foo')
+ with open(arches_path, "w") as f:
+ f.write("foo")
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.known_arches == frozenset(['foo'])
+ assert repo_config.known_arches == frozenset(["foo"])
del repo_config
# multiple entries with whitespaces and comments
- with open(arches_path, 'w') as f:
+ with open(arches_path, "w") as f:
f.write(
"""
amd64
@@ -889,64 +907,67 @@ class TestRepoConfig:
# prefix
foo-bar
- """)
+ """
+ )
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.known_arches == frozenset(['amd64', 'x86', 'foo-bar'])
+ assert repo_config.known_arches == frozenset(["amd64", "x86", "foo-bar"])
del repo_config
def test_arches_desc(self):
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
- empty = {'stable': set(), 'transitional': set(), 'testing': set()}
+ repo_config = repo_objs.RepoConfig("nonexistent")
+ empty = {"stable": set(), "transitional": set(), "testing": set()}
assert repo_config.arches_desc == ImmutableDict(empty)
del repo_config
# empty file
os.mkdir(self.profiles_base)
- arches_desc_path = os.path.join(self.profiles_base, 'arches.desc')
+ arches_desc_path = os.path.join(self.profiles_base, "arches.desc")
touch(arches_desc_path)
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.arches_desc == ImmutableDict(empty)
del repo_config
# regular entries
- with open(os.path.join(self.profiles_base, 'arch.list'), 'w') as f:
+ with open(os.path.join(self.profiles_base, "arch.list"), "w") as f:
f.write(
"""
amd64
alpha
foo
- """)
- with open(arches_desc_path, 'w') as f:
+ """
+ )
+ with open(arches_desc_path, "w") as f:
f.write(
"""
# arches.desc file
amd64 stable
alpha testing
- """)
+ """
+ )
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.arches_desc['stable'] == {'amd64'}
- assert repo_config.arches_desc['testing'] == {'alpha'}
- assert repo_config.arches_desc['transitional'] == set()
+ assert repo_config.arches_desc["stable"] == {"amd64"}
+ assert repo_config.arches_desc["testing"] == {"alpha"}
+ assert repo_config.arches_desc["transitional"] == set()
del repo_config
def test_use_desc(self):
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
+ repo_config = repo_objs.RepoConfig("nonexistent")
assert repo_config.use_desc == ()
del repo_config
# empty file
os.mkdir(self.profiles_base)
- use_desc_path = os.path.join(self.profiles_base, 'use.desc')
+ use_desc_path = os.path.join(self.profiles_base, "use.desc")
touch(use_desc_path)
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.use_desc == ()
del repo_config
# regular entries
- with open(use_desc_path, 'w') as f:
+ with open(use_desc_path, "w") as f:
f.write(
"""
# copy
@@ -955,28 +976,29 @@ class TestRepoConfig:
foo1 - enable foo1
foo2 - enable foo2
bar3 - add bar3 support
- """)
+ """
+ )
repo_config = repo_objs.RepoConfig(self.repo_path)
assert 3 == len(repo_config.use_desc)
del repo_config
def test_use_expand_desc(self):
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
+ repo_config = repo_objs.RepoConfig("nonexistent")
assert repo_config.use_expand_desc == {}
del repo_config
# empty file
- use_expand_desc_path = os.path.join(self.profiles_base, 'desc')
+ use_expand_desc_path = os.path.join(self.profiles_base, "desc")
os.makedirs(use_expand_desc_path)
- use_expand_desc_file = os.path.join(use_expand_desc_path, 'foo.desc')
+ use_expand_desc_file = os.path.join(use_expand_desc_path, "foo.desc")
touch(use_expand_desc_file)
repo_config = repo_objs.RepoConfig(self.repo_path)
- assert repo_config.use_expand_desc == {'foo': ()}
+ assert repo_config.use_expand_desc == {"foo": ()}
del repo_config
# regular entries
- with open(use_expand_desc_file, 'w') as f:
+ with open(use_expand_desc_file, "w") as f:
f.write(
"""
# copy
@@ -984,31 +1006,30 @@ class TestRepoConfig:
bar - add bar support
baz - build using baz
- """)
+ """
+ )
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.use_expand_desc == {
- 'foo': (
- ('foo_bar', 'add bar support'),
- ('foo_baz', 'build using baz')
- )}
+ "foo": (("foo_bar", "add bar support"), ("foo_baz", "build using baz"))
+ }
del repo_config
def test_use_local_desc(self):
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
+ repo_config = repo_objs.RepoConfig("nonexistent")
assert repo_config.use_local_desc == ()
del repo_config
# empty file
os.mkdir(self.profiles_base)
- use_local_desc_path = os.path.join(self.profiles_base, 'use.local.desc')
+ use_local_desc_path = os.path.join(self.profiles_base, "use.local.desc")
touch(use_local_desc_path)
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.use_local_desc == ()
del repo_config
# regular entries
- with open(use_local_desc_path, 'w') as f:
+ with open(use_local_desc_path, "w") as f:
f.write(
"""
# copy
@@ -1017,20 +1038,21 @@ class TestRepoConfig:
cat/pkg1:foo1 - enable foo1
cat1/pkg2:foo2 - enable foo2
cat2/pkg3:bar3 - add bar3 support
- """)
+ """
+ )
repo_config = repo_objs.RepoConfig(self.repo_path)
assert 3 == len(repo_config.use_local_desc)
del repo_config
def test_updates(self):
# nonexistent repo
- repo_config = repo_objs.RepoConfig('nonexistent')
+ repo_config = repo_objs.RepoConfig("nonexistent")
assert repo_config.updates == {}
del repo_config
# empty file
- updates_path = os.path.join(self.profiles_base, 'updates')
- updates_file_path = os.path.join(updates_path, '1Q-2019')
+ updates_path = os.path.join(self.profiles_base, "updates")
+ updates_file_path = os.path.join(updates_path, "1Q-2019")
os.makedirs(updates_path)
touch(updates_file_path)
repo_config = repo_objs.RepoConfig(self.repo_path)
@@ -1039,50 +1061,50 @@ class TestRepoConfig:
# simple pkg move
# TODO: move pkg_updates content tests to its own module
- with open(updates_file_path, 'w') as f:
- f.write('move cat1/pkg1 cat2/pkg1\n')
+ with open(updates_file_path, "w") as f:
+ f.write("move cat1/pkg1 cat2/pkg1\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
expected_updates = {
- 'cat1/pkg1': [('move', atom.atom('cat1/pkg1'), atom.atom('cat2/pkg1'))],
+ "cat1/pkg1": [("move", atom.atom("cat1/pkg1"), atom.atom("cat2/pkg1"))],
}
assert repo_config.updates == expected_updates
del repo_config
# extraneous file should be ignored
- extra_file_path = os.path.join(updates_path, 'frobnicate')
- with open(extra_file_path, 'w') as f:
- f.write('move cat1/pkg2 cat1/pkg3\n')
+ extra_file_path = os.path.join(updates_path, "frobnicate")
+ with open(extra_file_path, "w") as f:
+ f.write("move cat1/pkg2 cat1/pkg3\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.updates == expected_updates
del repo_config
def test_updates_eapi8(self):
# empty file
- updates_path = os.path.join(self.profiles_base, 'updates')
- updates_file_path = os.path.join(updates_path, '2021.1')
+ updates_path = os.path.join(self.profiles_base, "updates")
+ updates_file_path = os.path.join(updates_path, "2021.1")
os.makedirs(updates_path)
touch(updates_file_path)
- with open(os.path.join(self.profiles_base, 'eapi'), 'w') as f:
- f.write('8\n')
+ with open(os.path.join(self.profiles_base, "eapi"), "w") as f:
+ f.write("8\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.updates == {}
del repo_config
# simple pkg move
# TODO: move pkg_updates content tests to its own module
- with open(updates_file_path, 'w') as f:
- f.write('move cat1/pkg1 cat2/pkg1\n')
+ with open(updates_file_path, "w") as f:
+ f.write("move cat1/pkg1 cat2/pkg1\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
expected_updates = {
- 'cat1/pkg1': [('move', atom.atom('cat1/pkg1'), atom.atom('cat2/pkg1'))],
+ "cat1/pkg1": [("move", atom.atom("cat1/pkg1"), atom.atom("cat2/pkg1"))],
}
assert repo_config.updates == expected_updates
del repo_config
# extraneous file should be ignored
- extra_file_path = os.path.join(updates_path, '.frobnicate')
- with open(extra_file_path, 'w') as f:
- f.write('move cat1/pkg2 cat1/pkg3\n')
+ extra_file_path = os.path.join(updates_path, ".frobnicate")
+ with open(extra_file_path, "w") as f:
+ f.write("move cat1/pkg2 cat1/pkg3\n")
repo_config = repo_objs.RepoConfig(self.repo_path)
assert repo_config.updates == expected_updates
del repo_config
diff --git a/tests/ebuild/test_repository.py b/tests/ebuild/test_repository.py
index cfee767ca..85a248d8d 100644
--- a/tests/ebuild/test_repository.py
+++ b/tests/ebuild/test_repository.py
@@ -11,30 +11,33 @@ from snakeoil.contexts import chdir
class TestUnconfiguredTree:
-
def mk_tree(self, path, *args, **kwds):
- eclasses = kwds.pop('eclass_cache', None)
+ eclasses = kwds.pop("eclass_cache", None)
if eclasses is None:
- (epath := path / 'eclass').mkdir(parents=True, exist_ok=True)
+ (epath := path / "eclass").mkdir(parents=True, exist_ok=True)
eclasses = eclass_cache.cache(str(epath))
- (path / 'profiles').mkdir(exist_ok=True)
- return repository.UnconfiguredTree(str(path), eclass_cache=eclasses, *args, **kwds)
+ (path / "profiles").mkdir(exist_ok=True)
+ return repository.UnconfiguredTree(
+ str(path), eclass_cache=eclasses, *args, **kwds
+ )
@pytest.fixture
def pdir(self, tmp_path):
- (pdir := tmp_path / 'profiles').mkdir(exist_ok=True)
+ (pdir := tmp_path / "profiles").mkdir(exist_ok=True)
# silence missing masters warnings
- if not (tmp_path / 'metadata').exists():
- (tmp_path / 'metadata').mkdir()
- (tmp_path / 'metadata' / 'layout.conf').write_text('masters =\n')
+ if not (tmp_path / "metadata").exists():
+ (tmp_path / "metadata").mkdir()
+ (tmp_path / "metadata" / "layout.conf").write_text("masters =\n")
return pdir
def test_repo_from_file(self, tmp_path):
- (tmp_path / 'random').write_text('random')
+ (tmp_path / "random").write_text("random")
with pytest.raises(errors.InitializationError):
- return repository.UnconfiguredTree(str(tmp_path / 'random'), eclass_cache=None)
+ return repository.UnconfiguredTree(
+ str(tmp_path / "random"), eclass_cache=None
+ )
def test_basics(self, tmp_path, pdir, caplog):
repo = self.mk_tree(tmp_path)
@@ -43,35 +46,39 @@ class TestUnconfiguredTree:
assert repr(repo)
caplog.clear()
- self.mk_tree(tmp_path / 'missing')
+ self.mk_tree(tmp_path / "missing")
assert caplog.text
def test_thirdpartymirrors(self, tmp_path, pdir):
- (pdir / 'thirdpartymirrors').write_text(textwrap.dedent('''\
+ (pdir / "thirdpartymirrors").write_text(
+ textwrap.dedent(
+ """\
spork http://sporks/ http://moresporks/
foon foon://foons/
- '''))
+ """
+ )
+ )
mirrors = self.mk_tree(tmp_path).mirrors
- assert set(mirrors) == {'spork', 'foon'}
- assert set(mirrors['spork']) == {'http://moresporks/', 'http://sporks/'}
- (pdir / 'thirdpartymirrors').write_text("foon dar\n")
- assert set(self.mk_tree(tmp_path).mirrors.keys()) == {'foon'}
+ assert set(mirrors) == {"spork", "foon"}
+ assert set(mirrors["spork"]) == {"http://moresporks/", "http://sporks/"}
+ (pdir / "thirdpartymirrors").write_text("foon dar\n")
+ assert set(self.mk_tree(tmp_path).mirrors.keys()) == {"foon"}
def test_repo_id(self, tmp_path):
- repo = self.mk_tree(dir1 := tmp_path / '1')
+ repo = self.mk_tree(dir1 := tmp_path / "1")
assert repo.repo_id == str(dir1)
- (dir2 := tmp_path / '2').mkdir(0o755)
- (dir2 / 'profiles').mkdir()
- (dir2 / 'profiles' / 'repo_name').write_text('testrepo\n')
+ (dir2 := tmp_path / "2").mkdir(0o755)
+ (dir2 / "profiles").mkdir()
+ (dir2 / "profiles" / "repo_name").write_text("testrepo\n")
repo = self.mk_tree(dir2)
- assert repo.repo_id == 'testrepo'
+ assert repo.repo_id == "testrepo"
def test_licenses(self, tmp_path):
- licenses = {'GPL-2', 'GPL-3+', 'BSD'}
- (tmp_path / 'licenses').mkdir()
+ licenses = {"GPL-2", "GPL-3+", "BSD"}
+ (tmp_path / "licenses").mkdir()
for license in licenses:
- (tmp_path / 'licenses' / license).touch()
+ (tmp_path / "licenses" / license).touch()
repo = self.mk_tree(tmp_path)
assert set(repo.licenses) == licenses
@@ -80,35 +87,35 @@ class TestUnconfiguredTree:
assert repo.masters == ()
def test_path_restrict(self, tmp_path, tmp_path_factory):
- repo_dir = tmp_path_factory.mktemp('repo', numbered=True)
- sym_repo_dir = tmp_path_factory.mktemp('sym_repo', numbered=True)
+ repo_dir = tmp_path_factory.mktemp("repo", numbered=True)
+ sym_repo_dir = tmp_path_factory.mktemp("sym_repo", numbered=True)
sym_repo_dir.rmdir()
sym_repo_dir.symlink_to(repo_dir)
- (repo_dir / 'profiles').mkdir()
- (repo_dir / 'profiles' / 'repo_name').write_text('testrepo\n')
- (repo_dir / 'profiles' / 'categories').write_text('cat\ntac\n')
- (repo_dir / 'skel.ebuild').touch()
- (repo_dir / 'cat' / 'foo').mkdir(parents=True)
- (repo_dir / 'cat' / 'foo' / 'Manifest').touch()
- (repo_dir / 'cat' / 'foo' / 'foo-1.ebuild').write_text('SLOT=0\n')
- (repo_dir / 'cat' / 'foo' / 'foo-2.ebuild').write_text('SLOT=0\n')
- (repo_dir / 'cat' / 'bar').mkdir(parents=True)
- (repo_dir / 'cat' / 'bar' / 'bar-1.ebuild').write_text('SLOT=0\n')
- (repo_dir / 'tac' / 'oof').mkdir(parents=True)
- (repo_dir / 'tac' / 'oof' / 'oof-1.ebuild').write_text('SLOT=0\n')
+ (repo_dir / "profiles").mkdir()
+ (repo_dir / "profiles" / "repo_name").write_text("testrepo\n")
+ (repo_dir / "profiles" / "categories").write_text("cat\ntac\n")
+ (repo_dir / "skel.ebuild").touch()
+ (repo_dir / "cat" / "foo").mkdir(parents=True)
+ (repo_dir / "cat" / "foo" / "Manifest").touch()
+ (repo_dir / "cat" / "foo" / "foo-1.ebuild").write_text("SLOT=0\n")
+ (repo_dir / "cat" / "foo" / "foo-2.ebuild").write_text("SLOT=0\n")
+ (repo_dir / "cat" / "bar").mkdir(parents=True)
+ (repo_dir / "cat" / "bar" / "bar-1.ebuild").write_text("SLOT=0\n")
+ (repo_dir / "tac" / "oof").mkdir(parents=True)
+ (repo_dir / "tac" / "oof" / "oof-1.ebuild").write_text("SLOT=0\n")
for d in (repo_dir, sym_repo_dir):
repo = self.mk_tree(d)
location = Path(repo.location)
for path in (
tmp_path, # path not in repo
- location / 'a', # nonexistent category dir
+ location / "a", # nonexistent category dir
# location / 'profiles', # non-category dir
- location / 'skel.ebuild', # not in the correct cat/PN dir layout
- location / 'cat' / 'a', # nonexistent package dir
- location / 'cat' / 'foo' / 'foo-0.ebuild', # nonexistent ebuild file
- location / 'cat' / 'foo' / 'Manifest', # non-ebuild file
+ location / "skel.ebuild", # not in the correct cat/PN dir layout
+ location / "cat" / "a", # nonexistent package dir
+ location / "cat" / "foo" / "foo-0.ebuild", # nonexistent ebuild file
+ location / "cat" / "foo" / "Manifest", # non-ebuild file
):
with pytest.raises(ValueError):
repo.path_restrict(str(path))
@@ -121,7 +128,7 @@ class TestUnconfiguredTree:
assert len(repo.match(restriction)) == 4
# category dir
- restriction = repo.path_restrict(str(location / 'cat'))
+ restriction = repo.path_restrict(str(location / "cat"))
assert len(restriction) == 2
assert isinstance(restriction[1], restricts.CategoryDep)
# matches all 3 ebuilds in the category
@@ -129,14 +136,14 @@ class TestUnconfiguredTree:
# relative category dir
with chdir(repo.location):
- restriction = repo.path_restrict('cat')
+ restriction = repo.path_restrict("cat")
assert len(restriction) == 2
assert isinstance(restriction[1], restricts.CategoryDep)
# matches all 3 ebuilds in the category
assert len(repo.match(restriction)) == 3
# package dir
- restriction = repo.path_restrict(str(location / 'cat' / 'foo'))
+ restriction = repo.path_restrict(str(location / "cat" / "foo"))
assert len(restriction) == 3
assert isinstance(restriction[2], restricts.PackageDep)
# matches both ebuilds in the package dir
@@ -144,91 +151,106 @@ class TestUnconfiguredTree:
# relative package dir
with chdir(repo.location):
- restriction = repo.path_restrict('cat/foo')
+ restriction = repo.path_restrict("cat/foo")
assert len(restriction) == 3
assert isinstance(restriction[2], restricts.PackageDep)
# matches both ebuilds in the package dir
assert len(repo.match(restriction)) == 2
# ebuild file
- restriction = repo.path_restrict(str(location / 'cat' / 'foo' / 'foo-1.ebuild'))
+ restriction = repo.path_restrict(
+ str(location / "cat" / "foo" / "foo-1.ebuild")
+ )
assert len(restriction) == 4
assert isinstance(restriction[3], restricts.VersionMatch)
# specific ebuild version match
assert len(repo.match(restriction)) == 1
# relative ebuild file path
- with chdir((location / 'cat' / 'foo').resolve()):
- restriction = repo.path_restrict('./foo-1.ebuild')
+ with chdir((location / "cat" / "foo").resolve()):
+ restriction = repo.path_restrict("./foo-1.ebuild")
assert len(restriction) == 4
assert isinstance(restriction[3], restricts.VersionMatch)
# specific ebuild version match
assert len(repo.match(restriction)) == 1
def test_categories_packages(self, tmp_path):
- (tmp_path / 'cat' / 'pkg').mkdir(parents=True)
- (tmp_path / 'empty' / 'empty').mkdir(parents=True)
- (tmp_path / 'cat' / 'pkg' / 'pkg-3.ebuild').touch()
+ (tmp_path / "cat" / "pkg").mkdir(parents=True)
+ (tmp_path / "empty" / "empty").mkdir(parents=True)
+ (tmp_path / "cat" / "pkg" / "pkg-3.ebuild").touch()
repo = self.mk_tree(tmp_path)
- assert {'cat': (), 'empty': ()} == dict(repo.categories)
- assert {'cat': ('pkg',), 'empty': ('empty',)} == dict(repo.packages)
- assert {('cat', 'pkg'): ('3',), ('empty', 'empty'): ()} == dict(repo.versions)
+ assert {"cat": (), "empty": ()} == dict(repo.categories)
+ assert {"cat": ("pkg",), "empty": ("empty",)} == dict(repo.packages)
+ assert {("cat", "pkg"): ("3",), ("empty", "empty"): ()} == dict(repo.versions)
def test_package_mask(self, tmp_path, pdir):
- (pdir / 'package.mask').write_text(textwrap.dedent('''\
+ (pdir / "package.mask").write_text(
+ textwrap.dedent(
+ """\
# lalala
it-is/broken
<just/newer-than-42
- '''))
+ """
+ )
+ )
repo = self.mk_tree(tmp_path)
- assert set(repo.pkg_masks) == {atom('it-is/broken'), atom('<just/newer-than-42')}
+ assert set(repo.pkg_masks) == {
+ atom("it-is/broken"),
+ atom("<just/newer-than-42"),
+ }
class TestSlavedTree(TestUnconfiguredTree):
-
def mk_tree(self, path, *args, **kwds):
if path != self.dir_slave:
self.dir_slave = path
- self.dir_master = path.parent / (path.name + 'master')
- (self.dir_slave / 'profiles').mkdir(parents=True, exist_ok=True)
- (self.dir_master / 'profiles').mkdir(parents=True, exist_ok=True)
+ self.dir_master = path.parent / (path.name + "master")
+ (self.dir_slave / "profiles").mkdir(parents=True, exist_ok=True)
+ (self.dir_master / "profiles").mkdir(parents=True, exist_ok=True)
- eclasses = kwds.pop('eclass_cache', None)
+ eclasses = kwds.pop("eclass_cache", None)
if eclasses is None:
- (epath := path / 'eclass').mkdir(parents=True, exist_ok=True)
+ (epath := path / "eclass").mkdir(parents=True, exist_ok=True)
eclasses = eclass_cache.cache(str(epath))
- self.master_repo = repository.UnconfiguredTree(str(self.dir_master), eclass_cache=eclasses, *args, **kwds)
+ self.master_repo = repository.UnconfiguredTree(
+ str(self.dir_master), eclass_cache=eclasses, *args, **kwds
+ )
masters = (self.master_repo,)
- return repository.UnconfiguredTree(str(self.dir_slave), eclass_cache=eclasses, masters=masters, *args, **kwds)
+ return repository.UnconfiguredTree(
+ str(self.dir_slave), eclass_cache=eclasses, masters=masters, *args, **kwds
+ )
@pytest.fixture(autouse=True)
def master_repo(self, tmp_path_factory):
- self.dir_master = tmp_path_factory.mktemp('master', numbered=True)
- (self.dir_master / 'metadata').mkdir()
- (self.dir_master / 'metadata' / 'layout.conf').write_text('masters =\n')
- (self.dir_master / 'profiles').mkdir()
- (self.dir_master / 'profiles' / 'repo_name').write_text('master\n')
+ self.dir_master = tmp_path_factory.mktemp("master", numbered=True)
+ (self.dir_master / "metadata").mkdir()
+ (self.dir_master / "metadata" / "layout.conf").write_text("masters =\n")
+ (self.dir_master / "profiles").mkdir()
+ (self.dir_master / "profiles" / "repo_name").write_text("master\n")
return self.dir_master
@pytest.fixture(autouse=True)
def slave_repo(self, tmp_path):
self.dir_slave = tmp_path
- (self.dir_slave / 'metadata').mkdir()
- (self.dir_slave / 'metadata' / 'layout.conf').write_text('masters = master\n')
- (self.dir_slave / 'profiles').mkdir()
- (self.dir_slave / 'profiles' / 'repo_name').write_text('slave\n')
+ (self.dir_slave / "metadata").mkdir()
+ (self.dir_slave / "metadata" / "layout.conf").write_text("masters = master\n")
+ (self.dir_slave / "profiles").mkdir()
+ (self.dir_slave / "profiles" / "repo_name").write_text("slave\n")
return self.dir_slave
- @pytest.mark.parametrize(("master", "slave", "expected"), (
- (('cat',), (), ('cat',)),
- ((), ('cat',), ('cat',)),
- (('sys-apps', 'foo'), ('cat', 'foo'), ('cat', 'foo', 'sys-apps')),
- ))
+ @pytest.mark.parametrize(
+ ("master", "slave", "expected"),
+ (
+ (("cat",), (), ("cat",)),
+ ((), ("cat",), ("cat",)),
+ (("sys-apps", "foo"), ("cat", "foo"), ("cat", "foo", "sys-apps")),
+ ),
+ )
def test_categories(self, master_repo, slave_repo, master, slave, expected):
# categories are inherited from masters
- (master_repo / 'profiles' / 'categories').write_text('\n'.join(master))
- (slave_repo / 'profiles' / 'categories').write_text('\n'.join(slave))
+ (master_repo / "profiles" / "categories").write_text("\n".join(master))
+ (slave_repo / "profiles" / "categories").write_text("\n".join(slave))
for cat in master:
(master_repo / cat).mkdir(0o755)
for cat in slave:
@@ -237,55 +259,77 @@ class TestSlavedTree(TestUnconfiguredTree):
assert tuple(sorted(repo.categories)) == expected
def test_licenses(self, master_repo, slave_repo):
- master_licenses = ('GPL-2', 'GPL-3+', 'BSD')
- slave_licenses = ('BSD-2', 'MIT')
- (master_repo / 'licenses').mkdir()
+ master_licenses = ("GPL-2", "GPL-3+", "BSD")
+ slave_licenses = ("BSD-2", "MIT")
+ (master_repo / "licenses").mkdir()
for license in master_licenses:
- (master_repo / 'licenses' / license).touch()
- (slave_repo / 'licenses').mkdir()
+ (master_repo / "licenses" / license).touch()
+ (slave_repo / "licenses").mkdir()
for license in slave_licenses:
- (slave_repo / 'licenses' / license).touch()
+ (slave_repo / "licenses" / license).touch()
repo = self.mk_tree(slave_repo)
assert set(repo.licenses) == set(master_licenses + slave_licenses)
def test_license_groups(self, master_repo, slave_repo):
- master_licenses = ('GPL-2', 'BSD')
- slave_licenses = ('BSD-2', 'MIT')
+ master_licenses = ("GPL-2", "BSD")
+ slave_licenses = ("BSD-2", "MIT")
- (master_repo / 'licenses').mkdir()
+ (master_repo / "licenses").mkdir()
for license in master_licenses:
- (master_repo / 'licenses' / license).touch()
- (master_repo / 'profiles' / 'license_groups').write_text(f'FREE {" ".join(master_licenses)}\nOSI-APPROVED @FREE\n')
+ (master_repo / "licenses" / license).touch()
+ (master_repo / "profiles" / "license_groups").write_text(
+ f'FREE {" ".join(master_licenses)}\nOSI-APPROVED @FREE\n'
+ )
- (slave_repo / 'licenses').mkdir()
+ (slave_repo / "licenses").mkdir()
for license in slave_licenses:
- (slave_repo / 'licenses' / license).touch()
- (slave_repo / 'profiles' / 'license_groups').write_text(f'MISC-FREE @FREE {" ".join(slave_licenses)}\nFSF-APPROVED MIT\nOSI-APPROVED @FSF-APPROVED\n')
+ (slave_repo / "licenses" / license).touch()
+ (slave_repo / "profiles" / "license_groups").write_text(
+ f'MISC-FREE @FREE {" ".join(slave_licenses)}\nFSF-APPROVED MIT\nOSI-APPROVED @FSF-APPROVED\n'
+ )
repo = self.mk_tree(slave_repo)
assert set(repo.licenses) == set(master_licenses + slave_licenses)
- assert set(repo.licenses.groups) == {'FREE', 'FSF-APPROVED', 'MISC-FREE', 'OSI-APPROVED'}
- assert 'BSD' in repo.licenses.groups['MISC-FREE']
+ assert set(repo.licenses.groups) == {
+ "FREE",
+ "FSF-APPROVED",
+ "MISC-FREE",
+ "OSI-APPROVED",
+ }
+ assert "BSD" in repo.licenses.groups["MISC-FREE"]
def test_package_deprecated(self, slave_repo, master_repo):
- (master_repo / 'profiles' / 'package.deprecated').write_text(textwrap.dedent('''\
+ (master_repo / "profiles" / "package.deprecated").write_text(
+ textwrap.dedent(
+ """\
# lalala
it-is/deprecated
<just/newer-than-42
- '''))
+ """
+ )
+ )
repo = self.mk_tree(slave_repo)
- assert set(repo.deprecated) == {atom('it-is/deprecated'), atom('<just/newer-than-42')}
+ assert set(repo.deprecated) == {
+ atom("it-is/deprecated"),
+ atom("<just/newer-than-42"),
+ }
def test_use_expand_desc(self, slave_repo, master_repo):
use_expand_desc = {
- 'example': (('example_foo', 'Build with foo'),
- ('example_bar', 'Build with bar'))
+ "example": (
+ ("example_foo", "Build with foo"),
+ ("example_bar", "Build with bar"),
+ )
}
- (master_repo / 'profiles' / 'desc').mkdir()
- (master_repo / 'profiles' / 'desc' / 'example').write_text(textwrap.dedent('''\
+ (master_repo / "profiles" / "desc").mkdir()
+ (master_repo / "profiles" / "desc" / "example").write_text(
+ textwrap.dedent(
+ """\
foo - Build with foo
bar - Build with bar
- '''))
+ """
+ )
+ )
repo = self.mk_tree(slave_repo)
assert use_expand_desc == dict(repo.use_expand_desc)
diff --git a/tests/fetch/test_base.py b/tests/fetch/test_base.py
index b89ffd2e8..d5ec2436b 100644
--- a/tests/fetch/test_base.py
+++ b/tests/fetch/test_base.py
@@ -6,7 +6,7 @@ from pkgcore.fetch import base, errors, fetchable
from snakeoil import data_source
from snakeoil.chksum import get_handlers
-repeating_str = 'asdf'
+repeating_str = "asdf"
data = repeating_str * 4000
handlers = get_handlers()
@@ -16,13 +16,15 @@ from snakeoil.mappings import LazyValDict
def _callback(chf):
return handlers[chf](data_source.data_source(data))
+
+
chksums = LazyValDict(frozenset(handlers.keys()), _callback)
# get a non size based chksum
known_chksum = [x for x in handlers.keys() if x != "size"][0]
-class TestFetcher:
+class TestFetcher:
@pytest.fixture(autouse=True)
def _setup(self, tmpdir):
self.fp = os.path.join(str(tmpdir), "test")
@@ -35,6 +37,7 @@ class TestFetcher:
def test__call__(self):
l = []
+
class c(base.fetcher):
def fetch(self, *a, **kw):
l.extend((a, kw))
@@ -56,20 +59,26 @@ class TestFetcher:
self.fetcher._verify(self.fp, self.obj, handlers=subhandlers)
self.fetcher._verify(self.fp, self.obj)
assert None == self.fetcher._verify(
- self.fp, self.obj, handlers=subhandlers, all_chksums=False)
+ self.fp, self.obj, handlers=subhandlers, all_chksums=False
+ )
def test_size_verification_first(self):
self.write_data()
chksum_data = dict(chksums.items())
l = []
+
def f(chf, fp):
l.append(chf)
return chksum_data[chf]
- subhandlers = {"size": partial(f, 'size'), known_chksum:partial(f, known_chksum)}
+
+ subhandlers = {
+ "size": partial(f, "size"),
+ known_chksum: partial(f, known_chksum),
+ }
# exact size verification
self.fetcher._verify(self.fp, self.obj, handlers=subhandlers, all_chksums=False)
- assert ['size', known_chksum] == l
+ assert ["size", known_chksum] == l
for x in (-100, 100):
while l:
l.pop(-1)
@@ -77,14 +86,16 @@ class TestFetcher:
if x > 0:
with pytest.raises(errors.ChksumFailure) as excinfo:
self.fetcher._verify(
- self.fp, self.obj, handlers=subhandlers, all_chksums=False)
- assert excinfo.value.chksum == 'size'
+ self.fp, self.obj, handlers=subhandlers, all_chksums=False
+ )
+ assert excinfo.value.chksum == "size"
else:
with pytest.raises(errors.FetchError) as excinfo:
self.fetcher._verify(
- self.fp, self.obj, handlers=subhandlers, all_chksums=False)
+ self.fp, self.obj, handlers=subhandlers, all_chksums=False
+ )
assert excinfo.value.resumable
- assert ['size'] == l
+ assert ["size"] == l
def test_normal(self):
self.write_data()
@@ -101,10 +112,11 @@ class TestFetcher:
self.write_data(data + "foon")
with pytest.raises(errors.ChksumFailure) as excinfo:
self.fetcher._verify(self.fp, self.obj)
- assert excinfo.value.chksum == 'size'
+ assert excinfo.value.chksum == "size"
# verify they're run once, and only once
l = []
+
def f(chf, fp):
l.append(chf)
return chksums[chf]
diff --git a/tests/fetch/test_init.py b/tests/fetch/test_init.py
index 7f69fe150..cf9cbbafc 100644
--- a/tests/fetch/test_init.py
+++ b/tests/fetch/test_init.py
@@ -11,21 +11,22 @@ def assert_uri(obj, uri):
class TestFetchable:
-
def test_init(self):
- o = fetch.fetchable("dar", uri=["asdf"], chksums={"asdf":1})
+ o = fetch.fetchable("dar", uri=["asdf"], chksums={"asdf": 1})
assert o.filename == "dar"
assert_uri(o.uri, ["asdf"])
- assert o.chksums == {"asdf":1}
+ assert o.chksums == {"asdf": 1}
def test_eq_ne(self):
- o1 = fetch.fetchable("dar", uri=["asdf"], chksums={"asdf":1})
+ o1 = fetch.fetchable("dar", uri=["asdf"], chksums={"asdf": 1})
assert o1 == o1
- o2 = fetch.fetchable("dar", uri=["asdf"], chksums={"asdf":1})
+ o2 = fetch.fetchable("dar", uri=["asdf"], chksums={"asdf": 1})
assert o1 == o2
- assert o1 != fetch.fetchable("dar1", uri=["asdf"], chksums={"asdf":1})
- assert o1 != fetch.fetchable("dar", uri=["asdf1"], chksums={"asdf":1})
- assert o1 != fetch.fetchable("dar", uri=["asdf1"], chksums={"asdf":1, "foon":1})
+ assert o1 != fetch.fetchable("dar1", uri=["asdf"], chksums={"asdf": 1})
+ assert o1 != fetch.fetchable("dar", uri=["asdf1"], chksums={"asdf": 1})
+ assert o1 != fetch.fetchable(
+ "dar", uri=["asdf1"], chksums={"asdf": 1, "foon": 1}
+ )
class TestMirror:
@@ -53,8 +54,8 @@ class TestMirror:
assert mirror[1] == self.default_mirrors[1]
def test_eq_ne(self, mirror):
- assert mirror == self.kls(self.default_mirrors, 'fork')
- assert mirror != self.kls(self.default_mirrors + ['http://fark'], 'fork')
+ assert mirror == self.kls(self.default_mirrors, "fork")
+ assert mirror != self.kls(self.default_mirrors + ["http://fark"], "fork")
class TestDefaultMirror(TestMirror):
@@ -63,7 +64,6 @@ class TestDefaultMirror(TestMirror):
class Test_uri_list:
-
@pytest.fixture
def uril(self):
return fetch.uri_list("cows")
@@ -86,21 +86,18 @@ class Test_uri_list:
uril.add_mirror(mirror)
assert list(uril) == ["me/cows", "WI/cows"]
uril.add_mirror(mirror, "foon/boon")
- assert_uri(uril,
- ["me/cows", "WI/cows", "me/foon/boon", "WI/foon/boon"])
+ assert_uri(uril, ["me/cows", "WI/cows", "me/foon/boon", "WI/foon/boon"])
def test_uris(self, uril):
uril.add_uri("blar")
assert_uri(uril, ["blar"])
def test_combined(self, uril):
- l = ["blarn", "me/cows", "WI/cows", "madison",
- "belleville/cows", "verona/cows"]
+ l = ["blarn", "me/cows", "WI/cows", "madison", "belleville/cows", "verona/cows"]
uril.add_uri("blarn")
uril.add_mirror(fetch.mirror(["me", "WI"], "asdf"))
uril.add_uri("madison")
- uril.add_mirror(fetch.default_mirror(
- ["belleville", "verona"], "foon"))
+ uril.add_mirror(fetch.default_mirror(["belleville", "verona"], "foon"))
assert_uri(uril, l)
def test_nonzero(self):
diff --git a/tests/fs/fs_util.py b/tests/fs/fs_util.py
index 71e22c380..c453a43f0 100644
--- a/tests/fs/fs_util.py
+++ b/tests/fs/fs_util.py
@@ -5,6 +5,7 @@ testing. Do not use it in non-test code.
"""
from pkgcore.fs import fs
+
# we use pre_curry to preserve the docs for the wrapped target
from snakeoil.currying import pre_curry
@@ -16,8 +17,7 @@ for key in dir(fs):
val = getattr(fs, key)
# protection; issubclass pukes if it's not a class.
# downside, this works on new style only
- if isinstance(val, type) and issubclass(val, fs.fsBase) and \
- val is not fs.fsBase:
+ if isinstance(val, type) and issubclass(val, fs.fsBase) and val is not fs.fsBase:
locals()[f"_original_{key}"] = val
val = pre_curry(val, strict=False)
val.__doc__ = locals()[f"_original_{key}"].__doc__
diff --git a/tests/fs/test_contents.py b/tests/fs/test_contents.py
index c5c08dc01..e039314de 100644
--- a/tests/fs/test_contents.py
+++ b/tests/fs/test_contents.py
@@ -5,24 +5,34 @@ import pytest
from pkgcore.fs import contents, fs
mk_file = partial(fs.fsFile, strict=False)
-mk_dir = partial(fs.fsDir, strict=False)
+mk_dir = partial(fs.fsDir, strict=False)
mk_link = partial(fs.fsLink, strict=False)
-mk_dev = partial(fs.fsDev, strict=False)
+mk_dev = partial(fs.fsDev, strict=False)
mk_fifo = partial(fs.fsFifo, strict=False)
class TestContentsSet:
- files = list(map(mk_file, ["/etc/blah", "/etc/foo", "/etc/dar",
- "/tmp/dar",
- "/tmp/blah/foo/long/ass/file/name/but/not/that/bad/really"]))
- dirs = list(map(mk_dir, ["/tmp", "/blah", "/tmp/dar",
- "/usr/", "/usr/bin"]))
- links = [fs.fsLink(x, os.path.dirname(x), strict=False) for x in
- ["/tmp/foo", "/usr/X11R6/lib", "/nagga/noo"]]
- devs = list(map(mk_dev,
- [f"dev/{x}" for x in ["sda1", "hda", "hda2", "disks/ide1"]]))
- fifos = list(map(mk_fifo,
- [f"tmp/{y}" for y in ("dar", "boo", "bah")]))
+ files = list(
+ map(
+ mk_file,
+ [
+ "/etc/blah",
+ "/etc/foo",
+ "/etc/dar",
+ "/tmp/dar",
+ "/tmp/blah/foo/long/ass/file/name/but/not/that/bad/really",
+ ],
+ )
+ )
+ dirs = list(map(mk_dir, ["/tmp", "/blah", "/tmp/dar", "/usr/", "/usr/bin"]))
+ links = [
+ fs.fsLink(x, os.path.dirname(x), strict=False)
+ for x in ["/tmp/foo", "/usr/X11R6/lib", "/nagga/noo"]
+ ]
+ devs = list(
+ map(mk_dev, [f"dev/{x}" for x in ["sda1", "hda", "hda2", "disks/ide1"]])
+ )
+ fifos = list(map(mk_fifo, [f"tmp/{y}" for y in ("dar", "boo", "bah")]))
all = dirs + links + devs + fifos
def test_init(self):
@@ -39,7 +49,9 @@ class TestContentsSet:
for x in self.links:
cs.add(x)
assert x in cs
- assert len(cs) == len(set(x.location for x in self.files + self.dirs + self.links))
+ assert len(cs) == len(
+ set(x.location for x in self.files + self.dirs + self.links)
+ )
with pytest.raises(AttributeError):
contents.contentsSet(mutable=False).add(self.devs[0])
with pytest.raises(TypeError):
@@ -64,8 +76,9 @@ class TestContentsSet:
def test_contains(self):
cs = contents.contentsSet(mutable=True)
- for x in [y[0] for y in [
- self.files, self.dirs, self.links, self.devs, self.fifos]]:
+ for x in [
+ y[0] for y in [self.files, self.dirs, self.links, self.devs, self.fifos]
+ ]:
assert x not in cs
assert x.location not in cs
cs.add(x)
@@ -126,17 +139,19 @@ class TestContentsSet:
assert set(contents.contentsSet(s)) == s
def test_check_instance(self):
- for x in [y[0] for y in [
- self.files, self.dirs, self.links, self.devs, self.fifos]]:
+ for x in [
+ y[0] for y in [self.files, self.dirs, self.links, self.devs, self.fifos]
+ ]:
assert tuple(contents.check_instance(x)) == (x.location, x)
with pytest.raises(TypeError):
contents.check_instance(1)
-
def check_set_op(self, name, ret, source=None):
if source is None:
- source = ([fs.fsDir("/tmp", strict=False)],
- [fs.fsFile("/tmp", strict=False)])
+ source = (
+ [fs.fsDir("/tmp", strict=False)],
+ [fs.fsFile("/tmp", strict=False)],
+ )
c1, c2 = [contents.contentsSet(x) for x in source]
if name.endswith("_update"):
@@ -157,23 +172,46 @@ class TestContentsSet:
fstrings = {"/a", "/b", "/c", "/d"}
f = tuple(map(mk_file, fstrings))
- @pytest.mark.parametrize("name, ret, source", (
- pytest.param("intersection", {"/tmp"}, None, id="intersection"),
- pytest.param("intersection_update", {"/tmp"}, None, id="intersection_update"),
- pytest.param("difference", set(), None, id="difference"),
- pytest.param("difference_update", set(), None, id="difference_update"),
- pytest.param("symmetric_difference", set(), None, id="symmetric_difference"),
- pytest.param("symmetric_difference_update", set(), None, id="symmetric_difference_update"),
-
- pytest.param("union", {"/tmp"}, None, id="union1"),
- pytest.param("union", fstrings, (f[:2], f[2:]), id="union2"),
- pytest.param("symmetric_difference", fstrings, (f[:2], f[2:]), id="symmetric_difference2"),
- pytest.param("symmetric_difference_update", fstrings, (f[:2], f[2:]), id="symmetric_difference_update2"),
- ))
+ @pytest.mark.parametrize(
+ "name, ret, source",
+ (
+ pytest.param("intersection", {"/tmp"}, None, id="intersection"),
+ pytest.param(
+ "intersection_update", {"/tmp"}, None, id="intersection_update"
+ ),
+ pytest.param("difference", set(), None, id="difference"),
+ pytest.param("difference_update", set(), None, id="difference_update"),
+ pytest.param(
+ "symmetric_difference", set(), None, id="symmetric_difference"
+ ),
+ pytest.param(
+ "symmetric_difference_update",
+ set(),
+ None,
+ id="symmetric_difference_update",
+ ),
+ pytest.param("union", {"/tmp"}, None, id="union1"),
+ pytest.param("union", fstrings, (f[:2], f[2:]), id="union2"),
+ pytest.param(
+ "symmetric_difference",
+ fstrings,
+ (f[:2], f[2:]),
+ id="symmetric_difference2",
+ ),
+ pytest.param(
+ "symmetric_difference_update",
+ fstrings,
+ (f[:2], f[2:]),
+ id="symmetric_difference_update2",
+ ),
+ ),
+ )
def test_check_set_op(self, name, ret, source):
if source is None:
- source = ([fs.fsDir("/tmp", strict=False)],
- [fs.fsFile("/tmp", strict=False)])
+ source = (
+ [fs.fsDir("/tmp", strict=False)],
+ [fs.fsFile("/tmp", strict=False)],
+ )
c1, c2 = [contents.contentsSet(x) for x in source]
if name.endswith("_update"):
@@ -198,52 +236,75 @@ class TestContentsSet:
cset2 = contents.contentsSet(data2)
f = getattr(cset1, name)
got = f(cset2)
- assert got == required, \
- f"{name}: expected {required}, got {got}\ncset1={cset1!r}\ncset2={cset2!r}"
-
- @pytest.mark.parametrize(("required", "data1", "data2"), (
- (True, [mk_file("/foon")], [mk_file("/foon")]),
- (False, [mk_file("/foon")], [mk_file("/dev")]),
- (False, [mk_file("/dev"), mk_file("/dar")], [mk_file("/dev")]),
- (True, [mk_file("/dev"), mk_file("/dar")],
- [mk_file("/dev"), mk_file("/dar"), mk_file("/asdf")]),
- ))
+ assert (
+ got == required
+ ), f"{name}: expected {required}, got {got}\ncset1={cset1!r}\ncset2={cset2!r}"
+
+ @pytest.mark.parametrize(
+ ("required", "data1", "data2"),
+ (
+ (True, [mk_file("/foon")], [mk_file("/foon")]),
+ (False, [mk_file("/foon")], [mk_file("/dev")]),
+ (False, [mk_file("/dev"), mk_file("/dar")], [mk_file("/dev")]),
+ (
+ True,
+ [mk_file("/dev"), mk_file("/dar")],
+ [mk_file("/dev"), mk_file("/dar"), mk_file("/asdf")],
+ ),
+ ),
+ )
def test_issubset(self, required, data1, data2):
self.check_complex_set_op("issubset", required, data1, data2)
- @pytest.mark.parametrize(("required", "data1", "data2"), (
- (True, [mk_file("/foon")], [mk_file("/foon")]),
- (False, [mk_file("/foon")], [mk_file("/dev")]),
- (True, [mk_file("/dev"), mk_file("/dar")], [mk_file("/dev")]),
- (False, [mk_file("/dev")], [mk_file("/dev"), mk_file("/dev2")]),
- ))
+ @pytest.mark.parametrize(
+ ("required", "data1", "data2"),
+ (
+ (True, [mk_file("/foon")], [mk_file("/foon")]),
+ (False, [mk_file("/foon")], [mk_file("/dev")]),
+ (True, [mk_file("/dev"), mk_file("/dar")], [mk_file("/dev")]),
+ (False, [mk_file("/dev")], [mk_file("/dev"), mk_file("/dev2")]),
+ ),
+ )
def test_issuperset(self, required, data1, data2):
self.check_complex_set_op("issuperset", required, data1, data2)
- @pytest.mark.parametrize(("required", "data1", "data2"), (
- (False, [mk_file("/foon")], [mk_file("/foon")]),
- (True, [mk_file("/foon")], [mk_file("/dev")]),
- (False, [mk_file("/dev"), mk_file("/dar")], [mk_file("/dev")]),
- (False, [mk_file("/dev"), mk_file("/dar")],
- [mk_file("/dev"), mk_file("/dar"), mk_file("/asdf")]),
- (False, [mk_file("/dev"), mk_file("/dar")],
- [mk_file("/dev"), mk_file("/dar"), mk_file("/asdf")]),
- (True, [mk_file("/dev"), mk_file("/dar")],
- [mk_file("/dev2"), mk_file("/dar2"), mk_file("/asdf")]),
- ))
+ @pytest.mark.parametrize(
+ ("required", "data1", "data2"),
+ (
+ (False, [mk_file("/foon")], [mk_file("/foon")]),
+ (True, [mk_file("/foon")], [mk_file("/dev")]),
+ (False, [mk_file("/dev"), mk_file("/dar")], [mk_file("/dev")]),
+ (
+ False,
+ [mk_file("/dev"), mk_file("/dar")],
+ [mk_file("/dev"), mk_file("/dar"), mk_file("/asdf")],
+ ),
+ (
+ False,
+ [mk_file("/dev"), mk_file("/dar")],
+ [mk_file("/dev"), mk_file("/dar"), mk_file("/asdf")],
+ ),
+ (
+ True,
+ [mk_file("/dev"), mk_file("/dar")],
+ [mk_file("/dev2"), mk_file("/dar2"), mk_file("/asdf")],
+ ),
+ ),
+ )
def test_isdisjoint(self, required, data1, data2):
self.check_complex_set_op("isdisjoint", required, data1, data2)
def test_child_nodes(self):
- assert {'/usr', '/usr/bin', '/usr/foo'} == {
- x.location for x in contents.contentsSet(
- [mk_dir("/usr"), mk_dir("/usr/bin"), mk_file("/usr/foo")])}
+ assert {"/usr", "/usr/bin", "/usr/foo"} == {
+ x.location
+ for x in contents.contentsSet(
+ [mk_dir("/usr"), mk_dir("/usr/bin"), mk_file("/usr/foo")]
+ )
+ }
def test_map_directory_structure(self):
- old = contents.contentsSet([mk_dir("/dir"),
- mk_link("/sym", "dir")])
- new = contents.contentsSet([mk_file("/sym/a"),
- mk_dir("/sym")])
+ old = contents.contentsSet([mk_dir("/dir"), mk_link("/sym", "dir")])
+ new = contents.contentsSet([mk_file("/sym/a"), mk_dir("/sym")])
# verify the machinery is working as expected.
ret = new.map_directory_structure(old)
assert set(ret) == {mk_dir("/dir"), mk_file("/dir/a")}
@@ -255,22 +316,29 @@ class TestContentsSet:
new.add(mk_dir("/sym/sym"))
ret = new.map_directory_structure(old)
- assert set(ret) == {mk_dir("/dir"), mk_file("/dir/a"),
- mk_dir("/dir/dir2"), mk_file("/dir/dir2/b")}
-
+ assert set(ret) == {
+ mk_dir("/dir"),
+ mk_file("/dir/a"),
+ mk_dir("/dir/dir2"),
+ mk_file("/dir/dir2/b"),
+ }
def test_add_missing_directories(self):
- src = [mk_file("/dir1/a"), mk_file("/dir2/dir3/b"),
- mk_dir("/dir1/dir4")]
+ src = [mk_file("/dir1/a"), mk_file("/dir2/dir3/b"), mk_dir("/dir1/dir4")]
cs = contents.contentsSet(src)
cs.add_missing_directories()
- assert {x.location for x in cs} == \
- {'/dir1', '/dir1/a', '/dir1/dir4', '/dir2', '/dir2/dir3', '/dir2/dir3/b'}
- obj = cs['/dir1']
+ assert {x.location for x in cs} == {
+ "/dir1",
+ "/dir1/a",
+ "/dir1/dir4",
+ "/dir2",
+ "/dir2/dir3",
+ "/dir2/dir3/b",
+ }
+ obj = cs["/dir1"]
assert obj.mode == 0o775
def test_inode_map(self):
-
def check_it(target):
d = {k: set(v) for k, v in cs.inode_map().items()}
target = {k: set(v) for k, v in target.items()}
@@ -279,19 +347,19 @@ class TestContentsSet:
cs = contents.contentsSet()
f1 = mk_file("/f", dev=1, inode=1)
cs.add(f1)
- check_it({(1,1):[f1]})
+ check_it({(1, 1): [f1]})
f2 = mk_file("/x", dev=1, inode=2)
cs.add(f2)
- check_it({(1,1):[f1], (1,2):[f2]})
+ check_it({(1, 1): [f1], (1, 2): [f2]})
f3 = mk_file("/y", dev=2, inode=1)
cs.add(f3)
- check_it({(1,1):[f1], (1,2):[f2], (2,1):[f3]})
+ check_it({(1, 1): [f1], (1, 2): [f2], (2, 1): [f3]})
f4 = mk_file("/z", dev=1, inode=1)
cs.add(f4)
- check_it({(1,1):[f1, f4], (1,2):[f2], (2,1):[f3]})
+ check_it({(1, 1): [f1, f4], (1, 2): [f2], (2, 1): [f3]})
class Test_offset_rewriting:
@@ -303,8 +371,12 @@ class Test_offset_rewriting:
f = [f"/foon/{x}" for x in range(10)]
f.extend(f"/foon/{x}/blah" for x in range(5))
f = [fs.fsFile(x, strict=False) for x in f]
- assert {x.location for x in f} == {x.location for x in self.offset_insert('/', f)}
- assert {f'/usr{x.location}' for x in f} == {x.location for x in self.offset_insert('/usr', f)}
+ assert {x.location for x in f} == {
+ x.location for x in self.offset_insert("/", f)
+ }
+ assert {f"/usr{x.location}" for x in f} == {
+ x.location for x in self.offset_insert("/usr", f)
+ }
def test_change_offset(self):
f = [f"/foon/{x}" for x in range(10)]
@@ -312,13 +384,23 @@ class Test_offset_rewriting:
f = [fs.fsFile(x, strict=False) for x in f]
assert {x.location for x in f} == {
y.location
- for y in self.change_offset('/usr', '/', (
- x.change_attributes(location=f'/usr{x.location}') for x in f))}
+ for y in self.change_offset(
+ "/usr",
+ "/",
+ (x.change_attributes(location=f"/usr{x.location}") for x in f),
+ )
+ }
assert {x.location for x in f} == {
y.location
- for y in self.change_offset('/usr', '/', (
- x.change_attributes(location=f'/usr/{x.location}') for x in f))}
- assert {f'/usr{x.location}' for x in f} == {
+ for y in self.change_offset(
+ "/usr",
+ "/",
+ (x.change_attributes(location=f"/usr/{x.location}") for x in f),
+ )
+ }
+ assert {f"/usr{x.location}" for x in f} == {
y.location
- for y in self.change_offset('/', '/usr', (
- x.change_attributes(location=f'/{x.location}') for x in f))}
+ for y in self.change_offset(
+ "/", "/usr", (x.change_attributes(location=f"/{x.location}") for x in f)
+ )
+ }
diff --git a/tests/fs/test_fs.py b/tests/fs/test_fs.py
index ffb4b1296..d29b58c11 100644
--- a/tests/fs/test_fs.py
+++ b/tests/fs/test_fs.py
@@ -15,14 +15,14 @@ class base:
return self.kls(location, **kwds)
def test_basename(self):
- assert self.make_obj(location='/asdf').basename == 'asdf'
- assert self.make_obj(location='/a/b').basename == 'b'
+ assert self.make_obj(location="/asdf").basename == "asdf"
+ assert self.make_obj(location="/a/b").basename == "b"
def test_dirname(self):
- assert self.make_obj(location='/asdf').dirname == '/'
- assert self.make_obj(location='/a/b').dirname == '/a'
+ assert self.make_obj(location="/asdf").dirname == "/"
+ assert self.make_obj(location="/a/b").dirname == "/a"
- @pytest.mark.parametrize("loc", ('/tmp/a', '/tmp//a', '/tmp//', '/tmp/a/..'))
+ @pytest.mark.parametrize("loc", ("/tmp/a", "/tmp//a", "/tmp//", "/tmp/a/.."))
def test_location_normalization(self, loc):
assert self.make_obj(location=loc).location == normpath(loc)
@@ -81,15 +81,17 @@ class base:
def test_default_attrs(self):
assert self.make_obj(location="/adsf").mode is None
+
class tmp(self.kls):
__default_attrs__ = self.kls.__default_attrs__.copy()
- __default_attrs__['tmp'] = lambda self2:getattr(self2, 'a', 1)
- __attrs__ = self.kls.__attrs__ + ('tmp',)
- __slots__ = ('a', 'tmp')
+ __default_attrs__["tmp"] = lambda self2: getattr(self2, "a", 1)
+ __attrs__ = self.kls.__attrs__ + ("tmp",)
+ __slots__ = ("a", "tmp")
+
try:
self.kls = tmp
- assert self.make_obj('/adsf', strict=False).tmp == 1
- t = self.make_obj('/asdf', a='foon', strict=False)
+ assert self.make_obj("/adsf", strict=False).tmp == 1
+ t = self.make_obj("/asdf", a="foon", strict=False)
assert t.tmp == "foon"
finally:
del self.kls
@@ -106,14 +108,19 @@ class Test_fsFile(base):
raw_data = f.read()
assert o.data.text_fileobj().read() == raw_data
- o = self.make_obj("/bin/this-file-should-not-exist-nor-be-read",
- data=data_source(raw_data))
+ o = self.make_obj(
+ "/bin/this-file-should-not-exist-nor-be-read", data=data_source(raw_data)
+ )
assert o.data.text_fileobj().read() == raw_data
keys = list(o.chksums.keys())
- assert [o.chksums[x] for x in keys] == list(get_chksums(data_source(raw_data), *keys))
+ assert [o.chksums[x] for x in keys] == list(
+ get_chksums(data_source(raw_data), *keys)
+ )
chksums = dict(iter(o.chksums.items()))
- assert set(self.make_obj(chksums=chksums).chksums.items()) == set(chksums.items())
+ assert set(self.make_obj(chksums=chksums).chksums.items()) == set(
+ chksums.items()
+ )
def test_chksum_regen(self):
data_source = object()
@@ -122,9 +129,12 @@ class Test_fsFile(base):
chksums1 = obj.chksums
assert chksums1 is not obj.change_attributes(data=data_source).chksums
- assert chksums1 is obj.change_attributes(data=data_source, chksums=obj.chksums).chksums
+ assert (
+ chksums1
+ is obj.change_attributes(data=data_source, chksums=obj.chksums).chksums
+ )
- obj2 = self.make_obj(__file__, chksums={1:2})
+ obj2 = self.make_obj(__file__, chksums={1: 2})
assert obj2.chksums is obj2.change_attributes(data=data_source).chksums
@@ -142,16 +152,22 @@ class Test_fsLink(base):
assert self.make_obj(target="../foon").target == "../foon"
def test_resolved_target(self):
- assert self.make_obj(location="/tmp/foon", target="dar").resolved_target == "/tmp/dar"
- assert self.make_obj(location="/tmp/foon", target="/dar").resolved_target == "/dar"
+ assert (
+ self.make_obj(location="/tmp/foon", target="dar").resolved_target
+ == "/tmp/dar"
+ )
+ assert (
+ self.make_obj(location="/tmp/foon", target="/dar").resolved_target == "/dar"
+ )
def test_cmp(self):
obj1 = self.make_obj(
- location='/usr/lib64/opengl/nvidia/lib/libnvidia-tls.so.1',
- target='../tls/libnvidia-tls.so.1')
+ location="/usr/lib64/opengl/nvidia/lib/libnvidia-tls.so.1",
+ target="../tls/libnvidia-tls.so.1",
+ )
obj2 = self.make_obj(
- location='/usr/lib32/opengl/nvidia/lib/libGL.s',
- target='libGL.so.173.14.09')
+ location="/usr/lib32/opengl/nvidia/lib/libGL.s", target="libGL.so.173.14.09"
+ )
assert obj1 > obj2
assert obj2 < obj1
@@ -184,6 +200,6 @@ def test_is_funcs():
assert not fs.isreg(object())
assert not fs.isfifo(object())
- assert fs.isdir(fs.fsDir('/tmp', strict=False))
- assert not fs.isreg(fs.fsDir('/tmp', strict=False))
- assert fs.isreg(fs.fsFile('/tmp', strict=False))
+ assert fs.isdir(fs.fsDir("/tmp", strict=False))
+ assert not fs.isreg(fs.fsDir("/tmp", strict=False))
+ assert fs.isreg(fs.fsFile("/tmp", strict=False))
diff --git a/tests/fs/test_livefs.py b/tests/fs/test_livefs.py
index daa6ccf36..f6fe28d0c 100644
--- a/tests/fs/test_livefs.py
+++ b/tests/fs/test_livefs.py
@@ -8,7 +8,6 @@ from pkgcore.fs.contents import contentsSet
class TestFsObjs:
-
def check_attrs(self, obj, path, offset=None):
if offset is None:
st = path.lstat()
@@ -81,7 +80,7 @@ class TestFsObjs:
# do offset verification now.
offset = path
for obj in livefs.iter_scan(str(path), offset=str(offset)):
- self.check_attrs(obj, Path(obj.location).relative_to('/'), offset=offset)
+ self.check_attrs(obj, Path(obj.location).relative_to("/"), offset=offset)
seen = []
for obj in livefs.iter_scan(str(files[0])):
@@ -97,21 +96,23 @@ class TestFsObjs:
# regular directory scanning
sorted_files = livefs.sorted_scan(str(tmp_path))
- assert sorted_files == [str(tmp_path / x) for x in ('blah', 'dar', 'tmp')]
+ assert sorted_files == [str(tmp_path / x) for x in ("blah", "dar", "tmp")]
# nonexistent paths
- nonexistent_path = str(tmp_path / 'foobar')
+ nonexistent_path = str(tmp_path / "foobar")
assert livefs.sorted_scan(nonexistent_path) == []
- assert livefs.sorted_scan(nonexistent_path, nonexistent=True) == [nonexistent_path]
+ assert livefs.sorted_scan(nonexistent_path, nonexistent=True) == [
+ nonexistent_path
+ ]
def test_sorted_scan_hidden(self, tmp_path):
for x in (".tmp", "blah"):
(tmp_path / x).touch()
sorted_files = livefs.sorted_scan(str(tmp_path))
- assert [str(tmp_path / x) for x in ('.tmp', 'blah')] == sorted_files
+ assert [str(tmp_path / x) for x in (".tmp", "blah")] == sorted_files
sorted_files = livefs.sorted_scan(str(tmp_path), hidden=False)
- assert [str(tmp_path / x) for x in ('blah', )] == sorted_files
+ assert [str(tmp_path / x) for x in ("blah",)] == sorted_files
def test_sorted_scan_backup(self, tmp_path):
for x in ("blah", "blah~"):
@@ -120,7 +121,7 @@ class TestFsObjs:
sorted_files = livefs.sorted_scan(str(tmp_path))
assert [str(tmp_path / x) for x in ("blah", "blah~")] == sorted_files
sorted_files = livefs.sorted_scan(str(tmp_path), backup=False)
- assert [str(tmp_path / x) for x in ('blah', )] == sorted_files
+ assert [str(tmp_path / x) for x in ("blah",)] == sorted_files
def test_relative_sym(self, tmp_path):
(path := tmp_path / "relative-symlink-test").symlink_to("../sym1/blah")
@@ -128,13 +129,17 @@ class TestFsObjs:
assert o.target == "../sym1/blah"
def test_intersect(self, tmp_path):
- (tmp_path / 'reg').touch()
- cset = contentsSet([fs.fsFile('reg', strict=False)])
+ (tmp_path / "reg").touch()
+ cset = contentsSet([fs.fsFile("reg", strict=False)])
cset = cset.insert_offset(str(tmp_path))
assert contentsSet(livefs.intersect(cset)) == cset
- cset = contentsSet([fs.fsFile('reg/foon', strict=False),
- fs.fsFile('reg/dar', strict=False),
- fs.fsDir('reg/dir', strict=False)]).insert_offset(str(tmp_path))
+ cset = contentsSet(
+ [
+ fs.fsFile("reg/foon", strict=False),
+ fs.fsFile("reg/dar", strict=False),
+ fs.fsDir("reg/dir", strict=False),
+ ]
+ ).insert_offset(str(tmp_path))
assert not list(livefs.intersect(cset))
- cset = contentsSet([fs.fsDir('reg', strict=False)])
+ cset = contentsSet([fs.fsDir("reg", strict=False)])
assert not list(livefs.intersect(cset))
diff --git a/tests/fs/test_ops.py b/tests/fs/test_ops.py
index 00aaccf3e..578ddf5c3 100644
--- a/tests/fs/test_ops.py
+++ b/tests/fs/test_ops.py
@@ -9,22 +9,24 @@ from snakeoil.data_source import local_source
def verify(obj, kwds):
stat = os.stat(obj.location)
- for attr, keyword in (("st_mtime", "mtime"),
- ("st_gid", "gid"),
- ("st_uid", "uid")):
+ for attr, keyword in (("st_mtime", "mtime"), ("st_gid", "gid"), ("st_uid", "uid")):
if keyword in kwds:
assert getattr(stat, attr) == kwds[keyword], f"testing {keyword}"
if "mode" in kwds:
assert (stat.st_mode & 0o4777) == kwds["mode"]
-@pytest.mark.parametrize(("creator_func", "kls"), (
- pytest.param(os.mkdir, fs.fsDir, id="dir"),
- pytest.param(lambda s: open(s, "w").close(), fs.fsFile, id="file"),
-))
+@pytest.mark.parametrize(
+ ("creator_func", "kls"),
+ (
+ pytest.param(os.mkdir, fs.fsDir, id="dir"),
+ pytest.param(lambda s: open(s, "w").close(), fs.fsFile, id="file"),
+ ),
+)
def test_default_ensure_perms(tmp_path, creator_func, kls):
- kwds = dict(mtime=0o1234, uid=os.getuid(), gid=os.getgid(),
- mode=0o775, dev=None, inode=None)
+ kwds = dict(
+ mtime=0o1234, uid=os.getuid(), gid=os.getgid(), mode=0o775, dev=None, inode=None
+ )
o = kls(str(tmp_path / "blah"), **kwds)
creator_func(o.location)
assert ops.ensure_perms(o)
@@ -55,15 +57,20 @@ def test_default_mkdir(tmp_path):
class TestCopyFile:
-
def test_it(self, tmp_path):
content = "\n".join("asdf" for _ in range(10))
(src := tmp_path / "copy_test_src").write_text(content)
dest = tmp_path / "copy_test_dest"
- kwds = {"mtime":10321, "uid":os.getuid(), "gid":os.getgid(),
- "mode":0o664, "data":local_source(str(src)), "dev":None,
- "inode":None}
+ kwds = {
+ "mtime": 10321,
+ "uid": os.getuid(),
+ "gid": os.getgid(),
+ "mode": 0o664,
+ "data": local_source(str(src)),
+ "dev": None,
+ "inode": None,
+ }
o = fs.fsFile(str(dest), **kwds)
assert ops.copyfile(o)
assert dest.read_text() == content
@@ -79,8 +86,9 @@ class TestCopyFile:
)
group = group[0]
fp = str(tmp_path / "sym")
- o = fs.fsSymlink(fp, mtime=10321, uid=os.getuid(), gid=group,
- mode=0o664, target='target')
+ o = fs.fsSymlink(
+ fp, mtime=10321, uid=os.getuid(), gid=group, mode=0o664, target="target"
+ )
assert ops.copyfile(o)
assert os.lstat(fp).st_gid == group
assert os.lstat(fp).st_uid == os.getuid()
@@ -97,7 +105,9 @@ class TestCopyFile:
livefs.gen_obj(fp).change_attributes(location=path)()
# test sym over a directory.
- f = fs.fsSymlink(path, fp, mode=0o644, mtime=0, uid=os.getuid(), gid=os.getgid())
+ f = fs.fsSymlink(
+ path, fp, mode=0o644, mtime=0, uid=os.getuid(), gid=os.getgid()
+ )
with pytest.raises(TypeError):
ops.copyfile(f)
os.unlink(fp)
@@ -141,7 +151,6 @@ class ContentsMixin:
class TestMergeContents(ContentsMixin):
-
@pytest.fixture
def generic_merge_bits(self, request, tmp_path):
entries = getattr(self, request.param)
@@ -154,7 +163,9 @@ class TestMergeContents(ContentsMixin):
assert livefs.scan(src, offset=src) == livefs.scan(dest, offset=dest)
return src, dest, cset
- @pytest.mark.parametrize("generic_merge_bits", ("entries_norm1", "entries_rec1"), indirect=True)
+ @pytest.mark.parametrize(
+ "generic_merge_bits", ("entries_norm1", "entries_rec1"), indirect=True
+ )
def test_callback(self, generic_merge_bits):
src, dest, cset = generic_merge_bits
new_cset = contents.contentsSet(contents.offset_rewriter(dest, cset))
@@ -170,7 +181,7 @@ class TestMergeContents(ContentsMixin):
assert ops.merge_contents(cset, offset=str(dest))
assert cset == livefs.scan(src, offset=str(dest))
- @pytest.mark.parametrize("generic_merge_bits", ("entries_norm1", ), indirect=True)
+ @pytest.mark.parametrize("generic_merge_bits", ("entries_norm1",), indirect=True)
def test_exact_overwrite(self, generic_merge_bits):
src, dest, cset = generic_merge_bits
assert ops.merge_contents(cset, offset=dest)
@@ -179,8 +190,9 @@ class TestMergeContents(ContentsMixin):
(path := tmp_path / "sym").mkdir()
fp = tmp_path / "trg"
# test sym over a directory.
- f = fs.fsSymlink(str(path), str(fp), mode=0o644, mtime=0,
- uid=os.getuid(), gid=os.getgid())
+ f = fs.fsSymlink(
+ str(path), str(fp), mode=0o644, mtime=0, uid=os.getuid(), gid=os.getgid()
+ )
cset = contents.contentsSet([f])
with pytest.raises(ops.FailedCopy):
ops.merge_contents(cset)
@@ -199,7 +211,6 @@ class TestMergeContents(ContentsMixin):
class TestUnmergeContents(ContentsMixin):
-
@pytest.fixture
def generic_unmerge_bits(self, request, tmp_path):
entries = getattr(self, request.param)
@@ -208,25 +219,27 @@ class TestUnmergeContents(ContentsMixin):
cset = livefs.scan(img, offset=img)
return img, cset
- @pytest.mark.parametrize("generic_unmerge_bits", ("entries_norm1", "entries_rec1"), indirect=True)
+ @pytest.mark.parametrize(
+ "generic_unmerge_bits", ("entries_norm1", "entries_rec1"), indirect=True
+ )
def test_callback(self, generic_unmerge_bits):
img, cset = generic_unmerge_bits
s = set(contents.offset_rewriter(img, cset))
ops.unmerge_contents(cset, offset=img, callback=s.remove)
assert not s
- @pytest.mark.parametrize("generic_unmerge_bits", ("entries_norm1", ), indirect=True)
+ @pytest.mark.parametrize("generic_unmerge_bits", ("entries_norm1",), indirect=True)
def test_empty_removal(self, tmp_path, generic_unmerge_bits):
img, cset = generic_unmerge_bits
assert ops.unmerge_contents(cset, offset=str(tmp_path / "dest"))
- @pytest.mark.parametrize("generic_unmerge_bits", ("entries_norm1", ), indirect=True)
+ @pytest.mark.parametrize("generic_unmerge_bits", ("entries_norm1",), indirect=True)
def test_exact_removal(self, generic_unmerge_bits):
img, cset = generic_unmerge_bits
assert ops.unmerge_contents(cset, offset=img)
assert not livefs.scan(img, offset=img)
- @pytest.mark.parametrize("generic_unmerge_bits", ("entries_norm1", ), indirect=True)
+ @pytest.mark.parametrize("generic_unmerge_bits", ("entries_norm1",), indirect=True)
def test_lingering_file(self, generic_unmerge_bits):
img, cset = generic_unmerge_bits
dirs = [k for k, v in self.entries_norm1.items() if v[0] == "dir"]
diff --git a/tests/merge/test_engine.py b/tests/merge/test_engine.py
index bbebb33a1..35421a4c8 100644
--- a/tests/merge/test_engine.py
+++ b/tests/merge/test_engine.py
@@ -9,7 +9,6 @@ from .util import fake_engine
class fake_pkg:
-
def __init__(self, contents, label=None):
self.label = label
self.contents = contents
@@ -44,17 +43,17 @@ class TestMergeEngineCsets:
return getattr(MergeEngine, target)(engine, engine.csets, *args)
def test_generate_offset_cset(self):
- engine = fake_engine(csets={"new_cset":self.simple_cset},
- offset='/')
+ engine = fake_engine(csets={"new_cset": self.simple_cset}, offset="/")
+
def run(engine, cset):
- return self.run_cset('generate_offset_cset', engine,
- lambda e, c:c[cset])
+ return self.run_cset("generate_offset_cset", engine, lambda e, c: c[cset])
- self.assertCsetEqual(self.simple_cset, run(engine, 'new_cset'))
- engine.offset = '/foon/'
- run(engine, 'new_cset')
- self.assertCsetEqual(self.simple_cset.insert_offset(engine.offset),
- run(engine, 'new_cset'))
+ self.assertCsetEqual(self.simple_cset, run(engine, "new_cset"))
+ engine.offset = "/foon/"
+ run(engine, "new_cset")
+ self.assertCsetEqual(
+ self.simple_cset.insert_offset(engine.offset), run(engine, "new_cset")
+ )
def test_get_pkg_contents(self):
new_cset = MergeEngine.get_pkg_contents(None, None, fake_pkg(self.simple_cset))
@@ -64,17 +63,15 @@ class TestMergeEngineCsets:
def test_get_remove_cset(self):
files = contentsSet(self.simple_cset.iterfiles(invert=True))
- engine = fake_engine(csets={'install':files,
- 'old_cset':self.simple_cset})
- self.assertCsetEqual(self.simple_cset.iterfiles(),
- self.run_cset('get_remove_cset', engine))
+ engine = fake_engine(csets={"install": files, "old_cset": self.simple_cset})
+ self.assertCsetEqual(
+ self.simple_cset.iterfiles(), self.run_cset("get_remove_cset", engine)
+ )
def test_get_replace_cset(self):
files = contentsSet(self.simple_cset.iterfiles(invert=True))
- engine = fake_engine(csets={'install':files,
- 'old_cset':self.simple_cset})
- self.assertCsetEqual(files,
- self.run_cset('get_replace_cset', engine))
+ engine = fake_engine(csets={"install": files, "old_cset": self.simple_cset})
+ self.assertCsetEqual(files, self.run_cset("get_replace_cset", engine))
def test_rewrite_awareness(self, tmp_path):
src = contentsSet(self.simple_cset)
@@ -82,11 +79,11 @@ class TestMergeEngineCsets:
trg = src.difference(["/usr/lib/donkey"])
trg.add(fsFile("/usr/lib64/donkey"))
trg = trg.insert_offset(str(tmp_path))
- (tmp_path / 'usr' / 'lib64').mkdir(parents=True)
- (tmp_path / 'usr' / 'lib').symlink_to("lib64")
+ (tmp_path / "usr" / "lib64").mkdir(parents=True)
+ (tmp_path / "usr" / "lib").symlink_to("lib64")
pkg = fake_pkg(src)
engine = MergeEngine.install(str(tmp_path), pkg, offset=str(tmp_path))
- result = engine.csets['resolved_install']
+ result = engine.csets["resolved_install"]
assert set(result.iterfiles()) == set(trg.iterfiles())
@pytest.mark.skip("contentset should handle this")
@@ -98,22 +95,21 @@ class TestMergeEngineCsets:
trg = trg.insert_offset(str(tmp_path))
pkg = fake_pkg(src)
engine = MergeEngine.install(str(tmp_path), pkg, offset=str(tmp_path))
- result = engine.csets['new_cset']
+ result = engine.csets["new_cset"]
assert set(result.iterfiles()) == set(trg.iterfiles())
def test_get_livefs_intersect_cset(self, tmp_path):
old_cset = self.simple_cset.insert_offset(str(tmp_path))
# have to add it; scan adds the root node
old_cset.add(fsDir(str(tmp_path)))
- (tmp_path / 'usr').mkdir()
- (tmp_path / 'usr' / 'dar').touch()
- (tmp_path / 'foon').touch()
+ (tmp_path / "usr").mkdir()
+ (tmp_path / "usr" / "dar").touch()
+ (tmp_path / "foon").touch()
# note that this *is* a sym in the cset; adding this specific
# check so that if the code differs, the test breaks, and the tests
# get updated (additionally, folks may not be aware of the potential)
- (tmp_path / 'broken-symlink').touch()
- engine = fake_engine(csets={'test':old_cset})
+ (tmp_path / "broken-symlink").touch()
+ engine = fake_engine(csets={"test": old_cset})
existent = livefs.scan(str(tmp_path))
- generated = self.run_cset('_get_livefs_intersect_cset', engine,
- 'test')
+ generated = self.run_cset("_get_livefs_intersect_cset", engine, "test")
assert generated == existent
diff --git a/tests/merge/test_triggers.py b/tests/merge/test_triggers.py
index 7190fe000..454eeb5b8 100644
--- a/tests/merge/test_triggers.py
+++ b/tests/merge/test_triggers.py
@@ -21,11 +21,12 @@ from .util import fake_engine, fake_reporter, fake_trigger
def _render_msg(func, msg, *args, **kwargs):
func(msg % (args if args else kwargs))
+
def make_fake_reporter(**kwargs):
- kwargs = dict((key, partial(_render_msg, val))
- for key, val in kwargs.items())
+ kwargs = dict((key, partial(_render_msg, val)) for key, val in kwargs.items())
return fake_reporter(**kwargs)
+
class TestBase:
kls = fake_trigger
@@ -43,7 +44,7 @@ class TestBase:
def test_label(self):
assert self.mk_trigger().label == str(self.kls.__name__)
assert fake_trigger().label == str(fake_trigger.__name__)
- assert fake_trigger(_label='foon').label == 'foon'
+ assert fake_trigger(_label="foon").label == "foon"
def test_localize(self):
o = self.mk_trigger()
@@ -53,10 +54,12 @@ class TestBase:
assert fake_trigger(required_csets=None).get_required_csets(None) is None
assert fake_trigger(required_csets=None).get_required_csets(1) is None
assert fake_trigger(required_csets=None).get_required_csets("") is None
- o = fake_trigger(required_csets={"foo":["dar"], "bar":1})
+ o = fake_trigger(required_csets={"foo": ["dar"], "bar": 1})
assert o.get_required_csets("foo") == ["dar"]
assert o.get_required_csets("bar") == 1
- assert fake_trigger(required_csets=("dar", "foo")).get_required_csets("bar") == ("dar", "foo")
+ assert fake_trigger(required_csets=("dar", "foo")).get_required_csets(
+ "bar"
+ ) == ("dar", "foo")
assert fake_trigger(required_csets=()).get_required_csets("") == ()
def test_register(self):
@@ -70,13 +73,13 @@ class TestBase:
# shouldn't puke.
o = self.mk_trigger(mode=1, _hooks=("2"))
o.register(engine)
- assert engine._triggers == [('2', o, None)]
+ assert engine._triggers == [("2", o, None)]
engine._triggers = []
# verify it's treating "all csets" differently from "no csets"
o = self.mk_trigger(mode=1, _hooks=("2"), required_csets=())
o.register(engine)
- assert engine._triggers == [('2', o, ())]
+ assert engine._triggers == [("2", o, ())]
# should handle keyerror thrown from the engine for missing hooks.
engine = fake_engine(mode=1, blocked_hooks=("foon", "dar"))
@@ -86,33 +89,41 @@ class TestBase:
o = self.mk_trigger(mode=1, _hooks=("foon", "bar"), required_csets=(3,))
o.register(engine)
- assert engine._triggers == [('bar', o, (3,))]
+ assert engine._triggers == [("bar", o, (3,))]
engine._triggers = []
o = self.mk_trigger(mode=1, _hooks="bar", required_csets=None)
o.register(engine)
- assert engine._triggers == [('bar', o, None)]
+ assert engine._triggers == [("bar", o, None)]
def test_call(self):
# test "I want all csets"
def get_csets(required_csets, csets, fallback=None):
- o = self.mk_trigger(required_csets={1:required_csets, 2:fallback},
- mode=(1,))
+ o = self.mk_trigger(
+ required_csets={1: required_csets, 2: fallback}, mode=(1,)
+ )
engine = fake_engine(csets=csets, mode=1)
o(engine, csets)
- assert [x[0] for x in o._called] == [engine]*len(o._called)
+ assert [x[0] for x in o._called] == [engine] * len(o._called)
return [list(x[1:]) for x in o._called]
d = object()
- assert get_csets(None, d, [1]) == [[d]], \
- "raw csets mapping should be passed through without conversion" \
+ assert get_csets(None, d, [1]) == [[d]], (
+ "raw csets mapping should be passed through without conversion"
" for required_csets=None"
+ )
- assert get_csets([1,2], {1: 1,2: 2}) == [[1, 2]],"basic mapping through failed"
- assert get_csets([], {}) == [[]], "for no required csets, must have no args passed"
+ assert get_csets([1, 2], {1: 1, 2: 2}) == [
+ [1, 2]
+ ], "basic mapping through failed"
+ assert get_csets([], {}) == [
+ []
+ ], "for no required csets, must have no args passed"
def test_module_constants():
- assert {const.REPLACE_MODE, const.UNINSTALL_MODE} == set(triggers.UNINSTALLING_MODES)
+ assert {const.REPLACE_MODE, const.UNINSTALL_MODE} == set(
+ triggers.UNINSTALLING_MODES
+ )
assert {const.REPLACE_MODE, const.INSTALL_MODE} == set(triggers.INSTALLING_MODES)
@@ -125,18 +136,18 @@ class Test_mtime_watcher:
t = self.kls()
t.set_state([str(tmp_path)])
assert list(t.saved_mtimes) == o
- (tmp_path / 'file').touch()
- t.set_state([str(tmp_path), str(tmp_path / 'file')])
+ (tmp_path / "file").touch()
+ t.set_state([str(tmp_path), str(tmp_path / "file")])
assert list(t.saved_mtimes) == o
- os.mkdir(loc := str(tmp_path / 'dir'))
+ os.mkdir(loc := str(tmp_path / "dir"))
o.append(gen_obj(loc))
o.sort()
t.set_state([x.location for x in o])
assert sorted(t.saved_mtimes) == o
# test syms.
- os.mkdir(src := str(tmp_path / 'dir2'))
- os.symlink(src, loc := str(tmp_path / 'foo'))
+ os.mkdir(src := str(tmp_path / "dir2"))
+ os.symlink(src, loc := str(tmp_path / "foo"))
locs = [x.location for x in o]
# insert a crap location to ensure it handles it.
@@ -163,7 +174,7 @@ class Test_mtime_watcher:
# thus ignored.
t.set_state(locs, stat_func=os.lstat)
assert sorted(t.saved_mtimes) == o
- (tmp_path / 'bar').touch()
+ (tmp_path / "bar").touch()
assert t.check_state()
# test dead sym filtering for stat.
@@ -203,6 +214,7 @@ def castrate_trigger(base_kls, **kwargs):
class castrated_trigger(base_kls):
enable_regen = False
+
def __init__(self, *args2, **kwargs2):
self._passed_in_args = []
base_kls.__init__(self, *args2, **kwargs2)
@@ -229,7 +241,9 @@ class trigger_mixin:
self.trigger = self.kls()
-@pytest.mark.skipif(not sys.platform.startswith('linux'), reason='supported on Linux only')
+@pytest.mark.skipif(
+ not sys.platform.startswith("linux"), reason="supported on Linux only"
+)
class Test_ldconfig(trigger_mixin):
# use the kls indirection for when *bsd version of ldconfig trigger
@@ -239,29 +253,37 @@ class Test_ldconfig(trigger_mixin):
def test_read_ld_so_conf(self, tmp_path):
# test the defaults first. should create etc and the file.
- assert set(self.trigger.read_ld_so_conf(str(tmp_path))) == {str(tmp_path / x) for x in self.trigger.default_ld_path}
- o = gen_obj(str(tmp_path / 'etc'))
+ assert set(self.trigger.read_ld_so_conf(str(tmp_path))) == {
+ str(tmp_path / x) for x in self.trigger.default_ld_path
+ }
+ o = gen_obj(str(tmp_path / "etc"))
assert o.mode == 0o755
assert fs.isdir(o)
- assert (tmp_path / 'etc/ld.so.conf').exists()
+ assert (tmp_path / "etc/ld.so.conf").exists()
# test normal functioning.
- (tmp_path / 'etc/ld.so.conf').write_text("\n".join(("/foon", "dar", "blarnsball", "#comment")))
- assert set(self.trigger.read_ld_so_conf(str(tmp_path))) == {str(tmp_path / x) for x in ("foon", "dar", "blarnsball")}
-
- @pytest.mark.parametrize(("touches", "mkdirs", "same_mtime"), (
- # ensure it doesn't explode for missing dirs.
- ([], False, False),
- ([], True, False),
-
- (['test-lib/foon'], True, False),
- (['test-lib/foon'], True, True),
- ))
+ (tmp_path / "etc/ld.so.conf").write_text(
+ "\n".join(("/foon", "dar", "blarnsball", "#comment"))
+ )
+ assert set(self.trigger.read_ld_so_conf(str(tmp_path))) == {
+ str(tmp_path / x) for x in ("foon", "dar", "blarnsball")
+ }
+
+ @pytest.mark.parametrize(
+ ("touches", "mkdirs", "same_mtime"),
+ (
+ # ensure it doesn't explode for missing dirs.
+ ([], False, False),
+ ([], True, False),
+ (["test-lib/foon"], True, False),
+ (["test-lib/foon"], True, True),
+ ),
+ )
def test_trigger(self, tmp_path, touches, mkdirs, same_mtime):
- dirs=['test-lib', 'test-lib2']
+ dirs = ["test-lib", "test-lib2"]
ensure_dirs(tmp_path / "etc")
- (tmp_path / "etc/ld.so.conf").write_text("\n".join('/' + x for x in dirs))
+ (tmp_path / "etc/ld.so.conf").write_text("\n".join("/" + x for x in dirs))
# force directory mtime to 1s less.
past = time.time() - 10.0
if mkdirs:
@@ -270,24 +292,27 @@ class Test_ldconfig(trigger_mixin):
os.utime(tmp_path / x, (past, past))
self.reset_objects()
- self.engine.phase = 'pre_merge'
+ self.engine.phase = "pre_merge"
self.engine.mode = const.INSTALL_MODE
self.trigger(self.engine, {})
assert not self.trigger._passed_in_args
resets = set()
for x in touches:
- (fp := tmp_path / x.lstrip('/')).touch()
+ (fp := tmp_path / x.lstrip("/")).touch()
if same_mtime:
os.utime(fp, (past, past))
resets.add(fp.parent)
for x in resets:
os.utime(x, (past, past))
- self.engine.phase = 'post_merge'
+ self.engine.phase = "post_merge"
self.trigger(self.engine, {})
- assert [[getattr(x, 'offset', None) for x in y] for y in self.trigger._passed_in_args] == [[str(tmp_path)]]
+ assert [
+ [getattr(x, "offset", None) for x in y]
+ for y in self.trigger._passed_in_args
+ ] == [[str(tmp_path)]]
class TestInfoRegen(trigger_mixin):
@@ -296,14 +321,16 @@ class TestInfoRegen(trigger_mixin):
@property
def kls(self):
- return castrate_trigger(self.raw_kls, locations=['/'])
+ return castrate_trigger(self.raw_kls, locations=["/"])
- info_data = textwrap.dedent("""\
+ info_data = textwrap.dedent(
+ """\
INFO-DIR-SECTION Network Applications
START-INFO-DIR-ENTRY
* Wget: (wget). The non-interactive network downloader.
END-INFO-DIR-ENTRY
- """)
+ """
+ )
def reset_objects(self, **kwargs):
trigger_mixin.reset_objects(self, **kwargs)
@@ -311,7 +338,7 @@ class TestInfoRegen(trigger_mixin):
def test_binary_path(self):
try:
- path = process.find_binary('install-info')
+ path = process.find_binary("install-info")
except process.CommandNotFound:
path = None
assert path == self.trigger.get_binary_path()
@@ -319,25 +346,28 @@ class TestInfoRegen(trigger_mixin):
with os_environ("PATH"):
assert self.trigger.get_binary_path() is None
- @pytest.mark.skipif(triggers.InfoRegen().get_binary_path() is None, reason="can't verify regen behavior due to install-info not being available")
+ @pytest.mark.skipif(
+ triggers.InfoRegen().get_binary_path() is None,
+ reason="can't verify regen behavior due to install-info not being available",
+ )
def test_regen(self, tmp_path):
o = self.raw_kls()
path = o.get_binary_path()
# test it without the directory existing.
- assert not list(o.regen(path, str(tmp_path / 'foo')))
- assert not (tmp_path / 'foo').exists()
- (tmp_path / 'foo.info').write_text(self.info_data)
+ assert not list(o.regen(path, str(tmp_path / "foo")))
+ assert not (tmp_path / "foo").exists()
+ (tmp_path / "foo.info").write_text(self.info_data)
# no issues.
assert not list(o.regen(path, str(tmp_path)))
- assert (tmp_path / 'dir').exists(), "info dir file wasn't created"
+ assert (tmp_path / "dir").exists(), "info dir file wasn't created"
# drop the last line, verify it returns that file.
- (tmp_path / 'foo2.info').write_text('\n'.join(self.info_data.splitlines()[:-1]))
+ (tmp_path / "foo2.info").write_text("\n".join(self.info_data.splitlines()[:-1]))
# should ignore \..* files
(tmp_path / ".foo.info").touch()
(tmp_path / "dir").unlink()
- assert list(o.regen(path, self.dir)) == [str(tmp_path / 'foo2.info')]
- assert (tmp_path / 'dir').exists(), "info dir file wasn't created"
+ assert list(o.regen(path, self.dir)) == [str(tmp_path / "foo2.info")]
+ assert (tmp_path / "dir").exists(), "info dir file wasn't created"
def run_trigger(self, phase, expected_regen=()):
l = []
@@ -345,59 +375,66 @@ class TestInfoRegen(trigger_mixin):
self.trigger._passed_in_args = []
self.engine.phase = phase
self.trigger(self.engine, {})
- assert list(map(normpath, (x[1] for x in self.trigger._passed_in_args))) == list(map(normpath, expected_regen))
+ assert list(
+ map(normpath, (x[1] for x in self.trigger._passed_in_args))
+ ) == list(map(normpath, expected_regen))
return l
- @pytest.mark.skipif(triggers.InfoRegen().get_binary_path() is None, reason="can't verify regen behavior due to install-info not being available")
+ @pytest.mark.skipif(
+ triggers.InfoRegen().get_binary_path() is None,
+ reason="can't verify regen behavior due to install-info not being available",
+ )
def test_trigger(self, tmp_path):
with os_environ("PATH"):
- self.engine.phase = 'post_merge'
+ self.engine.phase = "post_merge"
assert self.trigger(self.engine, {}) is None
# verify it runs when dir is missing.
# doesn't create the file since no info files.
self.reset_objects()
- assert not self.run_trigger('pre_merge', [])
- assert not self.run_trigger('post_merge', [self.dir])
+ assert not self.run_trigger("pre_merge", [])
+ assert not self.run_trigger("post_merge", [self.dir])
# add an info, and verify it generated.
- (tmp_path / 'foo.info').write_text(self.info_data)
+ (tmp_path / "foo.info").write_text(self.info_data)
self.reset_objects()
self.trigger.enable_regen = True
- assert not self.run_trigger('pre_merge', [])
- assert not self.run_trigger('post_merge', [self.dir])
+ assert not self.run_trigger("pre_merge", [])
+ assert not self.run_trigger("post_merge", [self.dir])
# verify it doesn't; mtime is fine
self.reset_objects()
self.trigger.enable_regen = True
- assert not self.run_trigger('pre_merge', [])
- assert not self.run_trigger('post_merge', [])
+ assert not self.run_trigger("pre_merge", [])
+ assert not self.run_trigger("post_merge", [])
# verify it handles quoting properly, and that it ignores
# complaints about duplicates.
self.reset_objects()
self.trigger.enable_regen = True
- assert not self.run_trigger('pre_merge', [])
- (tmp_path / 'blaidd drwg.info').write_text(self.info_data)
- assert not self.run_trigger('post_merge', [self.dir])
+ assert not self.run_trigger("pre_merge", [])
+ (tmp_path / "blaidd drwg.info").write_text(self.info_data)
+ assert not self.run_trigger("post_merge", [self.dir])
# verify it passes back failures.
self.reset_objects()
self.trigger.enable_regen = True
- assert not self.run_trigger('pre_merge', [])
- (tmp_path / "tiza grande.info").write_text('\n'.join(self.info_data.splitlines()[:-1]))
- l = self.run_trigger('post_merge', [self.dir])
+ assert not self.run_trigger("pre_merge", [])
+ (tmp_path / "tiza grande.info").write_text(
+ "\n".join(self.info_data.splitlines()[:-1])
+ )
+ l = self.run_trigger("post_merge", [self.dir])
assert len(l) == 1
- assert 'tiza grande.info' in l[0]
+ assert "tiza grande.info" in l[0]
# verify it holds off on info regen till after unmerge for replaces.
self.reset_objects(mode=const.REPLACE_MODE)
- assert not self.run_trigger('pre_merge', [])
- assert not self.run_trigger('post_merge', [])
- assert not self.run_trigger('pre_unmerge', [])
+ assert not self.run_trigger("pre_merge", [])
+ assert not self.run_trigger("post_merge", [])
+ assert not self.run_trigger("pre_unmerge", [])
(tmp_path / "tiza grande.info").unlink()
- assert not self.run_trigger('post_unmerge', [self.dir])
+ assert not self.run_trigger("post_unmerge", [self.dir])
class single_attr_change_base:
@@ -413,28 +450,37 @@ class single_attr_change_base:
def test_metadata(self):
assert self.kls._engine_types == triggers.INSTALLING_MODES
- assert self.kls.required_csets == ('new_cset', )
- assert self.kls._hooks == ('pre_merge', )
+ assert self.kls.required_csets == ("new_cset",)
+ assert self.kls._hooks == ("pre_merge",)
@property
def trigger(self):
return self.kls(1, 2)
- @pytest.mark.parametrize("cset", (
- (),
- (fs.fsFile("/foon", mode=0o644, uid=2, gid=1, strict=False), ),
- (fs.fsFile("/foon", mode=0o646, uid=1, gid=1, strict=False), ),
- (fs.fsFile("/foon", mode=0o4766, uid=1, gid=2, strict=False), ),
- (fs.fsFile("/blarn", mode=0o2700, uid=2, gid=2, strict=False),
- fs.fsDir("/dir", mode=0o500, uid=2, gid=2, strict=False), ),
- (fs.fsFile("/blarn", mode=0o2776, uid=2, gid=2, strict=False),
- fs.fsDir("/dir", mode=0o2777, uid=1, gid=2, strict=False), ),
- (fs.fsFile("/blarn", mode=0o6772, uid=2, gid=2, strict=False),
- fs.fsDir("/dir", mode=0o4774, uid=1, gid=1, strict=False), ),
- ))
+ @pytest.mark.parametrize(
+ "cset",
+ (
+ (),
+ (fs.fsFile("/foon", mode=0o644, uid=2, gid=1, strict=False),),
+ (fs.fsFile("/foon", mode=0o646, uid=1, gid=1, strict=False),),
+ (fs.fsFile("/foon", mode=0o4766, uid=1, gid=2, strict=False),),
+ (
+ fs.fsFile("/blarn", mode=0o2700, uid=2, gid=2, strict=False),
+ fs.fsDir("/dir", mode=0o500, uid=2, gid=2, strict=False),
+ ),
+ (
+ fs.fsFile("/blarn", mode=0o2776, uid=2, gid=2, strict=False),
+ fs.fsDir("/dir", mode=0o2777, uid=1, gid=2, strict=False),
+ ),
+ (
+ fs.fsFile("/blarn", mode=0o6772, uid=2, gid=2, strict=False),
+ fs.fsDir("/dir", mode=0o4774, uid=1, gid=1, strict=False),
+ ),
+ ),
+ )
def test_trigger_contents(self, cset):
new = contentsSet(orig := sorted(cset))
- self.trigger(fake_engine(mode=const.INSTALL_MODE), {'new_cset': new})
+ self.trigger(fake_engine(mode=const.INSTALL_MODE), {"new_cset": new})
new = sorted(new)
assert len(orig) == len(new)
for x, y in zip(orig, new):
@@ -446,29 +492,28 @@ class single_attr_change_base:
assert self.good_val(val) == getattr(y, attr)
else:
assert self.good_val(val) == getattr(y, attr)
- elif attr != 'chksums':
+ elif attr != "chksums":
# abuse self as unique singleton.
assert getattr(x, attr, self) == getattr(y, attr, self)
-
class Test_fix_uid_perms(single_attr_change_base):
kls = triggers.fix_uid_perms
- attr = 'uid'
+ attr = "uid"
class Test_fix_gid_perms(single_attr_change_base):
kls = triggers.fix_gid_perms
- attr = 'gid'
+ attr = "gid"
class Test_fix_set_bits(single_attr_change_base):
kls = triggers.fix_set_bits
- trigger = property(lambda self:self.kls())
- attr = 'mode'
+ trigger = property(lambda self: self.kls())
+ attr = "mode"
@staticmethod
def good_val(val):
@@ -482,7 +527,7 @@ class Test_detect_world_writable(single_attr_change_base):
kls = triggers.detect_world_writable
_trigger_override = None
- attr = 'mode'
+ attr = "mode"
@property
def trigger(self):
@@ -491,9 +536,10 @@ class Test_detect_world_writable(single_attr_change_base):
return self._trigger_override()
def good_val(self, val):
- assert self._trigger_override is None, \
- "bug in test code; good_val should not be invoked when a " \
+ assert self._trigger_override is None, (
+ "bug in test code; good_val should not be invoked when a "
"trigger override is in place."
+ )
return val & ~0o002
def test_lazyness(self):
@@ -514,25 +560,28 @@ class Test_detect_world_writable(single_attr_change_base):
self._trigger_override = self.kls()
def run(fs_objs, fix_perms=False):
- self.kls(fix_perms=fix_perms).trigger(engine,
- contentsSet(fs_objs))
+ self.kls(fix_perms=fix_perms).trigger(engine, contentsSet(fs_objs))
- run([fs.fsFile('/foon', mode=0o770, strict=False)])
+ run([fs.fsFile("/foon", mode=0o770, strict=False)])
assert not warnings
- run([fs.fsFile('/foon', mode=0o772, strict=False)])
+ run([fs.fsFile("/foon", mode=0o772, strict=False)])
assert len(warnings) == 1
- assert '/foon' in warnings[0]
+ assert "/foon" in warnings[0]
warnings[:] = []
- run([fs.fsFile('/dar', mode=0o776, strict=False),
- fs.fsFile('/bar', mode=0o776, strict=False),
- fs.fsFile('/far', mode=0o770, strict=False)])
+ run(
+ [
+ fs.fsFile("/dar", mode=0o776, strict=False),
+ fs.fsFile("/bar", mode=0o776, strict=False),
+ fs.fsFile("/far", mode=0o770, strict=False),
+ ]
+ )
assert len(warnings) == 2
- assert '/dar' in ' '.join(warnings)
- assert '/bar' in ' '.join(warnings)
- assert '/far' not in ' '.join(warnings)
+ assert "/dar" in " ".join(warnings)
+ assert "/bar" in " ".join(warnings)
+ assert "/far" not in " ".join(warnings)
class TestPruneFiles:
@@ -540,40 +589,46 @@ class TestPruneFiles:
kls = triggers.PruneFiles
def test_metadata(self):
- assert self.kls.required_csets == ('new_cset', )
- assert self.kls._hooks == ('pre_merge', )
+ assert self.kls.required_csets == ("new_cset",)
+ assert self.kls._hooks == ("pre_merge",)
assert self.kls._engine_types == triggers.INSTALLING_MODES
def test_it(self):
- orig = contentsSet([
- fs.fsFile('/cheddar', strict=False),
- fs.fsFile('/sporks-suck', strict=False),
- fs.fsDir('/foons-rule', strict=False),
- fs.fsDir('/mango', strict=False)
- ])
+ orig = contentsSet(
+ [
+ fs.fsFile("/cheddar", strict=False),
+ fs.fsFile("/sporks-suck", strict=False),
+ fs.fsDir("/foons-rule", strict=False),
+ fs.fsDir("/mango", strict=False),
+ ]
+ )
engine = fake_engine(mode=const.INSTALL_MODE)
+
def run(func):
new = contentsSet(orig)
- self.kls(func)(engine, {'new_cset':new})
+ self.kls(func)(engine, {"new_cset": new})
return new
assert orig == run(lambda s: False)
assert not run(post_curry(isinstance, fs.fsDir)).dirs()
- assert sorted(orig.files()) == sorted(run(post_curry(isinstance, fs.fsDir)).dirs(True))
+ assert sorted(orig.files()) == sorted(
+ run(post_curry(isinstance, fs.fsDir)).dirs(True)
+ )
# check noisiness.
info = []
- engine = fake_engine(observer=make_fake_reporter(info=info.append),
- mode=const.REPLACE_MODE)
+ engine = fake_engine(
+ observer=make_fake_reporter(info=info.append), mode=const.REPLACE_MODE
+ )
- run(lambda s:False)
+ run(lambda s: False)
assert not info
run(post_curry(isinstance, fs.fsDir))
assert len(info) == 2
# ensure only the relevant files show.
- assert '/cheddar' not in ' '.join(info)
- assert '/sporks-suck' not in ' '.join(info)
- assert '/foons-rule' in ' '.join(info)
- assert '/mango' in ' '.join(info)
+ assert "/cheddar" not in " ".join(info)
+ assert "/sporks-suck" not in " ".join(info)
+ assert "/foons-rule" in " ".join(info)
+ assert "/mango" in " ".join(info)
diff --git a/tests/merge/util.py b/tests/merge/util.py
index 3e1cf436b..7d76c7999 100644
--- a/tests/merge/util.py
+++ b/tests/merge/util.py
@@ -4,11 +4,10 @@ from pkgcore.merge import triggers
class fake_trigger(triggers.base):
-
def __init__(self, **kwargs):
self._called = []
- if isinstance(kwargs.get('_hooks', False), str):
- kwargs['_hooks'] = (kwargs['_hooks'],)
+ if isinstance(kwargs.get("_hooks", False), str):
+ kwargs["_hooks"] = (kwargs["_hooks"],)
for k, v in kwargs.items():
if callable(v):
v = partial(v, self)
@@ -19,9 +18,8 @@ class fake_trigger(triggers.base):
class fake_engine:
-
def __init__(self, **kwargs):
- kwargs.setdefault('observer', None)
+ kwargs.setdefault("observer", None)
self._triggers = []
for k, v in kwargs.items():
if callable(v):
diff --git a/tests/package/test_base.py b/tests/package/test_base.py
index 09fdf38b1..7c2b987f2 100644
--- a/tests/package/test_base.py
+++ b/tests/package/test_base.py
@@ -5,18 +5,17 @@ import pytest
from pkgcore.package import base
-def fake_pkg(cat='dev-util', pkg='bsdiff', ver='1.0', **attrs):
- attrs.setdefault('category', cat)
- attrs.setdefault('pkg', pkg)
- attrs.setdefault('ver', ver)
- attrs.setdefault('key', f"{cat}/{pkg}")
- attrs.setdefault('cpvstr', f"{cat}/{pkg}-{ver}")
- attrs.setdefault('built', False)
+def fake_pkg(cat="dev-util", pkg="bsdiff", ver="1.0", **attrs):
+ attrs.setdefault("category", cat)
+ attrs.setdefault("pkg", pkg)
+ attrs.setdefault("ver", ver)
+ attrs.setdefault("key", f"{cat}/{pkg}")
+ attrs.setdefault("cpvstr", f"{cat}/{pkg}-{ver}")
+ attrs.setdefault("built", False)
return SimpleNamespace(**attrs)
class mixin:
-
def mk_inst(self):
raise NotImplementedError(self, "mk_inst")
@@ -46,9 +45,8 @@ class TestBasePkg(mixin):
def test_getattr(self):
class Class(base.base):
__slotting_intentionally_disabled__ = True
- _get_attr = {str(x): partial((lambda a, s: a), x)
- for x in range(10)}
- _get_attr["a"] = lambda s:"foo"
+ _get_attr = {str(x): partial((lambda a, s: a), x) for x in range(10)}
+ _get_attr["a"] = lambda s: "foo"
__getattr__ = base.dynamic_getattr_dict
o = Class()
@@ -65,6 +63,7 @@ class TestWrapper(mixin):
def mk_inst(self, overrides=None, **kwds):
kls = self.kls
if overrides:
+
class kls(self.kls):
locals().update(overrides)
__slots__ = ()
@@ -77,4 +76,4 @@ class TestWrapper(mixin):
assert not self.mk_inst().built
assert self.mk_inst(built=True).built
# verify that wrapping will override it
- assert not self.mk_inst(overrides={'built':False}, built=True).built
+ assert not self.mk_inst(overrides={"built": False}, built=True).built
diff --git a/tests/package/test_metadata.py b/tests/package/test_metadata.py
index 450afd2a2..90e3857fc 100644
--- a/tests/package/test_metadata.py
+++ b/tests/package/test_metadata.py
@@ -2,10 +2,14 @@ from pkgcore.package import base, metadata
def make_pkg_kls(attrs=(), callbacks={}):
-
class simple_pkg(base.base):
_get_attr = callbacks
- __slots__ = ("_args", "_kwds", "_data", "_fetch_called",) + tuple(attrs)
+ __slots__ = (
+ "_args",
+ "_kwds",
+ "_data",
+ "_fetch_called",
+ ) + tuple(attrs)
def __init__(self, *args, **kwds):
self._args = args
@@ -18,6 +22,7 @@ def make_pkg_kls(attrs=(), callbacks={}):
class metadata_pkg(metadata.DeriveMetadataKls(simple_pkg)):
__slots__ = ()
+
def _fetch_metadata(self):
self._fetch_called = True
return self._data
@@ -26,7 +31,6 @@ def make_pkg_kls(attrs=(), callbacks={}):
class TestMetadataPackage:
-
def test_init(self):
class repo:
_parent_repo = "foon"
@@ -41,5 +45,5 @@ class TestMetadataPackage:
def test_getdata(self):
kls = make_pkg_kls()
- o = kls(None, data={'a': 'b'})
- assert o.data == {'a': 'b'}
+ o = kls(None, data={"a": "b"})
+ assert o.data == {"a": "b"}
diff --git a/tests/package/test_mutated.py b/tests/package/test_mutated.py
index 0e6b208e2..7d845f661 100644
--- a/tests/package/test_mutated.py
+++ b/tests/package/test_mutated.py
@@ -10,6 +10,7 @@ from snakeoil.klass import inject_richcmp_methods_from_cmp
def passthru(val, self):
return val
+
class FakePkg(base):
# XXX why isn't this using existing classes?
@@ -32,10 +33,9 @@ class FakePkg(base):
class TestMutatedPkg:
-
def make_fakepkg(self, pkg="dar", ver=1, data=None):
if data is None:
- data = {"a":1}
+ data = {"a": 1}
return FakePkg(pkg, ver, data)
def test_raw_pkg(self):
@@ -57,6 +57,6 @@ class TestMutatedPkg:
def test_getattr(self):
pkg = self.make_fakepkg()
assert MutatedPkg(pkg, {}).a == 1
- assert MutatedPkg(pkg, {"a":2}).a == 2
+ assert MutatedPkg(pkg, {"a": 2}).a == 2
with pytest.raises(AttributeError):
getattr(MutatedPkg(pkg, {}), "b")
diff --git a/tests/pkgsets/test_filelist.py b/tests/pkgsets/test_filelist.py
index d41d244f7..5f34e1398 100644
--- a/tests/pkgsets/test_filelist.py
+++ b/tests/pkgsets/test_filelist.py
@@ -21,18 +21,23 @@ class TestFileList:
return os.getgid()
def gen_pkgset(self, tmp_path, contents):
- (tmp_path / 'file').write_text(contents)
- return self.kls(tmp_path / 'file', gid=self.gid)
+ (tmp_path / "file").write_text(contents)
+ return self.kls(tmp_path / "file", gid=self.gid)
def test_contains(self, tmp_path):
- assert atom("x11-base/xorg-x11") in self.gen_pkgset(tmp_path, "x11-base/xorg-x11")
+ assert atom("x11-base/xorg-x11") in self.gen_pkgset(
+ tmp_path, "x11-base/xorg-x11"
+ )
def test_len(self, tmp_path):
- assert len(self.gen_pkgset(tmp_path, "x11-base/xorg-x11\ndev-util/diffball")) == 2
+ assert (
+ len(self.gen_pkgset(tmp_path, "x11-base/xorg-x11\ndev-util/diffball")) == 2
+ )
def test_iter(self, tmp_path):
- assert set(self.gen_pkgset(tmp_path, "dev-util/diffball\ndev-util/bsdiff")) == \
- {atom(x) for x in ["dev-util/diffball", "dev-util/bsdiff"]}
+ assert set(self.gen_pkgset(tmp_path, "dev-util/diffball\ndev-util/bsdiff")) == {
+ atom(x) for x in ["dev-util/diffball", "dev-util/bsdiff"]
+ }
def test_add(self, tmp_path):
s = self.gen_pkgset(tmp_path, "dev-util/diffball\n=dev-util/bsdiff-0.4")
@@ -40,16 +45,27 @@ class TestFileList:
s.add(atom("=dev-util/lib-1"))
s.flush()
- assert {atom(line) for line in (tmp_path / 'file').read_text().splitlines()} == \
- set(map(atom, ("dev-util/diffball", "=dev-util/bsdiff-0.4",
- "dev-util/foon", "=dev-util/lib-1")))
+ assert {
+ atom(line) for line in (tmp_path / "file").read_text().splitlines()
+ } == set(
+ map(
+ atom,
+ (
+ "dev-util/diffball",
+ "=dev-util/bsdiff-0.4",
+ "dev-util/foon",
+ "=dev-util/lib-1",
+ ),
+ )
+ )
def test_remove(self, tmp_path):
s = self.gen_pkgset(tmp_path, "=dev-util/diffball-0.4\ndev-util/bsdiff")
s.remove(atom("=dev-util/diffball-0.4"))
s.flush()
- assert {line.strip() for line in (tmp_path / 'file').read_text().splitlines()} == \
- {"dev-util/bsdiff"}
+ assert {
+ line.strip() for line in (tmp_path / "file").read_text().splitlines()
+ } == {"dev-util/bsdiff"}
def test_subset_awareness(self, tmp_path):
s = self.gen_pkgset(tmp_path, "@world\ndev-util/bsdiff")
@@ -58,7 +74,7 @@ class TestFileList:
def test_ignore_comments(self, tmp_path):
s = self.gen_pkgset(tmp_path, "#foon\ndev-util/bsdiff")
- assert [str(x) for x in s] == ['dev-util/bsdiff']
+ assert [str(x) for x in s] == ["dev-util/bsdiff"]
class TestWorldFile(TestFileList):
@@ -71,15 +87,17 @@ class TestWorldFile(TestFileList):
s.add(atom("=dev-util/lib-1"))
s.add(atom("dev-util/mylib:2"))
s.flush()
- assert {line.strip() for line in (tmp_path / 'file').read_text().splitlines()} == \
- {"dev-util/bsdiff", "dev-util/foon", "dev-util/lib", "dev-util/mylib:2"}
+ assert {
+ line.strip() for line in (tmp_path / "file").read_text().splitlines()
+ } == {"dev-util/bsdiff", "dev-util/foon", "dev-util/lib", "dev-util/mylib:2"}
def test_remove(self, tmp_path):
s = self.gen_pkgset(tmp_path, "dev-util/diffball\ndev-util/bsdiff")
s.remove(atom("=dev-util/diffball-0.4"))
s.flush()
- assert {line.strip() for line in (tmp_path / 'file').read_text().splitlines()} == \
- {"dev-util/bsdiff"}
+ assert {
+ line.strip() for line in (tmp_path / "file").read_text().splitlines()
+ } == {"dev-util/bsdiff"}
def test_subset_awareness(self, tmp_path):
s = self.gen_pkgset(tmp_path, "@world\ndev-util/bsdiff")
@@ -87,5 +105,5 @@ class TestWorldFile(TestFileList):
def test_subset_awareness2(self, tmp_path, caplog):
s = self.gen_pkgset(tmp_path, "@world\ndev-util/bsdiff")
- assert [str(x) for x in s] == ['dev-util/bsdiff']
+ assert [str(x) for x in s] == ["dev-util/bsdiff"]
assert "set item 'world'" in caplog.text
diff --git a/tests/pkgsets/test_glsa.py b/tests/pkgsets/test_glsa.py
index 56afbdf75..4ad975c76 100644
--- a/tests/pkgsets/test_glsa.py
+++ b/tests/pkgsets/test_glsa.py
@@ -13,71 +13,87 @@ pkgs_set = (
class TestGlsaDirSet:
-
def mk_glsa(self, tmp_path, feed):
for idx, data in enumerate(feed):
(tmp_path / f"glsa-200611-{idx:02d}.xml").write_text(mk_glsa(data))
- @pytest.mark.parametrize(("vuln_range", "ver_matches", "ver_nonmatches"), (
- pytest.param(">=1-r2", ["1-r2", "1-r7", "2"], ["0", "1"], id="ge"),
- pytest.param(">1-r2", ["1-r7", "2"], ["0", "1", "1-r2"], id="gt"),
- pytest.param("<=1-r2", ["1", "1-r1"], ["1-r3", "2"], id="le"),
- pytest.param("<1-r2", ["1", "1-r0"], ["1-r2", "2"], id="lt"),
- pytest.param("=1-r2", ["1-r2"], ["1-r3", "1", "2"], id="eq"),
- pytest.param("=1*", ["1-r2", "1.0.2", "10"], ["2", "3", "0"], id="eq_glob"),
- pytest.param("~>=1-r2", ["1-r2", "1-r7"], ["2", "1-r1", "1"], id="rge"),
- pytest.param("~>1-r1", ["1-r2", "1-r6"], ["2", "1-r1", "1"], id="rgt"),
- pytest.param("~<=1-r2", ["1-r2", "1", "1-r1"], ["2", "0.9", "1-r3"], id="rle"),
- pytest.param("~<1-r2", ["1", "1-r1"], ["2", "0.9", "1-r2"], id="rlt"),
- pytest.param("~>=2", ["2", "2-r1"], ["1", "2_p1", "2.1", "3"], id="rge_r0"),
- pytest.param("~>2", ["2-r1", "2-r2"], ["1", "2", "2_p1", "2.1"], id="rgt_r0"),
- pytest.param("~<=2", ["2"], ["1", "2-r1", "2_p1", "3"], id="rle_r0"),
- pytest.param("~<2", [], ["1", "2", "2-r1", "2.1", "3"], id="rlt_r0"),
- ))
+ @pytest.mark.parametrize(
+ ("vuln_range", "ver_matches", "ver_nonmatches"),
+ (
+ pytest.param(">=1-r2", ["1-r2", "1-r7", "2"], ["0", "1"], id="ge"),
+ pytest.param(">1-r2", ["1-r7", "2"], ["0", "1", "1-r2"], id="gt"),
+ pytest.param("<=1-r2", ["1", "1-r1"], ["1-r3", "2"], id="le"),
+ pytest.param("<1-r2", ["1", "1-r0"], ["1-r2", "2"], id="lt"),
+ pytest.param("=1-r2", ["1-r2"], ["1-r3", "1", "2"], id="eq"),
+ pytest.param("=1*", ["1-r2", "1.0.2", "10"], ["2", "3", "0"], id="eq_glob"),
+ pytest.param("~>=1-r2", ["1-r2", "1-r7"], ["2", "1-r1", "1"], id="rge"),
+ pytest.param("~>1-r1", ["1-r2", "1-r6"], ["2", "1-r1", "1"], id="rgt"),
+ pytest.param(
+ "~<=1-r2", ["1-r2", "1", "1-r1"], ["2", "0.9", "1-r3"], id="rle"
+ ),
+ pytest.param("~<1-r2", ["1", "1-r1"], ["2", "0.9", "1-r2"], id="rlt"),
+ pytest.param("~>=2", ["2", "2-r1"], ["1", "2_p1", "2.1", "3"], id="rge_r0"),
+ pytest.param(
+ "~>2", ["2-r1", "2-r2"], ["1", "2", "2_p1", "2.1"], id="rgt_r0"
+ ),
+ pytest.param("~<=2", ["2"], ["1", "2-r1", "2_p1", "3"], id="rle_r0"),
+ pytest.param("~<2", [], ["1", "2", "2-r1", "2.1", "3"], id="rlt_r0"),
+ ),
+ )
def test_range(self, tmp_path, vuln_range, ver_matches, ver_nonmatches):
self.mk_glsa(tmp_path, [("dev-util/diffball", ([], [vuln_range]))])
restrict = list(OrRestriction(*tuple(glsa.GlsaDirSet(str(tmp_path)))))
- if len(restrict) == 0: # exception thrown
+ if len(restrict) == 0: # exception thrown
restrict.append(AlwaysBool(negate=False))
assert len(restrict) == 1
restrict = restrict[0]
for ver in ver_matches:
pkg = cpv.VersionedCPV(f"dev-util/diffball-{ver}")
- assert restrict.match(pkg), f"pkg {pkg} must match for {vuln_range!r}: {restrict}"
+ assert restrict.match(
+ pkg
+ ), f"pkg {pkg} must match for {vuln_range!r}: {restrict}"
for ver in ver_nonmatches:
pkg = cpv.VersionedCPV(f"dev-util/diffball-{ver}")
- assert not restrict.match(pkg), "pkg {pkg} must not match for {vuln_range!r}: {restrict}"
+ assert not restrict.match(
+ pkg
+ ), "pkg {pkg} must not match for {vuln_range!r}: {restrict}"
def test_iter(self, tmp_path):
self.mk_glsa(tmp_path, pkgs_set)
g = glsa.GlsaDirSet(str(tmp_path))
l = list(g)
- assert {x.key for x in l} == {'dev-util/diffball', 'dev-util/bsdiff'}
+ assert {x.key for x in l} == {"dev-util/diffball", "dev-util/bsdiff"}
def test_pkg_grouped_iter(self, tmp_path):
self.mk_glsa(tmp_path, pkgs_set + (("dev-util/bsdiff", ([], ["~>=2-r1"])),))
g = glsa.GlsaDirSet(str(tmp_path))
l = list(g.pkg_grouped_iter(sorter=sorted))
- assert {x.key for x in l} == {'dev-util/diffball', 'dev-util/bsdiff'}
+ assert {x.key for x in l} == {"dev-util/diffball", "dev-util/bsdiff"}
# main interest is dev-util/bsdiff
r = l[0]
- pkgs = [cpv.VersionedCPV(f"dev-util/bsdiff-{ver}")
- for ver in ("0", "1", "1.1", "2", "2-r1")]
+ pkgs = [
+ cpv.VersionedCPV(f"dev-util/bsdiff-{ver}")
+ for ver in ("0", "1", "1.1", "2", "2-r1")
+ ]
assert [x.fullver for x in pkgs if r.match(x)] == ["1.1", "2-r1"]
def test_slots(self, tmp_path):
slotted_pkgs_set = pkgs_set + (
- ("dev-util/pkgcheck", '1', ([">=2"], [">1"]), '*'),
+ ("dev-util/pkgcheck", "1", ([">=2"], [">1"]), "*"),
)
self.mk_glsa(tmp_path, slotted_pkgs_set)
g = glsa.GlsaDirSet(str(tmp_path))
l = list(g)
- assert {x.key for x in l} == {'dev-util/diffball', 'dev-util/bsdiff', 'dev-util/pkgcheck'}
+ assert {x.key for x in l} == {
+ "dev-util/diffball",
+ "dev-util/bsdiff",
+ "dev-util/pkgcheck",
+ }
restrict = OrRestriction(*tuple(glsa.GlsaDirSet(str(tmp_path))))
- assert restrict.match(atom.atom('=dev-util/pkgcheck-1-r1:1'))
- assert not restrict.match(atom.atom('=dev-util/pkgcheck-1:1'))
- assert not restrict.match(atom.atom('=dev-util/pkgcheck-2:1'))
- assert not restrict.match(atom.atom('=dev-util/pkgcheck-1:0'))
- assert not restrict.match(atom.atom('dev-util/pkgcheck:0'))
- assert not restrict.match(atom.atom('dev-util/pkgcheck'))
+ assert restrict.match(atom.atom("=dev-util/pkgcheck-1-r1:1"))
+ assert not restrict.match(atom.atom("=dev-util/pkgcheck-1:1"))
+ assert not restrict.match(atom.atom("=dev-util/pkgcheck-2:1"))
+ assert not restrict.match(atom.atom("=dev-util/pkgcheck-1:0"))
+ assert not restrict.match(atom.atom("dev-util/pkgcheck:0"))
+ assert not restrict.match(atom.atom("dev-util/pkgcheck"))
diff --git a/tests/pkgsets/test_installed.py b/tests/pkgsets/test_installed.py
index 824756285..8b4f7ec5c 100644
--- a/tests/pkgsets/test_installed.py
+++ b/tests/pkgsets/test_installed.py
@@ -22,17 +22,24 @@ class FakePkg:
class TestInstalled:
-
def test_iter(self):
- fake_vdb = SimpleTree({
- "dev-util": {
- "diffball": ["1.0"],
- "bsdiff": ["1.2", "1.3"],
- }
- }, pkg_klass=FakePkg)
-
- assert set(installed.Installed([fake_vdb])) == \
- {"dev-util/diffball", "dev-util/bsdiff"}
-
- assert set(installed.VersionedInstalled([fake_vdb])) == \
- {"dev-util/diffball-1.0", "dev-util/bsdiff-1.2", "dev-util/bsdiff-1.3"}
+ fake_vdb = SimpleTree(
+ {
+ "dev-util": {
+ "diffball": ["1.0"],
+ "bsdiff": ["1.2", "1.3"],
+ }
+ },
+ pkg_klass=FakePkg,
+ )
+
+ assert set(installed.Installed([fake_vdb])) == {
+ "dev-util/diffball",
+ "dev-util/bsdiff",
+ }
+
+ assert set(installed.VersionedInstalled([fake_vdb])) == {
+ "dev-util/diffball-1.0",
+ "dev-util/bsdiff-1.2",
+ "dev-util/bsdiff-1.3",
+ }
diff --git a/tests/repository/test_filtered.py b/tests/repository/test_filtered.py
index 083ec93d9..312e2528b 100644
--- a/tests/repository/test_filtered.py
+++ b/tests/repository/test_filtered.py
@@ -6,11 +6,13 @@ from pkgcore.restrictions import packages, values
class TestVisibility:
-
def setup_repos(self, restrictions=None):
- repo = SimpleTree({
- "dev-util": {"diffball": ["1.0", "0.7"], "bsdiff": ["0.4.1", "0.4.2"]},
- "dev-lib": {"fake": ["1.0", "1.0-r1"]}})
+ repo = SimpleTree(
+ {
+ "dev-util": {"diffball": ["1.0", "0.7"], "bsdiff": ["0.4.1", "0.4.2"]},
+ "dev-lib": {"fake": ["1.0", "1.0-r1"]},
+ }
+ )
if restrictions is None:
restrictions = atom("dev-util/diffball")
vrepo = filtered.tree(repo, restrictions)
@@ -24,20 +26,34 @@ class TestVisibility:
assert sorted(vrepo.itermatch(a2)) == []
repo, vrepo = self.setup_repos(atom("=dev-util/diffball-1.0"))
assert sorted(vrepo.itermatch(a)) == sorted(repo.itermatch(a))
- assert sorted(vrepo.itermatch(a2)) == sorted([VersionedCPV("dev-util/diffball-0.7")])
- repo, vrepo = self.setup_repos(packages.PackageRestriction(
- "package", values.OrRestriction(
- *[values.StrExactMatch(x) for x in ("diffball", "fake")])))
- assert sorted(vrepo.itermatch(packages.AlwaysTrue)) == \
- sorted(repo.itermatch(atom("dev-util/bsdiff")))
+ assert sorted(vrepo.itermatch(a2)) == sorted(
+ [VersionedCPV("dev-util/diffball-0.7")]
+ )
+ repo, vrepo = self.setup_repos(
+ packages.PackageRestriction(
+ "package",
+ values.OrRestriction(
+ *[values.StrExactMatch(x) for x in ("diffball", "fake")]
+ ),
+ )
+ )
+ assert sorted(vrepo.itermatch(packages.AlwaysTrue)) == sorted(
+ repo.itermatch(atom("dev-util/bsdiff"))
+ )
# check sentinel value handling.
vrepo = filtered.tree(repo, a2, sentinel_val=True)
- assert sorted(x.cpvstr for x in vrepo) == \
- sorted(['dev-util/diffball-0.7', 'dev-util/diffball-1.0'])
+ assert sorted(x.cpvstr for x in vrepo) == sorted(
+ ["dev-util/diffball-0.7", "dev-util/diffball-1.0"]
+ )
def test_iter(self):
- repo, vrepo = self.setup_repos(packages.PackageRestriction(
- "package", values.OrRestriction(
- *[values.StrExactMatch(x) for x in ("diffball", "fake")])))
+ repo, vrepo = self.setup_repos(
+ packages.PackageRestriction(
+ "package",
+ values.OrRestriction(
+ *[values.StrExactMatch(x) for x in ("diffball", "fake")]
+ ),
+ )
+ )
assert sorted(vrepo) == sorted(repo.itermatch(atom("dev-util/bsdiff")))
diff --git a/tests/repository/test_multiplex.py b/tests/repository/test_multiplex.py
index 10b26cc47..155e2d227 100644
--- a/tests/repository/test_multiplex.py
+++ b/tests/repository/test_multiplex.py
@@ -13,10 +13,12 @@ class TestMultiplex:
kls = staticmethod(tree)
tree1_pkgs = (
("dev-util/diffball", ["1.0", "0.7"]),
- ("dev-lib/fake", ["1.0", "1.0-r1"]))
+ ("dev-lib/fake", ["1.0", "1.0-r1"]),
+ )
tree2_pkgs = (
("dev-util/diffball", ["1.0", "1.1"]),
- ("dev-lib/bsdiff", ["1.0", "2.0"]))
+ ("dev-lib/bsdiff", ["1.0", "2.0"]),
+ )
tree1_list = [f"{k}-{ver}" for k, v in tree1_pkgs for ver in v]
tree2_list = [f"{k}-{ver}" for k, v in tree2_pkgs for ver in v]
@@ -30,27 +32,32 @@ class TestMultiplex:
self.d2.setdefault(cat, {}).setdefault(pkg, []).extend(ver)
self.d1 = OrderedDict(
- (k, OrderedDict(self.d1[k].items()))
- for k in sorted(self.d1, reverse=True))
+ (k, OrderedDict(self.d1[k].items())) for k in sorted(self.d1, reverse=True)
+ )
self.d2 = OrderedDict(
- (k, OrderedDict(self.d2[k].items()))
- for k in sorted(self.d2, reverse=True))
+ (k, OrderedDict(self.d2[k].items())) for k in sorted(self.d2, reverse=True)
+ )
self.tree1 = SimpleTree(self.d1)
self.tree2 = SimpleTree(self.d2)
self.ctree = self.kls(self.tree1, self.tree2)
def test_iter(self):
- assert sorted(x.cpvstr for x in self.ctree) == \
- sorted(self.tree1_list + self.tree2_list)
+ assert sorted(x.cpvstr for x in self.ctree) == sorted(
+ self.tree1_list + self.tree2_list
+ )
def test_itermatch(self):
imatch = self.ctree.itermatch
- assert sorted(x.cpvstr for x in imatch(packages.AlwaysTrue)) == \
- sorted(self.tree1_list + self.tree2_list)
+ assert sorted(x.cpvstr for x in imatch(packages.AlwaysTrue)) == sorted(
+ self.tree1_list + self.tree2_list
+ )
p = packages.PackageRestriction("package", values.StrExactMatch("diffball"))
- assert sorted(x.cpvstr for x in imatch(p)) == \
- [y for y in sorted(self.tree1_list + self.tree2_list) if "/diffball" in y]
+ assert sorted(x.cpvstr for x in imatch(p)) == [
+ y for y in sorted(self.tree1_list + self.tree2_list) if "/diffball" in y
+ ]
def test_sorting(self):
- assert list(x.cpvstr for x in self.ctree.itermatch(packages.AlwaysTrue, sorter=rev_sorted)) == \
- rev_sorted(self.tree1_list + self.tree2_list)
+ assert list(
+ x.cpvstr
+ for x in self.ctree.itermatch(packages.AlwaysTrue, sorter=rev_sorted)
+ ) == rev_sorted(self.tree1_list + self.tree2_list)
diff --git a/tests/repository/test_prototype.py b/tests/repository/test_prototype.py
index 10b4c4a6d..fb846eb2a 100644
--- a/tests/repository/test_prototype.py
+++ b/tests/repository/test_prototype.py
@@ -13,21 +13,20 @@ from snakeoil.currying import post_curry
class TestPrototype:
-
def setup_method(self):
# we use an OrderedDict here specifically to trigger any sorter
# related bugs
d = {
"dev-util": {"diffball": ["1.0", "0.7"], "bsdiff": ["0.4.1", "0.4.2"]},
- "dev-lib": {"fake": ["1.0", "1.0-r1"]}}
- self.repo = SimpleTree(
- OrderedDict((k, d[k]) for k in sorted(d, reverse=True)))
+ "dev-lib": {"fake": ["1.0", "1.0-r1"]},
+ }
+ self.repo = SimpleTree(OrderedDict((k, d[k]) for k in sorted(d, reverse=True)))
def test_concurrent_access(self):
iall = iter(self.repo)
self.repo.match(atom("dev-lib/fake"))
pkg = next(iall)
- if pkg.category == 'dev-util':
+ if pkg.category == "dev-util":
self.repo.match(atom("dev-lib/fake"))
else:
self.repo.match(atom("dev-util/diffball"))
@@ -36,17 +35,21 @@ class TestPrototype:
def test_internal_lookups(self):
assert sorted(self.repo.categories) == sorted(["dev-lib", "dev-util"])
- assert \
- sorted(map("/".join, self.repo.versions)) == \
- sorted([x for x in ["dev-util/diffball", "dev-util/bsdiff", "dev-lib/fake"]])
- assert \
- sorted(
- f"{cp[0]}/{cp[1]}-{v}"
- for cp, t in self.repo.versions.items() for v in t) == \
- sorted([
- "dev-util/diffball-1.0", "dev-util/diffball-0.7",
- "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2",
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"])
+ assert sorted(map("/".join, self.repo.versions)) == sorted(
+ [x for x in ["dev-util/diffball", "dev-util/bsdiff", "dev-lib/fake"]]
+ )
+ assert sorted(
+ f"{cp[0]}/{cp[1]}-{v}" for cp, t in self.repo.versions.items() for v in t
+ ) == sorted(
+ [
+ "dev-util/diffball-1.0",
+ "dev-util/diffball-0.7",
+ "dev-util/bsdiff-0.4.1",
+ "dev-util/bsdiff-0.4.2",
+ "dev-lib/fake-1.0",
+ "dev-lib/fake-1.0-r1",
+ ]
+ )
def test_simple_query(self):
a = atom("=dev-util/diffball-1.0")
@@ -57,98 +60,144 @@ class TestPrototype:
def test_identify_candidates(self):
with pytest.raises(TypeError):
self.repo.match("asdf")
- rc = packages.PackageRestriction(
- "category", values.StrExactMatch("dev-util"))
- assert \
- sorted(set(x.package for x in self.repo.itermatch(rc))) == \
- sorted(["diffball", "bsdiff"])
- rp = packages.PackageRestriction(
- "package", values.StrExactMatch("diffball"))
- assert list(x.version for x in self.repo.itermatch(rp, sorter=sorted)) == ["0.7", "1.0"]
- assert \
- self.repo.match(packages.OrRestriction(rc, rp), sorter=sorted) == \
- sorted(VersionedCPV(x) for x in (
- "dev-util/diffball-0.7", "dev-util/diffball-1.0",
- "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2"))
- assert \
- sorted(self.repo.itermatch(packages.AndRestriction(rc, rp))) == \
- sorted(VersionedCPV(x) for x in (
- "dev-util/diffball-0.7", "dev-util/diffball-1.0"))
+ rc = packages.PackageRestriction("category", values.StrExactMatch("dev-util"))
+ assert sorted(set(x.package for x in self.repo.itermatch(rc))) == sorted(
+ ["diffball", "bsdiff"]
+ )
+ rp = packages.PackageRestriction("package", values.StrExactMatch("diffball"))
+ assert list(x.version for x in self.repo.itermatch(rp, sorter=sorted)) == [
+ "0.7",
+ "1.0",
+ ]
+ assert self.repo.match(packages.OrRestriction(rc, rp), sorter=sorted) == sorted(
+ VersionedCPV(x)
+ for x in (
+ "dev-util/diffball-0.7",
+ "dev-util/diffball-1.0",
+ "dev-util/bsdiff-0.4.1",
+ "dev-util/bsdiff-0.4.2",
+ )
+ )
+ assert sorted(self.repo.itermatch(packages.AndRestriction(rc, rp))) == sorted(
+ VersionedCPV(x) for x in ("dev-util/diffball-0.7", "dev-util/diffball-1.0")
+ )
assert sorted(self.repo) == self.repo.match(packages.AlwaysTrue, sorter=sorted)
# mix/match cat/pkg to check that it handles that corner case
# properly for sorting.
- assert \
- sorted(self.repo, reverse=True) == \
- self.repo.match(packages.OrRestriction(
- rc, rp, packages.AlwaysTrue),
- sorter=partial(sorted, reverse=True))
- rc2 = packages.PackageRestriction(
- "category", values.StrExactMatch("dev-lib"))
+ assert sorted(self.repo, reverse=True) == self.repo.match(
+ packages.OrRestriction(rc, rp, packages.AlwaysTrue),
+ sorter=partial(sorted, reverse=True),
+ )
+ rc2 = packages.PackageRestriction("category", values.StrExactMatch("dev-lib"))
assert sorted(self.repo.itermatch(packages.AndRestriction(rp, rc2))) == []
# note this mixes a category level match, and a pkg level
# match. they *must* be treated as an or.
- assert \
- sorted(self.repo.itermatch(packages.OrRestriction(rp, rc2))) == \
- sorted(VersionedCPV(x) for x in (
- "dev-util/diffball-0.7", "dev-util/diffball-1.0",
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
+ assert sorted(self.repo.itermatch(packages.OrRestriction(rp, rc2))) == sorted(
+ VersionedCPV(x)
+ for x in (
+ "dev-util/diffball-0.7",
+ "dev-util/diffball-1.0",
+ "dev-lib/fake-1.0",
+ "dev-lib/fake-1.0-r1",
+ )
+ )
# this is similar to the test above, but mixes a cat/pkg
# candidate with a pkg candidate
- rp2 = packages.PackageRestriction(
- "package", values.StrExactMatch("fake"))
+ rp2 = packages.PackageRestriction("package", values.StrExactMatch("fake"))
r = packages.OrRestriction(atom("dev-util/diffball"), rp2)
- assert \
- sorted(self.repo.itermatch(r)) == \
- sorted(VersionedCPV(x) for x in (
- "dev-util/diffball-0.7", "dev-util/diffball-1.0",
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
-
- assert \
- sorted(self.repo.itermatch(
- packages.OrRestriction(packages.AlwaysTrue, rp2))) == \
- sorted(VersionedCPV(x) for x in (
- "dev-util/diffball-0.7", "dev-util/diffball-1.0",
- "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2",
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
-
- assert \
- sorted(self.repo.itermatch(packages.PackageRestriction(
- 'category', values.StrExactMatch('dev-util', negate=True)))) == \
- sorted(VersionedCPV(x) for x in ("dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
+ assert sorted(self.repo.itermatch(r)) == sorted(
+ VersionedCPV(x)
+ for x in (
+ "dev-util/diffball-0.7",
+ "dev-util/diffball-1.0",
+ "dev-lib/fake-1.0",
+ "dev-lib/fake-1.0-r1",
+ )
+ )
+
+ assert sorted(
+ self.repo.itermatch(packages.OrRestriction(packages.AlwaysTrue, rp2))
+ ) == sorted(
+ VersionedCPV(x)
+ for x in (
+ "dev-util/diffball-0.7",
+ "dev-util/diffball-1.0",
+ "dev-util/bsdiff-0.4.1",
+ "dev-util/bsdiff-0.4.2",
+ "dev-lib/fake-1.0",
+ "dev-lib/fake-1.0-r1",
+ )
+ )
+
+ assert sorted(
+ self.repo.itermatch(
+ packages.PackageRestriction(
+ "category", values.StrExactMatch("dev-util", negate=True)
+ )
+ )
+ ) == sorted(
+ VersionedCPV(x) for x in ("dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")
+ )
obj = SimpleNamespace(livefs=False)
- pkg_cls = post_curry(MutatedPkg, {'repo': obj})
- assert \
- sorted(self.repo.itermatch(boolean.AndRestriction(boolean.OrRestriction(
+ pkg_cls = post_curry(MutatedPkg, {"repo": obj})
+ assert sorted(
+ self.repo.itermatch(
+ boolean.AndRestriction(
+ boolean.OrRestriction(
+ packages.PackageRestriction(
+ "repo.livefs", values.EqualityMatch(False)
+ ),
+ packages.PackageRestriction(
+ "category", values.StrExactMatch("virtual")
+ ),
+ ),
+ atom("dev-lib/fake"),
+ ),
+ pkg_cls=pkg_cls,
+ )
+ ) == sorted(
+ VersionedCPV(x) for x in ("dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")
+ )
+
+ assert sorted(
+ self.repo.itermatch(
packages.PackageRestriction(
- "repo.livefs", values.EqualityMatch(False)),
+ "category",
+ values.StrExactMatch("dev-lib", negate=True),
+ negate=True,
+ )
+ )
+ ) == sorted(
+ VersionedCPV(x) for x in ("dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")
+ )
+
+ assert sorted(
+ self.repo.itermatch(
packages.PackageRestriction(
- "category", values.StrExactMatch("virtual"))),
- atom("dev-lib/fake")),
- pkg_cls=pkg_cls)) == \
- sorted(VersionedCPV(x) for x in (
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
-
- assert \
- sorted(self.repo.itermatch(packages.PackageRestriction(
- 'category', values.StrExactMatch('dev-lib', negate=True),
- negate=True))) == \
- sorted(VersionedCPV(x) for x in (
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
-
- assert \
- sorted(self.repo.itermatch(packages.PackageRestriction(
- 'category', values.StrExactMatch('dev-lib', negate=True), negate=True))) == \
- sorted(VersionedCPV(x) for x in (
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
+ "category",
+ values.StrExactMatch("dev-lib", negate=True),
+ negate=True,
+ )
+ )
+ ) == sorted(
+ VersionedCPV(x) for x in ("dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")
+ )
def test_iter(self):
- expected = sorted(VersionedCPV(x) for x in (
- "dev-util/diffball-1.0", "dev-util/diffball-0.7",
- "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2",
- "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
+ expected = sorted(
+ VersionedCPV(x)
+ for x in (
+ "dev-util/diffball-1.0",
+ "dev-util/diffball-0.7",
+ "dev-util/bsdiff-0.4.1",
+ "dev-util/bsdiff-0.4.2",
+ "dev-lib/fake-1.0",
+ "dev-lib/fake-1.0-r1",
+ )
+ )
assert sorted(self.repo) == expected
def test_notify_remove(self):
@@ -175,8 +224,9 @@ class TestPrototype:
def test_notify_add(self):
pkg = VersionedCPV("dev-util/diffball-1.2")
self.repo.notify_add_package(pkg)
- assert sorted(self.repo.versions[(pkg.category, pkg.package)]) == \
- sorted(["1.0", "1.2", "0.7"])
+ assert sorted(self.repo.versions[(pkg.category, pkg.package)]) == sorted(
+ ["1.0", "1.2", "0.7"]
+ )
pkg = VersionedCPV("foo/bar-1.0")
self.repo.notify_add_package(pkg)
@@ -190,7 +240,7 @@ class TestPrototype:
self.repo.notify_add_package(pkg)
assert (pkg.category, pkg.package) in self.repo.versions
- def _simple_redirect_test(self, attr, arg1='=dev-util/diffball-1.0', arg2=None):
+ def _simple_redirect_test(self, attr, arg1="=dev-util/diffball-1.0", arg2=None):
l = []
uniq_obj = object()
@@ -198,10 +248,12 @@ class TestPrototype:
a = a[1:-1]
l.extend((a, kw))
return uniq_obj
+
# if replace, override _replace since replace reflects to it
class my_ops(operations):
- locals()[f'_cmd_implementation_{attr}'] = f
+ locals()[f"_cmd_implementation_{attr}"] = f
+
self.repo.operations_kls = my_ops
args = [self.repo.match(atom(arg1))]
if arg2:
@@ -220,11 +272,13 @@ class TestPrototype:
simple_check(op, args)
assert not l[1]
simple_check(op, args)
- assert 'force' not in l[1]
+ assert "force" not in l[1]
self.repo.frozen = True
assert not self.repo.operations.supports(attr)
assert not hasattr(self.repo.operations, attr)
- test_replace = post_curry(_simple_redirect_test, 'replace', arg2='dev-util/diffball-1.1')
- test_uninstall = post_curry(_simple_redirect_test, 'uninstall')
- test_install = post_curry(_simple_redirect_test, 'install')
+ test_replace = post_curry(
+ _simple_redirect_test, "replace", arg2="dev-util/diffball-1.1"
+ )
+ test_uninstall = post_curry(_simple_redirect_test, "uninstall")
+ test_install = post_curry(_simple_redirect_test, "install")
diff --git a/tests/resolver/test_choice_point.py b/tests/resolver/test_choice_point.py
index 99901952a..49e820522 100644
--- a/tests/resolver/test_choice_point.py
+++ b/tests/resolver/test_choice_point.py
@@ -4,47 +4,62 @@ from pkgcore.restrictions.boolean import AndRestriction, OrRestriction
class fake_package:
-
def __init__(self, **kwds):
for k, v in (
- ("bdepend", AndRestriction()),
- ("depend", AndRestriction()),
- ("rdepend", AndRestriction()),
- ("pdepend", AndRestriction()),
- ("idepend", AndRestriction()),
- ("slot", 0),
- ("key", None),
- ("marker", None)):
+ ("bdepend", AndRestriction()),
+ ("depend", AndRestriction()),
+ ("rdepend", AndRestriction()),
+ ("pdepend", AndRestriction()),
+ ("idepend", AndRestriction()),
+ ("slot", 0),
+ ("key", None),
+ ("marker", None),
+ ):
setattr(self, k, kwds.get(k, v))
-class TestChoicePoint:
+class TestChoicePoint:
@staticmethod
def gen_choice_point():
- return choice_point("asdf", (
- fake_package(marker=1, depend=OrRestriction(
- "ordep1", "ordep2", "dependordep"),
- rdepend=AndRestriction(
+ return choice_point(
+ "asdf",
+ (
+ fake_package(
+ marker=1,
+ depend=OrRestriction("ordep1", "ordep2", "dependordep"),
+ rdepend=AndRestriction(
OrRestriction("ordep1", "andordep2"),
- "anddep1", "anddep2", "pkg1and"),
- pdepend=OrRestriction("prdep1", "or3")),
- fake_package(marker=2, depend=AndRestriction(
- "anddep1", "anddep2"),
- rdepend=OrRestriction("or1", "or2"),
- pdepend=OrRestriction("prdep1", "or3"))))
+ "anddep1",
+ "anddep2",
+ "pkg1and",
+ ),
+ pdepend=OrRestriction("prdep1", "or3"),
+ ),
+ fake_package(
+ marker=2,
+ depend=AndRestriction("anddep1", "anddep2"),
+ rdepend=OrRestriction("or1", "or2"),
+ pdepend=OrRestriction("prdep1", "or3"),
+ ),
+ ),
+ )
def test_depend_rdepend_stepping(self):
c = self.gen_choice_point()
assert c.depend == [["ordep1", "ordep2", "dependordep"]]
- assert sorted(c.rdepend) == sorted([['anddep1'], ['anddep2'], ['ordep1', 'andordep2'], ['pkg1and']])
+ assert sorted(c.rdepend) == sorted(
+ [["anddep1"], ["anddep2"], ["ordep1", "andordep2"], ["pkg1and"]]
+ )
c.reduce_atoms("ordep1")
- assert c.depend == [['ordep2', 'dependordep']]
- assert sorted(c.rdepend) == sorted([['anddep1'], ['anddep2'], ['andordep2'], ['pkg1and']])
+ assert c.depend == [["ordep2", "dependordep"]]
+ assert sorted(c.rdepend) == sorted(
+ [["anddep1"], ["anddep2"], ["andordep2"], ["pkg1and"]]
+ )
c.reduce_atoms("pkg1and")
c.reduce_atoms("or1")
assert c.rdepend == [["or2"]]
c.reduce_atoms("prdep1")
- assert c.depend == [['anddep1'], ['anddep2']]
+ assert c.depend == [["anddep1"], ["anddep2"]]
assert c.pdepend == [["or3"]]
c.reduce_atoms("or3")
with pytest.raises(IndexError):
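
The rewritten gen_choice_point above shows how nesting is handled: when a call does not fit on one line its arguments are split one per line, and any argument that is itself an over-long call is split the same way, producing the stair-stepped layout. A small self-contained sketch of the same effect (the data and names are invented for illustration, not from the repository):

    rows = [{"name": "a", "score": 3}, {"name": "b", "score": 9}]
    # the outer call and the over-long inner call are each exploded,
    # with a trailing comma after the last element of every split list
    report = sorted(
        filter(
            lambda row: row["score"] >= 5 and not row["name"].startswith("_"),
            rows,
        ),
        key=lambda row: (row["score"], row["name"]),
        reverse=True,
    )
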
diff --git a/tests/resolver/test_pigeonholes.py b/tests/resolver/test_pigeonholes.py
index 480ad3fb4..80123f157 100644
--- a/tests/resolver/test_pigeonholes.py
+++ b/tests/resolver/test_pigeonholes.py
@@ -7,7 +7,7 @@ from .test_choice_point import fake_package
class fake_blocker(restriction.base):
- __slots__ = ('key', 'blocks')
+ __slots__ = ("key", "blocks")
def __init__(self, key, blocks=()):
restriction.base.__init__(self)
@@ -24,7 +24,6 @@ class fake_blocker(restriction.base):
class TestSlot:
-
def test_add(self):
c = PigeonHoledSlots()
o = fake_package()
diff --git a/tests/resolver/test_plan.py b/tests/resolver/test_plan.py
index a84929b21..b7c7c2e8f 100644
--- a/tests/resolver/test_plan.py
+++ b/tests/resolver/test_plan.py
@@ -3,12 +3,23 @@ from pkgcore.resolver import plan
from pkgcore.test.misc import FakePkg
-@pytest.mark.parametrize(("sorter", "vers", "expected", "iter_sort_target"), (
- pytest.param(plan.highest_iter_sort, [7,9,3,2], [9,7,3,2], True, id="highest iter"),
- pytest.param(plan.lowest_iter_sort, [7,9,4,2], [2,4,7,9], True, id="lowest iter"),
- pytest.param(plan.pkg_sort_highest, [1,9,7,10], [10,9,7,1], False, id="pkg highest"),
- pytest.param(plan.pkg_sort_lowest, [11,9,1,6], [1,6,9,11], False, id="pkg lowest"),
-))
+@pytest.mark.parametrize(
+ ("sorter", "vers", "expected", "iter_sort_target"),
+ (
+ pytest.param(
+ plan.highest_iter_sort, [7, 9, 3, 2], [9, 7, 3, 2], True, id="highest iter"
+ ),
+ pytest.param(
+ plan.lowest_iter_sort, [7, 9, 4, 2], [2, 4, 7, 9], True, id="lowest iter"
+ ),
+ pytest.param(
+ plan.pkg_sort_highest, [1, 9, 7, 10], [10, 9, 7, 1], False, id="pkg highest"
+ ),
+ pytest.param(
+ plan.pkg_sort_lowest, [11, 9, 1, 6], [1, 6, 9, 11], False, id="pkg lowest"
+ ),
+ ),
+)
def test_pkg_sorting(sorter, vers, expected, iter_sort_target):
pkgs = [FakePkg(f"d-b/a-{x}") for x in vers]
if iter_sort_target:
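
In test_plan.py the over-long construct is the decorator itself: once the argument tuple of @pytest.mark.parametrize has to be split, every pytest.param(...) entry ends up on its own line (or block of lines) and the tuple keeps a trailing comma, which pins it to the multi-line form on later runs of the formatter. A self-contained example in the same layout (not taken from the repository):

    import pytest

    @pytest.mark.parametrize(
        ("vers", "expected"),
        (
            pytest.param([7, 9, 3], [3, 7, 9], id="unsorted"),
            pytest.param([1, 2, 3], [1, 2, 3], id="already sorted"),
        ),
    )
    def test_sorted_copy(vers, expected):
        # sorted() returns a new list and leaves its input untouched
        assert sorted(vers) == expected
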
diff --git a/tests/restrictions/test_boolean.py b/tests/restrictions/test_boolean.py
index b204281cf..2fd3e0043 100644
--- a/tests/restrictions/test_boolean.py
+++ b/tests/restrictions/test_boolean.py
@@ -2,8 +2,8 @@ import pytest
from pkgcore.restrictions import boolean, restriction
-true = restriction.AlwaysBool(node_type='foo', negate=True)
-false = restriction.AlwaysBool(node_type='foo', negate=False)
+true = restriction.AlwaysBool(node_type="foo", negate=True)
+false = restriction.AlwaysBool(node_type="foo", negate=False)
class AlwaysForcableBool(boolean.base):
@@ -22,26 +22,26 @@ class base:
def test_invalid_restrictions(self):
with pytest.raises(TypeError):
- self.kls(42, node_type='foo')
- base = self.kls(node_type='foo')
+ self.kls(42, node_type="foo")
+ base = self.kls(node_type="foo")
with pytest.raises(TypeError):
base.add_restriction(42)
with pytest.raises(TypeError):
base.add_restriction()
def test_init_finalize(self):
- final = self.kls(true, node_type='foo', finalize=True)
+ final = self.kls(true, node_type="foo", finalize=True)
# the restrictions are stored as a tuple once finalized, so this raises a TypeError
with pytest.raises(TypeError):
final.add_restriction(false)
- final = self.kls(true, node_type='foo')
+ final = self.kls(true, node_type="foo")
# the restrictions are stored as a tuple once finalized, so this raises a TypeError
with pytest.raises(TypeError):
final.add_restriction(false)
def test_finalize(self):
- base = self.kls(true, node_type='foo', finalize=False)
+ base = self.kls(true, node_type="foo", finalize=False)
base.add_restriction(false)
base.finalize()
with pytest.raises(TypeError):
@@ -50,26 +50,31 @@ class base:
def test_change_restrictions(self):
base = self.kls(true, false)
assert self.kls(false, true) == base.change_restrictions(false, true)
- assert self.kls(false, true) != base.change_restrictions(false, true, negate=True)
- assert self.kls(false, true, negate=True) == base.change_restrictions(false, true, negate=True)
+ assert self.kls(false, true) != base.change_restrictions(
+ false, true, negate=True
+ )
+ assert self.kls(false, true, negate=True) == base.change_restrictions(
+ false, true, negate=True
+ )
def test_add_restriction(self):
with pytest.raises(TypeError):
self.kls(true, finalize=True).add_restriction(false)
with pytest.raises(TypeError):
- self.kls(node_type='foon').add_restriction(false)
+ self.kls(node_type="foon").add_restriction(false)
k = self.kls(finalize=False)
k.add_restriction(false)
assert k.restrictions == [false]
# TODO total_len? what does it do?
+
class TestBase(base):
kls = boolean.base
def test_base(self):
- base = self.kls(true, false, node_type='foo')
+ base = self.kls(true, false, node_type="foo")
assert len(base) == 2
assert list(base) == [true, false]
with pytest.raises(NotImplementedError):
@@ -88,32 +93,41 @@ class TestAndRestriction(base):
kls = boolean.AndRestriction
def test_match(self):
- assert self.kls(true, true, node_type='foo').match(None)
- assert not self.kls(false, true, true, node_type='foo').match(None)
- assert not self.kls(true, false, true, node_type='foo').match(None)
+ assert self.kls(true, true, node_type="foo").match(None)
+ assert not self.kls(false, true, true, node_type="foo").match(None)
+ assert not self.kls(true, false, true, node_type="foo").match(None)
def test_negate_match(self):
- assert self.kls(false, true, node_type='foo', negate=True).match(None)
- assert self.kls(true, false, node_type='foo', negate=True).match(None)
- assert self.kls(false, false, node_type='foo', negate=True).match(None)
- assert not self.kls(true, true, node_type='foo', negate=True).match(None)
+ assert self.kls(false, true, node_type="foo", negate=True).match(None)
+ assert self.kls(true, false, node_type="foo", negate=True).match(None)
+ assert self.kls(false, false, node_type="foo", negate=True).match(None)
+ assert not self.kls(true, true, node_type="foo", negate=True).match(None)
def test_dnf_solutions(self):
assert self.kls(true, true).dnf_solutions() == [[true, true]]
- assert self.kls(self.kls(true, true), true).dnf_solutions() == [[true, true, true]]
- assert (list(map(set, self.kls(
- true, true,
- boolean.OrRestriction(false, true)).dnf_solutions())) ==
- [set([true, true, false]), set([true, true, true])])
+ assert self.kls(self.kls(true, true), true).dnf_solutions() == [
+ [true, true, true]
+ ]
+ assert list(
+ map(
+ set,
+ self.kls(
+ true, true, boolean.OrRestriction(false, true)
+ ).dnf_solutions(),
+ )
+ ) == [set([true, true, false]), set([true, true, true])]
assert self.kls().dnf_solutions() == [[]]
def test_cnf_solutions(self):
assert self.kls(true, true).cnf_solutions() == [[true], [true]]
- assert self.kls(self.kls(true, true), true).cnf_solutions() == [[true], [true], [true]]
- assert (list(self.kls(
- true, true,
- boolean.OrRestriction(false, true)).cnf_solutions()) ==
- list([[true], [true], [false, true]]))
+ assert self.kls(self.kls(true, true), true).cnf_solutions() == [
+ [true],
+ [true],
+ [true],
+ ]
+ assert list(
+ self.kls(true, true, boolean.OrRestriction(false, true)).cnf_solutions()
+ ) == list([[true], [true], [false, true]])
assert not self.kls().cnf_solutions()
@@ -122,42 +136,53 @@ class TestOrRestriction(base):
kls = boolean.OrRestriction
def test_match(self):
- assert self.kls(true, true, node_type='foo').match(None)
- assert self.kls(false, true, false, node_type='foo').match(None)
- assert self.kls(true, false, false, node_type='foo').match(None)
- assert self.kls(false, false, true, node_type='foo').match(None)
- assert not self.kls(false, false, node_type='foo').match(None)
+ assert self.kls(true, true, node_type="foo").match(None)
+ assert self.kls(false, true, false, node_type="foo").match(None)
+ assert self.kls(true, false, false, node_type="foo").match(None)
+ assert self.kls(false, false, true, node_type="foo").match(None)
+ assert not self.kls(false, false, node_type="foo").match(None)
def test_negate_match(self):
for x in ((true, false), (false, true), (true, true)):
- assert not self.kls(node_type='foo', negate=True, *x).match(None)
- assert self.kls(false, false, node_type='foo', negate=True).match(None)
+ assert not self.kls(node_type="foo", negate=True, *x).match(None)
+ assert self.kls(false, false, node_type="foo", negate=True).match(None)
def test_dnf_solutions(self):
assert self.kls(true, true).dnf_solutions() == [[true], [true]]
- assert (list(map(set, self.kls(
- true, true,
- boolean.AndRestriction(false, true)).dnf_solutions())) ==
- list(map(set, [[true], [true], [false, true]])))
- assert self.kls(self.kls(true, false), true).dnf_solutions() == [[true], [false], [true]]
+ assert list(
+ map(
+ set,
+ self.kls(
+ true, true, boolean.AndRestriction(false, true)
+ ).dnf_solutions(),
+ )
+ ) == list(map(set, [[true], [true], [false, true]]))
+ assert self.kls(self.kls(true, false), true).dnf_solutions() == [
+ [true],
+ [false],
+ [true],
+ ]
assert self.kls().dnf_solutions() == [[]]
def test_cnf_solutions(self):
assert self.kls(true, true).cnf_solutions() == [[true, true]]
- assert ([set(x) for x in self.kls(
- true, true,
- boolean.AndRestriction(false, true)).cnf_solutions()] ==
- [set(x) for x in [[true, false], [true, true]]])
-
- assert ([set(x) for x in self.kls(self.kls(
- true, true,
- boolean.AndRestriction(false, true))).cnf_solutions()] ==
- [set(x) for x in [[true, false], [true, true]]])
-
- assert (set(self.kls(
- self.kls(true, false),
- true).cnf_solutions()[0]) ==
- set([true, false, true]))
+ assert [
+ set(x)
+ for x in self.kls(
+ true, true, boolean.AndRestriction(false, true)
+ ).cnf_solutions()
+ ] == [set(x) for x in [[true, false], [true, true]]]
+
+ assert [
+ set(x)
+ for x in self.kls(
+ self.kls(true, true, boolean.AndRestriction(false, true))
+ ).cnf_solutions()
+ ] == [set(x) for x in [[true, false], [true, true]]]
+
+ assert set(self.kls(self.kls(true, false), true).cnf_solutions()[0]) == set(
+ [true, false, true]
+ )
assert not self.kls().cnf_solutions()
@@ -167,8 +192,8 @@ class TestJustOneRestriction(base):
kls = boolean.JustOneRestriction
def test_match(self):
- assert self.kls(true, false, node_type='foo').match(None)
- assert self.kls(false, true, false, node_type='foo').match(None)
- assert not self.kls(false, false, node_type='foo').match(None)
- assert not self.kls(true, false, true, node_type='foo').match(None)
- assert not self.kls(true, true, true, node_type='foo').match(None)
+ assert self.kls(true, false, node_type="foo").match(None)
+ assert self.kls(false, true, false, node_type="foo").match(None)
+ assert not self.kls(false, false, node_type="foo").match(None)
+ assert not self.kls(true, false, true, node_type="foo").match(None)
+ assert not self.kls(true, true, true, node_type="foo").match(None)
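
A pattern repeated throughout test_boolean.py: when an over-long assert compares against a bracketed literal, the wrap happens inside that trailing bracket, one element per line with a trailing comma, rather than by parenthesizing the whole comparison. Restating one hunk from test_cnf_solutions:

    # before
    assert self.kls(self.kls(true, true), true).cnf_solutions() == [[true], [true], [true]]

    # after: the trailing list literal absorbs the split
    assert self.kls(self.kls(true, true), true).cnf_solutions() == [
        [true],
        [true],
        [true],
    ]
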
diff --git a/tests/restrictions/test_delegated.py b/tests/restrictions/test_delegated.py
index 520285b54..4ffdd1abf 100644
--- a/tests/restrictions/test_delegated.py
+++ b/tests/restrictions/test_delegated.py
@@ -13,15 +13,20 @@ class Test_delegate(TestRestriction):
self.kls(None, None)
y = True
l = []
+
def f(x, mode):
l.append(mode)
- if mode == 'force_False':
+ if mode == "force_False":
return not y
return y
for negated in (False, True):
+
def assertIt(got, expected):
- assert got == expected, f"got={got!r}, expected={expected!r}, negate={negated!r}"
+ assert (
+ got == expected
+ ), f"got={got!r}, expected={expected!r}, negate={negated!r}"
+
y = True
l[:] = []
o = self.kls(f, negate=negated)
@@ -31,11 +36,29 @@ class Test_delegate(TestRestriction):
self.assertNotMatches(o, [None], negated=negated)
if negated:
- assertIt(l, ['match', 'force_False', 'force_True',
- 'match', 'force_False', 'force_True'])
+ assertIt(
+ l,
+ [
+ "match",
+ "force_False",
+ "force_True",
+ "match",
+ "force_False",
+ "force_True",
+ ],
+ )
else:
- assertIt(l, ['match', 'force_True', 'force_False',
- 'match', 'force_True', 'force_False'])
+ assertIt(
+ l,
+ [
+ "match",
+ "force_True",
+ "force_False",
+ "match",
+ "force_True",
+ "force_False",
+ ],
+ )
def test_caching(self):
assert not self.kls.inst_caching
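
test_delegated.py adds two further rules: an empty line is placed before and after a function defined inside another function (the lone '+' lines above), and an assert that carries a failure message is wrapped by parenthesizing the condition so the message stays attached after the closing parenthesis. The assertIt helper reads like this after the change (negated comes from the enclosing loop, as in the original test):

    def assertIt(got, expected):
        assert (
            got == expected
        ), f"got={got!r}, expected={expected!r}, negate={negated!r}"
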
diff --git a/tests/restrictions/test_packages.py b/tests/restrictions/test_packages.py
index e8cf0cab0..e5d4df07c 100644
--- a/tests/restrictions/test_packages.py
+++ b/tests/restrictions/test_packages.py
@@ -19,6 +19,7 @@ class TestPackageRestriction(TestRestriction):
if packages.PackageRestriction is packages.PackageRestriction:
kls = packages.PackageRestriction
else:
+
class kls(packages.PackageRestriction, packages.PackageRestriction_mixin):
__slots__ = ()
__inst_caching__ = packages.PackageRestriction.__inst_caching__
@@ -50,8 +51,11 @@ class TestPackageRestriction(TestRestriction):
raise exceptions_d.get(attr[4:], None)()
raise AttributeError("monkey lover")
- exceptions_d = {"KeyboardInterrupt":KeyboardInterrupt,
- "RuntimeError":RuntimeError, "SystemExit":SystemExit}
+ exceptions_d = {
+ "KeyboardInterrupt": KeyboardInterrupt,
+ "RuntimeError": RuntimeError,
+ "SystemExit": SystemExit,
+ }
for mode in ("match", "force_True", "force_False"):
caplog.clear()
@@ -62,7 +66,9 @@ class TestPackageRestriction(TestRestriction):
# ensure various exceptions are passed through
for k in (KeyboardInterrupt, RuntimeError, SystemExit):
with pytest.raises(k):
- getattr(self.kls(f"exc_{k.__name__}", AlwaysSelfIntersect), mode)(foo())
+ getattr(self.kls(f"exc_{k.__name__}", AlwaysSelfIntersect), mode)(
+ foo()
+ )
# check that it only does string comparison in exception catching.
class foo:
@@ -74,19 +80,23 @@ class TestPackageRestriction(TestRestriction):
assert not self.kls("foon", AlwaysSelfIntersect).match(foo())
- @pytest.mark.parametrize('value', ('val', 'val.dar'))
+ @pytest.mark.parametrize("value", ("val", "val.dar"))
def test_attr(self, value):
assert self.kls(value, values.AlwaysTrue).attr == value
assert self.kls(value, values.AlwaysTrue).attrs == (value,)
def test_eq(self):
- assert self.kls('one', values.AlwaysTrue) == self.kls('one', values.AlwaysTrue)
- assert self.kls('one', values.AlwaysTrue) != self.kls('one', values.AlwaysTrue, negate=True)
- assert self.kls('one', values.AlwaysTrue) != self.kls('two', values.AlwaysTrue)
- assert self.kls('one', values.AlwaysTrue, negate=True) != self.kls('one', values.AlwaysFalse, negate=True)
+ assert self.kls("one", values.AlwaysTrue) == self.kls("one", values.AlwaysTrue)
+ assert self.kls("one", values.AlwaysTrue) != self.kls(
+ "one", values.AlwaysTrue, negate=True
+ )
+ assert self.kls("one", values.AlwaysTrue) != self.kls("two", values.AlwaysTrue)
+ assert self.kls("one", values.AlwaysTrue, negate=True) != self.kls(
+ "one", values.AlwaysFalse, negate=True
+ )
def test_hash(self):
- inst = self.kls('one.dar', AlwaysSelfIntersect())
+ inst = self.kls("one.dar", AlwaysSelfIntersect())
hash(inst)
@@ -95,7 +105,7 @@ class values_callback(values.base):
__slots__ = ("callback",)
def __init__(self, callback):
- object.__setattr__(self, 'callback', callback)
+ object.__setattr__(self, "callback", callback)
def match(self, val):
return self.callback((None, val))
@@ -112,7 +122,10 @@ class TestPackageRestrictionMulti:
if packages.PackageRestriction is packages.PackageRestriction:
kls = packages.PackageRestrictionMulti
else:
- class kls(packages.PackageRestrictionMulti, packages.PackageRestrictionMulti_mixin):
+
+ class kls(
+ packages.PackageRestrictionMulti, packages.PackageRestrictionMulti_mixin
+ ):
__slots__ = ()
__inst_caching__ = packages.PackageRestrictionMulti.__inst_caching__
@@ -125,6 +138,7 @@ class TestPackageRestrictionMulti:
def test_values(self):
l = []
+
def f(*args):
assert len(args) == 1
l.append(args[0])
@@ -136,29 +150,48 @@ class TestPackageRestrictionMulti:
o.match(pkg)
assert not l
- pkg['repo'] = 1
+ pkg["repo"] = 1
o.match(pkg)
assert not l
- pkg['asdf'] = AttrAccessible(far=2)
+ pkg["asdf"] = AttrAccessible(far=2)
o.match(pkg)
- assert l == [(None, [2, 1],)]
+ assert l == [
+ (
+ None,
+ [2, 1],
+ )
+ ]
l.clear()
o.force_True(pkg)
- assert l == [(True, pkg, ('asdf.far', 'repo'), [2, 1],)]
+ assert l == [
+ (
+ True,
+ pkg,
+ ("asdf.far", "repo"),
+ [2, 1],
+ )
+ ]
l.clear()
o.force_False(pkg)
- assert l == [(False, pkg, ('asdf.far', 'repo'), [2, 1],)]
+ assert l == [
+ (
+ False,
+ pkg,
+ ("asdf.far", "repo"),
+ [2, 1],
+ )
+ ]
def test_conditional():
- p = (packages.PackageRestriction('one', values.AlwaysTrue),)
- p2 = (packages.PackageRestriction('one', values.AlwaysFalse),)
+ p = (packages.PackageRestriction("one", values.AlwaysTrue),)
+ p2 = (packages.PackageRestriction("one", values.AlwaysFalse),)
v = values.AlwaysTrue
v2 = values.AlwaysFalse
- assert packages.Conditional('use', v, p) == packages.Conditional('use', v, p)
- assert packages.Conditional('use', v2, p) != packages.Conditional('use', v, p)
- assert packages.Conditional('use', v, p) != packages.Conditional('use', v, p2)
- assert packages.Conditional('use1', v, p) != packages.Conditional('use', v, p)
+ assert packages.Conditional("use", v, p) == packages.Conditional("use", v, p)
+ assert packages.Conditional("use", v2, p) != packages.Conditional("use", v, p)
+ assert packages.Conditional("use", v, p) != packages.Conditional("use", v, p2)
+ assert packages.Conditional("use1", v, p) != packages.Conditional("use", v, p)
diff --git a/tests/restrictions/test_required_use.py b/tests/restrictions/test_required_use.py
index a669c5038..84168a21d 100644
--- a/tests/restrictions/test_required_use.py
+++ b/tests/restrictions/test_required_use.py
@@ -6,116 +6,170 @@ from pkgcore.ebuild.eapi import get_eapi
from pkgcore.ebuild.ebuild_src import base as ebuild
from pkgcore.restrictions.required_use import find_constraint_satisfaction as solver
+
def parse(required_use):
- o = ebuild(None, 'dev-util/diffball-0.1-r1')
- object.__setattr__(o, 'eapi', get_eapi('8', suppress_unsupported=True))
- object.__setattr__(o, 'data', {'REQUIRED_USE': required_use})
+ o = ebuild(None, "dev-util/diffball-0.1-r1")
+ object.__setattr__(o, "eapi", get_eapi("8", suppress_unsupported=True))
+ object.__setattr__(o, "data", {"REQUIRED_USE": required_use})
return o.required_use
+
def test_simple():
- required_use = parse(required_use='bar foo')
- assert tuple(solver(required_use, {'bar', 'foo'})) == ({'bar': True, 'foo': True},)
+ required_use = parse(required_use="bar foo")
+ assert tuple(solver(required_use, {"bar", "foo"})) == ({"bar": True, "foo": True},)
+
def test_negative_simple():
- required_use = parse(required_use='!bar foo')
- assert tuple(solver(required_use, {'bar', 'foo'})) == ({'bar': False, 'foo': True},)
+ required_use = parse(required_use="!bar foo")
+ assert tuple(solver(required_use, {"bar", "foo"})) == ({"bar": False, "foo": True},)
+
def test_missing_iuse():
- required_use = parse(required_use='!bar foo? ( bar )')
- assert tuple(solver(required_use, {'bar'})) == ({'bar': False, 'foo': False},)
-
-@pytest.mark.parametrize(('required_use', 'exclude'), (
- ('bar? ( foo )', {'bar': True, 'foo': False}),
- ('bar? ( !foo )', {'bar': True, 'foo': True}),
- ('!bar? ( foo )', {'bar': False, 'foo': False}),
- ('!bar? ( !foo )', {'bar': False, 'foo': True}),
-))
+ required_use = parse(required_use="!bar foo? ( bar )")
+ assert tuple(solver(required_use, {"bar"})) == ({"bar": False, "foo": False},)
+
+
+@pytest.mark.parametrize(
+ ("required_use", "exclude"),
+ (
+ ("bar? ( foo )", {"bar": True, "foo": False}),
+ ("bar? ( !foo )", {"bar": True, "foo": True}),
+ ("!bar? ( foo )", {"bar": False, "foo": False}),
+ ("!bar? ( !foo )", {"bar": False, "foo": True}),
+ ),
+)
def test_condition(required_use, exclude):
required_use = parse(required_use=required_use)
- solutions = tuple(solver(required_use, {'bar', 'foo'}))
+ solutions = tuple(solver(required_use, {"bar", "foo"}))
assert len(solutions) == 3
assert exclude not in solutions
-@pytest.mark.parametrize(('required_use', 'exclude'), (
- ('?? ( bar foo )', {'bar': True, 'foo': True}),
- ('?? ( !bar foo )', {'bar': False, 'foo': True}),
- ('?? ( bar !foo )', {'bar': True, 'foo': False}),
- ('?? ( !bar !foo )', {'bar': False, 'foo': False}),
-))
+
+@pytest.mark.parametrize(
+ ("required_use", "exclude"),
+ (
+ ("?? ( bar foo )", {"bar": True, "foo": True}),
+ ("?? ( !bar foo )", {"bar": False, "foo": True}),
+ ("?? ( bar !foo )", {"bar": True, "foo": False}),
+ ("?? ( !bar !foo )", {"bar": False, "foo": False}),
+ ),
+)
def test_at_most(required_use, exclude):
required_use = parse(required_use=required_use)
- solutions = tuple(solver(required_use, {'bar', 'foo'}))
+ solutions = tuple(solver(required_use, {"bar", "foo"}))
assert len(solutions) == 3
assert exclude not in solutions
-@pytest.mark.parametrize(('required_use', 'exclude'), (
- ('|| ( bar foo )', {'bar': False, 'foo': False}),
- ('|| ( !bar foo )', {'bar': True, 'foo': False}),
- ('|| ( bar !foo )', {'bar': False, 'foo': True}),
- ('|| ( !bar !foo )', {'bar': True, 'foo': True}),
-))
+
+@pytest.mark.parametrize(
+ ("required_use", "exclude"),
+ (
+ ("|| ( bar foo )", {"bar": False, "foo": False}),
+ ("|| ( !bar foo )", {"bar": True, "foo": False}),
+ ("|| ( bar !foo )", {"bar": False, "foo": True}),
+ ("|| ( !bar !foo )", {"bar": True, "foo": True}),
+ ),
+)
def test_or(required_use, exclude):
required_use = parse(required_use=required_use)
- solutions = tuple(solver(required_use, {'bar', 'foo'}))
+ solutions = tuple(solver(required_use, {"bar", "foo"}))
assert len(solutions) == 3
assert exclude not in solutions
-@pytest.mark.parametrize(('required_use', 'include'), (
- ('bar foo', {'bar': True, 'foo': True}),
- ('!bar foo', {'bar': False, 'foo': True}),
- ('bar !foo', {'bar': True, 'foo': False}),
- ('!bar !foo', {'bar': False, 'foo': False}),
-))
+
+@pytest.mark.parametrize(
+ ("required_use", "include"),
+ (
+ ("bar foo", {"bar": True, "foo": True}),
+ ("!bar foo", {"bar": False, "foo": True}),
+ ("bar !foo", {"bar": True, "foo": False}),
+ ("!bar !foo", {"bar": False, "foo": False}),
+ ),
+)
def test_and(required_use, include):
required_use = parse(required_use=required_use)
- solutions = tuple(solver(required_use, {'bar', 'foo'}))
+ solutions = tuple(solver(required_use, {"bar", "foo"}))
assert solutions == (include,)
-@pytest.mark.parametrize(('required_use', 'iuse', 'force_true'), (
- pytest.param(
- 'test? ( jpeg jpeg2k tiff truetype )',
- {'examples', 'imagequant', 'jpeg', 'jpeg2k', 'lcms', 'test', 'tiff', 'tk', 'truetype', 'webp', 'xcb', 'zlib'},
- {'test'},
- id='pillow'),
- pytest.param(
- 'test? ( cuda gpl? ( openssl? ( bindist ) fdk? ( bindist ) ) ) cuda? ( nvenc ) ^^ ( openssl fdk )',
- {'cuda', 'gpl', 'openssl', 'bindist', 'fdk', 'test', 'nvenc'},
- {'test', 'fdk'},
- id='ffmpeg'),
- pytest.param(
- '|| ( openssl ( gnutls ssl ) ) ssl? ( ( gnutls openssl ) )',
- {'openssl', 'gnutls', 'ssl'},
- {'ssl'},
- id='weird'),
- pytest.param(
- '|| ( ssl ( gnutls? ( openssl ) ) )',
- {'openssl', 'gnutls', 'ssl'},
- {'gnutls'},
- id='weird2'),
-))
+
+@pytest.mark.parametrize(
+ ("required_use", "iuse", "force_true"),
+ (
+ pytest.param(
+ "test? ( jpeg jpeg2k tiff truetype )",
+ {
+ "examples",
+ "imagequant",
+ "jpeg",
+ "jpeg2k",
+ "lcms",
+ "test",
+ "tiff",
+ "tk",
+ "truetype",
+ "webp",
+ "xcb",
+ "zlib",
+ },
+ {"test"},
+ id="pillow",
+ ),
+ pytest.param(
+ "test? ( cuda gpl? ( openssl? ( bindist ) fdk? ( bindist ) ) ) cuda? ( nvenc ) ^^ ( openssl fdk )",
+ {"cuda", "gpl", "openssl", "bindist", "fdk", "test", "nvenc"},
+ {"test", "fdk"},
+ id="ffmpeg",
+ ),
+ pytest.param(
+ "|| ( openssl ( gnutls ssl ) ) ssl? ( ( gnutls openssl ) )",
+ {"openssl", "gnutls", "ssl"},
+ {"ssl"},
+ id="weird",
+ ),
+ pytest.param(
+ "|| ( ssl ( gnutls? ( openssl ) ) )",
+ {"openssl", "gnutls", "ssl"},
+ {"gnutls"},
+ id="weird2",
+ ),
+ ),
+)
def test_complex_force_true(required_use, iuse, force_true):
required_use = parse(required_use=required_use)
solution = None
for solution in islice(solver(required_use, iuse, force_true=force_true), 20):
assert all(solution[flag] for flag in force_true)
use_flags = tuple(k for k, v in solution.items() if v)
- misses = [restrict for restrict in required_use.evaluate_depset(use_flags) if not restrict.match(use_flags)]
+ misses = [
+ restrict
+ for restrict in required_use.evaluate_depset(use_flags)
+ if not restrict.match(use_flags)
+ ]
assert not misses
assert solution is not None
-@pytest.mark.parametrize(('required_use', 'iuse', 'force_false'), (
- pytest.param(
- '|| ( openssl ( gnutls ssl ) )',
- {'openssl', 'gnutls', 'ssl'},
- {'openssl'},
- id='custom'),
-))
+
+@pytest.mark.parametrize(
+ ("required_use", "iuse", "force_false"),
+ (
+ pytest.param(
+ "|| ( openssl ( gnutls ssl ) )",
+ {"openssl", "gnutls", "ssl"},
+ {"openssl"},
+ id="custom",
+ ),
+ ),
+)
def test_complex_force_false(required_use, iuse, force_false):
required_use = parse(required_use=required_use)
solution = None
for solution in islice(solver(required_use, iuse, force_false=force_false), 20):
assert all(not solution[flag] for flag in force_false)
use_flags = tuple(k for k, v in solution.items() if v)
- misses = [restrict for restrict in required_use.evaluate_depset(use_flags) if not restrict.match(use_flags)]
+ misses = [
+ restrict
+ for restrict in required_use.evaluate_depset(use_flags)
+ if not restrict.match(use_flags)
+ ]
assert not misses
assert solution is not None
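
The standalone '+' lines scattered through test_required_use.py come from the blank-line rules at module scope: top-level functions, including decorated test functions, are separated by exactly two blank lines, so a second blank line is inserted wherever only one existed. A minimal before/after sketch with hypothetical functions:

    # before: one blank line between module-level definitions
    def parse(text):
        return text.split()

    def test_parse():
        assert parse("a b") == ["a", "b"]

    # after: exactly two blank lines at module level
    def parse(text):
        return text.split()


    def test_parse():
        assert parse("a b") == ["a", "b"]
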
diff --git a/tests/restrictions/test_restriction.py b/tests/restrictions/test_restriction.py
index a1db20ec6..893c722a4 100644
--- a/tests/restrictions/test_restriction.py
+++ b/tests/restrictions/test_restriction.py
@@ -10,10 +10,10 @@ from .utils import TestRestriction
class SillyBool(restriction.base):
"""Extra stupid version of AlwaysBool to test base.force_{True,False}."""
- __slots__ = ('negate',)
+ __slots__ = ("negate",)
def __init__(self, negate=False):
- object.__setattr__(self, 'negate', negate)
+ object.__setattr__(self, "negate", negate)
def match(self, *args, **kwargs):
return not self.negate
@@ -49,7 +49,7 @@ class TestBase(TestRestriction):
class TestAlwaysBool(TestRestriction):
- bool_kls = partial(restriction.AlwaysBool, 'foo')
+ bool_kls = partial(restriction.AlwaysBool, "foo")
def test_true(self):
true_r = self.bool_kls(True)
@@ -82,19 +82,18 @@ class NoneMatch(restriction.base):
return val is None
def __repr__(self):
- return '<NoneMatch>'
+ return "<NoneMatch>"
def __str__(self):
- return 'NoneMatch'
+ return "NoneMatch"
class TestAnyMatch(TestRestriction):
-
def test_basic(self):
for negate in (False, True):
- inst = restriction.AnyMatch(NoneMatch(), 'spork', negate=negate)
- self.assertMatch(inst, ['spork', None], negated=negate)
- self.assertNotMatch(inst, ['spork'], negated=negate)
+ inst = restriction.AnyMatch(NoneMatch(), "spork", negate=negate)
+ self.assertMatch(inst, ["spork", None], negated=negate)
+ self.assertNotMatch(inst, ["spork"], negated=negate)
self.assertNotMatch(inst, (), negated=negate)
# just test these do not traceback
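
test_restriction.py shows the complementary blank-line rule: an empty line sitting directly after a class or def statement, before the first line of the body, is dropped. Restating the TestAnyMatch hunk with the method body elided:

    # before
    class TestAnyMatch(TestRestriction):

        def test_basic(self):
            ...

    # after: no blank line between the class statement and its first method
    class TestAnyMatch(TestRestriction):
        def test_basic(self):
            ...
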
diff --git a/tests/restrictions/test_util.py b/tests/restrictions/test_util.py
index 50298211c..0c5a3ef68 100644
--- a/tests/restrictions/test_util.py
+++ b/tests/restrictions/test_util.py
@@ -2,12 +2,15 @@ from pkgcore.restrictions import packages, util, values
class TestCollectPackageRestrictions:
-
def test_collect_all(self):
prs = [packages.PackageRestriction("category", values.AlwaysTrue)] * 10
- assert prs == list(util.collect_package_restrictions(
- packages.AndRestriction(
- packages.OrRestriction(), packages.AndRestriction(), *prs)))
+ assert prs == list(
+ util.collect_package_restrictions(
+ packages.AndRestriction(
+ packages.OrRestriction(), packages.AndRestriction(), *prs
+ )
+ )
+ )
def test_collect_specific(self):
prs = {
@@ -16,11 +19,13 @@ class TestCollectPackageRestrictions:
}
r = packages.AndRestriction(
- packages.OrRestriction(*prs.values()), packages.AlwaysTrue)
+ packages.OrRestriction(*prs.values()), packages.AlwaysTrue
+ )
for k, v in prs.items():
assert [v] == list(util.collect_package_restrictions(r, attrs=[k]))
- r = packages.AndRestriction(packages.OrRestriction(
- *prs.values()), *prs.values())
+ r = packages.AndRestriction(
+ packages.OrRestriction(*prs.values()), *prs.values()
+ )
for k, v in prs.items():
assert [v] * 2 == list(util.collect_package_restrictions(r, attrs=[k]))
diff --git a/tests/restrictions/test_values.py b/tests/restrictions/test_values.py
index 3198c311c..a971e69d4 100644
--- a/tests/restrictions/test_values.py
+++ b/tests/restrictions/test_values.py
@@ -9,6 +9,7 @@ from .utils import TestRestriction
class SillyBool(values.base):
"""Extra stupid version of AlwaysBool to test base.force_{True,False}."""
+
def __init__(self, negate=False):
object.__setattr__(self, "negate", negate)
@@ -17,10 +18,9 @@ class SillyBool(values.base):
class TestBase(TestRestriction):
-
def test_force(self):
- self.assertMatches(SillyBool(negate=False), None, [None]*3)
- self.assertNotMatches(SillyBool(negate=True), None, [None]*3)
+ self.assertMatches(SillyBool(negate=False), None, [None] * 3)
+ self.assertNotMatches(SillyBool(negate=True), None, [None] * 3)
class TestGetAttr(TestRestriction):
@@ -33,8 +33,8 @@ class TestGetAttr(TestRestriction):
# takes the right number of arguments.
# TODO test negate handling
- succeeds = values.GetAttrRestriction('test', values.AlwaysTrue)
- fails = values.GetAttrRestriction('test', values.AlwaysFalse)
+ succeeds = values.GetAttrRestriction("test", values.AlwaysTrue)
+ fails = values.GetAttrRestriction("test", values.AlwaysFalse)
class Dummy:
test = True
@@ -43,11 +43,12 @@ class TestGetAttr(TestRestriction):
class FakePackage:
"""XXX this is vastly too minimal."""
+
value = dummy
pkg = FakePackage()
- args = [pkg, 'value', dummy]
+ args = [pkg, "value", dummy]
self.assertForceTrue(succeeds, args)
self.assertNotForceFalse(succeeds, args)
self.assertNotForceTrue(fails, args)
@@ -60,37 +61,49 @@ class TestStrRegex(TestRestriction):
@pytest.mark.parametrize("negated", (True, False))
def test_match(self, negated):
- self.assertMatches(self.kls('foo.*r', match=True,
- negate=negated),
- 'foobar', [None, None, 'foobar'], negated=negated)
- self.assertNotMatches(self.kls('foo.*r', match=True,
- negate=negated),
- 'afoobar', [None, None, 'afoobar'], negated=negated)
+ self.assertMatches(
+ self.kls("foo.*r", match=True, negate=negated),
+ "foobar",
+ [None, None, "foobar"],
+ negated=negated,
+ )
+ self.assertNotMatches(
+ self.kls("foo.*r", match=True, negate=negated),
+ "afoobar",
+ [None, None, "afoobar"],
+ negated=negated,
+ )
@pytest.mark.parametrize("negated", (True, False))
def test_search(self, negated):
- self.assertMatches(self.kls('foo.*r', negate=negated),
- 'asdfoobar', [None, None, 'asdfoobar'], negated=negated)
- self.assertNotMatches(self.kls('foo.*r', negate=negated),
- 'afobar', [None, None, 'afobar'], negated=negated)
+ self.assertMatches(
+ self.kls("foo.*r", negate=negated),
+ "asdfoobar",
+ [None, None, "asdfoobar"],
+ negated=negated,
+ )
+ self.assertNotMatches(
+ self.kls("foo.*r", negate=negated),
+ "afobar",
+ [None, None, "afobar"],
+ negated=negated,
+ )
def test_case_sensitivity(self):
- self.assertNotMatches(self.kls('foo'), 'FoO', ['FOo']*3)
- self.assertMatches(self.kls('foo', False), 'FoO', ['fOo']*3)
+ self.assertNotMatches(self.kls("foo"), "FoO", ["FOo"] * 3)
+ self.assertMatches(self.kls("foo", False), "FoO", ["fOo"] * 3)
def test_str(self):
- assert 'search spork' == str(self.kls('spork'))
- assert 'not search spork' == str(self.kls('spork', negate=True))
- assert 'match spork' == str(self.kls('spork', match=True))
- assert 'not match spork' == str(self.kls('spork', match=True, negate=True))
+ assert "search spork" == str(self.kls("spork"))
+ assert "not search spork" == str(self.kls("spork", negate=True))
+ assert "match spork" == str(self.kls("spork", match=True))
+ assert "not match spork" == str(self.kls("spork", match=True, negate=True))
def test_repr(self):
for restr, string in (
- (self.kls('spork'), "<StrRegex 'spork' search @"),
- (self.kls('spork', match=True),
- "<StrRegex 'spork' match @"),
- (self.kls('spork', negate=True),
- "<StrRegex 'spork' negated search @"),
+ (self.kls("spork"), "<StrRegex 'spork' search @"),
+ (self.kls("spork", match=True), "<StrRegex 'spork' match @"),
+ (self.kls("spork", negate=True), "<StrRegex 'spork' negated search @"),
):
assert repr(restr).startswith(string), (restr, string)
@@ -100,6 +113,7 @@ class TestStrExactMatch(TestRestriction):
if values.StrExactMatch is values.StrExactMatch:
kls = values.StrExactMatch
else:
+
class kls(values.StrExactMatch, values.base):
__slots__ = ()
__inst_caching__ = True
@@ -112,10 +126,18 @@ class TestStrExactMatch(TestRestriction):
@pytest.mark.parametrize("negated", (True, False))
def test_case_sensitive(self, negated):
- self.assertMatches(self.kls('package', negate=negated),
- 'package', ['package']*3, negated=negated)
- self.assertNotMatches(self.kls('Package', negate=negated),
- 'package', ['package']*3, negated=negated)
+ self.assertMatches(
+ self.kls("package", negate=negated),
+ "package",
+ ["package"] * 3,
+ negated=negated,
+ )
+ self.assertNotMatches(
+ self.kls("Package", negate=negated),
+ "package",
+ ["package"] * 3,
+ negated=negated,
+ )
@pytest.mark.parametrize("negated", (True, False))
def test_case_insensitive(self, negated):
@@ -123,27 +145,41 @@ class TestStrExactMatch(TestRestriction):
# we test 1/0, since bool protocol is supported for those kwds-
# thus we verify it, more specifically we verify the cpy
# support.
- self.assertMatches(self.kls('package', case_sensitive=True,
- negate=negated),
- 'package', ['package']*3, negated=negated)
- self.assertMatches(self.kls('package', case_sensitive=1,
- negate=negated),
- 'package', ['package']*3, negated=negated)
- self.assertMatches(self.kls('Package', case_sensitive=False,
- negate=negated),
- 'package', ['package']*3, negated=negated)
- self.assertMatches(self.kls('Package', case_sensitive=0,
- negate=negated),
- 'package', ['package']*3, negated=negated)
+ self.assertMatches(
+ self.kls("package", case_sensitive=True, negate=negated),
+ "package",
+ ["package"] * 3,
+ negated=negated,
+ )
+ self.assertMatches(
+ self.kls("package", case_sensitive=1, negate=negated),
+ "package",
+ ["package"] * 3,
+ negated=negated,
+ )
+ self.assertMatches(
+ self.kls("Package", case_sensitive=False, negate=negated),
+ "package",
+ ["package"] * 3,
+ negated=negated,
+ )
+ self.assertMatches(
+ self.kls("Package", case_sensitive=0, negate=negated),
+ "package",
+ ["package"] * 3,
+ negated=negated,
+ )
@pytest.mark.parametrize("negate", (True, False))
def test__eq__(self, negate):
assert self.kls("rsync", negate=negate) == self.kls("rsync", negate=negate)
for x in "Ca":
- assert self.kls("rsync", negate=negate) != self.kls("rsyn"+x, negate=negate)
- assert (
- self.kls("Rsync", case_sensitive=False, negate=negate) ==
- self.kls("rsync", case_sensitive=False, negate=negate))
+ assert self.kls("rsync", negate=negate) != self.kls(
+ "rsyn" + x, negate=negate
+ )
+ assert self.kls("Rsync", case_sensitive=False, negate=negate) == self.kls(
+ "rsync", case_sensitive=False, negate=negate
+ )
class TestStrGlobMatch(TestRestriction):
@@ -152,49 +188,83 @@ class TestStrGlobMatch(TestRestriction):
@pytest.mark.parametrize("negated", (True, False))
def test_matching(self, negated):
- self.assertMatches(self.kls('pack', negate=negated),
- 'package', ['package']*3, negated=negated)
- self.assertNotMatches(self.kls('pack', negate=negated),
- 'apack', ['apack']*3, negated=negated)
+ self.assertMatches(
+ self.kls("pack", negate=negated),
+ "package",
+ ["package"] * 3,
+ negated=negated,
+ )
+ self.assertNotMatches(
+ self.kls("pack", negate=negated), "apack", ["apack"] * 3, negated=negated
+ )
# case sensitive...
- self.assertMatches(self.kls('pAcK', case_sensitive=False,
- negate=negated),
- 'pack', ['pack']*3, negated=negated)
- self.assertNotMatches(self.kls('pAcK',
- case_sensitive=True, negate=negated),
- 'pack', ['pack']*3, negated=negated)
+ self.assertMatches(
+ self.kls("pAcK", case_sensitive=False, negate=negated),
+ "pack",
+ ["pack"] * 3,
+ negated=negated,
+ )
+ self.assertNotMatches(
+ self.kls("pAcK", case_sensitive=True, negate=negated),
+ "pack",
+ ["pack"] * 3,
+ negated=negated,
+ )
# check prefix.
- self.assertMatches(self.kls('pAck', case_sensitive=False,
- prefix=True, negate=negated),
- 'packa', ['packa']*3, negated=negated)
-
- self.assertNotMatches(self.kls('pack', prefix=False,
- negate=negated),
- 'apacka', ['apacka']*3, negated=negated)
-
- self.assertMatches(self.kls('pack', prefix=False,
- negate=negated),
- 'apack', ['apack']*3, negated=negated)
+ self.assertMatches(
+ self.kls("pAck", case_sensitive=False, prefix=True, negate=negated),
+ "packa",
+ ["packa"] * 3,
+ negated=negated,
+ )
+
+ self.assertNotMatches(
+ self.kls("pack", prefix=False, negate=negated),
+ "apacka",
+ ["apacka"] * 3,
+ negated=negated,
+ )
+
+ self.assertMatches(
+ self.kls("pack", prefix=False, negate=negated),
+ "apack",
+ ["apack"] * 3,
+ negated=negated,
+ )
# daft, but verifies it's not doing contains.
- self.assertNotMatches(self.kls('pack', prefix=False,
- negate=negated),
- 'apacka', ['apacka']*3, negated=negated)
-
- self.assertNotMatches(self.kls('pack', prefix=False,
- case_sensitive=False, negate=negated),
- 'aPacka', ['aPacka']*3, negated=negated)
+ self.assertNotMatches(
+ self.kls("pack", prefix=False, negate=negated),
+ "apacka",
+ ["apacka"] * 3,
+ negated=negated,
+ )
+
+ self.assertNotMatches(
+ self.kls("pack", prefix=False, case_sensitive=False, negate=negated),
+ "aPacka",
+ ["aPacka"] * 3,
+ negated=negated,
+ )
def test__eq__(self):
assert self.kls("rsync", prefix=False) != self.kls("rsync", prefix=True)
for negate in (True, False):
assert self.kls("rsync", negate=negate) == self.kls("rsync", negate=negate)
for x in "Ca":
- assert self.kls("rsync", negate=negate) != self.kls("rsyn"+x, negate=negate)
- assert self.kls("Rsync", case_sensitive=False, negate=negate) != self.kls("rsync", case_sensitive=True, negate=negate)
- assert self.kls("rsync", case_sensitive=False, negate=negate) != self.kls("rsync", case_sensitive=True, negate=negate)
- assert self.kls("rsync", case_sensitive=False, negate=negate) != self.kls("rsync", case_sensitive=True, negate=not negate)
+ assert self.kls("rsync", negate=negate) != self.kls(
+ "rsyn" + x, negate=negate
+ )
+ assert self.kls("Rsync", case_sensitive=False, negate=negate) != self.kls(
+ "rsync", case_sensitive=True, negate=negate
+ )
+ assert self.kls("rsync", case_sensitive=False, negate=negate) != self.kls(
+ "rsync", case_sensitive=True, negate=negate
+ )
+ assert self.kls("rsync", case_sensitive=False, negate=negate) != self.kls(
+ "rsync", case_sensitive=True, negate=not negate
+ )
assert self.kls("rsync", negate=True) != self.kls("rsync", negate=False)
@@ -203,18 +273,24 @@ class TestEqualityMatch(TestRestriction):
kls = staticmethod(values.EqualityMatch)
@pytest.mark.parametrize("negated", (True, False))
- @pytest.mark.parametrize(("x", "y", "ret"), (
- ("asdf", "asdf", True),
- ("asdf", "fdsa", False),
- (1, 1, True), (1,2, False),
- (list(range(2)), list(range(2)), True),
- (list(range(2)), reversed(list(range(2))), False),
- (True, True, True),
- (True, False, False),
- (False, True, False),
- ))
+ @pytest.mark.parametrize(
+ ("x", "y", "ret"),
+ (
+ ("asdf", "asdf", True),
+ ("asdf", "fdsa", False),
+ (1, 1, True),
+ (1, 2, False),
+ (list(range(2)), list(range(2)), True),
+ (list(range(2)), reversed(list(range(2))), False),
+ (True, True, True),
+ (True, False, False),
+ (False, True, False),
+ ),
+ )
def test_match(self, x, y, ret, negated):
- self.assertMatches(self.kls(x, negate=negated), y, [y]*3, negated=(ret ^ (not negated)))
+ self.assertMatches(
+ self.kls(x, negate=negated), y, [y] * 3, negated=(ret ^ (not negated))
+ )
def test__eq__(self):
for negate in (True, False):
@@ -225,6 +301,7 @@ class TestEqualityMatch(TestRestriction):
def test__hash__(self):
def f(*args, **kwds):
return hash(self.kls(*args, **kwds))
+
assert f("dar") == f("dar")
assert f("dar") == f("dar", negate=False)
assert f("dar", negate=True) != f("dar", negate=False)
@@ -242,51 +319,61 @@ class TestContainmentMatch(TestRestriction):
(list(range(10)), list(range(10)), True),
(list(range(10)), [], False),
(list(range(10)), set(range(10)), True),
- (set(range(10)), list(range(10)), True)):
+ (set(range(10)), list(range(10)), True),
+ ):
for negated in (False, True):
- self.assertMatches(self.kls(x, negate=negated,
- disable_inst_caching=True),
- y, [y]*3, negated=(ret == negated))
+ self.assertMatches(
+ self.kls(x, negate=negated, disable_inst_caching=True),
+ y,
+ [y] * 3,
+ negated=(ret == negated),
+ )
for negated in (False, True):
# intentionally differing for the force_* args; slips in
# an extra data set for testing.
- self.assertMatches(self.kls(range(10), match_all=True, negate=negated),
- list(range(20)), [list(range(10))]*3, negated=negated)
- self.assertNotMatches(self.kls(range(10), match_all=True, negate=negated),
- list(range(5)), [list(range(5))]*3, negated=negated)
-
- self.assertNotMatches(self.kls("asdf"), "fdsa", ["fdas"]*3)
- self.assertMatches(self.kls("asdf"), "asdf", ["asdf"]*3)
- self.assertMatches(self.kls("asdf"), "asdffdsa", ["asdffdsa"]*3)
- self.assertMatches(self.kls("b"), "aba", ["aba"]*3)
+ self.assertMatches(
+ self.kls(range(10), match_all=True, negate=negated),
+ list(range(20)),
+ [list(range(10))] * 3,
+ negated=negated,
+ )
+ self.assertNotMatches(
+ self.kls(range(10), match_all=True, negate=negated),
+ list(range(5)),
+ [list(range(5))] * 3,
+ negated=negated,
+ )
+
+ self.assertNotMatches(self.kls("asdf"), "fdsa", ["fdas"] * 3)
+ self.assertMatches(self.kls("asdf"), "asdf", ["asdf"] * 3)
+ self.assertMatches(self.kls("asdf"), "asdffdsa", ["asdffdsa"] * 3)
+ self.assertMatches(self.kls("b"), "aba", ["aba"] * 3)
@pytest.mark.parametrize("negate", (True, False))
def test__eq__(self, negate):
- assert self.kls(range(100), negate=negate) == \
- self.kls(range(100), negate=negate), \
- f"range(100), negate={negate}"
- assert (self.kls("1", negate=not negate) !=
- self.kls("1", negate=negate))
- assert (
- self.kls((1, 2, 3), match_all=True, negate=negate) ==
- self.kls((1, 2, 3), match_all=True, negate=negate))
- assert (
- self.kls((1, 2), match_all=True, negate=negate) !=
- self.kls((1, 2, 3), match_all=True, negate=negate))
- assert (
- self.kls([1, 2, 3], match_all=False, negate=negate) !=
- self.kls([1, 2, 3], match_all=True, negate=negate))
+ assert self.kls(range(100), negate=negate) == self.kls(
+ range(100), negate=negate
+ ), f"range(100), negate={negate}"
+ assert self.kls("1", negate=not negate) != self.kls("1", negate=negate)
+ assert self.kls((1, 2, 3), match_all=True, negate=negate) == self.kls(
+ (1, 2, 3), match_all=True, negate=negate
+ )
+ assert self.kls((1, 2), match_all=True, negate=negate) != self.kls(
+ (1, 2, 3), match_all=True, negate=negate
+ )
+ assert self.kls([1, 2, 3], match_all=False, negate=negate) != self.kls(
+ [1, 2, 3], match_all=True, negate=negate
+ )
class TestFlatteningRestriction:
-
@pytest.mark.parametrize("negate", (True, False))
def test_basic(self, negate):
inst = values.FlatteningRestriction(
- tuple, values.AnyMatch(values.EqualityMatch(None)),
- negate=negate)
+ tuple, values.AnyMatch(values.EqualityMatch(None)), negate=negate
+ )
assert not negate == inst.match([7, 8, [9, None]])
assert negate == inst.match([7, 8, (9, None)])
# Just check this does not raise
@@ -295,12 +382,11 @@ class TestFlatteningRestriction:
class TestFunctionRestriction:
-
@pytest.mark.parametrize("negate", (True, False))
def test_basic(self, negate):
-
def yes(val):
return True
+
def no(val):
return False
diff --git a/tests/restrictions/utils.py b/tests/restrictions/utils.py
index e3977e769..7d6f40aaa 100644
--- a/tests/restrictions/utils.py
+++ b/tests/restrictions/utils.py
@@ -1,37 +1,38 @@
class TestRestriction:
-
- def _assertMatch(self, obj, args, mode='match', negated=False, msg=None):
+ def _assertMatch(self, obj, args, mode="match", negated=False, msg=None):
if msg is None:
- msg = ''
+ msg = ""
else:
msg = "; msg=" + msg
if negated:
- assert not getattr(obj, mode)(*args), "%r must not match %r, mode=%s, negated=%r%s" % \
- (obj, args, mode, negated, msg)
+ assert not getattr(obj, mode)(
+ *args
+ ), "%r must not match %r, mode=%s, negated=%r%s" % (
+ obj,
+ args,
+ mode,
+ negated,
+ msg,
+ )
else:
- assert getattr(obj, mode)(*args), \
- "%r must match %r, mode=%s, not negated%s" % \
- (obj, args, mode, msg)
+ assert getattr(obj, mode)(
+ *args
+ ), "%r must match %r, mode=%s, not negated%s" % (obj, args, mode, msg)
- def assertMatch(self, obj, target, mode='match', negated=False, msg=None):
+ def assertMatch(self, obj, target, mode="match", negated=False, msg=None):
return self._assertMatch(obj, (target,), mode=mode, negated=negated, msg=msg)
- def assertNotMatch(self, obj, target, mode='match', negated=False,
- msg=None):
- return self.assertMatch(obj, target, mode=mode, negated=not negated,
- msg=msg)
-
+ def assertNotMatch(self, obj, target, mode="match", negated=False, msg=None):
+ return self.assertMatch(obj, target, mode=mode, negated=not negated, msg=msg)
- def assertMatches(self, obj, target, force_args=None, negated=False,
- msg=None):
+ def assertMatches(self, obj, target, force_args=None, negated=False, msg=None):
if force_args is None:
force_args = (target,)
self.assertMatch(obj, target, negated=negated, msg=msg)
self.assertForceTrue(obj, force_args, negated=negated, msg=msg)
self.assertNotForceFalse(obj, force_args, negated=negated, msg=msg)
- def assertNotMatches(self, obj, target, force_args=None, negated=False,
- msg=None):
+ def assertNotMatches(self, obj, target, force_args=None, negated=False, msg=None):
if force_args is None:
force_args = (target,)
self.assertNotMatch(obj, target, negated=negated, msg=msg)
@@ -39,17 +40,21 @@ class TestRestriction:
self.assertForceFalse(obj, force_args, negated=negated, msg=msg)
def assertForceTrue(self, obj, target, negated=False, msg=None):
- return self._assertMatch(obj, target, mode='force_True',
- negated=negated, msg=msg)
+ return self._assertMatch(
+ obj, target, mode="force_True", negated=negated, msg=msg
+ )
def assertNotForceTrue(self, obj, target, negated=False, msg=None):
- return self._assertMatch(obj, target, mode='force_True',
- negated=not negated, msg=msg)
+ return self._assertMatch(
+ obj, target, mode="force_True", negated=not negated, msg=msg
+ )
def assertForceFalse(self, obj, target, negated=False, msg=None):
- return self._assertMatch(obj, target, mode='force_False',
- negated=negated, msg=msg)
+ return self._assertMatch(
+ obj, target, mode="force_False", negated=negated, msg=msg
+ )
def assertNotForceFalse(self, obj, target, negated=False, msg=None):
- return self._assertMatch(obj, target, mode='force_False',
- negated=not negated, msg=msg)
+ return self._assertMatch(
+ obj, target, mode="force_False", negated=not negated, msg=msg
+ )
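
tests/restrictions/utils.py is where the removal of backslash continuations is most visible: a '\' line continuation is replaced by wrapping inside parentheses, and when the only parentheses available belong to the asserted call, the call's own argument list is what gets split. Restating the first helper's assert:

    # before: backslash continuations carry the message onto the next lines
    assert getattr(obj, mode)(*args), \
        "%r must match %r, mode=%s, not negated%s" % \
        (obj, args, mode, msg)

    # after: no backslashes; the call's parentheses carry the wrap
    assert getattr(obj, mode)(
        *args
    ), "%r must match %r, mode=%s, not negated%s" % (obj, args, mode, msg)
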
diff --git a/tests/scripts/test_patom.py b/tests/scripts/test_patom.py
index 423cfcaf4..499030de0 100644
--- a/tests/scripts/test_patom.py
+++ b/tests/scripts/test_patom.py
@@ -3,56 +3,80 @@ import pytest
from pkgcore.scripts import patom
from pkgcore.test.scripts.helpers import ArgParseMixin
+
class TestFormat(ArgParseMixin):
_argparser = patom.argparser
def test_empty(self):
- self.assertOut([], '--format', '%{PACKAGE}')
+ self.assertOut([], "--format", "%{PACKAGE}")
def test_unversioned(self):
- self.assertOut(['spork'], '--format', '%{PACKAGE}', 'dev-utils/spork')
+ self.assertOut(["spork"], "--format", "%{PACKAGE}", "dev-utils/spork")
def test_versioned(self):
- self.assertOut(['spork'], '--format', '%{PACKAGE}', 'dev-utils/spork-1')
+ self.assertOut(["spork"], "--format", "%{PACKAGE}", "dev-utils/spork-1")
def test_versioned_op(self):
- self.assertOut(['spork'], '--format', '%{PACKAGE}', '=dev-utils/spork-1')
+ self.assertOut(["spork"], "--format", "%{PACKAGE}", "=dev-utils/spork-1")
def test_unversioned_op(self):
- self.assertErr(["malformed atom: '=dev-utils/spork': invalid package atom: '=dev-utils/spork'"],
- '--format', '%{PACKAGE}', '=dev-utils/spork')
+ self.assertErr(
+ [
+ "malformed atom: '=dev-utils/spork': invalid package atom: '=dev-utils/spork'"
+ ],
+ "--format",
+ "%{PACKAGE}",
+ "=dev-utils/spork",
+ )
def test_unknown_key(self):
- self.assertErr(["bad format: '%{UNKNOWN}'"],
- '--format', '%{UNKNOWN}', 'dev-utils/spork')
-
- @pytest.mark.parametrize(('key', 'expected'), (
- pytest.param('%{CATEGORY}', 'dev-utils', id='category'),
- pytest.param('%{PACKAGE}', 'spork', id='package'),
- pytest.param('%{VERSION}', '1.2.3_p20221014_p1', id='version'),
- pytest.param('%{FULLVER}', '1.2.3_p20221014_p1-r12', id='fullver'),
- pytest.param('%{REVISION}', '12', id='revision'),
- pytest.param('%{SLOT}', '15', id='slot'),
- pytest.param('%{SUBSLOT}', '2', id='subslot'),
- pytest.param('%{REPO_ID}', 'gentoo', id='repo_id'),
- pytest.param('%{OP}', '>=', id='op'),
- ))
+ self.assertErr(
+ ["bad format: '%{UNKNOWN}'"], "--format", "%{UNKNOWN}", "dev-utils/spork"
+ )
+
+ @pytest.mark.parametrize(
+ ("key", "expected"),
+ (
+ pytest.param("%{CATEGORY}", "dev-utils", id="category"),
+ pytest.param("%{PACKAGE}", "spork", id="package"),
+ pytest.param("%{VERSION}", "1.2.3_p20221014_p1", id="version"),
+ pytest.param("%{FULLVER}", "1.2.3_p20221014_p1-r12", id="fullver"),
+ pytest.param("%{REVISION}", "12", id="revision"),
+ pytest.param("%{SLOT}", "15", id="slot"),
+ pytest.param("%{SUBSLOT}", "2", id="subslot"),
+ pytest.param("%{REPO_ID}", "gentoo", id="repo_id"),
+ pytest.param("%{OP}", ">=", id="op"),
+ ),
+ )
def test_atom_keys(self, key, expected):
- self.assertOut([expected], '--format', key, '!!>=dev-utils/spork-1.2.3_p20221014_p1-r12:15/2::gentoo[use]')
+ self.assertOut(
+ [expected],
+ "--format",
+ key,
+ "!!>=dev-utils/spork-1.2.3_p20221014_p1-r12:15/2::gentoo[use]",
+ )
def test_unset(self):
- self.assertOut(['<unset>'], '--format', '%{VERSION}', 'dev-utils/spork')
- self.assertOut([''], '--format', '%[VERSION]', 'dev-utils/spork')
+ self.assertOut(["<unset>"], "--format", "%{VERSION}", "dev-utils/spork")
+ self.assertOut([""], "--format", "%[VERSION]", "dev-utils/spork")
def test_other_text(self):
- self.assertOut(['repo/dev-utils/spork.ebuild'], '--format', 'repo/%{CATEGORY}/%{PACKAGE}.ebuild', 'dev-utils/spork-2.5')
-
- @pytest.mark.parametrize('format', (
- '%{CATEGORY]',
- '%[CATEGORY}',
- '%{}',
- '%[]',
- ))
+ self.assertOut(
+ ["repo/dev-utils/spork.ebuild"],
+ "--format",
+ "repo/%{CATEGORY}/%{PACKAGE}.ebuild",
+ "dev-utils/spork-2.5",
+ )
+
+ @pytest.mark.parametrize(
+ "format",
+ (
+ "%{CATEGORY]",
+ "%[CATEGORY}",
+ "%{}",
+ "%[]",
+ ),
+ )
def test_ignore_format(self, format):
- self.assertOut([format], '--format', format, 'dev-utils/spork-2.5')
+ self.assertOut([format], "--format", format, "dev-utils/spork-2.5")
diff --git a/tests/scripts/test_pclean.py b/tests/scripts/test_pclean.py
index 4dfe942ac..2b13eb01c 100644
--- a/tests/scripts/test_pclean.py
+++ b/tests/scripts/test_pclean.py
@@ -9,4 +9,4 @@ class TestCommandline(ArgParseMixin):
suppress_domain = True
def test_parser(self):
- self.assertError('the following arguments are required: subcommand')
+ self.assertError("the following arguments are required: subcommand")
diff --git a/tests/scripts/test_pclonecache.py b/tests/scripts/test_pclonecache.py
index fb4722231..0509a8f8a 100644
--- a/tests/scripts/test_pclonecache.py
+++ b/tests/scripts/test_pclonecache.py
@@ -6,7 +6,7 @@ from pkgcore.test.scripts.helpers import ArgParseMixin
class Cache:
- pkgcore_config_type = ConfigHint(typename='cache')
+ pkgcore_config_type = ConfigHint(typename="cache")
def __init__(self, readonly=True):
self.readonly = self.frozen = readonly
@@ -17,18 +17,28 @@ class TestCommandline(ArgParseMixin):
_argparser = pclonecache.argparser
def test_parser(self):
+ self.assertError("the following arguments are required: target", "spork")
self.assertError(
- 'the following arguments are required: target',
- 'spork')
- self.assertError(
- "argument source: couldn't find cache 'spork'",
- 'spork', 'spork2')
+ "argument source: couldn't find cache 'spork'", "spork", "spork2"
+ )
self.assertError(
"argument target: couldn't find cache 'spork2' (available: spork)",
- 'spork', 'spork2',
- spork=basics.HardCodedConfigSection({'class': Cache}))
+ "spork",
+ "spork2",
+ spork=basics.HardCodedConfigSection({"class": Cache}),
+ )
self.assertError(
"argument target: cache 'spork2' is readonly",
- 'spork', 'spork2',
- spork=basics.HardCodedConfigSection({'class': Cache,}),
- spork2=basics.HardCodedConfigSection({'class': Cache,}))
+ "spork",
+ "spork2",
+ spork=basics.HardCodedConfigSection(
+ {
+ "class": Cache,
+ }
+ ),
+ spork2=basics.HardCodedConfigSection(
+ {
+ "class": Cache,
+ }
+ ),
+ )
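
The final hunk above is the clearest illustration of the trailing-comma rule applied to literals: the original source wrote the section as {'class': Cache,} with a trailing comma, so the dict stays exploded across several lines even though it would fit on one, while the comma-free {'class': Cache} passed to the earlier assertError call is collapsed onto a single line. A sketch of the two outcomes (assignment form, assuming the same Cache and basics names from this test file):

    # trailing comma present: the dict is kept exploded
    spork = basics.HardCodedConfigSection(
        {
            "class": Cache,
        }
    )

    # no trailing comma: collapsed to a single line
    spork2 = basics.HardCodedConfigSection({"class": Cache})
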
diff --git a/tests/scripts/test_pconfig.py b/tests/scripts/test_pconfig.py
index 7b3a1586d..ca8e98fe6 100644
--- a/tests/scripts/test_pconfig.py
+++ b/tests/scripts/test_pconfig.py
@@ -5,32 +5,45 @@ from pkgcore.scripts import pconfig
from pkgcore.test.scripts.helpers import ArgParseMixin
-@configurable({'reff': 'ref:spork'})
+@configurable({"reff": "ref:spork"})
def spork(reff):
"""Test thing."""
+
def foon():
pass
-@configurable(typename='spork')
+
+@configurable(typename="spork")
def pseudospork():
pass
-@configurable(typename='multi', allow_unknowns=True, types={
- 'string': 'str', 'boolean': 'bool', 'list': 'list',
- 'callable': 'callable', 'lazy_ref': 'lazy_ref:spork',
- 'ref': 'ref:spork', 'lazy_refs': 'lazy_refs:spork',
- 'refs': 'refs:spork',
- })
+
+@configurable(
+ typename="multi",
+ allow_unknowns=True,
+ types={
+ "string": "str",
+ "boolean": "bool",
+ "list": "list",
+ "callable": "callable",
+ "lazy_ref": "lazy_ref:spork",
+ "ref": "ref:spork",
+ "lazy_refs": "lazy_refs:spork",
+ "refs": "refs:spork",
+ },
+)
def multi(**kwargs):
"""Just something taking all kinds of args."""
+
# "in positional but not in required" is an error.
-@configurable(positional=['foon'])
+@configurable(positional=["foon"])
def broken_type(*args):
"""Noop."""
-@configurable(types={'inc': 'list'}, allow_unknowns=True)
+
+@configurable(types={"inc": "list"}, allow_unknowns=True)
def increment(inc=()):
"""Noop."""
@@ -40,44 +53,44 @@ class DescribeClassTest(ArgParseMixin):
_argparser = pconfig.describe_class
def test_parser(self, capsys):
- self.assertError(
- 'the following arguments are required: target_class')
+ self.assertError("the following arguments are required: target_class")
module = "module 'pkgcore'"
with pytest.raises(SystemExit):
- self.parse('pkgcore.spork')
+ self.parse("pkgcore.spork")
captured = capsys.readouterr()
assert captured.err.strip() == (
- f"argument target_class: Failed importing target 'pkgcore.spork': '{module} has no attribute 'spork''")
+ f"argument target_class: Failed importing target 'pkgcore.spork': '{module} has no attribute 'spork''"
+ )
with pytest.raises(SystemExit):
- self.parse('pkgcore.a', 'pkgcore.b')
+ self.parse("pkgcore.a", "pkgcore.b")
captured = capsys.readouterr()
assert captured.err.strip() == (
- f"argument target_class: Failed importing target 'pkgcore.a': '{module} has no attribute 'a''")
+ f"argument target_class: Failed importing target 'pkgcore.a': '{module} has no attribute 'a''"
+ )
- self.parse('pkgcore.scripts')
+ self.parse("pkgcore.scripts")
def test_describe_class(self):
self.assertOut(
- ['typename is spork',
- 'Test thing.',
- '',
- 'reff: ref:spork (required)'],
- 'tests.scripts.test_pconfig.spork')
+ ["typename is spork", "Test thing.", "", "reff: ref:spork (required)"],
+ "tests.scripts.test_pconfig.spork",
+ )
self.assertOut(
- ['typename is increment',
- 'Noop.',
- 'values not listed are handled as strings',
- '',
- 'inc: list'],
- 'tests.scripts.test_pconfig.increment')
+ [
+ "typename is increment",
+ "Noop.",
+ "values not listed are handled as strings",
+ "",
+ "inc: list",
+ ],
+ "tests.scripts.test_pconfig.increment",
+ )
def test_broken_type(self):
- self.assertErr(
- ['Not a valid type!'],
- 'tests.scripts.test_pconfig.broken_type')
+ self.assertErr(["Not a valid type!"], "tests.scripts.test_pconfig.broken_type")
class TestClasses(ArgParseMixin):
@@ -87,39 +100,49 @@ class TestClasses(ArgParseMixin):
def test_classes(self):
sections = []
for i in range(10):
- @configurable(typename='spork')
+
+ @configurable(typename="spork")
def noop():
"""noop"""
+
noop.__name__ = str(i)
- sections.append(basics.HardCodedConfigSection({'class': noop}))
+ sections.append(basics.HardCodedConfigSection({"class": noop}))
+ self.assertOut(
+ ["tests.scripts.test_pconfig.foon"],
+ spork=basics.HardCodedConfigSection({"class": foon}),
+ )
self.assertOut(
- ['tests.scripts.test_pconfig.foon'],
- spork=basics.HardCodedConfigSection({'class': foon}))
- self.assertOut([
- 'tests.scripts.test_pconfig.0',
- 'tests.scripts.test_pconfig.1',
- 'tests.scripts.test_pconfig.2',
- 'tests.scripts.test_pconfig.3',
- 'tests.scripts.test_pconfig.4',
- 'tests.scripts.test_pconfig.5',
- 'tests.scripts.test_pconfig.multi',
- 'tests.scripts.test_pconfig.pseudospork',
- 'tests.scripts.test_pconfig.spork',
- ],
- bork=basics.HardCodedConfigSection({
- 'class': pseudospork, 'bork': True, 'inherit-only': True}),
- multi=basics.HardCodedConfigSection({
- 'class': multi,
- 'ref': sections[0],
- 'refs': sections[1:3],
- 'lazy_ref': sections[3],
- 'lazy_refs': sections[4:6],
- 'random': 'unknown',
- }),
- spork=basics.HardCodedConfigSection({
- 'class': spork,
- 'reff': basics.HardCodedConfigSection({
- 'class': pseudospork})}))
+ [
+ "tests.scripts.test_pconfig.0",
+ "tests.scripts.test_pconfig.1",
+ "tests.scripts.test_pconfig.2",
+ "tests.scripts.test_pconfig.3",
+ "tests.scripts.test_pconfig.4",
+ "tests.scripts.test_pconfig.5",
+ "tests.scripts.test_pconfig.multi",
+ "tests.scripts.test_pconfig.pseudospork",
+ "tests.scripts.test_pconfig.spork",
+ ],
+ bork=basics.HardCodedConfigSection(
+ {"class": pseudospork, "bork": True, "inherit-only": True}
+ ),
+ multi=basics.HardCodedConfigSection(
+ {
+ "class": multi,
+ "ref": sections[0],
+ "refs": sections[1:3],
+ "lazy_ref": sections[3],
+ "lazy_refs": sections[4:6],
+ "random": "unknown",
+ }
+ ),
+ spork=basics.HardCodedConfigSection(
+ {
+ "class": spork,
+ "reff": basics.HardCodedConfigSection({"class": pseudospork}),
+ }
+ ),
+ )
class TestDump(ArgParseMixin):
@@ -128,99 +151,112 @@ class TestDump(ArgParseMixin):
def test_dump(self):
self.assertOut(
- ["'spork' {",
- ' # typename of this section: foon',
- ' class tests.scripts.test_pconfig.foon;',
- '}',
- ''],
- spork=basics.HardCodedConfigSection({'class': foon}))
+ [
+ "'spork' {",
+ " # typename of this section: foon",
+ " class tests.scripts.test_pconfig.foon;",
+ "}",
+ "",
+ ],
+ spork=basics.HardCodedConfigSection({"class": foon}),
+ )
def test_default(self):
self.assertOut(
- ["'spork' {",
- ' # typename of this section: foon',
- ' class tests.scripts.test_pconfig.foon;',
- ' default true;',
- '}',
- ''],
- spork=basics.HardCodedConfigSection({'class': foon,
- 'default': True}))
+ [
+ "'spork' {",
+ " # typename of this section: foon",
+ " class tests.scripts.test_pconfig.foon;",
+ " default true;",
+ "}",
+ "",
+ ],
+ spork=basics.HardCodedConfigSection({"class": foon, "default": True}),
+ )
def test_uncollapsable(self):
self.assertOut(
- '',
- spork=basics.HardCodedConfigSection({
- 'class': foon, 'broken': True, 'inherit-only': True}))
+ "",
+ spork=basics.HardCodedConfigSection(
+ {"class": foon, "broken": True, "inherit-only": True}
+ ),
+ )
def test_serialise(self):
- nest = basics.HardCodedConfigSection({'class': pseudospork})
+ nest = basics.HardCodedConfigSection({"class": pseudospork})
self.assertOut(
- ["'spork' {",
- ' # typename of this section: multi',
- ' class tests.scripts.test_pconfig.multi;',
- ' # type: bool',
- ' boolean True;',
- ' # type: callable',
- ' callable tests.scripts.test_pconfig.multi;',
- ' # type: lazy_ref:spork',
- ' lazy_ref {',
- ' # typename of this section: spork',
- ' class tests.scripts.test_pconfig.pseudospork;',
- ' };',
- ' # type: lazy_refs:spork',
- ' lazy_refs {',
- ' # typename of this section: spork',
- ' class tests.scripts.test_pconfig.pseudospork;',
- ' } {',
- ' # typename of this section: spork',
- ' class tests.scripts.test_pconfig.pseudospork;',
- ' };',
- ' # type: list',
- " list 'a' 'b\\' \"c';",
- ' # type: ref:spork',
- ' ref {',
- ' # typename of this section: spork',
- ' class tests.scripts.test_pconfig.pseudospork;',
- ' };',
- ' # type: refs:spork',
- ' refs {',
- ' # typename of this section: spork',
- ' class tests.scripts.test_pconfig.pseudospork;',
- ' } {',
- ' # typename of this section: spork',
- ' class tests.scripts.test_pconfig.pseudospork;',
- ' };',
- ' # type: str',
- ' string \'it is a "stringy" \\\'string\\\'\';',
- ' # type: str',
- " unknown 'random';",
- '}',
- ''],
- spork=basics.HardCodedConfigSection({
- 'class': multi,
- 'string': 'it is a "stringy" \'string\'',
- 'boolean': True,
- 'list': ['a', 'b\' "c'],
- 'callable': multi,
- 'ref': nest,
- 'lazy_ref': nest,
- 'refs': [nest, nest],
- 'lazy_refs': [nest, nest],
- 'unknown': 'random',
- }))
+ [
+ "'spork' {",
+ " # typename of this section: multi",
+ " class tests.scripts.test_pconfig.multi;",
+ " # type: bool",
+ " boolean True;",
+ " # type: callable",
+ " callable tests.scripts.test_pconfig.multi;",
+ " # type: lazy_ref:spork",
+ " lazy_ref {",
+ " # typename of this section: spork",
+ " class tests.scripts.test_pconfig.pseudospork;",
+ " };",
+ " # type: lazy_refs:spork",
+ " lazy_refs {",
+ " # typename of this section: spork",
+ " class tests.scripts.test_pconfig.pseudospork;",
+ " } {",
+ " # typename of this section: spork",
+ " class tests.scripts.test_pconfig.pseudospork;",
+ " };",
+ " # type: list",
+ " list 'a' 'b\\' \"c';",
+ " # type: ref:spork",
+ " ref {",
+ " # typename of this section: spork",
+ " class tests.scripts.test_pconfig.pseudospork;",
+ " };",
+ " # type: refs:spork",
+ " refs {",
+ " # typename of this section: spork",
+ " class tests.scripts.test_pconfig.pseudospork;",
+ " } {",
+ " # typename of this section: spork",
+ " class tests.scripts.test_pconfig.pseudospork;",
+ " };",
+ " # type: str",
+ " string 'it is a \"stringy\" \\'string\\'';",
+ " # type: str",
+ " unknown 'random';",
+ "}",
+ "",
+ ],
+ spork=basics.HardCodedConfigSection(
+ {
+ "class": multi,
+ "string": "it is a \"stringy\" 'string'",
+ "boolean": True,
+ "list": ["a", "b' \"c"],
+ "callable": multi,
+ "ref": nest,
+ "lazy_ref": nest,
+ "refs": [nest, nest],
+ "lazy_refs": [nest, nest],
+ "unknown": "random",
+ }
+ ),
+ )
def test_one_typename(self):
self.assertOut(
- ["'spork' {",
- ' # typename of this section: spork',
- ' class tests.scripts.test_pconfig.pseudospork;',
- '}',
- '',
- ],
- 'spork',
- spork=basics.HardCodedConfigSection({'class': pseudospork}),
- foon=basics.HardCodedConfigSection({'class': foon}),
- )
+ [
+ "'spork' {",
+ " # typename of this section: spork",
+ " class tests.scripts.test_pconfig.pseudospork;",
+ "}",
+ "",
+ ],
+ "spork",
+ spork=basics.HardCodedConfigSection({"class": pseudospork}),
+ foon=basics.HardCodedConfigSection({"class": foon}),
+ )
class TestUncollapsable(ArgParseMixin):
@@ -229,21 +265,19 @@ class TestUncollapsable(ArgParseMixin):
def test_uncollapsable(self):
self.assertOut(
- ["section foon:",
- " Collapsing section named 'foon'",
- " cannot collapse inherit-only section"
- "",
- "",
- "section spork:",
- " Collapsing section named 'spork'",
- " type tests.scripts.test_pconfig.spork needs settings for 'reff'"
- "",
- "",
+ [
+ "section foon:",
+ " Collapsing section named 'foon'",
+ " cannot collapse inherit-only section" "",
+ "",
+ "section spork:",
+ " Collapsing section named 'spork'",
+ " type tests.scripts.test_pconfig.spork needs settings for 'reff'" "",
+ "",
],
- spork=basics.HardCodedConfigSection({'class': spork}),
- foon=basics.HardCodedConfigSection({'class': spork,
- 'inherit-only': True}),
- )
+ spork=basics.HardCodedConfigSection({"class": spork}),
+ foon=basics.HardCodedConfigSection({"class": spork, "inherit-only": True}),
+ )
class TestConfigurables(ArgParseMixin):
@@ -251,27 +285,26 @@ class TestConfigurables(ArgParseMixin):
_argparser = pconfig.configurables
def test_configurables(self):
- self.assertError(
- 'unrecognized arguments: bar',
- 'foo', 'bar')
+ self.assertError("unrecognized arguments: bar", "foo", "bar")
class WeirdSection(basics.ConfigSection):
-
def __contains__(self, key):
- return key == 'sects'
+ return key == "sects"
def keys(self):
- return ['sects']
+ return ["sects"]
def render_value(self, central, name, arg_type):
- if name != 'sects':
+ if name != "sects":
raise KeyError(name)
- if arg_type != 'repr':
- raise errors.ConfigurationError(f'{arg_type!r} unsupported')
- return 'refs', [
- ['spork', basics.HardCodedConfigSection({'foo': 'bar'})],
- None, None]
+ if arg_type != "repr":
+ raise errors.ConfigurationError(f"{arg_type!r} unsupported")
+ return "refs", [
+ ["spork", basics.HardCodedConfigSection({"foo": "bar"})],
+ None,
+ None,
+ ]
class TestDumpUncollapsed(ArgParseMixin):
@@ -280,65 +313,68 @@ class TestDumpUncollapsed(ArgParseMixin):
def test_dump_uncollapsed(self):
self.assertOut(
- ['# Warning:',
- '# Do not copy this output to a configuration file directly,',
- '# because the types you see here are only guesses.',
- '# A value used as "list" in the collapsed config will often',
- '# show up as "string" here and may need to be converted',
- '# (for example from space-separated to comma-separated)',
- '# to work in a config file with a different format.',
- '',
- '********',
- 'Source 1',
- '********',
- '',
- 'foon',
- '====',
- '# type: callable',
- "'class' = tests.scripts.test_pconfigspork",
- '# type: bool',
- "'inherit-only' = True",
- '# type: refs',
- "'refs' = ",
- ' nested section 1',
- ' ================',
- ' # type: str',
- " 'crystal' = 'clear'",
- '',
- ' nested section 2',
- ' ================',
- ' # type: refs',
- " 'sects.prepend' = ",
- ' nested section 1',
- ' ================',
- " named section 'spork'",
- '',
- ' nested section 2',
- ' ================',
- ' # type: str',
- " 'foo' = 'bar'",
- '',
- '',
- '# type: list',
- "'seq' = 'a' 'b c'",
- '# type: str',
- "'str' = 'quote \\'\" unquote'",
- '',
- 'spork',
- '=====',
- '# type: callable',
- "'class' = tests.scripts.test_pconfigspork",
- '',
- ],
- spork=basics.HardCodedConfigSection({'class': spork}),
- foon=basics.HardCodedConfigSection({
- 'class': spork,
- 'inherit-only': True,
- 'refs': [
- basics.HardCodedConfigSection({'crystal': 'clear'}),
+ [
+ "# Warning:",
+ "# Do not copy this output to a configuration file directly,",
+ "# because the types you see here are only guesses.",
+ '# A value used as "list" in the collapsed config will often',
+ '# show up as "string" here and may need to be converted',
+ "# (for example from space-separated to comma-separated)",
+ "# to work in a config file with a different format.",
+ "",
+ "********",
+ "Source 1",
+ "********",
+ "",
+ "foon",
+ "====",
+ "# type: callable",
+ "'class' = tests.scripts.test_pconfigspork",
+ "# type: bool",
+ "'inherit-only' = True",
+ "# type: refs",
+ "'refs' = ",
+ " nested section 1",
+ " ================",
+ " # type: str",
+ " 'crystal' = 'clear'",
+ "",
+ " nested section 2",
+ " ================",
+ " # type: refs",
+ " 'sects.prepend' = ",
+ " nested section 1",
+ " ================",
+ " named section 'spork'",
+ "",
+ " nested section 2",
+ " ================",
+ " # type: str",
+ " 'foo' = 'bar'",
+ "",
+ "",
+ "# type: list",
+ "'seq' = 'a' 'b c'",
+ "# type: str",
+ "'str' = 'quote \\'\" unquote'",
+ "",
+ "spork",
+ "=====",
+ "# type: callable",
+ "'class' = tests.scripts.test_pconfigspork",
+ "",
+ ],
+ spork=basics.HardCodedConfigSection({"class": spork}),
+ foon=basics.HardCodedConfigSection(
+ {
+ "class": spork,
+ "inherit-only": True,
+ "refs": [
+ basics.HardCodedConfigSection({"crystal": "clear"}),
WeirdSection(),
- ],
- 'seq': ['a', 'b c'],
- 'str': 'quote \'" unquote',
- }),
- )
+ ],
+ "seq": ["a", "b c"],
+ "str": "quote '\" unquote",
+ }
+ ),
+ )
diff --git a/tests/scripts/test_pebuild.py b/tests/scripts/test_pebuild.py
index ff84440af..afa097582 100644
--- a/tests/scripts/test_pebuild.py
+++ b/tests/scripts/test_pebuild.py
@@ -7,31 +7,33 @@ from pkgcore.test.scripts.helpers import ArgParseMixin
class FakeDomain:
- pkgcore_config_type = ConfigHint({'repo': 'ref:repo'}, typename='domain')
+ pkgcore_config_type = ConfigHint({"repo": "ref:repo"}, typename="domain")
def __init__(self, repo):
object.__init__(self)
self.ebuild_repos_unfiltered = repo
-@configurable(typename='repo')
+@configurable(typename="repo")
def fake_repo():
pkgs = [
- FakePkg('app-arch/bzip2-1.0.1-r1', slot='0'),
- FakePkg('app-arch/bzip2-1.0.5-r2', slot='0'),
- FakePkg('sys-apps/coreutils-8.25', slot='0'),
- FakePkg('x11-libs/gtk+-2.24', slot='2'),
- FakePkg('x11-libs/gtk+-3.22', slot='3'),
+ FakePkg("app-arch/bzip2-1.0.1-r1", slot="0"),
+ FakePkg("app-arch/bzip2-1.0.5-r2", slot="0"),
+ FakePkg("sys-apps/coreutils-8.25", slot="0"),
+ FakePkg("x11-libs/gtk+-2.24", slot="2"),
+ FakePkg("x11-libs/gtk+-3.22", slot="3"),
]
- repo = FakeRepo(repo_id='gentoo', pkgs=pkgs)
+ repo = FakeRepo(repo_id="gentoo", pkgs=pkgs)
return repo
-domain_config = basics.HardCodedConfigSection({
- 'class': FakeDomain,
- 'repo': basics.HardCodedConfigSection({'class': fake_repo}),
- 'default': True,
-})
+domain_config = basics.HardCodedConfigSection(
+ {
+ "class": FakeDomain,
+ "repo": basics.HardCodedConfigSection({"class": fake_repo}),
+ "default": True,
+ }
+)
class TestCommandline(ArgParseMixin):
@@ -39,9 +41,11 @@ class TestCommandline(ArgParseMixin):
_argparser = pebuild.argparser
def test_parser(self):
- self.assertError('the following arguments are required: target, phase')
- self.assertError('the following arguments are required: phase', 'dev-util/diffball')
+ self.assertError("the following arguments are required: target, phase")
+ self.assertError(
+ "the following arguments are required: phase", "dev-util/diffball"
+ )
# working initialization
- config = self.parse('sys-apps/coreutils', 'bar', 'baz', domain=domain_config)
- assert config.phase == ['bar', 'baz']
+ config = self.parse("sys-apps/coreutils", "bar", "baz", domain=domain_config)
+ assert config.phase == ["bar", "baz"]
diff --git a/tests/scripts/test_pmaint.py b/tests/scripts/test_pmaint.py
index 43814caee..63846456d 100644
--- a/tests/scripts/test_pmaint.py
+++ b/tests/scripts/test_pmaint.py
@@ -16,19 +16,19 @@ Options = AttrAccessible
class fake_operations(operations):
-
def _cmd_implementation_install(self, pkg, observer):
self.repo.installed.append(pkg)
- return derive_op('add_data', install, self.repo, pkg, observer)
+ return derive_op("add_data", install, self.repo, pkg, observer)
def _cmd_implementation_uninstall(self, pkg, observer):
self.repo.uninstalled.append(pkg)
- return derive_op('remove_data', uninstall, self.repo, pkg, observer)
+ return derive_op("remove_data", uninstall, self.repo, pkg, observer)
def _cmd_implementation_replace(self, oldpkg, newpkg, observer):
self.repo.replaced.append((oldpkg, newpkg))
- return derive_op(('add_data', 'remove_data'),
- replace, self.repo, oldpkg, newpkg, observer)
+ return derive_op(
+ ("add_data", "remove_data"), replace, self.repo, oldpkg, newpkg, observer
+ )
class FakeRepo(util.SimpleTree):
@@ -47,16 +47,17 @@ class FakeRepo(util.SimpleTree):
def make_repo_config(repo_data, livefs=False, frozen=False, repo_id=None):
def repo():
return FakeRepo(repo_data, livefs=livefs, frozen=frozen, repo_id=repo_id)
- repo.pkgcore_config_type = ConfigHint(typename='repo')
- return basics.HardCodedConfigSection({'class':repo})
+
+ repo.pkgcore_config_type = ConfigHint(typename="repo")
+ return basics.HardCodedConfigSection({"class": repo})
class FakeDomain:
- pkgcore_config_type = ConfigHint({'repos': 'refs:repo',
- 'binpkg': 'refs:repo',
- 'vdb': 'refs:repo'},
- typename='domain')
+ pkgcore_config_type = ConfigHint(
+ {"repos": "refs:repo", "binpkg": "refs:repo", "vdb": "refs:repo"},
+ typename="domain",
+ )
def __init__(self, repos, binpkg, vdb):
super().__init__()
@@ -74,23 +75,24 @@ def make_domain(repo=None, binpkg=None, vdb=None):
binpkg = {}
if vdb is None:
vdb = {}
- repos_config = make_repo_config(repo, repo_id='fake')
- binpkg_config = make_repo_config(binpkg, frozen=False, repo_id='fake_binpkg')
- vdb_config = make_repo_config(vdb, repo_id='fake_vdb')
+ repos_config = make_repo_config(repo, repo_id="fake")
+ binpkg_config = make_repo_config(binpkg, frozen=False, repo_id="fake_binpkg")
+ vdb_config = make_repo_config(vdb, repo_id="fake_vdb")
- return basics.HardCodedConfigSection({
- 'class': FakeDomain,
- 'repos': [repos_config],
- 'binpkg': [binpkg_config],
- 'vdb': [vdb_config],
- 'default': True,
- })
+ return basics.HardCodedConfigSection(
+ {
+ "class": FakeDomain,
+ "repos": [repos_config],
+ "binpkg": [binpkg_config],
+ "vdb": [vdb_config],
+ "default": True,
+ }
+ )
class FakeSyncer(base.Syncer):
-
- def __init__(self, *args, **kwargs):
- self.succeed = kwargs.pop('succeed', True)
+ def __init__(self, *args, **kwargs):
+ self.succeed = kwargs.pop("succeed", True)
super().__init__(*args, **kwargs)
self.synced = False
@@ -101,18 +103,20 @@ class FakeSyncer(base.Syncer):
class SyncableRepo(syncable.tree, util.SimpleTree):
- pkgcore_config_type = ConfigHint(typename='repo_config')
+ pkgcore_config_type = ConfigHint(typename="repo_config")
def __init__(self, succeed=True):
util.SimpleTree.__init__(self, {})
- syncer = FakeSyncer('/fake', 'fake', succeed=succeed)
+ syncer = FakeSyncer("/fake", "fake", succeed=succeed)
syncable.tree.__init__(self, syncer)
-success_section = basics.HardCodedConfigSection({'class': SyncableRepo,
- 'succeed': True})
-failure_section = basics.HardCodedConfigSection({'class': SyncableRepo,
- 'succeed': False})
+success_section = basics.HardCodedConfigSection(
+ {"class": SyncableRepo, "succeed": True}
+)
+failure_section = basics.HardCodedConfigSection(
+ {"class": SyncableRepo, "succeed": False}
+)
class TestSync(ArgParseMixin):
@@ -121,45 +125,60 @@ class TestSync(ArgParseMixin):
def test_parser(self):
values = self.parse(repo=success_section)
- assert ['repo'] == [x[0] for x in values.repos]
- values = self.parse('repo', repo=success_section)
- assert ['repo'] == [x[0] for x in values.repos]
+ assert ["repo"] == [x[0] for x in values.repos]
+ values = self.parse("repo", repo=success_section)
+ assert ["repo"] == [x[0] for x in values.repos]
def test_sync(self):
- config = self.assertOut([
- "*** syncing myrepo",
- "*** synced myrepo",
- ], myrepo=success_section)
- assert config.repo_config['myrepo']._syncer.synced
- self.assertOut([
- "*** syncing myrepo",
- "!!! failed syncing myrepo",
- ], myrepo=failure_section)
- self.assertOutAndErr([
- "*** syncing goodrepo",
- "*** synced goodrepo",
- "*** syncing badrepo",
- "!!! failed syncing badrepo",
- "",
- "*** sync results:",
- "*** synced: goodrepo",
- "!!! failed: badrepo",
- ], [], 'goodrepo', 'badrepo', goodrepo=success_section, badrepo=failure_section)
+ config = self.assertOut(
+ [
+ "*** syncing myrepo",
+ "*** synced myrepo",
+ ],
+ myrepo=success_section,
+ )
+ assert config.repo_config["myrepo"]._syncer.synced
+ self.assertOut(
+ [
+ "*** syncing myrepo",
+ "!!! failed syncing myrepo",
+ ],
+ myrepo=failure_section,
+ )
+ self.assertOutAndErr(
+ [
+ "*** syncing goodrepo",
+ "*** synced goodrepo",
+ "*** syncing badrepo",
+ "!!! failed syncing badrepo",
+ "",
+ "*** sync results:",
+ "*** synced: goodrepo",
+ "!!! failed: badrepo",
+ ],
+ [],
+ "goodrepo",
+ "badrepo",
+ goodrepo=success_section,
+ badrepo=failure_section,
+ )
class fake_pkg(CPV):
-
def __init__(self, repo, *a, **kw):
CPV.__init__(self, *a, **kw)
- object.__setattr__(self, 'repo', repo)
+ object.__setattr__(self, "repo", repo)
+
def derive_op(name, op, *a, **kw):
if isinstance(name, str):
name = [name]
- name = ['finalize_data'] + list(name)
+ name = ["finalize_data"] + list(name)
+
class new_op(op):
def f(*a, **kw):
return True
+
for x in name:
locals()[x] = f
del f, x
@@ -179,48 +198,71 @@ class TestCopy(ArgParseMixin):
def test_normal_function(self):
ret, config, out = self.execute_main(
- 'fake_binpkg', '--source-repo', 'fake_vdb',
- '*',
- domain=make_domain(vdb={'sys-apps':{'portage':['2.1', '2.3']}}),
+ "fake_binpkg",
+ "--source-repo",
+ "fake_vdb",
+ "*",
+ domain=make_domain(vdb={"sys-apps": {"portage": ["2.1", "2.3"]}}),
)
assert ret == 0, "expected non zero exit code"
- assert list(map(str, config.target_repo.installed)) == ['sys-apps/portage-2.1', 'sys-apps/portage-2.3']
- assert config.target_repo.uninstalled == config.target_repo.replaced, \
- "uninstalled should be the same as replaced; empty"
-
- d = {'sys-apps':{'portage':['2.1', '2.2']}}
+ assert list(map(str, config.target_repo.installed)) == [
+ "sys-apps/portage-2.1",
+ "sys-apps/portage-2.3",
+ ]
+ assert (
+ config.target_repo.uninstalled == config.target_repo.replaced
+ ), "uninstalled should be the same as replaced; empty"
+
+ d = {"sys-apps": {"portage": ["2.1", "2.2"]}}
ret, config, out = self.execute_main(
- 'fake_binpkg', '--source-repo', 'fake_vdb',
- '=sys-apps/portage-2.1',
+ "fake_binpkg",
+ "--source-repo",
+ "fake_vdb",
+ "=sys-apps/portage-2.1",
domain=make_domain(binpkg=d, vdb=d),
)
assert ret == 0, "expected non zero exit code"
- assert [list(map(str, x)) for x in config.target_repo.replaced] == [['sys-apps/portage-2.1', 'sys-apps/portage-2.1']]
- assert config.target_repo.uninstalled == config.target_repo.installed, \
- "installed should be the same as uninstalled; empty"
+ assert [list(map(str, x)) for x in config.target_repo.replaced] == [
+ ["sys-apps/portage-2.1", "sys-apps/portage-2.1"]
+ ]
+ assert (
+ config.target_repo.uninstalled == config.target_repo.installed
+ ), "installed should be the same as uninstalled; empty"
def test_ignore_existing(self):
ret, config, out = self.execute_main(
- 'fake_binpkg', '--source-repo', 'fake_vdb',
- '*', '--ignore-existing',
- domain=make_domain(vdb={'sys-apps':{'portage':['2.1', '2.3']}}),
+ "fake_binpkg",
+ "--source-repo",
+ "fake_vdb",
+ "*",
+ "--ignore-existing",
+ domain=make_domain(vdb={"sys-apps": {"portage": ["2.1", "2.3"]}}),
)
assert ret == 0, "expected non zero exit code"
- assert list(map(str, config.target_repo.installed)) == ['sys-apps/portage-2.1', 'sys-apps/portage-2.3']
- assert config.target_repo.uninstalled == config.target_repo.replaced, \
- "uninstalled should be the same as replaced; empty"
+ assert list(map(str, config.target_repo.installed)) == [
+ "sys-apps/portage-2.1",
+ "sys-apps/portage-2.3",
+ ]
+ assert (
+ config.target_repo.uninstalled == config.target_repo.replaced
+ ), "uninstalled should be the same as replaced; empty"
ret, config, out = self.execute_main(
- 'fake_binpkg', '--source-repo', 'fake_vdb',
- '*', '--ignore-existing',
+ "fake_binpkg",
+ "--source-repo",
+ "fake_vdb",
+ "*",
+ "--ignore-existing",
domain=make_domain(
- binpkg={'sys-apps':{'portage':['2.1']}},
- vdb={'sys-apps':{'portage':['2.1', '2.3']}}),
+ binpkg={"sys-apps": {"portage": ["2.1"]}},
+ vdb={"sys-apps": {"portage": ["2.1", "2.3"]}},
+ ),
)
assert ret == 0, "expected non zero exit code"
- assert list(map(str, config.target_repo.installed)) == ['sys-apps/portage-2.3']
- assert config.target_repo.uninstalled == config.target_repo.replaced, \
- "uninstalled should be the same as replaced; empty"
+ assert list(map(str, config.target_repo.installed)) == ["sys-apps/portage-2.3"]
+ assert (
+ config.target_repo.uninstalled == config.target_repo.replaced
+ ), "uninstalled should be the same as replaced; empty"
class TestRegen(ArgParseMixin):
@@ -229,7 +271,6 @@ class TestRegen(ArgParseMixin):
def test_parser(self):
- options = self.parse(
- 'fake', '--threads', '2', domain=make_domain())
+ options = self.parse("fake", "--threads", "2", domain=make_domain())
assert isinstance(options.repos[0], util.SimpleTree)
assert options.threads == 2
diff --git a/tests/scripts/test_pmerge.py b/tests/scripts/test_pmerge.py
index ccac02457..b4cea5912 100644
--- a/tests/scripts/test_pmerge.py
+++ b/tests/scripts/test_pmerge.py
@@ -8,81 +8,75 @@ from pkgcore.util.parserestrict import parse_match
# TODO: make repo objs into configurable fixtures
class TestTargetParsing:
-
def test_base_targets(self):
- repo = SimpleTree({'spork': {'foon': ('1', '1.0.1', '2')}})
- installed_repos = SimpleTree({'foo': {'bar': ('1',)}})
- for cat in ('', 'spork/'):
- a = pmerge.parse_target(parse_match(f'={cat}foon-1'), repo, installed_repos)
+ repo = SimpleTree({"spork": {"foon": ("1", "1.0.1", "2")}})
+ installed_repos = SimpleTree({"foo": {"bar": ("1",)}})
+ for cat in ("", "spork/"):
+ a = pmerge.parse_target(parse_match(f"={cat}foon-1"), repo, installed_repos)
assert len(a) == 1
- assert a[0].key == 'spork/foon'
- assert [x.fullver for x in repo.itermatch(a[0])] == ['1']
- a = pmerge.parse_target(parse_match(f'{cat}foon'), repo, installed_repos)
+ assert a[0].key == "spork/foon"
+ assert [x.fullver for x in repo.itermatch(a[0])] == ["1"]
+ a = pmerge.parse_target(parse_match(f"{cat}foon"), repo, installed_repos)
assert len(a) == 1
- assert a[0].key == 'spork/foon'
- assert (
- sorted(x.fullver for x in repo.itermatch(a[0])) ==
- sorted(['1', '1.0.1', '2'])
+ assert a[0].key == "spork/foon"
+ assert sorted(x.fullver for x in repo.itermatch(a[0])) == sorted(
+ ["1", "1.0.1", "2"]
)
def test_no_matches(self):
- repo = SimpleTree({
- 'spork': {'foon': ('1',)},
- 'spork2': {'foon': ('2',)}})
- installed_repos = SimpleTree({'foo': {'bar': ('1',)}})
+ repo = SimpleTree({"spork": {"foon": ("1",)}, "spork2": {"foon": ("2",)}})
+ installed_repos = SimpleTree({"foo": {"bar": ("1",)}})
with pytest.raises(pmerge.NoMatches):
pmerge.parse_target(parse_match("foo"), repo, installed_repos)
def test_ambiguous(self):
- repo = SimpleTree({
- 'spork': {'foon': ('1',)},
- 'spork2': {'foon': ('2',)}})
- installed_repos = SimpleTree({'foo': {'bar': ('1',)}})
+ repo = SimpleTree({"spork": {"foon": ("1",)}, "spork2": {"foon": ("2",)}})
+ installed_repos = SimpleTree({"foo": {"bar": ("1",)}})
with pytest.raises(pmerge.AmbiguousQuery):
pmerge.parse_target(parse_match("foon"), repo, installed_repos)
def test_globbing(self):
- repo = SimpleTree({
- 'spork': {'foon': ('1',)},
- 'spork2': {'foon': ('2',)}})
- installed_repos = SimpleTree({'foo': {'bar': ('1',)}})
- a = pmerge.parse_target(parse_match('*/foon'), repo, installed_repos)
+ repo = SimpleTree({"spork": {"foon": ("1",)}, "spork2": {"foon": ("2",)}})
+ installed_repos = SimpleTree({"foo": {"bar": ("1",)}})
+ a = pmerge.parse_target(parse_match("*/foon"), repo, installed_repos)
assert len(a) == 2
def test_collision_repo(self):
# test pkg name collision between real and virtual pkgs in a repo, but not installed
# repos, the real pkg will be selected over the virtual
- installed_repos = SimpleTree({'foo': {'baz': ('1',)}})
- repo = SimpleTree({'foo': {'bar': ('1',)}, 'virtual': {'bar': ('1',)}})
+ installed_repos = SimpleTree({"foo": {"baz": ("1",)}})
+ repo = SimpleTree({"foo": {"bar": ("1",)}, "virtual": {"bar": ("1",)}})
a = pmerge.parse_target(parse_match("bar"), repo, installed_repos)
assert len(a) == 1
- assert a[0].key == 'foo/bar'
- assert [x.key for x in repo.match(a[0])] == ['foo/bar']
+ assert a[0].key == "foo/bar"
+ assert [x.key for x in repo.match(a[0])] == ["foo/bar"]
def test_collision_livefs(self):
# test pkg name collision between real and virtual pkgs on livefs
# repos, the real pkg will be selected over the virtual
- installed_repos = SimpleTree({'foo': {'bar': ('1',)}, 'virtual': {'bar': ('0',)}})
- repo = SimpleTree({'foo': {'bar': ('1',)}, 'virtual': {'bar': ('1',)}})
+ installed_repos = SimpleTree(
+ {"foo": {"bar": ("1",)}, "virtual": {"bar": ("0",)}}
+ )
+ repo = SimpleTree({"foo": {"bar": ("1",)}, "virtual": {"bar": ("1",)}})
a = pmerge.parse_target(parse_match("bar"), repo, installed_repos)
assert len(a) == 1
- assert a[0].key == 'foo/bar'
- assert [x.key for x in repo.match(a[0])] == ['foo/bar']
+ assert a[0].key == "foo/bar"
+ assert [x.key for x in repo.match(a[0])] == ["foo/bar"]
def test_collision_slotted(self):
pkgs = [
- FakePkg('foo/bar-1.0.1', slot='0'),
- FakePkg('foo/bar-2.0.2', slot='2'),
- FakePkg('foon/bar-3.4.5', slot='0'),
+ FakePkg("foo/bar-1.0.1", slot="0"),
+ FakePkg("foo/bar-2.0.2", slot="2"),
+ FakePkg("foon/bar-3.4.5", slot="0"),
]
installed_pkgs = [
- FakePkg('foo/bar-1.0.0', slot='0'),
- FakePkg('foo/bar-2.0.1', slot='2'),
+ FakePkg("foo/bar-1.0.0", slot="0"),
+ FakePkg("foo/bar-2.0.1", slot="2"),
]
installed_repos = FakeRepo(pkgs=installed_pkgs)
repo = FakeRepo(pkgs=pkgs)
a = pmerge.parse_target(parse_match("bar:0"), repo, installed_repos)
assert len(a) == 1
- assert a[0].key == 'foo/bar'
- assert a[0].match(atom('foo/bar:0'))
- assert not a[0].match(atom('foo/bar:2'))
+ assert a[0].key == "foo/bar"
+ assert a[0].match(atom("foo/bar:0"))
+ assert not a[0].match(atom("foo/bar:2"))
diff --git a/tests/scripts/test_pquery.py b/tests/scripts/test_pquery.py
index 84b185ef2..b0a41af90 100644
--- a/tests/scripts/test_pquery.py
+++ b/tests/scripts/test_pquery.py
@@ -8,9 +8,9 @@ from pkgcore.test.scripts.helpers import ArgParseMixin
class FakeDomain:
- pkgcore_config_type = ConfigHint({'repos': 'refs:repo',
- 'vdb': 'refs:repo'},
- typename='domain')
+ pkgcore_config_type = ConfigHint(
+ {"repos": "refs:repo", "vdb": "refs:repo"}, typename="domain"
+ )
def __init__(self, repos, vdb):
object.__init__(self)
@@ -18,22 +18,24 @@ class FakeDomain:
self.installed_repos = vdb
-@configurable(typename='repo')
+@configurable(typename="repo")
def fake_repo():
- return util.SimpleTree({'spork': {'foon': ('1', '2')}})
+ return util.SimpleTree({"spork": {"foon": ("1", "2")}})
-@configurable(typename='repo')
+@configurable(typename="repo")
def fake_vdb():
return util.SimpleTree({})
-domain_config = basics.HardCodedConfigSection({
- 'class': FakeDomain,
- 'repos': [basics.HardCodedConfigSection({'class': fake_repo})],
- 'vdb': [basics.HardCodedConfigSection({'class': fake_vdb})],
- 'default': True,
-})
+domain_config = basics.HardCodedConfigSection(
+ {
+ "class": FakeDomain,
+ "repos": [basics.HardCodedConfigSection({"class": fake_repo})],
+ "vdb": [basics.HardCodedConfigSection({"class": fake_vdb})],
+ "default": True,
+ }
+)
class TestCommandline(ArgParseMixin):
@@ -42,30 +44,36 @@ class TestCommandline(ArgParseMixin):
def test_parser(self):
self.assertError(
- 'argument --min: not allowed with argument --max',
- '--max', '--min')
- self.parse('--all', domain=domain_config)
+ "argument --min: not allowed with argument --max", "--max", "--min"
+ )
+ self.parse("--all", domain=domain_config)
def test_no_domain(self):
self.assertError(
"config error: no default object of type 'domain' found. "
"Please either fix your configuration, or set the domain via the --domain option.",
- '--all')
+ "--all",
+ )
def test_no_description(self):
- self.assertOut([
- ' * spork/foon-2',
- ' repo: MISSING',
- ' description: MISSING',
- ' homepage: MISSING',
- ' license: MISSING',
- '',
- ], '-v', '--max', '--all', test_domain=domain_config)
+ self.assertOut(
+ [
+ " * spork/foon-2",
+ " repo: MISSING",
+ " description: MISSING",
+ " homepage: MISSING",
+ " license: MISSING",
+ "",
+ ],
+ "-v",
+ "--max",
+ "--all",
+ test_domain=domain_config,
+ )
def test_atom(self):
- config = self.parse(
- '--print-revdep', 'a/spork', '--all', domain=domain_config)
- assert config.print_revdep == [atom.atom('a/spork')]
+ config = self.parse("--print-revdep", "a/spork", "--all", domain=domain_config)
+ assert config.print_revdep == [atom.atom("a/spork")]
def test_no_contents(self):
- self.assertOut([], '--contents', '--all', test_domain=domain_config)
+ self.assertOut([], "--contents", "--all", test_domain=domain_config)
diff --git a/tests/sync/test_base.py b/tests/sync/test_base.py
index 34511541c..22fb6e82e 100644
--- a/tests/sync/test_base.py
+++ b/tests/sync/test_base.py
@@ -12,10 +12,9 @@ existing_uid = pwd.getpwnam(existing_user).pw_uid
class TestSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = str(tmp_path / 'repo')
+ self.repo_path = str(tmp_path / "repo")
def test_split_users(self):
o = base.Syncer(self.repo_path, "http://dar")
@@ -37,23 +36,23 @@ class TestSyncer:
with pytest.raises(base.MissingLocalUser):
base.Syncer(self.repo_path, f"foo_nonexistent_user::foon@site")
- @mock.patch('snakeoil.process.spawn.spawn')
+ @mock.patch("snakeoil.process.spawn.spawn")
def test_usersync_disabled(self, spawn):
o = base.Syncer(self.repo_path, f"http://foo/bar.git", usersync=False)
o.uid == os_data.uid
o.gid == os_data.gid
- @mock.patch('snakeoil.process.spawn.spawn')
+ @mock.patch("snakeoil.process.spawn.spawn")
def test_usersync_portage_perms(self, spawn):
# sync uses portage perms if repo dir doesn't exist
o = base.Syncer(self.repo_path, f"http://foo/bar.git", usersync=True)
o.uid == os_data.portage_uid
o.gid == os_data.portage_gid
- @mock.patch('snakeoil.process.spawn.spawn')
+ @mock.patch("snakeoil.process.spawn.spawn")
def test_usersync_repo_dir_perms(self, spawn):
# and repo dir perms if it does exist
- with mock.patch('os.stat') as stat:
+ with mock.patch("os.stat") as stat:
stat.return_value = mock.Mock(st_uid=1234, st_gid=5678)
o = base.Syncer(self.repo_path, f"http://foo/bar.git", usersync=True)
stat.assert_called()
@@ -61,62 +60,60 @@ class TestSyncer:
assert o.gid == 5678
-@mock.patch('snakeoil.process.find_binary')
+@mock.patch("snakeoil.process.find_binary")
class TestExternalSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = str(tmp_path / 'repo')
+ self.repo_path = str(tmp_path / "repo")
def test_missing_binary(self, find_binary):
- find_binary.side_effect = CommandNotFound('foo')
+ find_binary.side_effect = CommandNotFound("foo")
with pytest.raises(base.MissingBinary):
- base.ExternalSyncer(self.repo_path, 'http://dar')
+ base.ExternalSyncer(self.repo_path, "http://dar")
def test_existing_binary(self, find_binary):
# fake external syncer
class FooSyncer(base.ExternalSyncer):
- binary = 'foo'
+ binary = "foo"
# fake that the external binary exists
find_binary.side_effect = lambda x: x
- o = FooSyncer(self.repo_path, 'http://dar')
- assert o.uri == 'http://dar'
- assert o.binary == 'foo'
+ o = FooSyncer(self.repo_path, "http://dar")
+ assert o.uri == "http://dar"
+ assert o.binary == "foo"
- @mock.patch('snakeoil.process.spawn.spawn')
+ @mock.patch("snakeoil.process.spawn.spawn")
def test_usersync(self, spawn, find_binary):
# fake external syncer
class FooSyncer(base.ExternalSyncer):
- binary = 'foo'
+ binary = "foo"
# fake that the external binary exists
find_binary.side_effect = lambda x: x
- o = FooSyncer(self.repo_path, 'http://dar')
+ o = FooSyncer(self.repo_path, "http://dar")
o.uid = 1234
o.gid = 2345
- o._spawn('cmd', pipes={})
- assert spawn.call_args[1]['uid'] == o.uid
- assert spawn.call_args[1]['gid'] == o.gid
+ o._spawn("cmd", pipes={})
+ assert spawn.call_args[1]["uid"] == o.uid
+ assert spawn.call_args[1]["gid"] == o.gid
-@mock.patch('snakeoil.process.find_binary', return_value='git')
-@mock.patch('snakeoil.process.spawn.spawn')
+@mock.patch("snakeoil.process.find_binary", return_value="git")
+@mock.patch("snakeoil.process.spawn.spawn")
class TestVcsSyncer:
-
def test_basedir_perms_error(self, spawn, find_binary, tmp_path):
- syncer = git.git_syncer(str(tmp_path), 'git://blah.git')
+ syncer = git.git_syncer(str(tmp_path), "git://blah.git")
with pytest.raises(base.PathError):
- with mock.patch('os.stat') as stat:
- stat.side_effect = EnvironmentError('fake exception')
+ with mock.patch("os.stat") as stat:
+ stat.side_effect = EnvironmentError("fake exception")
syncer.sync()
def test_basedir_is_file_error(self, spawn, find_binary, tmp_path):
repo = tmp_path / "repo"
repo.touch()
- syncer = git.git_syncer(str(repo), 'git://blah.git')
+ syncer = git.git_syncer(str(repo), "git://blah.git")
# basedir gets '/' appended by default and stat errors out
with pytest.raises(base.PathError) as excinfo:
@@ -129,32 +126,30 @@ class TestVcsSyncer:
assert "isn't a directory" in str(excinfo.value)
def test_verbose_sync(self, spawn, find_binary, tmp_path):
- syncer = git.git_syncer(str(tmp_path), 'git://blah.git')
+ syncer = git.git_syncer(str(tmp_path), "git://blah.git")
syncer.sync(verbosity=1)
- assert '-v' == spawn.call_args[0][0][-1]
+ assert "-v" == spawn.call_args[0][0][-1]
syncer.sync(verbosity=2)
- assert '-vv' == spawn.call_args[0][0][-1]
+ assert "-vv" == spawn.call_args[0][0][-1]
def test_quiet_sync(self, spawn, find_binary, tmp_path):
- syncer = git.git_syncer(str(tmp_path), 'git://blah.git')
+ syncer = git.git_syncer(str(tmp_path), "git://blah.git")
syncer.sync(verbosity=-1)
- assert '-q' == spawn.call_args[0][0][-1]
+ assert "-q" == spawn.call_args[0][0][-1]
class TestGenericSyncer:
-
def test_init(self):
with pytest.raises(base.UriError):
- base.GenericSyncer('/', 'seriouslynotaprotocol://blah/')
+ base.GenericSyncer("/", "seriouslynotaprotocol://blah/")
- syncer = base.GenericSyncer('/', f'tar+https://blah.tar.gz')
+ syncer = base.GenericSyncer("/", f"tar+https://blah.tar.gz")
assert tar.tar_syncer is syncer.__class__
class TestDisabledSyncer:
-
def test_init(self):
- syncer = base.DisabledSyncer('/foo/bar', f'https://blah.git')
+ syncer = base.DisabledSyncer("/foo/bar", f"https://blah.git")
assert syncer.disabled
# syncing should also be disabled
assert not syncer.uri
@@ -162,14 +157,13 @@ class TestDisabledSyncer:
class TestAutodetectSyncer:
-
def test_no_syncer_detected(self, tmp_path):
syncer = base.AutodetectSyncer(str(tmp_path))
assert isinstance(syncer, base.DisabledSyncer)
- @mock.patch('snakeoil.process.find_binary', return_value='git')
+ @mock.patch("snakeoil.process.find_binary", return_value="git")
def test_syncer_detected(self, find_binary, tmp_path):
- d = tmp_path / '.git'
+ d = tmp_path / ".git"
d.mkdir()
syncer = base.AutodetectSyncer(str(tmp_path))
assert isinstance(syncer, git.git_syncer)
diff --git a/tests/sync/test_bzr.py b/tests/sync/test_bzr.py
index e1b45d4f4..f036c6478 100644
--- a/tests/sync/test_bzr.py
+++ b/tests/sync/test_bzr.py
@@ -6,10 +6,9 @@ from snakeoil.process import CommandNotFound
class TestBzrSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = tmp_path / 'repo'
+ self.repo_path = tmp_path / "repo"
def test_uri_parse(self):
assert bzr.bzr_syncer.parse_uri("bzr+http://dar") == "http://dar"
@@ -18,13 +17,13 @@ class TestBzrSyncer:
bzr.bzr_syncer.parse_uri("bzr://dar")
# external binary doesn't exist
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('bzr')
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("bzr")
with pytest.raises(base.SyncError):
bzr.bzr_syncer(str(self.repo_path), "bzr+http://foon.com/dar")
# fake that the external binary exists
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.return_value = 'bzr'
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.return_value = "bzr"
o = bzr.bzr_syncer(str(self.repo_path), "bzr+http://dar")
o.uri == "http://dar"
diff --git a/tests/sync/test_cvs.py b/tests/sync/test_cvs.py
index d92e5765b..7c02619fa 100644
--- a/tests/sync/test_cvs.py
+++ b/tests/sync/test_cvs.py
@@ -6,26 +6,26 @@ from snakeoil.process import CommandNotFound
class TestCVSSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = tmp_path / 'repo'
+ self.repo_path = tmp_path / "repo"
def test_uri_parse(self):
# external binary doesn't exist
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('cvs')
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("cvs")
with pytest.raises(base.SyncError):
- cvs.cvs_syncer(
- str(self.repo_path), "cvs+/bin/sh://foon.com/dar")
+ cvs.cvs_syncer(str(self.repo_path), "cvs+/bin/sh://foon.com/dar")
# fake that the external binary exists
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.return_value = 'cvs'
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.return_value = "cvs"
# nonexistent rsh
- with mock.patch('pkgcore.sync.base.ExternalSyncer.require_binary') as require_binary:
- require_binary.side_effect = base.MissingBinary('', 'rsh')
+ with mock.patch(
+ "pkgcore.sync.base.ExternalSyncer.require_binary"
+ ) as require_binary:
+ require_binary.side_effect = base.MissingBinary("", "rsh")
with pytest.raises(base.SyncError):
cvs.cvs_syncer(str(self.repo_path), "cvs+rsh://foon.com/dar")
@@ -41,8 +41,10 @@ class TestCVSSyncer:
assert o.rsh == None
assert o.env["CVSROOT"] == ":pserver:dar"
- with mock.patch('pkgcore.sync.base.ExternalSyncer.require_binary') as require_binary:
- require_binary.return_value = '/bin/sh'
+ with mock.patch(
+ "pkgcore.sync.base.ExternalSyncer.require_binary"
+ ) as require_binary:
+ require_binary.return_value = "/bin/sh"
o = cvs.cvs_syncer(str(self.repo_path), "cvs+/bin/sh://dar:module")
assert o.rsh == "/bin/sh"
assert o.uri == ":ext:dar"
diff --git a/tests/sync/test_darcs.py b/tests/sync/test_darcs.py
index 96a7bf71e..4abdaa102 100644
--- a/tests/sync/test_darcs.py
+++ b/tests/sync/test_darcs.py
@@ -6,10 +6,9 @@ from snakeoil.process import CommandNotFound
class TestDarcsSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = tmp_path / 'repo'
+ self.repo_path = tmp_path / "repo"
def test_uri_parse(self):
assert darcs.darcs_syncer.parse_uri("darcs+http://dar") == "http://dar"
@@ -18,13 +17,13 @@ class TestDarcsSyncer:
darcs.darcs_syncer.parse_uri("darcs://dar")
# external binary doesn't exist
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('darcs')
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("darcs")
with pytest.raises(base.SyncError):
darcs.darcs_syncer(str(self.repo_path), "darcs+http://foon.com/dar")
# fake that the external binary exists
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.return_value = 'bzr'
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.return_value = "bzr"
o = darcs.darcs_syncer(str(self.repo_path), "darcs+http://dar")
assert o.uri == "http://dar"
diff --git a/tests/sync/test_git.py b/tests/sync/test_git.py
index 8ef8c8456..16a12b6e2 100644
--- a/tests/sync/test_git.py
+++ b/tests/sync/test_git.py
@@ -7,10 +7,9 @@ from snakeoil.process import CommandNotFound
class TestGitSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = tmp_path / 'repo'
+ self.repo_path = tmp_path / "repo"
def test_uri_parse(self):
assert git.git_syncer.parse_uri("git+http://dar") == "http://dar"
@@ -19,42 +18,42 @@ class TestGitSyncer:
git.git_syncer.parse_uri("git+://dar")
# external binary doesn't exist
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('git')
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("git")
with pytest.raises(base.SyncError):
git.git_syncer(str(self.repo_path), "git+http://foon.com/dar")
# fake that the external binary exists
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.return_value = 'git'
- for proto in ('http', 'https'):
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.return_value = "git"
+ for proto in ("http", "https"):
for uri in (f"git+{proto}://repo.git", f"{proto}://repo.git"):
o = git.git_syncer(str(self.repo_path), uri)
assert o.uri == f"{proto}://repo.git"
- @mock.patch('snakeoil.process.spawn.spawn')
+ @mock.patch("snakeoil.process.spawn.spawn")
def test_sync(self, spawn):
- uri = 'git://foo.git'
- with mock.patch('snakeoil.process.find_binary', return_value='git'):
+ uri = "git://foo.git"
+ with mock.patch("snakeoil.process.find_binary", return_value="git"):
syncer = git.git_syncer(str(self.repo_path), uri)
# initial sync
syncer.sync()
assert spawn.call_args[0] == (
- ['git', 'clone', uri, str(self.repo_path) + os.path.sep],)
- assert spawn.call_args[1]['cwd'] is None
+ ["git", "clone", uri, str(self.repo_path) + os.path.sep],
+ )
+ assert spawn.call_args[1]["cwd"] is None
# repo update
self.repo_path.mkdir()
syncer.sync()
- assert spawn.call_args[0] == (['git', 'pull'],)
- assert spawn.call_args[1]['cwd'] == syncer.basedir
+ assert spawn.call_args[0] == (["git", "pull"],)
+ assert spawn.call_args[1]["cwd"] == syncer.basedir
@pytest.mark_network
class TestGitSyncerReal:
-
def test_sync(self, tmp_path):
- path = tmp_path / 'repo'
+ path = tmp_path / "repo"
syncer = git.git_syncer(str(path), "https://github.com/pkgcore/pkgrepo.git")
assert syncer.sync()
- assert os.path.exists(os.path.join(path, 'metadata', 'layout.conf'))
+ assert os.path.exists(os.path.join(path, "metadata", "layout.conf"))
assert syncer.sync()
diff --git a/tests/sync/test_git_svn.py b/tests/sync/test_git_svn.py
index 8dc8cd8e9..a79c15cc2 100644
--- a/tests/sync/test_git_svn.py
+++ b/tests/sync/test_git_svn.py
@@ -8,10 +8,9 @@ from snakeoil.process import CommandNotFound
class TestGitSVNSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = tmp_path / 'repo'
+ self.repo_path = tmp_path / "repo"
def test_uri_parse(self):
assert git_svn.git_svn_syncer.parse_uri("git+svn+http://dar") == "http://dar"
@@ -20,13 +19,15 @@ class TestGitSVNSyncer:
git_svn.git_svn_syncer.parse_uri("git+svn+://dar")
# external binary doesn't exist
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('git')
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("git")
with pytest.raises(base.SyncError):
- git_svn.git_svn_syncer(str(self.repo_path), "git+svn+http://foon.com/dar")
+ git_svn.git_svn_syncer(
+ str(self.repo_path), "git+svn+http://foon.com/dar"
+ )
# fake that the external binary exists
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.return_value = 'git'
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.return_value = "git"
o = git_svn.git_svn_syncer(str(self.repo_path), "git+svn+http://dar")
assert o.uri == "http://dar"
diff --git a/tests/sync/test_hg.py b/tests/sync/test_hg.py
index 59904af9d..490eac054 100644
--- a/tests/sync/test_hg.py
+++ b/tests/sync/test_hg.py
@@ -7,10 +7,9 @@ from snakeoil.process import CommandNotFound
class TestHgSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = tmp_path / 'repo'
+ self.repo_path = tmp_path / "repo"
def test_uri_parse(self):
assert hg.hg_syncer.parse_uri("hg+http://dar") == "http://dar"
@@ -20,30 +19,31 @@ class TestHgSyncer:
hg.hg_syncer.parse_uri("hg://dar")
# external binary doesn't exist
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('svn')
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("svn")
with pytest.raises(base.SyncError):
hg.hg_syncer(str(self.repo_path), "hg+http://foon.com/dar")
# fake that the external binary exists
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.return_value = 'hg'
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.return_value = "hg"
o = hg.hg_syncer(str(self.repo_path), "hg+http://dar")
assert o.uri == "http://dar"
- @mock.patch('snakeoil.process.spawn.spawn')
+ @mock.patch("snakeoil.process.spawn.spawn")
def test_sync(self, spawn):
- uri = 'https://foo/bar'
- with mock.patch('snakeoil.process.find_binary', return_value='hg'):
- syncer = hg.hg_syncer(str(self.repo_path), f'hg+{uri}')
+ uri = "https://foo/bar"
+ with mock.patch("snakeoil.process.find_binary", return_value="hg"):
+ syncer = hg.hg_syncer(str(self.repo_path), f"hg+{uri}")
# initial sync
syncer.sync()
assert spawn.call_args[0] == (
- ['hg', 'clone', uri, str(self.repo_path) + os.path.sep],)
- assert spawn.call_args[1]['cwd'] is None
+ ["hg", "clone", uri, str(self.repo_path) + os.path.sep],
+ )
+ assert spawn.call_args[1]["cwd"] is None
# repo update
self.repo_path.mkdir()
syncer.sync()
- assert spawn.call_args[0] == (['hg', 'pull', '-u', uri],)
- assert spawn.call_args[1]['cwd'] == syncer.basedir
+ assert spawn.call_args[0] == (["hg", "pull", "-u", uri],)
+ assert spawn.call_args[1]["cwd"] == syncer.basedir
diff --git a/tests/sync/test_rsync.py b/tests/sync/test_rsync.py
index 14bfc59f9..8bfba18f0 100644
--- a/tests/sync/test_rsync.py
+++ b/tests/sync/test_rsync.py
@@ -10,41 +10,39 @@ from snakeoil.process import CommandNotFound
def fake_ips(num):
"""Generate simple IPv4 addresses given the amount to create."""
- return [
- (None, None, None, None, ('.'.join(str(x) * 4), 0))
- for x in range(num)
- ]
+ return [(None, None, None, None, (".".join(str(x) * 4), 0)) for x in range(num)]
-@mock.patch('socket.getaddrinfo', return_value=fake_ips(3))
-@mock.patch('snakeoil.process.spawn.spawn')
+@mock.patch("socket.getaddrinfo", return_value=fake_ips(3))
+@mock.patch("snakeoil.process.spawn.spawn")
class TestRsyncSyncer:
_syncer_class = rsync.rsync_syncer
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = str(tmp_path / 'repo')
- with mock.patch('snakeoil.process.find_binary', return_value='rsync'):
+ self.repo_path = str(tmp_path / "repo")
+ with mock.patch("snakeoil.process.find_binary", return_value="rsync"):
self.syncer = self._syncer_class(
- self.repo_path, "rsync://rsync.gentoo.org/gentoo-portage")
+ self.repo_path, "rsync://rsync.gentoo.org/gentoo-portage"
+ )
- @mock.patch('snakeoil.process.find_binary')
+ @mock.patch("snakeoil.process.find_binary")
def test_uri_parse_rsync_missing(self, find_binary, spawn, getaddrinfo):
- find_binary.side_effect = CommandNotFound('rsync')
+ find_binary.side_effect = CommandNotFound("rsync")
with pytest.raises(base.SyncError):
- self._syncer_class(self.repo_path, 'rsync://foon.com/dar')
+ self._syncer_class(self.repo_path, "rsync://foon.com/dar")
- @mock.patch('snakeoil.process.find_binary')
+ @mock.patch("snakeoil.process.find_binary")
def test_uri_parse(self, find_binary, spawn, getaddrinfo):
find_binary.side_effect = lambda x: x
- o = self._syncer_class(self.repo_path, 'rsync://dar/module')
- assert o.uri == 'rsync://dar/module/'
+ o = self._syncer_class(self.repo_path, "rsync://dar/module")
+ assert o.uri == "rsync://dar/module/"
assert o.rsh == None
- o = self._syncer_class(self.repo_path, 'rsync+/bin/sh://dar/module')
- assert o.uri == 'rsync://dar/module/'
- assert o.rsh == '/bin/sh'
+ o = self._syncer_class(self.repo_path, "rsync+/bin/sh://dar/module")
+ assert o.uri == "rsync://dar/module/"
+ assert o.rsh == "/bin/sh"
def test_successful_sync(self, spawn, getaddrinfo):
spawn.return_value = 0
@@ -55,21 +53,21 @@ class TestRsyncSyncer:
spawn.return_value = 1
with pytest.raises(base.SyncError) as excinfo:
assert self.syncer.sync()
- assert str(excinfo.value).startswith('rsync command syntax error:')
+ assert str(excinfo.value).startswith("rsync command syntax error:")
spawn.assert_called_once()
def test_failed_disk_space_sync(self, spawn, getaddrinfo):
spawn.return_value = 11
with pytest.raises(base.SyncError) as excinfo:
assert self.syncer.sync()
- assert str(excinfo.value) == 'rsync ran out of disk space'
+ assert str(excinfo.value) == "rsync ran out of disk space"
spawn.assert_called_once()
def test_retried_sync(self, spawn, getaddrinfo):
spawn.return_value = 99
with pytest.raises(base.SyncError) as excinfo:
assert self.syncer.sync()
- assert str(excinfo.value) == 'all attempts failed'
+ assert str(excinfo.value) == "all attempts failed"
# rsync should retry every resolved IP related to the sync URI
assert len(spawn.mock_calls) == 3
@@ -79,14 +77,14 @@ class TestRsyncSyncer:
getaddrinfo.return_value = fake_ips(self.syncer.retries + 1)
with pytest.raises(base.SyncError) as excinfo:
assert self.syncer.sync()
- assert str(excinfo.value) == 'all attempts failed'
+ assert str(excinfo.value) == "all attempts failed"
assert len(spawn.mock_calls) == self.syncer.retries
def test_failed_dns_sync(self, spawn, getaddrinfo):
getaddrinfo.side_effect = OSError()
with pytest.raises(base.SyncError) as excinfo:
assert self.syncer.sync()
- assert str(excinfo.value).startswith('DNS resolution failed')
+ assert str(excinfo.value).startswith("DNS resolution failed")
spawn.assert_not_called()
@@ -97,21 +95,23 @@ class TestRsyncTimestampSyncer(TestRsyncSyncer):
@pytest.mark_network
class TestRsyncSyncerReal:
-
def test_sync(self, tmp_path):
# perform a tarball sync for initial week-old base
- path = tmp_path / 'repo'
+ path = tmp_path / "repo"
week_old = datetime.datetime.now() - datetime.timedelta(days=7)
date_str = week_old.strftime("%Y%m%d")
syncer = tar_syncer(
- str(path), f"http://distfiles.gentoo.org/snapshots/portage-{date_str}.tar.xz")
+ str(path),
+ f"http://distfiles.gentoo.org/snapshots/portage-{date_str}.tar.xz",
+ )
assert syncer.sync()
- timestamp = os.path.join(path, 'metadata', 'timestamp.chk')
+ timestamp = os.path.join(path, "metadata", "timestamp.chk")
assert os.path.exists(timestamp)
stat = os.stat(timestamp)
# run rsync over the unpacked repo tarball to update to the latest tree
syncer = rsync.rsync_timestamp_syncer(
- str(path), "rsync://rsync.gentoo.org/gentoo-portage")
+ str(path), "rsync://rsync.gentoo.org/gentoo-portage"
+ )
assert syncer.sync()
assert stat != os.stat(timestamp)
diff --git a/tests/sync/test_sqfs.py b/tests/sync/test_sqfs.py
index 47b53058b..88e23da22 100644
--- a/tests/sync/test_sqfs.py
+++ b/tests/sync/test_sqfs.py
@@ -6,9 +6,10 @@ from pkgcore.sync.sqfs import sqfs_syncer
class TestSqfsSyncer:
-
def test_uri_parse(self):
- assert sqfs_syncer.parse_uri("sqfs+http://repo.lzo.sqfs") == "http://repo.lzo.sqfs"
+ assert (
+ sqfs_syncer.parse_uri("sqfs+http://repo.lzo.sqfs") == "http://repo.lzo.sqfs"
+ )
# missing actual URI protocol
with pytest.raises(base.UriError):
@@ -24,12 +25,12 @@ class TestSqfsSyncer:
@pytest.mark_network
class TestSqfsSyncerReal:
-
def test_sync(self, tmp_path):
- path = tmp_path / 'repo'
+ path = tmp_path / "repo"
syncer = sqfs_syncer(
- str(path),
- "sqfs+http://distfiles.gentoo.org/snapshots/squashfs/gentoo-current.lzo.sqfs")
+ str(path),
+ "sqfs+http://distfiles.gentoo.org/snapshots/squashfs/gentoo-current.lzo.sqfs",
+ )
assert syncer.sync()
sqfs = os.path.join(syncer.basedir, syncer.basename)
assert os.path.exists(sqfs)
diff --git a/tests/sync/test_svn.py b/tests/sync/test_svn.py
index 77757b225..1edeffc28 100644
--- a/tests/sync/test_svn.py
+++ b/tests/sync/test_svn.py
@@ -6,23 +6,22 @@ from snakeoil.process import CommandNotFound
class TestSVNSyncer:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- self.repo_path = tmp_path / 'repo'
+ self.repo_path = tmp_path / "repo"
def test_uri_parse(self):
with pytest.raises(base.UriError):
svn.svn_syncer.parse_uri("svn+://dar")
# external binary doesn't exist
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('svn')
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("svn")
with pytest.raises(base.SyncError):
svn.svn_syncer(str(self.repo_path), "svn+http://foon.com/dar")
# fake that the external binary exists
- with mock.patch('snakeoil.process.find_binary') as find_binary:
- find_binary.return_value = 'svn'
+ with mock.patch("snakeoil.process.find_binary") as find_binary:
+ find_binary.return_value = "svn"
o = svn.svn_syncer(str(self.repo_path), "svn+http://dar")
assert o.uri == "http://dar"
diff --git a/tests/sync/test_tar.py b/tests/sync/test_tar.py
index e8be19958..0e65ea56d 100644
--- a/tests/sync/test_tar.py
+++ b/tests/sync/test_tar.py
@@ -6,7 +6,6 @@ from pkgcore.sync.tar import tar_syncer
class TestTarSyncer:
-
def test_uri_parse(self):
assert tar_syncer.parse_uri("tar+http://repo.tar.gz") == "http://repo.tar.gz"
@@ -19,7 +18,7 @@ class TestTarSyncer:
tar_syncer.parse_uri("tar+https://repo.tar.foo")
for ext in tar_syncer.supported_exts:
- for proto in ('http', 'https'):
+ for proto in ("http", "https"):
for uri in (f"tar+{proto}://repo{ext}", f"{proto}://repo{ext}"):
o = tar_syncer("/tmp/foon", uri)
assert o.uri == f"{proto}://repo{ext}"
@@ -27,13 +26,13 @@ class TestTarSyncer:
@pytest.mark_network
class TestTarSyncerReal:
-
def test_sync(self, tmp_path):
- path = tmp_path / 'repo'
+ path = tmp_path / "repo"
syncer = tar_syncer(
- str(path), "https://github.com/pkgcore/pkgrepo/archive/master.tar.gz")
+ str(path), "https://github.com/pkgcore/pkgrepo/archive/master.tar.gz"
+ )
assert syncer.sync()
- layout_conf = os.path.join(path, 'metadata', 'layout.conf')
+ layout_conf = os.path.join(path, "metadata", "layout.conf")
assert os.path.exists(layout_conf)
stat = os.stat(layout_conf)
# re-sync and verify that the repo didn't get replaced
diff --git a/tests/test_gpg.py b/tests/test_gpg.py
index f5fd2991c..f59d94417 100644
--- a/tests/test_gpg.py
+++ b/tests/test_gpg.py
@@ -2,22 +2,25 @@ from pkgcore import gpg
class TestSkipSignatures:
-
def test_simple_skipping(self):
for header in (
- [],
- ["-----BEGIN PGP SIGNED MESSAGE-----\n", "Hash: Sha1\n", "\n"]):
+ [],
+ ["-----BEGIN PGP SIGNED MESSAGE-----\n", "Hash: Sha1\n", "\n"],
+ ):
d = [
"asdf\n",
"fdsa\n",
"-----BEGIN PGP SIGNATURE-----\n",
"this isn't a valid sig...\n",
"-----END PGP SIGNATURE-----\n",
- "foon\n"]
+ "foon\n",
+ ]
d2 = header + d
parsed = list(gpg.skip_signatures(d2))
required = [d[0], d[1], d[-1]]
- assert parsed == required, f"{parsed!r} != {required!r} for header {header!r}"
+ assert (
+ parsed == required
+ ), f"{parsed!r} != {required!r} for header {header!r}"
def test_signed_signed(self):
d = [
@@ -39,5 +42,6 @@ class TestSkipSignatures:
"-----BEGIN PGP SIGNATURE-----\n",
" not valid...\n",
"-----END PGP SIGNATURE-----\n",
- "asdf\n"]
+ "asdf\n",
+ ]
assert list(gpg.skip_signatures(d)) == ["blah\n", "foon\n", "asdf\n"]
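
What the assertions above require of gpg.skip_signatures is that it yields the input lines with the clearsign header and any PGP signature block removed. A minimal, hypothetical re-implementation that satisfies the simple case shown above (pkgcore's actual code may handle more, e.g. nested signed messages, differently):

# Hypothetical line filter matching the expectations in test_simple_skipping.
def skip_signatures(lines):
    in_sig = False
    lines = iter(lines)
    for line in lines:
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE-----"):
            # consume the armor header (Hash: ...) up to the blank separator
            for header in lines:
                if header == "\n":
                    break
            continue
        if line.startswith("-----BEGIN PGP SIGNATURE-----"):
            in_sig = True
            continue
        if line.startswith("-----END PGP SIGNATURE-----"):
            in_sig = False
            continue
        if not in_sig:
            yield line


data = [
    "asdf\n",
    "fdsa\n",
    "-----BEGIN PGP SIGNATURE-----\n",
    "this isn't a valid sig...\n",
    "-----END PGP SIGNATURE-----\n",
    "foon\n",
]
assert list(skip_signatures(data)) == ["asdf\n", "fdsa\n", "foon\n"]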
diff --git a/tests/test_plugin.py b/tests/test_plugin.py
index e74726d9f..e6c1136ff 100644
--- a/tests/test_plugin.py
+++ b/tests/test_plugin.py
@@ -14,32 +14,34 @@ class LowPlug:
class TestModules:
-
def setup_method(self, method):
self.dir = tempfile.mkdtemp()
self.dir2 = tempfile.mkdtemp()
# force plugin module to use package dir for cache dir by setting
# system/user cache dirs to nonexistent paths
- self.patcher = mock.patch('pkgcore.plugin.const')
+ self.patcher = mock.patch("pkgcore.plugin.const")
const = self.patcher.start()
- const.SYSTEM_CACHE_PATH = pjoin(self.dir, 'nonexistent')
- const.USER_CACHE_PATH = pjoin(self.dir, 'nonexistent')
+ const.SYSTEM_CACHE_PATH = pjoin(self.dir, "nonexistent")
+ const.USER_CACHE_PATH = pjoin(self.dir, "nonexistent")
# Set up some test modules for our use.
- self.packdir = pjoin(self.dir, 'mod_testplug')
- self.packdir2 = pjoin(self.dir2, 'mod_testplug')
+ self.packdir = pjoin(self.dir, "mod_testplug")
+ self.packdir2 = pjoin(self.dir2, "mod_testplug")
os.mkdir(self.packdir)
os.mkdir(self.packdir2)
- with open(pjoin(self.packdir, '__init__.py'), 'w') as init:
- init.write('''
+ with open(pjoin(self.packdir, "__init__.py"), "w") as init:
+ init.write(
+ """
from pkgcore.plugins import extend_path
extend_path(__path__, __name__)
-''')
- filename = pjoin(self.packdir, 'plug.py')
- with open(filename, 'w') as plug:
- plug.write('''
+"""
+ )
+ filename = pjoin(self.packdir, "plug.py")
+ with open(filename, "w") as plug:
+ plug.write(
+ """
class DisabledPlug:
disabled = True
@@ -59,15 +61,17 @@ pkgcore_plugins = {
'tests.test_plugin.LowPlug',
]
}
-''')
+"""
+ )
# Move the mtime 2 seconds into the past so the .pyc file has
# a different mtime.
st = os.stat(filename)
os.utime(filename, (st.st_atime, st.st_mtime - 2))
- with open(pjoin(self.packdir, 'plug2.py'), 'w') as plug2:
- plug2.write('# I do not have any pkgcore_plugins for you!\n')
- with open(pjoin(self.packdir2, 'plug.py'), 'w') as plug:
- plug.write('''
+ with open(pjoin(self.packdir, "plug2.py"), "w") as plug2:
+ plug2.write("# I do not have any pkgcore_plugins for you!\n")
+ with open(pjoin(self.packdir2, "plug.py"), "w") as plug:
+ plug.write(
+ """
# This file is later on sys.path than the plug.py in packdir, so it should
# not have any effect on the tests.
@@ -75,7 +79,8 @@ class HiddenPlug:
priority = 8
pkgcore_plugins = {'plugtest': [HiddenPlug]}
-''')
+"""
+ )
# Append it to the path
sys.path.insert(0, self.dir2)
sys.path.insert(0, self.dir)
@@ -90,17 +95,19 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
shutil.rmtree(self.dir)
shutil.rmtree(self.dir2)
# make sure we don't keep the sys.modules entries around
- sys.modules.pop('mod_testplug', None)
- sys.modules.pop('mod_testplug.plug', None)
- sys.modules.pop('mod_testplug.plug2', None)
+ sys.modules.pop("mod_testplug", None)
+ sys.modules.pop("mod_testplug.plug", None)
+ sys.modules.pop("mod_testplug.plug2", None)
def test_extend_path(self):
import mod_testplug
+
expected = stable_unique(
- pjoin(p, 'mod_testplug')
- for p in sys.path if os.path.isdir(p))
- assert expected == mod_testplug.__path__, \
- set(expected) ^ set(mod_testplug.__path__)
+ pjoin(p, "mod_testplug") for p in sys.path if os.path.isdir(p)
+ )
+ assert expected == mod_testplug.__path__, set(expected) ^ set(
+ mod_testplug.__path__
+ )
def _runit(self, method):
plugin._global_cache.clear()
@@ -110,58 +117,62 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
plugin._global_cache.clear()
method()
method()
- assert mtime == \
- os.path.getmtime(pjoin(self.packdir, plugin.CACHE_FILENAME))
+ assert mtime == os.path.getmtime(pjoin(self.packdir, plugin.CACHE_FILENAME))
# We cannot write this since it contains an unimportable plugin.
assert not os.path.exists(pjoin(self.packdir2, plugin.CACHE_FILENAME))
def _test_plug(self):
import mod_testplug
- assert plugin.get_plugin('spork', mod_testplug) is None
- plugins = list(plugin.get_plugins('plugtest', mod_testplug))
+
+ assert plugin.get_plugin("spork", mod_testplug) is None
+ plugins = list(plugin.get_plugins("plugtest", mod_testplug))
assert len(plugins) == 2, plugins
- plugin.get_plugin('plugtest', mod_testplug)
- assert 'HighPlug' == \
- plugin.get_plugin('plugtest', mod_testplug).__class__.__name__
+ plugin.get_plugin("plugtest", mod_testplug)
+ assert (
+ "HighPlug" == plugin.get_plugin("plugtest", mod_testplug).__class__.__name__
+ )
with open(pjoin(self.packdir, plugin.CACHE_FILENAME)) as f:
lines = f.readlines()
assert len(lines) == 3
assert plugin.CACHE_HEADER + "\n" == lines[0]
lines.pop(0)
lines.sort()
- mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug2.py')))
- assert f'plug2:{mtime}:\n' == lines[0]
- mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug.py')))
+ mtime = int(os.path.getmtime(pjoin(self.packdir, "plug2.py")))
+ assert f"plug2:{mtime}:\n" == lines[0]
+ mtime = int(os.path.getmtime(pjoin(self.packdir, "plug.py")))
assert (
- f'plug:{mtime}:plugtest,7,1:plugtest,1,tests.test_plugin.LowPlug:plugtest,0,0\n'
- == lines[1])
+ f"plug:{mtime}:plugtest,7,1:plugtest,1,tests.test_plugin.LowPlug:plugtest,0,0\n"
+ == lines[1]
+ )
def test_plug(self):
self._runit(self._test_plug)
def _test_no_unneeded_import(self):
import mod_testplug
- list(plugin.get_plugins('spork', mod_testplug))
- sys.modules.pop('mod_testplug.plug')
+
+ list(plugin.get_plugins("spork", mod_testplug))
+ sys.modules.pop("mod_testplug.plug")
# This one is not loaded if we are testing with a good cache.
- sys.modules.pop('mod_testplug.plug2', None)
- list(plugin.get_plugins('plugtest', mod_testplug))
+ sys.modules.pop("mod_testplug.plug2", None)
+ list(plugin.get_plugins("plugtest", mod_testplug))
# Extra messages since getting all of sys.modules printed is annoying.
- assert 'mod_testplug.plug' in sys.modules, 'plug not loaded'
- assert 'mod_testplug.plug2' not in sys.modules, 'plug2 loaded'
+ assert "mod_testplug.plug" in sys.modules, "plug not loaded"
+ assert "mod_testplug.plug2" not in sys.modules, "plug2 loaded"
def test_no_unneeded_import(self):
self._runit(self._test_no_unneeded_import)
def test_cache_corruption(self):
print(plugin.const)
- print('wheeeeee')
+ print("wheeeeee")
import mod_testplug
- list(plugin.get_plugins('spork', mod_testplug))
+
+ list(plugin.get_plugins("spork", mod_testplug))
filename = pjoin(self.packdir, plugin.CACHE_FILENAME)
- cachefile = open(filename, 'a')
+ cachefile = open(filename, "a")
try:
- cachefile.write('corruption\n')
+ cachefile.write("corruption\n")
finally:
cachefile.close()
# Shift the file into the past a little or the rewritten file
@@ -171,16 +182,17 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
os.utime(filename, (st.st_atime, corrupt_mtime))
plugin._global_cache.clear()
self._test_plug()
- good_mtime = os.path.getmtime(
- pjoin(self.packdir, plugin.CACHE_FILENAME))
+ good_mtime = os.path.getmtime(pjoin(self.packdir, plugin.CACHE_FILENAME))
plugin._global_cache.clear()
self._test_plug()
- assert good_mtime == os.path.getmtime(pjoin(self.packdir, plugin.CACHE_FILENAME))
+ assert good_mtime == os.path.getmtime(
+ pjoin(self.packdir, plugin.CACHE_FILENAME)
+ )
assert good_mtime != corrupt_mtime
def test_rewrite_on_remove(self):
- filename = pjoin(self.packdir, 'extra.py')
- plug = open(filename, 'w')
+ filename = pjoin(self.packdir, "extra.py")
+ plug = open(filename, "w")
try:
plug.write('pkgcore_plugins = {"plugtest": [object()]}\n')
finally:
@@ -188,7 +200,8 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
plugin._global_cache.clear()
import mod_testplug
- assert len(list(plugin.get_plugins('plugtest', mod_testplug))) == 3
+
+ assert len(list(plugin.get_plugins("plugtest", mod_testplug))) == 3
os.unlink(filename)
@@ -196,21 +209,24 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
self._test_plug()
def test_priority_caching(self):
- plug3 = open(pjoin(self.packdir, 'plug3.py'), 'w')
+ plug3 = open(pjoin(self.packdir, "plug3.py"), "w")
try:
- plug3.write('''
+ plug3.write(
+ """
class LowPlug:
priority = 6
pkgcore_plugins = {
'plugtest': [LowPlug()],
}
-''')
+"""
+ )
finally:
plug3.close()
- plug4 = open(pjoin(self.packdir, 'plug4.py'), 'w')
+ plug4 = open(pjoin(self.packdir, "plug4.py"), "w")
try:
- plug4.write('''
+ plug4.write(
+ """
# First file tried, only a disabled plugin.
class HighDisabledPlug:
priority = 15
@@ -219,12 +235,14 @@ class HighDisabledPlug:
pkgcore_plugins = {
'plugtest': [HighDisabledPlug()],
}
-''')
+"""
+ )
finally:
plug4.close()
- plug5 = open(pjoin(self.packdir, 'plug5.py'), 'w')
+ plug5 = open(pjoin(self.packdir, "plug5.py"), "w")
try:
- plug5.write('''
+ plug5.write(
+ """
# Second file tried, with a skipped low priority plugin.
class HighDisabledPlug:
priority = 12
@@ -236,12 +254,14 @@ class LowPlug:
pkgcore_plugins = {
'plugtest': [HighDisabledPlug(), LowPlug()],
}
-''')
+"""
+ )
finally:
plug5.close()
- plug6 = open(pjoin(self.packdir, 'plug6.py'), 'w')
+ plug6 = open(pjoin(self.packdir, "plug6.py"), "w")
try:
- plug6.write('''
+ plug6.write(
+ """
# Not tried, bogus priority.
class BogusPlug:
priority = 'spoon'
@@ -249,44 +269,48 @@ class BogusPlug:
pkgcore_plugins = {
'plugtest': [BogusPlug()],
}
-''')
+"""
+ )
finally:
plug6.close()
self._runit(self._test_priority_caching)
def _test_priority_caching(self):
import mod_testplug
- list(plugin.get_plugins('spork', mod_testplug))
- sys.modules.pop('mod_testplug.plug', None)
- sys.modules.pop('mod_testplug.plug2', None)
- sys.modules.pop('mod_testplug.plug3', None)
- sys.modules.pop('mod_testplug.plug4', None)
- sys.modules.pop('mod_testplug.plug5', None)
- sys.modules.pop('mod_testplug.plug6', None)
- best_plug = plugin.get_plugin('plugtest', mod_testplug)
+
+ list(plugin.get_plugins("spork", mod_testplug))
+ sys.modules.pop("mod_testplug.plug", None)
+ sys.modules.pop("mod_testplug.plug2", None)
+ sys.modules.pop("mod_testplug.plug3", None)
+ sys.modules.pop("mod_testplug.plug4", None)
+ sys.modules.pop("mod_testplug.plug5", None)
+ sys.modules.pop("mod_testplug.plug6", None)
+ best_plug = plugin.get_plugin("plugtest", mod_testplug)
from mod_testplug import plug
+
assert plug.high_plug == best_plug
# Extra messages since getting all of sys.modules printed is annoying.
- assert 'mod_testplug.plug' in sys.modules, 'plug not loaded'
- assert 'mod_testplug.plug2' not in sys.modules, 'plug2 loaded'
- assert 'mod_testplug.plug3' not in sys.modules, 'plug3 loaded'
- assert 'mod_testplug.plug4' in sys.modules, 'plug4 not loaded'
- assert 'mod_testplug.plug5' in sys.modules, 'plug4 not loaded'
- assert 'mod_testplug.plug6' not in sys.modules, 'plug6 loaded'
+ assert "mod_testplug.plug" in sys.modules, "plug not loaded"
+ assert "mod_testplug.plug2" not in sys.modules, "plug2 loaded"
+ assert "mod_testplug.plug3" not in sys.modules, "plug3 loaded"
+ assert "mod_testplug.plug4" in sys.modules, "plug4 not loaded"
+ assert "mod_testplug.plug5" in sys.modules, "plug4 not loaded"
+ assert "mod_testplug.plug6" not in sys.modules, "plug6 loaded"
def test_header_change_invalidates_cache(self):
# Write the cache
plugin._global_cache.clear()
import mod_testplug
- list(plugin.get_plugins('testplug', mod_testplug))
+
+ list(plugin.get_plugins("testplug", mod_testplug))
# Modify the cache.
filename = pjoin(self.packdir, plugin.CACHE_FILENAME)
with open(filename) as f:
cache = f.readlines()
- cache[0] = 'not really a pkgcore plugin cache\n'
- with open(filename, 'w') as f:
- f.write(''.join(cache))
+ cache[0] = "not really a pkgcore plugin cache\n"
+ with open(filename, "w") as f:
+ f.write("".join(cache))
# And test if it is properly rewritten.
plugin._global_cache.clear()
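
These tests revolve around the plugin registry cache: a text file whose first line is a header and whose entries record each plugin module's mtime, so the registry is rebuilt whenever a module changes, the header differs, or the file is corrupted. A hypothetical sketch of that validation logic, with names chosen for illustration rather than taken from pkgcore's internals:

# Hypothetical mtime-keyed cache check mirroring what the tests above assert:
# a bad header, a corrupt entry, or a changed module all force a rewrite.
import os

CACHE_HEADER = "example plugin cache v1"  # illustrative header string


def cache_is_valid(cache_path: str, module_paths: list[str]) -> bool:
    try:
        with open(cache_path) as f:
            lines = f.read().splitlines()
    except FileNotFoundError:
        return False
    if not lines or lines[0] != CACHE_HEADER:
        return False  # header mismatch invalidates the whole cache
    recorded = {}
    for line in lines[1:]:
        try:
            name, mtime, _data = line.split(":", 2)
            recorded[name] = int(mtime)
        except ValueError:
            return False  # corrupt entry invalidates the cache
    for path in module_paths:
        name = os.path.splitext(os.path.basename(path))[0]
        if recorded.get(name) != int(os.path.getmtime(path)):
            return False  # module changed (or is new) since the cache was written
    return True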
diff --git a/tests/test_source_hygene.py b/tests/test_source_hygene.py
index cafa02d23..839d02b60 100644
--- a/tests/test_source_hygene.py
+++ b/tests/test_source_hygene.py
@@ -2,5 +2,5 @@ from snakeoil.test.modules import ExportedModules
class Test_modules(ExportedModules):
- target_namespace = 'pkgcore'
+ target_namespace = "pkgcore"
ignore_all_import_failures = True
diff --git a/tests/util/test_parserestrict.py b/tests/util/test_parserestrict.py
index d7331d090..328c921e5 100644
--- a/tests/util/test_parserestrict.py
+++ b/tests/util/test_parserestrict.py
@@ -7,21 +7,19 @@ from pkgcore.util import parserestrict
class TestMatch:
-
def test_comma_separated_containment(self):
- parser = parserestrict.comma_separated_containment('utensil')
- restrict = parser('spork,foon')
+ parser = parserestrict.comma_separated_containment("utensil")
+ restrict = parser("spork,foon")
# Icky, should really try to match a fake package.
assert isinstance(restrict, packages.PackageRestriction)
- assert 'utensil' == restrict.attr
+ assert "utensil" == restrict.attr
valrestrict = restrict.restriction
- assert valrestrict.match(('foon',))
- assert not valrestrict.match(('spork,foon',))
- assert not valrestrict.match(('foo',))
+ assert valrestrict.match(("foon",))
+ assert not valrestrict.match(("spork,foon",))
+ assert not valrestrict.match(("foo",))
class TestExtendedRestrictionGeneration:
-
def verify_text_glob(self, restrict, token):
assert isinstance(restrict, values.StrRegex), token
@@ -36,27 +34,32 @@ class TestExtendedRestrictionGeneration:
for token in ("*", ""):
i = parserestrict.convert_glob(token)
- assert i == None, (
- f"verifying None is returned on pointless restrictions, failed token: {token}")
+ assert (
+ i == None
+ ), f"verifying None is returned on pointless restrictions, failed token: {token}"
with pytest.raises(parserestrict.ParseError):
- parserestrict.convert_glob('**')
+ parserestrict.convert_glob("**")
def verify_restrict(self, restrict, attr, token):
assert isinstance(restrict, packages.PackageRestriction), token
- assert restrict.attr == attr, (
- f"verifying package attr {restrict.attr}; required({attr}), token {token}")
+ assert (
+ restrict.attr == attr
+ ), f"verifying package attr {restrict.attr}; required({attr}), token {token}"
if "*" in token:
self.verify_text_glob(restrict.restriction, token)
else:
self.verify_text(restrict.restriction, token)
- @pytest.mark.parametrize(("attr", "sfmt"), (
- ("category", "%s/*"),
- ("package", "*/%s"),
- ("package", "%s"),
- ))
+ @pytest.mark.parametrize(
+ ("attr", "sfmt"),
+ (
+ ("category", "%s/*"),
+ ("package", "*/%s"),
+ ("package", "%s"),
+ ),
+ )
@pytest.mark.parametrize("raw_token", ("package", "*bsdiff", "bsdiff*"))
def test_single_restrict_check(self, raw_token, attr, sfmt):
token = sfmt % raw_token
@@ -64,7 +67,9 @@ class TestExtendedRestrictionGeneration:
self.verify_restrict(i, attr, raw_token)
def test_combined(self):
- assert isinstance(parserestrict.parse_match("dev-util/diffball"), atom), "dev-util/diffball"
+ assert isinstance(
+ parserestrict.parse_match("dev-util/diffball"), atom
+ ), "dev-util/diffball"
for token in ("dev-*/util", "dev-*/util*", "dev-a/util*"):
i = parserestrict.parse_match(token)
assert isinstance(i, boolean.AndRestriction), token
@@ -93,9 +98,9 @@ class TestExtendedRestrictionGeneration:
self.verify_restrict(i[1], "package", token.split("::")[0])
for token, attr, n in (
- ('foo/*:5', 'category', 0),
- ('*/foo:5', 'package', 1),
- ):
+ ("foo/*:5", "category", 0),
+ ("*/foo:5", "package", 1),
+ ):
i = parserestrict.parse_match(token)
assert isinstance(i, boolean.AndRestriction), token
assert len(i) == 2
@@ -103,21 +108,21 @@ class TestExtendedRestrictionGeneration:
self.verify_restrict(i[1], attr, token.split(":")[0].split("/")[n])
for token, attr, n in (
- ('foo/*:5/5', 'category', 0),
- ('*/foo:5/5', 'package', 1),
- ):
+ ("foo/*:5/5", "category", 0),
+ ("*/foo:5/5", "package", 1),
+ ):
i = parserestrict.parse_match(token)
assert isinstance(i, boolean.AndRestriction), token
assert len(i) == 3
- slot, _sep, subslot = token.split(":")[1].partition('/')
+ slot, _sep, subslot = token.split(":")[1].partition("/")
assert isinstance(i[0], restricts.SlotDep), slot
assert isinstance(i[1], restricts.SubSlotDep), subslot
self.verify_restrict(i[2], attr, token.split(":")[0].split("/")[n])
for token, attr, n in (
- ("foo/*::gentoo", "category", 0),
- ("*/foo::gentoo", "package", 1),
- ):
+ ("foo/*::gentoo", "category", 0),
+ ("*/foo::gentoo", "package", 1),
+ ):
i = parserestrict.parse_match(token)
assert isinstance(i, boolean.AndRestriction), token
assert len(i) == 2
@@ -125,22 +130,23 @@ class TestExtendedRestrictionGeneration:
self.verify_restrict(i[1], attr, token.split("::")[0].split("/")[n])
for token, attr, n in (
- ('foo/*:5/5::gentoo', 'category', 0),
- ('*/foo:5/5::gentoo', 'package', 1),
- ):
+ ("foo/*:5/5::gentoo", "category", 0),
+ ("*/foo:5/5::gentoo", "package", 1),
+ ):
i = parserestrict.parse_match(token)
assert isinstance(i, boolean.AndRestriction), token
assert len(i) == 4
- token, repo_id = token.rsplit('::', 1)
+ token, repo_id = token.rsplit("::", 1)
assert isinstance(i[0], restricts.RepositoryDep), repo_id
- slot, _sep, subslot = token.split(":")[1].partition('/')
+ slot, _sep, subslot = token.split(":")[1].partition("/")
assert isinstance(i[1], restricts.SlotDep), slot
assert isinstance(i[2], restricts.SubSlotDep), subslot
self.verify_restrict(i[3], attr, token.split(":")[0].split("/")[n])
def test_atom_globbed(self):
assert isinstance(
- parserestrict.parse_match("=sys-devel/gcc-4*"), atom), "=sys-devel/gcc-4*"
+ parserestrict.parse_match("=sys-devel/gcc-4*"), atom
+ ), "=sys-devel/gcc-4*"
def test_use_atom(self):
o = parserestrict.parse_match("net-misc/openssh[-X]")
@@ -159,48 +165,53 @@ class TestExtendedRestrictionGeneration:
assert o.subslot
def test_subslot_package(self):
- token = 'boost:0/1.54'
+ token = "boost:0/1.54"
o = parserestrict.parse_match(token)
assert isinstance(o, boolean.AndRestriction), token
assert len(o) == 3
- slot, _sep, subslot = token.split(":")[1].partition('/')
+ slot, _sep, subslot = token.split(":")[1].partition("/")
assert isinstance(o[0], restricts.SlotDep), slot
assert isinstance(o[1], restricts.SubSlotDep), subslot
self.verify_restrict(o[2], "package", token.split(":")[0])
- @pytest.mark.parametrize("token", (
- "!dev-util/diffball",
- "dev-util/diffball-0.4",
- "=dev-util/*diffball-0.4*",
- "::gentoo",
- ))
+ @pytest.mark.parametrize(
+ "token",
+ (
+ "!dev-util/diffball",
+ "dev-util/diffball-0.4",
+ "=dev-util/*diffball-0.4*",
+ "::gentoo",
+ ),
+ )
def test_exceptions(self, token):
with pytest.raises(parserestrict.ParseError):
parserestrict.parse_match(token)
class TestParsePV:
-
def setup_method(self, method):
- self.repo = util.SimpleTree({
- 'spork': {
- 'foon': ('1', '2'),
- 'spork': ('1', '2'),
+ self.repo = util.SimpleTree(
+ {
+ "spork": {
+ "foon": ("1", "2"),
+ "spork": ("1", "2"),
+ },
+ "foon": {
+ "foon": ("2", "3"),
},
- 'foon': {
- 'foon': ('2', '3'),
- }})
+ }
+ )
def test_parse_pv(self):
for input, output in (
- ('spork/foon-3', 'spork/foon-3'),
- ('spork-1', 'spork/spork-1'),
- ('foon-3', 'foon/foon-3'),
- ):
+ ("spork/foon-3", "spork/foon-3"),
+ ("spork-1", "spork/spork-1"),
+ ("foon-3", "foon/foon-3"),
+ ):
assert output == parserestrict.parse_pv(self.repo, input).cpvstr
for bogus in (
- 'spork',
- 'foon-2',
- ):
+ "spork",
+ "foon-2",
+ ):
with pytest.raises(parserestrict.ParseError):
parserestrict.parse_pv(self.repo, bogus)
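
One detail of the parametrize hunks above: black explodes any call carrying a trailing comma onto one argument per line, and the two stacked @pytest.mark.parametrize decorators still combine multiplicatively. A small self-contained example of that stacking behaviour, using a trivial assertion in place of the real verification:

# Self-contained illustration: stacked parametrize decorators multiply, so
# pytest collects this test once per (raw_token, attr/sfmt) pair, 3 x 3 = 9.
import pytest


@pytest.mark.parametrize(
    ("attr", "sfmt"),
    (
        ("category", "%s/*"),
        ("package", "*/%s"),
        ("package", "%s"),
    ),
)
@pytest.mark.parametrize("raw_token", ("package", "*bsdiff", "bsdiff*"))
def test_token_combinations(raw_token, attr, sfmt):
    token = sfmt % raw_token
    assert raw_token in token  # placeholder check; each combination runs once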