A ResourceOpts is always added to callback execution by Resource.parse Fixes errors like: ERROR pyff.resource Failed handling resource: MDServiceListParser.parse.<locals>._update_entities() takes 1 positional argument but 2 were given
"master" ] - + workflow_dispatch: jobs: build: From deb524a12970727c22c148a97e0f47b9cae5d5e7 Mon Sep 17 00:00:00 2001 From: Dick Visser Date: Fri, 11 Oct 2024 16:27:54 +0200 Subject: [PATCH 03/53] fix 276 --- src/pyff/parse.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pyff/parse.py b/src/pyff/parse.py index cb0ad1bf..9162d743 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional from pydantic import BaseModel, Field +from urllib.parse import quote as urlescape from xmlsec.crypto import CertDict from pyff.constants import NS @@ -84,7 +85,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: n = 0 for fn in find_matching_files(content, self.extensions): child_opts = resource.opts.copy(update={'alias': None}) - resource.add_child("file://" + fn, child_opts) + resource.add_child("file://" + urlescape(fn), child_opts) n += 1 if n == 0: From 9879eaef6e85a82e91c716f3767789b8534f0661 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 2 Apr 2025 15:39:08 +0200 Subject: [PATCH 04/53] Never use live data in tests! will fix later.. --- src/pyff/test/test_md_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyff/test/test_md_api.py b/src/pyff/test/test_md_api.py index 471bd87b..68cd3714 100644 --- a/src/pyff/test/test_md_api.py +++ b/src/pyff/test/test_md_api.py @@ -141,7 +141,7 @@ def test_load_and_query(self): assert r.status_code == 200 data = r.json() info = data[0] - assert 'https://box-idp.nordu.net/simplesaml/module.php/saml/sp/discoresp.php' in info['discovery_responses'] + assert 'https://box-idp.nordu.net/simplesaml/module.php/saml/sp/discoResponse' in info['discovery_responses'] class PyFFAPITestResources(PipeLineTest): """ From 74b911d0b51fc6e00d666b1da05625df066e05be Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 14:30:16 +0200 Subject: [PATCH 05/53] Switch to ruff for formatting instead of black. 
ruff format $(SOURCE)
+line-length = 120 +target-version = "py37" +[tool.ruff.format] +quote-style = "preserve" + +[tool.build_sphinx] +source-dir = "docs/" +build-dir = "docs/build" +all_files = "1" + +[tool.upload_sphinx] +upload-dir = "docs/build/html" From 47be71c84a835343bbe39c5e350e6a52bfe01d5a Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 14:41:57 +0200 Subject: [PATCH 06/53] Fix Pydantic deprecation copy > model_copy --- src/pyff/builtins.py | 13 ++++++------- src/pyff/parse.py | 8 +++++--- src/pyff/samlmd.py | 27 ++++++++++++++++++--------- 3 files changed, 29 insertions(+), 19 deletions(-) diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index 8bdd3a2d..b00cbd63 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -256,7 +256,7 @@ def fork(req: Plumbing.Request, *opts): **parsecopy** Due to a hard to find bug, fork which uses deepcopy can lose some namespaces. The parsecopy argument is a workaround. - It uses a brute force serialisation and deserialisation to get around the bug. + It uses a brute force serialisation and deserialisation to get around the bug. .. 
code-block:: yaml @@ -676,7 +676,7 @@ def load(req: Plumbing.Request, *opts): url = r.pop(0) # Copy parent node opts as a starting point - child_opts = req.md.rm.opts.copy(update={"via": [], "cleanup": [], "verify": None, "alias": url}) + child_opts = req.md.rm.opts.model_copy(update={"via": [], "cleanup": [], "verify": None, "alias": url}) while len(r) > 0: elt = r.pop(0) @@ -702,7 +702,7 @@ def load(req: Plumbing.Request, *opts): child_opts.verify = elt # override anything in child_opts with what is in opts - child_opts = child_opts.copy(update=_opts) + child_opts = child_opts.model_copy(update=_opts) req.md.rm.add_child(url, child_opts) @@ -814,7 +814,7 @@ def select(req: Plumbing.Request, *opts): else: _opts['as'] = opts[i] if i + 1 < len(opts): - more_opts = opts[i + 1:] + more_opts = opts[i + 1 :] _opts.update(dict(list(zip(more_opts[::2], more_opts[1::2])))) break @@ -835,7 +835,6 @@ def select(req: Plumbing.Request, *opts): entities = resolve_entities(args, lookup_fn=req.md.store.select, dedup=dedup) if req.state.get('match', None): # TODO - allow this to be passed in via normal arguments - match = req.state['match'] if isinstance(match, six.string_types): @@ -1304,7 +1303,7 @@ def xslt(req: Plumbing.Request, *opts): if stylesheet is None: raise PipeException("xslt requires stylesheet") - params = dict((k, "\'%s\'" % v) for (k, v) in list(req.args.items())) + params = dict((k, "'%s'" % v) for (k, v) in list(req.args.items())) del params['stylesheet'] try: return root(xslt_transform(req.t, stylesheet, params)) @@ -1312,6 +1311,7 @@ def xslt(req: Plumbing.Request, *opts): log.debug(traceback.format_exc()) raise ex + @pipe def indent(req: Plumbing.Request, *opts): """ @@ -1710,7 +1710,6 @@ def finalize(req: Plumbing.Request, *opts): if name is None or 0 == len(name): name = req.state.get('url', None) if name and 'baseURL' in req.args: - try: name_url = urlparse(name) base_url = urlparse(req.args.get('baseURL')) diff --git a/src/pyff/parse.py 
b/src/pyff/parse.py index 9162d743..7737f43b 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -9,7 +9,7 @@ from pyff.constants import NS from pyff.logs import get_log -from pyff.resource import Resource,ResourceInfo +from pyff.resource import Resource, ResourceInfo from pyff.utils import find_matching_files, parse_xml, root, unicode_stream, utc_now __author__ = 'leifj' @@ -30,8 +30,10 @@ def _format_key(k: str) -> str: res = {_format_key(k): v for k, v in self.dict().items()} return res + ResourceInfo.model_rebuild() + class ParserException(Exception): def __init__(self, msg, wrapped=None, data=None): self._wraped = wrapped @@ -84,7 +86,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: info = ParserInfo(description='Directory', expiration_time='never expires') n = 0 for fn in find_matching_files(content, self.extensions): - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) resource.add_child("file://" + urlescape(fn), child_opts) n += 1 @@ -122,7 +124,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: if len(fingerprints) > 0: fp = fingerprints[0] log.debug("XRD: {} verified by {}".format(link_href, fp)) - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) resource.add_child(link_href, child_opts) resource.last_seen = utc_now().replace(microsecond=0) resource.expire_time = None diff --git a/src/pyff/samlmd.py b/src/pyff/samlmd.py index 9be47176..671f32f4 100644 --- a/src/pyff/samlmd.py +++ b/src/pyff/samlmd.py @@ -86,7 +86,10 @@ def find_merge_strategy(strategy_name): def parse_saml_metadata( - source: BytesIO, opts: ResourceOpts, base_url=None, validation_errors: Optional[Dict[str, Any]] = None, + source: BytesIO, + opts: ResourceOpts, + base_url=None, + validation_errors: Optional[Dict[str, Any]] = None, ): """Parse a piece of XML and return an EntitiesDescriptor element after 
validation. @@ -192,7 +195,10 @@ def _extra_md(_t, info, **kwargs): location = kwargs.get('location') sp_entity = sp_entities.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID)) if sp_entity is not None: - md_source = sp_entity.find("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" % (NS['md'], NS['md'], NS['ti'], NS['ti'], location)) + md_source = sp_entity.find( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" + % (NS['md'], NS['md'], NS['ti'], NS['ti'], location) + ) for e in iter_entities(_t): md_source.append(e) return etree.Element("{%s}EntitiesDescriptor" % NS['md']) @@ -205,11 +211,14 @@ def _extra_md(_t, info, **kwargs): entityID = e.get('entityID') info.entities.append(entityID) - md_source = e.find("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti'])) + md_source = e.find( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" + % (NS['md'], NS['md'], NS['ti'], NS['ti']) + ) if md_source is not None: location = md_source.attrib.get('src') if location is not None: - child_opts = resource.opts.copy(update={'alias': entityID}) + child_opts = resource.opts.model_copy(update={'alias': entityID}) r = resource.add_child(location, child_opts) kwargs = { 'entityID': entityID, @@ -311,7 +320,7 @@ def parse(self, resource: Resource, content: str) -> EidasMDParserInfo: info.scheme_territory, location, fp, args.get('country_code') ) ) - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) child_opts.verify = fp r = resource.add_child(location, child_opts) @@ -725,7 +734,6 @@ def entity_domains(entity): def entity_extended_display_i18n(entity, default_lang=None): - name_dict = lang_dict(entity.iter("{%s}OrganizationName" % NS['md']), lambda e: e.text, default_lang=default_lang) name_dict.update( lang_dict(entity.iter("{%s}OrganizationDisplayName" 
% NS['md']), lambda e: e.text, default_lang=default_lang) @@ -981,7 +989,9 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp['entityID'] = e.get('entityID', None) - md_sources = e.findall("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti'])) + md_sources = e.findall( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti']) + ) sp['extra_md'] = {} for md_source in md_sources: @@ -1041,7 +1051,6 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): def discojson_sp_attr(e): - attribute = "https://refeds.org/entity-selection-profile" b64_trustinfos = entity_attribute(e, attribute) if b64_trustinfos is None: @@ -1395,7 +1404,7 @@ def get_key(e): except AttributeError: pass except IndexError: - log.warning("Sort pipe: unable to sort entity by '%s'. " "Entity '%s' has no such value" % (sxp, eid)) + log.warning("Sort pipe: unable to sort entity by '%s'. Entity '%s' has no such value" % (sxp, eid)) except TypeError: pass From 309cef224dfe7c676bdca93c13d5b10cf2d8378a Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 15:57:10 +0200 Subject: [PATCH 07/53] Started removing pkg_resources. Formatting fixes. --- src/pyff/__init__.py | 9 ++------- src/pyff/api.py | 12 ++++++------ src/pyff/test/__init__.py | 9 +++++---- 3 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/pyff/__init__.py b/src/pyff/__init__.py index 5e45b2b6..7d54b185 100644 --- a/src/pyff/__init__.py +++ b/src/pyff/__init__.py @@ -2,11 +2,6 @@ pyFF is a SAML metadata aggregator. 
""" -import pkg_resources +import importlib -__author__ = 'Leif Johansson' -__copyright__ = "Copyright 2009-2018 SUNET and the IdentityPython Project" -__license__ = "BSD" -__maintainer__ = "leifj@sunet.se" -__status__ = "Production" -__version__ = pkg_resources.require("pyFF")[0].version +__version__ = importlib.metadata.version('pyFF') diff --git a/src/pyff/api.py b/src/pyff/api.py index 1050efbf..8b443409 100644 --- a/src/pyff/api.py +++ b/src/pyff/api.py @@ -4,7 +4,6 @@ from json import dumps from typing import Any, Dict, Generator, Iterable, List, Mapping, Optional, Tuple -import pkg_resources import pyramid.httpexceptions as exc import pytz import requests @@ -26,12 +25,13 @@ from pyff.resource import Resource from pyff.samlmd import entity_display_name from pyff.utils import b2u, dumptree, hash_id, json_serializer, utc_now +from pyff import __version__ log = get_log(__name__) class NoCache(object): - """ Dummy implementation for when caching isn't enabled """ + """Dummy implementation for when caching isn't enabled""" def __init__(self) -> None: pass @@ -70,7 +70,7 @@ def status_handler(request: Request) -> Response: if 'Validation Errors' in r.info and r.info['Validation Errors']: d[r.url] = r.info['Validation Errors'] _status = dict( - version=pkg_resources.require("pyFF")[0].version, + version=__version__, invalids=d, icon_store=dict(size=request.registry.md.icon_store.size()), jobs=[dict(id=j.id, next_run_time=j.next_run_time) for j in request.registry.scheduler.get_jobs()], @@ -163,7 +163,7 @@ def process_handler(request: Request) -> Response: _ctypes = {'xml': 'application/samlmetadata+xml;application/xml;text/xml', 'json': 'application/json'} def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional[str]]: - """ Split a path into a base component and an extension. 
""" + """Split a path into a base component and an extension.""" if x is not None: x = x.strip() @@ -214,7 +214,7 @@ def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional pfx = request.registry.aliases.get(alias, None) if pfx is None: log.debug("alias {} not found - passing to storage lookup".format(alias)) - path=alias #treat as path + path = alias # treat as path # content_negotiation_policy is one of three values: # 1. extension - current default, inspect the path and if it ends in @@ -478,7 +478,7 @@ def cors_headers(request: Request, response: Response) -> None: { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'POST,GET,DELETE,PUT,OPTIONS', - 'Access-Control-Allow-Headers': ('Origin, Content-Type, Accept, ' 'Authorization'), + 'Access-Control-Allow-Headers': ('Origin, Content-Type, Accept, Authorization'), 'Access-Control-Allow-Credentials': 'true', 'Access-Control-Max-Age': '1728000', } diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index c39abfda..51cd14ef 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -6,9 +6,8 @@ import tempfile from unittest import TestCase -import pkg_resources +import importlib.resources import six - from pyff import __version__ as pyffversion # range of ports where available ports can be found @@ -118,7 +117,6 @@ def _p(args, outf=None, ignore_exit=False): class SignerTestCase(TestCase): - datadir = None private_keyspec = None public_keyspec = None @@ -128,7 +126,10 @@ def sys_exit(self, code): @classmethod def setUpClass(cls): - cls.datadir = pkg_resources.resource_filename(__name__, 'data') + with importlib.resources.path( + __name__, 'data' + ) as context: # We just want the path for now to be compatible downstream + cls.datadir = context.as_posix() cls.private_keyspec = tempfile.NamedTemporaryFile('w').name cls.public_keyspec = tempfile.NamedTemporaryFile('w').name From dd3abf00405b7f917c0caa58f9ebb49b7fdabcde Mon Sep 17 00:00:00 2001 
From: Mikael Frykholm Date: Tue, 8 Apr 2025 15:34:48 +0200 Subject: [PATCH 08/53] Make it work with python3.9. --- pyproject.toml | 4 ++-- src/pyff/test/__init__.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a9d009aa..317d1cb1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "pyFF" version = "2.1.5" readme = "README.rst" description = "Federation Feeder" -requires-python = ">=3.7" +requires-python = ">=3.9" license = {file = "LICENSE"} authors = [ @@ -19,7 +19,7 @@ maintainers = [ [tool.ruff] # Allow lines to be as long as 120. line-length = 120 -target-version = "py37" +target-version = "py39" [tool.ruff.format] quote-style = "preserve" diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index 51cd14ef..be32c588 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -126,10 +126,10 @@ def sys_exit(self, code): @classmethod def setUpClass(cls): - with importlib.resources.path( - __name__, 'data' - ) as context: # We just want the path for now to be compatible downstream - cls.datadir = context.as_posix() + cls.datadir = importlib.resources.files( + __name__, + ).joinpath('data') + cls.private_keyspec = tempfile.NamedTemporaryFile('w').name cls.public_keyspec = tempfile.NamedTemporaryFile('w').name From 532b51fb22fa9b9053ab0e592b4a706d1e79c923 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Tue, 8 Apr 2025 16:11:10 +0200 Subject: [PATCH 09/53] ubuntu-20.04 is deprecated by github runners. 
Without the deep copy all opts were added to the origin, e.g. causing lambdas to be run on each "entity" instead of just the selected child.
Initial test of the MDSL stuff
+RlItRElOVU0tZUlEQVMtbm9kZS1wcmVwcm9kMB4XDTIyMDkyMjE0NDgwNloXDTMyMDkxOTE0NDgw +NlowTDELMAkGA1UEBhMCRlIxJDAiBgNVBAoMG0ZSLURJTlVNLWVJREFTLW5vZGUtcHJlcHJvZDEX +MBUGA1UEAwwOZUlEQVMtbWV0YWRhdGEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDO +B8ffvjFTk3rBzcyBiwFFMDlYPxIlj+i1BLdLvvzSJdUHKZhegOhzNuRVCeJfoyvml9vtlwK5tfzD +iY4znZsFO/qIB1wROxrPVRq8AEw0LiPCfP1Ie1rvK2Ddaw0wUEI6wFn4ViAStP5wI3/yaOqN1cFf +JceXUbvgVfjRS5ETRVZK7UER5vMeMIPn4ESkf86d+GB0rvZoipNOyymXfgs9RU/dvjd40lrRs5rZ +Nc/l4dOrFUpxHXq/AfLsWKjmmOGx959qMmYtsK9KcQdEAQs2L/adKM+yLJL0JyHxyNnWuaUJDBwW +xV5PK/hWnkLkebkgpeB5loF7f7Ra2MnB1uzZlaAa69tSILjycdw0cOcY5+hnH6QFq74JDL4WPDR8 +FzCdcN+TY7/HvG6cTR4lPdW/iY3eZQycQqkEccQiFITAeewwVwZbQddbWhHxdtJE8P73QoZl3iSp +/TzP33Kr3im+IEX1o2PV6Ur36/cDNmc9WQWsRKTeydoIY+AOmV/odt0CAwEAAaMPMA0wCwYDVR0P +BAQDAgeAMA0GCSqGSIb3DQEBCwUAA4IBgQB2FpL/Xen8z318X2To+3nEdRthc/9OwPF9ZNzftcce +QP4Q+lxuwKFacklt5VoXURy/JMsgPtSHhar5kiFZTm2SMXVSEqVDj4UzAobVnUutiZPsFoyPWr/c +iEsWu0VSthsI31AUOvSTisy0w81rPKjNkRuCU5V+AS1Z5rBqMlkwOEiUxUMci+pZQ4VhH+mqg1oH +1rMuJ1gysYf/zMjSWGlraSbcHApIMBAjs24uSTH/O6io+9k9jLhZGZv9LpssoeSPVz3wJY6h1LHT +D5OHZYDtelO4X5ZCVRFx5kCaUpKtb4mvo72oWN9KlGvrqp6SkUF1ogovTl16sEf2FZHoN4r0+5ZN +/vS/plTsCGbe46QPGuj+FO5yN6KclM+eonmBp0JSyoABfYN1T28a4dRwE8CPWCOgJmzxuwqSOH+P +XlUbPftbx5nGsE3mRBAU1Jga2S4S+zJPHirc2lfZGh6ggdYG9kkqp7X7kiTyPyPuWU644+YaCOwv +x7804cuLUcQ8VSQ= + + + + + + + + MIIECTCCAnGgAwIBAgIBATANBgkqhkiG9w0BAQsFADAzMQswCQYDVQQGEwJGUjEkMCIGA1UECgwb +RlItRElOVU0tZUlEQVMtbm9kZS1wcmVwcm9kMB4XDTIyMDkyMjE0NDgwNloXDTMyMDkxOTE0NDgw +NlowTDELMAkGA1UEBhMCRlIxJDAiBgNVBAoMG0ZSLURJTlVNLWVJREFTLW5vZGUtcHJlcHJvZDEX +MBUGA1UEAwwOZUlEQVMtbWV0YWRhdGEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDO +B8ffvjFTk3rBzcyBiwFFMDlYPxIlj+i1BLdLvvzSJdUHKZhegOhzNuRVCeJfoyvml9vtlwK5tfzD +iY4znZsFO/qIB1wROxrPVRq8AEw0LiPCfP1Ie1rvK2Ddaw0wUEI6wFn4ViAStP5wI3/yaOqN1cFf +JceXUbvgVfjRS5ETRVZK7UER5vMeMIPn4ESkf86d+GB0rvZoipNOyymXfgs9RU/dvjd40lrRs5rZ +Nc/l4dOrFUpxHXq/AfLsWKjmmOGx959qMmYtsK9KcQdEAQs2L/adKM+yLJL0JyHxyNnWuaUJDBwW 
+xV5PK/hWnkLkebkgpeB5loF7f7Ra2MnB1uzZlaAa69tSILjycdw0cOcY5+hnH6QFq74JDL4WPDR8 +FzCdcN+TY7/HvG6cTR4lPdW/iY3eZQycQqkEccQiFITAeewwVwZbQddbWhHxdtJE8P73QoZl3iSp +/TzP33Kr3im+IEX1o2PV6Ur36/cDNmc9WQWsRKTeydoIY+AOmV/odt0CAwEAAaMPMA0wCwYDVR0P +BAQDAgeAMA0GCSqGSIb3DQEBCwUAA4IBgQB2FpL/Xen8z318X2To+3nEdRthc/9OwPF9ZNzftcce +QP4Q+lxuwKFacklt5VoXURy/JMsgPtSHhar5kiFZTm2SMXVSEqVDj4UzAobVnUutiZPsFoyPWr/c +iEsWu0VSthsI31AUOvSTisy0w81rPKjNkRuCU5V+AS1Z5rBqMlkwOEiUxUMci+pZQ4VhH+mqg1oH +1rMuJ1gysYf/zMjSWGlraSbcHApIMBAjs24uSTH/O6io+9k9jLhZGZv9LpssoeSPVz3wJY6h1LHT +D5OHZYDtelO4X5ZCVRFx5kCaUpKtb4mvo72oWN9KlGvrqp6SkUF1ogovTl16sEf2FZHoN4r0+5ZN +/vS/plTsCGbe46QPGuj+FO5yN6KclM+eonmBp0JSyoABfYN1T28a4dRwE8CPWCOgJmzxuwqSOH+P +XlUbPftbx5nGsE3mRBAU1Jga2S4S+zJPHirc2lfZGh6ggdYG9kkqp7X7kiTyPyPuWU644+YaCOwv +x7804cuLUcQ8VSQ= + + + + diff --git a/src/pyff/test/data/eidas/countries/GR.xml b/src/pyff/test/data/eidas/countries/GR.xml new file mode 100644 index 00000000..a8e1626e --- /dev/null +++ b/src/pyff/test/data/eidas/countries/GR.xml @@ -0,0 +1,63 @@ + + + + + + + MIIE5zCCA0+gAwIBAgIULMraEI5oF6p3T3LH0x/21gVFOvcwDQYJKoZIhvcNAQELBQAwgYkxCzAJ +BgNVBAYTAkdSMScwJQYDVQQKDB5NaW5pc3RyeSBvZiBEaWdpdGFsIEdvdmVybmFuY2UxPTA7BgNV +BAsMNGVJREFTIE5vZGUgTWV0YWRhdGEgU2lnbmluZyB0ZXN0aW5nIGVudmlyb25tZW50IDIwMjMx +EjAQBgNVBAMMCUNvbm5lY3RvcjAeFw0yMzA5MjAxODIxMDlaFw0zMzA5MTcxODIxMDlaMIGJMQsw +CQYDVQQGEwJHUjEnMCUGA1UECgweTWluaXN0cnkgb2YgRGlnaXRhbCBHb3Zlcm5hbmNlMT0wOwYD +VQQLDDRlSURBUyBOb2RlIE1ldGFkYXRhIFNpZ25pbmcgdGVzdGluZyBlbnZpcm9ubWVudCAyMDIz +MRIwEAYDVQQDDAlDb25uZWN0b3IwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC0PU5P +pibubglyAQJDW0Q+iWiKXnfoBV6K5yFvzzE91UT/lE2UvX3haqqj8PwsfsJsPG7M805ky2UPY5nO +U7VcnaQ3MWzz35kjNI1fvlXkR06YEkhBkr64WJ/7JTxf0wLO83ZRqBcvMqlDCqf8bPHXdjar89sS +e866Wd2rthi7Tu6Ah6buF96lXpS2d9vwnf1S9mhOmtykQ53Vs5zgqMVOaHfKwCThefoYOCyzuqNP +S1G0dQaRbXkIDpbniT96aap8Ksf0/Yx5E+o7BnvbnEsCJPHTYudTKqN7ljD8+Q4M2UNpMT05qIR7 +Zd2KfpufEV8o5YJzMCIftZH6sndBYbpDYkqM+Cd6qy84HBy2/UWgZDt2iQ1eML/Szk59wSm21szd 
+Q4mshSv4rTwHuTtHg+ZeiCzwJGgvnQen3WR+TGm0YyHijfI9DqIvzbM8yXF8Abp00l3sA3Grm7wo +E0YW+EbkiiJGUFrs88+P+cpcXeNkLltXSvhVosgF0JLcgoLl62UCAwEAAaNFMEMwEgYDVR0TAQH/ +BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCB4AwHQYDVR0OBBYEFDu6OnhYjoq3nx9jmT8vwQ5tjFWD +MA0GCSqGSIb3DQEBCwUAA4IBgQARw/klO118DF5BNunXqsiExeQSQqb+n/qcBXDYLypQJomO/JAs +VEg4FEn05Z26QxyacwWxVqWhInVLFVhXgBJ5vnRTwK7RUkJCDQ+9u00oHd3JeOvygFjc7DoC5SWJ +q9hnN+pX1qK9yI+R9hs7cKPKpQ+MaNPWl0yVQCE53GZrWk+Skgl2T5/s0rY+LTYUx5d21kxq1tKE +cpzy9di+33w05uv7ozh7xcbL91wj7zbdDSibxpXmFdlY6C8BH4DOI0kRoYjWKpbuQnPrEbhQwZjI +V9S5OcEaPyiYkVB9n4Z0z3AMTD5G4X52gKmdSh/iTuZuYP4Vj1hbgNYpMHI7B5fnXisa26e2DzT4 +OFEots8gDlsf26WBEHQcJnrNzqCPTd4Zyl5Jpzg+Vo/dwknIESuZYn2l+Iu1GJCMIV+RrI/LoPD4 +FRGrw9YbcFOqAgmSdqxRj6fSb2W5WanIvc7OAT0hKQjPu1jYHDGIeXipKf1rBLjpRF/xtzU/xb4m +JDmr+yA= + + + + + + + + MIIE7TCCA1WgAwIBAgIUTo2pg82nNFjKFzNyHnCbQu1rEVYwDQYJKoZIhvcNAQELBQAwgYwxCzAJ +BgNVBAYTAkdSMScwJQYDVQQKDB5NaW5pc3RyeSBvZiBEaWdpdGFsIEdvdmVybmFuY2UxPTA7BgNV +BAsMNGVJREFTIE5vZGUgTWV0YWRhdGEgU2lnbmluZyB0ZXN0aW5nIGVudmlyb25tZW50IDIwMjMx +FTATBgNVBAMMDFByb3h5U2VydmljZTAeFw0yMzA5MjAxODM5MDZaFw0zMzA5MTcxODM5MDZaMIGM +MQswCQYDVQQGEwJHUjEnMCUGA1UECgweTWluaXN0cnkgb2YgRGlnaXRhbCBHb3Zlcm5hbmNlMT0w +OwYDVQQLDDRlSURBUyBOb2RlIE1ldGFkYXRhIFNpZ25pbmcgdGVzdGluZyBlbnZpcm9ubWVudCAy +MDIzMRUwEwYDVQQDDAxQcm94eVNlcnZpY2UwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIB +gQCqNGRL80G3XrF5wgtJIOCglAg8361zga2Hup8G+w6eQdP5bUyX9JZskTr9IUNeajUw7sRGN9GC +iflAILmzHqySnspOZZer4bUaOEKhRE0RZnhWoCyeZgdp1j9bwe2uLgRJtLQWpeGq3kEzCoSqul70 +iLegKd85f8i0S5ZgzdpjSBJcetGwQxV8bw1+3IT4/OXrL467Z2tBvPE1AClf0ETkw9y5vfI1O+Vr +OHJg8ywIKUgLdfgpCpjHGzljOVlA1ZbCplPvKOOkjRKx7BWAFwFqUbSLYjVDrhIHkCv+EsGeHLK4 +aLmxAVVpwj5qhlnxJ7vweKUEorw7GUGHhAmiM9bem5Wc3jakt06Hd8vA6/kn7Yr17feZtaBfWHAP +HoH0nvGLHk6+WU3W3/i89KLnYH+JsGfY8vSOQesaavmZy6WTEmXk6AkDrUocdC0IrMhq8duOiN3u +KrzN9t0vaRb6KMr35x7l9Sq6hxyiIjfZW+qB8434HmxKyZODCcXEncpNK8MCAwEAAaNFMEMwEgYD +VR0TAQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCB4AwHQYDVR0OBBYEFM6pVXDExVEGIVCJbVKV 
+/S9m7zFtMA0GCSqGSIb3DQEBCwUAA4IBgQCmADqhrXNb4iOsSOkHBghndhsT/lowKPtCicLrwfmm +twQtwA/H1JILe11r97LxIuDmFi0wdSIwx+E0ioA3BF4kcddXrYDimQD2LDQZIWFYHaMhAII89Kfv +VXnn9Ox048cJHsKrteMchCOKw1xxZYsOKr1dFPiQQOX+mP/S1aBnj+7Sr4GuTSkWXC4OiO+BaZQW +4mWe83DsngdUjHmCDrvnBT3xzUbJx5hEky2lnU1ZFZtusH6v6VElQ1KPgDBov1dTktB+r44v47DO +WZ7GUkwvoOVS0lpl7nCpD9QBBch0JmAyPI9RvVRD5vaPghD+Jv5JEiQtMJwcE2l1GYLcwo5Q1BOu +FsLJQEEmD7PUm0ZisV7GTQbRqh2YcLSHMrXOMvbvBlNdEuTDrdMvKFK/FMGh+4DHh/J3+hklGw+e +X+U++5GGUbOS6nZtDwmW/KBYMSnanWyWJ1cnhos7A2JkL429B6uhZsPIHAQn26LBz8yGx3vxze8D +wBm+w1VVhk19CHc= + + + + diff --git a/src/pyff/test/data/eidas/eidas.xml b/src/pyff/test/data/eidas/eidas.xml new file mode 100644 index 00000000..f9d28c92 --- /dev/null +++ b/src/pyff/test/data/eidas/eidas.xml @@ -0,0 +1,18 @@ + + + + Swedish E-Identification Board + urn:se:elegnamnden:eidas:mdlist:local + SE + + + + + + https://qa.md.eidas.swedenconnect.se/mdservicelist-aggregate.xml + + diff --git a/src/pyff/test/test_mdsl.py b/src/pyff/test/test_mdsl.py new file mode 100644 index 00000000..3ec44d75 --- /dev/null +++ b/src/pyff/test/test_mdsl.py @@ -0,0 +1,124 @@ +import json +import os +import shutil +import sys +import tempfile + +import pytest +import six +import yaml +from mako.lookup import TemplateLookup +from mock import patch + +from pyff import builtins +from pyff.exceptions import MetadataException +from pyff.parse import ParserException +from pyff.pipes import PipeException, Plumbing, plumbing +from pyff.repo import MDRepository +from pyff.resource import ResourceException +from pyff.test import ExitException, SignerTestCase +from pyff.utils import hash_id, parse_xml, resource_filename, root, dumptree +from pyff.constants import NS + + +__author__ = 'leifj' + +# The 'builtins' import appears unused to static analysers, ensure it isn't removed +assert builtins is not None + + +class PipeLineTest(SignerTestCase): + @pytest.fixture(autouse=True) + def _capsys(self, capsys): + self._capsys = capsys + + @property + def 
captured_stdout(self) -> str: + """ Return anything written to STDOUT during this test """ + out, _err = self._capsys.readouterr() # type: ignore + return out + + @property + def captured_stderr(self) -> str: + """ Return anything written to STDERR during this test """ + _out, err = self._capsys.readouterr() # type: ignore + return err + + @pytest.fixture(autouse=True) + def _caplog(self, caplog): + """ Return anything written to the logging system during this test """ + self._caplog = caplog + + @property + def captured_log_text(self) -> str: + return self._caplog.text # type: ignore + + def run_pipeline(self, pl_name, ctx=None, md=None): + if ctx is None: + ctx = dict() + + if md is None: + md = MDRepository() + + templates = TemplateLookup(directories=[os.path.join(self.datadir, 'simple-pipeline')]) + pipeline = tempfile.NamedTemporaryFile('w').name + template = templates.get_template(pl_name) + with open(pipeline, "w") as fd: + fd.write(template.render(ctx=ctx)) + res = plumbing(pipeline).process(md, state={'batch': True, 'stats': {}}) + os.unlink(pipeline) + return res, md, ctx + + def exec_pipeline(self, pstr): + md = MDRepository() + p = yaml.safe_load(six.StringIO(pstr)) + print("\n{}".format(yaml.dump(p))) + pl = Plumbing(p, pid="test") + res = pl.process(md, state={'batch': True, 'stats': {}}) + return res, md + + @classmethod + def setUpClass(cls): + SignerTestCase.setUpClass() + + def setUp(self): + SignerTestCase.setUpClass() + self.templates = TemplateLookup(directories=[os.path.join(self.datadir, 'simple-pipeline')]) + + +class ParseTest(PipeLineTest): + def test_eidas_country(self): + tmpfile = tempfile.NamedTemporaryFile('w').name + try: + self.exec_pipeline(f""" +- when eidas: + - xslt: + stylesheet: eidas-cleanup.xsl + - break + +- load: + - file://{self.datadir}/eidas/eidas.xml cleanup eidas +- select +- publish: {tmpfile} +""" + ) + xml = parse_xml(tmpfile) + assert xml is not None + entityID = 
"https://pre.eidas.gov.gr/EidasNode/ServiceMetadata" + with_hide_from_discovery = xml.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID)) + assert with_hide_from_discovery is not None + search = "{%s}Extensions/{%s}EntityAttributes/{%s}Attribute[@Name='%s']" % (NS['md'], NS['mdattr'], NS['saml'],'http://macedir.org/entity-category') + ecs = with_hide_from_discovery.find(search) + assert ecs is not None + entityID2 = "https://eidas.pp.dev-franceconnect.fr/EidasNode/ServiceMetadata" + without_hide_from_discovery = xml.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID2)) + ecs2 = without_hide_from_discovery.find(search) + assert ecs2 is None + except IOError: + pass + finally: + try: + #os.unlink(tmpfile) + pass + except (IOError, OSError): + pass From a637a2b0a252fbe80c31627e08f53bab77a753d0 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 2 Apr 2025 14:55:41 +0200 Subject: [PATCH 12/53] Update python-package.yml --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e84c09a2..20f9e0b4 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -8,7 +8,7 @@ on: branches: [ "master" ] pull_request: branches: [ "master" ] - + workflow_dispatch: jobs: build: From 2a78c77ff2ad61bfc302d30758efcb242fdf6d62 Mon Sep 17 00:00:00 2001 From: Dick Visser Date: Fri, 11 Oct 2024 16:27:54 +0200 Subject: [PATCH 13/53] fix 276 --- src/pyff/parse.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pyff/parse.py b/src/pyff/parse.py index cb0ad1bf..9162d743 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional from pydantic import BaseModel, Field +from urllib.parse import quote as urlescape from xmlsec.crypto import CertDict from pyff.constants import NS @@ -84,7 +85,7 @@ def parse(self, 
resource: Resource, content: str) -> ParserInfo: n = 0 for fn in find_matching_files(content, self.extensions): child_opts = resource.opts.copy(update={'alias': None}) - resource.add_child("file://" + fn, child_opts) + resource.add_child("file://" + urlescape(fn), child_opts) n += 1 if n == 0: From b7fc40a8afbbd78ff39836be5d95a412570c50a2 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 2 Apr 2025 15:39:08 +0200 Subject: [PATCH 14/53] Never use live data in tests! will fix later.. --- src/pyff/test/test_md_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyff/test/test_md_api.py b/src/pyff/test/test_md_api.py index 471bd87b..68cd3714 100644 --- a/src/pyff/test/test_md_api.py +++ b/src/pyff/test/test_md_api.py @@ -141,7 +141,7 @@ def test_load_and_query(self): assert r.status_code == 200 data = r.json() info = data[0] - assert 'https://box-idp.nordu.net/simplesaml/module.php/saml/sp/discoresp.php' in info['discovery_responses'] + assert 'https://box-idp.nordu.net/simplesaml/module.php/saml/sp/discoResponse' in info['discovery_responses'] class PyFFAPITestResources(PipeLineTest): """ From 0cc52a39145fe697eda055551682fe429e99778d Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 14:30:16 +0200 Subject: [PATCH 15/53] Switch to ruff for formatting instead of black. This helps Language Servers keep the formatting sane. 
--- Makefile | 3 +-- pyproject.toml | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 pyproject.toml diff --git a/Makefile b/Makefile index d1b20d9f..3ab85c56 100644 --- a/Makefile +++ b/Makefile @@ -15,8 +15,7 @@ test_coverage: coverage combine reformat: - isort --line-width 120 --atomic --project eduid_scimapi --recursive $(SOURCE) - black --line-length 120 --target-version py37 --skip-string-normalization $(SOURCE) + ruff --format $(SOURCE) typecheck: mypy --ignore-missing-imports $(SOURCE) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..a9d009aa --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,32 @@ +[project] +name = "pyFF" +version = "2.1.5" +readme = "README.rst" +description = "Federation Feeder" +requires-python = ">=3.7" +license = {file = "LICENSE"} + +authors = [ + {name = "Leif Johansson", email = "leifj@sunet.se"}, + {name = "Fredrik Thulin", email = "redrik@thulin.net"}, + {name = "Enrique Pérez Arnaud"}, + {name = "Mikael Frykholm", email = "mifr@sunet.se"}, +] +maintainers = [ + {name = "Mikael Frykholm", email = "mifr@sunet.se"} +] + +[tool.ruff] +# Allow lines to be as long as 120. 
+line-length = 120 +target-version = "py37" +[tool.ruff.format] +quote-style = "preserve" + +[tool.build_sphinx] +source-dir = "docs/" +build-dir = "docs/build" +all_files = "1" + +[tool.upload_sphinx] +upload-dir = "docs/build/html" From 5f4f763f4846e5e8c436f317d467c651b8ded865 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 14:41:57 +0200 Subject: [PATCH 16/53] Fix Pydantic deprecation copy > model_copy --- src/pyff/builtins.py | 13 ++++++------- src/pyff/parse.py | 8 +++++--- src/pyff/samlmd.py | 23 ++++++++++++++++------- 3 files changed, 27 insertions(+), 17 deletions(-) diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index 8bdd3a2d..b00cbd63 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -256,7 +256,7 @@ def fork(req: Plumbing.Request, *opts): **parsecopy** Due to a hard to find bug, fork which uses deepcopy can lose some namespaces. The parsecopy argument is a workaround. - It uses a brute force serialisation and deserialisation to get around the bug. + It uses a brute force serialisation and deserialisation to get around the bug. .. 
code-block:: yaml @@ -676,7 +676,7 @@ def load(req: Plumbing.Request, *opts): url = r.pop(0) # Copy parent node opts as a starting point - child_opts = req.md.rm.opts.copy(update={"via": [], "cleanup": [], "verify": None, "alias": url}) + child_opts = req.md.rm.opts.model_copy(update={"via": [], "cleanup": [], "verify": None, "alias": url}) while len(r) > 0: elt = r.pop(0) @@ -702,7 +702,7 @@ def load(req: Plumbing.Request, *opts): child_opts.verify = elt # override anything in child_opts with what is in opts - child_opts = child_opts.copy(update=_opts) + child_opts = child_opts.model_copy(update=_opts) req.md.rm.add_child(url, child_opts) @@ -814,7 +814,7 @@ def select(req: Plumbing.Request, *opts): else: _opts['as'] = opts[i] if i + 1 < len(opts): - more_opts = opts[i + 1:] + more_opts = opts[i + 1 :] _opts.update(dict(list(zip(more_opts[::2], more_opts[1::2])))) break @@ -835,7 +835,6 @@ def select(req: Plumbing.Request, *opts): entities = resolve_entities(args, lookup_fn=req.md.store.select, dedup=dedup) if req.state.get('match', None): # TODO - allow this to be passed in via normal arguments - match = req.state['match'] if isinstance(match, six.string_types): @@ -1304,7 +1303,7 @@ def xslt(req: Plumbing.Request, *opts): if stylesheet is None: raise PipeException("xslt requires stylesheet") - params = dict((k, "\'%s\'" % v) for (k, v) in list(req.args.items())) + params = dict((k, "'%s'" % v) for (k, v) in list(req.args.items())) del params['stylesheet'] try: return root(xslt_transform(req.t, stylesheet, params)) @@ -1312,6 +1311,7 @@ def xslt(req: Plumbing.Request, *opts): log.debug(traceback.format_exc()) raise ex + @pipe def indent(req: Plumbing.Request, *opts): """ @@ -1710,7 +1710,6 @@ def finalize(req: Plumbing.Request, *opts): if name is None or 0 == len(name): name = req.state.get('url', None) if name and 'baseURL' in req.args: - try: name_url = urlparse(name) base_url = urlparse(req.args.get('baseURL')) diff --git a/src/pyff/parse.py 
b/src/pyff/parse.py index 9162d743..7737f43b 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -9,7 +9,7 @@ from pyff.constants import NS from pyff.logs import get_log -from pyff.resource import Resource,ResourceInfo +from pyff.resource import Resource, ResourceInfo from pyff.utils import find_matching_files, parse_xml, root, unicode_stream, utc_now __author__ = 'leifj' @@ -30,8 +30,10 @@ def _format_key(k: str) -> str: res = {_format_key(k): v for k, v in self.dict().items()} return res + ResourceInfo.model_rebuild() + class ParserException(Exception): def __init__(self, msg, wrapped=None, data=None): self._wraped = wrapped @@ -84,7 +86,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: info = ParserInfo(description='Directory', expiration_time='never expires') n = 0 for fn in find_matching_files(content, self.extensions): - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) resource.add_child("file://" + urlescape(fn), child_opts) n += 1 @@ -122,7 +124,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: if len(fingerprints) > 0: fp = fingerprints[0] log.debug("XRD: {} verified by {}".format(link_href, fp)) - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) resource.add_child(link_href, child_opts) resource.last_seen = utc_now().replace(microsecond=0) resource.expire_time = None diff --git a/src/pyff/samlmd.py b/src/pyff/samlmd.py index ddddf42f..20363cd4 100644 --- a/src/pyff/samlmd.py +++ b/src/pyff/samlmd.py @@ -86,7 +86,10 @@ def find_merge_strategy(strategy_name): def parse_saml_metadata( - source: BytesIO, opts: ResourceOpts, base_url=None, validation_errors: Optional[Dict[str, Any]] = None, + source: BytesIO, + opts: ResourceOpts, + base_url=None, + validation_errors: Optional[Dict[str, Any]] = None, ): """Parse a piece of XML and return an EntitiesDescriptor element after 
validation. @@ -192,7 +195,10 @@ def _extra_md(_t, resource_opts, **kwargs): location = kwargs.get('location') sp_entity = sp_entities.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID)) if sp_entity is not None: - md_source = sp_entity.find("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" % (NS['md'], NS['md'], NS['ti'], NS['ti'], location)) + md_source = sp_entity.find( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" + % (NS['md'], NS['md'], NS['ti'], NS['ti'], location) + ) for e in iter_entities(_t): md_source.append(e) return etree.Element("{%s}EntitiesDescriptor" % NS['md']) @@ -205,7 +211,10 @@ def _extra_md(_t, resource_opts, **kwargs): entityID = e.get('entityID') info.entities.append(entityID) - md_source = e.find("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti'])) + md_source = e.find( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" + % (NS['md'], NS['md'], NS['ti'], NS['ti']) + ) if md_source is not None: location = md_source.attrib.get('src') if location is not None: @@ -725,7 +734,6 @@ def entity_domains(entity): def entity_extended_display_i18n(entity, default_lang=None): - name_dict = lang_dict(entity.iter("{%s}OrganizationName" % NS['md']), lambda e: e.text, default_lang=default_lang) name_dict.update( lang_dict(entity.iter("{%s}OrganizationDisplayName" % NS['md']), lambda e: e.text, default_lang=default_lang) @@ -981,7 +989,9 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp['entityID'] = e.get('entityID', None) - md_sources = e.findall("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti'])) + md_sources = e.findall( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti']) + ) sp['extra_md'] = {} for md_source in md_sources: @@ -1041,7 +1051,6 
@@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): def discojson_sp_attr(e): - attribute = "https://refeds.org/entity-selection-profile" b64_trustinfos = entity_attribute(e, attribute) if b64_trustinfos is None: @@ -1395,7 +1404,7 @@ def get_key(e): except AttributeError: pass except IndexError: - log.warning("Sort pipe: unable to sort entity by '%s'. " "Entity '%s' has no such value" % (sxp, eid)) + log.warning("Sort pipe: unable to sort entity by '%s'. Entity '%s' has no such value" % (sxp, eid)) except TypeError: pass From 2254a585236d7aa3bf3384118d4704fcd8182d6a Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 15:57:10 +0200 Subject: [PATCH 17/53] Started removing pkg_resources. Formatting fixes. --- src/pyff/__init__.py | 9 ++------- src/pyff/api.py | 12 ++++++------ src/pyff/test/__init__.py | 9 +++++---- 3 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/pyff/__init__.py b/src/pyff/__init__.py index 5e45b2b6..7d54b185 100644 --- a/src/pyff/__init__.py +++ b/src/pyff/__init__.py @@ -2,11 +2,6 @@ pyFF is a SAML metadata aggregator. 
""" -import pkg_resources +import importlib -__author__ = 'Leif Johansson' -__copyright__ = "Copyright 2009-2018 SUNET and the IdentityPython Project" -__license__ = "BSD" -__maintainer__ = "leifj@sunet.se" -__status__ = "Production" -__version__ = pkg_resources.require("pyFF")[0].version +__version__ = importlib.metadata.version('pyFF') diff --git a/src/pyff/api.py b/src/pyff/api.py index 1050efbf..8b443409 100644 --- a/src/pyff/api.py +++ b/src/pyff/api.py @@ -4,7 +4,6 @@ from json import dumps from typing import Any, Dict, Generator, Iterable, List, Mapping, Optional, Tuple -import pkg_resources import pyramid.httpexceptions as exc import pytz import requests @@ -26,12 +25,13 @@ from pyff.resource import Resource from pyff.samlmd import entity_display_name from pyff.utils import b2u, dumptree, hash_id, json_serializer, utc_now +from pyff import __version__ log = get_log(__name__) class NoCache(object): - """ Dummy implementation for when caching isn't enabled """ + """Dummy implementation for when caching isn't enabled""" def __init__(self) -> None: pass @@ -70,7 +70,7 @@ def status_handler(request: Request) -> Response: if 'Validation Errors' in r.info and r.info['Validation Errors']: d[r.url] = r.info['Validation Errors'] _status = dict( - version=pkg_resources.require("pyFF")[0].version, + version=__version__, invalids=d, icon_store=dict(size=request.registry.md.icon_store.size()), jobs=[dict(id=j.id, next_run_time=j.next_run_time) for j in request.registry.scheduler.get_jobs()], @@ -163,7 +163,7 @@ def process_handler(request: Request) -> Response: _ctypes = {'xml': 'application/samlmetadata+xml;application/xml;text/xml', 'json': 'application/json'} def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional[str]]: - """ Split a path into a base component and an extension. 
""" + """Split a path into a base component and an extension.""" if x is not None: x = x.strip() @@ -214,7 +214,7 @@ def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional pfx = request.registry.aliases.get(alias, None) if pfx is None: log.debug("alias {} not found - passing to storage lookup".format(alias)) - path=alias #treat as path + path = alias # treat as path # content_negotiation_policy is one of three values: # 1. extension - current default, inspect the path and if it ends in @@ -478,7 +478,7 @@ def cors_headers(request: Request, response: Response) -> None: { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'POST,GET,DELETE,PUT,OPTIONS', - 'Access-Control-Allow-Headers': ('Origin, Content-Type, Accept, ' 'Authorization'), + 'Access-Control-Allow-Headers': ('Origin, Content-Type, Accept, Authorization'), 'Access-Control-Allow-Credentials': 'true', 'Access-Control-Max-Age': '1728000', } diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index c39abfda..51cd14ef 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -6,9 +6,8 @@ import tempfile from unittest import TestCase -import pkg_resources +import importlib.resources import six - from pyff import __version__ as pyffversion # range of ports where available ports can be found @@ -118,7 +117,6 @@ def _p(args, outf=None, ignore_exit=False): class SignerTestCase(TestCase): - datadir = None private_keyspec = None public_keyspec = None @@ -128,7 +126,10 @@ def sys_exit(self, code): @classmethod def setUpClass(cls): - cls.datadir = pkg_resources.resource_filename(__name__, 'data') + with importlib.resources.path( + __name__, 'data' + ) as context: # We just want the path for now to be compatible downstream + cls.datadir = context.as_posix() cls.private_keyspec = tempfile.NamedTemporaryFile('w').name cls.public_keyspec = tempfile.NamedTemporaryFile('w').name From d45e7d8f6a112dce2ec8aa62b5a1abfd1bb09ce0 Mon Sep 17 00:00:00 2001 
From: Mikael Frykholm Date: Tue, 8 Apr 2025 15:34:48 +0200 Subject: [PATCH 18/53] Make it work with python3.9. --- pyproject.toml | 4 ++-- src/pyff/test/__init__.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a9d009aa..317d1cb1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "pyFF" version = "2.1.5" readme = "README.rst" description = "Federation Feeder" -requires-python = ">=3.7" +requires-python = ">=3.9" license = {file = "LICENSE"} authors = [ @@ -19,7 +19,7 @@ maintainers = [ [tool.ruff] # Allow lines to be as long as 120. line-length = 120 -target-version = "py37" +target-version = "py39" [tool.ruff.format] quote-style = "preserve" diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index 51cd14ef..be32c588 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -126,10 +126,10 @@ def sys_exit(self, code): @classmethod def setUpClass(cls): - with importlib.resources.path( - __name__, 'data' - ) as context: # We just want the path for now to be compatible downstream - cls.datadir = context.as_posix() + cls.datadir = importlib.resources.files( + __name__, + ).joinpath('data') + cls.private_keyspec = tempfile.NamedTemporaryFile('w').name cls.public_keyspec = tempfile.NamedTemporaryFile('w').name From 64c0b01565ed9d6be4ffa3f5214e6ce03052c78c Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Tue, 8 Apr 2025 16:11:10 +0200 Subject: [PATCH 19/53] ubuntu-20.04 is deprecated by github runners. 
--- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 20f9e0b4..aa18e2d9 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: strategy: fail-fast: false matrix: - os: ["ubuntu-24.04", "ubuntu-22.04", "ubuntu-20.04"] + os: ["ubuntu-24.04", "ubuntu-22.04"] python: ["3.9", "3.10", "3.11", "3.12"] runs-on: ${{ matrix.os }} steps: From 05ad3ec5829024cc04678e61b34dbc673051c0f9 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Thu, 17 Apr 2025 08:11:54 +0200 Subject: [PATCH 20/53] pyproject.toml has precedence over setup.py so we need these here as well. --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 317d1cb1..5e560500 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,10 @@ authors = [ maintainers = [ {name = "Mikael Frykholm", email = "mifr@sunet.se"} ] +[project.scripts] +pyff = "pyff.md:main" +pyffd = "pyff.mdq:main" +samldiff = "pyff.tools:difftool" [tool.ruff] # Allow lines to be as long as 120. 
From c5f7641d287b233402892a93292ce5b02f0c6ebc Mon Sep 17 00:00:00 2001 From: snyk-bot Date: Fri, 2 May 2025 23:21:53 +0000 Subject: [PATCH 21/53] fix: requirements.txt to reduce vulnerabilities The following vulnerabilities are fixed by pinning transitive dependencies: - https://snyk.io/vuln/SNYK-PYTHON-SETUPTOOLS-9964606 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5cdc6d04..00a18c72 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,5 +23,5 @@ whoosh wsgi-intercept xmldiff str2bool -setuptools>=70.0.0 # not directly required, pinned by Snyk to avoid a vulnerability +setuptools>=78.1.1 # not directly required, pinned by Snyk to avoid a vulnerability zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability \ No newline at end of file From 40e918160209964b4ae15cc5d1004d8ace8fa10a Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 5 May 2025 13:51:45 +0200 Subject: [PATCH 22/53] Added support for Python 3.13 by bumping pydantic requirements. 
--- .github/workflows/python-package.yml | 2 +- requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index aa18e2d9..6cea0f9b 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -16,7 +16,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-24.04", "ubuntu-22.04"] - python: ["3.9", "3.10", "3.11", "3.12"] + python: ["3.9", "3.10", "3.11", "3.12", "3.13"] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 diff --git a/requirements.txt b/requirements.txt index 00a18c72..33d84046 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ lxml >=4.1.1 mako pyXMLSecurity >=1.0.0 pyconfig -pydantic==2.4.* +pydantic>=2.8 pyramid pyyaml >=3.10 redis @@ -24,4 +24,4 @@ wsgi-intercept xmldiff str2bool setuptools>=78.1.1 # not directly required, pinned by Snyk to avoid a vulnerability -zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability \ No newline at end of file +zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability From a564cb7f6af354d2ff59b8044d095160dc5e26a5 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 5 May 2025 14:07:06 +0200 Subject: [PATCH 23/53] Workaround for coveralls not supporting python 3.13. 
--- .github/workflows/python-package.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 6cea0f9b..d955cad3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -35,7 +35,9 @@ jobs: python -m pip install --upgrade pip if [ -f requirements.txt ]; then pip install -r requirements.txt; fi if [ -f test_requirements.txt ]; then pip install -r test_requirements.txt; fi - pip install coveralls + if [ "${{ matrix.python-version }}" != "3.13" ]; then # workaround for TheKevJames/coveralls-python#523 + pip install coveralls + fi pip install mypy python -m pip install --editable . From 1d379d46d5e83392948ed9776f769e6f358c1b3b Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 5 May 2025 14:48:38 +0200 Subject: [PATCH 24/53] Work around missing pytest coverage module. --- .github/workflows/python-package.yml | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index d955cad3..50d63777 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -44,7 +44,16 @@ jobs: - name: Run tests run: | - make quick_test - make test_coverage + python -m pytest src + if [ "${{ matrix.python-version }}" == "3.13" ]; then # workaround for TheKevJames/coveralls-python#523 + python -m pytest --no-cov src + else + python -m pytest src + coverage erase + pytest --cov=src/pyff + mv .coverage .coverage.1 + coverage combine + fi + #make typecheck From 1bdc1f0b61e1c22eb8226c3df8bb8b5feb1943a1 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 5 May 2025 14:53:07 +0200 Subject: [PATCH 25/53] Second try. 
--- .github/workflows/python-package.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 50d63777..c67378b5 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -44,7 +44,6 @@ jobs: - name: Run tests run: | - python -m pytest src if [ "${{ matrix.python-version }}" == "3.13" ]; then # workaround for TheKevJames/coveralls-python#523 python -m pytest --no-cov src else From 8629718c5e71ec693f4a84df6c3176ab7a38c090 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 12:26:02 +0200 Subject: [PATCH 26/53] Disable 3.13 until the coveralls upstream is fixed. --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index c67378b5..80b5df95 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -16,7 +16,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-24.04", "ubuntu-22.04"] - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.9", "3.10", "3.11", "3.12"] #Disable 3.13 until https://github.com/TheKevJames/coveralls-python/issues/549 is fixed runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 From 152809ad47fb80c936cf02cb5e901f4bc94a1996 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 12:55:26 +0200 Subject: [PATCH 27/53] Use tools from virtualenv for test coverage. 
--- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 3ab85c56..e5cc61a1 100644 --- a/Makefile +++ b/Makefile @@ -9,10 +9,10 @@ quick_test: PYFF_SKIP_SLOW_TESTS=1 PYTHONPATH=$(SRCDIR) pytest test_coverage: - coverage erase - PYTHONPATH=$(SRCDIR) pytest --cov=src/pyff + python -m coverage erase + python -m coverage run -m pytest --cov=src/pyff mv .coverage .coverage.1 - coverage combine + python -m coverage combine reformat: ruff --format $(SOURCE) From 63215c439e5fbc480e77567905fbf3937b17abf6 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 12:59:19 +0200 Subject: [PATCH 28/53] Remove old files that are not used any more. --- .codeclimate.yml | 33 -------- .csslintrc | 2 - .eslintignore | 1 - .eslintrc | 213 ----------------------------------------------- .travis.yml | 23 ----- 5 files changed, 272 deletions(-) delete mode 100644 .codeclimate.yml delete mode 100644 .csslintrc delete mode 100644 .eslintignore delete mode 100644 .eslintrc delete mode 100644 .travis.yml diff --git a/.codeclimate.yml b/.codeclimate.yml deleted file mode 100644 index b3aa2a1c..00000000 --- a/.codeclimate.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -engines: - csslint: - enabled: true - duplication: - enabled: true - config: - languages: - - ruby - - javascript - - python - - php - eslint: - enabled: true - fixme: - enabled: true - radon: - enabled: true -ratings: - paths: - - "**.css" - - "**.inc" - - "**.js" - - "**.jsx" - - "**.module" - - "**.php" - - "**.py" - - "**.rb" -exclude_paths: - - "src/pyff/site/static/bootstrap/*" - - "src/pyff/test/**" - - "src/pyff/site/static/js/prettify.js" - - "src/pyff/site/static/js/typeahead.js" diff --git a/.csslintrc b/.csslintrc deleted file mode 100644 index aacba956..00000000 --- a/.csslintrc +++ /dev/null @@ -1,2 +0,0 @@ ---exclude-exts=.min.css ---ignore=adjoining-classes,box-model,ids,order-alphabetical,unqualified-attributes diff --git a/.eslintignore b/.eslintignore 
deleted file mode 100644 index 96212a35..00000000 --- a/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -**/*{.,-}min.js diff --git a/.eslintrc b/.eslintrc deleted file mode 100644 index cbf76421..00000000 --- a/.eslintrc +++ /dev/null @@ -1,213 +0,0 @@ -ecmaFeatures: - modules: true - jsx: true - -env: - amd: true - browser: true - es6: true - jquery: true - node: true - -# http://eslint.org/docs/rules/ -rules: - # Possible Errors - comma-dangle: [2, never] - no-cond-assign: 2 - no-console: 0 - no-constant-condition: 2 - no-control-regex: 2 - no-debugger: 2 - no-dupe-args: 2 - no-dupe-keys: 2 - no-duplicate-case: 2 - no-empty: 2 - no-empty-character-class: 2 - no-ex-assign: 2 - no-extra-boolean-cast: 2 - no-extra-parens: 0 - no-extra-semi: 2 - no-func-assign: 2 - no-inner-declarations: [2, functions] - no-invalid-regexp: 2 - no-irregular-whitespace: 2 - no-negated-in-lhs: 2 - no-obj-calls: 2 - no-regex-spaces: 2 - no-sparse-arrays: 2 - no-unexpected-multiline: 2 - no-unreachable: 2 - use-isnan: 2 - valid-jsdoc: 0 - valid-typeof: 2 - - # Best Practices - accessor-pairs: 2 - block-scoped-var: 0 - complexity: [2, 11] - consistent-return: 0 - curly: 0 - default-case: 0 - dot-location: 0 - dot-notation: 0 - eqeqeq: 2 - guard-for-in: 2 - no-alert: 2 - no-caller: 2 - no-case-declarations: 2 - no-div-regex: 2 - no-else-return: 0 - no-empty-label: 2 - no-empty-pattern: 2 - no-eq-null: 2 - no-eval: 2 - no-extend-native: 2 - no-extra-bind: 2 - no-fallthrough: 2 - no-floating-decimal: 0 - no-implicit-coercion: 0 - no-implied-eval: 2 - no-invalid-this: 0 - no-iterator: 2 - no-labels: 0 - no-lone-blocks: 2 - no-loop-func: 2 - no-magic-number: 0 - no-multi-spaces: 0 - no-multi-str: 0 - no-native-reassign: 2 - no-new-func: 2 - no-new-wrappers: 2 - no-new: 2 - no-octal-escape: 2 - no-octal: 2 - no-proto: 2 - no-redeclare: 2 - no-return-assign: 2 - no-script-url: 2 - no-self-compare: 2 - no-sequences: 0 - no-throw-literal: 0 - no-unused-expressions: 2 - no-useless-call: 2 - 
no-useless-concat: 2 - no-void: 2 - no-warning-comments: 0 - no-with: 2 - radix: 2 - vars-on-top: 0 - wrap-iife: 2 - yoda: 0 - - # Strict - strict: 0 - - # Variables - init-declarations: 0 - no-catch-shadow: 2 - no-delete-var: 2 - no-label-var: 2 - no-shadow-restricted-names: 2 - no-shadow: 0 - no-undef-init: 2 - no-undef: 0 - no-undefined: 0 - no-unused-vars: 0 - no-use-before-define: 0 - - # Node.js and CommonJS - callback-return: 2 - global-require: 2 - handle-callback-err: 2 - no-mixed-requires: 0 - no-new-require: 0 - no-path-concat: 2 - no-process-exit: 2 - no-restricted-modules: 0 - no-sync: 0 - - # Stylistic Issues - array-bracket-spacing: 0 - block-spacing: 0 - brace-style: 0 - camelcase: 0 - comma-spacing: 0 - comma-style: 0 - computed-property-spacing: 0 - consistent-this: 0 - eol-last: 0 - func-names: 0 - func-style: 0 - id-length: 0 - id-match: 0 - indent: 0 - jsx-quotes: 0 - key-spacing: 0 - linebreak-style: 0 - lines-around-comment: 0 - max-depth: 0 - max-len: 0 - max-nested-callbacks: 0 - max-params: 0 - max-statements: 0 - new-cap: 0 - new-parens: 0 - newline-after-var: 0 - no-array-constructor: 0 - no-bitwise: 0 - no-continue: 0 - no-inline-comments: 0 - no-lonely-if: 0 - no-mixed-spaces-and-tabs: 0 - no-multiple-empty-lines: 0 - no-negated-condition: 0 - no-nested-ternary: 0 - no-new-object: 0 - no-plusplus: 0 - no-restricted-syntax: 0 - no-spaced-func: 0 - no-ternary: 0 - no-trailing-spaces: 0 - no-underscore-dangle: 0 - no-unneeded-ternary: 0 - object-curly-spacing: 0 - one-var: 0 - operator-assignment: 0 - operator-linebreak: 0 - padded-blocks: 0 - quote-props: 0 - quotes: 0 - require-jsdoc: 0 - semi-spacing: 0 - semi: 0 - sort-vars: 0 - space-after-keywords: 0 - space-before-blocks: 0 - space-before-function-paren: 0 - space-before-keywords: 0 - space-in-parens: 0 - space-infix-ops: 0 - space-return-throw-case: 0 - space-unary-ops: 0 - spaced-comment: 0 - wrap-regex: 0 - - # ECMAScript 6 - arrow-body-style: 0 - arrow-parens: 0 - 
arrow-spacing: 0 - constructor-super: 0 - generator-star-spacing: 0 - no-arrow-condition: 0 - no-class-assign: 0 - no-const-assign: 0 - no-dupe-class-members: 0 - no-this-before-super: 0 - no-var: 0 - object-shorthand: 0 - prefer-arrow-callback: 0 - prefer-const: 0 - prefer-reflect: 0 - prefer-spread: 0 - prefer-template: 0 - require-yield: 0 diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 9dd24185..00000000 --- a/.travis.yml +++ /dev/null @@ -1,23 +0,0 @@ -language: python -sudo: required -dist: xenial -language: python -python: - - 3.7 - - 3.8 -install: - - "pip install -r requirements.txt" - - "pip install -r test_requirements.txt" - - "./setup.py develop" - - "pip install coveralls" - - "pip install pytest" - - "pip install mypy" -script: - - export PYTHONPATH=./src:$PYTHONPATH - - make test_coverage - - make typecheck -before_install: - - sudo apt-get update -qq - - sudo apt-get install -qq python-dev libyaml-dev libxml2-dev libxslt-dev libsofthsm softhsm opensc libengine-pkcs11-openssl -after_success: - coveralls From 8f04ae358f24428229c64aa76c8530d075b519f3 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 13:11:19 +0200 Subject: [PATCH 29/53] Replace deprecated pydantic .dict() with .model_dump(). Formatting fixes. --- src/pyff/resource.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/pyff/resource.py b/src/pyff/resource.py index 0c8fbd14..b65b4c41 100644 --- a/src/pyff/resource.py +++ b/src/pyff/resource.py @@ -3,6 +3,7 @@ An abstraction layer for metadata fetchers. Supports both synchronous and asynchronous fetchers with cache. """ + from __future__ import annotations import os @@ -192,7 +193,7 @@ class ResourceInfo(BaseModel): state: Optional[ResourceLoadState] = None http_headers: Dict[str, Any] = Field({}) reason: Optional[str] = None - status_code: Optional[str] = None # HTTP status code as string. 
TODO: change to int + status_code: Optional[str] = None # HTTP status code as string. TODO: change to int parser_info: Optional[ParserInfo] = None expired: Optional[bool] = None exception: Optional[BaseException] = None @@ -221,6 +222,7 @@ def _format_key(k: str) -> str: return res + class Resource(Watchable): def __init__(self, url: Optional[str], opts: ResourceOpts): super().__init__() @@ -280,7 +282,7 @@ def cleanup(self) -> Iterable[Callable]: # TODO: move classes to make this work def __str__(self): return "Resource {} expires at {} using ".format( self.url if self.url is not None else "(root)", self.expire_time - ) + ",".join(["{}={}".format(k, v) for k, v in sorted(list(self.opts.dict().items()))]) + ) + ",".join(["{}={}".format(k, v) for k, v in sorted(list(self.opts.model_dump().items()))]) def reload(self, fail_on_error=False): with non_blocking_lock(self.lock): @@ -485,7 +487,7 @@ def parse(self, getter: Callable[[str], Response]) -> Deque[Resource]: info.expired = False if info.parser_info: - for (eid, error) in list(info.parser_info.validation_errors.items()): + for eid, error in list(info.parser_info.validation_errors.items()): log.error(error) else: log.debug(f'Parser did not produce anything (probably ok) when parsing {self.url} {info}') From 0e5d17ed92c550b19039b55d26acba120cc05741 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 13:45:16 +0200 Subject: [PATCH 30/53] python3 -m pyupgrade --py39-plus on all python files. Removed six as a direct dependency. 
--- requirements.txt | 1 - src/pyff/api.py | 35 ++++---- src/pyff/builtins.py | 57 +++++++------ src/pyff/constants.py | 40 +++++----- src/pyff/exceptions.py | 5 +- src/pyff/fetch.py | 20 ++--- src/pyff/locks.py | 3 +- src/pyff/logs.py | 9 +-- src/pyff/mdq.py | 13 ++- src/pyff/parse.py | 10 +-- src/pyff/pipes.py | 65 +++++++-------- src/pyff/resource.py | 98 +++++++++++------------ src/pyff/samlmd.py | 43 +++++----- src/pyff/store.py | 48 +++++------ src/pyff/test/__init__.py | 9 +-- src/pyff/test/test_decorators.py | 4 +- src/pyff/test/test_log.py | 4 +- src/pyff/test/test_md_api.py | 24 +++--- src/pyff/test/test_mdsl.py | 19 +++-- src/pyff/test/test_pipeline.py | 110 +++++++++++++++----------- src/pyff/test/test_repo.py | 2 +- src/pyff/test/test_simple_pipeline.py | 4 +- src/pyff/test/test_store.py | 8 +- src/pyff/test/test_utils.py | 12 ++- src/pyff/tools.py | 4 +- src/pyff/utils.py | 76 ++++++++---------- 26 files changed, 360 insertions(+), 363 deletions(-) diff --git a/requirements.txt b/requirements.txt index 33d84046..0c4a643d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,7 +18,6 @@ requests requests_cache requests_file simplejson >=2.6.2 -six>=1.11.0 whoosh wsgi-intercept xmldiff diff --git a/src/pyff/api.py b/src/pyff/api.py index 8b443409..a0ca9bb3 100644 --- a/src/pyff/api.py +++ b/src/pyff/api.py @@ -2,7 +2,8 @@ import threading from datetime import datetime, timedelta from json import dumps -from typing import Any, Dict, Generator, Iterable, List, Mapping, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple +from collections.abc import Generator, Iterable, Mapping import pyramid.httpexceptions as exc import pytz @@ -15,7 +16,7 @@ from pyramid.request import Request from pyramid.response import Response from six import b -from six.moves.urllib_parse import quote_plus +from urllib.parse import quote_plus from pyff.constants import config from pyff.exceptions import ResourceException @@ -30,7 +31,7 @@ log = 
get_log(__name__) -class NoCache(object): +class NoCache: """Dummy implementation for when caching isn't enabled""" def __init__(self) -> None: @@ -82,7 +83,7 @@ def status_handler(request: Request) -> Response: return response -class MediaAccept(object): +class MediaAccept: def __init__(self, accept: str): self._type = AcceptableType(accept) @@ -110,7 +111,7 @@ def _is_xml(data: Any) -> bool: return isinstance(data, (etree._Element, etree._ElementTree)) -def _fmt(data: Any, accepter: MediaAccept) -> Tuple[str, str]: +def _fmt(data: Any, accepter: MediaAccept) -> tuple[str, str]: """ Format data according to the accepted content type of the requester. Return data as string (either XML or json) and a content-type. @@ -162,7 +163,7 @@ def process_handler(request: Request) -> Response: """ _ctypes = {'xml': 'application/samlmetadata+xml;application/xml;text/xml', 'json': 'application/json'} - def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional[str]]: + def _d(x: Optional[str], do_split: bool = True) -> tuple[Optional[str], Optional[str]]: """Split a path into a base component and an extension.""" if x is not None: x = x.strip() @@ -195,7 +196,7 @@ def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional # Enable matching on scope. 
match = match.split('@').pop() if match and not match.endswith('@') else match - log.debug("match={}".format(match)) + log.debug(f"match={match}") if not path_elem: path_elem = ['entities'] @@ -213,7 +214,7 @@ def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional if 'entities' not in alias: pfx = request.registry.aliases.get(alias, None) if pfx is None: - log.debug("alias {} not found - passing to storage lookup".format(alias)) + log.debug(f"alias {alias} not found - passing to storage lookup") path = alias # treat as path # content_negotiation_policy is one of three values: @@ -353,11 +354,11 @@ def webfinger_handler(request: Request) -> Response: if resource is None: resource = request.host_url - jrd: Dict[str, Any] = dict() + jrd: dict[str, Any] = dict() dt = datetime.now() + timedelta(hours=1) jrd['expires'] = dt.isoformat() jrd['subject'] = request.host_url - links: List[Dict[str, Any]] = list() + links: list[dict[str, Any]] = list() jrd['links'] = links _dflt_rels = { @@ -377,7 +378,7 @@ def _links(url: str, title: Any = None) -> None: suffix = "" if not url.endswith('/'): suffix = _dflt_rels[r][0] - links.append(dict(rel=r, type=_dflt_rels[r][1], href='%s/%s%s' % (request.host_url, url, suffix))) + links.append(dict(rel=r, type=_dflt_rels[r][1], href=f'{request.host_url}/{url}{suffix}')) _links('/entities/') for a in request.registry.md.store.collections(): @@ -391,7 +392,7 @@ def _links(url: str, title: Any = None) -> None: aliases = request.registry.aliases for a in aliases.keys(): for v in request.registry.md.store.attribute(aliases[a]): - _links('%s/%s' % (a, quote_plus(v))) + _links(f'{a}/{quote_plus(v)}') response = Response(dumps(jrd, default=json_serializer)) response.headers['Content-Type'] = 'application/json' @@ -407,7 +408,7 @@ def resources_handler(request: Request) -> Response: :return: a JSON representation of the set of resources currently loaded by the server """ - def _infos(resources: Iterable[Resource]) -> 
List[Mapping[str, Any]]: + def _infos(resources: Iterable[Resource]) -> list[Mapping[str, Any]]: return [_info(r) for r in resources if r.info.state is not None] def _info(r: Resource) -> Mapping[str, Any]: @@ -453,19 +454,19 @@ def search_handler(request: Request) -> Response: match = match.split('@').pop() if match and not match.endswith('@') else match entity_filter = request.params.get('entity_filter', '{http://pyff.io/role}idp') - log.debug("match={}".format(match)) + log.debug(f"match={match}") store = request.registry.md.store def _response() -> Generator[bytes, bytes, None]: - yield b('[') + yield b'[' in_loop = False entities = store.search(query=match.lower(), entity_filter=entity_filter) for e in entities: if in_loop: - yield b(',') + yield b',' yield b(dumps(e)) in_loop = True - yield b(']') + yield b']' response = Response(content_type='application/json') response.app_iter = _response() diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index b00cbd63..363cc2a0 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -17,11 +17,10 @@ from typing import Dict, Optional import ipaddress -import six import xmlsec from lxml import etree from lxml.etree import DocumentInvalid -from six.moves.urllib_parse import quote_plus, urlparse +from urllib.parse import quote_plus, urlparse from pyff.constants import NS from pyff.decorators import deprecated @@ -104,7 +103,7 @@ def _map(req: Plumbing.Request, *opts): def _p(e): entity_id = e.get('entityID') - ip = Plumbing(pipeline=req.args, pid="{}.each[{}]".format(req.plumbing.pid, entity_id)) + ip = Plumbing(pipeline=req.args, pid=f"{req.plumbing.pid}.each[{entity_id}]") ireq = Plumbing.Request(ip, req.md, t=e, scheduler=req.scheduler) ireq.set_id(entity_id) ireq.set_parent(req) @@ -114,7 +113,7 @@ def _p(e): pool = ThreadPool() result = pool.map(_p, iter_entities(req.t), chunksize=10) - log.info("processed {} entities".format(len(result))) + log.info(f"processed {len(result)} entities") 
@pipe(name="then") @@ -461,7 +460,7 @@ def sort(req: Plumbing.Request, *opts): if req.t is None: raise PipeException("Unable to sort empty document.") - _opts: Dict[str, Optional[str]] = dict(list(zip(opts[0:1], [" ".join(opts[1:])]))) + _opts: dict[str, Optional[str]] = dict(list(zip(opts[0:1], [" ".join(opts[1:])]))) if 'order_by' not in _opts: _opts['order_by'] = None sort_entities(req.t, _opts['order_by']) @@ -837,7 +836,7 @@ def select(req: Plumbing.Request, *opts): if req.state.get('match', None): # TODO - allow this to be passed in via normal arguments match = req.state['match'] - if isinstance(match, six.string_types): + if isinstance(match, str): query = [match.lower()] def _strings(elt): @@ -870,22 +869,22 @@ def _match(q, elt): if q is not None and len(q) > 0: tokens = _strings(elt) - p = re.compile(r'\b{}'.format(q), re.IGNORECASE) + p = re.compile(fr'\b{q}', re.IGNORECASE) for tstr in tokens: if p.search(tstr): return tstr return None - log.debug("matching {} in {} entities".format(match, len(entities))) + log.debug(f"matching {match} in {len(entities)} entities") entities = list(filter(lambda e: _match(match, e) is not None, entities)) - log.debug("returning {} entities after match".format(len(entities))) + log.debug(f"returning {len(entities)} entities after match") ot = entitiesdescriptor(entities, name) if ot is None: raise PipeException("empty select - stop") if req.plumbing.id != name: - log.debug("storing synthetic collection {}".format(name)) + log.debug(f"storing synthetic collection {name}") req.store.update(ot, name) return ot @@ -1202,7 +1201,7 @@ def stats(req: Plumbing.Request, *opts): raise PipeException("Your pipeline is missing a select statement.") print("---") - print("total size: {:d}".format(req.store.size())) + print(f"total size: {req.store.size():d}") if not hasattr(req.t, 'xpath'): raise PipeException("Unable to call stats on non-XML") @@ -1303,7 +1302,7 @@ def xslt(req: Plumbing.Request, *opts): if stylesheet is None: raise 
PipeException("xslt requires stylesheet") - params = dict((k, "'%s'" % v) for (k, v) in list(req.args.items())) + params = {k: "'%s'" % v for (k, v) in list(req.args.items())} del params['stylesheet'] try: return root(xslt_transform(req.t, stylesheet, params)) @@ -1429,13 +1428,13 @@ def check_xml_namespaces(req: Plumbing.Request, *opts): raise PipeException("Your pipeline is missing a select statement.") def _verify(elt): - if isinstance(elt.tag, six.string_types): + if isinstance(elt.tag, str): for prefix, uri in list(elt.nsmap.items()): if not uri.startswith('urn:'): u = urlparse(uri) if u.scheme not in ('http', 'https'): raise MetadataException( - "Namespace URIs must be be http(s) URIs ('{}' declared on {})".format(uri, elt.tag) + f"Namespace URIs must be be http(s) URIs ('{uri}' declared on {elt.tag})" ) with_tree(root(req.t), _verify) @@ -1508,7 +1507,7 @@ def certreport(req: Plumbing.Request, *opts): error_bits = int(req.args.get('error_bits', "1024")) warning_bits = int(req.args.get('warning_bits', "2048")) - seen: Dict[str, bool] = {} + seen: dict[str, bool] = {} for eid in req.t.xpath("//md:EntityDescriptor/@entityID", namespaces=NS, smart_strings=False): for cd in req.t.xpath( "md:EntityDescriptor[@entityID='%s']//ds:X509Certificate" % eid, namespaces=NS, smart_strings=False @@ -1530,17 +1529,17 @@ def certreport(req: Plumbing.Request, *opts): entity_elt, "certificate-error", "keysize too small", - "%s has keysize of %s bits (less than %s)" % (cert.getSubject(), keysize, error_bits), + "{} has keysize of {} bits (less than {})".format(cert.getSubject(), keysize, error_bits), ) - log.error("%s has keysize of %s" % (eid, keysize)) + log.error("{} has keysize of {}".format(eid, keysize)) elif keysize < warning_bits: annotate_entity( entity_elt, "certificate-warning", "keysize small", - "%s has keysize of %s bits (less than %s)" % (cert.getSubject(), keysize, warning_bits), + "{} has keysize of {} bits (less than {})".format(cert.getSubject(), keysize, 
warning_bits), ) - log.warning("%s has keysize of %s" % (eid, keysize)) + log.warning("{} has keysize of {}".format(eid, keysize)) notafter = cert.getNotAfter() if notafter is None: @@ -1560,23 +1559,23 @@ def certreport(req: Plumbing.Request, *opts): entity_elt, "certificate-error", "certificate has expired", - "%s expired %s ago" % (cert.getSubject(), -dt), + "{} expired {} ago".format(cert.getSubject(), -dt), ) - log.error("%s expired %s ago" % (eid, -dt)) + log.error("{} expired {} ago".format(eid, -dt)) elif total_seconds(dt) < warning_seconds: annotate_entity( entity_elt, "certificate-warning", "certificate about to expire", - "%s expires in %s" % (cert.getSubject(), dt), + "{} expires in {}".format(cert.getSubject(), dt), ) - log.warning("%s expires in %s" % (eid, dt)) + log.warning("{} expires in {}".format(eid, dt)) except ValueError as ex: annotate_entity( entity_elt, "certificate-error", "certificate has unknown expiration time", - "%s unknown expiration time %s" % (cert.getSubject(), notafter), + "{} unknown expiration time {}".format(cert.getSubject(), notafter), ) req.store.update(entity_elt) @@ -1623,7 +1622,7 @@ def emit(req: Plumbing.Request, ctype="application/xml", *opts): if d is not None: m = hashlib.sha1() - if not isinstance(d, six.binary_type): + if not isinstance(d, bytes): d = d.encode("utf-8") m.update(d) req.state['headers']['ETag'] = m.hexdigest() @@ -1632,7 +1631,7 @@ def emit(req: Plumbing.Request, ctype="application/xml", *opts): req.state['headers']['Content-Type'] = ctype if six.PY2: - d = six.u(d) + d = d return d @@ -1664,7 +1663,7 @@ def signcerts(req: Plumbing.Request, *opts): @pipe def finalize(req: Plumbing.Request, *opts): - """ + r""" Prepares the working document for publication/rendering. 
:param req: The request @@ -1716,7 +1715,7 @@ def finalize(req: Plumbing.Request, *opts): # TODO: Investigate this error, which is probably correct: # error: On Python 3 '{}'.format(b'abc') produces "b'abc'", not 'abc'; # use '{!r}'.format(b'abc') if this is desired behavior - name = "{}://{}{}".format(base_url.scheme, base_url.netloc, name_url.path) # type: ignore + name = f"{base_url.scheme}://{base_url.netloc}{name_url.path}" # type: ignore log.debug("-------- using Name: %s" % name) except ValueError as ex: log.debug(f'Got an exception while finalizing: {ex}') @@ -1752,7 +1751,7 @@ def finalize(req: Plumbing.Request, *opts): offset = dt - now e.set('validUntil', datetime2iso(dt)) except ValueError as ex: - log.error("Unable to parse validUntil: %s (%s)" % (valid_until, ex)) + log.error("Unable to parse validUntil: {} ({})".format(valid_until, ex)) # set a reasonable default: 50% of the validity # we replace this below if we have cacheDuration set diff --git a/src/pyff/constants.py b/src/pyff/constants.py index c01fa534..52bad854 100644 --- a/src/pyff/constants.py +++ b/src/pyff/constants.py @@ -58,7 +58,7 @@ def as_string(o): - if type(o) not in six.string_types: + if type(o) not in (str,): o = str(o) return o @@ -68,21 +68,21 @@ def as_int(o): def as_loglevel(o): - if type(o) in six.string_types: + if type(o) in (str,): if hasattr(logging, str(o)): o = getattr(logging, str(o)) - raise ValueError("No such loglevel: {}".format(repr(o))) + raise ValueError(f"No such loglevel: {repr(o)}") return o def as_list_of_string(o): - if type(o) in six.string_types: + if type(o) in (str,): o = re.findall(r'[^,:\s]+', o) return o def as_dict_of_string(o): - if type(o) in six.string_types: + if type(o) in (str,): o = json.loads(o) return o @@ -93,7 +93,7 @@ def as_bool(o): return o -class BaseSetting(object): +class BaseSetting: def __init__( self, name, @@ -116,12 +116,12 @@ def __init__( self.value = None self.long = long self.hidden = hidden - self.fallback = 
pyconfig.setting('pyff.{}'.format(self.name), default, allow_default=True) + self.fallback = pyconfig.setting(f'pyff.{self.name}', default, allow_default=True) @property def default_fmt(self): if self.default: - return "[{}]".format(str(self.default)) + return f"[{str(self.default)}]" else: return '' @@ -162,16 +162,16 @@ def short_spec(self): if (hasattr(self, 'typeconv') and self.typeconv == as_bool) or isinstance(self, InvertedSetting): return self.short else: - return '{}:'.format(self.short) + return f'{self.short}:' else: return '' def long_spec(self): long_name = self.long_name if hasattr(self, 'typeconv') and self.typeconv == as_bool: - return '{}'.format(long_name) + return f'{long_name}' else: - return '{}='.format(long_name) + return f'{long_name}=' class EnvSetting(BaseSetting): @@ -226,7 +226,7 @@ def N(*args: object, **kwargs: object) -> BaseSetting: return InvertedSetting(*args, **kwargs) -class Config(object): +class Config: """ The :py:const:`pyff.constants:config` object is a singleton instance of this Class and contains all configuration parameters available to pyFF. 
Each parameter can be set directly, via :py:mod:`pyconfig` @@ -467,7 +467,7 @@ class Config(object): def base_url(self): if self.public_url: return self.public_url - return "http://{}{}".format(config.host, "" if config.port == 80 else ":{}".format(config.port)) + return "http://{}{}".format(config.host, "" if config.port == 80 else f":{config.port}") @staticmethod def settings(): @@ -478,7 +478,7 @@ def settings(): def __str__(self): s = "# pyFF configuration\n" for p in self.settings(): - s += "{} = {}\n".format(p.name, p.value) + s += f"{p.name} = {p.value}\n" return s def find_setting(self, o): @@ -502,10 +502,10 @@ def help(prg): hlp = "Usage: {} [options+] \n\n" for s in config.settings(): if prg in s.cmdline and not s.deprecated and not s.hidden: - h = " --{}".format(s.long_name) + h = f" --{s.long_name}" if s.short: - h += "|-{}".format(s.short) - hlp += "{:30s} {} {}\n".format(h, s.info, s.default_fmt) + h += f"|-{s.short}" + hlp += f"{h:30s} {s.info} {s.default_fmt}\n" return hlp @@ -537,7 +537,7 @@ def parse_options(program, docs): print(docs) sys.exit(0) elif o in ('-v', '--version'): - print("{} version {}".format(program, pyff_version)) + print(f"{program} version {pyff_version}") sys.exit(0) elif o in ('-A', '--alias'): (a, colon, uri) = a.partition(':') @@ -551,11 +551,11 @@ def parse_options(program, docs): s = config.find_setting(o) if s is not None: if s.deprecated: - print("WARNING: {} is deprecated. Setting this option has no effect!".format(o)) + print(f"WARNING: {o} is deprecated. 
Setting this option has no effect!") else: setattr(s, 'value', a) else: - raise ValueError("Unknown option {}".format(o)) + raise ValueError(f"Unknown option {o}") if config.compat_dir and not config.base_dir: config.base_dir = config.compat_dir diff --git a/src/pyff/exceptions.py b/src/pyff/exceptions.py index 701fe0fa..9b6b7b80 100644 --- a/src/pyff/exceptions.py +++ b/src/pyff/exceptions.py @@ -6,10 +6,7 @@ class PyffException(BaseException): def __init__(self, msg, wrapped=None, data=None): self._wrapped = wrapped self._data = data - if six.PY2: - super(self.__class__, self).__init__(msg) - else: - super().__init__(msg) + super().__init__(msg) def raise_wrapped(self): raise self._wrapped diff --git a/src/pyff/fetch.py b/src/pyff/fetch.py index fcfe3c68..88ac2477 100644 --- a/src/pyff/fetch.py +++ b/src/pyff/fetch.py @@ -14,7 +14,7 @@ def make_resourcestore_instance(*args, **kwargs): return new_store(*args, **kwargs) -class ResourceStore(object): +class ResourceStore: pass @@ -37,11 +37,11 @@ def __init__(self, request, response, pool, name, content_handler): self.state('idle') def state(self, state): - self.name = "{} ({})".format(self._id, state) + self.name = f"{self._id} ({state})" def run(self): while not self.halt: - log.debug("waiting for pool {}....".format(self._id)) + log.debug(f"waiting for pool {self._id}....") with self.pool: url = self.request.get() if url is not None: @@ -53,12 +53,12 @@ def run(self): self.response.put( {'response': r, 'url': url, 'exception': None, 'last_fetched': datetime.now()} ) - log.debug("successfully fetched {}".format(url)) + log.debug(f"successfully fetched {url}") except Exception as ex: self.response.put( {'response': None, 'url': url, 'exception': ex, 'last_fetched': datetime.now()} ) - log.warning("error fetching {}".format(url)) + log.warning(f"error fetching {url}") log.warning(ex) import traceback @@ -78,7 +78,7 @@ def __init__(self, num_threads=config.worker_pool_size, name="Fetcher", content_ 
threading.Thread.__init__(self) Watchable.__init__(self) self._id = name - self.name = '{} (master)'.format(self._id) + self.name = f'{self._id} (master)' self.request = queue.Queue() self.response = queue.Queue() self.pool = threading.BoundedSemaphore(num_threads) @@ -96,7 +96,7 @@ def schedule(self, url): :param url: the url to fetch :return: nothing is returned. """ - log.info("scheduling fetch of {}".format(url)) + log.info(f"scheduling fetch of {url}") self.request.put(url) def stop(self): @@ -120,12 +120,12 @@ def run(self): :return: nothing is returned """ - log.debug("Fetcher ({}) ready & waiting for responses...".format(self._id)) + log.debug(f"Fetcher ({self._id}) ready & waiting for responses...") while not self.halt: info = self.response.get() if info is not None: self.notify(**info) - log.debug("Fetcher ({}) exiting...".format(self._id)) + log.debug(f"Fetcher ({self._id}) exiting...") def make_fetcher(name="Fetcher", content_handler=None): @@ -138,5 +138,5 @@ def make_fetcher(name="Fetcher", content_handler=None): """ f = Fetcher(name=name, content_handler=content_handler) f.start() - log.debug("fetcher created: {}".format(f)) + log.debug(f"fetcher created: {f}") return f diff --git a/src/pyff/locks.py b/src/pyff/locks.py index 5d120adf..61b7275b 100644 --- a/src/pyff/locks.py +++ b/src/pyff/locks.py @@ -1,5 +1,4 @@ # source http://code.activestate.com/recipes/502283/ (r1) -# -*- coding: iso-8859-15 -*- """locks.py - Read-Write lock thread lock implementation See the class documentation for more info. @@ -16,7 +15,7 @@ # --------------- -class ReadWriteLock(object): +class ReadWriteLock: """Read-Write lock class. 
A read-write lock differs from a standard threading.RLock() by allowing multiple threads to simultaneously hold a read lock, while allowing only a single thread to hold a write lock at the diff --git a/src/pyff/logs.py b/src/pyff/logs.py index 47cc9443..2cc089e8 100644 --- a/src/pyff/logs.py +++ b/src/pyff/logs.py @@ -5,10 +5,7 @@ import syslog from typing import Any, Optional -import six - - -class PyFFLogger(object): +class PyFFLogger: def __init__(self, name=None): if name is None: name = __name__ @@ -64,7 +61,7 @@ def log_config_file(ini: Optional[str]) -> None: if not os.path.isabs(ini): ini = os.path.join(os.getcwd(), ini) if not os.path.exists(ini): - raise ValueError("PYFF_LOGGING={} does not exist".format(ini)) + raise ValueError(f"PYFF_LOGGING={ini} does not exist") logging.config.fileConfig(ini) @@ -88,7 +85,7 @@ class SysLogLibHandler(logging.Handler): def __init__(self, facility): - if isinstance(facility, six.string_types): + if isinstance(facility, str): nf = getattr(syslog, "LOG_%s" % facility.upper(), None) if not isinstance(nf, int): raise ValueError('Invalid log facility: %s' % nf) diff --git a/src/pyff/mdq.py b/src/pyff/mdq.py index 0515c2db..a32b88ff 100644 --- a/src/pyff/mdq.py +++ b/src/pyff/mdq.py @@ -3,7 +3,6 @@ """ -from __future__ import unicode_literals import os @@ -24,13 +23,13 @@ def init(self, parser, opts, args): def __init__(self, options=None): self.options = options or {} - super(MDQApplication, self).__init__() + super().__init__() def load_config(self): - cfg = dict( - [(key, value) for key, value in iteritems(self.options) if key in self.cfg.settings and value is not None] - ) - for key, value in iteritems(cfg): + cfg = { + key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None + } + for key, value in cfg.items(): self.cfg.set(key.lower(), value) def load(self): @@ -47,7 +46,7 @@ def main(): os.chdir(config.base_dir) options = { - 'bind': '{}:{}'.format(config.host, config.port), + 
'bind': f'{config.host}:{config.port}', 'workers': config.worker_pool_size, 'loglevel': config.loglevel, 'preload_app': True, diff --git a/src/pyff/parse.py b/src/pyff/parse.py index 7737f43b..758f9705 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -20,7 +20,7 @@ class ParserInfo(BaseModel): description: str expiration_time: str # TODO: Change expiration_time into a datetime - validation_errors: Dict[str, Any] = Field({}) + validation_errors: dict[str, Any] = Field({}) def to_dict(self): def _format_key(k: str) -> str: @@ -91,7 +91,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: n += 1 if n == 0: - raise IOError("no entities found in {}".format(content)) + raise OSError(f"no entities found in {content}") resource.never_expires = True resource.expire_time = None @@ -116,14 +116,14 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: relt = root(t) for xrd in t.iter("{%s}XRD" % NS['xrd']): - for link in xrd.findall(".//{%s}Link[@rel='%s']" % (NS['xrd'], NS['md'])): + for link in xrd.findall(".//{{{}}}Link[@rel='{}']".format(NS['xrd'], NS['md'])): link_href = link.get("href") certs = CertDict(link) fingerprints = list(certs.keys()) fp = None if len(fingerprints) > 0: fp = fingerprints[0] - log.debug("XRD: {} verified by {}".format(link_href, fp)) + log.debug(f"XRD: {link_href} verified by {fp}") child_opts = resource.opts.model_copy(update={'alias': None}) resource.add_child(link_href, child_opts) resource.last_seen = utc_now().replace(microsecond=0) @@ -132,7 +132,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: return info -_parsers: List[PyffParser] = [XRDParser(), DirectoryParser(['xml']), NoParser()] +_parsers: list[PyffParser] = [XRDParser(), DirectoryParser(['xml']), NoParser()] def add_parser(parser): diff --git a/src/pyff/pipes.py b/src/pyff/pipes.py index 29abd427..e097f9b0 100644 --- a/src/pyff/pipes.py +++ b/src/pyff/pipes.py @@ -7,7 +7,8 @@ import functools import os import traceback 
-from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type, Union +from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union +from collections.abc import Iterable import yaml from apscheduler.schedulers.background import BackgroundScheduler @@ -22,7 +23,7 @@ __author__ = 'leifj' -registry: Dict[str, Callable] = dict() +registry: dict[str, Callable] = dict() def pipe(*args, **kwargs) -> Callable: @@ -46,7 +47,7 @@ def wrapper_pipe(*iargs, **ikwargs) -> Any: # locate the type annotation of 'opts' and if it exists assume it refers to a pydantic dataclass # before propagating the call to the wrapped function replace opts with the pydantic dataclass object # created from the Tuple provided - opts_type: Optional[Type] = None + opts_type: type | None = None if 'opts' in f.__annotations__: opts_type = f.__annotations__['opts'] @@ -90,7 +91,7 @@ def the_something_func(req,*opts): # self[entry_point.name] = entry_point.load() -def load_pipe(d: Any) -> Tuple[Callable, Any, str, Optional[Union[str, Dict, List]]]: +def load_pipe(d: Any) -> tuple[Callable, Any, str, str | dict | list | None]: """Return a triple callable,name,args of the pipe specified by the object d. 
:param d: The following alternatives for d are allowed: @@ -100,7 +101,7 @@ def load_pipe(d: Any) -> Tuple[Callable, Any, str, Optional[Union[str, Dict, Lis - d is an iterable (a list) in which case d[0] is treated as the pipe name and d[1:] becomes the args """ - def _n(_d: str) -> Tuple[str, List[str]]: + def _n(_d: str) -> tuple[str, list[str]]: lst = _d.split() _name = lst[0] _opts = lst[1:] @@ -108,7 +109,7 @@ def _n(_d: str) -> Tuple[str, List[str]]: name = None args = None - opts: List[str] = [] + opts: list[str] = [] if is_text(d): name, opts = _n(d) elif hasattr(d, '__iter__') and not type(d) is dict: @@ -135,12 +136,12 @@ def _n(_d: str) -> Tuple[str, List[str]]: return func, opts, name, args -class PipelineCallback(object): +class PipelineCallback: """ A delayed pipeline callback used as a post for parse_saml_metadata """ - def __init__(self, entry_point: str, req: Plumbing.Request, store: Optional[SAMLStoreBase] = None) -> None: + def __init__(self, entry_point: str, req: Plumbing.Request, store: SAMLStoreBase | None = None) -> None: self.entry_point = entry_point self.plumbing = Plumbing(req.scope_of(entry_point).plumbing.pipeline, f"{req.plumbing.id}-via-{entry_point}") self.req = req @@ -161,14 +162,14 @@ def __deepcopy__(self, memo: Any) -> PipelineCallback: return self def __call__(self, *args: Any, **kwargs: Any) -> Any: - log.debug("{!s}: called".format(self.plumbing)) + log.debug(f"{self.plumbing!s}: called") t = args[0] if t is None: raise ValueError("PipelineCallback must be called with a parse-tree argument") try: state = kwargs state[self.entry_point] = True - log.debug("state: {}".format(repr(state))) + log.debug(f"state: {repr(state)}") return self.plumbing.process(self.req.md, store=self.store, state=state, t=t) except Exception as ex: log.debug(traceback.format_exc()) @@ -176,7 +177,7 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any: raise ex -class Plumbing(object): +class Plumbing: """ A plumbing instance represents a basic 
processing chain for SAML metadata. A simple, yet reasonably complete example: @@ -205,11 +206,11 @@ class Plumbing(object): would then be signed (using signer.key) and finally published in /var/metadata/public/metadata.xml """ - def __init__(self, pipeline: Iterable[Dict[str, Any]], pid: str): + def __init__(self, pipeline: Iterable[dict[str, Any]], pid: str): self._id = pid self.pipeline = pipeline - def to_json(self) -> Iterable[Dict[str, Any]]: + def to_json(self) -> Iterable[dict[str, Any]]: # TODO: to_json seems like the wrong name for this function? return self.pipeline @@ -221,13 +222,13 @@ def id(self) -> str: def pid(self) -> str: return self._id - def __iter__(self) -> Iterable[Dict[str, Any]]: + def __iter__(self) -> Iterable[dict[str, Any]]: return self.pipeline def __str__(self) -> str: - return "PL[id={!s}]".format(self.pid) + return f"PL[id={self.pid!s}]" - class Request(object): + class Request: """ Represents a single request. When processing a set of pipelines a single request is used. Any part of the pipeline may modify any of the fields. 
@@ -240,9 +241,9 @@ def __init__( t=None, name=None, args=None, - state: Optional[Dict[str, Any]] = None, + state: dict[str, Any] | None = None, store=None, - scheduler: Optional[BackgroundScheduler] = None, + scheduler: BackgroundScheduler | None = None, raise_exceptions: bool = True, ): if not state: @@ -252,16 +253,16 @@ def __init__( self.plumbing: Plumbing = pl self.md: MDRepository = md self.t: ElementTree = t - self._id: Optional[str] = None + self._id: str | None = None self.name = name - self.args: Optional[Union[str, Dict, List]] = args - self.state: Dict[str, Any] = state + self.args: str | dict | list | None = args + self.state: dict[str, Any] = state self.done: bool = False self._store: SAMLStoreBase = store - self.scheduler: Optional[BackgroundScheduler] = scheduler + self.scheduler: BackgroundScheduler | None = scheduler self.raise_exceptions: bool = raise_exceptions - self.exception: Optional[BaseException] = None - self.parent: Optional[Plumbing.Request] = None + self.exception: BaseException | None = None + self.parent: Plumbing.Request | None = None def scope_of(self, entry_point: str) -> Plumbing.Request: for _p in self.plumbing.pipeline: @@ -272,7 +273,7 @@ def scope_of(self, entry_point: str) -> Plumbing.Request: return self.parent.scope_of(entry_point) @property - def id(self) -> Optional[str]: + def id(self) -> str | None: if self.t is None: return None if self._id is None: @@ -281,10 +282,10 @@ def id(self) -> Optional[str]: self._id = self.t.get('Name') return self._id - def set_id(self, _id: Optional[str]) -> None: + def set_id(self, _id: str | None) -> None: self._id = _id - def set_parent(self, _parent: Optional[Plumbing.Request]) -> None: + def set_parent(self, _parent: Plumbing.Request | None) -> None: self.parent = _parent @property @@ -338,12 +339,12 @@ def process( self, md: MDRepository, args: Any = None, - state: Optional[Dict[str, Any]] = None, - t: Optional[ElementTree] = None, - store: Optional[SAMLStoreBase] = None, + state: 
dict[str, Any] | None = None, + t: ElementTree | None = None, + store: SAMLStoreBase | None = None, raise_exceptions: bool = True, - scheduler: Optional[BackgroundScheduler] = None, - ) -> Optional[Element]: # TODO: unsure about this return type + scheduler: BackgroundScheduler | None = None, + ) -> Element | None: # TODO: unsure about this return type """ The main entrypoint for processing a request pipeline. Calls the inner processor. diff --git a/src/pyff/resource.py b/src/pyff/resource.py index b65b4c41..02482d2f 100644 --- a/src/pyff/resource.py +++ b/src/pyff/resource.py @@ -12,7 +12,8 @@ from datetime import datetime from enum import Enum from threading import Condition, Lock -from typing import TYPE_CHECKING, Any, Callable, Deque, Dict, Iterable, List, Mapping, Optional, Tuple +from typing import TYPE_CHECKING, Any, Callable, Deque, Dict, List, Optional, Tuple +from collections.abc import Iterable, Mapping from urllib.parse import quote as urlescape import requests @@ -50,10 +51,10 @@ log = get_log(__name__) -class URLHandler(object): +class URLHandler: def __init__(self, *args, **kwargs): - log.debug("create urlhandler {} {}".format(args, kwargs)) - self.pending: Dict[str, Resource] = {} + log.debug(f"create urlhandler {args} {kwargs}") + self.pending: dict[str, Resource] = {} self.name = kwargs.pop('name', None) self.content_handler = kwargs.pop('content_handler', None) self._setup() @@ -101,7 +102,7 @@ def __call__(self, watched=None, url=None, response=None, exception=None, last_f if url in self.pending: t = self.pending[url] with self.lock: - log.debug("RESPONSE url={}, exception={} @ {}".format(url, exception, self.count)) + log.debug(f"RESPONSE url={url}, exception={exception} @ {self.count}") self.i_handle(t, url=url, response=response, exception=exception, last_fetched=last_fetched) del self.pending[url] @@ -138,7 +139,7 @@ class ResourceHandler(URLHandler): def __init__(self, *args, **kwargs): super().__init__(self, *args, **kwargs) - def 
thing_to_url(self, t: Resource) -> Optional[str]: + def thing_to_url(self, t: Resource) -> str | None: return t.url def i_handle(self, t: Resource, url=None, response=None, exception=None, last_fetched=None): @@ -157,14 +158,14 @@ def i_handle(self, t: Resource, url=None, response=None, exception=None, last_fe class ResourceOpts(BaseModel): - alias: Optional[str] = Field(None, alias='as') # TODO: Rename to 'name'? + alias: str | None = Field(None, alias='as') # TODO: Rename to 'name'? # a certificate (file) or a SHA1 fingerprint to use for signature verification - verify: Optional[str] = None + verify: str | None = None # TODO: move classes to make the correct typing work: Iterable[Union[Lambda, PipelineCallback]] = Field([]) - via: List[Callable] = Field([]) + via: list[Callable] = Field([]) # A list of callbacks that can be used to pre-process parsed metadata before validation. Use as a clue-bat. # TODO: sort imports to make the correct typing work: Iterable[PipelineCallback] = Field([]) - cleanup: List[Callable] = Field([]) + cleanup: list[Callable] = Field([]) fail_on_error: bool = False # remove invalid EntityDescriptor elements rather than raise an error filter_invalid: bool = True @@ -173,7 +174,7 @@ class ResourceOpts(BaseModel): verify_tls: bool = False model_config = ConfigDict(arbitrary_types_allowed=True) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: res = self.dict() # Compensate for the 'alias' field options res['as'] = res.pop('alias') @@ -190,16 +191,16 @@ class ResourceLoadState(str, Enum): class ResourceInfo(BaseModel): resource: str # URL - state: Optional[ResourceLoadState] = None - http_headers: Dict[str, Any] = Field({}) - reason: Optional[str] = None - status_code: Optional[str] = None # HTTP status code as string. 
TODO: change to int - parser_info: Optional[ParserInfo] = None - expired: Optional[bool] = None - exception: Optional[BaseException] = None + state: ResourceLoadState | None = None + http_headers: dict[str, Any] = Field({}) + reason: str | None = None + status_code: str | None = None # HTTP status code as string. TODO: change to int + parser_info: ParserInfo | None = None + expired: bool | None = None + exception: BaseException | None = None model_config = ConfigDict(arbitrary_types_allowed=True) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: def _format_key(k: str) -> str: special = {'http_headers': 'HTTP Response Headers'} if k in special: @@ -224,21 +225,21 @@ def _format_key(k: str) -> str: class Resource(Watchable): - def __init__(self, url: Optional[str], opts: ResourceOpts): + def __init__(self, url: str | None, opts: ResourceOpts): super().__init__() - self.url: Optional[str] = url + self.url: str | None = url self.opts: ResourceOpts = opts - self.t: Optional[ElementTree] = None + self.t: ElementTree | None = None self.type: str = "text/plain" - self.etag: Optional[str] = None - self.expire_time: Optional[datetime] = None + self.etag: str | None = None + self.expire_time: datetime | None = None self.never_expires: bool = False - self.last_seen: Optional[datetime] = None - self.last_parser: Optional['PyffParser'] = None # importing PyffParser in this module causes a loop + self.last_seen: datetime | None = None + self.last_parser: PyffParser | None = None # importing PyffParser in this module causes a loop self._infos: Deque[ResourceInfo] = deque(maxlen=config.info_buffer_size) self.children: Deque[Resource] = deque() - self.trust_info: Optional[dict] = None - self.md_sources: Optional[dict] = None + self.trust_info: dict | None = None + self.md_sources: dict | None = None self._setup() def _setup(self): @@ -246,9 +247,9 @@ def _setup(self): if "://" not in self.url: pth = os.path.abspath(self.url) if os.path.isdir(pth): - 
self.url = "dir://{}".format(pth) + self.url = f"dir://{pth}" elif os.path.isfile(pth) or os.path.isabs(self.url): - self.url = "file://{}".format(pth) + self.url = f"file://{pth}" if self.url.startswith('file://') or self.url.startswith('dir://'): self.never_expires = True @@ -282,7 +283,7 @@ def cleanup(self) -> Iterable[Callable]: # TODO: move classes to make this work def __str__(self): return "Resource {} expires at {} using ".format( self.url if self.url is not None else "(root)", self.expire_time - ) + ",".join(["{}={}".format(k, v) for k, v in sorted(list(self.opts.model_dump().items()))]) + ) + ",".join([f"{k}={v}" for k, v in sorted(list(self.opts.model_dump().items()))]) def reload(self, fail_on_error=False): with non_blocking_lock(self.lock): @@ -317,8 +318,7 @@ def walk(self): if self.url is not None: yield self for c in self.children: - for cn in c.walk(): - yield cn + yield from c.walk() def is_expired(self) -> bool: if self.never_expires or self.expire_time is None: @@ -338,7 +338,7 @@ def _replace(self, r: Resource) -> None: if self.children[i].url == r.url: self.children[i] = r return - raise ValueError("Resource {} not present - use add_child".format(r.url)) + raise ValueError(f"Resource {r.url} not present - use add_child") def add_child(self, url: str, opts: ResourceOpts) -> Resource: """ @@ -358,7 +358,7 @@ def add_child(self, url: str, opts: ResourceOpts) -> Resource: return r @property - def name(self) -> Optional[str]: + def name(self) -> str | None: if self.opts.alias: return self.opts.alias return self.url @@ -370,7 +370,7 @@ def info(self) -> ResourceInfo: else: return self._infos[-1] - def load_backup(self) -> Optional[str]: + def load_backup(self) -> str | None: if config.local_copy_dir is None: return None @@ -379,7 +379,7 @@ def load_backup(self) -> Optional[str]: if isinstance(res, bytes): return res.decode('utf-8') return res - except IOError as ex: + except OSError as ex: log.warning( "Caught an exception trying to load local 
backup for {} via {}: {}".format( self.url, self.local_copy_fn, ex @@ -387,20 +387,20 @@ def load_backup(self) -> Optional[str]: ) return None - def save_backup(self, data: Optional[str]) -> None: + def save_backup(self, data: str | None) -> None: if config.local_copy_dir is not None: try: safe_write(self.local_copy_fn, data, True) - except IOError as ex: - log.warning("unable to save backup copy of {}: {}".format(self.url, ex)) + except OSError as ex: + log.warning(f"unable to save backup copy of {self.url}: {ex}") - def load_resource(self, getter: Callable[[str], Response]) -> Tuple[Optional[str], int, ResourceInfo]: - data: Optional[str] = None + def load_resource(self, getter: Callable[[str], Response]) -> tuple[str | None, int, ResourceInfo]: + data: str | None = None status: int = 500 info = self.add_info() verify_tls = self.opts.verify_tls - log.debug("Loading resource {}".format(self.url)) + log.debug(f"Loading resource {self.url}") if not self.url: log.error(f'No URL for resource {self}') @@ -432,23 +432,23 @@ def load_resource(self, getter: Callable[[str], Response]) -> Tuple[Optional[str ) data = self.load_backup() if data is not None and len(data) > 0: - info.reason = "Retrieved from local cache because status: {} != 200".format(status) + info.reason = f"Retrieved from local cache because status: {status} != 200" status = 218 info.status_code = str(status) - except IOError as ex: + except OSError as ex: if self.local_copy_fn is not None: - log.warning("caught exception from {} - trying local backup: {}".format(self.url, ex)) + log.warning(f"caught exception from {self.url} - trying local backup: {ex}") data = self.load_backup() if data is not None and len(data) > 0: - info.reason = "Retrieved from local cache because exception: {}".format(ex) + info.reason = f"Retrieved from local cache because exception: {ex}" status = 218 if data is None or not len(data) > 0: raise ex # propagate exception if we can't find a backup if data is None or not len(data) > 
0: - raise ResourceException("failed to fetch {} (status: {:d})".format(self.url, status)) + raise ResourceException(f"failed to fetch {self.url} (status: {status:d})") info.state = ResourceLoadState.Fetched @@ -482,7 +482,7 @@ def parse(self, getter: Callable[[str], Response]) -> Deque[Resource]: if self.is_expired(): info.expired = True - raise ResourceException("Resource at {} expired on {}".format(self.url, self.expire_time)) + raise ResourceException(f"Resource at {self.url} expired on {self.expire_time}") else: info.expired = False diff --git a/src/pyff/samlmd.py b/src/pyff/samlmd.py index c611a55c..61f3ef75 100644 --- a/src/pyff/samlmd.py +++ b/src/pyff/samlmd.py @@ -50,7 +50,7 @@ log = get_log(__name__) -class EntitySet(object): +class EntitySet: def __init__(self, initial=None): self._e = dict() if initial is not None: @@ -66,8 +66,7 @@ def discard(self, value): del self._e[entity_id] def __iter__(self): - for e in list(self._e.values()): - yield e + yield from list(self._e.values()) def __len__(self): return len(list(self._e.keys())) @@ -89,7 +88,7 @@ def parse_saml_metadata( source: BytesIO, opts: ResourceOpts, base_url=None, - validation_errors: Optional[Dict[str, Any]] = None, + validation_errors: Optional[dict[str, Any]] = None, ): """Parse a piece of XML and return an EntitiesDescriptor element after validation. 
@@ -145,7 +144,7 @@ def parse_saml_metadata( except Exception as ex: log.debug(traceback.format_exc()) - log.error("Error parsing {}: {}".format(base_url, ex)) + log.error(f"Error parsing {base_url}: {ex}") if opts.fail_on_error: raise ex @@ -157,7 +156,7 @@ def parse_saml_metadata( class SAMLParserInfo(ParserInfo): - entities: List[str] = Field([]) # list of entity ids + entities: list[str] = Field([]) # list of entity ids class SAMLMetadataResourceParser(PyffParser): @@ -193,7 +192,7 @@ def _extra_md(_t, resource_opts, **kwargs): return _t sp_entities = kwargs.get('sp_entities') location = kwargs.get('location') - sp_entity = sp_entities.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID)) + sp_entity = sp_entities.find("{{{}}}EntityDescriptor[@entityID='{}']".format(NS['md'], entityID)) if sp_entity is not None: md_source = sp_entity.find( "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" @@ -374,8 +373,8 @@ def filter_invalids_from_document(t: ElementTree, base_url, validation_errors) - if not xsd.validate(e): error = xml_error(xsd.error_log, m=base_url) entity_id = e.get("entityID", "(Missing entityID)") - log.warning('removing \'%s\': schema validation failed: %s' % (entity_id, xsd.error_log)) - validation_errors[entity_id] = "{}".format(xsd.error_log) + log.warning('removing \'{}\': schema validation failed: {}'.format(entity_id, xsd.error_log)) + validation_errors[entity_id] = f"{xsd.error_log}" if e.getparent() is None: return None e.getparent().remove(e) @@ -387,11 +386,11 @@ def filter_or_validate( filter_invalid: bool = False, base_url: str = "", source=None, - validation_errors: Optional[Dict[str, Any]] = None, + validation_errors: Optional[dict[str, Any]] = None, ) -> ElementTree: if validation_errors is None: validation_errors = {} - log.debug("Filtering invalids from {}".format(base_url)) + log.debug(f"Filtering invalids from {base_url}") if filter_invalid: t = filter_invalids_from_document(t, 
base_url=base_url, validation_errors=validation_errors) for entity_id, err in validation_errors.items(): @@ -401,13 +400,13 @@ def filter_or_validate( ) ) else: # all or nothing - log.debug("Validating (one-shot) {}".format(base_url)) + log.debug(f"Validating (one-shot) {base_url}") try: validate_document(t) except DocumentInvalid as ex: err = xml_error(ex.error_log, m=base_url) validation_errors[base_url] = err - raise MetadataException("Validation error while parsing {}: (from {}): {}".format(base_url, source, err)) + raise MetadataException(f"Validation error while parsing {base_url}: (from {source}): {err}") return t @@ -496,7 +495,7 @@ def entitiesdescriptor( if config.devel_write_xml_to_file: import os - with open("/tmp/pyff_entities_out-{}.xml".format(os.getpid()), "w") as fd: + with open(f"/tmp/pyff_entities_out-{os.getpid()}.xml", "w") as fd: fd.write(b2u(dumptree(t))) if validate: @@ -506,7 +505,7 @@ def entitiesdescriptor( ) for base_url, err in validation_errors.items(): - log.error("Validation error: @ {}: {}".format(base_url, err)) + log.error(f"Validation error: @ {base_url}: {err}") return t @@ -885,7 +884,7 @@ def sub_domains(e): def entity_scopes(e): - elt = e.findall('.//{%s}IDPSSODescriptor/{%s}Extensions/{%s}Scope' % (NS['md'], NS['md'], NS['shibmd'])) + elt = e.findall('.//{{{}}}IDPSSODescriptor/{{{}}}Extensions/{{{}}}Scope'.format(NS['md'], NS['md'], NS['shibmd'])) if elt is None or len(elt) == 0: return None return [s.text for s in elt] @@ -991,7 +990,7 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp['entityID'] = e.get('entityID', None) md_sources = e.findall( - "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti']) + "{{{}}}SPSSODescriptor/{{{}}}Extensions/{{{}}}TrustInfo/{{{}}}MetadataSource".format(NS['md'], NS['md'], NS['ti'], NS['ti']) ) sp['extra_md'] = {} @@ -1295,7 +1294,7 @@ def _entity_attributes(e): def _eattribute(e, attr, nf): ea = 
_entity_attributes(e) - a = ea.xpath(".//saml:Attribute[@NameFormat='%s' and @Name='%s']" % (nf, attr), namespaces=NS, smart_strings=False) + a = ea.xpath(".//saml:Attribute[@NameFormat='{}' and @Name='{}']".format(nf, attr), namespaces=NS, smart_strings=False) if a is None or len(a) == 0: a = etree.Element("{%s}Attribute" % NS['saml']) a.set('NameFormat', nf) @@ -1405,11 +1404,11 @@ def get_key(e): except AttributeError: pass except IndexError: - log.warning("Sort pipe: unable to sort entity by '%s'. Entity '%s' has no such value" % (sxp, eid)) + log.warning("Sort pipe: unable to sort entity by '{}'. Entity '{}' has no such value".format(sxp, eid)) except TypeError: pass - log.debug("Generated sort key for entityID='%s' and %s='%s'" % (eid, sxp, sv)) + log.debug("Generated sort key for entityID='{}' and {}='{}'".format(eid, sxp, sv)) return sv is None, sv, eid container = root(t) @@ -1446,6 +1445,6 @@ def _set_nodecountry_in_ext(ext_elt, iso_cc): def diff(t1, t2): - s1 = set([e.get('entityID') for e in iter_entities(root(t1))]) - s2 = set([e.get('entityID') for e in iter_entities(root(t2))]) + s1 = {e.get('entityID') for e in iter_entities(root(t1))} + s2 = {e.get('entityID') for e in iter_entities(root(t2))} return s1.difference(s2) diff --git a/src/pyff/store.py b/src/pyff/store.py index c4c31b33..158e1b8d 100644 --- a/src/pyff/store.py +++ b/src/pyff/store.py @@ -66,7 +66,7 @@ def make_icon_store_instance(*args, **kwargs): return new_store(*args, **kwargs) -class Unpickled(object): +class Unpickled: def _pickle(self, data): return data @@ -128,7 +128,7 @@ def _unpickle(self, pickled_data): _unpickle_value = _unpickle -class IconStore(object): +class IconStore: def __init__(self): pass @@ -150,11 +150,11 @@ def is_valid(self, url): def __call__(self, *args, **kwargs): watched = kwargs.pop('watched', None) scheduler = kwargs.pop('scheduler', None) - log.debug("about to schedule icon refresh on {} using {}".format(self, scheduler.state)) + log.debug(f"about to 
schedule icon refresh on {self} using {scheduler.state}") if watched is not None and scheduler is not None: urls = [] for r in watched.walk(): - log.debug("looking at {}".format(r.url)) + log.debug(f"looking at {r.url}") if r.t is not None: for e in iter_entities(r.t): ico = entity_icon_url(e) @@ -182,7 +182,7 @@ def _load_icons(self, urls): if not self.is_valid(u): tbs.append(u) - log.debug("fetching {} icons".format(len(tbs))) + log.debug(f"fetching {len(tbs)} icons") if len(tbs) > 0: icon_handler = IconHandler(icon_store=self, name="Icons") icon_handler.schedule(tbs) @@ -227,7 +227,7 @@ def _setup(self): if not self._redis: self._redis = redis() # XXX test cases won't get correctly unpicked because of this self.icons = LRUProxyDict( - JSONDict(key="{}_icons".format(self._name), redis=self._redis, writeback=True), maxsize=config.cache_size + JSONDict(key=f"{self._name}_icons", redis=self._redis, writeback=True), maxsize=config.cache_size ) def lookup(self, uri): @@ -254,13 +254,13 @@ def __setstate__(self, state): self._setup() def reset(self): - self._redis.delete("{}_icons".format(self._name)) + self._redis.delete(f"{self._name}_icons") def size(self): return len(self.icons) -class SAMLStoreBase(object): +class SAMLStoreBase: def lookup(self, key): raise NotImplementedError() @@ -282,7 +282,7 @@ def reset(self): raise NotImplementedError() def entity_ids(self): - return set(e.get('entityID') for e in self.lookup('entities')) + return {e.get('entityID') for e in self.lookup('entities')} def _select(self, member=None): if member is None: @@ -407,7 +407,7 @@ def search(self, query=None, path=None, entity_filter=None, related=None): match_query = bool(len(query) > 0) - if isinstance(query, six.string_types): + if isinstance(query, str): query = [query.lower()] def _strings(elt): @@ -518,12 +518,12 @@ def merge(self, *args, **kwargs): class RedisWhooshStore(SAMLStoreBase): # TODO: This needs a gc mechanism for keys (uuids) def json_dict(self, name): return 
LRUProxyDict( - JSONDict(key='{}_{}'.format(self._name, name), redis=self._redis, writeback=True), maxsize=config.cache_size + JSONDict(key=f'{self._name}_{name}', redis=self._redis, writeback=True), maxsize=config.cache_size ) def xml_dict(self, name): return LRUProxyDict( - XMLDict(key='{}_{}'.format(self._name, name), redis=self._redis, writeback=True), maxsize=config.cache_size + XMLDict(key=f'{self._name}_{name}', redis=self._redis, writeback=True), maxsize=config.cache_size ) def __init__(self, *args, **kwargs): @@ -575,8 +575,8 @@ def __call__(self, *args, **kwargs): watched = kwargs.pop('watched', None) scheduler = kwargs.pop('scheduler', None) if watched is not None and scheduler is not None: - super(RedisWhooshStore, self).__call__(watched=watched, scheduler=scheduler) - log.debug("indexing using {}".format(scheduler)) + super().__call__(watched=watched, scheduler=scheduler) + log.debug(f"indexing using {scheduler}") if scheduler is not None: # and self._last_modified > self._last_index_time and : scheduler.add_job( RedisWhooshStore._reindex, @@ -590,7 +590,7 @@ def _reindex(self): log.debug("indexing the store...") self._last_index_time = datetime.now() seen = set() - refs = set([b2u(s) for s in self.objects.keys()]) + refs = {b2u(s) for s in self.objects.keys()} parts = self.parts.values() for ref in refs: for part in parts: @@ -606,7 +606,7 @@ def _reindex(self): with ix.writer() as writer: for ref in refs: if ref not in seen: - log.debug("removing unseen ref {}".format(ref)) + log.debug(f"removing unseen ref {ref}") del self.objects[ref] del self.parts[ref] @@ -653,7 +653,7 @@ def _index_prep(self, info): if k in self.schema.names(): if type(v) in (list, tuple): res[k] = " ".join([vv.lower() for vv in v]) - elif type(v) in six.string_types: + elif type(v) in (str,): res[k] = info[a].lower() res['sha1'] = hash_id(info['entity_id'], prefix=False) return res @@ -706,7 +706,7 @@ def size(self, a=None, v=None): elif a is not None and v is None: return 
len(self.attribute(a)) else: - return len(self.lookup("{!s}={!s}".format(a, v))) + return len(self.lookup(f"{a!s}={v!s}")) def _attributes(self): ix = self.storage.open_index() @@ -734,9 +734,9 @@ def _prep_key(self, key): key = key.replace('-', ' AND NOT ') for uri, a in list(ATTRS_INV.items()): key = key.replace(uri, a) - key = " {!s} ".format(key) - key = re.sub("([^=]+)=(\S+)", "\\1:\\2", key) - key = re.sub("{([^}]+)}(\S+)", "\\1:\\2", key) + key = f" {key!s} " + key = re.sub(r"([^=]+)=(\S+)", "\\1:\\2", key) + key = re.sub(r"{([^}]+)}(\S+)", "\\1:\\2", key) key = key.strip() return key @@ -783,7 +783,7 @@ def lookup(self, key): @ttl_cache(ttl=config.cache_ttl, maxsize=config.cache_size) def search(self, query=None, path=None, entity_filter=None, related=None): if entity_filter: - query = "{!s} AND {!s}".format(query, entity_filter) + query = f"{query!s} AND {entity_filter!s}" query = self._prep_key(query) qp = MultifieldParser(['content', 'domain'], schema=self.schema) q = qp.parse(query) @@ -927,7 +927,7 @@ def _lookup(self, key): m = re.match("^(.+)=(.+)$", key) if m: - return self._lookup("{%s}%s" % (m.group(1), str(m.group(2)).rstrip("/"))) + return self._lookup("{{{}}}{}".format(m.group(1), str(m.group(2)).rstrip("/"))) m = re.match("^{(.+)}(.+)$", key) if m: @@ -942,7 +942,7 @@ def _lookup(self, key): lst = [] for entityID in self.md[key]: lst.extend(self.lookup(entityID)) - log.debug("returning {} entities".format(len(lst))) + log.debug(f"returning {len(lst)} entities") return lst return [] diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index be32c588..3b858cfd 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -7,7 +7,6 @@ from unittest import TestCase import importlib.resources -import six from pyff import __version__ as pyffversion # range of ports where available ports can be found @@ -30,7 +29,7 @@ def find_unbound_port(i=0): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: 
sock.bind(("127.0.0.1", port)) - except socket.error: + except OSError: pass return port @@ -46,7 +45,7 @@ def run_pyffd(*args): def run_cmdline(script, *args): argv = list(*args) starter = tempfile.NamedTemporaryFile('w').name - print("starting %s using %s" % (script, starter)) + print("starting {} using {}".format(script, starter)) with open(starter, 'w') as fd: fd.write( """#!%s @@ -78,9 +77,9 @@ def run_cmdline(script, *args): out, err = proc.communicate() rv = proc.wait() os.unlink(starter) - if isinstance(out, six.binary_type): + if isinstance(out, bytes): out = out.decode('utf-8') - if isinstance(err, six.binary_type): + if isinstance(err, bytes): err = err.decode('utf-8') print(">> STDOUT ---") diff --git a/src/pyff/test/test_decorators.py b/src/pyff/test/test_decorators.py index 1b00cd91..3ff20507 100644 --- a/src/pyff/test/test_decorators.py +++ b/src/pyff/test/test_decorators.py @@ -1,8 +1,8 @@ import logging from unittest import TestCase -from mock import patch -from six import StringIO +from unittest.mock import patch +from io import StringIO from pyff.decorators import deprecated diff --git a/src/pyff/test/test_log.py b/src/pyff/test/test_log.py index 208ded4a..9a681a6a 100644 --- a/src/pyff/test/test_log.py +++ b/src/pyff/test/test_log.py @@ -1,8 +1,8 @@ import logging from unittest import TestCase -from mock import patch -from six import StringIO +from unittest.mock import patch +from io import StringIO from pyff.logs import SysLogLibHandler, log diff --git a/src/pyff/test/test_md_api.py b/src/pyff/test/test_md_api.py index 68cd3714..8b52358d 100644 --- a/src/pyff/test/test_md_api.py +++ b/src/pyff/test/test_md_api.py @@ -35,7 +35,7 @@ def setUpClass(cls): cls.mdx_template = cls.templates.get_template('mdx.fd') with open(cls.mdx, "w+") as fd: fd.write(cls.mdx_template.render(ctx=cls)) - with open(cls.mdx, 'r') as r: + with open(cls.mdx) as r: print("".join(r.readlines())) config.local_copy_dir = td cls._app = mkapp(cls.mdx) @@ -49,7 +49,7 @@ def 
tearDownClass(cls): def test_status(self): with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url: - r = requests.get("{}/api/status".format(url)) + r = requests.get(f"{url}/api/status") assert "application/json" in r.headers['content-type'] assert "version" in r.text assert r.status_code == 200 @@ -61,19 +61,19 @@ def test_status(self): def test_parse_robots(self): try: - import six.moves.urllib_robotparser as robotparser + import urllib.robotparser as robotparser except ImportError as ex: raise unittest.SkipTest() rp = robotparser.RobotFileParser() with UrllibInterceptor(self.app, host='127.0.0.1', port=80) as url: - rp.set_url("{}/robots.txt".format(url)) + rp.set_url(f"{url}/robots.txt") rp.read() assert not rp.can_fetch("*", url) def test_webfinger(self): with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url: - r = requests.get("{}/.well-known/webfinger?resource={}".format(url, url)) + r = requests.get(f"{url}/.well-known/webfinger?resource={url}") assert r.status_code == 200 assert "application/json" in r.headers['content-type'] data = r.json() @@ -88,7 +88,7 @@ def test_webfinger(self): def test_webfinger_rel_dj(self): with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url: - r = requests.get("{}/.well-known/webfinger?resource={}&rel=disco-json".format(url, url)) + r = requests.get(f"{url}/.well-known/webfinger?resource={url}&rel=disco-json") assert r.status_code == 200 assert "application/json" in r.headers['content-type'] data = r.json() @@ -106,11 +106,11 @@ def test_webfinger_rel_dj(self): @pytest.mark.skipif(os.environ.get('PYFF_SKIP_SLOW_TESTS') is not None, reason='Slow tests skipped') def test_load_and_query(self): with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url: - r = requests.post("{}/api/call/update".format(url)) + r = requests.post(f"{url}/api/call/update") assert "application/samlmetadata+xml" in r.headers['Content-Type'] # verify we managed to load something into the DB - r = 
requests.get("{}/api/status".format(url)) + r = requests.get(f"{url}/api/status") assert "application/json" in r.headers['content-type'] assert "version" in r.text assert r.status_code == 200 @@ -121,12 +121,12 @@ def test_load_and_query(self): assert int(data['store']['size']) > 0 # load the NORDUnet IdP as xml - r = requests.get("{}/entities/%7Bsha1%7Dc50752ce1d12c2b37da13a1a396b8e3895d35dd9.xml".format(url)) + r = requests.get(f"{url}/entities/%7Bsha1%7Dc50752ce1d12c2b37da13a1a396b8e3895d35dd9.xml") assert r.status_code == 200 assert 'application/samlmetadata+xml' in r.headers['Content-Type'] # load the NORDUnet IdP as json - r = requests.get("{}/entities/%7Bsha1%7Dc50752ce1d12c2b37da13a1a396b8e3895d35dd9.json".format(url)) + r = requests.get(f"{url}/entities/%7Bsha1%7Dc50752ce1d12c2b37da13a1a396b8e3895d35dd9.json") assert "application/json" in r.headers['Content-Type'] assert r.status_code == 200 data = r.json() @@ -137,7 +137,7 @@ def test_load_and_query(self): assert 'nordu.net' in info['scope'] # check that we get a discovery_responses where we expect one - r = requests.get("{}/entities/%7Bsha1%7Dc3ba81cede254454b64ee9743df19201fe34adc9.json".format(url)) + r = requests.get(f"{url}/entities/%7Bsha1%7Dc3ba81cede254454b64ee9743df19201fe34adc9.json") assert r.status_code == 200 data = r.json() info = data[0] @@ -168,7 +168,7 @@ def setUpClass(cls): - {cls.test01} """ ) - with open(cls.mdx, 'r') as r: + with open(cls.mdx) as r: print("".join(r.readlines())) cls._app = mkapp(cls.mdx) cls.app = lambda *args, **kwargs: cls._app diff --git a/src/pyff/test/test_mdsl.py b/src/pyff/test/test_mdsl.py index 3ec44d75..74553721 100644 --- a/src/pyff/test/test_mdsl.py +++ b/src/pyff/test/test_mdsl.py @@ -5,11 +5,10 @@ import tempfile import pytest -import six import yaml from mako.lookup import TemplateLookup -from mock import patch - +from unittest.mock import patch +from io import StringIO from pyff import builtins from pyff.exceptions import MetadataException from 
pyff.parse import ParserException @@ -71,8 +70,8 @@ def run_pipeline(self, pl_name, ctx=None, md=None): def exec_pipeline(self, pstr): md = MDRepository() - p = yaml.safe_load(six.StringIO(pstr)) - print("\n{}".format(yaml.dump(p))) + p = yaml.safe_load(StringIO(pstr)) + print(f"\n{yaml.dump(p)}") pl = Plumbing(p, pid="test") res = pl.process(md, state={'batch': True, 'stats': {}}) return res, md @@ -105,20 +104,20 @@ def test_eidas_country(self): xml = parse_xml(tmpfile) assert xml is not None entityID = "https://pre.eidas.gov.gr/EidasNode/ServiceMetadata" - with_hide_from_discovery = xml.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID)) + with_hide_from_discovery = xml.find("{{{}}}EntityDescriptor[@entityID='{}']".format(NS['md'], entityID)) assert with_hide_from_discovery is not None - search = "{%s}Extensions/{%s}EntityAttributes/{%s}Attribute[@Name='%s']" % (NS['md'], NS['mdattr'], NS['saml'],'http://macedir.org/entity-category') + search = "{{{}}}Extensions/{{{}}}EntityAttributes/{{{}}}Attribute[@Name='{}']".format(NS['md'], NS['mdattr'], NS['saml'],'http://macedir.org/entity-category') ecs = with_hide_from_discovery.find(search) assert ecs is not None entityID2 = "https://eidas.pp.dev-franceconnect.fr/EidasNode/ServiceMetadata" - without_hide_from_discovery = xml.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID2)) + without_hide_from_discovery = xml.find("{{{}}}EntityDescriptor[@entityID='{}']".format(NS['md'], entityID2)) ecs2 = without_hide_from_discovery.find(search) assert ecs2 is None - except IOError: + except OSError: pass finally: try: #os.unlink(tmpfile) pass - except (IOError, OSError): + except OSError: pass diff --git a/src/pyff/test/test_pipeline.py b/src/pyff/test/test_pipeline.py index d4c687f4..5e6dd09b 100644 --- a/src/pyff/test/test_pipeline.py +++ b/src/pyff/test/test_pipeline.py @@ -1,14 +1,13 @@ import json import os import shutil -import sys import tempfile import pytest -import six import yaml +from 
io import StringIO from mako.lookup import TemplateLookup -from mock import patch +from unittest.mock import patch from pyff import builtins from pyff.exceptions import MetadataException @@ -32,19 +31,19 @@ def _capsys(self, capsys): @property def captured_stdout(self) -> str: - """ Return anything written to STDOUT during this test """ + """Return anything written to STDOUT during this test""" out, _err = self._capsys.readouterr() # type: ignore return out @property def captured_stderr(self) -> str: - """ Return anything written to STDERR during this test """ + """Return anything written to STDERR during this test""" _out, err = self._capsys.readouterr() # type: ignore return err @pytest.fixture(autouse=True) def _caplog(self, caplog): - """ Return anything written to the logging system during this test """ + """Return anything written to the logging system during this test""" self._caplog = caplog @property @@ -69,8 +68,8 @@ def run_pipeline(self, pl_name, ctx=None, md=None): def exec_pipeline(self, pstr): md = MDRepository() - p = yaml.safe_load(six.StringIO(pstr)) - print("\n{}".format(yaml.dump(p))) + p = yaml.safe_load(StringIO(pstr)) + print(f"\n{yaml.dump(p)}") pl = Plumbing(p, pid="test") res = pl.process(md, state={'batch': True, 'stats': {}}) return res, md @@ -280,7 +279,7 @@ def _run_sort_test(expected_order, sxp, res, l): except AssertionError: print( f"Test failed on expecting missing sort value from: '{e[0]}'.\n" - f"Could not find string on the output: '{keygen_fail_str}'.\nOutput was:\n {six.u(l)}" + f"Could not find string on the output: '{keygen_fail_str}'.\nOutput was:\n {l}" ) raise except (IndexError, TypeError): @@ -294,17 +293,15 @@ def _run_sort_test(expected_order, sxp, res, l): from pyff.samlmd import iter_entities elts = list(iter_entities(res)) - print("elts: {}".format(elts)) + print(f"elts: {elts}") for i, me in enumerate(expected_order): - print("{}: {}".format(i, me)) + print(f"{i}: {me}") try: assert elts[i].attrib.get("entityID") 
== me[0] except AssertionError: print( - ( - f"Test failed on verifying sort position {i:d}.\nExpected: {me[0]}; " - f"Found: {elts[i].attrib.get('entityID')} " - ) + f"Test failed on verifying sort position {i:d}.\nExpected: {me[0]}; " + f"Found: {elts[i].attrib.get('entityID')} " ) raise @@ -395,14 +392,14 @@ def test_cert_report(self): eIDs = [e.get('entityID') for e in md.store] assert 'https://idp.aco.net/idp/shibboleth' in eIDs assert 'https://skriptenforum.net/shibboleth' in eIDs - with open(self.output, 'r') as fd: + with open(self.output) as fd: lines = fd.readline() assert len(lines) > 0 def test_cert_report_swamid(self): self.output = tempfile.NamedTemporaryFile('w').name res, md, ctx = self.run_pipeline("certreport-swamid.fd", self) - with open(self.output, 'r') as fd: + with open(self.output) as fd: print(fd.read()) def test_info_and_dump(self): @@ -417,7 +414,7 @@ def test_info_and_dump(self): """ ) assert 'https://idp.nordu.net/idp/shibboleth' in self.captured_stdout - except IOError: + except OSError: pass def test_end_exit(self): @@ -431,7 +428,7 @@ def test_end_exit(self): """ ) assert False - except IOError: + except OSError: pass except ExitException as ex: assert ex.code == 22 @@ -445,7 +442,7 @@ def test_single_dump(self): """ ) assert '' in self.captured_stdout - except IOError: + except OSError: pass def test_missing_select(self): @@ -473,7 +470,7 @@ def test_missing_select(self): assert False except PipeException: pass - except IOError: + except OSError: pass def test_first_select_as(self): @@ -497,12 +494,12 @@ def test_first_select_as(self): assert root(t2).get('entityID') == entity_id except PipeException: pass - except IOError: + except OSError: pass finally: try: os.unlink(tmpfile) - except (IOError, OSError): + except OSError: pass def test_prune(self): @@ -527,7 +524,7 @@ def test_prune(self): assert gone is None except PipeException: pass - except IOError: + except OSError: pass finally: try: @@ -545,7 +542,7 @@ def 
test_empty_store(self): assert False except PipeException: pass - except IOError: + except OSError: pass def test_empty_store2(self): @@ -559,7 +556,7 @@ def test_empty_store2(self): assert False except PipeException: pass - except IOError: + except OSError: pass def test_empty_dir_error(self): @@ -570,7 +567,7 @@ def test_empty_dir_error(self): - {self.datadir}/empty """ ) - except IOError: + except OSError: pass assert "no entities found in" in str(self.captured_log_text) @@ -597,7 +594,7 @@ def test_store_and_retrieve(self): assert t2 is not None assert root(t1).get('entityID') == root(t2).get('entityID') assert root(t2).get('entityID') == entity_id - except IOError: + except OSError: pass finally: shutil.rmtree(tmpdir) @@ -612,7 +609,7 @@ def test_empty_certreport(self): assert False except PipeException: pass - except IOError: + except OSError: pass def test_pick_invalid(self): @@ -630,7 +627,7 @@ def test_pick_invalid(self): assert False except PipeException: pass - except IOError: + except OSError: pass finally: try: @@ -666,7 +663,7 @@ def test_blacklist_single_file(self): assert not md.lookup(entity) def test_blacklist_directory(self): - """ Test filter action when loading all metadata in a directory. + """Test filter action when loading all metadata in a directory. This test has the side effect of testing some resource option inheritance mechanisms. 
""" @@ -731,19 +728,21 @@ def test_discojson_sp(self): tmpdir = tempfile.mkdtemp() os.rmdir(tmpdir) # lets make sure 'store' can recreate it try: - self.exec_pipeline(""" + self.exec_pipeline( + """ - load: - - file://%s/metadata/test02-sp.xml + - file://{}/metadata/test02-sp.xml - select - discojson_sp - publish: - output: %s/disco_sp.json + output: {}/disco_sp.json raw: true update_store: false -""" % (self.datadir, tmpdir)) +""".format(self.datadir, tmpdir) + ) fn = "%s/disco_sp.json" % tmpdir assert os.path.exists(fn) - with open(fn, 'r') as f: + with open(fn) as f: sp_json = json.load(f) assert 'https://example.com.com/shibboleth' in str(sp_json) @@ -752,15 +751,26 @@ def test_discojson_sp(self): assert 'customer' in example_sp_json['profiles'] customer_tinfo = example_sp_json['profiles']['customer'] assert customer_tinfo['entity'][0] == {'entity_id': 'https://example.org/idp.xml', 'include': True} - assert customer_tinfo['entities'][0] == {'select': 'http://www.swamid.se/', 'match': 'registrationAuthority', 'include': True} - assert customer_tinfo['fallback_handler'] == {'profile': 'href', 'handler': 'https://www.example.org/about'} + assert customer_tinfo['entities'][0] == { + 'select': 'http://www.swamid.se/', + 'match': 'registrationAuthority', + 'include': True, + } + assert customer_tinfo['fallback_handler'] == { + 'profile': 'href', + 'handler': 'https://www.example.org/about', + } example_sp_json_2 = sp_json[1] assert 'incommon-wayfinder' in example_sp_json_2['profiles'] tinfo = example_sp_json_2['profiles']['incommon-wayfinder'] - assert tinfo['entities'][0] == {'select': 'https://mdq.incommon.org/entities', 'match': 'md_source', 'include': True} + assert tinfo['entities'][0] == { + 'select': 'https://mdq.incommon.org/entities', + 'match': 'md_source', + 'include': True, + } assert tinfo['strict'] - except IOError: + except OSError: pass finally: shutil.rmtree(tmpdir) @@ -770,28 +780,34 @@ def test_discojson_sp_trustinfo_in_attr(self): tmpdir = 
tempfile.mkdtemp() os.rmdir(tmpdir) # lets make sure 'store' can recreate it try: - self.exec_pipeline(""" + self.exec_pipeline( + """ - load: - - file://%s/metadata/test-sp-trustinfo-in-attr.xml + - file://{}/metadata/test-sp-trustinfo-in-attr.xml - select - discojson_sp_attr - publish: - output: %s/disco_sp_attr.json + output: {}/disco_sp_attr.json raw: true update_store: false -""" % (self.datadir, tmpdir)) +""".format(self.datadir, tmpdir) + ) fn = "%s/disco_sp_attr.json" % tmpdir assert os.path.exists(fn) - with open(fn, 'r') as f: + with open(fn) as f: sp_json = json.load(f) assert 'https://example.com/shibboleth' in str(sp_json) example_sp_json = sp_json[0] assert 'incommon-wayfinder' in example_sp_json['profiles'] tinfo = example_sp_json['profiles']['incommon-wayfinder'] - assert tinfo['entities'][0] == {'select': 'https://mdq.incommon.org/entities', 'match': 'md_source', 'include': True} + assert tinfo['entities'][0] == { + 'select': 'https://mdq.incommon.org/entities', + 'match': 'md_source', + 'include': True, + } assert tinfo['strict'] - except IOError: + except OSError: pass finally: shutil.rmtree(tmpdir) diff --git a/src/pyff/test/test_repo.py b/src/pyff/test/test_repo.py index c2aa497b..cd35fd1a 100644 --- a/src/pyff/test/test_repo.py +++ b/src/pyff/test/test_repo.py @@ -53,7 +53,7 @@ def test_entity_attribute(self): entity_id = root(self.t).get('entityID') set_entity_attributes(root(self.t), {"http://ns.example.org": "foo"}) self.md.store.update(root(self.t), entity_id) - e = self.md.lookup("{%s}%s" % ("http://ns.example.org", 'foo'))[0] + e = self.md.lookup("{{{}}}{}".format("http://ns.example.org", 'foo'))[0] assert e is not None assert e.get('entityID') == entity_id diff --git a/src/pyff/test/test_simple_pipeline.py b/src/pyff/test/test_simple_pipeline.py index bb4362c3..cdaeb683 100644 --- a/src/pyff/test/test_simple_pipeline.py +++ b/src/pyff/test/test_simple_pipeline.py @@ -11,7 +11,7 @@ class SimplePipeLineTest(SignerTestCase): def 
setUp(self): - super(SimplePipeLineTest, self).setUp() + super().setUp() self.templates = TemplateLookup(directories=[os.path.join(self.datadir, 'simple-pipeline')]) self.output = tempfile.NamedTemporaryFile('w').name self.signer = tempfile.NamedTemporaryFile('w').name @@ -52,4 +52,4 @@ def test_select_single(self): assert entities[0].get('entityID') == 'https://idp.aco.net/idp/shibboleth' def tear_down(self): - super(SimplePipeLineTest, self).tearDown() + super().tearDown() diff --git a/src/pyff/test/test_store.py b/src/pyff/test/test_store.py index 8975d27a..fe298cb8 100644 --- a/src/pyff/test/test_store.py +++ b/src/pyff/test/test_store.py @@ -88,7 +88,7 @@ def test_lookup_intersect_test01(self): store = RedisWhooshStore(directory=self.dir, clear=True, name="test", redis=fakeredis.FakeStrictRedis()) store.update(self.test01, etag='test01', lazy=False) entity_id = root(self.test01).get('entityID') - e = store.lookup("%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'idp')) + e = store.lookup("{}={}+{}={}".format(ATTRS['domain'], 'example.com', ATTRS['role'], 'idp')) assert len(e) == 1 assert e[0] is not None assert e[0].get('entityID') is not None @@ -126,7 +126,7 @@ def test_lookup_intersect_empty_test01(self): store = RedisWhooshStore(directory=self.dir, clear=True, name="test", redis=fakeredis.FakeStrictRedis()) store.update(self.test01, etag='test01', lazy=False) entity_id = root(self.test01).get('entityID') - e = store.lookup("%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'sp')) + e = store.lookup("{}={}+{}={}".format(ATTRS['domain'], 'example.com', ATTRS['role'], 'sp')) assert len(e) == 0 def test_search_test01(self): @@ -249,7 +249,7 @@ def test_lookup_intersect_test01(self): store = MemoryStore() store.update(self.test01) entity_id = root(self.test01).get('entityID') - e = store.lookup("%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'idp')) + e = store.lookup("{}={}+{}={}".format(ATTRS['domain'], 
'example.com', ATTRS['role'], 'idp')) assert len(e) == 1 assert e[0] is not None assert e[0].get('entityID') is not None @@ -259,7 +259,7 @@ def test_lookup_intersect_empty_test01(self): store = MemoryStore() store.update(self.test01) entity_id = root(self.test01).get('entityID') - e = store.lookup("%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'sp')) + e = store.lookup("{}={}+{}={}".format(ATTRS['domain'], 'example.com', ATTRS['role'], 'sp')) assert len(e) == 0 diff --git a/src/pyff/test/test_utils.py b/src/pyff/test/test_utils.py index be0118c7..725e365a 100644 --- a/src/pyff/test/test_utils.py +++ b/src/pyff/test/test_utils.py @@ -4,8 +4,6 @@ from threading import Thread, current_thread from unittest import TestCase -import six - from pyff import utils from pyff.constants import NS, as_list_of_string from pyff.resource import Resource, ResourceOpts @@ -88,7 +86,7 @@ def test_resource_filename(self): assert resource_filename(tmp) == tmp (d, fn) = os.path.split(tmp) assert resource_filename(fn, d) == tmp - except IOError as ex: + except OSError as ex: raise ex finally: try: @@ -99,8 +97,8 @@ def test_resource_filename(self): def test_resource_string(self): assert resource_string("missing") is None assert resource_string("missing", "gone") is None - assert resource_string('test/data/empty.txt') == six.b('empty') - assert resource_string('empty.txt', 'test/data') == six.b('empty') + assert resource_string('test/data/empty.txt') == b'empty' + assert resource_string('empty.txt', 'test/data') == b'empty' tmp = tempfile.NamedTemporaryFile('w').name with open(tmp, "w") as fd: fd.write("test") @@ -110,7 +108,7 @@ def test_resource_string(self): assert resource_string(tmp) == 'test' (d, fn) = os.path.split(tmp) assert resource_string(fn, d) == 'test' - except IOError as ex: + except OSError as ex: raise ex finally: try: @@ -212,7 +210,7 @@ def test_convert(self): (basename, _, ext) = fn.rpartition('.') mime_type = TestImage.ext_to_mime.get(ext, None) 
assert mime_type is not None - url = "file://{}".format(fn) + url = f"file://{fn}" assert url r = url_get(url) assert r diff --git a/src/pyff/tools.py b/src/pyff/tools.py index dae12883..5ea0d452 100644 --- a/src/pyff/tools.py +++ b/src/pyff/tools.py @@ -45,13 +45,13 @@ def difftool(): d1 = diff(r1.t, r2.t) if d1: - print("Only in {}".format(r1.url)) + print(f"Only in {r1.url}") print("\n+".join(d1)) status += 2 d2 = diff(r2.t, r1.t) if d2: - print("Only in {}".format(r2.url)) + print(f"Only in {r2.url}") print("\n+".join(d2)) status += 4 diff --git a/src/pyff/utils.py b/src/pyff/utils.py index 67392c74..ce3167d8 100644 --- a/src/pyff/utils.py +++ b/src/pyff/utils.py @@ -1,6 +1,3 @@ -# coding=utf-8 - - """ This module contains various utilities. @@ -25,7 +22,8 @@ from itertools import chain from threading import local from time import gmtime, strftime -from typing import Any, BinaryIO, Callable, Dict, List, Optional, Sequence, Set, Tuple, Union +from typing import Any, BinaryIO, Callable, Dict, List, Optional, Set, Tuple, Union +from collections.abc import Sequence import pkg_resources import requests @@ -43,7 +41,7 @@ from requests.structures import CaseInsensitiveDict from requests_cache import CachedSession from requests_file import FileAdapter -from six.moves.urllib_parse import urlparse +from urllib.parse import urlparse from pyff import __version__ from pyff.constants import NS, config @@ -99,15 +97,15 @@ def resource_string(name: str, pfx: Optional[str] = None) -> Optional[Union[str, name = os.path.expanduser(name) data: Optional[Union[str, bytes]] = None if os.path.exists(name): - with io.open(name) as fd: + with open(name) as fd: data = fd.read() elif pfx and os.path.exists(os.path.join(pfx, name)): - with io.open(os.path.join(pfx, name)) as fd: + with open(os.path.join(pfx, name)) as fd: data = fd.read() elif pkg_resources.resource_exists(__name__, name): data = pkg_resources.resource_string(__name__, name) - elif pfx and 
pkg_resources.resource_exists(__name__, "%s/%s" % (pfx, name)): - data = pkg_resources.resource_string(__name__, "%s/%s" % (pfx, name)) + elif pfx and pkg_resources.resource_exists(__name__, "{}/{}".format(pfx, name)): + data = pkg_resources.resource_string(__name__, "{}/{}".format(pfx, name)) return data @@ -135,8 +133,8 @@ def resource_filename(name, pfx=None): return os.path.join(pfx, name) elif pkg_resources.resource_exists(__name__, name): return pkg_resources.resource_filename(__name__, name) - elif pfx and pkg_resources.resource_exists(__name__, "%s/%s" % (pfx, name)): - return pkg_resources.resource_filename(__name__, "%s/%s" % (pfx, name)) + elif pfx and pkg_resources.resource_exists(__name__, "{}/{}".format(pfx, name)): + return pkg_resources.resource_filename(__name__, "{}/{}".format(pfx, name)) return None @@ -201,7 +199,7 @@ def first_text(elt, tag, default=None): class ResourceResolver(etree.Resolver): def __init__(self): - super(ResourceResolver, self).__init__() + super().__init__() def resolve(self, system_url, public_id, context): """ @@ -305,23 +303,20 @@ def safe_write(fn, data, mkdirs=False): fn = os.path.expanduser(fn) dirname, basename = os.path.split(fn) kwargs = dict(delete=False, prefix=".%s" % basename, dir=dirname) - if six.PY3: - kwargs['encoding'] = "utf-8" - mode = 'w+' - else: - mode = 'w+b' + kwargs['encoding'] = "utf-8" + mode = 'w+' if mkdirs: ensure_dir(fn) - if isinstance(data, six.binary_type): + if isinstance(data, bytes): data = data.decode('utf-8') with tempfile.NamedTemporaryFile(mode, **kwargs) as tmp: if six.PY2: data = data.encode('utf-8') - log.debug("safe writing {} chrs into {}".format(len(data), fn)) + log.debug(f"safe writing {len(data)} chrs into {fn}") tmp.write(data) tmpn = tmp.name if os.path.exists(tmpn) and os.stat(tmpn).st_size > 0: @@ -356,7 +351,7 @@ def root(t): def with_tree(elt, cb): cb(elt) - if isinstance(elt.tag, six.string_types): + if isinstance(elt.tag, str): for child in list(elt): 
with_tree(child, cb) @@ -394,7 +389,7 @@ def _lang(elt: Element, default_lang: Optional[str]) -> Optional[str]: return elt.get("{http://www.w3.org/XML/1998/namespace}lang", default_lang) -def lang_dict(elts: Sequence[Element], getter=lambda e: e, default_lang: Optional[str] = None) -> Dict[str, Callable]: +def lang_dict(elts: Sequence[Element], getter=lambda e: e, default_lang: Optional[str] = None) -> dict[str, Callable]: if default_lang is None: default_lang = config.langs[0] @@ -411,7 +406,7 @@ def find_lang(elts: Sequence[Element], lang: str, default_lang: str) -> Element: return next((e for e in elts if _lang(e, default_lang) == lang), elts[0]) -def filter_lang(elts: Any, langs: Optional[Sequence[str]] = None) -> List[Element]: +def filter_lang(elts: Any, langs: Optional[Sequence[str]] = None) -> list[Element]: if langs is None or type(langs) is not list: langs = config.langs @@ -452,7 +447,7 @@ def xslt_transform(t, stylesheet, params=None): return transform(t, **params) except etree.XSLTApplyError as ex: for entry in transform.error_log: - log.error('\tmessage from line %s, col %s: %s' % (entry.line, entry.column, entry.message)) + log.error('\tmessage from line {}, col {}: {}'.format(entry.line, entry.column, entry.message)) log.error('\tdomain: %s (%d)' % (entry.domain_name, entry.domain)) log.error('\ttype: %s (%d)' % (entry.type_name, entry.type)) log.error('\tlevel: %s (%d)' % (entry.level_name, entry.level)) @@ -499,7 +494,7 @@ def hash_id(entity: Element, hn: str = 'sha1', prefix: bool = True) -> str: hstr = hex_digest(entity_id, hn) if prefix: - return "{%s}%s" % (hn, hstr) + return "{{{}}}{}".format(hn, hstr) else: return hstr @@ -511,7 +506,7 @@ def hex_digest(data, hn='sha1'): if not hasattr(hashlib, hn): raise ValueError("Unknown digest '%s'" % hn) - if not isinstance(data, six.binary_type): + if not isinstance(data, bytes): data = data.encode("utf-8") m = getattr(hashlib, hn)() @@ -664,7 +659,7 @@ def guess_entity_software(e): def is_text(x: 
Any) -> bool: - return isinstance(x, six.string_types) or isinstance(x, six.text_type) + return isinstance(x, str) or isinstance(x, str) def chunks(l, n): @@ -683,7 +678,7 @@ def send(self, request, **kwargs): resp = Response() (_, _, _dir) = request.url.partition('://') if _dir is None or len(_dir) == 0: - raise ValueError("not a directory url: {}".format(request.url)) + raise ValueError(f"not a directory url: {request.url}") resp.raw = six.BytesIO(six.b(_dir)) resp.status_code = 200 resp.reason = "OK" @@ -722,20 +717,20 @@ def url_get(url: str, verify_tls: Optional[bool] = False) -> Response: s.mount('http://', adapter) s.mount('https://', adapter) - headers = {'User-Agent': "pyFF/{}".format(__version__), 'Accept': '*/*'} + headers = {'User-Agent': f"pyFF/{__version__}", 'Accept': '*/*'} _etag = None if _etag is not None: headers['If-None-Match'] = _etag try: r = s.get(url, headers=headers, verify=verify_tls, timeout=config.request_timeout) - except IOError as ex: + except OSError as ex: s = requests.Session() r = s.get(url, headers=headers, verify=verify_tls, timeout=config.request_timeout) if six.PY2: r.encoding = "utf-8" - log.debug("url_get({}) returns {} chrs encoded as {}".format(url, len(r.content), r.encoding)) + log.debug(f"url_get({url}) returns {len(r.content)} chrs encoded as {r.encoding}") if config.request_override_encoding is not None: r.encoding = config.request_override_encoding @@ -784,7 +779,7 @@ def img_to_data(data: bytes, content_type: str) -> Optional[str]: if data64 is None or len(data64) == 0: data64 = safe_b64e(data) - return 'data:{};base64,{}'.format(mime_type, data64) + return f'data:{mime_type};base64,{data64}' def short_id(data): @@ -796,15 +791,15 @@ def unicode_stream(data: str) -> io.BytesIO: return six.BytesIO(data.encode('UTF-8')) -def b2u(data: Union[str, bytes, Tuple, List, Set]) -> Union[str, bytes, Tuple, List, Set]: +def b2u(data: Union[str, bytes, tuple, list, set]) -> Union[str, bytes, tuple, list, set]: if 
is_text(data): return data - elif isinstance(data, six.binary_type): + elif isinstance(data, bytes): return data.decode("utf-8") elif isinstance(data, tuple) or isinstance(data, list): return [b2u(item) for item in data] elif isinstance(data, set): - return set([b2u(item) for item in data]) + return {b2u(item) for item in data} return data @@ -820,10 +815,10 @@ def json_serializer(o): if isinstance(o, threading.Thread): return o.name - raise ValueError("Object {} of type {} is not JSON-serializable via this function".format(repr(o), type(o))) + raise ValueError(f"Object {repr(o)} of type {type(o)} is not JSON-serializable via this function") -class Lambda(object): +class Lambda: def __init__(self, cb: Callable, *args, **kwargs): self._cb = cb self._args = [a for a in args] @@ -852,7 +847,7 @@ def make_default_scheduler(): elif config.scheduler_job_store == 'memory': jobstore = MemoryJobStore() else: - raise ValueError("unknown or unsupported job store type '{}'".format(config.scheduler_job_store)) + raise ValueError(f"unknown or unsupported job store type '{config.scheduler_job_store}'") return BackgroundScheduler( executors={'default': ThreadPoolExecutor(config.worker_pool_size)}, jobstores={'default': jobstore}, @@ -879,8 +874,7 @@ def __getitem__(self, item): def __iter__(self): for d in self._m: - for item in d: - yield item + yield from d def __len__(self) -> int: return sum([len(d) for d in self._m]) @@ -941,8 +935,8 @@ def is_past_ttl(last_seen, ttl=config.cache_ttl): return now > int(last_seen) + fuzz -class Watchable(object): - class Watcher(object): +class Watchable: + class Watcher: def __init__(self, cb, args, kwargs): self.cb = cb self.args = args From 3663f80428e882445a0a6d4c3bc5091b9d18d946 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 13:56:16 +0200 Subject: [PATCH 31/53] Add compat lib for python3.9. 
--- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 0c4a643d..60eb056f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,7 @@ pytz accept_types >=0.4.1 apscheduler==3.6.3 cachetools +eval_type_backport; python_version == '3.9' gunicorn httplib2 >=0.7.7 lxml >=4.1.1 From 6dd252bb7ef03088464c18992a162ca7c246fd0e Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 14:23:56 +0200 Subject: [PATCH 32/53] Remove six references. --- src/pyff/api.py | 3 +-- src/pyff/builtins.py | 2 -- src/pyff/constants.py | 1 - src/pyff/exceptions.py | 1 - src/pyff/mdq.py | 1 - src/pyff/store.py | 3 +-- src/pyff/utils.py | 10 ++-------- 7 files changed, 4 insertions(+), 17 deletions(-) diff --git a/src/pyff/api.py b/src/pyff/api.py index a0ca9bb3..f5632768 100644 --- a/src/pyff/api.py +++ b/src/pyff/api.py @@ -15,7 +15,6 @@ from pyramid.events import NewRequest from pyramid.request import Request from pyramid.response import Response -from six import b from urllib.parse import quote_plus from pyff.constants import config @@ -464,7 +463,7 @@ def _response() -> Generator[bytes, bytes, None]: for e in entities: if in_loop: yield b',' - yield b(dumps(e)) + yield dumps(e) in_loop = True yield b']' diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index 363cc2a0..b510568c 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -1630,8 +1630,6 @@ def emit(req: Plumbing.Request, ctype="application/xml", *opts): raise PipeException("Empty") req.state['headers']['Content-Type'] = ctype - if six.PY2: - d = d return d diff --git a/src/pyff/constants.py b/src/pyff/constants.py index 52bad854..c55d2450 100644 --- a/src/pyff/constants.py +++ b/src/pyff/constants.py @@ -11,7 +11,6 @@ from str2bool import str2bool import pyconfig -import six from pyff import __version__ as pyff_version diff --git a/src/pyff/exceptions.py b/src/pyff/exceptions.py index 9b6b7b80..6a750fd0 100644 --- 
a/src/pyff/exceptions.py +++ b/src/pyff/exceptions.py @@ -1,5 +1,4 @@ __author__ = 'leifj' -import six class PyffException(BaseException): diff --git a/src/pyff/mdq.py b/src/pyff/mdq.py index a32b88ff..7d23a688 100644 --- a/src/pyff/mdq.py +++ b/src/pyff/mdq.py @@ -7,7 +7,6 @@ import os import gunicorn.app.base -from six import iteritems from pyff.api import mkapp from pyff.constants import config, parse_options diff --git a/src/pyff/store.py b/src/pyff/store.py index 158e1b8d..4dead931 100644 --- a/src/pyff/store.py +++ b/src/pyff/store.py @@ -9,7 +9,6 @@ from threading import ThreadError import ipaddress -import six from cachetools.func import ttl_cache from redis_collections import Dict, Set from whoosh.fields import ID, KEYWORD, NGRAMWORDS, Schema @@ -756,7 +755,7 @@ def lookup(self, key): if key == 'entities' or key is None: return self._entities() - bkey = six.b(key) + bkey = key.encode('latin-1') if bkey in self.objects: return [self.objects.get(bkey)] diff --git a/src/pyff/utils.py b/src/pyff/utils.py index ce3167d8..0012a6e5 100644 --- a/src/pyff/utils.py +++ b/src/pyff/utils.py @@ -313,9 +313,6 @@ def safe_write(fn, data, mkdirs=False): data = data.decode('utf-8') with tempfile.NamedTemporaryFile(mode, **kwargs) as tmp: - if six.PY2: - data = data.encode('utf-8') - log.debug(f"safe writing {len(data)} chrs into {fn}") tmp.write(data) tmpn = tmp.name @@ -679,7 +676,7 @@ def send(self, request, **kwargs): (_, _, _dir) = request.url.partition('://') if _dir is None or len(_dir) == 0: raise ValueError(f"not a directory url: {request.url}") - resp.raw = six.BytesIO(six.b(_dir)) + resp.raw = io.BytesIO(_dir.encode("latin-1")) resp.status_code = 200 resp.reason = "OK" resp.headers = {} @@ -727,9 +724,6 @@ def url_get(url: str, verify_tls: Optional[bool] = False) -> Response: s = requests.Session() r = s.get(url, headers=headers, verify=verify_tls, timeout=config.request_timeout) - if six.PY2: - r.encoding = "utf-8" - log.debug(f"url_get({url}) returns 
{len(r.content)} chrs encoded as {r.encoding}") if config.request_override_encoding is not None: @@ -788,7 +782,7 @@ def short_id(data): def unicode_stream(data: str) -> io.BytesIO: - return six.BytesIO(data.encode('UTF-8')) + return io.BytesIO(data.encode('UTF-8')) def b2u(data: Union[str, bytes, tuple, list, set]) -> Union[str, bytes, tuple, list, set]: From 8f955099cc9f4ca3f1d88fd9242a67395862f4b5 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 14:37:36 +0200 Subject: [PATCH 33/53] Don't run tests twice. Also use the virtualenv syntax for running the tools. --- .github/workflows/python-package.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 80b5df95..6256a990 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -45,13 +45,13 @@ jobs: - name: Run tests run: | if [ "${{ matrix.python-version }}" == "3.13" ]; then # workaround for TheKevJames/coveralls-python#523 - python -m pytest --no-cov src + python -m coverage run -m pytest --no-cov src else + python -m coverage erase python -m pytest src - coverage erase - pytest --cov=src/pyff + python -m coverage run -m pytest --cov=src/pyff mv .coverage .coverage.1 - coverage combine + python -m coverage combine fi #make typecheck From d89d1b02b562c529a3dd036cc903be92e04a3116 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 14:45:28 +0200 Subject: [PATCH 34/53] No comment. 
--- .github/workflows/python-package.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 6256a990..e0eaa929 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -48,7 +48,6 @@ jobs: python -m coverage run -m pytest --no-cov src else python -m coverage erase - python -m pytest src python -m coverage run -m pytest --cov=src/pyff mv .coverage .coverage.1 python -m coverage combine From ac18d5aa38beb04a691b94013a61864a92aaff96 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 14:53:17 +0200 Subject: [PATCH 35/53] pydantic .dict() -> .model_dump() --- src/pyff/resource.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/pyff/resource.py b/src/pyff/resource.py index 02482d2f..20a74781 100644 --- a/src/pyff/resource.py +++ b/src/pyff/resource.py @@ -12,8 +12,8 @@ from datetime import datetime from enum import Enum from threading import Condition, Lock -from typing import TYPE_CHECKING, Any, Callable, Deque, Dict, List, Optional, Tuple -from collections.abc import Iterable, Mapping +from typing import TYPE_CHECKING, Any, Callable, Deque +from collections.abc import Iterable from urllib.parse import quote as urlescape import requests @@ -175,7 +175,7 @@ class ResourceOpts(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) def to_dict(self) -> dict[str, Any]: - res = self.dict() + res = self.model_dump() # Compensate for the 'alias' field options res['as'] = res.pop('alias') res['validate'] = res.pop('validate_schema') @@ -208,7 +208,7 @@ def _format_key(k: str) -> str: # Turn validation_errors into 'Validation Errors' return k.replace('_', ' ').title() - res = {_format_key(k): v for k, v in self.dict().items()} + res = {_format_key(k): v for k, v in self.model_dump().items()} if self.parser_info: # Move contents from sub-dict to top of dict, for backwards compatibility @@ -475,7 
+475,7 @@ def parse(self, getter: Callable[[str], Response]) -> Deque[Resource]: if self.post: for cb in self.post: if self.t is not None: - n_t = cb(self.t, self.opts.dict()) + n_t = cb(self.t, self.opts.model_dump()) if n_t is None: log.warn(f'callback did not return anything when parsing {self.url} {info}') self.t = n_t From 44e2543acd25d7eb7db61aba793e7aa5b0409aa5 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 15:10:02 +0200 Subject: [PATCH 36/53] ruff check --fix --- src/pyff/api.py | 4 ++-- src/pyff/builtins.py | 6 +++--- src/pyff/parse.py | 2 +- src/pyff/pipes.py | 4 ++-- src/pyff/repo.py | 3 ++- src/pyff/samlmd.py | 6 +++--- src/pyff/store.py | 33 +++++++++++++++++++-------------- src/pyff/test/test_md_api.py | 2 +- src/pyff/test/test_mdsl.py | 13 +++---------- src/pyff/test/test_rwlock.py | 2 +- src/pyff/test/test_utils.py | 2 +- src/pyff/utils.py | 4 ++-- 12 files changed, 40 insertions(+), 41 deletions(-) diff --git a/src/pyff/api.py b/src/pyff/api.py index f5632768..2ae5f980 100644 --- a/src/pyff/api.py +++ b/src/pyff/api.py @@ -2,7 +2,7 @@ import threading from datetime import datetime, timedelta from json import dumps -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from collections.abc import Generator, Iterable, Mapping import pyramid.httpexceptions as exc @@ -186,7 +186,7 @@ def _d(x: Optional[str], do_split: bool = True) -> tuple[Optional[str], Optional if request.body: try: request.matchdict.update(request.json_body) - except ValueError as ex: + except ValueError: pass entry = request.matchdict.get('entry', 'request') diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index b510568c..ba256ed3 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -14,7 +14,7 @@ from datetime import datetime from io import BytesIO from str2bool import str2bool -from typing import Dict, Optional +from typing import Optional import ipaddress import xmlsec @@ -1457,7 +1457,7 @@ def 
drop_xsi_type(req: Plumbing.Request, *opts): def _drop_xsi_type(elt): try: del elt.attrib["{%s}type" % NS["xsi"]] - except Exception as ex: + except Exception: pass with_tree(root(req.t), _drop_xsi_type) @@ -1570,7 +1570,7 @@ def certreport(req: Plumbing.Request, *opts): "{} expires in {}".format(cert.getSubject(), dt), ) log.warning("{} expires in {}".format(eid, dt)) - except ValueError as ex: + except ValueError: annotate_entity( entity_elt, "certificate-error", diff --git a/src/pyff/parse.py b/src/pyff/parse.py index 758f9705..17d13e71 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -1,7 +1,7 @@ import os from abc import ABC from collections import deque -from typing import Any, Dict, List, Optional +from typing import Any, Optional from pydantic import BaseModel, Field from urllib.parse import quote as urlescape diff --git a/src/pyff/pipes.py b/src/pyff/pipes.py index e097f9b0..2d1df3b1 100644 --- a/src/pyff/pipes.py +++ b/src/pyff/pipes.py @@ -7,7 +7,7 @@ import functools import os import traceback -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union +from typing import Any, Callable from collections.abc import Iterable import yaml @@ -112,7 +112,7 @@ def _n(_d: str) -> tuple[str, list[str]]: opts: list[str] = [] if is_text(d): name, opts = _n(d) - elif hasattr(d, '__iter__') and not type(d) is dict: + elif hasattr(d, '__iter__') and type(d) is not dict: if not len(d): raise PipeException("This does not look like a length of pipe... 
\n%s" % repr(d)) name, opts = _n(d[0]) diff --git a/src/pyff/repo.py b/src/pyff/repo.py index e53232a7..a3890e2a 100644 --- a/src/pyff/repo.py +++ b/src/pyff/repo.py @@ -1,4 +1,5 @@ -import random, time +import random +import time from pyff.constants import NS, config from pyff.logs import get_log diff --git a/src/pyff/samlmd.py b/src/pyff/samlmd.py index 61f3ef75..2e12425b 100644 --- a/src/pyff/samlmd.py +++ b/src/pyff/samlmd.py @@ -2,18 +2,18 @@ import traceback from base64 import b64decode from copy import deepcopy -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta from str2bool import str2bool from io import BytesIO from itertools import chain -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from lxml import etree from lxml.builder import ElementMaker from lxml.etree import DocumentInvalid, Element, ElementTree from pydantic import Field from xmlsec.crypto import CertDict -from .resource import Resource, ResourceHandler, ResourceOpts +from .resource import Resource, ResourceOpts from pyff.constants import ATTRS, NF_URI, NS, config from pyff.exceptions import * diff --git a/src/pyff/store.py b/src/pyff/store.py index 4dead931..34d448f1 100644 --- a/src/pyff/store.py +++ b/src/pyff/store.py @@ -303,7 +303,12 @@ def __call__(self, *args, **kwargs): if watched is not None and scheduler is not None: for r in watched.walk(): if r.t is None and len(r.children) > 0: - r.t = entitiesdescriptor(list(filter(lambda c: c is not None, [c.t for c in r.children])), name=r.name, validate=True, filter_invalid=True) + r.t = entitiesdescriptor( + list(filter(lambda c: c is not None, [c.t for c in r.children])), + name=r.name, + validate=True, + filter_invalid=True, + ) if r.t is not None: self.update(r.t, tid=r.name, etag=r.etag) else: @@ -337,22 +342,22 @@ def select(self, member, xp=None): the metadata repository then it is fetched an treated as a list of (one per line) of selectors. 
If all else fails an empty list is returned. """ - l = self._select(member) - if hasattr(l, 'tag'): - l = [l] - elif hasattr(l, '__iter__'): - l = list(l) + res = self._select(member) + if hasattr(res, 'tag'): + res = [res] + elif hasattr(res, '__iter__'): + res = list(res) if xp is None: - return l + return res else: - log.debug("filtering %d entities using xpath %s" % (len(l), xp)) - t = entitiesdescriptor(l, 'dummy', lookup_fn=self.lookup) + log.debug("filtering %d entities using xpath %s" % (len(res), xp)) + t = entitiesdescriptor(res, 'dummy', lookup_fn=self.lookup) if t is None: return [] - l = root(t).xpath(xp, namespaces=NS, smart_strings=False) - log.debug("got %d entities after filtering" % len(l)) - return l + res = root(t).xpath(xp, namespaces=NS, smart_strings=False) + log.debug("got %d entities after filtering" % len(res)) + return res def merge(self, t, nt, strategy=merge_strategies.replace_existing, strategy_name=None): """ @@ -463,7 +468,7 @@ def _match(qq, elt): m = _match(query, e) if m is not None: d = entity_simple_summary(e) - ll = d['title'].lower() + _ll = d['title'].lower() d['matched'] = m else: d = entity_simple_summary(e) @@ -620,7 +625,7 @@ def _reindex(self): try: log.debug("releasing index lock") lock.release() - except ThreadError as ex: + except ThreadError as _ex: pass def dump(self): diff --git a/src/pyff/test/test_md_api.py b/src/pyff/test/test_md_api.py index 8b52358d..f7b18e60 100644 --- a/src/pyff/test/test_md_api.py +++ b/src/pyff/test/test_md_api.py @@ -62,7 +62,7 @@ def test_status(self): def test_parse_robots(self): try: import urllib.robotparser as robotparser - except ImportError as ex: + except ImportError: raise unittest.SkipTest() rp = robotparser.RobotFileParser() diff --git a/src/pyff/test/test_mdsl.py b/src/pyff/test/test_mdsl.py index 74553721..af6414c3 100644 --- a/src/pyff/test/test_mdsl.py +++ b/src/pyff/test/test_mdsl.py @@ -1,22 +1,15 @@ -import json import os -import shutil -import sys import tempfile import 
pytest import yaml from mako.lookup import TemplateLookup -from unittest.mock import patch from io import StringIO from pyff import builtins -from pyff.exceptions import MetadataException -from pyff.parse import ParserException -from pyff.pipes import PipeException, Plumbing, plumbing +from pyff.pipes import Plumbing, plumbing from pyff.repo import MDRepository -from pyff.resource import ResourceException -from pyff.test import ExitException, SignerTestCase -from pyff.utils import hash_id, parse_xml, resource_filename, root, dumptree +from pyff.test import SignerTestCase +from pyff.utils import parse_xml from pyff.constants import NS diff --git a/src/pyff/test/test_rwlock.py b/src/pyff/test/test_rwlock.py index 3bc3d204..38216187 100644 --- a/src/pyff/test/test_rwlock.py +++ b/src/pyff/test/test_rwlock.py @@ -138,7 +138,7 @@ def test_deadlock(self): for i in range(0, 10): self._raise(w[i]) assert False - except ValueError as ex: + except ValueError: pass def test_2_readers_and_3_writers(self): diff --git a/src/pyff/test/test_utils.py b/src/pyff/test/test_utils.py index 725e365a..c58b0141 100644 --- a/src/pyff/test/test_utils.py +++ b/src/pyff/test/test_utils.py @@ -190,7 +190,7 @@ def _cb(*args, **kwargs): try: f("foo") assert False - except AssertionError as ex: + except AssertionError: pass diff --git a/src/pyff/utils.py b/src/pyff/utils.py index 0012a6e5..d5ee28c8 100644 --- a/src/pyff/utils.py +++ b/src/pyff/utils.py @@ -22,7 +22,7 @@ from itertools import chain from threading import local from time import gmtime, strftime -from typing import Any, BinaryIO, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, BinaryIO, Callable, Optional, Union from collections.abc import Sequence import pkg_resources @@ -720,7 +720,7 @@ def url_get(url: str, verify_tls: Optional[bool] = False) -> Response: headers['If-None-Match'] = _etag try: r = s.get(url, headers=headers, verify=verify_tls, timeout=config.request_timeout) - except OSError as ex: + 
except OSError: s = requests.Session() r = s.get(url, headers=headers, verify=verify_tls, timeout=config.request_timeout) From cb5d3d33546468eb5e7a4a13f0a2bf1d452acc29 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 15:19:50 +0200 Subject: [PATCH 37/53] Move PyffException. --- src/pyff/utils.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/src/pyff/utils.py b/src/pyff/utils.py index d5ee28c8..7e7ad901 100644 --- a/src/pyff/utils.py +++ b/src/pyff/utils.py @@ -3,6 +3,7 @@ This module contains various utilities. """ + import base64 import cgi import contextlib @@ -45,7 +46,7 @@ from pyff import __version__ from pyff.constants import NS, config -from pyff.exceptions import * +from pyff.exceptions import MetadataException, ResourceException from pyff.logs import get_log etree.set_default_parser(etree.XMLParser(resolve_entities=False)) @@ -73,8 +74,8 @@ def debug_observer(e): log.error(repr(e)) -def trunc_str(x, l): - return (x[:l] + '..') if len(x) > l else x +def trunc_str(x, length): + return (x[:length] + '..') if len(x) > length else x def resource_string(name: str, pfx: Optional[str] = None) -> Optional[Union[str, bytes]]: @@ -143,7 +144,7 @@ def totimestamp(dt: datetime, epoch=datetime(1970, 1, 1)) -> int: epoch = epoch.replace(tzinfo=dt.tzinfo) td = dt - epoch - ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 1e6 + ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 1e6 return int(ts) @@ -421,7 +422,7 @@ def filter_lang(elts: Any, langs: Optional[Sequence[str]] = None) -> list[Elemen raise RuntimeError('Configuration is missing langs') dflt = langs[0] - lst = [find_lang(elts, l, dflt) for l in langs] + lst = [find_lang(elts, lang, dflt) for lang in langs] if len(lst) > 0: return lst else: @@ -477,7 +478,7 @@ def total_seconds(dt: timedelta) -> float: if hasattr(dt, "total_seconds"): return dt.total_seconds() # TODO: Remove? 
I guess this is for Python < 3 - return (dt.microseconds + (dt.seconds + dt.days * 24 * 3600) * 10 ** 6) / 10 ** 6 + return (dt.microseconds + (dt.seconds + dt.days * 24 * 3600) * 10**6) / 10**6 def etag(s): @@ -524,7 +525,7 @@ def has_tag(t, tag): def url2host(url): - (host, sep, port) = urlparse(url).netloc.partition(':') + (host, sep, _port) = urlparse(url).netloc.partition(':') return host @@ -659,10 +660,10 @@ def is_text(x: Any) -> bool: return isinstance(x, str) or isinstance(x, str) -def chunks(l, n): +def chunks(input_list, n): """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i : i + n] + for i in range(0, len(input_list), n): + yield input_list[i : i + n] class DirAdapter(BaseAdapter): @@ -748,7 +749,7 @@ def safe_b64d(s: str) -> bytes: def img_to_data(data: bytes, content_type: str) -> Optional[str]: """Convert a file (specified by a path) into a data URI.""" - mime_type, options = cgi.parse_header(content_type) + mime_type, _options = cgi.parse_header(content_type) data64 = None if len(data) > config.icon_maxsize: return None @@ -952,7 +953,7 @@ def __init__(self): def add_watcher(self, cb, *args, **kwargs): self.watchers.append(Watchable.Watcher(cb, args, kwargs)) - def remove_watcher(self, cb, *args, **kwargs): + def remove_watcher(self, cb, *_args, **_kwargs): self.watchers.remove(Watchable.Watcher(cb)) def notify(self, *args, **kwargs): @@ -966,5 +967,5 @@ def notify(self, *args, **kwargs): def utc_now() -> datetime: - """ Return current time with tz=UTC """ + """Return current time with tz=UTC""" return datetime.now(tz=timezone.utc) From c64d5914029731a0c093e50b63e69a17b5cf9762 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 15:28:19 +0200 Subject: [PATCH 38/53] Move PyffException. 
--- src/pyff/pipes.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/pyff/pipes.py b/src/pyff/pipes.py index 2d1df3b1..92101715 100644 --- a/src/pyff/pipes.py +++ b/src/pyff/pipes.py @@ -2,6 +2,7 @@ Pipes and plumbing. Plumbing instances are sequences of pipes. Each pipe is called in order to load, select, transform, sign or output SAML metadata. """ + from __future__ import annotations import functools @@ -17,7 +18,8 @@ from pyff.logs import get_log from pyff.repo import MDRepository from pyff.store import SAMLStoreBase -from pyff.utils import PyffException, is_text, resource_string +from pyff.utils import is_text, resource_string +from pyff.exceptions import PyffException log = get_log(__name__) From a7b4ed85d8303a196b5aae2cd6343d459327e7cc Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 15:57:26 +0200 Subject: [PATCH 39/53] Even more cleanup, fixing linting errors. --- scripts/csv2xrd.py | 30 +++++++++++++++--------------- src/pyff/builtins.py | 6 +++--- src/pyff/constants.py | 6 +++--- src/pyff/parse.py | 4 ++-- src/pyff/samlmd.py | 17 ++++++++++------- src/pyff/tools.py | 8 +++++--- 6 files changed, 38 insertions(+), 33 deletions(-) diff --git a/scripts/csv2xrd.py b/scripts/csv2xrd.py index f418ef29..c71055cc 100755 --- a/scripts/csv2xrd.py +++ b/scripts/csv2xrd.py @@ -9,20 +9,20 @@ xrds = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}XRDS", nsmap=ns) with io.open(sys.argv[1]) as fd: - for l in fd.readlines(): - l = l.strip() - e = [x.strip('"') for x in l.split(",")] - xrd = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}XRD", nsmap=ns) - xrds.append(xrd) - subject = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}Subject", nsmap=ns) - subject.text = e[3] - link = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}Link", nsmap=ns) - link.set('rel',"urn:oasis:names:tc:SAML:2.0:metadata") - link.set('href',e[3]) - xrd.append(subject) - xrd.append(link) - title = 
etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}Title", nsmap=ns) - title.text = e[1] - link.append(title) + for line in fd.readlines(): + line = line.strip() + e = [x.strip('"') for x in line.split(",")] + xrd = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}XRD", nsmap=ns) + xrds.append(xrd) + subject = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}Subject", nsmap=ns) + subject.text = e[3] + link = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}Link", nsmap=ns) + link.set('rel', "urn:oasis:names:tc:SAML:2.0:metadata") + link.set('href', e[3]) + xrd.append(subject) + xrd.append(link) + title = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}Title", nsmap=ns) + title.text = e[1] + link.append(title) print(etree.tostring(xrds, pretty_print=True)) diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index ba256ed3..34f4c931 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -301,7 +301,7 @@ def fork(req: Plumbing.Request, *opts): def _any(lst, d): for x in lst: if x in d: - if type(d) == dict: + if d is dict: return d[x] else: return True @@ -837,7 +837,7 @@ def select(req: Plumbing.Request, *opts): match = req.state['match'] if isinstance(match, str): - query = [match.lower()] + _query = [match.lower()] def _strings(elt): lst = [] @@ -869,7 +869,7 @@ def _match(q, elt): if q is not None and len(q) > 0: tokens = _strings(elt) - p = re.compile(fr'\b{q}', re.IGNORECASE) + p = re.compile(rf'\b{q}', re.IGNORECASE) for tstr in tokens: if p.search(tstr): return tstr diff --git a/src/pyff/constants.py b/src/pyff/constants.py index c55d2450..874f53f3 100644 --- a/src/pyff/constants.py +++ b/src/pyff/constants.py @@ -523,9 +523,9 @@ def parse_options(program, docs): if config.loglevel is None: config.loglevel = 'INFO' - - if config.aliases is None or len(config.aliases) == 0: - config.aliases = dict(metadata=entities) + # FIXME, don't know what this is, but it is wrong. 
+ # if config.aliases is None or len(config.aliases) == 0: + # config.aliases = dict(metadata=entities) if config.modules is None: config.modules = [] diff --git a/src/pyff/parse.py b/src/pyff/parse.py index 17d13e71..3d346a06 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -27,7 +27,7 @@ def _format_key(k: str) -> str: # Turn expiration_time into 'Expiration Time' return k.replace('_', ' ').title() - res = {_format_key(k): v for k, v in self.dict().items()} + res = {_format_key(k): v for k, v in self.model_dump().items()} return res @@ -114,7 +114,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: info = ParserInfo(description='XRD links', expiration_time='never expires') t = parse_xml(unicode_stream(content)) - relt = root(t) + _relt = root(t) for xrd in t.iter("{%s}XRD" % NS['xrd']): for link in xrd.findall(".//{{{}}}Link[@rel='{}']".format(NS['xrd'], NS['md'])): link_href = link.get("href") diff --git a/src/pyff/samlmd.py b/src/pyff/samlmd.py index 2e12425b..0f1fa4e6 100644 --- a/src/pyff/samlmd.py +++ b/src/pyff/samlmd.py @@ -16,10 +16,9 @@ from .resource import Resource, ResourceOpts from pyff.constants import ATTRS, NF_URI, NS, config -from pyff.exceptions import * +from pyff.exceptions import MetadataException from pyff.logs import get_log from pyff.parse import ParserInfo, PyffParser, add_parser -from pyff.resource import Resource, ResourceOpts from pyff.utils import ( Lambda, b2u, @@ -262,7 +261,7 @@ def _format_key(k: str) -> str: # Turn expiration_time into 'Expiration Time' return k.replace('_', ' ').title() - res = {_format_key(k): v for k, v in self.dict().items()} + res = {_format_key(k): v for k, v in self.model_dump().items()} return res @@ -371,7 +370,7 @@ def filter_invalids_from_document(t: ElementTree, base_url, validation_errors) - xsd = schema() for e in iter_entities(t): if not xsd.validate(e): - error = xml_error(xsd.error_log, m=base_url) + _error = xml_error(xsd.error_log, m=base_url) entity_id = 
e.get("entityID", "(Missing entityID)") log.warning('removing \'{}\': schema validation failed: {}'.format(entity_id, xsd.error_log)) validation_errors[entity_id] = f"{xsd.error_log}" @@ -703,7 +702,7 @@ def _u(an, values): def gen_icon(e): - scopes = entity_scopes(e) + _scopes = entity_scopes(e) def entity_icon_url(e, langs=None): @@ -990,7 +989,9 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp['entityID'] = e.get('entityID', None) md_sources = e.findall( - "{{{}}}SPSSODescriptor/{{{}}}Extensions/{{{}}}TrustInfo/{{{}}}MetadataSource".format(NS['md'], NS['md'], NS['ti'], NS['ti']) + "{{{}}}SPSSODescriptor/{{{}}}Extensions/{{{}}}TrustInfo/{{{}}}MetadataSource".format( + NS['md'], NS['md'], NS['ti'], NS['ti'] + ) ) sp['extra_md'] = {} @@ -1294,7 +1295,9 @@ def _entity_attributes(e): def _eattribute(e, attr, nf): ea = _entity_attributes(e) - a = ea.xpath(".//saml:Attribute[@NameFormat='{}' and @Name='{}']".format(nf, attr), namespaces=NS, smart_strings=False) + a = ea.xpath( + ".//saml:Attribute[@NameFormat='{}' and @Name='{}']".format(nf, attr), namespaces=NS, smart_strings=False + ) if a is None or len(a) == 0: a = etree.Element("{%s}Attribute" % NS['saml']) a.set('NameFormat', nf) diff --git a/src/pyff/tools.py b/src/pyff/tools.py index 5ea0d452..6e5aa1d7 100644 --- a/src/pyff/tools.py +++ b/src/pyff/tools.py @@ -6,6 +6,7 @@ [--version] [uri1] [uri2] """ + import logging import sys import traceback @@ -19,7 +20,8 @@ from pyff.store import MemoryStore -def difftool(): +# TODO: Is this even working? remove? 
+def difftool(r1, r2): """ diff two saml metadata sources """ @@ -31,12 +33,12 @@ def difftool(): try: rm = Resource() + r1 = Resource(url=args[0], opts=ResourceOpts()) + r2 = Resource(url=args[1], opts=ResourceOpts()) rm.add(r1) rm.add(r2) store = MemoryStore() rm.reload(store=store) - r1 = Resource(url=args[0], opts=ResourceOpts()) - r2 = Resource(url=args[1], opts=ResourceOpts()) status = 0 if r1.t.get('Name') != r2.t.get('Name'): From 4b4c24d414e15dcc63465fe653ebca5513872998 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 16:04:44 +0200 Subject: [PATCH 40/53] Linting fixes. --- src/pyff/repo.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/pyff/repo.py b/src/pyff/repo.py index a3890e2a..ac31cc6c 100644 --- a/src/pyff/repo.py +++ b/src/pyff/repo.py @@ -77,19 +77,19 @@ def lookup(self, member, xp=None, store=None): if store is None: store = self.store - l = self._lookup(member, store=store) - if hasattr(l, 'tag'): - l = [l] - elif hasattr(l, '__iter__'): - l = list(l) + etree = self._lookup(member, store=store) + if hasattr(etree, 'tag'): + etree = [etree] + elif hasattr(etree, '__iter__'): + etree = list(etree) if xp is None: - return l + return etree else: - log.debug("filtering %d entities using xpath %s" % (len(l), xp)) - t = entitiesdescriptor(l, 'dummy', lookup_fn=self.lookup) + log.debug("filtering %d entities using xpath %s" % (len(etree), xp)) + t = entitiesdescriptor(etree, 'dummy', lookup_fn=self.lookup) if t is None: return [] - l = root(t).xpath(xp, namespaces=NS, smart_strings=False) - log.debug("got %d entities after filtering" % len(l)) - return l + etree = root(t).xpath(xp, namespaces=NS, smart_strings=False) + log.debug("got %d entities after filtering" % len(etree)) + return etree From c6a7c67336e057c6621670de6643c63644994adc Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 16:09:36 +0200 Subject: [PATCH 41/53] Actually use context manager. 
--- src/pyff/store.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyff/store.py b/src/pyff/store.py index 34d448f1..d9dca93c 100644 --- a/src/pyff/store.py +++ b/src/pyff/store.py @@ -633,7 +633,7 @@ def dump(self): from whoosh.query import Every with ix.searcher() as searcher: - for result in ix.searcher().search(Every('object_id')): + for result in searcher.search(Every('object_id')): print(result) def _index_prep(self, info): From e66b30c62f9f096fa0c63b516a59d3de5ca2f0a1 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 16:11:00 +0200 Subject: [PATCH 42/53] Tell linter that assigned is not used. --- src/pyff/test/test_log.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyff/test/test_log.py b/src/pyff/test/test_log.py index 9a681a6a..222709dc 100644 --- a/src/pyff/test/test_log.py +++ b/src/pyff/test/test_log.py @@ -50,7 +50,7 @@ def dummy_syslog(self, code, msg): def test_bad_syslog(self): try: - bad_handler = SysLogLibHandler("SLARTIBARTIFAST") + _bad_handler = SysLogLibHandler("SLARTIBARTIFAST") assert False except ValueError: pass From 7b76cc57e1a15cb0e79b3ffccb1e392f54cb1e7b Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 16:13:13 +0200 Subject: [PATCH 43/53] Use modern type comparasion. black fixes. 
--- src/pyff/test/test_md_api.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/pyff/test/test_md_api.py b/src/pyff/test/test_md_api.py index f7b18e60..cabe33aa 100644 --- a/src/pyff/test/test_md_api.py +++ b/src/pyff/test/test_md_api.py @@ -10,7 +10,7 @@ from mako.lookup import TemplateLookup from wsgi_intercept.interceptor import RequestsInterceptor, UrllibInterceptor -from pyff.api import mkapp +from pyff.api import mkapp from pyff.constants import config from pyff.test import SignerTestCase from pyff.test.test_pipeline import PipeLineTest @@ -20,6 +20,7 @@ class PyFFAPITest(PipeLineTest): """ Runs twill tests using the wsgi-intercept """ + mdx = None mdx_template = None app = None @@ -31,7 +32,7 @@ def setUpClass(cls): cls.templates = TemplateLookup(directories=[os.path.join(cls.datadir, 'mdx')]) with tempfile.TemporaryDirectory() as td: cls.tmp = td - cls.mdx = os.path.join(td,'mdx.fd') + cls.mdx = os.path.join(td, 'mdx.fd') cls.mdx_template = cls.templates.get_template('mdx.fd') with open(cls.mdx, "w+") as fd: fd.write(cls.mdx_template.render(ctx=cls)) @@ -132,7 +133,7 @@ def test_load_and_query(self): data = r.json() assert data is not None and len(data) == 1 info = data[0] - assert type(info) == dict + assert info is dict assert info['title'] == 'NORDUnet' assert 'nordu.net' in info['scope'] @@ -141,7 +142,10 @@ def test_load_and_query(self): assert r.status_code == 200 data = r.json() info = data[0] - assert 'https://box-idp.nordu.net/simplesaml/module.php/saml/sp/discoResponse' in info['discovery_responses'] + assert ( + 'https://box-idp.nordu.net/simplesaml/module.php/saml/sp/discoResponse' in info['discovery_responses'] + ) + class PyFFAPITestResources(PipeLineTest): """ From 9213234c2e4e32a7722fec124860fa3172b9fe56 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 16:19:44 +0200 Subject: [PATCH 44/53] Tell linter about unused assignments. 
--- src/pyff/test/test_pipeline.py | 52 +++++++++++++++++----------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/src/pyff/test/test_pipeline.py b/src/pyff/test/test_pipeline.py index 5e6dd09b..411742bb 100644 --- a/src/pyff/test/test_pipeline.py +++ b/src/pyff/test/test_pipeline.py @@ -108,7 +108,7 @@ class LoadErrorTest(PipeLineTest): # A File that does not exist must throw an error with fail_on_error=True def test_fail_on_error_no_file(self): try: - res, md = self.exec_pipeline( + res, _md = self.exec_pipeline( f""" - load fail_on_error True: - {self.datadir}/file_that_does_not_exist.xml @@ -126,7 +126,7 @@ def test_fail_on_error_no_file(self): # A File that does not exist must throw an error with fail_on_error=True def test_fail_on_error_no_file_url(self): try: - res, md = self.exec_pipeline( + res, _md = self.exec_pipeline( f""" - load fail_on_error True: - file://{self.datadir}/file_that_does_not_exist.xml @@ -145,7 +145,7 @@ def test_fail_on_error_no_file_url(self): # Note: Due to load_url retries it takes 20s to complete this test def test_fail_on_error_no_url(self): try: - res, md = self.exec_pipeline( + res, _md = self.exec_pipeline( """ - load fail_on_error True: - http://127.0.0.1/does_not_exist.xml @@ -163,7 +163,7 @@ def test_fail_on_error_no_url(self): # A file with invalid XML must throw an exception with fail_on_error True: def test_fail_on_error_invalid_file(self): try: - res, md = self.exec_pipeline( + res, _md = self.exec_pipeline( f""" - load fail_on_error True: - {self.datadir}/metadata/test01.xml @@ -181,7 +181,7 @@ def test_fail_on_error_invalid_file(self): # A directory with a file with invalid metadata must throw an exception with fail_on_error True and filter_invalid False: def test_fail_on_error_invalid_dir(self): try: - res, md = self.exec_pipeline( + res, _md = self.exec_pipeline( f""" - load fail_on_error True filter_invalid False: - {self.datadir}/metadata/ @@ -197,7 +197,7 @@ def 
test_fail_on_error_invalid_dir(self): # A file with invalid XML must not throw an exception by default (fail_on_error False): def test_no_fail_on_error_invalid_file(self): - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - load: - {self.datadir}/metadata/test01.xml @@ -212,7 +212,7 @@ def test_no_fail_on_error_invalid_file(self): # Loading an xml file with an invalid entity must throw when filter_invalid False and fail_on_error True def test_fail_on_error_invalid_entity(self): try: - res, md = self.exec_pipeline( + res, _md = self.exec_pipeline( f""" - load fail_on_error True filter_invalid False: - {self.datadir}/metadata/test01.xml @@ -231,7 +231,7 @@ def test_fail_on_error_invalid_entity(self): # Test default behaviour. Loading a file with an invalid entity must not raise an exception def test_no_fail_on_error_invalid_entity(self): - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - load: - {self.datadir}/metadata/test01.xml @@ -245,7 +245,7 @@ def test_no_fail_on_error_invalid_entity(self): # A directory with a file with invalid metadata must not throw by default: def test_no_fail_on_error_invalid_dir(self): - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - load: - {self.datadir}/metadata/ @@ -263,7 +263,7 @@ class SortTest(PipeLineTest): EID3 = "https://sharav.abes.fr/idp/shibboleth" @staticmethod - def _run_sort_test(expected_order, sxp, res, l): + def _run_sort_test(expected_order, sxp, res, output): if sxp is not None: # Verify expected warnings for missing sort values for e in expected_order: @@ -275,11 +275,11 @@ def _run_sort_test(expected_order, sxp, res, l): f"Sort pipe: unable to sort entity by '{sxp}'. 
Entity '{e[0]}' has no such value" ) try: - assert keygen_fail_str in str(l) + assert keygen_fail_str in str(output) except AssertionError: print( f"Test failed on expecting missing sort value from: '{e[0]}'.\n" - f"Could not find string on the output: '{keygen_fail_str}'.\nOutput was:\n {l}" + f"Could not find string on the output: '{keygen_fail_str}'.\nOutput was:\n {output}" ) raise except (IndexError, TypeError): @@ -308,7 +308,7 @@ def _run_sort_test(expected_order, sxp, res, l): # Test sort by entityID only def test_sort(self): sxp = None - res, md = self.exec_pipeline( + res, _md = self.exec_pipeline( f""" - load: - {self.datadir}/metadata/test01.xml @@ -370,7 +370,7 @@ def test_sort_group(self): class SigningTest(PipeLineTest): def test_signing(self): self.output = tempfile.NamedTemporaryFile('w').name - res, md, ctx = self.run_pipeline("signer.fd", self) + _res, md, _ctx = self.run_pipeline("signer.fd", self) eIDs = [e.get('entityID') for e in md.store] assert 'https://idp.aco.net/idp/shibboleth' in eIDs assert 'https://skriptenforum.net/shibboleth' in eIDs @@ -378,8 +378,8 @@ def test_signing(self): def test_signing_and_validation(self): self.output = tempfile.NamedTemporaryFile('w').name - res_s, md_s, ctx_s = self.run_pipeline("signer.fd", self) - res_v, md_v, ctx_v = self.run_pipeline("validator.fd", self) + _res_s, _md_s, _ctx_s = self.run_pipeline("signer.fd", self) + _res_v, md_v, _ctx_v = self.run_pipeline("validator.fd", self) eIDs = [e.get('entityID') for e in md_v.store] assert 'https://idp.aco.net/idp/shibboleth' in eIDs @@ -388,7 +388,7 @@ def test_signing_and_validation(self): def test_cert_report(self): self.output = tempfile.NamedTemporaryFile('w').name - res, md, ctx = self.run_pipeline("certreport.fd", self) + _res, md, _ctx = self.run_pipeline("certreport.fd", self) eIDs = [e.get('entityID') for e in md.store] assert 'https://idp.aco.net/idp/shibboleth' in eIDs assert 'https://skriptenforum.net/shibboleth' in eIDs @@ -398,7 +398,7 @@ def 
test_cert_report(self): def test_cert_report_swamid(self): self.output = tempfile.NamedTemporaryFile('w').name - res, md, ctx = self.run_pipeline("certreport-swamid.fd", self) + _res, md, _ctx = self.run_pipeline("certreport-swamid.fd", self) with open(self.output) as fd: print(fd.read()) @@ -529,7 +529,7 @@ def test_prune(self): finally: try: os.unlink(tmpfile) - except: + except Exception: pass def test_empty_store(self): @@ -632,14 +632,14 @@ def test_pick_invalid(self): finally: try: os.unlink(tmpfile) - except: + except Exception: pass def test_blacklist_single_file(self): entity = 'https://idp.example.com/saml2/idp/metadata.php' # First, load without a filter to ensure the entity is there - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - when batch: - load: @@ -649,7 +649,7 @@ def test_blacklist_single_file(self): assert md.lookup(entity) # Then, load with a filter and ensure the entity isn't there anymore - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - when batch: - load: @@ -670,7 +670,7 @@ def test_blacklist_directory(self): entity = 'https://idp.example.com/saml2/idp/metadata.php' # First, load without a filter to ensure the entity is there - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - when batch: - load: @@ -680,7 +680,7 @@ def test_blacklist_directory(self): assert md.lookup(entity) # Then, load with a filter and ensure the entity isn't there anymore - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - when batch: - load: @@ -695,7 +695,7 @@ def test_blacklist_directory(self): def test_bad_namespace(self): try: - res, md = self.exec_pipeline( + _res, _md = self.exec_pipeline( f""" - when batch: - load: @@ -710,7 +710,7 @@ def test_bad_namespace(self): def test_parsecopy_(self): entity = 'https://idp.example.com/saml2/idp/metadata.php' - res, md = self.exec_pipeline( + _res, md = self.exec_pipeline( f""" - when batch: - load: From 
a04e4106e38d4834befc1a7201ada7230d6ecb61 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 16:27:03 +0200 Subject: [PATCH 45/53] Fix the last linter errors. Turn on fail on linter errors. --- .github/workflows/python-package.yml | 2 +- src/pyff/test/test_repo.py | 2 +- src/pyff/test/test_rwlock.py | 2 +- src/pyff/test/test_store.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e0eaa929..382388bc 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -28,7 +28,7 @@ jobs: run: | pip install ruff ruff check --output-format=github . - continue-on-error: true + continue-on-error: false - name: Install dependencies run: | diff --git a/src/pyff/test/test_repo.py b/src/pyff/test/test_repo.py index cd35fd1a..5d0bbce6 100644 --- a/src/pyff/test/test_repo.py +++ b/src/pyff/test/test_repo.py @@ -141,7 +141,7 @@ def test_display(self): funet_connect = self.md.lookup('https://connect.funet.fi/shibboleth')[0] name, desc = entity_extended_display(funet_connect) assert name == 'FUNET E-Meeting Service' - dn = entity_extended_display(funet_connect) + _dn = entity_extended_display(funet_connect) def test_missing(self): swamid = root(self.swamid) diff --git a/src/pyff/test/test_rwlock.py b/src/pyff/test/test_rwlock.py index 38216187..fba820cd 100644 --- a/src/pyff/test/test_rwlock.py +++ b/src/pyff/test/test_rwlock.py @@ -122,7 +122,7 @@ def test_unthreaded(self): finally: try: self.lock.release() - except: + except Exception: pass def test_deadlock(self): diff --git a/src/pyff/test/test_store.py b/src/pyff/test/test_store.py index fe298cb8..4f725423 100644 --- a/src/pyff/test/test_store.py +++ b/src/pyff/test/test_store.py @@ -125,7 +125,7 @@ def test_select_wayf(self): def test_lookup_intersect_empty_test01(self): store = RedisWhooshStore(directory=self.dir, clear=True, name="test", redis=fakeredis.FakeStrictRedis()) 
store.update(self.test01, etag='test01', lazy=False) - entity_id = root(self.test01).get('entityID') + _entity_id = root(self.test01).get('entityID') e = store.lookup("{}={}+{}={}".format(ATTRS['domain'], 'example.com', ATTRS['role'], 'sp')) assert len(e) == 0 @@ -258,7 +258,7 @@ def test_lookup_intersect_test01(self): def test_lookup_intersect_empty_test01(self): store = MemoryStore() store.update(self.test01) - entity_id = root(self.test01).get('entityID') + _entity_id = root(self.test01).get('entityID') e = store.lookup("{}={}+{}={}".format(ATTRS['domain'], 'example.com', ATTRS['role'], 'sp')) assert len(e) == 0 From 749a3b452e303a5155d8d354e526529e8e59df19 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 7 May 2025 16:39:36 +0200 Subject: [PATCH 46/53] Oops, check types correctly. --- src/pyff/builtins.py | 2 +- src/pyff/test/test_md_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index 34f4c931..810f7f46 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -301,7 +301,7 @@ def fork(req: Plumbing.Request, *opts): def _any(lst, d): for x in lst: if x in d: - if d is dict: + if isinstance(d, dict): return d[x] else: return True diff --git a/src/pyff/test/test_md_api.py b/src/pyff/test/test_md_api.py index cabe33aa..0b08f134 100644 --- a/src/pyff/test/test_md_api.py +++ b/src/pyff/test/test_md_api.py @@ -133,7 +133,7 @@ def test_load_and_query(self): data = r.json() assert data is not None and len(data) == 1 info = data[0] - assert info is dict + assert isinstance(info, dict) assert info['title'] == 'NORDUnet' assert 'nordu.net' in info['scope'] From 749598fae18022e879c73563104ebae1a3ce949b Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Wed, 14 May 2025 15:45:03 +0200 Subject: [PATCH 47/53] Need to import metadata directly. 
--- pyproject.toml | 11 +++++++++++ src/pyff/__init__.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5e560500..a0e8f0f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,17 @@ samldiff = "pyff.tools:difftool" # Allow lines to be as long as 120. line-length = 120 target-version = "py39" + +[tool.ruff.lint] +# future fixes.... +#select = [ +# "B", # flake8-bugbear +# "E", # pycodestyle error +# "F", # pyflakes +# "I", # isort +# "UP", # pyupgrade +# "W", # pycodestyle warning +#] [tool.ruff.format] quote-style = "preserve" diff --git a/src/pyff/__init__.py b/src/pyff/__init__.py index 7d54b185..33e9bdcd 100644 --- a/src/pyff/__init__.py +++ b/src/pyff/__init__.py @@ -2,6 +2,6 @@ pyFF is a SAML metadata aggregator. """ -import importlib +import importlib.metadata __version__ = importlib.metadata.version('pyFF') From 838e3e4803913b07cc615702c3a9855a58a4c983 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Thu, 15 May 2025 08:07:18 +0200 Subject: [PATCH 48/53] Add publiccode.yml. --- publiccode.yml | 63 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 publiccode.yml diff --git a/publiccode.yml b/publiccode.yml new file mode 100644 index 00000000..b0952504 --- /dev/null +++ b/publiccode.yml @@ -0,0 +1,63 @@ +publiccodeYmlVersion: "0.4" +name: pyFF +url: https://github.com/IdentityPython/pyFF +softwareVersion: 2.1.3 +releaseDate: 2024-11-14 +platforms: + - linux +categories: + - identity-management +developmentStatus: stable +softwareType: standalone/backend +description: + en: + localisedName: pyFF + shortDescription: "pyFF is a simple but reasonably complete SAML metadata + processor capable of aggregate, validate, combine, transform, sign or + publish SAML metadata. " + longDescription: >- + pyFF is a simple but reasonably complete SAML metadata processor. 
It is + intended to be used by anyone who needs to aggregate, validate, combine, + transform, sign or publish SAML metadata. + + + pyFF is used to run infrastructure for several identity federations of + signifficant size including edugain.org. + + + pyFF supports producing and validating digital signatures on SAML metadata + using the pyXMLSecurity package which in turn supports PKCS#11 and other + mechanisms for talking to HSMs and other cryptographic hardware. + + + pyFF is also a complete implementation of the SAML metadata query protocol + as described in draft-young-md-query and draft-young-md-query-saml and + implements extensions to MDQ for searching which means pyFF can be used as + the backend for a discovery service for large-scale identity federations. + + + Possible usecases include running an federation aggregator, filtering + metadata for use by a discovery service, generating reports from metadata + (eg certificate expiration reports), transforming metadata to add custom + elements. + documentation: https://pyff.readthedocs.io/en/latest/ + features: + - SAML +legal: + license: BSD-2-Clause +maintenance: + type: community + contacts: + - name: Mikael Frykholm + email: mifr@sunet.se + phone: "" + affiliation: SUNET + - name: Leif Johansson + email: leifj@sunet.se + phone: "" + affiliation: SUNET +localisation: + localisationReady: false + availableLanguages: + - en + From 04695f5e504dd2d74a1874e31a1ecbf44e8fea6a Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Thu, 15 May 2025 10:13:26 +0200 Subject: [PATCH 49/53] Migrate to pyproject.toml, the testing build does not like a mixed requirements.txt. 
--- pyproject.toml | 29 +++++++++++++++++++++++++++++ requirements.txt | 27 --------------------------- 2 files changed, 29 insertions(+), 27 deletions(-) delete mode 100644 requirements.txt diff --git a/pyproject.toml b/pyproject.toml index a0e8f0f4..47f9af30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,35 @@ authors = [ maintainers = [ {name = "Mikael Frykholm", email = "mifr@sunet.se"} ] +dependencies = [ + "accept-types>=0.4.1", + "apscheduler==3.6.3", + "cachetools>=5.5.2", + "eval-type-backport>=0.2.2 ; python_full_version == '3.9.*'", + "gunicorn>=23.0.0", + "httplib2>=0.7.7", + "lxml>=4.1.1", + "mako>=1.3.10", + "pyconfig>=3.2.3", + "pydantic>=2.8", + "pyramid>=2.0.2", + "pytz>=2025.2", + "pyxmlsecurity>=1.0.0", + "pyyaml>=3.10", + "redis>=5.3.0", + "redis-collections>=0.13.0", + "requests>=2.32.3", + "requests-cache>=1.2.1", + "requests-file>=2.1.0", + "setuptools>=78.1.1", + "simplejson>=2.6.2", + "str2bool>=1.1", + "urllib3==1.26.19", + "whoosh>=2.7.4", + "wsgi-intercept>=1.13.1", + "xmldiff>=2.7.0", + "zipp>=3.19.1", +] [project.scripts] pyff = "pyff.md:main" pyffd = "pyff.mdq:main" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 60eb056f..00000000 --- a/requirements.txt +++ /dev/null @@ -1,27 +0,0 @@ -urllib3==1.26.19 -pytz -accept_types >=0.4.1 -apscheduler==3.6.3 -cachetools -eval_type_backport; python_version == '3.9' -gunicorn -httplib2 >=0.7.7 -lxml >=4.1.1 -mako -pyXMLSecurity >=1.0.0 -pyconfig -pydantic>=2.8 -pyramid -pyyaml >=3.10 -redis -redis-collections -requests -requests_cache -requests_file -simplejson >=2.6.2 -whoosh -wsgi-intercept -xmldiff -str2bool -setuptools>=78.1.1 # not directly required, pinned by Snyk to avoid a vulnerability -zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability From 5c69159cdf3103a388a2bd5408dc6f1b060337b3 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Thu, 15 May 2025 10:18:22 +0200 Subject: [PATCH 50/53] Make sure we don't end up 
in this codepath. --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index f861c7b1..1909492e 100755 --- a/setup.py +++ b/setup.py @@ -30,7 +30,6 @@ def load_requirements(path: PurePath) -> List[str]: README = open(here.with_name('README.rst')).read() NEWS = open(here.with_name('NEWS.txt')).read() -install_requires = load_requirements(here.with_name('requirements.txt')) tests_require = load_requirements(here.with_name('test_requirements.txt')) python_implementation_str = python_implementation() From e017a8e24369adfbd6f186cf467e2012d105bca1 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Thu, 15 May 2025 10:24:39 +0200 Subject: [PATCH 51/53] This needs to be removed also. --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 1909492e..f808f220 100755 --- a/setup.py +++ b/setup.py @@ -56,7 +56,6 @@ def load_requirements(path: PurePath) -> List[str]: include_package_data=True, package_data={'pyff': ['xslt/*.xsl', 'schema/*.xsd']}, zip_safe=False, - install_requires=install_requires, scripts=['scripts/mirror-mdq.sh'], entry_points={ 'console_scripts': ['pyff=pyff.md:main', 'pyffd=pyff.mdq:main', 'samldiff=pyff.tools:difftool'], From 60f830813ccb18f6397aea60fe7e87b5b07a0273 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Thu, 15 May 2025 15:41:14 +0200 Subject: [PATCH 52/53] More ruffing. 
--- docs/conf.py | 4 +- pyproject.toml | 28 +++- scripts/csv2xrd.py | 5 +- setup.py | 13 +- src/pyff/api.py | 9 +- src/pyff/builtins.py | 157 +++++++++-------- src/pyff/constants.py | 4 +- src/pyff/fetch.py | 2 +- src/pyff/logs.py | 7 +- src/pyff/merge_strategies.py | 1 - src/pyff/parse.py | 6 +- src/pyff/pipes.py | 20 +-- src/pyff/repo.py | 2 +- src/pyff/resource.py | 22 +-- src/pyff/samlmd.py | 233 +++++++++++++------------- src/pyff/store.py | 30 ++-- src/pyff/test/__init__.py | 16 +- src/pyff/test/test_decorators.py | 3 +- src/pyff/test/test_log.py | 3 +- src/pyff/test/test_md_api.py | 2 +- src/pyff/test/test_mdsl.py | 6 +- src/pyff/test/test_pipeline.py | 24 +-- src/pyff/test/test_repo.py | 6 +- src/pyff/test/test_rwlock.py | 26 +-- src/pyff/test/test_simple_pipeline.py | 2 +- src/pyff/test/test_time.py | 6 +- src/pyff/test/test_utils.py | 7 +- src/pyff/utils.py | 36 ++-- 28 files changed, 334 insertions(+), 346 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 7fec3cf8..c829af9c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Federation Feeder documentation build configuration file, created by # sphinx-quickstart on Thu Jul 5 12:38:40 2012. @@ -11,8 +10,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os +import sys + import sphinx sys.path.insert(0, os.path.abspath('./../')) diff --git a/pyproject.toml b/pyproject.toml index 47f9af30..5cd0bfc1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,15 +55,27 @@ line-length = 120 target-version = "py39" [tool.ruff.lint] -# future fixes.... 
-#select = [ -# "B", # flake8-bugbear -# "E", # pycodestyle error -# "F", # pyflakes -# "I", # isort -# "UP", # pyupgrade -# "W", # pycodestyle warning +#select = [ +## "ANN", +# "ASYNC", +# "E", +# "ERA", +# "F", +# "FAST", +# "FLY", +# "FURB", +# "I", +# "PERF", +# "PGH", +# "PIE", +# "PL", +# "UP", +# "W", #] +ignore = [ +"PLR0915", +"PLR0912", +] [tool.ruff.format] quote-style = "preserve" diff --git a/scripts/csv2xrd.py b/scripts/csv2xrd.py index c71055cc..6f77175b 100755 --- a/scripts/csv2xrd.py +++ b/scripts/csv2xrd.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -import io import sys from lxml import etree @@ -8,8 +7,8 @@ ns = {None: "http://docs.oasis-open.org/ns/xri/xrd-1.0"} xrds = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}XRDS", nsmap=ns) -with io.open(sys.argv[1]) as fd: - for line in fd.readlines(): +with open(sys.argv[1]) as fd: + for line in fd: line = line.strip() e = [x.strip('"') for x in line.split(",")] xrd = etree.Element("{http://docs.oasis-open.org/ns/xri/xrd-1.0}XRD", nsmap=ns) diff --git a/setup.py b/setup.py index f808f220..d4d2bb9b 100755 --- a/setup.py +++ b/setup.py @@ -1,26 +1,23 @@ #!/usr/bin/env python3 -# -*- encoding: utf-8 -*- -from setuptools import setup from pathlib import PurePath from platform import python_implementation -from typing import List -from setuptools import find_packages +from setuptools import find_packages, setup __author__ = 'Leif Johansson' __version__ = '2.1.3' -def load_requirements(path: PurePath) -> List[str]: +def load_requirements(path: PurePath) -> list[str]: """ Load dependencies from a requirements.txt style file, ignoring comments etc. 
""" res = [] with open(path) as fd: - for line in fd.readlines(): - while line.endswith('\n') or line.endswith('\\'): + for line in fd: + while line.endswith(('\n', '\\')): line = line[:-1] line = line.strip() - if not line or line.startswith('-') or line.startswith('#'): + if not line or line.startswith(('-', '#')): continue res += [line] return res diff --git a/src/pyff/api.py b/src/pyff/api.py index 2ae5f980..10e7b3ad 100644 --- a/src/pyff/api.py +++ b/src/pyff/api.py @@ -1,9 +1,10 @@ import importlib import threading +from collections.abc import Generator, Iterable, Mapping from datetime import datetime, timedelta from json import dumps from typing import Any, Optional -from collections.abc import Generator, Iterable, Mapping +from urllib.parse import quote_plus import pyramid.httpexceptions as exc import pytz @@ -15,8 +16,8 @@ from pyramid.events import NewRequest from pyramid.request import Request from pyramid.response import Response -from urllib.parse import quote_plus +from pyff import __version__ from pyff.constants import config from pyff.exceptions import ResourceException from pyff.logs import get_log @@ -25,7 +26,6 @@ from pyff.resource import Resource from pyff.samlmd import entity_display_name from pyff.utils import b2u, dumptree, hash_id, json_serializer, utc_now -from pyff import __version__ log = get_log(__name__) @@ -131,7 +131,6 @@ def call(entry: str) -> None: resp = requests.post(url) if resp.status_code >= 300: log.error(f'POST request to API endpoint at {url} failed: {resp.status_code} {resp.reason}') - return None def request_handler(request: Request) -> Response: @@ -386,7 +385,7 @@ def _links(url: str, title: Any = None) -> None: for entity in request.registry.md.store.lookup('entities'): entity_display = entity_display_name(entity) - _links("/entities/%s" % hash_id(entity.get('entityID')), title=entity_display) + _links("/entities/{}".format(hash_id(entity.get('entityID'))), title=entity_display) aliases = request.registry.aliases for 
a in aliases.keys(): diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index 810f7f46..9c560956 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -4,6 +4,7 @@ import base64 import hashlib +import ipaddress import json import operator import os @@ -13,14 +14,13 @@ from copy import deepcopy from datetime import datetime from io import BytesIO -from str2bool import str2bool from typing import Optional +from urllib.parse import quote_plus, urlparse -import ipaddress import xmlsec from lxml import etree from lxml.etree import DocumentInvalid -from urllib.parse import quote_plus, urlparse +from str2bool import str2bool from pyff.constants import NS from pyff.decorators import deprecated @@ -29,8 +29,8 @@ from pyff.pipes import PipeException, PipelineCallback, Plumbing, pipe, registry from pyff.samlmd import ( annotate_entity, - discojson_sp_t, discojson_sp_attr_t, + discojson_sp_t, discojson_t, entitiesdescriptor, find_in_document, @@ -254,8 +254,8 @@ def fork(req: Plumbing.Request, *opts): **parsecopy** - Due to a hard to find bug, fork which uses deepcopy can lose some namespaces. The parsecopy argument is a workaround. - It uses a brute force serialisation and deserialisation to get around the bug. + Due to a hard to find bug, fork which uses deepcopy can lose some namespaces. The parsecopy argument is a + workaround. It uses a brute force serialisation and deserialisation to get around the bug. .. code-block:: yaml @@ -414,7 +414,7 @@ def when(req: Plumbing.Request, condition: str, *values): if not isinstance(req.args, list): raise ValueError('Non-list arguments to "when" not allowed') - return Plumbing(pipeline=req.args, pid="%s.when" % req.plumbing.id).iprocess(req) + return Plumbing(pipeline=req.args, pid=f"{req.plumbing.id}.when").iprocess(req) return req.t @@ -472,13 +472,11 @@ def sort(req: Plumbing.Request, *opts): def publish(req: Plumbing.Request, *opts): """ Publish the working document in XML form. 
- :param req: The request :param opts: Options (unused) :return: None Publish takes one argument: path to a file where the document tree will be written. - **Examples** .. code-block:: yaml @@ -543,33 +541,34 @@ def _nop(x): if req.args.get('urlencode_filenames'): enc = quote_plus - if output_file is not None: - output_file = output_file.strip() - resource_name = output_file - m = re.match(FILESPEC_REGEX, output_file) - if m: - output_file = m.group(1) - resource_name = m.group(2) - out = output_file - data = req.t - if not req.args.get('raw'): - data = dumptree(req.t, pretty_print=req.args.get('pretty_print')) - - if os.path.isdir(output_file): - file_name = "{}{}".format(enc(req.id), req.args.get('ext')) - out = os.path.join(output_file, file_name) - safe_write(out, data, mkdirs=True) - if req.args.get('hash_link'): - link_name = "{}{}".format(enc(hash_id(req.id)), req.args.get('ext')) - link_path = os.path.join(output_file, link_name) - if os.path.exists(link_path): - os.unlink(link_path) - os.symlink(file_name, link_path) - else: - safe_write(out, data, mkdirs=True) + if output_file is None: + return req.t + output_file = output_file.strip() + resource_name = output_file + m = re.match(FILESPEC_REGEX, output_file) + if m: + output_file = m.group(1) + resource_name = m.group(2) + out = output_file + data = req.t + if not req.args.get('raw'): + data = dumptree(req.t, pretty_print=req.args.get('pretty_print')) + + if os.path.isdir(output_file): + file_name = "{}{}".format(enc(req.id), req.args.get('ext')) + out = os.path.join(output_file, file_name) + safe_write(out, data, mkdirs=True) + if req.args.get('hash_link'): + link_name = "{}{}".format(enc(hash_id(req.id)), req.args.get('ext')) + link_path = os.path.join(output_file, link_name) + if os.path.exists(link_path): + os.unlink(link_path) + os.symlink(file_name, link_path) + else: + safe_write(out, data, mkdirs=True) - if req.args.get('update_store'): - req.store.update(req.t, tid=resource_name) # TODO maybe 
this is not the right thing to do anymore + if req.args.get('update_store'): + req.store.update(req.t, tid=resource_name) # TODO maybe this is not the right thing to do anymore return req.t @@ -639,9 +638,10 @@ def load(req: Plumbing.Request, *opts): - max_workers <5> : Number of parallel threads to use for loading MD files - timeout <120> : Socket timeout when downloading files - validate : When true downloaded metadata files are validated (schema validation) - - fail_on_error : Control whether an error during download, parsing or (optional)validation of a MD file - does not abort processing of the pipeline. When true a failure aborts and causes pyff - to exit with a non zero exit code. Otherwise errors are logged but ignored. + - fail_on_error : Control whether an error during download, parsing or (optional)validation of a MD + file does not abort processing of the pipeline. When true a failure aborts and + causes pyff to exit with a non zero exit code. Otherwise errors are logged but + ignored. - filter_invalid : Controls validation behaviour. When true Entities that fail validation are filtered I.e. are not loaded. When false the entire metadata file is either loaded, or not. 
fail_on_error controls whether failure to validating the entire MD file will abort @@ -664,9 +664,9 @@ def load(req: Plumbing.Request, *opts): raise ValueError('Non-list args to "load" not allowed') for x in req.args: - x = x.strip() - log.debug(f"load parsing '{x}'") - r = x.split() + stripped = x.strip() + log.debug(f"load parsing '{stripped}'") + r = stripped.split() assert len(r) in range(1, 8), PipeException( "Usage: load resource [as url] [[verify] verification] [via pipeline] [cleanup pipeline]" @@ -720,7 +720,7 @@ def _select_args(req): if args is None or not args: args = [] - log.info("selecting using args: %s" % args) + log.info(f"selecting using args: {args}") return args @@ -735,8 +735,8 @@ def select(req: Plumbing.Request, *opts): :return: returns the result of the operation as a working document Select picks and expands elements (with optional filtering) from the active repository you setup using calls - to :py:mod:`pyff.pipes.builtins.load`. See :py:mod:`pyff.mdrepo.MDRepository.lookup` for a description of the syntax for - selectors. + to :py:mod:`pyff.pipes.builtins.load`. See :py:mod:`pyff.mdrepo.MDRepository.lookup` for a description of the syntax + for selectors. 
**Examples** @@ -842,19 +842,19 @@ def select(req: Plumbing.Request, *opts): def _strings(elt): lst = [] for attr in [ - '{%s}DisplayName' % NS['mdui'], - '{%s}ServiceName' % NS['md'], - '{%s}OrganizationDisplayName' % NS['md'], - '{%s}OrganizationName' % NS['md'], - '{%s}Keywords' % NS['mdui'], - '{%s}Scope' % NS['shibmd'], + f"{{{NS['mdui']}}}DisplayName", + f"{{{NS['md']}}}ServiceName", + f"{{{NS['md']}}}OrganizationDisplayName", + f"{{{NS['md']}}}OrganizationName", + f"{{{NS['mdui']}}}Keywords", + f"{{{NS['shibmd']}}}Scope", ]: lst.extend([s.text for s in elt.iter(attr)]) lst.append(elt.get('entityID')) return [item for item in lst if item is not None] def _ip_networks(elt): - return [ipaddress.ip_network(x.text) for x in elt.iter('{%s}IPHint' % NS['mdui'])] + return [ipaddress.ip_network(x.text) for x in elt.iter(f"{{{NS['mdui']}}}IPHint")] def _match(q, elt): q = q.strip() @@ -956,7 +956,7 @@ def pick(req: Plumbing.Request, *opts): args = _select_args(req) ot = entitiesdescriptor(args, req.plumbing.id, lookup_fn=req.md.store.lookup, validate=False) if ot is None: - raise PipeException("empty select '%s' - stop" % ",".join(args)) + raise PipeException("empty select '{}' - stop".format(",".join(args))) return ot @@ -970,8 +970,8 @@ def first(req: Plumbing.Request, *opts): :param opts: Options (unused) :return: returns the first entity descriptor if the working document only contains one - Sometimes (eg when running an MDX pipeline) it is usually expected that if a single EntityDescriptor is being returned - then the outer EntitiesDescriptor is stripped. This method does exactly that. + Sometimes (eg when running an MDX pipeline) it is usually expected that if a single EntityDescriptor is being + returned then the outer EntitiesDescriptor is stripped. This method does exactly that. 
""" if req.t is None: @@ -1264,7 +1264,7 @@ def _store(req: Plumbing.Request, *opts): os.makedirs(target_dir) for e in iter_entities(req.t): fn = hash_id(e, prefix=False) - safe_write("%s.xml" % os.path.join(target_dir, fn), dumptree(e, pretty_print=True)) + safe_write(f"{os.path.join(target_dir, fn)}.xml", dumptree(e, pretty_print=True)) return req.t @@ -1302,7 +1302,7 @@ def xslt(req: Plumbing.Request, *opts): if stylesheet is None: raise PipeException("xslt requires stylesheet") - params = {k: "'%s'" % v for (k, v) in list(req.args.items())} + params = {k: f"'{v}'" for (k, v) in list(req.args.items())} del params['stylesheet'] try: return root(xslt_transform(req.t, stylesheet, params)) @@ -1429,7 +1429,7 @@ def check_xml_namespaces(req: Plumbing.Request, *opts): def _verify(elt): if isinstance(elt.tag, str): - for prefix, uri in list(elt.nsmap.items()): + for _prefix, uri in list(elt.nsmap.items()): if not uri.startswith('urn:'): u = urlparse(uri) if u.scheme not in ('http', 'https'): @@ -1456,7 +1456,7 @@ def drop_xsi_type(req: Plumbing.Request, *opts): def _drop_xsi_type(elt): try: - del elt.attrib["{%s}type" % NS["xsi"]] + del elt.attrib["{{{}}}type".format(NS["xsi"])] except Exception: pass @@ -1510,7 +1510,7 @@ def certreport(req: Plumbing.Request, *opts): seen: dict[str, bool] = {} for eid in req.t.xpath("//md:EntityDescriptor/@entityID", namespaces=NS, smart_strings=False): for cd in req.t.xpath( - "md:EntityDescriptor[@entityID='%s']//ds:X509Certificate" % eid, namespaces=NS, smart_strings=False + f"md:EntityDescriptor[@entityID='{eid}']//ds:X509Certificate", namespaces=NS, smart_strings=False ): try: cert_pem = cd.text @@ -1529,17 +1529,17 @@ def certreport(req: Plumbing.Request, *opts): entity_elt, "certificate-error", "keysize too small", - "{} has keysize of {} bits (less than {})".format(cert.getSubject(), keysize, error_bits), + f"{cert.getSubject()} has keysize of {keysize} bits (less than {error_bits})", ) - log.error("{} has keysize of 
{}".format(eid, keysize)) + log.error(f"{eid} has keysize of {keysize}") elif keysize < warning_bits: annotate_entity( entity_elt, "certificate-warning", "keysize small", - "{} has keysize of {} bits (less than {})".format(cert.getSubject(), keysize, warning_bits), + f"{cert.getSubject()} has keysize of {keysize} bits (less than {warning_bits})", ) - log.warning("{} has keysize of {}".format(eid, keysize)) + log.warning(f"{eid} has keysize of {keysize}") notafter = cert.getNotAfter() if notafter is None: @@ -1547,11 +1547,11 @@ def certreport(req: Plumbing.Request, *opts): entity_elt, "certificate-error", "certificate has no expiration time", - "%s has no expiration time" % cert.getSubject(), + f"{cert.getSubject()} has no expiration time", ) else: try: - et = datetime.strptime("%s" % notafter, "%y%m%d%H%M%SZ") + et = datetime.strptime(f"{notafter}", "%y%m%d%H%M%SZ") now = datetime.now() dt = et - now if total_seconds(dt) < error_seconds: @@ -1559,23 +1559,23 @@ def certreport(req: Plumbing.Request, *opts): entity_elt, "certificate-error", "certificate has expired", - "{} expired {} ago".format(cert.getSubject(), -dt), + f"{cert.getSubject()} expired {-dt} ago", ) - log.error("{} expired {} ago".format(eid, -dt)) + log.error(f"{eid} expired {-dt} ago") elif total_seconds(dt) < warning_seconds: annotate_entity( entity_elt, "certificate-warning", "certificate about to expire", - "{} expires in {}".format(cert.getSubject(), dt), + f"{cert.getSubject()} expires in {dt}", ) - log.warning("{} expires in {}".format(eid, dt)) + log.warning(f"{eid} expires in {dt}") except ValueError: annotate_entity( entity_elt, "certificate-error", "certificate has unknown expiration time", - "{} unknown expiration time {}".format(cert.getSubject(), notafter), + f"{cert.getSubject()} unknown expiration time {notafter}", ) req.store.update(entity_elt) @@ -1594,8 +1594,8 @@ def emit(req: Plumbing.Request, ctype="application/xml", *opts): :param opts: Options (not used) :return: unicode data 
- Renders the working tree as text and sets the digest of the tree as the ETag. If the tree has already been rendered as - text by an earlier step the text is returned as utf-8 encoded unicode. The mimetype (ctype) will be set in the + Renders the working tree as text and sets the digest of the tree as the ETag. If the tree has already been rendered + as text by an earlier step the text is returned as utf-8 encoded unicode. The mimetype (ctype) will be set in the Content-Type HTTP response header. **Examples** @@ -1654,8 +1654,8 @@ def signcerts(req: Plumbing.Request, *opts): if req.t is None: raise PipeException("Your pipeline is missing a select statement.") - for fp, pem in list(xmlsec.crypto.CertDict(req.t).items()): - log.info("found signing cert with fingerprint %s" % fp) + for fp, _pem in list(xmlsec.crypto.CertDict(req.t).items()): + log.info(f"found signing cert with fingerprint {fp}") return req.t @@ -1700,7 +1700,7 @@ def finalize(req: Plumbing.Request, *opts): raise ValueError('Non-dict args to "finalize" not allowed') e = root(req.t) - if e.tag == "{%s}EntitiesDescriptor" % NS['md']: + if e.tag == "{{{}}}EntitiesDescriptor".format(NS['md']): name = req.args.get('name', None) if name is None or 0 == len(name): name = req.args.get('Name', None) @@ -1714,7 +1714,7 @@ def finalize(req: Plumbing.Request, *opts): # error: On Python 3 '{}'.format(b'abc') produces "b'abc'", not 'abc'; # use '{!r}'.format(b'abc') if this is desired behavior name = f"{base_url.scheme}://{base_url.netloc}{name_url.path}" # type: ignore - log.debug("-------- using Name: %s" % name) + log.debug(f"-------- using Name: {name}") except ValueError as ex: log.debug(f'Got an exception while finalizing: {ex}') name = None @@ -1749,7 +1749,7 @@ def finalize(req: Plumbing.Request, *opts): offset = dt - now e.set('validUntil', datetime2iso(dt)) except ValueError as ex: - log.error("Unable to parse validUntil: {} ({})".format(valid_until, ex)) + log.error(f"Unable to parse validUntil: 
{valid_until} ({ex})") # set a reasonable default: 50% of the validity # we replace this below if we have cacheDuration set @@ -1762,7 +1762,7 @@ def finalize(req: Plumbing.Request, *opts): if cache_duration is not None and len(cache_duration) > 0: offset = duration2timedelta(cache_duration) if offset is None: - raise PipeException("Unable to parse %s as xs:duration" % cache_duration) + raise PipeException(f"Unable to parse {cache_duration} as xs:duration") e.set('cacheDuration', cache_duration) req.state['cache'] = int(total_seconds(offset)) @@ -1864,7 +1864,6 @@ def _setattr(req: Plumbing.Request, *opts): raise PipeException("Your pipeline is missing a select statement.") for e in iter_entities(req.t): - # log.debug("setting %s on %s" % (req.args,e.get('entityID'))) set_entity_attributes(e, req.args) req.store.update(e) diff --git a/src/pyff/constants.py b/src/pyff/constants.py index 874f53f3..acdc9063 100644 --- a/src/pyff/constants.py +++ b/src/pyff/constants.py @@ -8,9 +8,9 @@ import os import re import sys -from str2bool import str2bool import pyconfig +from str2bool import str2bool from pyff import __version__ as pyff_version @@ -482,7 +482,7 @@ def __str__(self): def find_setting(self, o): for s in self.settings(): - if o == s.short_name or o == s.long_name: + if o in (s.short_name, s.long_name): return s return None diff --git a/src/pyff/fetch.py b/src/pyff/fetch.py index 88ac2477..a29d5d11 100644 --- a/src/pyff/fetch.py +++ b/src/pyff/fetch.py @@ -83,7 +83,7 @@ def __init__(self, num_threads=config.worker_pool_size, name="Fetcher", content_ self.response = queue.Queue() self.pool = threading.BoundedSemaphore(num_threads) self.threads = [] - for i in range(0, num_threads): + for i in range(num_threads): t = Fetch(self.request, self.response, self.pool, self._id, content_handler) t.start() self.threads.append(t) diff --git a/src/pyff/logs.py b/src/pyff/logs.py index 2cc089e8..7ae72955 100644 --- a/src/pyff/logs.py +++ b/src/pyff/logs.py @@ -5,6 +5,7 @@ 
import syslog from typing import Any, Optional + class PyFFLogger: def __init__(self, name=None): if name is None: @@ -23,7 +24,7 @@ def _l(self, severity, msg): if severity in self._loggers: self._loggers[severity](str(msg)) else: - raise ValueError("unknown severity %s" % severity) + raise ValueError(f"unknown severity {severity}") def warn(self, msg: str) -> Any: return self._l(logging.WARN, msg) @@ -86,9 +87,9 @@ class SysLogLibHandler(logging.Handler): def __init__(self, facility): if isinstance(facility, str): - nf = getattr(syslog, "LOG_%s" % facility.upper(), None) + nf = getattr(syslog, f"LOG_{facility.upper()}", None) if not isinstance(nf, int): - raise ValueError('Invalid log facility: %s' % nf) + raise ValueError(f'Invalid log facility: {nf}') self.facility = nf else: self.facility = facility diff --git a/src/pyff/merge_strategies.py b/src/pyff/merge_strategies.py index 104bacce..e1ea0329 100644 --- a/src/pyff/merge_strategies.py +++ b/src/pyff/merge_strategies.py @@ -13,4 +13,3 @@ def replace_existing(old, new): def remove(old, new): if old is not None: old.getparent().remove(old) - return None diff --git a/src/pyff/parse.py b/src/pyff/parse.py index 3d346a06..f182dc8c 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -2,9 +2,9 @@ from abc import ABC from collections import deque from typing import Any, Optional +from urllib.parse import quote as urlescape from pydantic import BaseModel, Field -from urllib.parse import quote as urlescape from xmlsec.crypto import CertDict from pyff.constants import NS @@ -68,7 +68,7 @@ def magic(self, content: str) -> bool: return True def parse(self, resource: Resource, content: str) -> ParserInfo: - raise ParserException("No matching parser found for %s" % resource.url) + raise ParserException(f"No matching parser found for {resource.url}") class DirectoryParser(PyffParser): @@ -115,7 +115,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: t = parse_xml(unicode_stream(content)) _relt = 
root(t) - for xrd in t.iter("{%s}XRD" % NS['xrd']): + for xrd in t.iter("{{{}}}XRD".format(NS['xrd'])): for link in xrd.findall(".//{{{}}}Link[@rel='{}']".format(NS['xrd'], NS['md'])): link_href = link.get("href") certs = CertDict(link) diff --git a/src/pyff/pipes.py b/src/pyff/pipes.py index 92101715..f15f7abb 100644 --- a/src/pyff/pipes.py +++ b/src/pyff/pipes.py @@ -8,18 +8,18 @@ import functools import os import traceback -from typing import Any, Callable from collections.abc import Iterable +from typing import Any, Callable import yaml from apscheduler.schedulers.background import BackgroundScheduler from lxml.etree import Element, ElementTree +from pyff.exceptions import PyffException from pyff.logs import get_log from pyff.repo import MDRepository from pyff.store import SAMLStoreBase from pyff.utils import is_text, resource_string -from pyff.exceptions import PyffException log = get_log(__name__) @@ -116,24 +116,24 @@ def _n(_d: str) -> tuple[str, list[str]]: name, opts = _n(d) elif hasattr(d, '__iter__') and type(d) is not dict: if not len(d): - raise PipeException("This does not look like a length of pipe... \n%s" % repr(d)) + raise PipeException(f"This does not look like a length of pipe... \n{repr(d)}") name, opts = _n(d[0]) elif type(d) is dict: k = list(d.keys())[0] name, opts = _n(k) args = d[k] else: - raise PipeException("This does not look like a length of pipe... \n%s" % repr(d)) + raise PipeException(f"This does not look like a length of pipe... \n{repr(d)}") if name is None: - raise PipeException("Anonymous length of pipe... \n%s" % repr(d)) + raise PipeException(f"Anonymous length of pipe... 
\n{repr(d)}") func = None if name in registry: func = registry[name] if func is None or not hasattr(func, '__call__'): - raise PipeException('No pipe named %s is installed' % name) + raise PipeException(f'No pipe named {name} is installed') return func, opts, name, args @@ -313,14 +313,12 @@ def iprocess(self, req: Plumbing.Request) -> ElementTree: try: pipefn, opts, name, args = load_pipe(p) log.debug( - "{!s}: calling '{}' using args:\n{} and opts:\n{}".format( - self.pipeline, name, repr(args), repr(opts) - ) + f"{self.pipeline!s}: calling '{name}' using args:\n{repr(args)} and opts:\n{repr(opts)}" ) if is_text(args): args = [args] if args is not None and type(args) is not dict and type(args) is not list and type(args) is not tuple: - raise PipeException("Unknown argument type %s" % repr(args)) + raise PipeException(f"Unknown argument type {repr(args)}") req.args = args req.name = name ot = pipefn(req, *opts) @@ -380,7 +378,7 @@ def plumbing(fn: str) -> Plumbing: pid = os.path.splitext(fn)[0] ystr = resource_string(fn) if ystr is None: - raise PipeException("Plumbing not found: %s" % fn) + raise PipeException(f"Plumbing not found: {fn}") pipeline = yaml.safe_load(ystr) return Plumbing(pipeline=pipeline, pid=pid) diff --git a/src/pyff/repo.py b/src/pyff/repo.py index ac31cc6c..b157729a 100644 --- a/src/pyff/repo.py +++ b/src/pyff/repo.py @@ -41,7 +41,7 @@ def _lookup(self, member, store=None): src = None return self.lookup(src, xp=xp, store=store) - log.debug("calling store lookup %s" % member) + log.debug(f"calling store lookup {member}") return store.lookup(member) def lookup(self, member, xp=None, store=None): diff --git a/src/pyff/resource.py b/src/pyff/resource.py index 20a74781..4d0d1177 100644 --- a/src/pyff/resource.py +++ b/src/pyff/resource.py @@ -9,16 +9,16 @@ import os import traceback from collections import defaultdict, deque +from collections.abc import Iterable from datetime import datetime from enum import Enum from threading import Condition, 
Lock -from typing import TYPE_CHECKING, Any, Callable, Deque -from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, Callable from urllib.parse import quote as urlescape import requests from lxml.etree import ElementTree -from pydantic import ConfigDict, BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from requests.adapters import Response from pyff.constants import config @@ -236,8 +236,8 @@ def __init__(self, url: str | None, opts: ResourceOpts): self.never_expires: bool = False self.last_seen: datetime | None = None self.last_parser: PyffParser | None = None # importing PyffParser in this module causes a loop - self._infos: Deque[ResourceInfo] = deque(maxlen=config.info_buffer_size) - self.children: Deque[Resource] = deque() + self._infos: deque[ResourceInfo] = deque(maxlen=config.info_buffer_size) + self.children: deque[Resource] = deque() self.trust_info: dict | None = None self.md_sources: dict | None = None self._setup() @@ -334,7 +334,7 @@ def add_info(self) -> ResourceInfo: return info def _replace(self, r: Resource) -> None: - for i in range(0, len(self.children)): + for i in range(len(self.children)): if self.children[i].url == r.url: self.children[i] = r return @@ -381,9 +381,7 @@ def load_backup(self) -> str | None: return res except OSError as ex: log.warning( - "Caught an exception trying to load local backup for {} via {}: {}".format( - self.url, self.local_copy_fn, ex - ) + f"Caught an exception trying to load local backup for {self.url} via {self.local_copy_fn}: {ex}" ) return None @@ -426,9 +424,7 @@ def load_resource(self, getter: Callable[[str], Response]) -> tuple[str | None, self.etag = _etag elif self.local_copy_fn is not None: log.warning( - "Got status={:d} while getting {}. Attempting fallback to local copy.".format( - r.status_code, self.url - ) + f"Got status={r.status_code:d} while getting {self.url}. Attempting fallback to local copy." 
) data = self.load_backup() if data is not None and len(data) > 0: @@ -454,7 +450,7 @@ def load_resource(self, getter: Callable[[str], Response]) -> tuple[str | None, return data, status, info - def parse(self, getter: Callable[[str], Response]) -> Deque[Resource]: + def parse(self, getter: Callable[[str], Response]) -> deque[Resource]: data, status, info = self.load_resource(getter) if not data: diff --git a/src/pyff/samlmd.py b/src/pyff/samlmd.py index 0f1fa4e6..a0047a6c 100644 --- a/src/pyff/samlmd.py +++ b/src/pyff/samlmd.py @@ -3,7 +3,6 @@ from base64 import b64decode from copy import deepcopy from datetime import datetime, timedelta -from str2bool import str2bool from io import BytesIO from itertools import chain from typing import Any, Optional, Union @@ -12,8 +11,8 @@ from lxml.builder import ElementMaker from lxml.etree import DocumentInvalid, Element, ElementTree from pydantic import Field +from str2bool import str2bool from xmlsec.crypto import CertDict -from .resource import Resource, ResourceOpts from pyff.constants import ATTRS, NF_URI, NS, config from pyff.exceptions import MetadataException @@ -46,6 +45,8 @@ xml_error, ) +from .resource import Resource, ResourceOpts + log = get_log(__name__) @@ -76,7 +77,7 @@ def __contains__(self, item): def find_merge_strategy(strategy_name): if '.' 
not in strategy_name: - strategy_name = "pyff.merge_strategies:%s" % strategy_name + strategy_name = f"pyff.merge_strategies:{strategy_name}" if ':' not in strategy_name: # TODO: BUG: Parameter 'occurrence' unfilled strategy_name = rreplace(strategy_name, '.', ':') # backwards compat for old way of specifying these @@ -112,7 +113,7 @@ def parse_saml_metadata( t = check_signature(t, opts.verify) trust_info = None - extensions = t.find('{%s}Extensions' % NS['md']) + extensions = t.find('{{{}}}Extensions'.format(NS['md'])) if opts.cleanup is not None: for cb in opts.cleanup: @@ -134,11 +135,11 @@ def parse_saml_metadata( ) if t is not None: - if t.tag == "{%s}EntityDescriptor" % NS['md']: + if t.tag == "{{{}}}EntityDescriptor".format(NS['md']): t = entitiesdescriptor( [t], base_url, copy=False, validate=True, filter_invalid=filter_invalid, nsmap=t.nsmap ) - elif t.tag == "{%s}EntitiesDescriptor" % NS['md'] and extensions is not None: + elif t.tag == "{{{}}}EntitiesDescriptor".format(NS['md']) and extensions is not None: trust_info = discojson_sp(extensions) except Exception as ex: @@ -194,12 +195,11 @@ def _extra_md(_t, resource_opts, **kwargs): sp_entity = sp_entities.find("{{{}}}EntityDescriptor[@entityID='{}']".format(NS['md'], entityID)) if sp_entity is not None: md_source = sp_entity.find( - "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" - % (NS['md'], NS['md'], NS['ti'], NS['ti'], location) + "{{{}}}SPSSODescriptor/{{{}}}Extensions/{{{}}}TrustInfo/{{{}}}MetadataSource[@src='{}']".format(NS['md'], NS['md'], NS['ti'], NS['ti'], location) ) for e in iter_entities(_t): md_source.append(e) - return etree.Element("{%s}EntitiesDescriptor" % NS['md']) + return etree.Element("{{{}}}EntitiesDescriptor".format(NS['md'])) if t is not None: resource.t = t @@ -210,8 +210,7 @@ def _extra_md(_t, resource_opts, **kwargs): info.entities.append(entityID) md_source = e.find( - "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" - 
% (NS['md'], NS['md'], NS['ti'], NS['ti']) + "{{{}}}SPSSODescriptor/{{{}}}Extensions/{{{}}}TrustInfo/{{{}}}MetadataSource".format(NS['md'], NS['md'], NS['ti'], NS['ti']) ) if md_source is not None: location = md_source.attrib.get('src') @@ -295,11 +294,11 @@ def parse(self, resource: Resource, content: str) -> EidasMDParserInfo: resource.expire_time = info.next_update info.expiration_time = 'None' if not resource.expire_time else resource.expire_time.isoformat() - info.issuer_name = first_text(relt, "{%s}IssuerName" % NS['ser']) - info.scheme_identifier = first_text(relt, "{%s}SchemeIdentifier" % NS['ser']) - info.scheme_territory = first_text(relt, "{%s}SchemeTerritory" % NS['ser']) - for mdl in relt.iter("{%s}MetadataList" % NS['ser']): - for ml in mdl.iter("{%s}MetadataLocation" % NS['ser']): + info.issuer_name = first_text(relt, "{{{}}}IssuerName".format(NS['ser'])) + info.scheme_identifier = first_text(relt, "{{{}}}SchemeIdentifier".format(NS['ser'])) + info.scheme_territory = first_text(relt, "{{{}}}SchemeTerritory".format(NS['ser'])) + for mdl in relt.iter("{{{}}}MetadataList".format(NS['ser'])): + for ml in mdl.iter("{{{}}}MetadataLocation".format(NS['ser'])): location = ml.get('Location') if location: certs = CertDict(ml) @@ -308,7 +307,7 @@ def parse(self, resource: Resource, content: str) -> EidasMDParserInfo: if len(fingerprints) > 0: fp = fingerprints[0] - ep = ml.find("{%s}Endpoint" % NS['ser']) + ep = ml.find("{{{}}}Endpoint".format(NS['ser'])) if ep is not None and fp is not None: args = dict( country_code=mdl.get('Territory'), @@ -349,7 +348,7 @@ def _update_entities(_t, resource_opts, **kwargs): def metadata_expiration(t: ElementTree) -> Optional[timedelta]: relt = root(t) - if relt.tag in ('{%s}EntityDescriptor' % NS['md'], '{%s}EntitiesDescriptor' % NS['md']): + if relt.tag in ('{{{}}}EntityDescriptor'.format(NS['md']), '{{{}}}EntitiesDescriptor'.format(NS['md'])): cache_duration = config.default_cache_duration valid_until = 
relt.get('validUntil', None) if valid_until is not None: @@ -372,7 +371,7 @@ def filter_invalids_from_document(t: ElementTree, base_url, validation_errors) - if not xsd.validate(e): _error = xml_error(xsd.error_log, m=base_url) entity_id = e.get("entityID", "(Missing entityID)") - log.warning('removing \'{}\': schema validation failed: {}'.format(entity_id, xsd.error_log)) + log.warning(f'removing \'{entity_id}\': schema validation failed: {xsd.error_log}') validation_errors[entity_id] = f"{xsd.error_log}" if e.getparent() is None: return None @@ -394,9 +393,7 @@ def filter_or_validate( t = filter_invalids_from_document(t, base_url=base_url, validation_errors=validation_errors) for entity_id, err in validation_errors.items(): log.error( - "Validation error while parsing {} (from {}). Removed @entityID='{}': {}".format( - base_url, source, entity_id, err - ) + f"Validation error while parsing {base_url} (from {source}). Removed @entityID='{entity_id}': {err}" ) else: # all or nothing log.debug(f"Validating (one-shot) {base_url}") @@ -484,7 +481,7 @@ def entitiesdescriptor( attrs['cacheDuration'] = cache_duration if valid_until is not None: attrs['validUntil'] = valid_until - t = etree.Element("{%s}EntitiesDescriptor" % NS['md'], **attrs) + t = etree.Element("{{{}}}EntitiesDescriptor".format(NS['md']), **attrs) for entity in entities: ent_insert = entity if copy: @@ -517,7 +514,7 @@ def entities_list(t=None): """ if t is None: return [] - elif root(t).tag == "{%s}EntityDescriptor" % NS['md']: + elif root(t).tag == "{{{}}}EntityDescriptor".format(NS['md']): return [root(t)] else: return iter_entities(t) @@ -526,7 +523,7 @@ def entities_list(t=None): def iter_entities(t): if t is None: return [] - return t.iter('{%s}EntityDescriptor' % NS['md']) + return t.iter('{{{}}}EntityDescriptor'.format(NS['md'])) def find_entity(t, e_id, attr='entityID'): @@ -540,7 +537,7 @@ def find_entity(t, e_id, attr='entityID'): # many thanks to Anders Lordahl & Scotty Logan for the idea 
def guess_entity_software(e): for elt in chain( - e.findall(".//{%s}SingleSignOnService" % NS['md']), e.findall(".//{%s}AssertionConsumerService" % NS['md']) + e.findall(".//{{{}}}SingleSignOnService".format(NS['md'])), e.findall(".//{{{}}}AssertionConsumerService".format(NS['md'])) ): location = elt.get('Location') if location: @@ -555,7 +552,7 @@ def guess_entity_software(e): return 'SimpleSAMLphp' if location.endswith('user/authenticate'): return 'KalturaSSP' - if location.endswith('adfs/ls') or location.endswith('adfs/ls/'): + if location.endswith(('adfs/ls', 'adfs/ls/')): return 'ADFS' if '/oala/' in location or 'login.openathens.net' in location: return 'OpenAthens' @@ -603,20 +600,20 @@ def guess_entity_software(e): def is_idp(entity): - return has_tag(entity, "{%s}IDPSSODescriptor" % NS['md']) + return has_tag(entity, "{{{}}}IDPSSODescriptor".format(NS['md'])) def is_sp(entity): - return has_tag(entity, "{%s}SPSSODescriptor" % NS['md']) + return has_tag(entity, "{{{}}}SPSSODescriptor".format(NS['md'])) def is_aa(entity): - return has_tag(entity, "{%s}AttributeAuthorityDescriptor" % NS['md']) + return has_tag(entity, "{{{}}}AttributeAuthorityDescriptor".format(NS['md'])) def _domains(entity): domains = [url2host(entity.get('entityID'))] - for d in entity.iter("{%s}DomainHint" % NS['mdui']): + for d in entity.iter("{{{}}}DomainHint".format(NS['mdui'])): if d.text not in domains: domains.append(d.text) return domains @@ -627,11 +624,11 @@ def _stext(e): if e.text is not None: return e.text.strip() - for ea in entity.iter("{%s}EntityAttributes" % NS['mdattr']): - for a in ea.iter("{%s}Attribute" % NS['saml']): + for ea in entity.iter("{{{}}}EntityAttributes".format(NS['mdattr'])): + for a in ea.iter("{{{}}}Attribute".format(NS['saml'])): an = a.get('Name', None) if a is not None: - values = [x for x in [_stext(v) for v in a.iter("{%s}AttributeValue" % NS['saml'])] if x is not None] + values = [x for x in [_stext(v) for v in 
a.iter("{{{}}}AttributeValue".format(NS['saml']))] if x is not None] cb(an, values) @@ -670,7 +667,7 @@ def find_in_document(t, member): if e.get('entityID') == member: lst.append(e) return lst - raise MetadataException("unknown format for filtr member: %s" % member) + raise MetadataException(f"unknown format for filtr member: {member}") def entity_attribute_dict(entity): @@ -706,24 +703,24 @@ def gen_icon(e): def entity_icon_url(e, langs=None): - for ico in filter_lang(e.iter("{%s}Logo" % NS['mdui']), langs=langs): + for ico in filter_lang(e.iter("{{{}}}Logo".format(NS['mdui'])), langs=langs): return dict(url=ico.text, width=ico.get('width'), height=ico.get('height')) def privacy_statement_url(entity, langs): - for url in filter_lang(entity.iter("{%s}PrivacyStatementURL" % NS['mdui']), langs=langs): + for url in filter_lang(entity.iter("{{{}}}PrivacyStatementURL".format(NS['mdui'])), langs=langs): return url.text def entity_geoloc(entity): - for loc in entity.iter("{%s}GeolocationHint" % NS['mdui']): + for loc in entity.iter("{{{}}}GeolocationHint".format(NS['mdui'])): pos = loc.text[5:].split(",") return dict(lat=pos[0], long=pos[1]) def entity_domains(entity): domains = [] - for d in entity.iter("{%s}DomainHint" % NS['mdui']): + for d in entity.iter("{{{}}}DomainHint".format(NS['mdui'])): if d.text == '.': return [] domains.append(d.text) @@ -733,18 +730,18 @@ def entity_domains(entity): def entity_extended_display_i18n(entity, default_lang=None): - name_dict = lang_dict(entity.iter("{%s}OrganizationName" % NS['md']), lambda e: e.text, default_lang=default_lang) + name_dict = lang_dict(entity.iter("{{{}}}OrganizationName".format(NS['md'])), lambda e: e.text, default_lang=default_lang) name_dict.update( - lang_dict(entity.iter("{%s}OrganizationDisplayName" % NS['md']), lambda e: e.text, default_lang=default_lang) + lang_dict(entity.iter("{{{}}}OrganizationDisplayName".format(NS['md'])), lambda e: e.text, default_lang=default_lang) ) - 
name_dict.update(lang_dict(entity.iter("{%s}ServiceName" % NS['md']), lambda e: e.text, default_lang=default_lang)) + name_dict.update(lang_dict(entity.iter("{{{}}}ServiceName".format(NS['md'])), lambda e: e.text, default_lang=default_lang)) name_dict.update( - lang_dict(entity.iter("{%s}DisplayName" % NS['mdui']), lambda e: e.text, default_lang=default_lang) + lang_dict(entity.iter("{{{}}}DisplayName".format(NS['mdui'])), lambda e: e.text, default_lang=default_lang) ) - desc_dict = lang_dict(entity.iter("{%s}OrganizationURL" % NS['md']), lambda e: e.text, default_lang=default_lang) + desc_dict = lang_dict(entity.iter("{{{}}}OrganizationURL".format(NS['md'])), lambda e: e.text, default_lang=default_lang) desc_dict.update( - lang_dict(entity.iter("{%s}Description" % NS['mdui']), lambda e: e.text, default_lang=default_lang) + lang_dict(entity.iter("{{{}}}Description".format(NS['mdui'])), lambda e: e.text, default_lang=default_lang) ) return name_dict, desc_dict @@ -753,8 +750,7 @@ def entity_extended_display_i18n(entity, default_lang=None): def entity_attribute(entity, attribute): values = None els = entity.findall( - './/{%s}EntityAttributes/{%s}Attribute[@Name="%s"]/{%s}AttributeValue' - % (NS['mdattr'], NS['saml'], attribute, NS['saml']) + './/{{{}}}EntityAttributes/{{{}}}Attribute[@Name="{}"]/{{{}}}AttributeValue'.format(NS['mdattr'], NS['saml'], attribute, NS['saml']) ) if len(els) > 0: values = [el.text for el in els] @@ -764,8 +760,7 @@ def entity_attribute(entity, attribute): def entity_categories(entity): cats = None cats_els = entity.findall( - './/{%s}EntityAttributes/{%s}Attribute[@Name="http://macedir.org/entity-category"]/{%s}AttributeValue' - % (NS['mdattr'], NS['saml'], NS['saml']) + './/{{{}}}EntityAttributes/{{{}}}Attribute[@Name="http://macedir.org/entity-category"]/{{{}}}AttributeValue'.format(NS['mdattr'], NS['saml'], NS['saml']) ) if len(cats_els) > 0: cats = [el.text for el in cats_els] @@ -775,8 +770,7 @@ def entity_categories(entity): def 
assurance_cetification(entity): certs = None certs_els = entity.findall( - './/{%s}EntityAttributes/{%s}Attribute[@Name="urn:oasis:names:tc:SAML:attribute:assurance-certification"]/{%s}AttributeValue' - % (NS['mdattr'], NS['saml'], NS['saml']) + './/{{{}}}EntityAttributes/{{{}}}Attribute[@Name="urn:oasis:names:tc:SAML:attribute:assurance-certification"]/{{{}}}AttributeValue'.format(NS['mdattr'], NS['saml'], NS['saml']) ) if len(certs_els) > 0: certs = [el.text for el in certs_els] @@ -786,8 +780,7 @@ def assurance_cetification(entity): def entity_category_support(entity): cats = None cats_els = entity.findall( - './/{%s}EntityAttributes/{%s}Attribute[@Name="http://macedir.org/entity-category-support"]/{%s}AttributeValue' - % (NS['mdattr'], NS['saml'], NS['saml']) + './/{{{}}}EntityAttributes/{{{}}}Attribute[@Name="http://macedir.org/entity-category-support"]/{{{}}}AttributeValue'.format(NS['mdattr'], NS['saml'], NS['saml']) ) if len(cats_els) > 0: cats = [el.text for el in cats_els] @@ -795,14 +788,14 @@ def entity_category_support(entity): def registration_authority(entity): - regauth_el = entity.find(".//{%s}RegistrationInfo" % NS['mdrpi']) + regauth_el = entity.find(".//{{{}}}RegistrationInfo".format(NS['mdrpi'])) if regauth_el is not None: return regauth_el.attrib.get('registrationAuthority') def discovery_responses(entity): responses = None - responses_els = entity.findall(".//{%s}DiscoveryResponse" % NS['idpdisc']) + responses_els = entity.findall(".//{{{}}}DiscoveryResponse".format(NS['idpdisc'])) if len(responses_els) > 0: responses = [el.attrib.get('Location') for el in responses_els] return responses @@ -817,31 +810,31 @@ def entity_extended_display(entity, langs=None): display = entity.get('entityID') info = '' - for organizationName in filter_lang(entity.iter("{%s}OrganizationName" % NS['md']), langs=langs): + for organizationName in filter_lang(entity.iter("{{{}}}OrganizationName".format(NS['md'])), langs=langs): info = display display = 
organizationName.text break - for organizationDisplayName in filter_lang(entity.iter("{%s}OrganizationDisplayName" % NS['md']), langs=langs): + for organizationDisplayName in filter_lang(entity.iter("{{{}}}OrganizationDisplayName".format(NS['md'])), langs=langs): info = display display = organizationDisplayName.text break - for serviceName in filter_lang(entity.iter("{%s}ServiceName" % NS['md']), langs=langs): + for serviceName in filter_lang(entity.iter("{{{}}}ServiceName".format(NS['md'])), langs=langs): info = display display = serviceName.text break - for displayName in filter_lang(entity.iter("{%s}DisplayName" % NS['mdui']), langs=langs): + for displayName in filter_lang(entity.iter("{{{}}}DisplayName".format(NS['mdui'])), langs=langs): info = display display = displayName.text break - for organizationUrl in filter_lang(entity.iter("{%s}OrganizationURL" % NS['md']), langs=langs): + for organizationUrl in filter_lang(entity.iter("{{{}}}OrganizationURL".format(NS['md'])), langs=langs): info = organizationUrl.text break - for description in filter_lang(entity.iter("{%s}Description" % NS['mdui']), langs=langs): + for description in filter_lang(entity.iter("{{{}}}Description".format(NS['mdui'])), langs=langs): info = description.text break @@ -857,16 +850,16 @@ def entity_display_name(entity: Element, langs=None) -> str: :param entity: An EntityDescriptor element :param langs: The list of languages to search in priority order """ - for displayName in filter_lang(entity.iter("{%s}DisplayName" % NS['mdui']), langs=langs): + for displayName in filter_lang(entity.iter("{{{}}}DisplayName".format(NS['mdui'])), langs=langs): return displayName.text.strip() - for serviceName in filter_lang(entity.iter("{%s}ServiceName" % NS['md']), langs=langs): + for serviceName in filter_lang(entity.iter("{{{}}}ServiceName".format(NS['md'])), langs=langs): return serviceName.text.strip() - for organizationDisplayName in filter_lang(entity.iter("{%s}OrganizationDisplayName" % NS['md']), 
langs=langs): + for organizationDisplayName in filter_lang(entity.iter("{{{}}}OrganizationDisplayName".format(NS['md'])), langs=langs): return organizationDisplayName.text.strip() - for organizationName in filter_lang(entity.iter("{%s}OrganizationName" % NS['md']), langs=langs): + for organizationName in filter_lang(entity.iter("{{{}}}OrganizationName".format(NS['md'])), langs=langs): return organizationName.text.strip() return entity.get('entityID').strip() @@ -953,7 +946,7 @@ def discojson(e, sources=None, langs=None, fallback_to_favicon=False, icon_store icon_info['url'] = ico d['entity_icon_url'] = icon_info - keywords = filter_lang(e.iter("{%s}Keywords" % NS['mdui']), langs=langs) + keywords = filter_lang(e.iter("{{{}}}Keywords".format(NS['mdui'])), langs=langs) if keywords is not None: lst = [elt.text for elt in keywords] if len(lst) > 0: @@ -982,7 +975,7 @@ def discojson_t(t, resource, icon_store=None): def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp = {} - tinfo_el = e.find('.//{%s}TrustInfo' % NS['ti']) + tinfo_el = e.find('.//{{{}}}TrustInfo'.format(NS['ti'])) if tinfo_el is None: return None @@ -997,42 +990,42 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp['extra_md'] = {} for md_source in md_sources: dname_external = {} - for dname in md_source.iterfind('.//{%s}DisplayName' % NS['ti']): - lang = dname.attrib['{%s}lang' % NS['xml']] + for dname in md_source.iterfind('.//{{{}}}DisplayName'.format(NS['ti'])): + lang = dname.attrib['{{{}}}lang'.format(NS['xml'])] dname_external[lang] = dname.text - for idp in md_source.findall("{%s}EntityDescriptor" % NS['md']): + for idp in md_source.findall("{{{}}}EntityDescriptor".format(NS['md'])): idp_json = discojson(idp) idp_json['hint'] = dname_external sp['extra_md'][idp_json['entityID']] = idp_json sp['profiles'] = {} # Grab trust profile emements, and translate to json - for profile_el in tinfo_el.findall('.//{%s}TrustProfile' % NS['ti']): + for profile_el in 
tinfo_el.findall('.//{{{}}}TrustProfile'.format(NS['ti'])): name = profile_el.attrib['name'] strict = profile_el.attrib.get('strict', True) strict = strict if type(strict) is bool else strict in ('t', 'T', 'true', 'True') sp['profiles'][name] = {'strict': strict, 'entity': [], 'entities': []} display_name = {} - for dname in profile_el.iterfind('.//{%s}DisplayName' % NS['ti']): - lang = dname.attrib['{%s}lang' % NS['xml']] + for dname in profile_el.iterfind('.//{{{}}}DisplayName'.format(NS['ti'])): + lang = dname.attrib['{{{}}}lang'.format(NS['xml'])] display_name[lang] = dname.text sp['profiles'][name]['display_name'] = display_name - fallback_handler = profile_el.find('.//{%s}FallbackHandler' % NS['ti']) + fallback_handler = profile_el.find('.//{{{}}}FallbackHandler'.format(NS['ti'])) if fallback_handler is not None: prof = fallback_handler.attrib.get('profile', 'href') handler = fallback_handler.text sp['profiles'][name]['fallback_handler'] = {'profile': prof, 'handler': handler} - for entity_el in profile_el.findall('.//{%s}TrustedEntity' % NS['ti']): + for entity_el in profile_el.findall('.//{{{}}}TrustedEntity'.format(NS['ti'])): entity_id = entity_el.text include = entity_el.attrib.get('include', True) include = include if type(include) is bool else include in ('t', 'T', 'true', 'True') sp['profiles'][name]['entity'].append({'entity_id': entity_id, 'include': include}) - for entities_el in profile_el.findall('.//{%s}TrustedEntities' % NS['ti']): + for entities_el in profile_el.findall('.//{{{}}}TrustedEntities'.format(NS['ti'])): select = entities_el.text match = entities_el.attrib.get('match', 'registrationAuthority') include = entities_el.attrib.get('include', True) @@ -1040,7 +1033,7 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp['profiles'][name]['entities'].append({'select': select, 'match': match, 'include': include}) if global_trust_info is not None and global_md_sources is not None: - for profileref_el in 
tinfo_el.findall('.//{%s}TrustProfileRef' % NS['ti']): + for profileref_el in tinfo_el.findall('.//{{{}}}TrustProfileRef'.format(NS['ti'])): refname = profileref_el.text sources = global_md_sources[sp['entityID']] for source in sources: @@ -1131,19 +1124,19 @@ def entity_simple_summary(e): def entity_orgurl(entity, langs=None): - for organizationUrl in filter_lang(entity.iter("{%s}OrganizationURL" % NS['md']), langs=langs): + for organizationUrl in filter_lang(entity.iter("{{{}}}OrganizationURL".format(NS['md'])), langs=langs): return organizationUrl.text return None def entity_service_name(entity, langs=None): - for serviceName in filter_lang(entity.iter("{%s}ServiceName" % NS['md']), langs=langs): + for serviceName in filter_lang(entity.iter("{{{}}}ServiceName".format(NS['md'])), langs=langs): return serviceName.text return None def entity_service_description(entity, langs=None): - for serviceName in filter_lang(entity.iter("{%s}ServiceDescription" % NS['md']), langs=langs): + for serviceName in filter_lang(entity.iter("{{{}}}ServiceDescription".format(NS['md'])), langs=langs): return serviceName.text return None @@ -1151,19 +1144,19 @@ def entity_service_description(entity, langs=None): def entity_requested_attributes(entity, langs=None): return [ (a.get('Name'), bool(a.get('isRequired'))) - for a in filter_lang(entity.iter("{%s}RequestedAttribute" % NS['md']), langs=langs) + for a in filter_lang(entity.iter("{{{}}}RequestedAttribute".format(NS['md'])), langs=langs) ] def entity_idp(entity): - for idp in entity.iter("{%s}IDPSSODescriptor" % NS['md']): + for idp in entity.iter("{{{}}}IDPSSODescriptor".format(NS['md'])): return idp return None def entity_sp(entity): - for sp in entity.iter("{%s}SPSSODescriptor" % NS['md']): + for sp in entity.iter("{{{}}}SPSSODescriptor".format(NS['md'])): return sp return None @@ -1171,16 +1164,16 @@ def entity_sp(entity): def entity_contacts(entity): def _contact_dict(contact): - first_name = first_text(contact, "{%s}GivenName" 
% NS['md']) - last_name = first_text(contact, "{%s}SurName" % NS['md']) - org = first_text(entity, "{%s}OrganizationName" % NS['md']) or first_text( - entity, "{%s}OrganizationDisplayName" % NS['md'] + first_name = first_text(contact, "{{{}}}GivenName".format(NS['md'])) + last_name = first_text(contact, "{{{}}}SurName".format(NS['md'])) + org = first_text(entity, "{{{}}}OrganizationName".format(NS['md'])) or first_text( + entity, "{{{}}}OrganizationDisplayName".format(NS['md']) ) - company = first_text(entity, "{%s}Company" % NS['md']) - mail = first_text(contact, "{%s}EmailAddress" % NS['md']) + company = first_text(entity, "{{{}}}Company".format(NS['md'])) + mail = first_text(contact, "{{{}}}EmailAddress".format(NS['md'])) display_name = "Unknown" if first_name and last_name: - display_name = ' '.join([first_name, last_name]) + display_name = f'{first_name} {last_name}' elif first_name: display_name = first_name elif last_name: @@ -1197,11 +1190,11 @@ def _contact_dict(contact): mail=mail, ) - return [_contact_dict(c) for c in entity.iter("{%s}ContactPerson" % NS['md'])] + return [_contact_dict(c) for c in entity.iter("{{{}}}ContactPerson".format(NS['md']))] def entity_nameid_formats(entity): - return [nif.text for nif in entity.iter("{%s}NameIDFormat" % NS['md'])] + return [nif.text for nif in entity.iter("{{{}}}NameIDFormat".format(NS['md']))] def object_id(e): @@ -1213,7 +1206,7 @@ def entity_simple_info(e, langs=None): d['service_name'] = entity_service_name(e, langs) d['service_descr'] = entity_service_description(e, langs) d['entity_attributes'] = entity_attribute_dict(e) - keywords = filter_lang(e.iter("{%s}Keywords" % NS['mdui']), langs=langs) + keywords = filter_lang(e.iter("{{{}}}Keywords".format(NS['mdui'])), langs=langs) if keywords is not None: lst = [elt.text for elt in keywords] if len(lst) > 0: @@ -1223,7 +1216,7 @@ def entity_simple_info(e, langs=None): def entity_info(e, langs=None): d = entity_simple_summary(e) - keywords = 
filter_lang(e.iter("{%s}Keywords" % NS['mdui']), langs=langs) + keywords = filter_lang(e.iter("{{{}}}Keywords".format(NS['mdui'])), langs=langs) if keywords is not None: lst = [elt.text for elt in keywords] if len(lst) > 0: @@ -1256,9 +1249,9 @@ def entity_extensions(e): :param e: an EntityDescriptor :return: a list """ - ext = e.find("./{%s}Extensions" % NS['md']) + ext = e.find("./{{{}}}Extensions".format(NS['md'])) if ext is None: - ext = etree.Element("{%s}Extensions" % NS['md']) + ext = etree.Element("{{{}}}Extensions".format(NS['md'])) e.insert(0, ext) return ext @@ -1273,11 +1266,11 @@ def annotate_entity(e, category, title, message, source=None): :param message: The ATOM content :param source: An optional source URL. It is added as a element with @rel='saml-metadata-source' """ - if e.tag != "{%s}EntityDescriptor" % NS['md'] and e.tag != "{%s}EntitiesDescriptor" % NS['md']: + if e.tag != "{{{}}}EntityDescriptor".format(NS['md']) and e.tag != "{{{}}}EntitiesDescriptor".format(NS['md']): raise MetadataException('I can only annotate EntityDescriptor or EntitiesDescriptor elements') subject = e.get('Name', e.get('entityID', None)) atom = ElementMaker(nsmap={'atom': 'http://www.w3.org/2005/Atom'}, namespace='http://www.w3.org/2005/Atom') - args = [atom.published("%s" % datetime.now().isoformat()), atom.link(href=subject, rel="saml-metadata-subject")] + args = [atom.published(f"{datetime.now().isoformat()}"), atom.link(href=subject, rel="saml-metadata-subject")] if source is not None: args.append(atom.link(href=source, rel="saml-metadata-source")) args.extend([atom.title(title), atom.category(term=category), atom.content(message, type="text/plain")]) @@ -1286,9 +1279,9 @@ def annotate_entity(e, category, title, message, source=None): def _entity_attributes(e): ext = entity_extensions(e) - ea = ext.find(".//{%s}EntityAttributes" % NS['mdattr']) + ea = ext.find(".//{{{}}}EntityAttributes".format(NS['mdattr'])) if ea is None: - ea = 
etree.Element("{%s}EntityAttributes" % NS['mdattr']) + ea = etree.Element("{{{}}}EntityAttributes".format(NS['mdattr'])) ext.append(ea) return ea @@ -1296,10 +1289,10 @@ def _entity_attributes(e): def _eattribute(e, attr, nf): ea = _entity_attributes(e) a = ea.xpath( - ".//saml:Attribute[@NameFormat='{}' and @Name='{}']".format(nf, attr), namespaces=NS, smart_strings=False + f".//saml:Attribute[@NameFormat='{nf}' and @Name='{attr}']", namespaces=NS, smart_strings=False ) if a is None or len(a) == 0: - a = etree.Element("{%s}Attribute" % NS['saml']) + a = etree.Element("{{{}}}Attribute".format(NS['saml'])) a.set('NameFormat', nf) a.set('Name', attr) ea.append(a) @@ -1316,18 +1309,18 @@ def set_entity_attributes(e, d, nf=NF_URI): :param nf: The nameFormat (by default "urn:oasis:names:tc:SAML:2.0:attrname-format:uri") to use. :raise: MetadataException unless e is an EntityDescriptor element """ - if e.tag != "{%s}EntityDescriptor" % NS['md']: + if e.tag != "{{{}}}EntityDescriptor".format(NS['md']): raise MetadataException("I can only add EntityAttribute(s) to EntityDescriptor elements") for attr, value in d.items(): a = _eattribute(e, attr, nf) - velt = etree.Element("{%s}AttributeValue" % NS['saml']) + velt = etree.Element("{{{}}}AttributeValue".format(NS['saml'])) velt.text = value a.append(velt) def set_pubinfo(e, publisher=None, creation_instant=None): - if e.tag != "{%s}EntitiesDescriptor" % NS['md']: + if e.tag != "{{{}}}EntitiesDescriptor".format(NS['md']): raise MetadataException("I can only set RegistrationAuthority to EntitiesDescriptor elements") if publisher is None: raise MetadataException("At least publisher must be provided") @@ -1336,10 +1329,10 @@ def set_pubinfo(e, publisher=None, creation_instant=None): creation_instant = datetime2iso(utc_now()) ext = entity_extensions(e) - pi = ext.find(".//{%s}PublicationInfo" % NS['mdrpi']) + pi = ext.find(".//{{{}}}PublicationInfo".format(NS['mdrpi'])) if pi is not None: raise MetadataException("A 
PublicationInfo element is already present") - pi = etree.Element("{%s}PublicationInfo" % NS['mdrpi']) + pi = etree.Element("{{{}}}PublicationInfo".format(NS['mdrpi'])) pi.set('publisher', publisher) if creation_instant: pi.set('creationInstant', creation_instant) @@ -1347,7 +1340,7 @@ def set_pubinfo(e, publisher=None, creation_instant=None): def set_reginfo(e, policy=None, authority=None): - if e.tag != "{%s}EntityDescriptor" % NS['md']: + if e.tag != "{{{}}}EntityDescriptor".format(NS['md']): raise MetadataException("I can only set RegistrationAuthority to EntityDescriptor elements") if authority is None: raise MetadataException("At least authority must be provided") @@ -1355,23 +1348,23 @@ def set_reginfo(e, policy=None, authority=None): policy = dict() ext = entity_extensions(e) - ri = ext.find(".//{%s}RegistrationInfo" % NS['mdrpi']) + ri = ext.find(".//{{{}}}RegistrationInfo".format(NS['mdrpi'])) if ri is not None: ext.remove(ri) - ri = etree.Element("{%s}RegistrationInfo" % NS['mdrpi']) + ri = etree.Element("{{{}}}RegistrationInfo".format(NS['mdrpi'])) ext.append(ri) ri.set('registrationAuthority', authority) for lang, policy_url in policy.items(): - rp = etree.Element("{%s}RegistrationPolicy" % NS['mdrpi']) + rp = etree.Element("{{{}}}RegistrationPolicy".format(NS['mdrpi'])) rp.text = policy_url - rp.set('{%s}lang' % NS['xml'], lang) + rp.set('{{{}}}lang'.format(NS['xml']), lang) ri.append(rp) def expiration(t): relt = root(t) - if relt.tag in ('{%s}EntityDescriptor' % NS['md'], '{%s}EntitiesDescriptor' % NS['md']): + if relt.tag in ('{{{}}}EntityDescriptor'.format(NS['md']), '{{{}}}EntitiesDescriptor'.format(NS['md'])): cache_duration = config.default_cache_duration valid_until = relt.get('validUntil', None) if valid_until is not None: @@ -1407,11 +1400,11 @@ def get_key(e): except AttributeError: pass except IndexError: - log.warning("Sort pipe: unable to sort entity by '{}'. 
Entity '{}' has no such value".format(sxp, eid)) + log.warning(f"Sort pipe: unable to sort entity by '{sxp}'. Entity '{eid}' has no such value") except TypeError: pass - log.debug("Generated sort key for entityID='{}' and {}='{}'".format(eid, sxp, sv)) + log.debug(f"Generated sort key for entityID='{eid}' and {sxp}='{sv}'") return sv is None, sv, eid container = root(t) @@ -1425,23 +1418,23 @@ def set_nodecountry(e, country_code): :param country_code: An ISO country code :raise: MetadataException unless e is an EntityDescriptor element """ - if e.tag != "{%s}EntityDescriptor" % NS['md']: + if e.tag != "{{{}}}EntityDescriptor".format(NS['md']): raise MetadataException("I can only add NodeCountry to EntityDescriptor elements") def _set_nodecountry_in_ext(ext_elt, iso_cc): - nc_elt = ext_elt.find("./{%s}NodeCountry" % NS['eidas']) + nc_elt = ext_elt.find("./{{{}}}NodeCountry".format(NS['eidas'])) if ext_elt is not None and nc_elt is None: - velt = etree.Element("{%s}NodeCountry" % NS['eidas']) + velt = etree.Element("{{{}}}NodeCountry".format(NS['eidas'])) velt.text = iso_cc ext_elt.append(velt) ext = None - idp = e.find("./{%s}IDPSSODescriptor" % NS['md']) + idp = e.find("./{{{}}}IDPSSODescriptor".format(NS['md'])) if idp is not None and len(idp) > 0: ext = entity_extensions(idp) _set_nodecountry_in_ext(ext, country_code) - sp = e.find("./{%s}SPSSODescriptor" % NS['md']) + sp = e.find("./{{{}}}SPSSODescriptor".format(NS['md'])) if sp is not None and len(sp) > 0: ext = entity_extensions(sp) _set_nodecountry_in_ext(ext, country_code) diff --git a/src/pyff/store.py b/src/pyff/store.py index d9dca93c..0b3672df 100644 --- a/src/pyff/store.py +++ b/src/pyff/store.py @@ -1,3 +1,4 @@ +import ipaddress import json import operator import os @@ -8,7 +9,6 @@ from io import BytesIO from threading import ThreadError -import ipaddress from cachetools.func import ttl_cache from redis_collections import Dict, Set from whoosh.fields import ID, KEYWORD, NGRAMWORDS, Schema @@ -265,7 
+265,7 @@ def lookup(self, key): def __iter__(self): for e in self.lookup("entities"): - log.debug("**** yield entityID=%s" % e.get('entityID')) + log.debug("**** yield entityID={}".format(e.get('entityID'))) yield e def size(self, a=None, v=None): @@ -294,7 +294,7 @@ def _select(self, member=None): src = None return self.select(src, xp=xp) - log.debug("calling store lookup %s" % member) + log.debug(f"calling store lookup {member}") return self.lookup(member) def __call__(self, *args, **kwargs): @@ -417,19 +417,19 @@ def search(self, query=None, path=None, entity_filter=None, related=None): def _strings(elt): lst = [] for attr in [ - '{%s}DisplayName' % NS['mdui'], - '{%s}ServiceName' % NS['md'], - '{%s}OrganizationDisplayName' % NS['md'], - '{%s}OrganizationName' % NS['md'], - '{%s}Keywords' % NS['mdui'], - '{%s}Scope' % NS['shibmd'], + '{{{}}}DisplayName'.format(NS['mdui']), + '{{{}}}ServiceName'.format(NS['md']), + '{{{}}}OrganizationDisplayName'.format(NS['md']), + '{{{}}}OrganizationName'.format(NS['md']), + '{{{}}}Keywords'.format(NS['mdui']), + '{{{}}}Scope'.format(NS['shibmd']), ]: lst.extend([s.text for s in elt.iter(attr)]) lst.append(elt.get('entityID')) return [item for item in lst if item is not None] def _ip_networks(elt): - return [ipaddress.ip_network(x.text) for x in elt.iter('{%s}IPHint' % NS['mdui'])] + return [ipaddress.ip_network(x.text) for x in elt.iter('{{{}}}IPHint'.format(NS['mdui']))] def _match(qq, elt): for q in qq: @@ -460,7 +460,7 @@ def _match(qq, elt): if f: mexpr = "+".join(f) - log.debug("match using '%s'" % mexpr) + log.debug(f"match using '{mexpr}'") res = [] for e in self.lookup(mexpr): d = None @@ -666,7 +666,7 @@ def update(self, t, tid=None, etag=None, lazy=True): relt = root(t) assert relt is not None - if relt.tag == "{%s}EntityDescriptor" % NS['md']: + if relt.tag == "{{{}}}EntityDescriptor".format(NS['md']): ref = object_id(relt) parts = None if ref in self.parts: @@ -675,7 +675,7 @@ def update(self, t, tid=None, 
etag=None, lazy=True): self.parts[ref] = {'id': relt.get('entityID'), 'etag': etag, 'count': 1, 'items': [ref]} self.objects[ref] = relt self._last_modified = datetime.now() - elif relt.tag == "{%s}EntitiesDescriptor" % NS['md']: + elif relt.tag == "{{{}}}EntitiesDescriptor".format(NS['md']): if tid is None: tid = relt.get('Name') if etag is None: @@ -883,13 +883,13 @@ def collections(self): def update(self, t, tid=None, etag=None, lazy=True): relt = root(t) assert relt is not None - if relt.tag == "{%s}EntityDescriptor" % NS['md']: + if relt.tag == "{{{}}}EntityDescriptor".format(NS['md']): self._unindex(relt) self._index(relt) self.entities[relt.get('entityID')] = relt # TODO: merge? if tid is not None: self.md[tid] = [relt.get('entityID')] - elif relt.tag == "{%s}EntitiesDescriptor" % NS['md']: + elif relt.tag == "{{{}}}EntitiesDescriptor".format(NS['md']): if tid is None: tid = relt.get('Name') lst = [] diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index 3b858cfd..fb966601 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -1,3 +1,4 @@ +import importlib.resources import logging import os import socket @@ -6,7 +7,6 @@ import tempfile from unittest import TestCase -import importlib.resources from pyff import __version__ as pyffversion # range of ports where available ports can be found @@ -45,10 +45,10 @@ def run_pyffd(*args): def run_cmdline(script, *args): argv = list(*args) starter = tempfile.NamedTemporaryFile('w').name - print("starting {} using {}".format(script, starter)) + print(f"starting {script} using {starter}") with open(starter, 'w') as fd: fd.write( - """#!%s + f"""#!{sys.executable} import sys import coverage import os @@ -58,17 +58,16 @@ def run_cmdline(script, *args): cov.start() rv = 0 try: - rv = load_entry_point('pyFF==%s', 'console_scripts', '%s')() + rv = load_entry_point('pyFF=={pyffversion}', 'console_scripts', '{script}')() except Exception as ex: raise ex finally: cov.stop() cov.save() - 
os.rename('.coverage','.coverage.%%d' %% os.getpid()) + os.rename('.coverage','.coverage.%d' % os.getpid()) sys.exit(rv) """ - % (sys.executable, pyffversion, script) ) os.chmod(starter, 0o700) @@ -107,9 +106,8 @@ def _p(args, outf=None, ignore_exit=False): if outf is not None: with open(outf, "w") as fd: fd.write(out.decode('UTF-8')) - else: - if out is not None and len(out) > 0: - logging.debug(out.decode('UTF-8')) + elif out is not None and len(out) > 0: + logging.debug(out.decode('UTF-8')) rv = proc.wait() if rv and not ignore_exit: raise RuntimeError("command exited with code != 0: %d" % rv) diff --git a/src/pyff/test/test_decorators.py b/src/pyff/test/test_decorators.py index 3ff20507..c28ee410 100644 --- a/src/pyff/test/test_decorators.py +++ b/src/pyff/test/test_decorators.py @@ -1,8 +1,7 @@ import logging +from io import StringIO from unittest import TestCase - from unittest.mock import patch -from io import StringIO from pyff.decorators import deprecated diff --git a/src/pyff/test/test_log.py b/src/pyff/test/test_log.py index 222709dc..b130374f 100644 --- a/src/pyff/test/test_log.py +++ b/src/pyff/test/test_log.py @@ -1,8 +1,7 @@ import logging +from io import StringIO from unittest import TestCase - from unittest.mock import patch -from io import StringIO from pyff.logs import SysLogLibHandler, log diff --git a/src/pyff/test/test_md_api.py b/src/pyff/test/test_md_api.py index 0b08f134..a9fc8a40 100644 --- a/src/pyff/test/test_md_api.py +++ b/src/pyff/test/test_md_api.py @@ -62,7 +62,7 @@ def test_status(self): def test_parse_robots(self): try: - import urllib.robotparser as robotparser + from urllib import robotparser except ImportError: raise unittest.SkipTest() diff --git a/src/pyff/test/test_mdsl.py b/src/pyff/test/test_mdsl.py index af6414c3..49821964 100644 --- a/src/pyff/test/test_mdsl.py +++ b/src/pyff/test/test_mdsl.py @@ -1,17 +1,17 @@ import os import tempfile +from io import StringIO import pytest import yaml from mako.lookup import 
TemplateLookup -from io import StringIO + from pyff import builtins +from pyff.constants import NS from pyff.pipes import Plumbing, plumbing from pyff.repo import MDRepository from pyff.test import SignerTestCase from pyff.utils import parse_xml -from pyff.constants import NS - __author__ = 'leifj' diff --git a/src/pyff/test/test_pipeline.py b/src/pyff/test/test_pipeline.py index 411742bb..269093a4 100644 --- a/src/pyff/test/test_pipeline.py +++ b/src/pyff/test/test_pipeline.py @@ -2,12 +2,12 @@ import os import shutil import tempfile +from io import StringIO +from unittest.mock import patch import pytest import yaml -from io import StringIO from mako.lookup import TemplateLookup -from unittest.mock import patch from pyff import builtins from pyff.exceptions import MetadataException @@ -729,18 +729,18 @@ def test_discojson_sp(self): os.rmdir(tmpdir) # lets make sure 'store' can recreate it try: self.exec_pipeline( - """ + f""" - load: - - file://{}/metadata/test02-sp.xml + - file://{self.datadir}/metadata/test02-sp.xml - select - discojson_sp - publish: - output: {}/disco_sp.json + output: {tmpdir}/disco_sp.json raw: true update_store: false -""".format(self.datadir, tmpdir) +""" ) - fn = "%s/disco_sp.json" % tmpdir + fn = f"{tmpdir}/disco_sp.json" assert os.path.exists(fn) with open(fn) as f: sp_json = json.load(f) @@ -781,18 +781,18 @@ def test_discojson_sp_trustinfo_in_attr(self): os.rmdir(tmpdir) # lets make sure 'store' can recreate it try: self.exec_pipeline( - """ + f""" - load: - - file://{}/metadata/test-sp-trustinfo-in-attr.xml + - file://{self.datadir}/metadata/test-sp-trustinfo-in-attr.xml - select - discojson_sp_attr - publish: - output: {}/disco_sp_attr.json + output: {tmpdir}/disco_sp_attr.json raw: true update_store: false -""".format(self.datadir, tmpdir) +""" ) - fn = "%s/disco_sp_attr.json" % tmpdir + fn = f"{tmpdir}/disco_sp_attr.json" assert os.path.exists(fn) with open(fn) as f: sp_json = json.load(f) diff --git a/src/pyff/test/test_repo.py 
b/src/pyff/test/test_repo.py index 5d0bbce6..db8bfe2c 100644 --- a/src/pyff/test/test_repo.py +++ b/src/pyff/test/test_repo.py @@ -102,17 +102,17 @@ def test_utils(self): disp = entity_display_name(e) assert disp == 'Example University' - for elt in e.findall(".//{%s}DisplayName" % NS['mdui']): + for elt in e.findall(".//{{{}}}DisplayName".format(NS['mdui'])): elt.getparent().remove(elt) disp = entity_display_name(e) assert disp == 'The Example University' - for elt in e.findall(".//{%s}OrganizationDisplayName" % NS['md']): + for elt in e.findall(".//{{{}}}OrganizationDisplayName".format(NS['md'])): elt.getparent().remove(elt) disp = entity_display_name(e) assert disp == 'ExampleU' - for elt in e.findall(".//{%s}OrganizationName" % NS['md']): + for elt in e.findall(".//{{{}}}OrganizationName".format(NS['md'])): elt.getparent().remove(elt) disp = entity_display_name(e) diff --git a/src/pyff/test/test_rwlock.py b/src/pyff/test/test_rwlock.py index fba820cd..d4e8e91e 100644 --- a/src/pyff/test/test_rwlock.py +++ b/src/pyff/test/test_rwlock.py @@ -30,7 +30,7 @@ def timeout_writer(self, timeout=1): self.lock.acquireWrite(timeout=timeout, blocking=False) # upgrade to write self.lock.acquireWrite(blocking=True) # get it twice... 
- print("thread (writer): %s starting" % current_thread().name) + print(f"thread (writer): {current_thread().name} starting") self.writer_active = True sleep(1) except Exception as ex: @@ -42,13 +42,13 @@ def timeout_writer(self, timeout=1): pass self.writer_active = False - print("thread: %s exiting" % current_thread().name) + print(f"thread: {current_thread().name} exiting") def timeout_reader(self, to_wait_for, timeout=1): try: self.lock.acquireRead(timeout=timeout) assert not self.writer_active - print("thread (reader): %s starting" % current_thread().name) + print(f"thread (reader): {current_thread().name} starting") self.readers += 1 while to_wait_for - self.readers > 0: assert not self.writer_active @@ -62,17 +62,17 @@ def timeout_reader(self, to_wait_for, timeout=1): except ValueError: # ignore double release error pass - print("thread (reader): %s exiting" % current_thread().name) + print(f"thread (reader): {current_thread().name} exiting") def writer(self): try: with self.lock.writelock: - print("thread (writer): %s starting" % current_thread().name) + print(f"thread (writer): {current_thread().name} starting") self.writer_active = True self.lock.acquireRead(timeout=0.1) # make sure we can get a readlock as a writer sleep(1) self.writer_active = False - print("thread: %s exiting" % current_thread().name) + print(f"thread: {current_thread().name} exiting") except Exception as ex: self.exceptions[current_thread().name] = ex finally: @@ -85,13 +85,13 @@ def reader(self, to_wait_for): try: with self.lock.readlock: assert not self.writer_active - print("thread (reader): %s starting" % current_thread().name) + print(f"thread (reader): {current_thread().name} starting") self.readers += 1 while to_wait_for - self.readers > 0: assert not self.writer_active print("waiting for %d more readers" % (to_wait_for - self.readers)) sleep(0.1) - print("thread (reader): %s exiting" % current_thread().name) + print(f"thread (reader): {current_thread().name} exiting") except 
Exception as ex: self.exceptions[current_thread().name] = ex @@ -129,13 +129,13 @@ def test_deadlock(self): self.reset() try: w = [] - for i in range(0, 10): - w.append(Thread(target=self._rww, name="w%s" % i)) - for i in range(0, 10): + for i in range(10): + w.append(Thread(target=self._rww, name=f"w{i}")) + for i in range(10): w[i].start() - for i in range(0, 10): + for i in range(10): w[i].join() - for i in range(0, 10): + for i in range(10): self._raise(w[i]) assert False except ValueError: diff --git a/src/pyff/test/test_simple_pipeline.py b/src/pyff/test/test_simple_pipeline.py index cdaeb683..50c0e470 100644 --- a/src/pyff/test/test_simple_pipeline.py +++ b/src/pyff/test/test_simple_pipeline.py @@ -47,7 +47,7 @@ def test_non_zero_output(self): def test_select_single(self): assert self.validator_result is not None - entities = self.validator_result.findall('{%s}EntityDescriptor' % NS['md']) + entities = self.validator_result.findall('{{{}}}EntityDescriptor'.format(NS['md'])) assert len(entities) == 1 assert entities[0].get('entityID') == 'https://idp.aco.net/idp/shibboleth' diff --git a/src/pyff/test/test_time.py b/src/pyff/test/test_time.py index 1a17f0a1..94df677b 100644 --- a/src/pyff/test/test_time.py +++ b/src/pyff/test/test_time.py @@ -11,9 +11,9 @@ class TestDuration(TestCase): def test_duration2timedelta(self): for expr, secs in TestDuration.DURATIONS: td = duration2timedelta(expr) - print("timedelta: %s" % td) - print("duration: %s" % expr) - print("expected seconds: %s" % secs) + print(f"timedelta: {td}") + print(f"duration: {expr}") + print(f"expected seconds: {secs}") assert int(td.total_seconds()) == secs assert int(total_seconds(td)) == secs diff --git a/src/pyff/test/test_utils.py b/src/pyff/test/test_utils.py index c58b0141..5d5a5b90 100644 --- a/src/pyff/test/test_utils.py +++ b/src/pyff/test/test_utils.py @@ -6,6 +6,7 @@ from pyff import utils from pyff.constants import NS, as_list_of_string +from pyff.merge_strategies import remove, 
replace_existing from pyff.resource import Resource, ResourceOpts from pyff.samlmd import entities_list, find_entity from pyff.utils import ( @@ -22,8 +23,6 @@ url_get, ) -from pyff.merge_strategies import remove, replace_existing - class TestMetadata(TestCase): def setUp(self): @@ -52,13 +51,13 @@ def test_replace_ndn(self): assert idp is not None idp2 = copy.deepcopy(idp) assert idp2 is not None - for o in idp2.findall(".//{%s}OrganizationName" % NS['md']): + for o in idp2.findall(".//{{{}}}OrganizationName".format(NS['md'])): o.text = "FOO" idp2.set('ID', 'kaka4711') replace_existing(idp, idp2) idp3 = find_entity(root(self.t2), 'kaka4711', attr='ID') assert idp3 is not None - for o in idp2.findall(".//{%s}OrganizationName" % NS['md']): + for o in idp2.findall(".//{{{}}}OrganizationName".format(NS['md'])): assert o.text == "FOO" remove(idp3, None) idp = find_entity(root(self.t2), 'kaka4711', attr='ID') diff --git a/src/pyff/utils.py b/src/pyff/utils.py index 7e7ad901..3df96e65 100644 --- a/src/pyff/utils.py +++ b/src/pyff/utils.py @@ -17,6 +17,7 @@ import time import traceback from _collections_abc import Mapping, MutableMapping +from collections.abc import Sequence from copy import copy from datetime import datetime, timedelta, timezone from email.utils import parsedate @@ -24,7 +25,7 @@ from threading import local from time import gmtime, strftime from typing import Any, BinaryIO, Callable, Optional, Union -from collections.abc import Sequence +from urllib.parse import urlparse import pkg_resources import requests @@ -42,7 +43,6 @@ from requests.structures import CaseInsensitiveDict from requests_cache import CachedSession from requests_file import FileAdapter -from urllib.parse import urlparse from pyff import __version__ from pyff.constants import NS, config @@ -67,7 +67,7 @@ def _f(x): return False return True - return "\n".join(filter(_f, ["%s" % e for e in error_log])) + return "\n".join(filter(_f, [f"{e}" for e in error_log])) def debug_observer(e): @@ 
-105,8 +105,8 @@ def resource_string(name: str, pfx: Optional[str] = None) -> Optional[Union[str, data = fd.read() elif pkg_resources.resource_exists(__name__, name): data = pkg_resources.resource_string(__name__, name) - elif pfx and pkg_resources.resource_exists(__name__, "{}/{}".format(pfx, name)): - data = pkg_resources.resource_string(__name__, "{}/{}".format(pfx, name)) + elif pfx and pkg_resources.resource_exists(__name__, f"{pfx}/{name}"): + data = pkg_resources.resource_string(__name__, f"{pfx}/{name}") return data @@ -134,8 +134,8 @@ def resource_filename(name, pfx=None): return os.path.join(pfx, name) elif pkg_resources.resource_exists(__name__, name): return pkg_resources.resource_filename(__name__, name) - elif pfx and pkg_resources.resource_exists(__name__, "{}/{}".format(pfx, name)): - return pkg_resources.resource_filename(__name__, "{}/{}".format(pfx, name)) + elif pfx and pkg_resources.resource_exists(__name__, f"{pfx}/{name}"): + return pkg_resources.resource_filename(__name__, f"{pfx}/{name}") return None @@ -211,10 +211,10 @@ def resolve(self, system_url, public_id, context): fn = path[len(path) - 1] if pkg_resources.resource_exists(__name__, fn): return self.resolve_file(pkg_resources.resource_stream(__name__, fn), context) - elif pkg_resources.resource_exists(__name__, "schema/%s" % fn): - return self.resolve_file(pkg_resources.resource_stream(__name__, "schema/%s" % fn), context) + elif pkg_resources.resource_exists(__name__, f"schema/{fn}"): + return self.resolve_file(pkg_resources.resource_stream(__name__, f"schema/{fn}"), context) else: - raise ValueError("Unable to locate %s" % fn) + raise ValueError(f"Unable to locate {fn}") thread_local_lock = threading.Lock() @@ -265,7 +265,7 @@ def redis(): def check_signature(t: ElementTree, key: Optional[str], only_one_signature: bool = False) -> ElementTree: if key is not None: - log.debug("verifying signature using %s" % key) + log.debug(f"verifying signature using {key}") refs = 
xmlsec.verified(t, key, drop_signature=True) if only_one_signature and len(refs) != 1: raise MetadataException("XML metadata contains %d signatures - exactly 1 is required" % len(refs)) @@ -303,7 +303,7 @@ def safe_write(fn, data, mkdirs=False): try: fn = os.path.expanduser(fn) dirname, basename = os.path.split(fn) - kwargs = dict(delete=False, prefix=".%s" % basename, dir=dirname) + kwargs = dict(delete=False, prefix=f".{basename}", dir=dirname) kwargs['encoding'] = "utf-8" mode = 'w+' @@ -445,11 +445,11 @@ def xslt_transform(t, stylesheet, params=None): return transform(t, **params) except etree.XSLTApplyError as ex: for entry in transform.error_log: - log.error('\tmessage from line {}, col {}: {}'.format(entry.line, entry.column, entry.message)) + log.error(f'\tmessage from line {entry.line}, col {entry.column}: {entry.message}') log.error('\tdomain: %s (%d)' % (entry.domain_name, entry.domain)) log.error('\ttype: %s (%d)' % (entry.type_name, entry.type)) log.error('\tlevel: %s (%d)' % (entry.level_name, entry.level)) - log.error('\tfilename: %s' % entry.filename) + log.error(f'\tfilename: {entry.filename}') raise ex @@ -492,7 +492,7 @@ def hash_id(entity: Element, hn: str = 'sha1', prefix: bool = True) -> str: hstr = hex_digest(entity_id, hn) if prefix: - return "{{{}}}{}".format(hn, hstr) + return f"{{{hn}}}{hstr}" else: return hstr @@ -502,7 +502,7 @@ def hex_digest(data, hn='sha1'): return data if not hasattr(hashlib, hn): - raise ValueError("Unknown digest '%s'" % hn) + raise ValueError(f"Unknown digest '{hn}'") if not isinstance(data, bytes): data = data.encode("utf-8") @@ -594,7 +594,7 @@ def load_callable(name): # many thanks to Anders Lordahl & Scotty Logan for the idea def guess_entity_software(e): for elt in chain( - e.findall(".//{%s}SingleSignOnService" % NS['md']), e.findall(".//{%s}AssertionConsumerService" % NS['md']) + e.findall(".//{{{}}}SingleSignOnService".format(NS['md'])), e.findall(".//{{{}}}AssertionConsumerService".format(NS['md'])) ): 
location = elt.get('Location') if location: @@ -609,7 +609,7 @@ def guess_entity_software(e): return 'SimpleSAMLphp' if location.endswith('user/authenticate'): return 'KalturaSSP' - if location.endswith('adfs/ls') or location.endswith('adfs/ls/'): + if location.endswith(('adfs/ls', 'adfs/ls/')): return 'ADFS' if '/oala/' in location or 'login.openathens.net' in location: return 'OpenAthens' From 3be1b21c98479a5208aeac1261ee5ddeb2431608 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Tue, 27 May 2025 07:35:51 +0200 Subject: [PATCH 53/53] Prepare release. --- NEWS.txt | 8 ++++++++ RELEASE.txt | 2 +- docs/conf.py | 2 +- pyproject.toml | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/NEWS.txt b/NEWS.txt index 5c664af4..ee8433c1 100644 --- a/NEWS.txt +++ b/NEWS.txt @@ -179,3 +179,11 @@ to sign using HSMs. The only mandatory non-python dependency now is lxml. * Remove cherrypy imports * Fix logging * suport SP trust metadata in an entity attribute as JSON blob + +2.1.4 +----- +* Release date: Tue May 27 07:29:08 CEST 2025 + +* Lots of cleanups, pyupgrade and linting fixes. +* MDSL fixes + diff --git a/RELEASE.txt b/RELEASE.txt index 0dadba21..d8dd2a82 100644 --- a/RELEASE.txt +++ b/RELEASE.txt @@ -3,7 +3,7 @@ Release HOWTO To make a release, - 1) remove dev suffix on version in setup.py + 1) remove dev suffix on version in pyproject.toml 2) set release date in NEWS.txt 2b) update the version in docs/conf.py 3) commit the changes to setup.py and NEWS.txt diff --git a/docs/conf.py b/docs/conf.py index c829af9c..2f42b4b7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,7 +59,7 @@ def setup(app): # The short X.Y version. version = '2.1' # The full version, including alpha/beta/rc tags. -release = '2.1.3' +release = '2.1.4' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index 5cd0bfc1..be9db851 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "pyFF" -version = "2.1.5" +version = "2.1.4" readme = "README.rst" description = "Federation Feeder" requires-python = ">=3.9"