From 5a69de849377a8deea89ecbf24f3389c3b58a4f4 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 14:30:16 +0200 Subject: [PATCH 1/5] Switch to ruff for formatting instead of black. This helps Language Servers keep the formatting sane. --- Makefile | 3 +-- pyproject.toml | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 pyproject.toml diff --git a/Makefile b/Makefile index d1b20d9f..3ab85c56 100644 --- a/Makefile +++ b/Makefile @@ -15,8 +15,7 @@ test_coverage: coverage combine reformat: - isort --line-width 120 --atomic --project eduid_scimapi --recursive $(SOURCE) - black --line-length 120 --target-version py37 --skip-string-normalization $(SOURCE) + ruff format $(SOURCE) typecheck: mypy --ignore-missing-imports $(SOURCE) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..a9d009aa --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,32 @@ +[project] +name = "pyFF" +version = "2.1.5" +readme = "README.rst" +description = "Federation Feeder" +requires-python = ">=3.7" +license = {file = "LICENSE"} + +authors = [ + {name = "Leif Johansson", email = "leifj@sunet.se"}, + {name = "Fredrik Thulin", email = "fredrik@thulin.net"}, + {name = "Enrique Pérez Arnaud"}, + {name = "Mikael Frykholm", email = "mifr@sunet.se"}, +] +maintainers = [ + {name = "Mikael Frykholm", email = "mifr@sunet.se"} +] + +[tool.ruff] +# Allow lines to be as long as 120. 
+line-length = 120 +target-version = "py37" +[tool.ruff.format] +quote-style = "preserve" + +[tool.build_sphinx] +source-dir = "docs/" +build-dir = "docs/build" +all_files = "1" + +[tool.upload_sphinx] +upload-dir = "docs/build/html" From a6af871348e268900dd9db7e107f0a58c3e03b96 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 14:41:57 +0200 Subject: [PATCH 2/5] Fix Pydantic deprecation copy > model_copy --- src/pyff/builtins.py | 13 ++++++------- src/pyff/parse.py | 8 +++++--- src/pyff/samlmd.py | 27 ++++++++++++++++++--------- 3 files changed, 29 insertions(+), 19 deletions(-) diff --git a/src/pyff/builtins.py b/src/pyff/builtins.py index 8bdd3a2d..b00cbd63 100644 --- a/src/pyff/builtins.py +++ b/src/pyff/builtins.py @@ -256,7 +256,7 @@ def fork(req: Plumbing.Request, *opts): **parsecopy** Due to a hard to find bug, fork which uses deepcopy can lose some namespaces. The parsecopy argument is a workaround. - It uses a brute force serialisation and deserialisation to get around the bug. + It uses a brute force serialisation and deserialisation to get around the bug. .. 
code-block:: yaml @@ -676,7 +676,7 @@ def load(req: Plumbing.Request, *opts): url = r.pop(0) # Copy parent node opts as a starting point - child_opts = req.md.rm.opts.copy(update={"via": [], "cleanup": [], "verify": None, "alias": url}) + child_opts = req.md.rm.opts.model_copy(update={"via": [], "cleanup": [], "verify": None, "alias": url}) while len(r) > 0: elt = r.pop(0) @@ -702,7 +702,7 @@ def load(req: Plumbing.Request, *opts): child_opts.verify = elt # override anything in child_opts with what is in opts - child_opts = child_opts.copy(update=_opts) + child_opts = child_opts.model_copy(update=_opts) req.md.rm.add_child(url, child_opts) @@ -814,7 +814,7 @@ def select(req: Plumbing.Request, *opts): else: _opts['as'] = opts[i] if i + 1 < len(opts): - more_opts = opts[i + 1:] + more_opts = opts[i + 1 :] _opts.update(dict(list(zip(more_opts[::2], more_opts[1::2])))) break @@ -835,7 +835,6 @@ def select(req: Plumbing.Request, *opts): entities = resolve_entities(args, lookup_fn=req.md.store.select, dedup=dedup) if req.state.get('match', None): # TODO - allow this to be passed in via normal arguments - match = req.state['match'] if isinstance(match, six.string_types): @@ -1304,7 +1303,7 @@ def xslt(req: Plumbing.Request, *opts): if stylesheet is None: raise PipeException("xslt requires stylesheet") - params = dict((k, "\'%s\'" % v) for (k, v) in list(req.args.items())) + params = dict((k, "'%s'" % v) for (k, v) in list(req.args.items())) del params['stylesheet'] try: return root(xslt_transform(req.t, stylesheet, params)) @@ -1312,6 +1311,7 @@ def xslt(req: Plumbing.Request, *opts): log.debug(traceback.format_exc()) raise ex + @pipe def indent(req: Plumbing.Request, *opts): """ @@ -1710,7 +1710,6 @@ def finalize(req: Plumbing.Request, *opts): if name is None or 0 == len(name): name = req.state.get('url', None) if name and 'baseURL' in req.args: - try: name_url = urlparse(name) base_url = urlparse(req.args.get('baseURL')) diff --git a/src/pyff/parse.py 
b/src/pyff/parse.py index 9162d743..7737f43b 100644 --- a/src/pyff/parse.py +++ b/src/pyff/parse.py @@ -9,7 +9,7 @@ from pyff.constants import NS from pyff.logs import get_log -from pyff.resource import Resource,ResourceInfo +from pyff.resource import Resource, ResourceInfo from pyff.utils import find_matching_files, parse_xml, root, unicode_stream, utc_now __author__ = 'leifj' @@ -30,8 +30,10 @@ def _format_key(k: str) -> str: res = {_format_key(k): v for k, v in self.dict().items()} return res + ResourceInfo.model_rebuild() + class ParserException(Exception): def __init__(self, msg, wrapped=None, data=None): self._wraped = wrapped @@ -84,7 +86,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: info = ParserInfo(description='Directory', expiration_time='never expires') n = 0 for fn in find_matching_files(content, self.extensions): - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) resource.add_child("file://" + urlescape(fn), child_opts) n += 1 @@ -122,7 +124,7 @@ def parse(self, resource: Resource, content: str) -> ParserInfo: if len(fingerprints) > 0: fp = fingerprints[0] log.debug("XRD: {} verified by {}".format(link_href, fp)) - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) resource.add_child(link_href, child_opts) resource.last_seen = utc_now().replace(microsecond=0) resource.expire_time = None diff --git a/src/pyff/samlmd.py b/src/pyff/samlmd.py index 9be47176..671f32f4 100644 --- a/src/pyff/samlmd.py +++ b/src/pyff/samlmd.py @@ -86,7 +86,10 @@ def find_merge_strategy(strategy_name): def parse_saml_metadata( - source: BytesIO, opts: ResourceOpts, base_url=None, validation_errors: Optional[Dict[str, Any]] = None, + source: BytesIO, + opts: ResourceOpts, + base_url=None, + validation_errors: Optional[Dict[str, Any]] = None, ): """Parse a piece of XML and return an EntitiesDescriptor element after 
validation. @@ -192,7 +195,10 @@ def _extra_md(_t, info, **kwargs): location = kwargs.get('location') sp_entity = sp_entities.find("{%s}EntityDescriptor[@entityID='%s']" % (NS['md'], entityID)) if sp_entity is not None: - md_source = sp_entity.find("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" % (NS['md'], NS['md'], NS['ti'], NS['ti'], location)) + md_source = sp_entity.find( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource[@src='%s']" + % (NS['md'], NS['md'], NS['ti'], NS['ti'], location) + ) for e in iter_entities(_t): md_source.append(e) return etree.Element("{%s}EntitiesDescriptor" % NS['md']) @@ -205,11 +211,14 @@ def _extra_md(_t, info, **kwargs): entityID = e.get('entityID') info.entities.append(entityID) - md_source = e.find("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti'])) + md_source = e.find( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" + % (NS['md'], NS['md'], NS['ti'], NS['ti']) + ) if md_source is not None: location = md_source.attrib.get('src') if location is not None: - child_opts = resource.opts.copy(update={'alias': entityID}) + child_opts = resource.opts.model_copy(update={'alias': entityID}) r = resource.add_child(location, child_opts) kwargs = { 'entityID': entityID, @@ -311,7 +320,7 @@ def parse(self, resource: Resource, content: str) -> EidasMDParserInfo: info.scheme_territory, location, fp, args.get('country_code') ) ) - child_opts = resource.opts.copy(update={'alias': None}) + child_opts = resource.opts.model_copy(update={'alias': None}) child_opts.verify = fp r = resource.add_child(location, child_opts) @@ -725,7 +734,6 @@ def entity_domains(entity): def entity_extended_display_i18n(entity, default_lang=None): - name_dict = lang_dict(entity.iter("{%s}OrganizationName" % NS['md']), lambda e: e.text, default_lang=default_lang) name_dict.update( lang_dict(entity.iter("{%s}OrganizationDisplayName" 
% NS['md']), lambda e: e.text, default_lang=default_lang) @@ -981,7 +989,9 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): sp['entityID'] = e.get('entityID', None) - md_sources = e.findall("{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti'])) + md_sources = e.findall( + "{%s}SPSSODescriptor/{%s}Extensions/{%s}TrustInfo/{%s}MetadataSource" % (NS['md'], NS['md'], NS['ti'], NS['ti']) + ) sp['extra_md'] = {} for md_source in md_sources: @@ -1041,7 +1051,6 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None): def discojson_sp_attr(e): - attribute = "https://refeds.org/entity-selection-profile" b64_trustinfos = entity_attribute(e, attribute) if b64_trustinfos is None: @@ -1395,7 +1404,7 @@ def get_key(e): except AttributeError: pass except IndexError: - log.warning("Sort pipe: unable to sort entity by '%s'. " "Entity '%s' has no such value" % (sxp, eid)) + log.warning("Sort pipe: unable to sort entity by '%s'. Entity '%s' has no such value" % (sxp, eid)) except TypeError: pass From 19fb6b34ad3f9683b0e2d1cdeb8b4b5c0b908f4d Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Mon, 7 Apr 2025 15:57:10 +0200 Subject: [PATCH 3/5] Started removing pkg_resources. Formatting fixes. --- src/pyff/__init__.py | 9 ++------- src/pyff/api.py | 12 ++++++------ src/pyff/test/__init__.py | 9 +++++---- 3 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/pyff/__init__.py b/src/pyff/__init__.py index 5e45b2b6..7d54b185 100644 --- a/src/pyff/__init__.py +++ b/src/pyff/__init__.py @@ -2,11 +2,6 @@ pyFF is a SAML metadata aggregator. 
""" -import pkg_resources +import importlib.metadata -__author__ = 'Leif Johansson' -__copyright__ = "Copyright 2009-2018 SUNET and the IdentityPython Project" -__license__ = "BSD" -__maintainer__ = "leifj@sunet.se" -__status__ = "Production" -__version__ = pkg_resources.require("pyFF")[0].version +__version__ = importlib.metadata.version('pyFF') diff --git a/src/pyff/api.py b/src/pyff/api.py index 1050efbf..8b443409 100644 --- a/src/pyff/api.py +++ b/src/pyff/api.py @@ -4,7 +4,6 @@ from json import dumps from typing import Any, Dict, Generator, Iterable, List, Mapping, Optional, Tuple -import pkg_resources import pyramid.httpexceptions as exc import pytz import requests @@ -26,12 +25,13 @@ from pyff.resource import Resource from pyff.samlmd import entity_display_name from pyff.utils import b2u, dumptree, hash_id, json_serializer, utc_now +from pyff import __version__ log = get_log(__name__) class NoCache(object): - """ Dummy implementation for when caching isn't enabled """ + """Dummy implementation for when caching isn't enabled""" def __init__(self) -> None: pass @@ -70,7 +70,7 @@ def status_handler(request: Request) -> Response: if 'Validation Errors' in r.info and r.info['Validation Errors']: d[r.url] = r.info['Validation Errors'] _status = dict( - version=pkg_resources.require("pyFF")[0].version, + version=__version__, invalids=d, icon_store=dict(size=request.registry.md.icon_store.size()), jobs=[dict(id=j.id, next_run_time=j.next_run_time) for j in request.registry.scheduler.get_jobs()], @@ -163,7 +163,7 @@ def process_handler(request: Request) -> Response: _ctypes = {'xml': 'application/samlmetadata+xml;application/xml;text/xml', 'json': 'application/json'} def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional[str]]: - """ Split a path into a base component and an extension. 
""" + """Split a path into a base component and an extension.""" if x is not None: x = x.strip() @@ -214,7 +214,7 @@ def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional pfx = request.registry.aliases.get(alias, None) if pfx is None: log.debug("alias {} not found - passing to storage lookup".format(alias)) - path=alias #treat as path + path = alias # treat as path # content_negotiation_policy is one of three values: # 1. extension - current default, inspect the path and if it ends in @@ -478,7 +478,7 @@ def cors_headers(request: Request, response: Response) -> None: { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'POST,GET,DELETE,PUT,OPTIONS', - 'Access-Control-Allow-Headers': ('Origin, Content-Type, Accept, ' 'Authorization'), + 'Access-Control-Allow-Headers': ('Origin, Content-Type, Accept, Authorization'), 'Access-Control-Allow-Credentials': 'true', 'Access-Control-Max-Age': '1728000', } diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index c39abfda..51cd14ef 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -6,9 +6,8 @@ import tempfile from unittest import TestCase -import pkg_resources +import importlib.resources import six - from pyff import __version__ as pyffversion # range of ports where available ports can be found @@ -118,7 +117,6 @@ def _p(args, outf=None, ignore_exit=False): class SignerTestCase(TestCase): - datadir = None private_keyspec = None public_keyspec = None @@ -128,7 +126,10 @@ def sys_exit(self, code): @classmethod def setUpClass(cls): - cls.datadir = pkg_resources.resource_filename(__name__, 'data') + with importlib.resources.path( + __name__, 'data' + ) as context: # We just want the path for now to be compatible downstream + cls.datadir = context.as_posix() cls.private_keyspec = tempfile.NamedTemporaryFile('w').name cls.public_keyspec = tempfile.NamedTemporaryFile('w').name From e67185a1e9fa3a2ca1431f1171a3f330396f12e4 Mon Sep 17 00:00:00 2001 
From: Mikael Frykholm Date: Tue, 8 Apr 2025 15:34:48 +0200 Subject: [PATCH 4/5] Make it work with python3.9. --- pyproject.toml | 4 ++-- src/pyff/test/__init__.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a9d009aa..317d1cb1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "pyFF" version = "2.1.5" readme = "README.rst" description = "Federation Feeder" -requires-python = ">=3.7" +requires-python = ">=3.9" license = {file = "LICENSE"} authors = [ @@ -19,7 +19,7 @@ maintainers = [ [tool.ruff] # Allow lines to be as long as 120. line-length = 120 -target-version = "py37" +target-version = "py39" [tool.ruff.format] quote-style = "preserve" diff --git a/src/pyff/test/__init__.py b/src/pyff/test/__init__.py index 51cd14ef..be32c588 100644 --- a/src/pyff/test/__init__.py +++ b/src/pyff/test/__init__.py @@ -126,10 +126,10 @@ def sys_exit(self, code): @classmethod def setUpClass(cls): - with importlib.resources.path( - __name__, 'data' - ) as context: # We just want the path for now to be compatible downstream - cls.datadir = context.as_posix() + cls.datadir = str(importlib.resources.files( + __name__, + ).joinpath('data')) + cls.private_keyspec = tempfile.NamedTemporaryFile('w').name cls.public_keyspec = tempfile.NamedTemporaryFile('w').name From 00fda464aa7abf9c94d67aefade0cd6635f9bbd7 Mon Sep 17 00:00:00 2001 From: Mikael Frykholm Date: Tue, 8 Apr 2025 16:11:10 +0200 Subject: [PATCH 5/5] ubuntu-20.04 is deprecated by github runners. 
--- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 20f9e0b4..aa18e2d9 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: strategy: fail-fast: false matrix: - os: ["ubuntu-24.04", "ubuntu-22.04", "ubuntu-20.04"] + os: ["ubuntu-24.04", "ubuntu-22.04"] python: ["3.9", "3.10", "3.11", "3.12"] runs-on: ${{ matrix.os }} steps: