Commit b3ba5fa

Merge pull request #144 from stackhpc/upstream/master-2025-12-01
Synchronise master with upstream
2 parents 2ef8b06 + 44dbd10 commit b3ba5fa

12 files changed (+174, -116 lines)

global-requirements.txt

Lines changed: 0 additions & 1 deletion
@@ -150,7 +150,6 @@ pyzabbix # LGPL
 qrcode # BSD
 rbd-iscsi-client # Apache-2.0
 requests!=2.20.0,!=2.24.0 # Apache-2.0
-requests-aws # BSD License (3 clause)
 requests-kerberos # ISC
 requestsexceptions # Apache-2.0
 rfc3986 # Apache-2.0

openstack_requirements/check.py

Lines changed: 27 additions & 24 deletions
@@ -20,6 +20,7 @@
 
 from packaging import markers
 
+from openstack_requirements.project import Project
 from openstack_requirements import requirement
 
 MIN_PY_VERSION = '3.5'
@@ -32,23 +33,27 @@
 
 
 class RequirementsList:
-    def __init__(self, name, project):
+    def __init__(self, name: str, project: Project) -> None:
         self.name = name
-        self.reqs_by_file = {}
+        self.reqs_by_file: dict[str, dict[str, set[str]]] = {}
         self.project = project
         self.failed = False
 
     @property
-    def reqs(self):
+    def reqs(self) -> dict[str, set[str]]:
+        """Flattens the list of per-file reqs."""
         return {k: v for d in self.reqs_by_file.values() for k, v in d.items()}
 
-    def extract_reqs(self, content, strict):
+    def extract_reqs(
+        self, content: list[str], strict: bool
+    ) -> dict[str, set[str]]:
         reqs = collections.defaultdict(set)
-        parsed = requirement.parse(content)
+        parsed = requirement.parse_lines(content)
         for name, entries in parsed.items():
             if not name:
                 # Comments and other unprocessed lines
                 continue
+
             list_reqs = [r for (r, line) in entries]
             # Strip the comments out before checking if there are duplicates
             list_reqs_stripped = [r._replace(comment='') for r in list_reqs]
@@ -64,7 +69,7 @@ def extract_reqs(self, content, strict):
             reqs[name].update(list_reqs)
         return reqs
 
-    def process(self, strict=True):
+    def process(self, strict: bool = True) -> None:
         """Convert the project into ready to use data.
 
         - an iterable of requirement sets to check
@@ -99,18 +104,17 @@ def process(self, strict=True):
             )
 
             print(f"Processing {fname} (requirements)")
-            if strict and not content.endswith('\n'):
-                print(
-                    f"Requirements file {fname} does not end with a newline.",
-                    file=sys.stderr,
-                )
-            self.reqs_by_file[fname] = self.extract_reqs(content, strict)
+            self.reqs_by_file[f'{fname} (dependencies)'] = self.extract_reqs(
+                content, strict
+            )
 
         for fname, extras in self.project['extras'].items():
             print(f"Processing {fname} (extras)")
             for name, content in extras.items():
-                print(f"Processing .[{name}]")
-                self.reqs_by_file[name] = self.extract_reqs(content, strict)
+                print(f" Processing .[{name}]")
+                self.reqs_by_file[f'{fname} (.[{name}] extra)'] = (
+                    self.extract_reqs(content, strict)
+                )
 
 
 def _get_exclusions(req):
@@ -191,20 +195,19 @@ def _is_requirement_in_global_reqs(
             difference = req_exclusions - global_exclusions
             print(
                 f"ERROR: Requirement for package {local_req.package} "
-                "excludes a version not excluded in the "
-                "global list.\n"
-                f" Local settings : {req_exclusions}\n"
-                f" Global settings: {global_exclusions}\n"
-                f" Unexpected : {difference}"
+                f"excludes a version not excluded in the "
+                f"global list.\n"
+                f" Local settings : {list(req_exclusions)}\n"
+                f" Global settings: {list(global_exclusions)}\n"
+                f" Unexpected : {list(difference)}"
             )
             return False
 
     print(
-        "ERROR: "
-        f"Could not find a global requirements entry to match package {local_req.package}. "
-        "If the package is already included in the global list, "
-        "the name or platform markers there may not match the local "
-        "settings."
+        f"ERROR: Could not find a global requirements entry to match package "
+        f"{local_req.package}. If the package is already included in the "
+        f"global list, the name or platform markers there may not match the "
+        f"local settings."
    )
    return False
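
For orientation, a minimal sketch (not part of the diff) of how the relabelled reqs_by_file keys behave after this change; the project data mirrors the new list-of-lines shape used in the updated test_check.py below:

from openstack_requirements import check

# hypothetical project data in the shape produced by project.read()
project_data = {
    'root': '/fake/root',
    'requirements': {'requirements.txt': ['requests>=2.0.0']},
    'extras': {'setup.cfg': {'test': ['pytest>=6.0.0']}},
}

req_list = check.RequirementsList('demo-project', project_data)
req_list.process(strict=False)

# keys now name the source file and the kind of requirement set, e.g.
# 'requirements.txt (dependencies)' and 'setup.cfg (.[test] extra)'
print(sorted(req_list.reqs_by_file))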

openstack_requirements/cmds/check_exists.py

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ def main(args=None):
         print(
             f'\nComparing {require_file} with global-requirements and upper-constraints'
         )
-        requirements = requirement.parse(data)
+        requirements = requirement.parse_lines(data)
         for name, spec_list in requirements.items():
             if not name or name in denylist:
                 continue

openstack_requirements/project.py

Lines changed: 75 additions & 28 deletions
@@ -19,6 +19,9 @@
 import errno
 import io
 import os
+import sys
+from typing import Any
+from typing import TypedDict
 
 try:
     # Python 3.11+
@@ -28,41 +31,73 @@
     import tomli as tomllib # type: ignore
 
 
-def _read_pyproject_toml(root):
+def _read_raw(root: str, filename: str) -> str | None:
+    try:
+        path = os.path.join(root, filename)
+        with open(path, encoding="utf-8") as f:
+            data = f.read()
+        if not data.endswith('\n'):
+            print(
+                f"Requirements file {filename} does not end with a "
+                f"newline.",
+                file=sys.stderr,
+            )
+        return data
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            return None
+
+        raise
+
+
+def _read_pyproject_toml(root: str) -> dict[str, Any] | None:
     data = _read_raw(root, 'pyproject.toml')
     if data is None:
         return None
 
     return tomllib.loads(data)
 
 
-def _read_pyproject_toml_requirements(root):
+def _read_requirements_txt(root: str, filename: str) -> list[str] | None:
+    data = _read_raw(root, filename)
+    if data is None:
+        return None
+
+    result = []
+    for line in data.splitlines():
+        # we only ignore comments and empty lines: everything else is
+        # handled later
+        line = line.strip()
+
+        if line.startswith('#') or not line:
+            continue
+
+        result.append(line)
+
+    return result
+
+
+def _read_pyproject_toml_requirements(root: str) -> list[str] | None:
     data = _read_pyproject_toml(root) or {}
 
     # projects may not have PEP-621 project metadata
     if 'project' not in data:
         return None
 
-    # FIXME(stephenfin): We should not be doing this, but the fix requires a
-    # larger change to do normalization here.
-    return '\n'.join(data['project'].get('dependencies', []))
+    return data['project'].get('dependencies', [])
 
 
-def _read_pyproject_toml_extras(root):
+def _read_pyproject_toml_extras(root: str) -> dict[str, list[str]] | None:
     data = _read_pyproject_toml(root) or {}
 
     # projects may not have PEP-621 project metadata
     if 'project' not in data:
         return None
 
-    # FIXME(stephenfin): As above, we should not be doing this.
-    return {
-        k: '\n'.join(v) for k, v in
-        data['project'].get('optional-dependencies', {}).items()
-    }
+    return data['project'].get('optional-dependencies', {})
 
 
-def _read_setup_cfg_extras(root):
+def _read_setup_cfg_extras(root: str) -> dict[str, list[str]] | None:
     data = _read_raw(root, 'setup.cfg')
     if data is None:
         return None
@@ -72,20 +107,32 @@ def _read_setup_cfg_extras(root):
     if not c.has_section('extras'):
         return None
 
-    return dict(c.items('extras'))
+    result: dict[str, list[str]] = {}
+    for extra, deps in c.items('extras'):
+        result[extra] = []
+        for line in deps.splitlines():
+            # we only ignore comments and empty lines: everything else is
+            # handled later
+            line = line.strip()
 
+            if line.startswith('#') or not line:
+                continue
 
-def _read_raw(root, filename):
-    try:
-        path = os.path.join(root, filename)
-        with open(path, encoding="utf-8") as f:
-            return f.read()
-    except OSError as e:
-        if e.errno != errno.ENOENT:
-            raise
+            result[extra].append(line)
+
+    return result
+
+
+class Project(TypedDict):
+    # The root directory path
+    root: str
+    # A mapping of filename to the contents of that file
+    requirements: dict[str, list[str]]
+    # A mapping of filename to extras from that file
+    extras: dict[str, dict[str, list[str]]]
 
 
-def read(root):
+def read(root: str) -> Project:
     """Read into memory the packaging data for the project at root.
 
     :param root: A directory path.
@@ -96,11 +143,13 @@ def read(root):
         requirements
     """
    # Store root directory and installer-related files for later processing
-    result = {'root': root}
+    result: Project = {
+        'root': root,
+        'requirements': {},
+        'extras': {},
+    }
 
    # Store requirements
-    result['requirements'] = {}
-
    if (data := _read_pyproject_toml_requirements(root)) is not None:
        result['requirements']['pyproject.toml'] = data
 
@@ -116,12 +165,10 @@
         'test-requirements-py2.txt',
         'test-requirements-py3.txt',
     ]:
-        if (data := _read_raw(root, filename)) is not None:
+        if (data := _read_requirements_txt(root, filename)) is not None:
             result['requirements'][filename] = data
 
     # Store extras
-    result['extras'] = {}
-
     if (data := _read_setup_cfg_extras(root)) is not None:
         result['extras']['setup.cfg'] = data
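
As a rough illustration (assumed invocation, not part of the commit), read() now returns data conforming to the new Project TypedDict, with requirements and extras mapped to lists of requirement lines rather than newline-joined strings:

from openstack_requirements import project

# assumes the current directory holds a project checkout
prj = project.read('.')

# e.g. {'requirements.txt': ['pbr>=2.0.0', 'requests>=2.20.0'], ...}
print(prj['requirements'])

# e.g. {'setup.cfg': {'test': ['pytest>=6.0.0', ...]}} -- per-file extras
print(prj['extras'])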

openstack_requirements/requirement.py

Lines changed: 12 additions & 4 deletions
@@ -99,6 +99,10 @@ def parse(content, permit_urls=False):
     return to_dict(to_reqs(content, permit_urls=permit_urls))
 
 
+def parse_lines(lines, permit_urls=False):
+    return to_dict(to_req(line, permit_urls=permit_urls) for line in lines)
+
+
 def parse_line(req_line, permit_urls=False):
     """Parse a single line of a requirements file.
 
@@ -201,6 +205,13 @@ def _pass_through(req_line, permit_urls=False):
     )
 
 
+def to_req(line, permit_urls=False):
+    if _pass_through(line, permit_urls=permit_urls):
+        return (None, line)
+    else:
+        return (parse_line(line, permit_urls=permit_urls), line)
+
+
 def to_reqs(content, permit_urls=False):
     for content_line in content.splitlines(True):
         req_line = content_line.strip()
@@ -209,10 +220,7 @@ def to_reqs(content, permit_urls=False):
         if req_line.startswith('#') or not req_line:
             continue
 
-        if _pass_through(req_line, permit_urls=permit_urls):
-            yield None, content_line
-        else:
-            yield parse_line(req_line, permit_urls=permit_urls), content_line
+        yield to_req(req_line, permit_urls)
 
 
 def check_reqs_bounds_policy(global_reqs):
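
A small usage sketch (illustrative, not from the commit) of how the new parse_lines() relates to parse(): both build an equivalent mapping of package name to (parsed requirement, original line) entries, but parse() takes one newline-joined string while parse_lines() takes an already-split iterable of lines, matching what project.read() now returns:

from openstack_requirements import requirement

# old entry point: a single text blob, split internally by to_reqs()
by_name = requirement.parse("requests!=2.20.0 # Apache-2.0\n")

# new entry point: pre-split, pre-stripped lines (comments and blanks are
# already filtered out by project.read())
by_name_too = requirement.parse_lines(["requests!=2.20.0 # Apache-2.0"])

for name, entries in by_name_too.items():
    # each entry pairs the parsed requirement with its original line
    reqs = [r for (r, line) in entries]
    print(name, reqs)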

openstack_requirements/tests/files/upper-constraints.txt

Lines changed: 0 additions & 1 deletion
@@ -29,7 +29,6 @@ oslo.service===1.20.0
 django-appconf===1.0.2
 pykerberos===1.1.14
 certifi===2017.1.23
-requests-aws===0.1.8
 alabaster===0.7.10
 pbr===2.0.0
 microversion-parse===0.1.4

openstack_requirements/tests/test_check.py

Lines changed: 8 additions & 8 deletions
@@ -30,24 +30,24 @@ def test_extras__setup_cfg(self):
         project_data = {
             'root': '/fake/root',
             'requirements': {
-                'requirements.txt': 'requests>=2.0.0\n'
+                'requirements.txt': ['requests>=2.0.0'],
             },
             'extras': {
                 'setup.cfg': {
-                    'test': 'pytest>=6.0.0\nflake8>=3.8.0\n',
-                    'dev': 'black>=24.0.0\nmypy>=0.900\n'
+                    'test': ['pytest>=6.0.0', 'flake8>=3.8.0'],
+                    'dev': ['black>=24.0.0', 'mypy>=0.900'],
                 }
-            }
+            },
         }
 
         req_list = check.RequirementsList('test-project', project_data)
         req_list.process(strict=False)
 
-        self.assertIn('test', req_list.reqs_by_file)
-        self.assertIn('dev', req_list.reqs_by_file)
+        self.assertIn('setup.cfg (.[test] extra)', req_list.reqs_by_file)
+        self.assertIn('setup.cfg (.[dev] extra)', req_list.reqs_by_file)
 
-        test_reqs = req_list.reqs_by_file['test']
-        dev_reqs = req_list.reqs_by_file['dev']
+        test_reqs = req_list.reqs_by_file['setup.cfg (.[test] extra)']
+        dev_reqs = req_list.reqs_by_file['setup.cfg (.[dev] extra)']
 
         self.assertEqual(len(test_reqs), 2)
         self.assertIn('pytest', test_reqs)
