Skip to content

Commit 1634258

Browse files
authored
Enable more Ruff rules (inventree#7930)
* bump version * Add more checks * add simplify rules * Add RUF rules * small perf improvements * pylint checks * more style fixes * fix a number of A002 cases * fix A001 cases * disable unsafe fixes * remove unneeded branches fixes SIM102 * re-enable .keys for specific case * Revert "remove unneeded branches" This reverts commit f74d41b. * fix reference
1 parent bcbbae0 commit 1634258

File tree

127 files changed

+525
-739
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

127 files changed

+525
-739
lines changed

.github/scripts/check_js_templates.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ def check_prohibited_tags(data):
7171
for filename in pathlib.Path(js_i18n_dir).rglob('*.js'):
7272
print(f"Checking file 'translated/{os.path.basename(filename)}':")
7373

74-
with open(filename, 'r') as js_file:
74+
with open(filename, encoding='utf-8') as js_file:
7575
data = js_file.readlines()
7676

7777
errors += check_invalid_tag(data)
@@ -81,7 +81,7 @@ def check_prohibited_tags(data):
8181
print(f"Checking file 'dynamic/{os.path.basename(filename)}':")
8282

8383
# Check that the 'dynamic' files do not contains any translated strings
84-
with open(filename, 'r') as js_file:
84+
with open(filename, encoding='utf-8') as js_file:
8585
data = js_file.readlines()
8686

8787
invalid_tags = ['blocktrans', 'blocktranslate', 'trans', 'translate']

.github/scripts/check_migration_files.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@
2020
if len(migrations) == 0:
2121
sys.exit(0)
2222

23-
print('There are {n} unstaged migration files:'.format(n=len(migrations)))
23+
print(f'There are {len(migrations)} unstaged migration files:')
2424

2525
for m in migrations:
26-
print(' - {m}'.format(m=m))
26+
print(f' - {m}')
2727

2828
sys.exit(len(migrations))

.github/scripts/version_check.py

+4-7
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ def check_version_number(version_string, allow_duplicate=False):
8989

9090
if release > version_tuple:
9191
highest_release = False
92-
print(f'Found newer release: {str(release)}')
92+
print(f'Found newer release: {release!s}')
9393

9494
return highest_release
9595

@@ -134,7 +134,7 @@ def check_version_number(version_string, allow_duplicate=False):
134134

135135
version = None
136136

137-
with open(version_file, 'r') as f:
137+
with open(version_file, encoding='utf-8') as f:
138138
text = f.read()
139139

140140
# Extract the InvenTree software version
@@ -175,10 +175,7 @@ def check_version_number(version_string, allow_duplicate=False):
175175
print(f"Version number '{version}' does not match tag '{version_tag}'")
176176
sys.exit
177177

178-
if highest_release:
179-
docker_tags = [version_tag, 'stable']
180-
else:
181-
docker_tags = [version_tag]
178+
docker_tags = [version_tag, 'stable'] if highest_release else [version_tag]
182179

183180
elif GITHUB_REF_TYPE == 'branch':
184181
# Otherwise we know we are targeting the 'master' branch
@@ -202,7 +199,7 @@ def check_version_number(version_string, allow_duplicate=False):
202199
target_repos = [REPO.lower(), f'ghcr.io/{REPO.lower()}']
203200

204201
# Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
205-
with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
202+
with open(os.getenv('GITHUB_ENV'), 'a', encoding='utf-8') as env_file:
206203
# Construct tag string
207204
tag_list = [[f'{r}:{t}' for t in docker_tags] for r in target_repos]
208205
tags = ','.join(itertools.chain(*tag_list))

.pre-commit-config.yaml

+4-3
Original file line numberDiff line numberDiff line change
@@ -17,17 +17,18 @@ repos:
1717
- id: check-yaml
1818
- id: mixed-line-ending
1919
- repo: https://github.com/astral-sh/ruff-pre-commit
20-
rev: v0.5.1
20+
rev: v0.6.1
2121
hooks:
2222
- id: ruff-format
2323
args: [--preview]
2424
- id: ruff
2525
args: [
2626
--fix,
27+
# --unsafe-fixes,
2728
--preview
2829
]
2930
- repo: https://github.com/astral-sh/uv-pre-commit
30-
rev: 0.2.13
31+
rev: 0.2.37
3132
hooks:
3233
- id: pip-compile
3334
name: pip-compile requirements-dev.in
@@ -77,7 +78,7 @@ repos:
7778
- "prettier@^2.4.1"
7879
- "@trivago/prettier-plugin-sort-imports"
7980
- repo: https://github.com/pre-commit/mirrors-eslint
80-
rev: "v9.6.0"
81+
rev: "v9.9.0"
8182
hooks:
8283
- id: eslint
8384
additional_dependencies:

docs/ci/check_mkdocs_config.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010

1111
config_file = os.path.join(tld, 'mkdocs.yml')
1212

13-
with open(config_file, 'r') as f:
13+
with open(config_file, encoding='utf-8') as f:
1414
data = yaml.load(f, yaml.BaseLoader)
1515

1616
assert data['strict'] == 'true'

docs/docs/hooks.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ def make_request(url, headers):
5757
versions = sorted(versions, key=lambda x: StrictVersion(x['version']), reverse=True)
5858

5959
# Add "latest" version first
60-
if not any((x['title'] == 'latest' for x in versions)):
60+
if not any(x['title'] == 'latest' for x in versions):
6161
versions.insert(
6262
0,
6363
{
@@ -70,7 +70,7 @@ def make_request(url, headers):
7070
# Ensure we have the 'latest' version
7171
current_version = os.environ.get('READTHEDOCS_VERSION', None)
7272

73-
if current_version and not any((x['title'] == current_version for x in versions)):
73+
if current_version and not any(x['title'] == current_version for x in versions):
7474
versions.append({
7575
'version': current_version,
7676
'title': current_version,
@@ -82,7 +82,7 @@ def make_request(url, headers):
8282
print('Discovered the following versions:')
8383
print(versions)
8484

85-
with open(output_filename, 'w') as file:
85+
with open(output_filename, 'w', encoding='utf-8') as file:
8686
json.dump(versions, file, indent=2)
8787

8888

@@ -100,7 +100,7 @@ def get_release_data():
100100
# Release information has been cached to file
101101

102102
print("Loading release information from 'releases.json'")
103-
with open(json_file) as f:
103+
with open(json_file, encoding='utf-8') as f:
104104
return json.loads(f.read())
105105

106106
# Download release information via the GitHub API
@@ -127,7 +127,7 @@ def get_release_data():
127127
page += 1
128128

129129
# Cache these results to file
130-
with open(json_file, 'w') as f:
130+
with open(json_file, 'w', encoding='utf-8') as f:
131131
print("Saving release information to 'releases.json'")
132132
f.write(json.dumps(releases))
133133

@@ -173,7 +173,7 @@ def on_config(config, *args, **kwargs):
173173
# Add *all* readthedocs related keys
174174
readthedocs = {}
175175

176-
for key in os.environ.keys():
176+
for key in os.environ:
177177
if key.startswith('READTHEDOCS_'):
178178
k = key.replace('READTHEDOCS_', '').lower()
179179
readthedocs[k] = os.environ[key]

docs/extract_schema.py

+6-8
Original file line numberDiff line numberDiff line change
@@ -46,17 +46,15 @@ def top_level_path(path: str) -> str:
4646

4747
key = path.split('/')[1]
4848

49-
if key in SPECIAL_PATHS.keys():
49+
if key in SPECIAL_PATHS:
5050
return key
5151

5252
return GENERAL_PATH
5353

5454

5555
def generate_schema_file(key: str) -> None:
5656
"""Generate a schema file for the provided key."""
57-
description = (
58-
SPECIAL_PATHS[key] if key in SPECIAL_PATHS else 'General API Endpoints'
59-
)
57+
description = SPECIAL_PATHS.get(key, 'General API Endpoints')
6058

6159
output = f"""
6260
---
@@ -75,7 +73,7 @@ def generate_schema_file(key: str) -> None:
7573

7674
print('Writing schema file to:', output_file)
7775

78-
with open(output_file, 'w') as f:
76+
with open(output_file, 'w', encoding='utf-8') as f:
7977
f.write(output)
8078

8179

@@ -121,7 +119,7 @@ def generate_index_file(version: str):
121119

122120
print('Writing index file to:', output_file)
123121

124-
with open(output_file, 'w') as f:
122+
with open(output_file, 'w', encoding='utf-8') as f:
125123
f.write(output)
126124

127125

@@ -173,7 +171,7 @@ def parse_api_file(filename: str):
173171
174172
The intent is to make the API schema easier to peruse on the documentation.
175173
"""
176-
with open(filename, 'r') as f:
174+
with open(filename, encoding='utf-8') as f:
177175
data = yaml.safe_load(f)
178176

179177
paths = data['paths']
@@ -213,7 +211,7 @@ def parse_api_file(filename: str):
213211

214212
output_file = os.path.abspath(output_file)
215213

216-
with open(output_file, 'w') as f:
214+
with open(output_file, 'w', encoding='utf-8') as f:
217215
yaml.dump(output, f)
218216

219217
# Generate a markdown file for the schema

docs/main.py

+12-11
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
here = os.path.dirname(__file__)
1717
settings_file = os.path.join(here, 'inventree_settings.json')
1818

19-
with open(settings_file, 'r') as sf:
19+
with open(settings_file, encoding='utf-8') as sf:
2020
settings = json.load(sf)
2121

2222
GLOBAL_SETTINGS = settings['global']
@@ -27,7 +27,7 @@ def get_repo_url(raw=False):
2727
"""Return the repository URL for the current project."""
2828
mkdocs_yml = os.path.join(os.path.dirname(__file__), 'mkdocs.yml')
2929

30-
with open(mkdocs_yml, 'r') as f:
30+
with open(mkdocs_yml, encoding='utf-8') as f:
3131
mkdocs_config = yaml.safe_load(f)
3232
repo_name = mkdocs_config['repo_name']
3333

@@ -47,7 +47,7 @@ def check_link(url) -> bool:
4747

4848
# Keep a local cache file of URLs we have already checked
4949
if os.path.exists(CACHE_FILE):
50-
with open(CACHE_FILE, 'r') as f:
50+
with open(CACHE_FILE, encoding='utf-8') as f:
5151
cache = f.read().splitlines()
5252

5353
if url in cache:
@@ -59,7 +59,7 @@ def check_link(url) -> bool:
5959
response = requests.head(url, timeout=5000)
6060
if response.status_code == 200:
6161
# Update the cache file
62-
with open(CACHE_FILE, 'a') as f:
62+
with open(CACHE_FILE, 'a', encoding='utf-8') as f:
6363
f.write(f'{url}\n')
6464

6565
return True
@@ -177,7 +177,7 @@ def invoke_commands():
177177

178178
assert subprocess.call(command, shell=True) == 0
179179

180-
with open(output, 'r') as f:
180+
with open(output, encoding='utf-8') as f:
181181
content = f.read()
182182

183183
return content
@@ -200,12 +200,13 @@ def listimages(subdir):
200200
return assets
201201

202202
@env.macro
203-
def includefile(filename: str, title: str, format: str = ''):
203+
def includefile(filename: str, title: str, fmt: str = ''):
204204
"""Include a file in the documentation, in a 'collapse' block.
205205
206206
Arguments:
207207
- filename: The name of the file to include (relative to the top-level directory)
208208
- title:
209+
- fmt:
209210
"""
210211
here = os.path.dirname(__file__)
211212
path = os.path.join(here, '..', filename)
@@ -214,11 +215,11 @@ def includefile(filename: str, title: str, format: str = ''):
214215
if not os.path.exists(path):
215216
raise FileNotFoundError(f'Required file {path} does not exist.')
216217

217-
with open(path, 'r') as f:
218+
with open(path, encoding='utf-8') as f:
218219
content = f.read()
219220

220221
data = f'??? abstract "{title}"\n\n'
221-
data += f' ```{format}\n'
222+
data += f' ```{fmt}\n'
222223
data += textwrap.indent(content, ' ')
223224
data += '\n\n'
224225
data += ' ```\n\n'
@@ -233,15 +234,15 @@ def templatefile(filename):
233234
'src', 'backend', 'InvenTree', 'report', 'templates', filename
234235
)
235236

236-
return includefile(fn, f'Template: {base}', format='html')
237+
return includefile(fn, f'Template: {base}', fmt='html')
237238

238239
@env.macro
239240
def rendersetting(setting: dict):
240241
"""Render a provided setting object into a table row."""
241242
name = setting['name']
242243
description = setting['description']
243-
default = setting.get('default', None)
244-
units = setting.get('units', None)
244+
default = setting.get('default')
245+
units = setting.get('units')
245246

246247
return f'| {name} | {description} | {default if default is not None else ""} | {units if units is not None else ""} |'
247248

pyproject.toml

+19-2
Original file line numberDiff line numberDiff line change
@@ -20,13 +20,30 @@ src = ["src/backend/InvenTree"]
2020
"__init__.py" = ["D104"]
2121

2222
[tool.ruff.lint]
23-
select = ["A", "B", "C4", "D", "I", "N", "F"]
23+
select = ["A", "B", "C", "C4", "D", "F", "I", "N", "SIM", "PIE", "PLE", "PLW", "RUF", "UP", "W"]
2424
# Things that should be enabled in the future:
2525
# - LOG
2626
# - DJ # for Django stuff
2727
# - S # for security stuff (bandit)
2828

2929
ignore = [
30+
"PLE1205",
31+
# - PLE1205 - Too many arguments for logging format string
32+
"PLW2901",
33+
# - PLW2901 - Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
34+
"PLW0602","PLW0603","PLW0604", # global variable things
35+
"RUF015",
36+
# - RUF015 - Prefer next({iterable}) over single element slice
37+
"RUF012",
38+
# - RUF012 - Mutable class attributes should be annotated with typing.ClassVar
39+
"SIM117",
40+
# - SIM117 - Use a single with statement with multiple contexts instead of nested with statements
41+
"SIM102",
42+
# - SIM102 - Use a single if statement instead of nested if statements
43+
"SIM105",
44+
# - SIM105 - Use contextlib.suppress({exception}) instead of try-except-pass
45+
"C901",
46+
# - C901 - function is too complex
3047
"N999",
3148
# - N802 - function name should be lowercase
3249
"N802",
@@ -42,7 +59,7 @@ ignore = [
4259
"B904",
4360

4461
# Remove fast
45-
"A001", "A002","A003","B018"
62+
"A002", "B018"
4663
]
4764

4865
[tool.ruff.lint.pydocstyle]

src/backend/InvenTree/InvenTree/admin.py

+7-5
Original file line numberDiff line numberDiff line change
@@ -104,14 +104,16 @@ def before_import(self, dataset, using_transactions, dry_run, **kwargs):
104104
attribute = getattr(field, 'attribute', field_name)
105105

106106
# Check if the associated database field is a non-nullable string
107-
if db_field := db_fields.get(attribute):
108-
if (
107+
if (
108+
(db_field := db_fields.get(attribute))
109+
and (
109110
isinstance(db_field, CharField)
110111
and db_field.blank
111112
and not db_field.null
112-
):
113-
if column not in self.CONVERT_NULL_FIELDS:
114-
self.CONVERT_NULL_FIELDS.append(column)
113+
)
114+
and column not in self.CONVERT_NULL_FIELDS
115+
):
116+
self.CONVERT_NULL_FIELDS.append(column)
115117

116118
return super().before_import(dataset, using_transactions, dry_run, **kwargs)
117119

0 commit comments

Comments
 (0)