
Commit c4b1fac

Run formatting and linting

Author: Mathijs de Boer (committed)
Signed-off-by: Boer-41 <m.deboer-41@umcutrecht.nl>
1 parent 0a06b13 · commit c4b1fac

8 files changed, +35 -42 lines changed


python/evdplanner/cli/generate/target.py

Lines changed: 1 addition & 1 deletion
@@ -235,7 +235,7 @@ def target(
         visible_points=True,
     )

-    markups.save(output.parent / f"EVD.mrk.json")
+    markups.save(output.parent / "EVD.mrk.json")

     if verbosity > 0:
         left_distance = (left_tp - left_kp).length
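The only change here drops the `f` prefix from a string literal that contains no placeholders, a pattern linters such as ruff flag as F541 (the commit message does not name the tool, so take the rule reference as an assumption). A minimal sketch of why the prefix is redundant:

# Illustrative only; the filename is taken from the diff above.
name = "EVD"
assert f"EVD.mrk.json" == "EVD.mrk.json"      # no placeholders: the f prefix changes nothing
assert f"{name}.mrk.json" == "EVD.mrk.json"   # an f-string is only needed when interpolating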

python/evdplanner/cli/plot/times.py

Lines changed: 1 addition & 1 deletion
@@ -186,7 +186,7 @@ def times(
     logger.info(f"Creating CSV file with timings. {len(df)} entries.")
     df = pd.DataFrame(df)

-    logger.info(f"Saving CSV file with timings.")
+    logger.info("Saving CSV file with timings.")
     df.to_csv(output, index=False)

     total_times = np.array([x["total"] for x in evd_times])

python/evdplanner/cli/validate/model.py

Lines changed: 0 additions & 1 deletion
@@ -2,7 +2,6 @@

 import click

-from evdplanner.network.transforms.keypoint_flip import flip_keypoints


 @click.command()
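This hunk only deletes an import that is never referenced in the module. A minimal sketch of the situation, with a placeholder command body (assuming a ruff- or flake8-style linter, which reports unused imports as F401; the commit does not say which tool was run):

# Hypothetical example, not the repository's module.
import click                       # referenced below, so it stays
# from somewhere import helper     # never referenced: a lint --fix pass removes it

@click.command()
def validate() -> None:
    """Placeholder command body for illustration."""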

python/evdplanner/network/training/utils.py

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@


 def get_loss_fn(
-    loss: nn.Module | Callable[[Tensor, Tensor], Tensor] | str
+    loss: nn.Module | Callable[[Tensor, Tensor], Tensor] | str,
 ) -> nn.Module | Callable[[Tensor, Tensor], Tensor]:
     """
     Get the loss function.
@@ -56,7 +56,7 @@ def get_loss_fn(


 def get_metric_fn(
-    metric: nn.Module | Callable[[Tensor, Tensor], Tensor] | str
+    metric: nn.Module | Callable[[Tensor, Tensor], Tensor] | str,
 ) -> nn.Module | Callable[[Tensor, Tensor], Tensor]:
     """
     Get the metric function.
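The added trailing comma follows the "magic trailing comma" convention used by formatters such as Black and the Ruff formatter (an assumption; the tool is not named in the commit): with the comma, the exploded one-parameter-per-line layout is preserved on subsequent format runs. A sketch with placeholder names:

# Hypothetical functions, not from the repository.
def may_be_collapsed(
    loss: str
) -> str:
    # without a trailing comma, a formatter may fold this signature onto one line
    return loss

def stays_exploded(
    loss: str,  # trailing comma: keep one parameter per line
) -> str:
    return loss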

python/evdplanner/notebooks/kakarla.ipynb

Lines changed: 4 additions & 8 deletions
@@ -12,7 +12,6 @@
 "import matplotlib.pyplot as plt\n",
 "import pandas as pd\n",
 "import seaborn as sns\n",
-"from matplotlib.ticker import MaxNLocator\n",
 "from scipy.stats import pearsonr\n",
 "\n",
 "\n",
@@ -54,7 +53,7 @@
 "scores = df[df[\"RaterID\"] == \"Majority\"][\"Score\"].value_counts()\n",
 "total = scores.sum()\n",
 "for score, count in scores.items():\n",
-"    print(f\"{score}: {count} ({count/total:.2%})\")"
+"    print(f\"{score}: {count} ({count / total:.2%})\")"
 ]
 },
 {
@@ -148,7 +147,7 @@
 "    print(f\"Effect size: {effect_size}, Power: {power:.3f}\")\n",
 "\n",
 "# Plot the power curve\n",
-"effects, powers = zip(*effects)\n",
+"effects, powers = zip(*effects, strict=False)\n",
 "plt.plot(effects, powers, marker=\"o\")\n",
 "plt.xlabel(\"Effect size\")\n",
 "plt.ylabel(\"Power\")\n",
@@ -163,7 +162,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import statsmodels.api as sm\n",
 "from statsmodels.miscmodels.ordinal_model import OrderedModel\n",
 "\n",
 "# Ordinal Logistic Regression\n",
@@ -233,7 +231,7 @@
 "print(labels)\n",
 "\n",
 "# Sort by label name\n",
-"values, labels = zip(*sorted(zip(values, labels), key=lambda x: x[1]))\n",
+"values, labels = zip(*sorted(zip(values, labels, strict=False), key=lambda x: x[1]), strict=False)\n",
 "\n",
 "plt.figure(figsize=(4.5, 4.5))\n",
 "plt.pie(\n",
@@ -265,9 +263,7 @@
 "source": [
 "majority_df = df.loc[df[\"RaterID\"] == \"Majority\"]\n",
 "majority_df = majority_df.drop(columns=[\"RaterID\"])\n",
-"majority_df.to_csv(\n",
-"    Path(r\"S:\\E_ResearchData\\evdplanner\\MajorityVoting.csv\"), index=False\n",
-")"
+"majority_df.to_csv(Path(r\"S:\\E_ResearchData\\evdplanner\\MajorityVoting.csv\"), index=False)"
 ]
 },
 {
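Besides dropping unused imports and spacing operators inside f-strings ({count / total}), this notebook gains explicit strict= arguments on zip(), which linters such as ruff report as B905 on Python 3.10+ (an assumption; the rule set is not stated in the commit). A short sketch of what strict controls, with illustrative values:

# Illustrative values only.
values = [3, 1, 2]
labels = ["b", "a", "c", "extra"]

print(list(zip(values, labels, strict=False)))  # [(3, 'b'), (1, 'a'), (2, 'c')] - extra item silently dropped
try:
    list(zip(values, labels, strict=True))      # Python 3.10+: mismatched lengths raise
except ValueError as exc:
    print(exc)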

python/evdplanner/notebooks/method_stability.ipynb

Lines changed: 24 additions & 24 deletions
@@ -21,9 +21,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from itertools import product\n",
 "from pathlib import Path\n",
-"from typing import Any\n",
 "from random import choice\n",
 "\n",
 "import numpy as np\n",
@@ -36,7 +34,7 @@
 "from evdplanner.generation import measure_kocher\n",
 "from evdplanner.geometry import Mesh\n",
 "from evdplanner.linalg import Vec3\n",
-"from evdplanner.markups import DisplaySettings, MarkupManager\n",
+"from evdplanner.markups import MarkupManager\n",
 "from evdplanner.rendering import find_target, Ray, IntersectionSort\n",
 "\n",
 "skin_mesh_file = \"mesh_skin.stl\"\n",
@@ -51,7 +49,7 @@
 "\n",
 "scores_file = Path(r\"S:\\E_ResearchData\\evdplanner\\MajorityVoting.csv\")\n",
 "\n",
-"set_verbosity(0)\n"
+"set_verbosity(0)"
 ]
 },
 {
@@ -110,7 +108,7 @@
 "    except:\n",
 "        print(f\"Skin Mesh or landmarks not found for patient {patient.name}.\")\n",
 "        continue\n",
-"    \n",
+"\n",
 "    gt_nasion = Vec3(*gt_landmarks.find_fiducial(\"Nasion\").position)\n",
 "    gt_left_ear = Vec3(*gt_landmarks.find_fiducial(\"Pre-Auricle Left\").position)\n",
 "    gt_right_ear = Vec3(*gt_landmarks.find_fiducial(\"Pre-Auricle Right\").position)\n",
@@ -125,14 +123,14 @@
 "\n",
 "    print(\"Finding GT target points...\")\n",
 "    gt_left_tp, _ = find_target(\n",
-"            ventricles_mesh,\n",
-"            gt_left_kp,\n",
-"            check_radially=check_radially,\n",
-"            radius=radius,\n",
-"            objective_distance_weight=objective_distance_weight,\n",
-"            thickness_threshold=thickness_threshold,\n",
-"            depth_threshold=depth_threshold,\n",
-"            )\n",
+"        ventricles_mesh,\n",
+"        gt_left_kp,\n",
+"        check_radially=check_radially,\n",
+"        radius=radius,\n",
+"        objective_distance_weight=objective_distance_weight,\n",
+"        thickness_threshold=thickness_threshold,\n",
+"        depth_threshold=depth_threshold,\n",
+"    )\n",
 "    gt_right_tp, _ = find_target(\n",
 "        ventricles_mesh,\n",
 "        gt_right_kp,\n",
@@ -154,21 +152,21 @@
 "        \"Patient\": patient.name,\n",
 "        \"Side\": \"Right\",\n",
 "    }\n",
-"    \n",
+"\n",
 "    wiggle_nasion = gt_nasion + Vec3(\n",
-"        np.random.uniform(-max_error, max_error), \n",
-"        np.random.uniform(-max_error, max_error), \n",
-"        np.random.uniform(-max_error, max_error)\n",
+"        np.random.uniform(-max_error, max_error),\n",
+"        np.random.uniform(-max_error, max_error),\n",
+"        np.random.uniform(-max_error, max_error),\n",
 "    )\n",
 "    wiggle_left_ear = gt_left_ear + Vec3(\n",
 "        np.random.uniform(-max_error, max_error),\n",
 "        np.random.uniform(-max_error, max_error),\n",
-"        np.random.uniform(-max_error, max_error)\n",
+"        np.random.uniform(-max_error, max_error),\n",
 "    )\n",
 "    wiggle_right_ear = gt_right_ear + Vec3(\n",
 "        np.random.uniform(-max_error, max_error),\n",
 "        np.random.uniform(-max_error, max_error),\n",
-"        np.random.uniform(-max_error, max_error)\n",
+"        np.random.uniform(-max_error, max_error),\n",
 "    )\n",
 "\n",
 "    # Project the wiggled points to the skin mesh\n",
@@ -188,7 +186,7 @@
 "    if wiggle_nasion is None or wiggle_left_ear is None or wiggle_right_ear is None:\n",
 "        print(f\"Intersection failed for patient {patient.name}.\")\n",
 "        continue\n",
-"    \n",
+"\n",
 "    wiggle_nasion = wiggle_nasion.position\n",
 "    wiggle_left_ear = wiggle_left_ear.position\n",
 "    wiggle_right_ear = wiggle_right_ear.position\n",
@@ -319,7 +317,7 @@
 "    constrained_layout=True,\n",
 ")\n",
 "\n",
-"for ax, (x, y) in zip(axs, pairs):\n",
+"for ax, (x, y) in zip(axs, pairs, strict=False):\n",
 "    # Calculate Spearman correlation coefficient\n",
 "    res = spearmanr(df[x], df[y])\n",
 "    corr = res.statistic\n",
@@ -336,7 +334,7 @@
 "    )\n",
 "    p.set_title(f\"{y} vs {x}\")\n",
 "    p.set_xlabel(x)\n",
-"    p.set_ylabel(y)\n"
+"    p.set_ylabel(y)"
 ]
 },
 {
@@ -378,7 +376,7 @@
 "        corr = f\"+{corr}\"\n",
 "    else:\n",
 "        corr = f\"{corr}\"\n",
-"    \n",
+"\n",
 "    if p_values.iloc[i, j] < 0.001:\n",
 "        p_val = r\"$^{*}$\"\n",
 "    elif p_values.iloc[i, j] < 0.01:\n",
@@ -443,7 +441,9 @@
 "    median = df[col].median()\n",
 "    low_ci = df[col].quantile(0.025)\n",
 "    high_ci = df[col].quantile(0.975)\n",
-"    print(f\"{col}: {mean:.2f} ± {std:.2f} (median: {median:.2f}, 95% CI: [{low_ci:.2f}, {high_ci:.2f}])\")"
+"    print(\n",
+"        f\"{col}: {mean:.2f} ± {std:.2f} (median: {median:.2f}, 95% CI: [{low_ci:.2f}, {high_ci:.2f}])\"\n",
+"    )"
 ]
 },
 {
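Most of this notebook's churn is whitespace normalisation (trailing spaces, call-argument indentation) plus the same zip(..., strict=False) change as above. The final hunk wraps an over-long print() call across lines instead of shortening the f-string; a sketch with placeholder values:

# Placeholder values; only the wrapping style is the point.
col, mean, std, median, low_ci, high_ci = "Distance", 1.23, 0.45, 1.20, 0.50, 2.10
print(
    f"{col}: {mean:.2f} ± {std:.2f} (median: {median:.2f}, 95% CI: [{low_ci:.2f}, {high_ci:.2f}])"
)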

python/evdplanner/notebooks/plot_colin.ipynb

Lines changed: 1 addition & 2 deletions
@@ -10,7 +10,6 @@
 "from pathlib import Path\n",
 "\n",
 "import matplotlib.pyplot as plt\n",
-"import seaborn as sns\n",
 "\n",
 "normal_map = Path(r\"S:\\E_ResearchData\\evdplanner\\Templates\\colin\\map_skin_normal.png\")\n",
 "keypoint_json = Path(r\"S:\\E_ResearchData\\evdplanner\\Templates\\colin\\projected_skin.kp.json\")"
@@ -62,7 +61,7 @@
 "source": [
 "image = plt.imread(normal_map)\n",
 "\n",
-"with open(keypoint_json, \"r\") as f:\n",
+"with open(keypoint_json) as f:\n",
 "    keypoints = json.load(f)\n",
 "\n",
 "y_shape, x_shape = image.shape[:2]\n",

python/evdplanner/notebooks/timing.ipynb

Lines changed: 2 additions & 3 deletions
@@ -13,8 +13,7 @@
 "import numpy as np\n",
 "import pandas as pd\n",
 "import seaborn as sns\n",
-"from matplotlib import cm\n",
-"from tqdm.auto import tqdm"
+"from matplotlib import cm"
 ]
 },
 {
@@ -36,7 +35,7 @@
 }
 ],
 "source": [
-"timings = pd.read_csv(Path(\"F:\\evdplanner\\Test\\evd_times.csv\"))\n",
+"timings = pd.read_csv(Path(r\"F:\\evdplanner\\Test\\evd_times.csv\"))\n",
 "\n",
 "total_times = timings.loc[timings[\"substage\"] == \"total\"]\n",
 "\n",
