
Commit 9af1c3e

Fix pre-commit errors: Replace .format() with f-strings and list comprehensions with generators
1 parent: 2e32fbd

12 files changed (+22, -22 lines)
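
Before the per-file diffs, a minimal sketch of the first rewrite applied throughout, `str.format()` to f-strings; the loop and values are illustrative, not code from the changed notebooks. The generator-expression half of the change is sketched after the GP-Heteroskedastic diffs below.

```python
# Illustrative refactor: identical output either way, but the f-string
# inlines the expression and reads without a trailing .format() call.
for i in range(4):
    assert "Chain {}".format(i + 1) == f"Chain {i + 1}"

# Format specs carry over unchanged: "{:.5f}".format(x) becomes f"{x:.5f}".
x = 0.123456
assert "{:.5f}".format(x) == f"{x:.5f}" == "0.12346"
```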

examples/case_studies/factor_analysis.ipynb

Lines changed: 3 additions & 3 deletions
@@ -307,7 +307,7 @@
 "source": [
 "for i in trace.posterior.chain.values:\n",
 "    samples = trace.posterior[\"W\"].sel(chain=i, observed_columns=3, latent_columns=1)\n",
-"    plt.plot(samples, label=\"Chain {}\".format(i + 1))\n",
+"    plt.plot(samples, label=f\"Chain {i + 1}\")\n",
 "    plt.axhline(samples.mean(), color=f\"C{i}\")\n",
 "plt.legend(ncol=4, loc=\"upper center\", fontsize=12, frameon=True), plt.xlabel(\"Sample\");"
 ]
@@ -484,7 +484,7 @@
 "\n",
 "for i in range(4):\n",
 "    samples = trace.posterior[\"W\"].sel(chain=i, observed_columns=3, latent_columns=1)\n",
-"    plt.plot(samples, label=\"Chain {}\".format(i + 1))\n",
+"    plt.plot(samples, label=f\"Chain {i + 1}\")\n",
 "\n",
 "plt.legend(ncol=4, loc=\"lower center\", fontsize=8), plt.xlabel(\"Sample\");"
 ]
@@ -704,7 +704,7 @@
 "\n",
 "ax = az.plot_kde(\n",
 "    trace.posterior[\"W\"].sel(**col_selection).values,\n",
-"    label=\"MCMC posterior for the explicit model\".format(0),\n",
+"    label=f\"MCMC posterior for the explicit model\",\n",
 "    plot_kwargs={\"color\": f\"C{1}\"},\n",
 ")\n",
 "\n",

examples/case_studies/factor_analysis.myst.md

Lines changed: 3 additions & 3 deletions
@@ -130,7 +130,7 @@ At this point, there are already several warnings regarding failed convergence c
 ```{code-cell} ipython3
 for i in trace.posterior.chain.values:
     samples = trace.posterior["W"].sel(chain=i, observed_columns=3, latent_columns=1)
-    plt.plot(samples, label="Chain {}".format(i + 1))
+    plt.plot(samples, label=f"Chain {i + 1}")
     plt.axhline(samples.mean(), color=f"C{i}")
 plt.legend(ncol=4, loc="upper center", fontsize=12, frameon=True), plt.xlabel("Sample");
 ```
@@ -198,7 +198,7 @@ with pm.Model(coords=coords) as PPCA_identified:
 
 for i in range(4):
     samples = trace.posterior["W"].sel(chain=i, observed_columns=3, latent_columns=1)
-    plt.plot(samples, label="Chain {}".format(i + 1))
+    plt.plot(samples, label=f"Chain {i + 1}")
 
 plt.legend(ncol=4, loc="lower center", fontsize=8), plt.xlabel("Sample");
 ```
@@ -253,7 +253,7 @@ col_selection = dict(observed_columns=3, latent_columns=1)
 
 ax = az.plot_kde(
     trace.posterior["W"].sel(**col_selection).values,
-    label="MCMC posterior for the explicit model".format(0),
+    label=f"MCMC posterior for the explicit model",
     plot_kwargs={"color": f"C{1}"},
 )
 
examples/case_studies/probabilistic_matrix_factorization.ipynb

Lines changed: 1 addition & 1 deletion
@@ -1135,7 +1135,7 @@
 "    Method = baseline_methods[name]\n",
 "    method = Method(train)\n",
 "    baselines[name] = method.rmse(test)\n",
-"    print(\"{} RMSE:\\t{:.5f}\".format(method, baselines[name]))"
+"    print(f\"{method} RMSE:\\t{baselines[name]:.5f}\")"
 ]
 },
 {

examples/case_studies/probabilistic_matrix_factorization.myst.md

Lines changed: 1 addition & 1 deletion
@@ -537,7 +537,7 @@ for name in baseline_methods:
     Method = baseline_methods[name]
     method = Method(train)
     baselines[name] = method.rmse(test)
-    print("{} RMSE:\t{:.5f}".format(method, baselines[name]))
+    print(f"{method} RMSE:\t{baselines[name]:.5f}")
 ```
 
 As expected: the uniform random baseline is the worst by far, the global mean baseline is next best, and the mean of means method is our best baseline. Now let's see how PMF stacks up.

examples/gaussian_processes/GP-Heteroskedastic.ipynb

Lines changed: 2 additions & 2 deletions
@@ -147,7 +147,7 @@
 "    return ℓ_μ, ℓ_σ\n",
 "\n",
 "\n",
-"ℓ_μ, ℓ_σ = [stat for stat in get_ℓ_prior(X_)]"
+"ℓ_μ, ℓ_σ = (stat for stat in get_ℓ_prior(X_))"
 ]
 },
 {
@@ -1036,7 +1036,7 @@
 "    return np.hstack([np.tile(x, (2, 1)), np.vstack([np.zeros(x.shape), np.ones(x.shape)])])\n",
 "\n",
 "\n",
-"Xu_c, X_obs_c, Xnew_c = [add_coreg_idx(x) for x in [Xu, X_obs, Xnew]]\n",
+"Xu_c, X_obs_c, Xnew_c = (add_coreg_idx(x) for x in [Xu, X_obs, Xnew])\n",
 "\n",
 "with pm.Model() as model_htsc:\n",
 "    ℓ = pm.InverseGamma(\"ℓ\", mu=ℓ_μ, sigma=ℓ_σ)\n",

examples/gaussian_processes/GP-Heteroskedastic.myst.md

Lines changed: 2 additions & 2 deletions
@@ -87,7 +87,7 @@ def get_ℓ_prior(points):
     return ℓ_μ, ℓ_σ
 
 
-ℓ_μ, ℓ_σ = [stat for stat in get_ℓ_prior(X_)]
+ℓ_μ, ℓ_σ = (stat for stat in get_ℓ_prior(X_))
 ```
 
 ```{code-cell} ipython3
@@ -388,7 +388,7 @@ def add_coreg_idx(x):
     return np.hstack([np.tile(x, (2, 1)), np.vstack([np.zeros(x.shape), np.ones(x.shape)])])
 
 
-Xu_c, X_obs_c, Xnew_c = [add_coreg_idx(x) for x in [Xu, X_obs, Xnew]]
+Xu_c, X_obs_c, Xnew_c = (add_coreg_idx(x) for x in [Xu, X_obs, Xnew])
 
 with pm.Model() as model_htsc:
     ℓ = pm.InverseGamma("ℓ", mu=ℓ_μ, sigma=ℓ_σ)
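
The two GP-Heteroskedastic hunks above swap throwaway list comprehensions for generator expressions on the right-hand side of a tuple unpacking. A minimal sketch of why the forms are interchangeable; `stats` is a hypothetical stand-in for `get_ℓ_prior`:

```python
# Hypothetical stand-in for get_ℓ_prior: returns a 2-tuple of statistics.
def stats(points):
    mu = sum(points) / len(points)
    sd = (sum((p - mu) ** 2 for p in points) / len(points)) ** 0.5
    return mu, sd

# Tuple unpacking consumes any iterable of the right length, so the
# generator expression behaves exactly like the list it replaces while
# skipping the intermediate list allocation.
m1, s1 = [s for s in stats([1.0, 2.0, 3.0])]   # before
m2, s2 = (s for s in stats([1.0, 2.0, 3.0]))   # after
m3, s3 = stats([1.0, 2.0, 3.0])                # unpacking the tuple directly also works
assert (m1, s1) == (m2, s2) == (m3, s3)
```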

examples/generalized_linear_models/GLM-hierarchical-binomial-model.ipynb

Lines changed: 1 addition & 1 deletion
@@ -203,7 +203,7 @@
 "ix_z, ix_x = np.unravel_index(np.argmax(surface, axis=None), surface.shape)\n",
 "ax.scatter([X[0, ix_x]], [Z[ix_z, 0]], color=\"red\")\n",
 "\n",
-"text = r\"$({a},{b})$\".format(a=np.round(X[0, ix_x], 2), b=np.round(Z[ix_z, 0], 2))\n",
+"text = fr\"$({np.round(X[0, ix_x], 2)},{np.round(Z[ix_z, 0], 2)})$\"\n",
 "\n",
 "ax.annotate(\n",
 "    text,\n",

examples/generalized_linear_models/GLM-hierarchical-binomial-model.myst.md

Lines changed: 1 addition & 1 deletion
@@ -159,7 +159,7 @@ ax.set_ylabel(r"$\log(\alpha+\beta)$", fontsize=16)
 ix_z, ix_x = np.unravel_index(np.argmax(surface, axis=None), surface.shape)
 ax.scatter([X[0, ix_x]], [Z[ix_z, 0]], color="red")
 
-text = r"$({a},{b})$".format(a=np.round(X[0, ix_x], 2), b=np.round(Z[ix_z, 0], 2))
+text = fr"$({np.round(X[0, ix_x], 2)},{np.round(Z[ix_z, 0], 2)})$"
 
 ax.annotate(
     text,
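
The GLM-hierarchical-binomial-model hunks fold the `.format()` keywords into an `fr` string, combining the raw prefix (backslashes stay literal, as mathtext labels require) with f-string interpolation. A minimal sketch with illustrative values:

```python
# Illustrative: fr"..." keeps backslashes literal for mathtext while
# still interpolating the braced expressions.
alpha, beta = 2.0, 3.0
assert fr"$\alpha+\beta = {alpha + beta:g}$" == r"$\alpha+\beta = 5$"

# Plain coordinate pairs render the same way as the notebook's text label.
a, b = 1.25, -0.5
assert fr"$({a},{b})$" == "$(1.25,-0.5)$"
```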

examples/generalized_linear_models/GLM-robust-with-outlier-detection.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -245,7 +245,7 @@
245245
" columns=[\"id\", \"x\", \"y\", \"sigma_y\", \"sigma_x\", \"rho_xy\"],\n",
246246
")\n",
247247
"\n",
248-
"dfhogg[\"id\"] = dfhogg[\"id\"].apply(lambda x: \"p{}\".format(int(x)))\n",
248+
"dfhogg[\"id\"] = dfhogg[\"id\"].apply(lambda x: f\"p{int(x)}\")\n",
249249
"dfhogg.set_index(\"id\", inplace=True)\n",
250250
"dfhogg.head()"
251251
]

examples/generalized_linear_models/GLM-robust-with-outlier-detection.myst.md

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@ dfhogg = pd.DataFrame(
     columns=["id", "x", "y", "sigma_y", "sigma_x", "rho_xy"],
 )
 
-dfhogg["id"] = dfhogg["id"].apply(lambda x: "p{}".format(int(x)))
+dfhogg["id"] = dfhogg["id"].apply(lambda x: f"p{int(x)}")
 dfhogg.set_index("id", inplace=True)
 dfhogg.head()
 ```
