Merge pull request #922 from gchq/chore/pre-commit-autoupdate
chore(deps): autoupdate pre-commit hooks
pc532627 authored Jan 14, 2025
2 parents cb005eb + 1e8ac11 commit 65b25cb
Showing 11 changed files with 31 additions and 43 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -104,7 +104,7 @@ repos:
# Enforce that type annotations are used instead of type comments
- id: python-use-type-annotations
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.8.6
+ rev: v0.9.1
hooks:
# Run the linter.
- id: ruff
6 changes: 3 additions & 3 deletions benchmark/blobs_benchmark_visualiser.py
@@ -66,8 +66,8 @@ def plot_benchmarking_results(data):
for i, metric in enumerate(metrics):
ax = axs[i]
ax.set_title(
- f'{metric.replace("_", " ").title()} vs '
- f'Coreset Size (n_samples = {n_samples})',
+ f"{metric.replace('_', ' ').title()} vs "
+ f"Coreset Size (n_samples = {n_samples})",
fontsize=14,
)

@@ -83,7 +83,7 @@ def plot_benchmarking_results(data):
ax.plot(coreset_sizes, metric_values, marker="o", label=algo)

ax.set_xlabel("Coreset Size")
- ax.set_ylabel(f'{metric.replace("_", " ").title()}')
+ ax.set_ylabel(f"{metric.replace('_', ' ').title()}")
ax.set_yscale("log") # log scale for better visualization
ax.legend()

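The string rewrites in this file (and in coreax/util.py and the examples/ scripts below) are cosmetic: with the ruff hook bumped to v0.9.1, the formatter re-quotes f-strings to use double quotes outside and single quotes inside the replacement fields, and joins implicitly concatenated fragments when they fit on one line. The rendered text is unchanged. A minimal check, using a stand-in metric name rather than anything taken from the benchmark code:

# Stand-in values for illustration only; "unweighted_mmd" is not taken from the benchmark.
metric = "unweighted_mmd"
n_samples = 1000

# Old style: single-quoted f-strings with double quotes inside, split into implicit fragments.
old_title = f'{metric.replace("_", " ").title()} vs ' f'Coreset Size (n_samples = {n_samples})'
# New style: double-quoted f-strings with single quotes inside the replacement fields.
new_title = (
    f"{metric.replace('_', ' ').title()} vs "
    f"Coreset Size (n_samples = {n_samples})"
)

assert old_title == new_title  # both give "Unweighted Mmd vs Coreset Size (n_samples = 1000)"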
3 changes: 1 addition & 2 deletions coreax/util.py
@@ -434,8 +434,7 @@ def speed_comparison_test(
)
else:
_logger.info(
- "Compilation time: %s ± %s per run "
- "(mean ± std. dev. of %s runs)",
+ "Compilation time: %s ± %s per run (mean ± std. dev. of %s runs)",
format_time(mean[0].item()),
format_time(std[0].item()),
num_runs,
5 changes: 2 additions & 3 deletions examples/herding_approximate_gram_matrix.py
@@ -138,8 +138,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float]:
)
plt.axis("off")
plt.title(
- f"Stein kernel herding, m={coreset_size}, "
- f"MMD={round(float(herding_mmd), 6)}"
+ f"Stein kernel herding, m={coreset_size}, MMD={round(float(herding_mmd), 6)}"
)
plt.show()

@@ -150,7 +149,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float]:
s=10,
color="red",
)
- plt.title(f"Random, m={coreset_size}, " f"MMD={round(float(random_mmd), 6)}")
+ plt.title(f"Random, m={coreset_size}, MMD={round(float(random_mmd), 6)}")
plt.axis("off")

if out_path is not None:
10 changes: 4 additions & 6 deletions examples/herding_basic.py
@@ -159,9 +159,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(
- f"Kernel herding, m={coreset_size}, " f"MMD={round(float(herding_mmd), 6)}"
- )
+ plt.title(f"Kernel herding, m={coreset_size}, MMD={round(float(herding_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -172,7 +170,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(f"RP Cholesky, m={coreset_size}, " f"MMD={round(float(rpc_mmd), 6)}")
+ plt.title(f"RP Cholesky, m={coreset_size}, MMD={round(float(rpc_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -183,7 +181,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(f"Stein thinning, m={coreset_size}, " f"MMD={round(float(stein_mmd), 6)}")
+ plt.title(f"Stein thinning, m={coreset_size}, MMD={round(float(stein_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -193,7 +191,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
s=10,
color="red",
)
- plt.title(f"Random, m={coreset_size}, " f"MMD={round(float(random_mmd), 6)}")
+ plt.title(f"Random, m={coreset_size}, MMD={round(float(random_mmd), 6)}")
plt.axis("off")

if out_path is not None:
10 changes: 4 additions & 6 deletions examples/herding_basic_weighted.py
@@ -163,9 +163,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(
- f"Kernel herding, m={coreset_size}, " f"MMD={round(float(herding_mmd), 6)}"
- )
+ plt.title(f"Kernel herding, m={coreset_size}, MMD={round(float(herding_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -176,7 +174,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(f"Stein thinning, m={coreset_size}, " f"MMD={round(float(stein_mmd), 6)}")
+ plt.title(f"Stein thinning, m={coreset_size}, MMD={round(float(stein_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -187,7 +185,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(f"RP Cholesky, m={coreset_size}, " f"MMD={round(float(rpc_mmd), 6)}")
+ plt.title(f"RP Cholesky, m={coreset_size}, MMD={round(float(rpc_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -197,7 +195,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
s=10,
color="red",
)
- plt.title(f"Random, m={coreset_size}, " f"MMD={round(float(random_mmd), 6)}")
+ plt.title(f"Random, m={coreset_size}, MMD={round(float(random_mmd), 6)}")
plt.axis("off")

if out_path is not None:
6 changes: 2 additions & 4 deletions examples/herding_duo_composite_kernel.py
@@ -133,9 +133,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(
- f"Kernel herding, m={coreset_size}, " f"MMD={round(float(herding_mmd), 6)}"
- )
+ plt.title(f"Kernel herding, m={coreset_size}, MMD={round(float(herding_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -145,7 +143,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
s=10,
color="red",
)
- plt.title(f"Random, m={coreset_size}, " f"MMD={round(float(random_mmd), 6)}")
+ plt.title(f"Random, m={coreset_size}, MMD={round(float(random_mmd), 6)}")
plt.axis("off")

if out_path is not None:
10 changes: 4 additions & 6 deletions examples/herding_refine_weighted.py
@@ -173,9 +173,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(
- f"Kernel herding, m={coreset_size}, " f"MMD={round(float(herding_mmd), 6)}"
- )
+ plt.title(f"Kernel herding, m={coreset_size}, MMD={round(float(herding_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -186,7 +184,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(f"RP Cholesky, m={coreset_size}, " f"MMD={round(float(rpc_mmd), 6)}")
+ plt.title(f"RP Cholesky, m={coreset_size}, MMD={round(float(rpc_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -197,7 +195,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
color="red",
)
plt.axis("off")
- plt.title(f"Stein thinning, m={coreset_size}, " f"MMD={round(float(stein_mmd), 6)}")
+ plt.title(f"Stein thinning, m={coreset_size}, MMD={round(float(stein_mmd), 6)}")
plt.show()

plt.scatter(x[:, 0], x[:, 1], s=2.0, alpha=0.1)
@@ -207,7 +205,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float, float, float]:
s=10,
color="red",
)
- plt.title(f"Random, m={coreset_size}, " f"MMD={round(float(random_mmd), 6)}")
+ plt.title(f"Random, m={coreset_size}, MMD={round(float(random_mmd), 6)}")
plt.axis("off")

if out_path is not None:
5 changes: 2 additions & 3 deletions examples/herding_stein_weighted.py
@@ -152,8 +152,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float]:
)
plt.axis("off")
plt.title(
- f"Stein kernel herding, m={coreset_size}, "
- f"MMD={round(float(herding_mmd), 6)}"
+ f"Stein kernel herding, m={coreset_size}, MMD={round(float(herding_mmd), 6)}"
)
plt.show()

@@ -164,7 +163,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float]:
s=10,
color="red",
)
- plt.title(f"Random, m={coreset_size}, " f"MMD={round(float(random_mmd), 6)}")
+ plt.title(f"Random, m={coreset_size}, MMD={round(float(random_mmd), 6)}")
plt.axis("off")

if out_path is not None:
5 changes: 2 additions & 3 deletions examples/herding_stein_weighted_ssm.py
@@ -165,8 +165,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float]:
)
plt.axis("off")
plt.title(
- f"Stein kernel herding, m={coreset_size}, "
- f"MMD={round(float(herding_mmd), 6)}"
+ f"Stein kernel herding, m={coreset_size}, MMD={round(float(herding_mmd), 6)}"
)
plt.show()

@@ -177,7 +176,7 @@ def main(out_path: Optional[Path] = None) -> tuple[float, float]:
s=10,
color="red",
)
- plt.title(f"Random, m={coreset_size}, " f"MMD={round(float(random_mmd), 6)}")
+ plt.title(f"Random, m={coreset_size}, MMD={round(float(random_mmd), 6)}")
plt.axis("off")

if out_path is not None:
12 changes: 6 additions & 6 deletions tests/unit/test_solvers.py
@@ -1994,15 +1994,15 @@ def test_map_reduce_diverse_selection(self):
coreset, _ = solver.reduce(Data(dataset))
selected_indices = coreset.nodes.data

- assert jnp.any(
- selected_indices >= coreset_size
- ), "MapReduce should select points beyond the first few"
+ assert jnp.any(selected_indices >= coreset_size), (
+ "MapReduce should select points beyond the first few"
+ )

# Check if there are indices from different partitions
partitions_represented = jnp.unique(selected_indices // leaf_size)
- assert (
- len(partitions_represented) > 1
- ), "MapReduce should select points from multiple partitions"
+ assert len(partitions_represented) > 1, (
+ "MapReduce should select points from multiple partitions"
+ )

def test_map_reduce_analytic(self):
r"""
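The assert rewrites above are also formatting-only: the message string moves into parentheses that hug the comma, and both layouts evaluate the same condition and raise the same AssertionError message on failure. A small sketch with stand-in values, not the test's actual data:

import jax.numpy as jnp

# Stand-in values for illustration only.
selected_indices = jnp.array([0, 1, 7])
coreset_size = 4

# Old layout: the jnp.any(...) call's parentheses carry the line break.
assert jnp.any(
    selected_indices >= coreset_size
), "MapReduce should select points beyond the first few"

# New layout: the condition stays on one line and the message is parenthesized.
assert jnp.any(selected_indices >= coreset_size), (
    "MapReduce should select points beyond the first few"
)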
