Preparing for 1.2.2 and updating versions (#46)
* Update build.yml

* Update pyproject.toml

Expanding our supported Python trove classifiers to include Python 3.13

* Update Cargo.toml

Updating pyo3 from 0.15 to 0.23

* Update pyproject.toml

Moving 3.6 and 3.7 to EOL (not only are they EOL, but even 3.8 is now; pyo3 currently does not support abi3-py36, and I'm betting 3.7 is going to follow suit sometime soon).

* Update Cargo.toml

abi3-py36 changing to abi3-py38

* Updated a bunch of versions. Pyo3 changed fairly substantially in how Python function signatures are specified. Also updated my first name everywhere.
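
A minimal sketch of the kind of signature change involved, assuming a hypothetical `leiden` binding and `topologic` module name rather than the actual code from this repo: older pyo3 releases declared Python-side defaults with an `#[args(...)]` attribute on the `#[pyfunction]`, while pyo3 0.23 expects them in `#[pyo3(signature = (...))]` and registers modules through the `Bound` API.

```rust
use pyo3::prelude::*;

// pyo3 0.15-era style (roughly), shown for contrast:
//
//   #[pyfunction]
//   #[args(resolution = "1.0", seed = "None")]
//   fn leiden(edges: Vec<(String, String, f64)>, resolution: f64, seed: Option<u64>) -> PyResult<usize>

// pyo3 0.23-era style: defaults and optionals live in #[pyo3(signature = ...)].
#[pyfunction]
#[pyo3(signature = (edges, resolution = 1.0, seed = None))]
fn leiden(
    edges: Vec<(String, String, f64)>,
    resolution: f64,
    seed: Option<u64>,
) -> PyResult<usize> {
    // Placeholder body; the real work lives in the network_partitions crate.
    let _ = (edges, resolution, seed);
    Ok(0)
}

// Module registration against the Bound<'_, PyModule> API used by pyo3 0.23.
#[pymodule]
fn topologic(m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(leiden, m)?)?;
    Ok(())
}
```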

* Updating versions, running clippy, and yanking the logging nonsense that was never used and never should be used (there have to be better ways than that nonsense). Also about to have clippy fix my convention of always using a return statement, because I hate implicit returns, but that's a me thing, not a world thing.
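
For the curious, the return-style lint in question is `clippy::needless_return`; a tiny made-up example (not from this codebase) of what it complains about and what it rewrites to:

```rust
use std::num::ParseFloatError;

// clippy::needless_return flags an explicit `return` in tail position.
fn parse_resolution(raw: &str) -> Result<f64, ParseFloatError> {
    // return raw.parse::<f64>(); // the explicit-return style clippy warns about
    raw.parse::<f64>() // the implicit tail expression it rewrites to
}

fn main() -> Result<(), ParseFloatError> {
    let resolution = parse_resolution("1.0")?;
    println!("resolution = {resolution}");
    Ok(())
}
```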

* More clippy fixes

* More clippy suggestions. It's like I never ran this before.

* Adding ipython and networkx as dev deps. `cd packages/pyo3 && uv sync && uv run ipython` gets you to a reasonable REPL for manual testing. I cannot believe I did not do proper Python testing here. Maybe I did it in graspologic?

* Running cargo fmt

* Misspelled repetition, which I have also repetitively done in this commit message alone

* Committing some minor changes before I rebase on dev. I forgot the dev/main branching scheme.

* Updating the pyproject.toml to be correct per the current PyPA specification. I really hope this doesn't break older versions.

* Fixing the changes clippy made to some of the CommonMark documentation in the function. Too much was being treated as a quoted paragraph.
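
For anyone wondering what that looks like in practice, a minimal sketch of the CommonMark behavior (not the actual doc comment from this repo): inside a `///` comment, a line indented by four or more spaces is parsed as an indented code block rather than as a continuation of the paragraph above it.

```rust
/// Runs Leiden over the provided edge list.
///
/// A wrapped line indented like the one below
///
///     let resolution = 1.0;
///
/// renders as an indented code block instead of flowing with the paragraph,
/// so wrapped doc text should stay flush with the rest of the prose.
fn documented_example() {}

fn main() {
    documented_example();
}
```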
daxpryce authored Jan 8, 2025
1 parent 10ef0a1 commit 0ea1c92
Showing 33 changed files with 550 additions and 517 deletions.
12 changes: 6 additions & 6 deletions .github/workflows/build.yml
@@ -12,10 +12,10 @@ jobs:
runs-on: "ubuntu-latest"
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.8
- name: Set up Python 3.12
uses: actions/setup-python@v5
with:
python-version: 3.8
python-version: 3.12
- name: Materialize build number
run: |
pip install -U pip
@@ -38,10 +38,10 @@ jobs:
os: ["ubuntu-latest", "windows-latest", "macos-latest"]
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.9
- name: Set up Python 3.12
uses: actions/setup-python@v5
with:
python-version: 3.9
python-version: 3.12
- uses: actions/download-artifact@v4
with:
name: cargo-toml
@@ -106,10 +106,10 @@ jobs:
if: github.ref=='refs/heads/main' || github.ref=='refs/heads/dev'
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.8
- name: Set up Python 3.12
uses: actions/setup-python@v2
with:
python-version: 3.8
python-version: 3.12
- uses: actions/download-artifact@v4
with:
name: dist-ubuntu-latest
3 changes: 1 addition & 2 deletions .rustfmt.toml
@@ -1,3 +1,2 @@
fn_args_layout = "vertical"
empty_item_single_line = false
fn_params_layout = "vertical"

2 changes: 2 additions & 0 deletions clippy.toml
@@ -0,0 +1,2 @@
too-many-arguments-threshold=20 # for what it is worth, clippy is absolutely right and pythonic-ness is absolutely wrong
enum-variant-name-threshold=10 # it doesn't like the repetition in "Error" in my export-to-python error types
File renamed without changes.
9 changes: 3 additions & 6 deletions packages/cli/Cargo.toml
@@ -1,16 +1,13 @@
[package]
name = "cli"
version = "0.1.0"
authors = ["Dwayne Pryce <dwpryce@microsoft.com>"]
version = "0.1.1"
authors = ["Dax Pryce <daxpryce@microsoft.com>"]
edition = "2018"
license = "MIT"
description = "CLI Runner for the topologic associated crates (network_partitions and eventually network_automatic_layouts)"

[dependencies]
clap = "2.34"
clap = "4.5"
rand = "0.8"
rand_xorshift = "0.3"
network_partitions={path = "../network_partitions"}

[features]
logging = ["network_partitions/logging"]
74 changes: 23 additions & 51 deletions packages/cli/src/args.rs
@@ -33,27 +33,30 @@ pub struct CliArgs {
pub skip_first_line: bool,
}

impl TryFrom<ArgMatches<'_>> for CliArgs {
impl TryFrom<ArgMatches> for CliArgs {
type Error = ParseCliError;

fn try_from(matches: ArgMatches<'_>) -> Result<Self, Self::Error> {
let source_edges = matches
.value_of(SOURCE_EDGES)
fn try_from(matches: ArgMatches) -> Result<Self, Self::Error> {
let source_edges: &str = matches
.get_one(SOURCE_EDGES)
.cloned()
.ok_or(ParseCliError::RequiredValueError)?;
let output = matches
.value_of(OUTPUT)
let output: &str = matches
.get_one(OUTPUT)
.cloned()
.ok_or(ParseCliError::RequiredValueError)?;
let separator = matches
.value_of(SEPARATOR)
let separator: &str = matches
.get_one(SEPARATOR)
.cloned()
.ok_or(ParseCliError::RequiredValueError)?;
let source_index: usize = matches.value_of(SOURCE_INDEX).as_a()?;
let target_index: usize = matches.value_of(TARGET_INDEX).as_a()?;
let weight_index: Option<usize> = matches.value_of(WEIGHT_INDEX).as_a()?;
let seed: Option<usize> = matches.value_of(SEED).as_a()?;
let iterations: usize = matches.value_of(ITERATIONS).as_a()?;
let resolution: f64 = matches.value_of(RESOLUTION).as_a()?;
let randomness: f64 = matches.value_of(RANDOMNESS).as_a()?;
let quality_option: Option<&str> = matches.value_of(QUALITY);
let source_index: usize = *matches.get_one(SOURCE_INDEX).unwrap();
let target_index: usize = *matches.get_one(TARGET_INDEX).unwrap();
let weight_index: Option<usize> = matches.get_one(WEIGHT_INDEX).copied();
let seed: Option<usize> = matches.get_one(SEED).cloned();
let iterations: usize = *matches.get_one(ITERATIONS).unwrap();
let resolution: f64 = *matches.get_one(RESOLUTION).unwrap();
let randomness: f64 = *matches.get_one(RANDOMNESS).unwrap();
let quality_option: Option<&str> = matches.get_one(QUALITY).cloned();
let use_modularity: bool = match quality_option {
Some(quality_value) => {
if quality_value == "cpm" {
@@ -66,7 +69,7 @@ impl TryFrom<ArgMatches<'_>> for CliArgs {
}
None => Err(ParseCliError::RequiredValueError),
}?;
let skip_first_line: bool = matches.is_present(HAS_HEADER);
let skip_first_line: bool = matches.contains_id(HAS_HEADER);
let cli_args: CliArgs = CliArgs {
source_edges: source_edges.into(),
output_path: output.into(),
@@ -81,7 +84,7 @@ impl TryFrom<ArgMatches<'_>> for CliArgs {
use_modularity,
skip_first_line,
};
return Ok(cli_args);
Ok(cli_args)
}
}

@@ -94,43 +97,12 @@ pub enum ParseCliError {

impl From<ParseFloatError> for ParseCliError {
fn from(_: ParseFloatError) -> Self {
return ParseCliError::NotANumber;
ParseCliError::NotANumber
}
}

impl From<ParseIntError> for ParseCliError {
fn from(_: ParseIntError) -> Self {
return ParseCliError::NotANumber;
}
}

trait As<T> {
fn as_a(&self) -> Result<T, ParseCliError>;
}

impl As<f64> for Option<&str> {
fn as_a(&self) -> Result<f64, ParseCliError> {
self.map(|cli_arg| cli_arg.parse::<f64>().unwrap())
.ok_or(ParseCliError::RequiredValueError)
}
}

impl As<usize> for Option<&str> {
fn as_a(&self) -> Result<usize, ParseCliError> {
self.map(|cli_arg| cli_arg.parse::<usize>().unwrap())
.ok_or(ParseCliError::RequiredValueError)
}
}

impl As<Option<usize>> for Option<&str> {
fn as_a(&self) -> Result<Option<usize>, ParseCliError> {
let result = match self {
Some(cli_arg) => {
let parse_result = cli_arg.parse::<usize>();
Ok(parse_result.map(|value| Some(value))?)
}
None => Ok(None),
};
return result;
ParseCliError::NotANumber
}
}
4 changes: 2 additions & 2 deletions packages/cli/src/leiden.rs
@@ -75,9 +75,9 @@ pub fn leiden(
let mut output_file: File =
File::create(output_path).expect("Unable to open output file for writing");
for item in &clustering {
write!(
writeln!(
output_file,
"{},{}\n",
"{},{}",
labeled_network.label_for(item.node_id),
item.cluster
)
58 changes: 28 additions & 30 deletions packages/cli/src/main.rs
@@ -1,8 +1,7 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

#![feature(in_band_lifetimes)]
use clap::{App, Arg};
use clap::{Arg, ArgAction, Command};
use std::convert::TryFrom;

mod args;
@@ -11,83 +10,82 @@ mod leiden;
use crate::args::*;

fn main() {
let matches = App::new("leiden_cli")
let matches = Command::new("leiden_cli")
.version("0.1.0")
.author("Dwayne Pryce <dwpryce@microsoft.com>")
.about("Runs leiden over a provided edge list and outputs the results")
.arg(
Arg::with_name(SOURCE_EDGES)
Arg::new(SOURCE_EDGES)
.help("The edge list that defines the graph's connections")
.required(true)
.index(1),
)
.arg(
Arg::with_name(OUTPUT)
Arg::new(OUTPUT)
.help("The output for the communities detected")
.required(true)
.index(2),
)
.arg(
Arg::with_name(SEPARATOR)
.short("s")
Arg::new(SEPARATOR)
.short('s')
.help("The character to split the edge list on")
.takes_value(true)
.action(ArgAction::Set)
.default_value("\t"),
)
.arg(
Arg::with_name(SOURCE_INDEX)
.takes_value(true)
Arg::new(SOURCE_INDEX)
.action(ArgAction::Set)
.help("0-based index of source column from edge file")
.default_value("0"),
)
.arg(
Arg::with_name(TARGET_INDEX)
.takes_value(true)
Arg::new(TARGET_INDEX)
.action(ArgAction::Set)
.help("0-based index of target column from edge file")
.default_value("1"),
)
.arg(
Arg::with_name(WEIGHT_INDEX)
.takes_value(true)
Arg::new(WEIGHT_INDEX)
.action(ArgAction::Set)
.help("0-based index of weight column from edge file")
)
.arg(
Arg::with_name(SEED)
.takes_value(true)
Arg::new(SEED)
.action(ArgAction::Set)
.help("A seed value to start the PRNG")
.long("seed"),
)
.arg(
Arg::with_name(ITERATIONS)
.takes_value(true)
Arg::new(ITERATIONS)
.action(ArgAction::Set)
.help("Leiden is an inherently recursive algorithm, however it may find itself (due to randomness) at a localized maximum. Setting iterations to a number larger than 1 may allow you to jump out of a local maximum and continue until a better optimum partitioning is found (note that any n > 1 will mean that leiden will be run again for a minimum of n-1 more times, though it may be run for many more than that")
.short("i")
.short('i')
.default_value("1"),
)
.arg(
Arg::with_name(RESOLUTION)
.takes_value(true)
Arg::new(RESOLUTION)
.action(ArgAction::Set)
.help("")
.short("r")
.short('r')
.default_value("1.0")
)
.arg(
Arg::with_name(RANDOMNESS)
.takes_value(true)
Arg::new(RANDOMNESS)
.action(ArgAction::Set)
.help("")
.default_value("1E-2"),
)
.arg(
Arg::with_name(QUALITY)
.takes_value(true)
Arg::new(QUALITY)
.action(ArgAction::Set)
.help("Quality function to use")
.short("q")
.possible_value("modularity")
.possible_value("cpm")
.short('q')
.value_parser(["modularity", "cpm"])
.default_value("modularity"),
)
.arg(
Arg::with_name(HAS_HEADER)
Arg::new(HAS_HEADER)
.help("Flag must be added if the source file contains a header line")
.long("has_header")
)
6 changes: 1 addition & 5 deletions packages/network_partitions/Cargo.toml
@@ -1,18 +1,14 @@
[package]
name = "network_partitions"
version = "0.1.0"
authors = ["Dwayne Pryce <dwpryce@microsoft.com>"]
authors = ["Dax Pryce <daxpryce@microsoft.com>"]
edition = "2018"
license = "MIT"
description = "Leiden community detection as per https://arxiv.org/abs/1810.08473"

[dependencies]
rand = "0.8"
chrono = { version = "0.4", optional = true }

[dev-dependencies]
rand_xorshift = "0.3"

[features]
logging = ["chrono"]
debug = []
