diff --git a/autoembedder/evaluator.py b/autoembedder/evaluator.py
index 0eecc98..b5a32f3 100644
--- a/autoembedder/evaluator.py
+++ b/autoembedder/evaluator.py
@@ -86,14 +86,12 @@ def __predict(
         float: Loss value.
     """

-    device = (
-        torch.device(
-            "cuda"
-            if torch.cuda.is_available()
-            else "mps"
-            if torch.backends.mps.is_available() and parameters.get("use_mps", 0) == 1
-            else "cpu"
-        ),
+    device = torch.device(
+        "cuda"
+        if torch.cuda.is_available()
+        else "mps"
+        if torch.backends.mps.is_available() and parameters.get("use_mps", 0) == 1
+        else "cpu"
     )

     with torch.no_grad():
diff --git a/example.ipynb b/example.ipynb
index 197e7e6..359fd25 100644
--- a/example.ipynb
+++ b/example.ipynb
@@ -282,7 +282,7 @@
    "source": [
     "parameters = {\n",
     "    \"hidden_layers\": [[25, 20], [20, 10]],\n",
-    "    \"epochs\": 1,\n",
+    "    \"epochs\": 10,\n",
     "    \"lr\": 0.0001,\n",
     "    \"verbose\": 1,\n",
     "    \"target\": \"Class\",\n",
diff --git a/pyproject.toml b/pyproject.toml
index b38bc9c..1ba2a7d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "Autoembedder"
-version = "0.1.13"
+version = "0.1.14"
 description = "PyTorch autoencoder with additional embeddings layer for categorical data."
 authors = ["Christopher Lemke "]
 license = "MIT"
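
The `evaluator.py` hunk fixes a subtle bug: the removed version wrapped `torch.device(...)` in an extra pair of parentheses ending in a trailing comma, so `device` became a one-element tuple rather than a `torch.device`. Below is a minimal standalone sketch of the corrected selection logic, assuming `parameters` is a plain dict as in the diff (the literal dict here is only an illustration, not taken from the repo):

```python
import torch

# Hypothetical stand-in for the `parameters` dict used in __predict.
parameters = {"use_mps": 0}

# Corrected form from the hunk above: prefer CUDA, then MPS (only if the
# backend is available and explicitly enabled), otherwise fall back to CPU.
device = torch.device(
    "cuda"
    if torch.cuda.is_available()
    else "mps"
    if torch.backends.mps.is_available() and parameters.get("use_mps", 0) == 1
    else "cpu"
)

# The old code produced a tuple like (device(type='cpu'),); this now yields
# a torch.device that can be passed directly to .to(device).
print(type(device), device)
```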