Skip to content

Commit

Permalink
Use Ruff for Python linting and formatting
Browse files Browse the repository at this point in the history
This replaces the black, pyupgrade, reorder-python-imports and flake8
hooks in pre-commit with ruff.
  • Loading branch information
replaceafill authored Jun 20, 2024
1 parent e1b60c7 commit 47ac085
Show file tree
Hide file tree
Showing 26 changed files with 80 additions and 89 deletions.
16 changes: 0 additions & 16 deletions .flake8

This file was deleted.

26 changes: 5 additions & 21 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,26 +1,10 @@
repos:
- repo: https://github.com/asottile/pyupgrade
rev: v3.16.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.9
hooks:
- id: pyupgrade
args: [--py38-plus]
- repo: https://github.com/asottile/reorder_python_imports
rev: v3.13.0
hooks:
- id: reorder-python-imports
args: [--py38-plus]
- repo: https://github.com/psf/black
rev: "23.12.1"
hooks:
- id: black
args: [--safe, --quiet]
- repo: https://github.com/pycqa/flake8
rev: "7.1.0"
hooks:
- id: flake8
additional_dependencies:
- flake8-bugbear==24.4.26
- flake8-comprehensions==3.14.0
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.41.0
hooks:
Expand Down
1 change: 1 addition & 0 deletions aips/create_dip.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
generated alongside the objects folder containing only a reference to the ZIP file
(without AMD or DMD sections).
"""

import argparse
import csv
import logging.config # Has to be imported separately
Expand Down
1 change: 1 addition & 0 deletions aips/create_dips_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
Optionally, uploads those DIPs to AtoM or the Storage Service using
the scripts from `dips` and deletes the local copy.
"""

import argparse
import logging.config # Has to be imported separately
import os
Expand Down
2 changes: 1 addition & 1 deletion aips/models.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
from os.path import isfile

from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import Integer
from sqlalchemy import Sequence
from sqlalchemy import String
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Expand Down
2 changes: 1 addition & 1 deletion dips/atom_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
AtoM instance. A passwordless SSH connection is required to the AtoM host for the
user running this script and it must be already added to the list of known hosts.
"""

import argparse
import logging.config # Has to be imported separately
import os
Expand All @@ -15,7 +16,6 @@

import requests


THIS_DIR = os.path.abspath(os.path.dirname(__file__))
LOGGER = logging.getLogger("dip_workflow")

Expand Down
2 changes: 1 addition & 1 deletion dips/copy_to_netx.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
Copies a local DIP to NetX, providing a CSV list of each object.
"""

import argparse
import csv
import logging.config # Has to be imported separately
Expand All @@ -14,7 +15,6 @@

import lxml.etree


THIS_DIR = os.path.abspath(os.path.dirname(__file__))
LOGGER = logging.getLogger("copy_to_netx")

Expand Down
2 changes: 1 addition & 1 deletion dips/storage_service_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
Storage Service to process that DIP and create a relationship with the
AIP from where it was created.
"""

import argparse
import logging.config # Has to be imported separately
import os
Expand All @@ -17,7 +18,6 @@

import requests


THIS_DIR = os.path.abspath(os.path.dirname(__file__))
LOGGER = logging.getLogger("dip_workflow")

Expand Down
1 change: 1 addition & 0 deletions requirements-dev.in
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
-r requirements.txt
pytest
pytest-cov
ruff
2 changes: 2 additions & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,8 @@ requests==2.32.3
# via
# -r requirements.txt
# amclient
ruff==0.4.9
# via -r requirements-dev.in
sqlalchemy==1.4.52
# via -r requirements.txt
tomli==2.0.1
Expand Down
19 changes: 19 additions & 0 deletions ruff.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
[lint]
# Rule reference: https://docs.astral.sh/ruff/rules/
select = [
"B",
"C4",
"E",
"F",
"I",
"UP",
"W",
]
ignore = [
"B904",
"E501",
"UP031",
]

[lint.isort]
force-single-line = true
5 changes: 2 additions & 3 deletions tests/test_create_avalon_dip.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

from aips import create_dip


SS_URL = "http://127.0.0.1:62081"
SS_USER_NAME = "test"
SS_API_KEY = "test"
Expand Down Expand Up @@ -68,8 +67,8 @@ def test_create_avalon_dip_success(_get, tmp_path):
aip_dir, AVALON_AIP_UUID, output_dir.as_posix(), "atom", "avalon-manifest"
)
# Check DIP structure
assert avalon_dip_dir == "{}/{}/{}".format(
output_dir.as_posix(), TRANSFER_NAME, AVALON_AIP_UUID
assert (
avalon_dip_dir == f"{output_dir.as_posix()}/{TRANSFER_NAME}/{AVALON_AIP_UUID}"
)
assert os.path.isdir(avalon_dip_dir)

Expand Down
1 change: 0 additions & 1 deletion tests/test_create_dip.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@

from aips import create_dip


SS_URL = "http://192.168.168.192:8000"
SS_USER_NAME = "test"
SS_API_KEY = "7021334bee4c9155c07e531608dd28a9d8039420"
Expand Down
21 changes: 10 additions & 11 deletions tests/test_create_dips_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

from aips import create_dips_job


SS_URL = "http://192.168.168.192:8000"
SS_USER_NAME = "test"
SS_API_KEY = "12883879c823f6e533738c12266bfe9f7316a672"
Expand Down Expand Up @@ -149,7 +148,7 @@ def test_main_fail_request(_request, args):
),
"json.return_value": AIPS_JSON,
},
spec=requests.Response
spec=requests.Response,
)
],
)
Expand All @@ -162,7 +161,7 @@ def test_main_fail_request(_request, args):
"headers": {},
"iter_content.return_value": iter([AIP_CONTENT]),
},
spec=requests.Response
spec=requests.Response,
),
],
)
Expand All @@ -187,7 +186,7 @@ def test_main_success(_get, _request, args):
),
"json.return_value": AIPS_JSON,
},
spec=requests.Response
spec=requests.Response,
)
],
)
Expand All @@ -200,7 +199,7 @@ def test_main_success(_get, _request, args):
"headers": {},
"iter_content.return_value": iter([AIP_CONTENT]),
},
spec=requests.Response
spec=requests.Response,
),
],
)
Expand Down Expand Up @@ -230,7 +229,7 @@ def test_main_success_no_dip_creation(_get, _request, args):
),
"json.return_value": AIPS_JSON,
},
spec=requests.Response
spec=requests.Response,
)
],
)
Expand All @@ -243,7 +242,7 @@ def test_main_success_no_dip_creation(_get, _request, args):
"headers": {},
"iter_content.return_value": iter([AIP_CONTENT]),
},
spec=requests.Response
spec=requests.Response,
),
],
)
Expand All @@ -267,7 +266,7 @@ def test_main_dip_creation_failed(_get, _request, create_dip, atom_upload, args)
),
"json.return_value": AIPS_JSON,
},
spec=requests.Response
spec=requests.Response,
)
],
)
Expand All @@ -280,7 +279,7 @@ def test_main_dip_creation_failed(_get, _request, create_dip, atom_upload, args)
"headers": {},
"iter_content.return_value": iter([AIP_CONTENT]),
},
spec=requests.Response
spec=requests.Response,
),
],
)
Expand Down Expand Up @@ -314,7 +313,7 @@ def test_main_success_atom_upload_call(_get, _request, create_dip, atom_upload,
),
"json.return_value": AIPS_JSON,
},
spec=requests.Response
spec=requests.Response,
)
],
)
Expand All @@ -327,7 +326,7 @@ def test_main_success_atom_upload_call(_get, _request, create_dip, atom_upload,
"headers": {},
"iter_content.return_value": iter([AIP_CONTENT]),
},
spec=requests.Response
spec=requests.Response,
),
],
)
Expand Down
2 changes: 1 addition & 1 deletion tests/test_storage_service_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ def test_request_fail(self, _get, _makedirs, _copytree):
"json.return_value": {},
"headers": {},
},
spec=requests.Response
spec=requests.Response,
)
],
)
Expand Down
9 changes: 2 additions & 7 deletions tests/test_transfers.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
from transfers import models
from transfers import transfer


AM_URL = "http://127.0.0.1"
SS_URL = "http://127.0.0.1:8000"
USER = "demo"
Expand Down Expand Up @@ -42,17 +41,13 @@ def setUp(self):
transfer_type="standard",
target="standard_1",
transfer_name="standard_1",
transfer_abs_path="{}/standardTransfer/standard_1/".format(
transfers_dir
),
transfer_abs_path=f"{transfers_dir}/standardTransfer/standard_1/",
),
Result(
transfer_type="standard",
target="standard_1",
transfer_name="standard_1_1",
transfer_abs_path="{}/standardTransfer/standard_1_1/".format(
transfers_dir
),
transfer_abs_path=f"{transfers_dir}/standardTransfer/standard_1_1/",
),
Result(
transfer_type="dspace",
Expand Down
1 change: 0 additions & 1 deletion tests/test_transfers_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
from transfers import models
from transfers import transfer_async


AM_URL = "http://127.0.0.1:62080"
SS_URL = "http://127.0.0.1:62081"
USER = "test"
Expand Down
2 changes: 1 addition & 1 deletion transfers/defaults.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
UNDECODABLE = "UNABLE TO DECODE"
UNDEC_MSG = (
"Unable to decode a transfer source component; giving up and"
" returning {}".format(UNDECODABLE)
f" returning {UNDECODABLE}"
)

# Default logging for the module.
Expand Down
1 change: 1 addition & 0 deletions transfers/examples/split_transfer.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
Make sure that you have permissions on the locations you are reading or writing!
"""

import argparse
import csv
import os
Expand Down
6 changes: 3 additions & 3 deletions transfers/models.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import Sequence
from sqlalchemy import String
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
Expand All @@ -29,8 +29,8 @@ class Unit(Base):

def __repr__(self):
return (
"<Unit(id={s.id}, uuid={s.uuid}, unit_type={s.unit_type}, "
"path={s.path}, status={s.status}, current={s.current})>".format(s=self)
f"<Unit(id={self.id}, uuid={self.uuid}, unit_type={self.unit_type}, "
f"path={self.path}, status={self.status}, current={self.current})>"
)


Expand Down
4 changes: 3 additions & 1 deletion transfers/reingest.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
A work in progress, with some improvements that can be made to long-running
processes like this over time.
"""

import argparse
import atexit
import json
Expand All @@ -22,7 +23,8 @@
# by ensuring that it can see itself.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from transfers import errors, loggingconfig
from transfers import errors
from transfers import loggingconfig
from transfers import reingestmodel as reingestunit

LOGGER = logging.getLogger("transfers")
Expand Down
Loading

0 comments on commit 47ac085

Please sign in to comment.