[pre-commit.ci] pre-commit autoupdate (#188)
* [pre-commit.ci] pre-commit autoupdate

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.1.3 → v0.2.1](astral-sh/ruff-pre-commit@v0.1.3...v0.2.1)
- [github.com/pre-commit/mirrors-mypy: v1.6.1 → v1.8.0](pre-commit/mirrors-mypy@v1.6.1...v1.8.0)

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Fix type

* format

* fix

---------

Co-authored-by: Bas Nijholt <bas@nijho.lt>
pre-commit-ci[bot] and basnijholt authored Feb 14, 2024
1 parent b013122 commit fee0703
Showing 15 changed files with 50 additions and 10 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -9,14 +9,14 @@ repos:
- id: debug-statements
- id: check-ast
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.1.3"
rev: "v0.2.1"
hooks:
- id: ruff
exclude: docs/source/conf.py|ipynb_filter.py
args: ["--fix"]
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.6.1"
rev: "v1.8.0"
hooks:
- id: mypy
exclude: ipynb_filter.py|docs/source/conf.py
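These two version pins are exactly what pre-commit autoupdate rewrites. Most of the remaining changes in this commit are the fixes the newer hooks apply when re-run (locally: pre-commit run --all-files), plus the matching [tool.ruff.lint] config migration in pyproject.toml.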
1 change: 1 addition & 0 deletions adaptive_scheduler/_mock_scheduler.py
@@ -53,6 +53,7 @@ class MockScheduler:
``bash`` executable.
url
The URL of the socket. Defaults to {DEFAULT_URL}.
"""

def __init__(
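The recurring one-line additions in this file and in most files below come from the updated hooks' docstring auto-fixes: each touched multi-line docstring gains a closing """ on its own line. With this repo's select = ["ALL"], ruff's auto-fixable pydocstyle rules (likely D209, "multi-line docstring closing quotes should be on a separate line") produce exactly this change. A minimal before/after sketch with hypothetical names:

    # Before: the closing quotes share a line with the last paragraph.
    def url_before() -> str:
        """Return the socket URL.

        Defaults to the configured address."""
        return "tcp://localhost:5555"


    # After the auto-fix: the closing quotes sit on their own line.
    def url_after() -> str:
        """Return the socket URL.

        Defaults to the configured address.
        """
        return "tcp://localhost:5555"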
4 changes: 4 additions & 0 deletions adaptive_scheduler/_scheduler/base_scheduler.py
@@ -59,6 +59,7 @@ class BaseScheduler(abc.ABC):
Returns
-------
`BaseScheduler` object.
"""

_ext: ClassVar[str]
@@ -113,6 +114,7 @@ def queue(self, *, me_only: bool = True) -> dict[str, dict]:
-----
This function might return extra information about the job, however
this is not used elsewhere in this package.
"""

def queue_df(self) -> pd.DataFrame:
@@ -141,6 +143,7 @@ def job_script(self, options: dict[str, Any], *, index: int | None = None) -> st
index
The index of the job that is being run. This is used when
specifying different resources for different jobs.
"""

@property
@@ -188,6 +191,7 @@ def cancel(
Display a progress bar using `tqdm`.
max_tries
Maximum number of attempts to cancel a job.
"""

def cancel_jobs(job_ids: list[str]) -> None:
1 change: 1 addition & 0 deletions adaptive_scheduler/_scheduler/local.py
@@ -88,6 +88,7 @@ def job_script(self, options: dict[str, Any], *, index: int | None = None) -> st
Currently, there is a problem that this will not properly cleanup.
for example `ipengine ... &` will be detached and go on,
normally a scheduler will take care of this.
"""
job_script = textwrap.dedent(
"""\
1 change: 1 addition & 0 deletions adaptive_scheduler/_scheduler/pbs.py
@@ -135,6 +135,7 @@ def job_script(self, options: dict[str, Any], *, index: int | None = None) -> st
The index of the job that is being run. This is used when
specifying different resources for different jobs.
Currently not implemented for PBS!
"""
job_script = textwrap.dedent(
f"""\
2 changes: 2 additions & 0 deletions adaptive_scheduler/_scheduler/slurm.py
@@ -101,6 +101,7 @@ class SLURM(BaseScheduler):
extra_script
Extra script that will be executed after any environment variables are set,
but before the main scheduler is run.
"""

# Attributes that all schedulers need to have
@@ -297,6 +298,7 @@ def job_script(self, options: dict[str, Any], *, index: int | None = None) -> st
index
The index of the job that is being run. This is used when
specifying different resources for different jobs.
"""
cores = self._get_cores(index=index)
job_script = textwrap.dedent(
3 changes: 3 additions & 0 deletions adaptive_scheduler/_server_support/common.py
@@ -47,6 +47,7 @@ def get_allowed_url() -> str:
url
An url that can be used for the database manager, with the format
``tcp://ip_of_this_machine:allowed_port.``.
"""
ip = socket.gethostbyname(socket.gethostname())
port = zmq.ssh.tunnel.select_random_ports(1)[0]
@@ -98,6 +99,7 @@ def cleanup_scheduler_files(
If None the file is removed.
log_file_folder
The folder in which to delete the log-files.
"""
to_rm = _get_all_files(job_names, scheduler)

@@ -162,6 +164,7 @@ def periodically_clean_ipython_profiles(
Returns
-------
asyncio.Task
"""

async def clean(interval: float) -> None:
4 changes: 3 additions & 1 deletion adaptive_scheduler/_server_support/database_manager.py
@@ -84,7 +84,7 @@ def __init__(self, db_fname: str | Path, *, clear_existing: bool = False) -> Non
raw_data = json.load(f)
self._data = [_DBEntry(**entry) for entry in raw_data["data"]]

def all(self) -> list[_DBEntry]: # noqa: A003
def all(self) -> list[_DBEntry]:
return self._data

def insert_multiple(self, entries: list[_DBEntry]) -> None:
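Besides the docstring fix, this file drops "# noqa: A003" from the all() method: evidently ruff v0.2.1 no longer emits A003 (builtin shadowing) for it, so the suppression became unused and the auto-fixes removed it (RUF100 flags stale noqa comments). A short sketch of what A003 warned about, with hypothetical names:

    # A method named `all` shadows builtins.all only inside the class
    # namespace; call sites still resolve the builtin.
    class Store:
        def __init__(self) -> None:
            self._entries = [1, 2, 3]

        def all(self) -> list[int]:  # shadows the builtin name in the class body
            return self._entries


    assert all(Store().all())  # builtins.all is untouched at the call site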
@@ -152,6 +152,7 @@ class DatabaseManager(BaseManager):
----------
failed : list
A list of entries that have failed and have been removed from the database.
"""

def __init__(
@@ -363,6 +364,7 @@ async def _manage(self) -> None:
Returns
-------
coroutine
"""
log.debug("started database")
socket = ctx.socket(zmq.REP)
2 changes: 2 additions & 0 deletions adaptive_scheduler/_server_support/job_manager.py
@@ -65,6 +65,7 @@ def command_line_options(
-------
dict
The command line options for the job_script.
"""
if runner_kwargs is None:
runner_kwargs = {}
@@ -142,6 +143,7 @@ class JobManager(BaseManager):
----------
n_started : int
Total number of jobs started by the `JobManager`.
"""

def __init__(
2 changes: 2 additions & 0 deletions adaptive_scheduler/_server_support/kill_manager.py
@@ -40,6 +40,7 @@ def logs_with_string_or_condition(
-------
has_string
A list ``(job_name, fnames)``, which have the string inside their log-file.
"""
if isinstance(error, str):
has_error = lambda lines: error in "".join(lines) # noqa: E731
@@ -90,6 +91,7 @@ class KillManager(BaseManager):
move_to
If a job is cancelled the log is either removed (if ``move_to=None``)
or moved to a folder (e.g. if ``move_to='old_logs'``).
"""

def __init__(
1 change: 1 addition & 0 deletions adaptive_scheduler/_server_support/parse_logs.py
@@ -52,6 +52,7 @@ def parse_log_files(
Returns
-------
`~pandas.core.frame.DataFrame`
"""
_queue = scheduler.queue()
database_manager.update(_queue)
3 changes: 3 additions & 0 deletions adaptive_scheduler/client_support.py
@@ -77,6 +77,7 @@ def get_learner(
The filename of the learner that was chosen.
initializer
A function that runs before the process is forked.
"""
_add_log_file_handler(log_fname)
log.info(
@@ -120,6 +121,7 @@ def tell_done(url: str, fname: str | list[str]) -> None:
(`adaptive_scheduler.server_support.manage_database`).
fname
The filename of the learner that is done.
"""
log.info("goal reached! 🎉🎊🥳")
with ctx.socket(zmq.REQ) as socket:
@@ -173,6 +175,7 @@ def log_info(runner: AsyncRunner, interval: float = 300) -> asyncio.Task:
Adaptive Runner instance.
interval
Time in seconds between log entries.
"""

async def coro(runner: AsyncRunner, interval: float) -> None:
24 changes: 20 additions & 4 deletions adaptive_scheduler/utils.py
@@ -20,6 +20,7 @@
from contextlib import contextmanager, suppress
from datetime import datetime, timedelta, timezone
from inspect import signature
from itertools import chain
from multiprocessing import Manager
from pathlib import Path
from typing import (
@@ -103,6 +104,7 @@ def split(seq: Iterable, n_parts: int) -> Iterable[tuple]:
A list or other iterable that has to be split up.
n_parts
The sequence will be split up in this many parts.
"""
lst = list(seq)
n = math.ceil(len(lst) / n_parts)
@@ -131,14 +133,15 @@ def split_in_balancing_learners(
Returns
-------
new_learners, new_fnames
"""
new_learners = []
new_fnames = []
for x in split(zip(learners, fnames), n_parts):
learners_part, fnames_part = zip(*x)
learner = adaptive.BalancingLearner(learners_part, strategy=strategy)
new_learners.append(learner)
new_fnames.append(fnames_part)
new_fnames.append(list(fnames_part))
return new_learners, new_fnames
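The substantive fix in this hunk is list(fnames_part): zip(*x) yields tuples, so the function used to return a list of tuples while its signature (presumably list[list[str]] for the filenames) promises lists, which the stricter mypy v1.8.0 would flag. The matching test update is at the bottom of this diff. A small sketch with hypothetical filenames:

    # zip(*pairs) hands back tuples; the explicit list() restores the
    # declared list-of-lists shape.
    pairs = [("learner_0", "data_0.pickle"), ("learner_1", "data_1.pickle")]
    learners_part, fnames_part = zip(*pairs)
    assert isinstance(fnames_part, tuple)
    assert list(fnames_part) == ["data_0.pickle", "data_1.pickle"]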


@@ -169,6 +172,7 @@ def split_sequence_learner(
List of `~adaptive.SequenceLearner`\s.
new_fnames
List of str based on a hash of the sequence.
"""
new_learners, new_fnames = split_sequence_in_sequence_learners(
function=big_learner._original_function,
@@ -214,6 +218,7 @@ def split_sequence_in_sequence_learners(
List of `~adaptive.SequenceLearner`\s.
new_fnames
List of str based on a hash of the sequence.
"""
folder = Path(folder)
new_learners = []
@@ -248,11 +253,11 @@ def combine_sequence_learners(
-------
adaptive.SequenceLearner
Big `~adaptive.SequenceLearner` with data from ``learners``.
"""
if big_learner is None:
big_sequence: list[Any] = sum(
(list(learner.sequence) for learner in learners),
[],
big_sequence: list[Any] = list(
chain.from_iterable(learner.sequence for learner in learners),
)
big_learner = adaptive.SequenceLearner(
learners[0]._original_function,
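Swapping sum(..., []) for itertools.chain.from_iterable (note the new chain import at the top of this file) is both a lint fix and a performance fix: summing lists re-copies the accumulator on every step, which is quadratic in the total length, and ruff flags the pattern as RUF017 (quadratic-list-summation). A quick equivalence check:

    from itertools import chain

    seqs = [[1, 2], [3], [4, 5, 6]]
    flat_sum = sum((list(s) for s in seqs), [])   # O(n**2): copies at each step
    flat_chain = list(chain.from_iterable(seqs))  # single linear pass
    assert flat_sum == flat_chain == [1, 2, 3, 4, 5, 6]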
@@ -282,6 +287,7 @@ def copy_from_sequence_learner(
Learner to take the data from.
learner_to
Learner to tell the data to.
"""
mapping = {
hash_anything(learner_from.sequence[i]): v for i, v in learner_from.data.items()
@@ -420,6 +426,7 @@ def _remove_or_move_files(
If None the file is removed.
desc
Description of the progressbar.
"""
n_failed = 0
for fname in _progress(fnames, with_progress_bar, desc or "Removing files"):
@@ -463,6 +470,7 @@ def load_parallel(
max_workers
The maximum number of parallel threads when loading the data.
If ``None``, use the maximum number of threads that is possible.
"""

def load(learner: adaptive.BaseLearner, fname: str) -> None:
@@ -492,6 +500,7 @@ def save_parallel(
A list of filenames corresponding to `learners`.
with_progress_bar
Display a progress bar using `tqdm`.
"""

def save(learner: adaptive.BaseLearner, fname: str) -> None:
@@ -562,6 +571,7 @@ def connect_to_ipyparallel(
-------
client
An IPyparallel client.
"""
from ipyparallel import Client

@@ -623,6 +633,7 @@ class LRUCachedCallable:
Cache size of the LRU cache, by default 128.
with_cloudpickle
Use cloudpickle for storing the data in memory.
"""

def __init__(
@@ -1015,6 +1026,7 @@ def smart_goal(
Returns
-------
Callable[[adaptive.BaseLearner], bool]
"""
if callable(goal):
return goal
@@ -1083,6 +1095,7 @@ class WrappedFunction:
>>> wrapped_function = WrappedFunction(square)
>>> wrapped_function(4)
16
"""

def __init__(
@@ -1132,6 +1145,7 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any:
Any
The result of calling the deserialized function with the provided
arguments and keyword arguments.
"""
global _GLOBAL_CACHE # noqa: PLW0602

@@ -1258,6 +1272,7 @@ async def _track_file_creation_progress(
The time interval (in seconds) at which to update the progress. The interval is dynamically
adjusted to be at least 50 times the time it takes to update the progress. This ensures that
updating the progress does not take up a significant amount of time.
"""
# create total_files and add_total_progress before updating paths_dict
total_files = sum(len(paths) for paths in paths_dict.values())
@@ -1348,6 +1363,7 @@ def track_file_creation_progress(
"example2": {Path("/path/to/file3"), Path("/path/to/file4")},
}
>>> task = track_file_creation_progress(paths_dict)
"""
get_console().clear_live() # avoid LiveError, only 1 live render allowed at a time
columns = (*Progress.get_default_columns(), TimeElapsedColumn())
6 changes: 4 additions & 2 deletions pyproject.toml
@@ -112,6 +112,8 @@ exclude_lines = [
[tool.ruff]
line-length = 88
target-version = "py38"

[tool.ruff.lint]
select = ["ALL"]
ignore = [
"T20", # flake8-print
@@ -132,12 +134,12 @@ ignore = [
"E501", # Line too long
]

[tool.ruff.per-file-ignores]
[tool.ruff.lint.per-file-ignores]
"tests/*" = ["SLF001", "PLR2004"]
"tests/test_examples.py" = ["E501"]
".github/*" = ["INP001"]

[tool.ruff.mccabe]
[tool.ruff.lint.mccabe]
max-complexity = 18

[tool.mypy]
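This pyproject.toml hunk is the config migration that ruff v0.2.0 requires: top-level linter settings are deprecated, so select and ignore move into the new [tool.ruff.lint] table, and the per-file-ignores and mccabe tables become [tool.ruff.lint.per-file-ignores] and [tool.ruff.lint.mccabe]. Settings shared with the formatter, such as line-length and target-version, stay at the top level.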
2 changes: 1 addition & 1 deletion tests/test_utils.py
@@ -60,7 +60,7 @@ def test_split_in_balancing_learners(
)
assert len(new_learners) == n_parts
assert all(isinstance(lrn, adaptive.BalancingLearner) for lrn in new_learners)
assert new_fnames == [(fnames[0],), (fnames[1],)]
assert new_fnames == [[fnames[0]], [fnames[1]]]


def test_split_sequence_learner() -> None:
