diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index c75019cffff..0c693bd7f72 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,3 +1,3 @@
 - [ ] Closes #xxxx
 - [ ] Tests added / passed
-- [ ] Passes `black distributed` / `flake8 distributed`
+- [ ] Passes `black distributed` / `flake8 distributed` / `isort distributed`
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ca1c26a7d87..fb4d7de629c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,9 @@
 repos:
+  - repo: https://github.com/pycqa/isort
+    rev: 5.7.0
+    hooks:
+      - id: isort
+        language_version: python3
   - repo: https://github.com/psf/black
     rev: 20.8b1
     hooks:
diff --git a/conftest.py b/conftest.py
index 07adc4982f6..e8a159fa4a3 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,7 +1,6 @@
 # https://pytest.org/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
 import pytest
 
-
 # Uncomment to enable more logging and checks
 # (https://docs.python.org/3/library/asyncio-dev.html)
 # Note this makes things slower and might consume much memory.
diff --git a/distributed/__init__.py b/distributed/__init__.py
index 475288a3b32..70bde950a0e 100644
--- a/distributed/__init__.py
+++ b/distributed/__init__.py
@@ -1,42 +1,42 @@
-from . import config
+from . import config  # isort:skip
 
 import dask
 from dask.config import config
+
+from ._version import get_versions
 from .actor import Actor, ActorFuture
-from .core import connect, rpc, Status
-from .deploy import LocalCluster, Adaptive, SpecCluster, SSHCluster
-from .diagnostics.progressbar import progress
-from .diagnostics.plugin import WorkerPlugin, SchedulerPlugin, PipInstall
 from .client import (
     Client,
-    Executor,
     CompatibleExecutor,
-    wait,
+    Executor,
+    Future,
     as_completed,
     default_client,
     fire_and_forget,
-    Future,
     futures_of,
+    get_task_metadata,
     get_task_stream,
     performance_report,
-    get_task_metadata,
+    wait,
 )
+from .core import Status, connect, rpc
+from .deploy import Adaptive, LocalCluster, SpecCluster, SSHCluster
+from .diagnostics.plugin import PipInstall, SchedulerPlugin, WorkerPlugin
+from .diagnostics.progressbar import progress
+from .event import Event
 from .lock import Lock
 from .multi_lock import MultiLock
 from .nanny import Nanny
 from .pubsub import Pub, Sub
 from .queues import Queue
+from .scheduler import Scheduler
 from .security import Security
 from .semaphore import Semaphore
-from .event import Event
-from .scheduler import Scheduler
 from .threadpoolexecutor import rejoin
-from .utils import sync, TimeoutError, CancelledError
+from .utils import CancelledError, TimeoutError, sync
 from .variable import Variable
-from .worker import Worker, get_worker, get_client, secede, Reschedule
+from .worker import Reschedule, Worker, get_client, get_worker, secede
 from .worker_client import local_client, worker_client
-from ._version import get_versions
-
 versions = get_versions()
 __version__ = versions["version"]
 __git_revision__ = versions["full-revisionid"]
diff --git a/distributed/_concurrent_futures_thread.py b/distributed/_concurrent_futures_thread.py
index b26da12cb7a..1b6f328991f 100644
--- a/distributed/_concurrent_futures_thread.py
+++ b/distributed/_concurrent_futures_thread.py
@@ -8,16 +8,17 @@
 __author__ = "Brian Quinlan (brian@sweetapp.com)"
 
 import atexit
-from concurrent.futures import _base
 import itertools
+from concurrent.futures import _base
 
 try:
     import queue
 except ImportError:
     import Queue as queue
+
+import os
 import threading
 import weakref
-import os
 
 # Workers are created as daemon threads. This is done to allow the interpreter
 # to exit when there are still idle threads in a ThreadPoolExecutor's thread
diff --git a/distributed/_ipython_utils.py b/distributed/_ipython_utils.py
index 8aa1fe7ad7b..8e83a78ff4f 100644
--- a/distributed/_ipython_utils.py
+++ b/distributed/_ipython_utils.py
@@ -12,19 +12,17 @@
 except ImportError:
     # Python 2
     import Queue as queue
-from subprocess import Popen
+
 import sys
-from threading import Thread
+from subprocess import Popen
+from threading import Event, Thread
 from uuid import uuid4
 
-from tornado.gen import TimeoutError
-from tornado.ioloop import IOLoop
-from threading import Event
-
 from IPython import get_ipython
 from jupyter_client import BlockingKernelClient, write_connection_file
 from jupyter_core.paths import jupyter_runtime_dir
-
+from tornado.gen import TimeoutError
+from tornado.ioloop import IOLoop
 
 OUTPUT_TIMEOUT = 10
diff --git a/distributed/actor.py b/distributed/actor.py
index b9149fec7f4..0facdda4cb8 100644
--- a/distributed/actor.py
+++ b/distributed/actor.py
@@ -5,7 +5,7 @@
 
 from .client import Future, default_client
 from .protocol import to_serialize
-from .utils import iscoroutinefunction, thread_state, sync
+from .utils import iscoroutinefunction, sync, thread_state
 from .utils_comm import WrappedKey
 from .worker import get_worker
diff --git a/distributed/batched.py b/distributed/batched.py
index 7b2523fc1e0..dc20de6f220 100644
--- a/distributed/batched.py
+++ b/distributed/batched.py
@@ -1,5 +1,5 @@
-from collections import deque
 import logging
+from collections import deque
 
 import dask
 from tornado import gen, locks
@@ -8,7 +8,6 @@
 from .core import CommClosedError
 from .utils import parse_timedelta
 
-
 logger = logging.getLogger(__name__)
diff --git a/distributed/cfexecutor.py b/distributed/cfexecutor.py
index e11c96c2821..c86c989abac 100644
--- a/distributed/cfexecutor.py
+++ b/distributed/cfexecutor.py
@@ -1,12 +1,11 @@
-import concurrent.futures as cf
 import weakref
+from concurrent import futures as cf
 
 from tlz import merge
-
 from tornado import gen
 
 from .metrics import time
-from .utils import sync, TimeoutError
+from .utils import TimeoutError, sync
 
 
 @gen.coroutine
diff --git a/distributed/cli/dask_scheduler.py b/distributed/cli/dask_scheduler.py
index 335fcd63b5c..702772667d2 100755
--- a/distributed/cli/dask_scheduler.py
+++ b/distributed/cli/dask_scheduler.py
@@ -1,6 +1,6 @@
 import atexit
-import logging
 import gc
+import logging
 import os
 import re
 import sys
@@ -8,17 +8,16 @@
 
 import click
 import dask
-
 from tornado.ioloop import IOLoop
 
 from distributed import Scheduler
-from distributed.preloading import validate_preload_argv
 from distributed.cli.utils import check_python_3, install_signal_handlers
-from distributed.utils import deserialize_for_cli
+from distributed.preloading import validate_preload_argv
 from distributed.proctitle import (
     enable_proctitle_on_children,
     enable_proctitle_on_current,
 )
+from distributed.utils import deserialize_for_cli
 
 logger = logging.getLogger("distributed.scheduler")
diff --git a/distributed/cli/dask_spec.py b/distributed/cli/dask_spec.py
index 299878a3a46..9d4b4e4ca45 100644
--- a/distributed/cli/dask_spec.py
+++ b/distributed/cli/dask_spec.py
@@ -1,11 +1,12 @@
 import asyncio
-import click
 import json
 import os
 import sys
-import yaml
 
+import click
 import dask.config
+import yaml
+
 from distributed.deploy.spec import run_spec
 from distributed.utils import deserialize_for_cli
diff --git a/distributed/cli/dask_ssh.py b/distributed/cli/dask_ssh.py
index f592c572ac5..f81cd73d495 100755
--- a/distributed/cli/dask_ssh.py
+++ b/distributed/cli/dask_ssh.py
@@ -1,7 +1,7 @@
-from distributed.deploy.old_ssh import SSHCluster
 import click
 
 from distributed.cli.utils import check_python_3
+from distributed.deploy.old_ssh import SSHCluster
 
 
 @click.command(
diff --git a/distributed/cli/dask_worker.py b/distributed/cli/dask_worker.py
index 3d60ce35603..dcd60f2e540 100755
--- a/distributed/cli/dask_worker.py
+++ b/distributed/cli/dask_worker.py
@@ -1,16 +1,19 @@
 import asyncio
 import atexit
-from contextlib import suppress
-import logging
 import gc
+import logging
 import os
 import signal
 import sys
 import warnings
+from contextlib import suppress
 
 import click
 import dask
 from dask.system import CPU_COUNT
+from tlz import valmap
+from tornado.ioloop import IOLoop, TimeoutError
+
 from distributed import Nanny
 from distributed.cli.utils import check_python_3, install_signal_handlers
 from distributed.comm import get_address_host_port
@@ -22,9 +25,6 @@
 )
 from distributed.utils import deserialize_for_cli, import_term
 
-from tlz import valmap
-from tornado.ioloop import IOLoop, TimeoutError
-
 logger = logging.getLogger("distributed.dask_worker")
diff --git a/distributed/cli/tests/test_dask_scheduler.py b/distributed/cli/tests/test_dask_scheduler.py
index 6be7f3c365d..2e938db558f 100644
--- a/distributed/cli/tests/test_dask_scheduler.py
+++ b/distributed/cli/tests/test_dask_scheduler.py
@@ -3,26 +3,26 @@
 pytest.importorskip("requests")
 
 import os
-import requests
-import socket
 import shutil
+import socket
 import sys
 import tempfile
 from time import sleep
 
+import requests
 from click.testing import CliRunner
 
 import distributed
-from distributed import Scheduler, Client
+import distributed.cli.dask_scheduler
+from distributed import Client, Scheduler
+from distributed.metrics import time
 from distributed.utils import get_ip, get_ip_interface, tmpfile
+from distributed.utils_test import loop  # noqa: F401
 from distributed.utils_test import (
-    popen,
     assert_can_connect_from_everywhere_4_6,
     assert_can_connect_locally_4,
+    popen,
 )
-from distributed.utils_test import loop  # noqa: F401
-from distributed.metrics import time
-import distributed.cli.dask_scheduler
 
 
 def test_defaults(loop):
diff --git a/distributed/cli/tests/test_dask_spec.py b/distributed/cli/tests/test_dask_spec.py
index a18b9fb383a..0a5f64fc484 100644
--- a/distributed/cli/tests/test_dask_spec.py
+++ b/distributed/cli/tests/test_dask_spec.py
@@ -1,10 +1,11 @@
-import pytest
 import sys
+
+import pytest
 import yaml
 
 from distributed import Client
-from distributed.utils_test import popen
 from distributed.utils_test import cleanup  # noqa: F401
+from distributed.utils_test import popen
 
 
 @pytest.mark.asyncio
diff --git a/distributed/cli/tests/test_dask_ssh.py b/distributed/cli/tests/test_dask_ssh.py
index 9be8cb06f62..b73cd66d914 100644
--- a/distributed/cli/tests/test_dask_ssh.py
+++ b/distributed/cli/tests/test_dask_ssh.py
@@ -1,4 +1,5 @@
 from click.testing import CliRunner
+
 from distributed.cli.dask_ssh import main
diff --git a/distributed/cli/tests/test_dask_worker.py b/distributed/cli/tests/test_dask_worker.py
index 853b9964128..98724c6754b 100644
--- a/distributed/cli/tests/test_dask_worker.py
+++ b/distributed/cli/tests/test_dask_worker.py
@@ -1,22 +1,29 @@
 import asyncio
+
 import pytest
 from click.testing import CliRunner
 
 pytest.importorskip("requests")
 
-import requests
-import sys
 import os
-from time import sleep
+import sys
 from multiprocessing import cpu_count
+from time import sleep
+
+import requests
 
 import distributed.cli.dask_worker
 from distributed import Client, Scheduler
 from distributed.deploy.utils import nprocesses_nthreads
 from distributed.metrics import time
-from distributed.utils import sync, tmpfile, parse_ports
-from distributed.utils_test import popen, terminate_process, wait_for_port
-from distributed.utils_test import loop, cleanup  # noqa: F401
+from distributed.utils import parse_ports, sync, tmpfile
+from distributed.utils_test import (  # noqa: F401
+    cleanup,
+    loop,
+    popen,
+    terminate_process,
+    wait_for_port,
+)
 
 
 def test_nanny_worker_ports(loop):
diff --git a/distributed/cli/tests/test_tls_cli.py b/distributed/cli/tests/test_tls_cli.py
index def31bc244d..9301b47ab3e 100644
--- a/distributed/cli/tests/test_tls_cli.py
+++ b/distributed/cli/tests/test_tls_cli.py
@@ -1,16 +1,15 @@
 from time import sleep
 
 from distributed import Client
+from distributed.metrics import time
+from distributed.utils_test import loop  # noqa: F401
 from distributed.utils_test import (
-    popen,
     get_cert,
     new_config_file,
-    tls_security,
+    popen,
     tls_only_config,
+    tls_security,
 )
-from distributed.utils_test import loop  # noqa: F401
-from distributed.metrics import time
-
 
 ca_file = get_cert("tls-ca-cert.pem")
 cert = get_cert("tls-cert.pem")
diff --git a/distributed/cli/utils.py b/distributed/cli/utils.py
index c1bff051534..b2515faff11 100644
--- a/distributed/cli/utils.py
+++ b/distributed/cli/utils.py
@@ -1,6 +1,5 @@
 from tornado.ioloop import IOLoop
 
-
 py3_err_msg = """
 Warning: Your terminal does not set locales.
diff --git a/distributed/client.py b/distributed/client.py
index 2669a25473b..229a49c0689 100644
--- a/distributed/client.py
+++ b/distributed/client.py
@@ -1,37 +1,36 @@
 import asyncio
 import atexit
-from collections import defaultdict
-from collections.abc import Iterator
-from concurrent.futures import ThreadPoolExecutor
-from concurrent.futures._base import DoneAndNotDoneFutures
-from contextlib import contextmanager, suppress
-from contextvars import ContextVar
 import copy
 import errno
-from functools import partial
 import html
 import inspect
 import json
 import logging
-from numbers import Number
 import os
+import socket
 import sys
-import uuid
 import threading
-import socket
-from queue import Queue as pyQueue
+import uuid
 import warnings
 import weakref
+from collections import defaultdict
+from collections.abc import Iterator
+from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures._base import DoneAndNotDoneFutures
+from contextlib import contextmanager, suppress
+from contextvars import ContextVar
+from functools import partial
+from numbers import Number
+from queue import Queue as pyQueue
 
 import dask
-from dask.base import tokenize, normalize_token, collections_to_dsk
+from dask.base import collections_to_dsk, normalize_token, tokenize
+from dask.compatibility import apply
 from dask.core import flatten
+from dask.highlevelgraph import HighLevelGraph
 from dask.optimization import SubgraphCallable
-from dask.compatibility import apply
 from dask.utils import ensure_dict, format_bytes, funcname, stringify
-from dask.highlevelgraph import HighLevelGraph
-
-from tlz import first, groupby, merge, valmap, keymap, partition_all
+from tlz import first, groupby, keymap, merge, partition_all, valmap
 
 try:
     from dask.delayed import single_key
@@ -40,24 +39,18 @@
 from tornado import gen
 from tornado.ioloop import IOLoop, PeriodicCallback
 
+from . import versions as version_module
 from .batched import BatchedSend
-from .utils_comm import (
-    WrappedKey,
-    unpack_remotedata,
-    pack_data,
-    scatter_to_workers,
-    gather_from_workers,
-    retry_operation,
-)
 from .cfexecutor import ClientExecutor
 from .core import (
-    connect,
-    rpc,
-    clean_exception,
     CommClosedError,
-    PooledRPCCall,
     ConnectionPool,
+    PooledRPCCall,
+    clean_exception,
+    connect,
+    rpc,
 )
+from .diagnostics.plugin import UploadFile, WorkerPlugin
 from .metrics import time
 from .protocol import to_serialize
 from .protocol.pickle import dumps, loads
@@ -66,26 +59,31 @@
 from .security import Security
 from .sizeof import sizeof
 from .threadpoolexecutor import rejoin
-from .worker import get_client, get_worker, secede
-from .diagnostics.plugin import UploadFile, WorkerPlugin
 from .utils import (
     All,
-    sync,
-    log_errors,
+    Any,
+    CancelledError,
+    LoopRunner,
+    TimeoutError,
+    format_dashboard_link,
+    has_keyword,
     key_split,
-    thread_state,
+    log_errors,
     no_default,
-    LoopRunner,
     parse_timedelta,
     shutting_down,
-    Any,
-    has_keyword,
-    format_dashboard_link,
-    TimeoutError,
-    CancelledError,
+    sync,
+    thread_state,
 )
-from . import versions as version_module
-
+from .utils_comm import (
+    WrappedKey,
+    gather_from_workers,
+    pack_data,
+    retry_operation,
+    scatter_to_workers,
+    unpack_remotedata,
+)
+from .worker import get_client, get_worker, secede
 
 logger = logging.getLogger(__name__)
@@ -3941,7 +3939,7 @@ async def _get_task_stream(
         source, figure = task_stream_figure(sizing_mode="stretch_both")
         source.data.update(rects)
         if plot == "save":
-            from bokeh.plotting import save, output_file
+            from bokeh.plotting import output_file, save
 
             output_file(filename=filename, title="Dask Task Stream")
             save(figure, filename=filename, resources=bokeh_resources)
diff --git a/distributed/comm/__init__.py b/distributed/comm/__init__.py
index 2ff679ada3d..af6d30812d3 100644
--- a/distributed/comm/__init__.py
+++ b/distributed/comm/__init__.py
@@ -1,21 +1,20 @@
 from .addressing import (
-    parse_address,
-    unparse_address,
+    get_address_host,
+    get_address_host_port,
+    get_local_address_for,
     normalize_address,
+    parse_address,
     parse_host_port,
-    unparse_host_port,
     resolve_address,
-    get_address_host_port,
-    get_address_host,
-    get_local_address_for,
+    unparse_address,
+    unparse_host_port,
 )
-from .core import connect, listen, Comm, CommClosedError
+from .core import Comm, CommClosedError, connect, listen
 from .utils import get_tcp_server_address
 
 
 def _register_transports():
-    from . import inproc
-    from . import tcp
+    from . import inproc, tcp
 
     try:
         from . import ucx
diff --git a/distributed/comm/addressing.py b/distributed/comm/addressing.py
index 537ecd4ef23..949fa31bcd1 100644
--- a/distributed/comm/addressing.py
+++ b/distributed/comm/addressing.py
@@ -1,9 +1,9 @@
 import itertools
+
 import dask
 
-from . import registry
 from ..utils import get_ip_interface
-
+from . import registry
 
 DEFAULT_SCHEME = dask.config.get("distributed.comm.default-scheme")
diff --git a/distributed/comm/core.py b/distributed/comm/core.py
index 2a01ec3d728..6ee79723736 100644
--- a/distributed/comm/core.py
+++ b/distributed/comm/core.py
@@ -1,21 +1,20 @@
-from abc import ABC, abstractmethod, abstractproperty
 import asyncio
-from contextlib import suppress
 import inspect
 import logging
 import random
 import sys
 import weakref
+from abc import ABC, abstractmethod, abstractproperty
+from contextlib import suppress
 
 import dask
 
 from ..metrics import time
-from ..utils import parse_timedelta, TimeoutError
+from ..protocol import pickle
+from ..protocol.compression import get_default_compression
+from ..utils import TimeoutError, parse_timedelta
 from . import registry
 from .addressing import parse_address
-from ..protocol.compression import get_default_compression
-from ..protocol import pickle
-
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/comm/inproc.py b/distributed/comm/inproc.py
index d93377975ba..7374fba188d 100644
--- a/distributed/comm/inproc.py
+++ b/distributed/comm/inproc.py
@@ -1,21 +1,19 @@
 import asyncio
-from collections import deque, namedtuple
 import itertools
 import logging
 import os
 import threading
-import weakref
 import warnings
+import weakref
+from collections import deque, namedtuple
 
 from tornado.concurrent import Future
 from tornado.ioloop import IOLoop
 
 from ..protocol import nested_deserialize
 from ..utils import get_ip
-
+from .core import Comm, CommClosedError, Connector, Listener
 from .registry import Backend, backends
-from .core import Comm, Connector, Listener, CommClosedError
-
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/comm/tcp.py b/distributed/comm/tcp.py
index b79a5105a0d..3ba49b7f869 100644
--- a/distributed/comm/tcp.py
+++ b/distributed/comm/tcp.py
@@ -2,11 +2,12 @@
 import functools
 import logging
 import socket
-from ssl import SSLError
 import struct
 import sys
-from tornado import gen
 import weakref
+from ssl import SSLError
+
+from tornado import gen
 
 try:
     import ssl
@@ -19,16 +20,14 @@
 from tornado.tcpclient import TCPClient
 from tornado.tcpserver import TCPServer
 
+from ..protocol.utils import pack_frames_prelude, unpack_frames
 from ..system import MEMORY_LIMIT
 from ..threadpoolexecutor import ThreadPoolExecutor
 from ..utils import ensure_ip, get_ip, get_ipv6, nbytes, parse_timedelta, shutting_down
-
-from .registry import Backend, backends
 from .addressing import parse_host_port, unparse_host_port
-from .core import Comm, Connector, Listener, CommClosedError, FatalCommClosedError
-from .utils import to_frames, from_frames, get_tcp_server_address, ensure_concrete_host
-from ..protocol.utils import pack_frames_prelude, unpack_frames
-
+from .core import Comm, CommClosedError, Connector, FatalCommClosedError, Listener
+from .registry import Backend, backends
+from .utils import ensure_concrete_host, from_frames, get_tcp_server_address, to_frames
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/comm/tests/test_comms.py b/distributed/comm/tests/test_comms.py
index 56f3c7a1b11..0b323669223 100644
--- a/distributed/comm/tests/test_comms.py
+++ b/distributed/comm/tests/test_comms.py
@@ -7,10 +7,12 @@
 from functools import partial
 
 import dask
-
-import distributed
 import pkg_resources
 import pytest
+from tornado import ioloop
+from tornado.concurrent import Future
+
+import distributed
 from distributed.comm import (
     CommClosedError,
     connect,
@@ -38,8 +40,6 @@
     has_ipv6,
     requires_ipv6,
 )
-from tornado import ioloop
-from tornado.concurrent import Future
 
 EXTERNAL_IP4 = get_ip()
 if has_ipv6():
diff --git a/distributed/comm/tests/test_ucx.py b/distributed/comm/tests/test_ucx.py
index aa5095e2f3c..2a388904475 100644
--- a/distributed/comm/tests/test_ucx.py
+++ b/distributed/comm/tests/test_ucx.py
@@ -1,16 +1,15 @@
 import asyncio
+
 import pytest
 
 ucp = pytest.importorskip("ucp")
 
-from distributed import Client, Worker, Scheduler, wait
-from distributed.comm import ucx, listen, connect
+from distributed import Client, Scheduler, Worker, wait
+from distributed.comm import connect, listen, parse_address, ucx
 from distributed.comm.registry import backends, get_backend
-from distributed.comm import ucx, parse_address
-from distributed.protocol import to_serialize
 from distributed.deploy.local import LocalCluster
-from distributed.utils_test import gen_test, loop, inc, cleanup, popen  # noqa: 401
-
+from distributed.protocol import to_serialize
+from distributed.utils_test import cleanup, gen_test, inc, loop, popen  # noqa: 401
 
 try:
     HOST = ucp.get_address()
diff --git a/distributed/comm/tests/test_ucx_config.py b/distributed/comm/tests/test_ucx_config.py
index 43eb45acb23..c266bb75148 100644
--- a/distributed/comm/tests/test_ucx_config.py
+++ b/distributed/comm/tests/test_ucx_config.py
@@ -1,12 +1,13 @@
-import pytest
 from time import sleep
 
 import dask
+import pytest
 from dask.utils import format_bytes
+
 from distributed import Client
-from distributed.utils_test import gen_test, loop, inc, cleanup, popen  # noqa: 401
-from distributed.utils import get_ip
 from distributed.comm.ucx import _scrub_ucx_config
+from distributed.utils import get_ip
+from distributed.utils_test import cleanup, gen_test, inc, loop, popen  # noqa: 401
 
 try:
     HOST = get_ip()
diff --git a/distributed/comm/ucx.py b/distributed/comm/ucx.py
index 0d91b404ee2..7bd06776894 100644
--- a/distributed/comm/ucx.py
+++ b/distributed/comm/ucx.py
@@ -11,19 +11,19 @@
 
 import dask
 
-from .addressing import parse_host_port, unparse_host_port
-from .core import Comm, Connector, Listener, CommClosedError
-from .registry import Backend, backends
-from .utils import ensure_concrete_host, to_frames, from_frames
 from ..utils import (
+    CancelledError,
     ensure_ip,
     get_ip,
     get_ipv6,
-    nbytes,
     log_errors,
-    CancelledError,
+    nbytes,
     parse_bytes,
 )
+from .addressing import parse_host_port, unparse_host_port
+from .core import Comm, CommClosedError, Connector, Listener
+from .registry import Backend, backends
+from .utils import ensure_concrete_host, from_frames, to_frames
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/comm/utils.py b/distributed/comm/utils.py
index b3ac85feed8..15b9244329a 100644
--- a/distributed/comm/utils.py
+++ b/distributed/comm/utils.py
@@ -9,7 +9,6 @@
 from .. import protocol
 from ..utils import get_ip, get_ipv6, nbytes, offload
 
-
 logger = logging.getLogger(__name__)
diff --git a/distributed/core.py b/distributed/core.py
index bc2930a4f78..3e099ec1d3d 100644
--- a/distributed/core.py
+++ b/distributed/core.py
@@ -1,15 +1,15 @@
 import asyncio
-from collections import defaultdict
-from contextlib import suppress
-from enum import Enum
-from functools import partial
 import inspect
 import logging
 import threading
 import traceback
 import uuid
-import weakref
 import warnings
+import weakref
+from collections import defaultdict
+from contextlib import suppress
+from enum import Enum
+from functools import partial
 
 import dask
 import tblib
@@ -17,28 +17,27 @@
 from tornado import gen
 from tornado.ioloop import IOLoop, PeriodicCallback
 
+from . import profile, protocol
 from .comm import (
+    CommClosedError,
     connect,
+    get_address_host_port,
     listen,
-    CommClosedError,
     normalize_address,
     unparse_host_port,
-    get_address_host_port,
 )
 from .metrics import time
-from . import profile
 from .system_monitor import SystemMonitor
 from .utils import (
-    is_coroutine_function,
-    get_traceback,
-    truncate_exception,
-    shutting_down,
-    parse_timedelta,
-    has_keyword,
     CancelledError,
     TimeoutError,
+    get_traceback,
+    has_keyword,
+    is_coroutine_function,
+    parse_timedelta,
+    shutting_down,
+    truncate_exception,
 )
-from . import protocol
 
 
 class Status(Enum):
diff --git a/distributed/counter.py b/distributed/counter.py
index feffb69ce8c..d6c1dad9ecc 100644
--- a/distributed/counter.py
+++ b/distributed/counter.py
@@ -2,7 +2,6 @@
 
 from tornado.ioloop import IOLoop, PeriodicCallback
 
-
 try:
     from crick import TDigest
 except ImportError:
diff --git a/distributed/dashboard/components/__init__.py b/distributed/dashboard/components/__init__.py
index 78d60108c8e..8f8fd09ee5b 100644
--- a/distributed/dashboard/components/__init__.py
+++ b/distributed/dashboard/components/__init__.py
@@ -1,34 +1,34 @@
 import asyncio
+import weakref
 from bisect import bisect
 from operator import add
 from time import time
-import weakref
 
-from bokeh.layouts import row, column
+import dask
+from bokeh.layouts import column, row
 from bokeh.models import (
+    BoxZoomTool,
+    Button,
     ColumnDataSource,
-    Plot,
     DataRange1d,
-    LinearAxis,
     HoverTool,
-    BoxZoomTool,
-    ResetTool,
+    LinearAxis,
+    OpenURL,
     PanTool,
-    WheelZoomTool,
-    Range1d,
+    Plot,
     Quad,
-    TapTool,
-    OpenURL,
-    Button,
+    Range1d,
+    ResetTool,
     Select,
+    TapTool,
+    WheelZoomTool,
 )
 from bokeh.palettes import Spectral9
 from bokeh.plotting import figure
-import dask
 from tornado import gen
 
-from distributed.dashboard.utils import without_property_validation, BOKEH_VERSION
 from distributed import profile
+from distributed.dashboard.utils import BOKEH_VERSION, without_property_validation
 from distributed.utils import log_errors, parse_timedelta
 
 if dask.config.get("distributed.dashboard.export-tool"):
diff --git a/distributed/dashboard/components/nvml.py b/distributed/dashboard/components/nvml.py
index 34cce3c4bc7..9a77f22eee8 100644
--- a/distributed/dashboard/components/nvml.py
+++ b/distributed/dashboard/components/nvml.py
@@ -1,22 +1,21 @@
 import math
 
-from distributed.dashboard.components import DashboardComponent, add_periodic_callback
-
-from bokeh.plotting import figure
 from bokeh.models import (
-    ColumnDataSource,
     BasicTicker,
+    ColumnDataSource,
+    HoverTool,
     NumeralTickFormatter,
-    TapTool,
     OpenURL,
-    HoverTool,
+    TapTool,
 )
-from tornado import escape
+from bokeh.plotting import figure
 from dask.utils import format_bytes
-from distributed.utils import log_errors
-from distributed.dashboard.components.scheduler import BOKEH_THEME, TICKS_1024
-from distributed.dashboard.utils import without_property_validation, update
+from tornado import escape
 
+from distributed.dashboard.components import DashboardComponent, add_periodic_callback
+from distributed.dashboard.components.scheduler import BOKEH_THEME, TICKS_1024
+from distributed.dashboard.utils import update, without_property_validation
+from distributed.utils import log_errors
 
 try:
     import pynvml
diff --git a/distributed/dashboard/components/scheduler.py b/distributed/dashboard/components/scheduler.py
index 49032665150..f13d421b349 100644
--- a/distributed/dashboard/components/scheduler.py
+++ b/distributed/dashboard/components/scheduler.py
@@ -1,46 +1,46 @@
-from collections import defaultdict
 import logging
 import math
-from numbers import Number
 import operator
 import os
+from collections import defaultdict
+from numbers import Number
 
+import dask
+from bokeh.io import curdoc
 from bokeh.layouts import column, row
 from bokeh.models import (
-    ColumnDataSource,
+    AdaptiveTicker,
+    BasicTicker,
+    BoxSelectTool,
+    BoxZoomTool,
+    CDSView,
     ColorBar,
+    ColumnDataSource,
     DataRange1d,
+    GroupFilter,
     HoverTool,
-    ResetTool,
-    PanTool,
-    WheelZoomTool,
-    TapTool,
+    NumberFormatter,
+    NumeralTickFormatter,
     OpenURL,
+    Panel,
+    PanTool,
     Range1d,
-    value,
-    NumeralTickFormatter,
-    BoxZoomTool,
-    AdaptiveTicker,
-    BasicTicker,
-    NumberFormatter,
-    BoxSelectTool,
-    GroupFilter,
-    CDSView,
+    ResetTool,
     Tabs,
-    Panel,
+    TapTool,
     Title,
+    WheelZoomTool,
+    value,
 )
 from bokeh.models.widgets import DataTable, TableColumn
-from bokeh.plotting import figure
 from bokeh.palettes import Viridis11
+from bokeh.plotting import figure
 from bokeh.themes import Theme
-from bokeh.transform import factor_cmap, linear_cmap, cumsum
-from bokeh.io import curdoc
-import dask
+from bokeh.transform import cumsum, factor_cmap, linear_cmap
 from dask import config
 from dask.utils import format_bytes, key_split
 from tlz import pipe
-from tlz.curried import map, concat, groupby
+from tlz.curried import concat, groupby, map
 from tornado import escape
 
 try:
@@ -51,24 +51,24 @@
 from distributed.dashboard.components import add_periodic_callback
 from distributed.dashboard.components.shared import (
     DashboardComponent,
-    ProfileTimePlot,
     ProfileServer,
+    ProfileTimePlot,
     SystemMonitor,
 )
 from distributed.dashboard.utils import (
-    transpose,
     BOKEH_VERSION,
     PROFILING,
-    without_property_validation,
+    transpose,
     update,
+    without_property_validation,
 )
-from distributed.metrics import time
-from distributed.utils import log_errors, format_time, parse_timedelta
-from distributed.diagnostics.progress_stream import color_of, progress_quads
 from distributed.diagnostics.graph_layout import GraphLayout
+from distributed.diagnostics.progress_stream import color_of, progress_quads
 from distributed.diagnostics.task_stream import TaskStreamPlugin
 from distributed.diagnostics.task_stream import color_of as ts_color_of
 from distributed.diagnostics.task_stream import colors as ts_color_lookup
+from distributed.metrics import time
+from distributed.utils import format_time, log_errors, parse_timedelta
 
 if dask.config.get("distributed.dashboard.export-tool"):
     from distributed.dashboard.export_tool import ExportTool
diff --git a/distributed/dashboard/components/shared.py b/distributed/dashboard/components/shared.py
index 037b256b188..5a20c0724d0 100644
--- a/distributed/dashboard/components/shared.py
+++ b/distributed/dashboard/components/shared.py
@@ -1,31 +1,31 @@
 import asyncio
 import weakref
 
-from bokeh.layouts import row, column
+import dask
+import tlz as toolz
+from bokeh.layouts import column, row
 from bokeh.models import (
+    Button,
     ColumnDataSource,
     DataRange1d,
     HoverTool,
+    NumeralTickFormatter,
     Range1d,
-    Button,
     Select,
-    NumeralTickFormatter,
 )
 from bokeh.palettes import Spectral9
 from bokeh.plotting import figure
-import dask
 from tornado import gen
-import tlz as toolz
 
+from distributed import profile
+from distributed.compatibility import WINDOWS
 from distributed.dashboard.components import DashboardComponent
 from distributed.dashboard.utils import (
-    without_property_validation,
     BOKEH_VERSION,
     update,
+    without_property_validation,
 )
-from distributed import profile
 from distributed.utils import log_errors, parse_timedelta
-from distributed.compatibility import WINDOWS
 
 if dask.config.get("distributed.dashboard.export-tool"):
     from distributed.dashboard.export_tool import ExportTool
diff --git a/distributed/dashboard/components/worker.py b/distributed/dashboard/components/worker.py
index ee9ad65d2e1..93dcfd71d79 100644
--- a/distributed/dashboard/components/worker.py
+++ b/distributed/dashboard/components/worker.py
@@ -2,22 +2,21 @@
 import math
 import os
 
-from bokeh.layouts import row, column
+from bokeh.layouts import column, row
 from bokeh.models import (
+    BoxZoomTool,
     ColumnDataSource,
     DataRange1d,
     HoverTool,
-    BoxZoomTool,
-    ResetTool,
-    PanTool,
-    WheelZoomTool,
     NumeralTickFormatter,
+    PanTool,
+    ResetTool,
     Select,
+    WheelZoomTool,
 )
-
 from bokeh.models.widgets import DataTable, TableColumn
-from bokeh.plotting import figure
 from bokeh.palettes import RdBu
+from bokeh.plotting import figure
 from bokeh.themes import Theme
 from dask.utils import format_bytes
 from tlz import merge, partition_all
@@ -25,15 +24,14 @@
 from distributed.dashboard.components import add_periodic_callback
 from distributed.dashboard.components.shared import (
     DashboardComponent,
-    ProfileTimePlot,
     ProfileServer,
+    ProfileTimePlot,
     SystemMonitor,
 )
-from distributed.dashboard.utils import transpose, without_property_validation, update
+from distributed.dashboard.utils import transpose, update, without_property_validation
 from distributed.diagnostics.progress_stream import color_of
 from distributed.metrics import time
-from distributed.utils import log_errors, key_split, format_time
-
+from distributed.utils import format_time, key_split, log_errors
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/dashboard/core.py b/distributed/dashboard/core.py
index 916504f4d04..75f1ff9c1b7 100644
--- a/distributed/dashboard/core.py
+++ b/distributed/dashboard/core.py
@@ -1,6 +1,6 @@
-from distutils.version import LooseVersion
 import functools
 import warnings
+from distutils.version import LooseVersion
 
 import bokeh
 from bokeh.server.server import BokehTornado
@@ -9,11 +9,11 @@
     from bokeh.server.util import create_hosts_allowlist
 except ImportError:
     from bokeh.server.util import create_hosts_whitelist as create_hosts_allowlist
-from bokeh.application.handlers.function import FunctionHandler
-from bokeh.application import Application
+
 import dask
 import toolz
-
+from bokeh.application import Application
+from bokeh.application.handlers.function import FunctionHandler
 
 if LooseVersion(bokeh.__version__) < LooseVersion("0.13.0"):
     warnings.warn(
diff --git a/distributed/dashboard/export_tool.py b/distributed/dashboard/export_tool.py
index d93d21b881b..a9be2d7a4bf 100644
--- a/distributed/dashboard/export_tool.py
+++ b/distributed/dashboard/export_tool.py
@@ -6,7 +6,6 @@
 from bokeh.resources import CDN
 from bokeh.util.compiler import JavaScript
 
-
 fn = __file__
 fn = os.path.join(os.path.dirname(fn), "export_tool.js")
 with open(fn) as f:
diff --git a/distributed/dashboard/scheduler.py b/distributed/dashboard/scheduler.py
index 09a4339b50a..565e46a5e30 100644
--- a/distributed/dashboard/scheduler.py
+++ b/distributed/dashboard/scheduler.py
@@ -1,44 +1,43 @@
 from urllib.parse import urljoin
 
-from tornado.ioloop import IOLoop
 from tornado import web
+from tornado.ioloop import IOLoop
 
 try:
     import numpy as np
 except ImportError:
     np = False
 
-from .core import BokehApplication
-from .components.worker import counters_doc
+from .components.nvml import gpu_memory_doc, gpu_utilization_doc  # noqa: 1708
 from .components.scheduler import (
-    systemmonitor_doc,
-    stealing_doc,
-    workers_doc,
     events_doc,
-    tasks_doc,
-    status_doc,
-    profile_doc,
-    profile_server_doc,
     graph_doc,
-    individual_task_stream_doc,
-    individual_progress_doc,
-    individual_graph_doc,
-    individual_profile_doc,
-    individual_profile_server_doc,
-    individual_nbytes_doc,
-    individual_cpu_doc,
-    individual_nprocessing_doc,
-    individual_workers_doc,
+    individual_aggregate_time_per_action_doc,
     individual_bandwidth_types_doc,
     individual_bandwidth_workers_doc,
-    individual_memory_by_key_doc,
     individual_compute_time_per_key_doc,
-    individual_aggregate_time_per_action_doc,
+    individual_cpu_doc,
+    individual_graph_doc,
+    individual_memory_by_key_doc,
+    individual_nbytes_doc,
+    individual_nprocessing_doc,
+    individual_profile_doc,
+    individual_profile_server_doc,
+    individual_progress_doc,
     individual_systemmonitor_doc,
+    individual_task_stream_doc,
+    individual_workers_doc,
+    profile_doc,
+    profile_server_doc,
+    status_doc,
+    stealing_doc,
+    systemmonitor_doc,
+    tasks_doc,
+    workers_doc,
 )
+from .components.worker import counters_doc
+from .core import BokehApplication
 from .worker import counters_doc
-from .components.nvml import gpu_memory_doc, gpu_utilization_doc  # noqa: 1708
-
 
 template_variables = {
     "pages": ["status", "workers", "tasks", "system", "profile", "graph", "info"]
diff --git a/distributed/dashboard/tests/test_components.py b/distributed/dashboard/tests/test_components.py
index a3e444e17e6..bc9f6c74849 100644
--- a/distributed/dashboard/tests/test_components.py
+++ b/distributed/dashboard/tests/test_components.py
@@ -6,12 +6,12 @@
 
 from bokeh.models import ColumnDataSource, Model
 
-from distributed.utils_test import slowinc, gen_cluster
 from distributed.dashboard.components.shared import (
     Processing,
     ProfilePlot,
     ProfileTimePlot,
 )
+from distributed.utils_test import gen_cluster, slowinc
 
 
 @pytest.mark.parametrize("Component", [Processing])
diff --git a/distributed/dashboard/tests/test_scheduler_bokeh.py b/distributed/dashboard/tests/test_scheduler_bokeh.py
index 9c50ae50434..9e21c003e00 100644
--- a/distributed/dashboard/tests/test_scheduler_bokeh.py
+++ b/distributed/dashboard/tests/test_scheduler_bokeh.py
@@ -8,39 +8,39 @@
 import pytest
 
 pytest.importorskip("bokeh")
+import dask
 from bokeh.server.server import BokehTornado
+from dask.core import flatten
+from dask.utils import stringify
 from tlz import first
 from tornado.httpclient import AsyncHTTPClient, HTTPRequest
 
-import dask
-from dask.core import flatten
-from dask.utils import stringify
 from distributed.client import wait
 from distributed.compatibility import MACOS
-from distributed.metrics import time
-from distributed.utils import format_dashboard_link
-from distributed.utils_test import gen_cluster, inc, dec, slowinc, div, get_cert
-from distributed.dashboard.components.worker import Counters
-from distributed.dashboard.scheduler import applications
+from distributed.dashboard import scheduler
 from distributed.dashboard.components.scheduler import (
-    SystemMonitor,
+    AggregateAction,
+    ComputePerKey,
+    CurrentLoad,
+    Events,
+    MemoryByKey,
+    NBytesHistogram,
     Occupancy,
-    StealingTimeSeries,
+    ProcessingHistogram,
+    ProfileServer,
     StealingEvents,
-    Events,
-    TaskStream,
+    StealingTimeSeries,
+    SystemMonitor,
+    TaskGraph,
     TaskProgress,
-    CurrentLoad,
-    ProcessingHistogram,
-    NBytesHistogram,
+    TaskStream,
     WorkerTable,
-    TaskGraph,
-    ProfileServer,
-    MemoryByKey,
-    AggregateAction,
-    ComputePerKey,
 )
-from distributed.dashboard import scheduler
+from distributed.dashboard.components.worker import Counters
+from distributed.dashboard.scheduler import applications
+from distributed.metrics import time
+from distributed.utils import format_dashboard_link
+from distributed.utils_test import dec, div, gen_cluster, get_cert, inc, slowinc
 
 scheduler.PROFILING = False
diff --git a/distributed/dashboard/tests/test_worker_bokeh.py b/distributed/dashboard/tests/test_worker_bokeh.py
index 6143e837529..b6f5476f618 100644
--- a/distributed/dashboard/tests/test_worker_bokeh.py
+++ b/distributed/dashboard/tests/test_worker_bokeh.py
@@ -10,17 +10,17 @@
 from tornado.httpclient import AsyncHTTPClient
 
 from distributed.client import wait
-from distributed.metrics import time
-from distributed.utils_test import gen_cluster, inc, dec
 from distributed.dashboard.components.worker import (
-    StateTable,
-    CrossFilter,
     CommunicatingStream,
-    ExecutingTimeSeries,
     CommunicatingTimeSeries,
-    SystemMonitor,
     Counters,
+    CrossFilter,
+    ExecutingTimeSeries,
+    StateTable,
+    SystemMonitor,
 )
+from distributed.metrics import time
+from distributed.utils_test import dec, gen_cluster, inc
 
 
 @gen_cluster(
diff --git a/distributed/dashboard/worker.py b/distributed/dashboard/worker.py
index ff9ae3b2f7d..1e65cfcc765 100644
--- a/distributed/dashboard/worker.py
+++ b/distributed/dashboard/worker.py
@@ -1,14 +1,14 @@
+from tornado.ioloop import IOLoop
+
 from .components.worker import (
-    status_doc,
-    crossfilter_doc,
-    systemmonitor_doc,
     counters_doc,
+    crossfilter_doc,
     profile_doc,
     profile_server_doc,
+    status_doc,
+    systemmonitor_doc,
 )
 from .core import BokehApplication
-from tornado.ioloop import IOLoop
-
 
 template_variables = {
     "pages": ["status", "system", "profile", "crossfilter", "profile-server"]
diff --git a/distributed/deploy/__init__.py b/distributed/deploy/__init__.py
index 0148328cd4c..1518942dc4c 100644
--- a/distributed/deploy/__init__.py
+++ b/distributed/deploy/__init__.py
@@ -1,10 +1,10 @@
 from contextlib import suppress
 
+from .adaptive import Adaptive
 from .cluster import Cluster
 from .local import LocalCluster
+from .spec import ProcessInterface, SpecCluster
 from .ssh import SSHCluster
-from .spec import SpecCluster, ProcessInterface
-from .adaptive import Adaptive
 
 with suppress(ImportError):
     from .ssh import SSHCluster
diff --git a/distributed/deploy/adaptive.py b/distributed/deploy/adaptive.py
index d3e9ddb1fbf..ff73015a40a 100644
--- a/distributed/deploy/adaptive.py
+++ b/distributed/deploy/adaptive.py
@@ -1,10 +1,11 @@
-from inspect import isawaitable
 import logging
+from inspect import isawaitable
+
 import dask.config
 
-from .adaptive_core import AdaptiveCore
-from ..utils import log_errors, parse_timedelta
 from ..protocol import pickle
+from ..utils import log_errors, parse_timedelta
+from .adaptive_core import AdaptiveCore
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/deploy/adaptive_core.py b/distributed/deploy/adaptive_core.py
index b74e013c947..7c95096956a 100644
--- a/distributed/deploy/adaptive_core.py
+++ b/distributed/deploy/adaptive_core.py
@@ -2,13 +2,12 @@
 import logging
 import math
 
-from tornado.ioloop import IOLoop, PeriodicCallback
 import tlz as toolz
+from tornado.ioloop import IOLoop, PeriodicCallback
 
 from ..metrics import time
 from ..utils import parse_timedelta
 
-
 logger = logging.getLogger(__name__)
diff --git a/distributed/deploy/cluster.py b/distributed/deploy/cluster.py
index 93a40cfa0f3..c22d21b4376 100644
--- a/distributed/deploy/cluster.py
+++ b/distributed/deploy/cluster.py
@@ -1,28 +1,26 @@
 import asyncio
 import datetime
-from contextlib import suppress
 import logging
 import threading
-import warnings
 import uuid
-from tornado.ioloop import PeriodicCallback
+import warnings
+from contextlib import suppress
 
 import dask.config
 from dask.utils import format_bytes
-
-from .adaptive import Adaptive
+from tornado.ioloop import PeriodicCallback
 
 from ..core import Status
 from ..utils import (
-    log_errors,
-    sync,
     Log,
     Logs,
-    thread_state,
     format_dashboard_link,
+    log_errors,
     parse_timedelta,
+    sync,
+    thread_state,
 )
-
+from .adaptive import Adaptive
 
 logger = logging.getLogger(__name__)
@@ -310,7 +308,7 @@ def _widget(self):
             pass
 
         try:
-            from ipywidgets import Layout, VBox, HBox, IntText, Button, HTML, Accordion
+            from ipywidgets import HTML, Accordion, Button, HBox, IntText, Layout, VBox
         except ImportError:
             self._cached_widget = None
             return None
diff --git a/distributed/deploy/local.py b/distributed/deploy/local.py
index a30a6e410f5..0282e2b507f 100644
--- a/distributed/deploy/local.py
+++ b/distributed/deploy/local.py
@@ -4,15 +4,15 @@
 import warnings
 import weakref
 
-from dask.system import CPU_COUNT
 import toolz
+from dask.system import CPU_COUNT
 
-from .spec import SpecCluster
-from .utils import nprocesses_nthreads
 from ..nanny import Nanny
 from ..scheduler import Scheduler
 from ..security import Security
 from ..worker import Worker, parse_memory_limit
+from .spec import SpecCluster
+from .utils import nprocesses_nthreads
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/deploy/old_ssh.py b/distributed/deploy/old_ssh.py
index 6d62e0cfd8b..77b01e2388f 100644
--- a/distributed/deploy/old_ssh.py
+++ b/distributed/deploy/old_ssh.py
@@ -1,6 +1,6 @@
 import logging
-import socket
 import os
+import socket
 import sys
 import time
 import traceback
@@ -13,10 +13,8 @@
 from threading import Thread
 
 from tlz import merge
-
 from tornado import gen
 
-
 logger = logging.getLogger(__name__)
@@ -36,7 +34,7 @@ class bcolors:
 def async_ssh(cmd_dict):
     import paramiko
     from paramiko.buffered_pipe import PipeTimeout
-    from paramiko.ssh_exception import SSHException, PasswordRequiredException
+    from paramiko.ssh_exception import PasswordRequiredException, SSHException
 
     ssh = paramiko.SSHClient()
     ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
diff --git a/distributed/deploy/spec.py b/distributed/deploy/spec.py
index ef1d8a4c6fa..3d816582032 100644
--- a/distributed/deploy/spec.py
+++ b/distributed/deploy/spec.py
@@ -1,29 +1,28 @@
 import asyncio
 import atexit
-from contextlib import suppress
 import copy
 import logging
 import math
-import weakref
 import warnings
+import weakref
+from contextlib import suppress
 
 import dask
 from tornado import gen
 
-from .adaptive import Adaptive
-from .cluster import Cluster
-from ..core import rpc, CommClosedError, Status
+from ..core import CommClosedError, Status, rpc
+from ..scheduler import Scheduler
+from ..security import Security
 from ..utils import (
     LoopRunner,
-    silence_logging,
+    TimeoutError,
+    import_term,
     parse_bytes,
     parse_timedelta,
-    import_term,
-    TimeoutError,
+    silence_logging,
 )
-from ..scheduler import Scheduler
-from ..security import Security
-
+from .adaptive import Adaptive
+from .cluster import Cluster
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/deploy/ssh.py b/distributed/deploy/ssh.py
index 66003a49572..8945396475f 100644
--- a/distributed/deploy/ssh.py
+++ b/distributed/deploy/ssh.py
@@ -1,17 +1,16 @@
 import logging
 import sys
-from typing import List, Union
 import warnings
 import weakref
+from typing import List, Union
 
 import dask
 
-from .spec import SpecCluster, ProcessInterface
 from ..core import Status
-from ..utils import cli_keywords
 from ..scheduler import Scheduler as _Scheduler
+from ..utils import cli_keywords, serialize_for_cli
 from ..worker import Worker as _Worker
-from ..utils import serialize_for_cli
+from .spec import ProcessInterface, SpecCluster
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/deploy/tests/test_adaptive.py b/distributed/deploy/tests/test_adaptive.py
index e747cf95a20..4e44baeaf08 100644
--- a/distributed/deploy/tests/test_adaptive.py
+++ b/distributed/deploy/tests/test_adaptive.py
@@ -6,10 +6,16 @@
 import dask
 import pytest
 
-from distributed import Client, wait, Adaptive, LocalCluster, SpecCluster, Worker
-from distributed.utils_test import gen_test, slowinc, clean
-from distributed.utils_test import loop, nodebug, cleanup  # noqa: F401
+from distributed import Adaptive, Client, LocalCluster, SpecCluster, Worker, wait
 from distributed.metrics import time
+from distributed.utils_test import (  # noqa: F401
+    clean,
+    cleanup,
+    gen_test,
+    loop,
+    nodebug,
+    slowinc,
+)
 
 
 @pytest.mark.asyncio
diff --git a/distributed/deploy/tests/test_adaptive_core.py b/distributed/deploy/tests/test_adaptive_core.py
index a073314223d..b4fc5768f82 100644
--- a/distributed/deploy/tests/test_adaptive_core.py
+++ b/distributed/deploy/tests/test_adaptive_core.py
@@ -1,4 +1,5 @@
 import asyncio
+
 import pytest
 
 from distributed.deploy.adaptive_core import AdaptiveCore
diff --git a/distributed/deploy/tests/test_local.py b/distributed/deploy/tests/test_local.py
index 8eaea2ba83f..82d7922f95f 100644
--- a/distributed/deploy/tests/test_local.py
+++ b/distributed/deploy/tests/test_local.py
@@ -1,42 +1,41 @@
 import asyncio
-from functools import partial
 import gc
 import subprocess
 import sys
-from time import sleep
-from threading import Lock
 import unittest
 import weakref
 from distutils.version import LooseVersion
+from functools import partial
+from threading import Lock
+from time import sleep
 
-from tornado.ioloop import IOLoop
+import pytest
 import tornado
+from dask.system import CPU_COUNT
 from tornado.httpclient import AsyncHTTPClient
-import pytest
+from tornado.ioloop import IOLoop
 
-from dask.system import CPU_COUNT
-from distributed import Client, Worker, Nanny, get_client
+from distributed import Client, Nanny, Worker, get_client
 from distributed.core import Status
 from distributed.deploy.local import LocalCluster
+from distributed.deploy.utils_test import ClusterTest
 from distributed.metrics import time
 from distributed.system import MEMORY_LIMIT
+from distributed.utils import TimeoutError, sync
 from distributed.utils_test import (  # noqa: F401
+    assert_can_connect_from_everywhere_4,
+    assert_can_connect_from_everywhere_4_6,
+    assert_can_connect_locally_4,
+    assert_cannot_connect,
+    captured_logger,
     clean,
     cleanup,
-    inc,
     gen_test,
+    inc,
+    loop,
     slowinc,
-    assert_cannot_connect,
-    assert_can_connect_locally_4,
-    assert_can_connect_from_everywhere_4,
-    assert_can_connect_from_everywhere_4_6,
-    captured_logger,
     tls_only_security,
 )
-from distributed.utils_test import loop  # noqa: F401
-from distributed.utils import sync, TimeoutError
-
-from distributed.deploy.utils_test import ClusterTest
 
 
 def test_simple(loop):
diff --git a/distributed/deploy/tests/test_slow_adaptive.py b/distributed/deploy/tests/test_slow_adaptive.py
index e7021fc854a..bf89098ea0d 100644
--- a/distributed/deploy/tests/test_slow_adaptive.py
+++ b/distributed/deploy/tests/test_slow_adaptive.py
@@ -1,9 +1,10 @@
 import asyncio
+
 import pytest
+from dask.distributed import Client, Scheduler, SpecCluster, Worker
 
-from dask.distributed import Worker, Scheduler, SpecCluster, Client
-from distributed.utils_test import slowinc, cleanup  # noqa: F401
 from distributed.metrics import time
+from distributed.utils_test import cleanup, slowinc  # noqa: F401
 
 
 class SlowWorker:
diff --git a/distributed/deploy/tests/test_spec_cluster.py b/distributed/deploy/tests/test_spec_cluster.py
index afdc10cbe04..30ffc50114b 100644
--- a/distributed/deploy/tests/test_spec_cluster.py
+++ b/distributed/deploy/tests/test_spec_cluster.py
@@ -1,18 +1,19 @@
 import asyncio
 import re
-from time import sleep
 import warnings
+from time import sleep
 
 import dask
-from dask.distributed import SpecCluster, Worker, Client, Scheduler, Nanny
-from distributed.core import Status
+import pytest
+import tlz as toolz
+from dask.distributed import Client, Nanny, Scheduler, SpecCluster, Worker
+
 from distributed.compatibility import WINDOWS
-from distributed.deploy.spec import close_clusters, ProcessInterface, run_spec
+from distributed.core import Status
+from distributed.deploy.spec import ProcessInterface, close_clusters, run_spec
 from distributed.metrics import time
-from distributed.utils_test import loop, cleanup  # noqa: F401
 from distributed.utils import is_valid_xml
-import tlz as toolz
-import pytest
+from distributed.utils_test import cleanup, loop  # noqa: F401
 
 
 class MyWorker(Worker):
diff --git a/distributed/deploy/tests/test_ssh.py b/distributed/deploy/tests/test_ssh.py
index 0bea6f7dc75..6c330383d7d 100644
--- a/distributed/deploy/tests/test_ssh.py
+++ b/distributed/deploy/tests/test_ssh.py
@@ -3,7 +3,9 @@
 pytest.importorskip("asyncssh")
 
 import sys
+
 import dask
+
 from distributed import Client
 from distributed.compatibility import MACOS, WINDOWS
 from distributed.deploy.ssh import SSHCluster
diff --git a/distributed/deploy/utils_test.py b/distributed/deploy/utils_test.py
index fd6ba03aae9..543020ec21b 100644
--- a/distributed/deploy/utils_test.py
+++ b/distributed/deploy/utils_test.py
@@ -1,7 +1,7 @@
-from ..client import Client
-
 import pytest
 
+from ..client import Client
+
 
 class ClusterTest:
     Cluster = None
diff --git a/distributed/diagnostics/eventstream.py b/distributed/diagnostics/eventstream.py
index f1f70f458af..1f98e47c64b 100644
--- a/distributed/diagnostics/eventstream.py
+++ b/distributed/diagnostics/eventstream.py
@@ -1,10 +1,8 @@
 import logging
 
-from .plugin import SchedulerPlugin
-
-from ..core import connect, coerce_to_address
+from ..core import coerce_to_address, connect
 from ..worker import dumps_function
-
+from .plugin import SchedulerPlugin
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/diagnostics/nvml.py b/distributed/diagnostics/nvml.py
index c1bbb4161a8..cd39f9e04c5 100644
--- a/distributed/diagnostics/nvml.py
+++ b/distributed/diagnostics/nvml.py
@@ -1,4 +1,5 @@
 import os
+
 import pynvml
 
 nvmlInit = None
diff --git a/distributed/diagnostics/progress.py b/distributed/diagnostics/progress.py
index eeb3c8a2817..de88cd60c05 100644
--- a/distributed/diagnostics/progress.py
+++ b/distributed/diagnostics/progress.py
@@ -1,14 +1,13 @@
 import asyncio
-from collections import defaultdict
 import logging
+from collections import defaultdict
 from timeit import default_timer
 
+from dask.utils import stringify
 from tlz import groupby, valmap
-from dask.utils import stringify
 
-from .plugin import SchedulerPlugin
 from ..utils import key_split, key_split_group, log_errors
-
+from .plugin import SchedulerPlugin
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/diagnostics/progress_stream.py b/distributed/diagnostics/progress_stream.py
index b17c38fc1b6..2ee2b1c5000 100644
--- a/distributed/diagnostics/progress_stream.py
+++ b/distributed/diagnostics/progress_stream.py
@@ -1,14 +1,12 @@
 import logging
 
-from tlz import valmap, merge
+from tlz import merge, valmap
 
-from .progress import AllProgress
-
-from ..core import connect, coerce_to_address
+from ..core import coerce_to_address, connect
 from ..scheduler import Scheduler
-from ..utils import key_split, color_of
+from ..utils import color_of, key_split
 from ..worker import dumps_function
-
+from .progress import AllProgress
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/diagnostics/progressbar.py b/distributed/diagnostics/progressbar.py
index 45884bd5b1c..851910e45d4 100644
--- a/distributed/diagnostics/progressbar.py
+++ b/distributed/diagnostics/progressbar.py
@@ -1,20 +1,18 @@
-from contextlib import suppress
-import logging
 import html
-from timeit import default_timer
+import logging
 import sys
 import weakref
+from contextlib import suppress
+from timeit import default_timer
 
 from tlz import valmap
 from tornado.ioloop import IOLoop
 
-from .progress import format_time, Progress, MultiProgress
-
-from ..core import connect, coerce_to_address, CommClosedError
 from ..client import default_client, futures_of
+from ..core import CommClosedError, coerce_to_address, connect
 from ..protocol.pickle import dumps
-from ..utils import key_split, is_kernel, LoopRunner, parse_timedelta
-
+from ..utils import LoopRunner, is_kernel, key_split, parse_timedelta
+from .progress import MultiProgress, Progress, format_time
 
 logger = logging.getLogger(__name__)
@@ -160,7 +158,7 @@ def __init__(
     ):
         super().__init__(keys, scheduler, interval, complete)
 
-        from ipywidgets import FloatProgress, HBox, VBox, HTML
+        from ipywidgets import HTML, FloatProgress, HBox, VBox
 
         self.elapsed_time = HTML("")
         self.bar = FloatProgress(min=0, max=1, description="")
@@ -319,7 +317,7 @@ def __init__(
         self.widget = VBox([])
 
     def make_widget(self, all):
-        from ipywidgets import FloatProgress, HBox, VBox, HTML
+        from ipywidgets import HTML, FloatProgress, HBox, VBox
 
         self.elapsed_time = HTML("")
         self.bars = {key: FloatProgress(min=0, max=1, description="") for key in all}
diff --git a/distributed/diagnostics/task_stream.py b/distributed/diagnostics/task_stream.py
index c319ca73d69..e276c477776 100644
--- a/distributed/diagnostics/task_stream.py
+++ b/distributed/diagnostics/task_stream.py
@@ -1,12 +1,12 @@
-from collections import deque
 import logging
+from collections import deque
 
 import dask
 
-from .progress_stream import color_of
-from .plugin import SchedulerPlugin
-from ..utils import key_split, format_time, parse_timedelta
-from ..metrics import time
+from ..metrics import time
+from ..utils import format_time, key_split, parse_timedelta
+from .plugin import SchedulerPlugin
+from .progress_stream import color_of
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/diagnostics/tests/test_graph_layout.py b/distributed/diagnostics/tests/test_graph_layout.py
index b63311f8432..b714b261a4c 100644
--- a/distributed/diagnostics/tests/test_graph_layout.py
+++ b/distributed/diagnostics/tests/test_graph_layout.py
@@ -1,9 +1,9 @@
 import asyncio
 import operator
 
-from distributed.utils_test import gen_cluster, inc
-from distributed.diagnostics import GraphLayout
 from distributed import wait
+from distributed.diagnostics import GraphLayout
+from distributed.utils_test import gen_cluster, inc
 
 
 @gen_cluster(client=True)
diff --git a/distributed/diagnostics/tests/test_nvml.py b/distributed/diagnostics/tests/test_nvml.py
index 6182049fe40..6938a86850f 100644
--- a/distributed/diagnostics/tests/test_nvml.py
+++ b/distributed/diagnostics/tests/test_nvml.py
@@ -1,6 +1,7 @@
-import pytest
 import os
 
+import pytest
+
 pynvml = pytest.importorskip("pynvml")
 
 from distributed.diagnostics import nvml
diff --git a/distributed/diagnostics/tests/test_progress.py b/distributed/diagnostics/tests/test_progress.py
index 871dcb0c5a5..fa74bfa3b46 100644
--- a/distributed/diagnostics/tests/test_progress.py
+++ b/distributed/diagnostics/tests/test_progress.py
@@ -4,15 +4,15 @@
 
 from distributed import Nanny
 from distributed.client import wait
-from distributed.metrics import time
-from distributed.utils_test import gen_cluster, inc, dec, div, nodebug
 from distributed.diagnostics.progress import (
-    Progress,
-    SchedulerPlugin,
     AllProgress,
     GroupProgress,
     MultiProgress,
+    Progress,
+    SchedulerPlugin,
 )
+from distributed.metrics import time
+from distributed.utils_test import dec, div, gen_cluster, inc, nodebug
 
 
 def f(*args):
diff --git a/distributed/diagnostics/tests/test_progress_stream.py b/distributed/diagnostics/tests/test_progress_stream.py
index 8f506b7a7bb..d65d953e6b5 100644
--- a/distributed/diagnostics/tests/test_progress_stream.py
+++ b/distributed/diagnostics/tests/test_progress_stream.py
@@ -3,6 +3,7 @@
 pytest.importorskip("bokeh")
 
 from dask import delayed
+
 from distributed.client import wait
 from distributed.diagnostics.progress_stream import progress_quads, progress_stream
 from distributed.utils_test import div, gen_cluster, inc
diff --git a/distributed/diagnostics/tests/test_progressbar.py b/distributed/diagnostics/tests/test_progressbar.py
index f19dbd2df26..36db0d7b45b 100644
--- a/distributed/diagnostics/tests/test_progressbar.py
+++ b/distributed/diagnostics/tests/test_progressbar.py
@@ -5,8 +5,14 @@
 from distributed import Scheduler, Worker
 from distributed.diagnostics.progressbar import TextProgressBar, progress
 from distributed.metrics import time
-from distributed.utils_test import inc, div, gen_cluster
-from distributed.utils_test import client, loop, cluster_fixture  # noqa: F401
+from distributed.utils_test import (  # noqa: F401
+    client,
+    cluster_fixture,
+    div,
+    gen_cluster,
+    inc,
+    loop,
+)
 
 
 def test_text_progressbar(capsys, client):
diff --git a/distributed/diagnostics/tests/test_scheduler_plugin.py b/distributed/diagnostics/tests/test_scheduler_plugin.py
index 3f1e54f6ed7..465b674971f 100644
--- a/distributed/diagnostics/tests/test_scheduler_plugin.py
+++ b/distributed/diagnostics/tests/test_scheduler_plugin.py
@@ -1,6 +1,7 @@
 import pytest
-from distributed import Scheduler, Worker, SchedulerPlugin
-from distributed.utils_test import inc, gen_cluster, cleanup  # noqa: F401
+
+from distributed import Scheduler, SchedulerPlugin, Worker
+from distributed.utils_test import cleanup, gen_cluster, inc  # noqa: F401
 
 
 @gen_cluster(client=True)
diff --git a/distributed/diagnostics/tests/test_task_stream.py b/distributed/diagnostics/tests/test_task_stream.py
index 4b57d18ee7a..642277e81dc 100644
--- a/distributed/diagnostics/tests/test_task_stream.py
+++ b/distributed/diagnostics/tests/test_task_stream.py
@@ -5,11 +5,18 @@
 from tlz import frequencies
 
 from distributed import get_task_stream
-from distributed.utils_test import gen_cluster, div, inc, slowinc
-from distributed.utils_test import client, loop, cluster_fixture  # noqa: F401
 from distributed.client import wait
 from distributed.diagnostics.task_stream import TaskStreamPlugin
 from distributed.metrics import time
+from distributed.utils_test import (  # noqa: F401
+    client,
+    cluster_fixture,
+    div,
+    gen_cluster,
+    inc,
+    loop,
+    slowinc,
+)
 
 
 @gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 3)
diff --git a/distributed/diagnostics/tests/test_widgets.py b/distributed/diagnostics/tests/test_widgets.py
index 2505f986406..bc86e436b24 100644
--- a/distributed/diagnostics/tests/test_widgets.py
+++ b/distributed/diagnostics/tests/test_widgets.py
@@ -2,10 +2,11 @@
 
 pytest.importorskip("ipywidgets")
 
-from distributed.compatibility import WINDOWS
 from ipykernel.comm import Comm
 from ipywidgets import Widget
 
+from distributed.compatibility import WINDOWS
+
 #################
 # Utility stuff #
 #################
@@ -72,20 +73,28 @@ def record_display(*args):
 # Distributed stuff #
 #####################
 
-from operator import add
 import re
+from operator import add
 
 from tlz import valmap
 
 from distributed.client import wait
-from distributed.worker import dumps_task
-from distributed.utils_test import inc, dec, throws, gen_cluster, gen_tls_cluster
-from distributed.utils_test import client, loop, cluster_fixture  # noqa: F401
 from distributed.diagnostics.progressbar import (
-    ProgressWidget,
     MultiProgressWidget,
+    ProgressWidget,
     progress,
 )
+from distributed.utils_test import (  # noqa: F401
+    client,
+    cluster_fixture,
+    dec,
+    gen_cluster,
+    gen_tls_cluster,
+    inc,
+    loop,
+    throws,
+)
+from distributed.worker import dumps_task
 
 
 @gen_cluster(client=True)
diff --git a/distributed/diagnostics/websocket.py b/distributed/diagnostics/websocket.py
index e34961bfeff..51282c1e621 100644
--- a/distributed/diagnostics/websocket.py
+++ b/distributed/diagnostics/websocket.py
@@ -1,5 +1,5 @@
-from .plugin import SchedulerPlugin
 from ..utils import key_split
+from .plugin import SchedulerPlugin
 from .task_stream import colors
diff --git a/distributed/diskutils.py b/distributed/diskutils.py
index e9dbb7b25bf..49d0a26222c 100644
--- a/distributed/diskutils.py
+++ b/distributed/diskutils.py
@@ -11,7 +11,6 @@
 
 from . import locket
 
-
 logger = logging.getLogger(__name__)
 
 DIR_LOCK_EXT = ".dirlock"
diff --git a/distributed/event.py b/distributed/event.py
index 0136d35ef26..a3e2a1b7eeb 100644
--- a/distributed/event.py
+++ b/distributed/event.py
@@ -1,13 +1,12 @@
 import asyncio
-from collections import defaultdict
-from contextlib import suppress
 import logging
 import uuid
+from collections import defaultdict
+from contextlib import suppress
 
 from .client import Client
-from .utils import log_errors, TimeoutError
+from .utils import TimeoutError, log_errors, parse_timedelta
 from .worker import get_worker
-from .utils import parse_timedelta
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/http/routing.py b/distributed/http/routing.py
index 8a1d90d5490..7de870faa72 100644
--- a/distributed/http/routing.py
+++ b/distributed/http/routing.py
@@ -1,7 +1,8 @@
 import os
-from tornado import web
+
 import tornado.httputil
 import tornado.routing
+from tornado import web
 
 
 def _descend_routes(router, routers=set(), out=set()):
diff --git a/distributed/http/scheduler/info.py b/distributed/http/scheduler/info.py
index 96199faba38..01446531285 100644
--- a/distributed/http/scheduler/info.py
+++ b/distributed/http/scheduler/info.py
@@ -1,19 +1,18 @@
-from datetime import datetime
 import json
 import logging
 import os
 import os.path
+from datetime import datetime
 
 from dask.utils import format_bytes
-
+from tlz import first, merge
 from tornado import escape
 from tornado.websocket import WebSocketHandler
-from tlz import first, merge
 
-from ..utils import RequestHandler, redirect
 from ...diagnostics.websocket import WebsocketPlugin
 from ...metrics import time
-from ...utils import log_errors, format_time
+from ...utils import format_time, log_errors
+from ..utils import RequestHandler, redirect
 
 ns = {
     func.__name__: func
diff --git a/distributed/http/scheduler/json.py b/distributed/http/scheduler/json.py
index 5dc09b4b6fe..61801a00d34 100644
--- a/distributed/http/scheduler/json.py
+++ b/distributed/http/scheduler/json.py
@@ -1,5 +1,5 @@
-from ..utils import RequestHandler
 from ...utils import log_errors
+from ..utils import RequestHandler
 
 
 class CountsJSON(RequestHandler):
diff --git a/distributed/http/scheduler/missing_bokeh.py b/distributed/http/scheduler/missing_bokeh.py
index 3eb68960d53..917e79f610b 100644
--- a/distributed/http/scheduler/missing_bokeh.py
+++ b/distributed/http/scheduler/missing_bokeh.py
@@ -1,5 +1,5 @@
-from ..utils import RequestHandler, redirect
 from ...utils import log_errors
+from ..utils import RequestHandler, redirect
 
 
 class MissingBokeh(RequestHandler):
diff --git a/distributed/http/scheduler/prometheus/__init__.py b/distributed/http/scheduler/prometheus/__init__.py
index 2016ca32512..120a01dab58 100644
--- a/distributed/http/scheduler/prometheus/__init__.py
+++ b/distributed/http/scheduler/prometheus/__init__.py
@@ -2,6 +2,7 @@
 
 from distributed.http.utils import RequestHandler
 from distributed.scheduler import ALL_TASK_STATES
+
 from .semaphore import SemaphoreMetricExtension
 
 
@@ -10,7 +11,7 @@ def __init__(self, dask_server):
         self.server = dask_server
 
     def collect(self):
-        from prometheus_client.core import GaugeMetricFamily, CounterMetricFamily
+        from prometheus_client.core import CounterMetricFamily, GaugeMetricFamily
 
         yield GaugeMetricFamily(
             "dask_scheduler_clients",
diff --git a/distributed/http/scheduler/prometheus/semaphore.py b/distributed/http/scheduler/prometheus/semaphore.py
index f1df7434019..aac467b66cc 100644
--- a/distributed/http/scheduler/prometheus/semaphore.py
+++
b/distributed/http/scheduler/prometheus/semaphore.py @@ -3,7 +3,7 @@ def __init__(self, dask_server): self.server = dask_server def collect(self): - from prometheus_client.core import GaugeMetricFamily, CounterMetricFamily + from prometheus_client.core import CounterMetricFamily, GaugeMetricFamily sem_ext = self.server.extensions["semaphores"] diff --git a/distributed/http/scheduler/tests/test_scheduler_http.py b/distributed/http/scheduler/tests/test_scheduler_http.py index 2aa4f58c4ea..65225520ed9 100644 --- a/distributed/http/scheduler/tests/test_scheduler_http.py +++ b/distributed/http/scheduler/tests/test_scheduler_http.py @@ -5,12 +5,12 @@ pytest.importorskip("bokeh") +from dask.sizeof import sizeof from tornado.escape import url_escape from tornado.httpclient import AsyncHTTPClient, HTTPClientError -from dask.sizeof import sizeof from distributed.utils import is_valid_xml -from distributed.utils_test import gen_cluster, slowinc, inc +from distributed.utils_test import gen_cluster, inc, slowinc @gen_cluster(client=True) diff --git a/distributed/http/scheduler/tests/test_semaphore_http.py b/distributed/http/scheduler/tests/test_semaphore_http.py index 4c66165b985..21996cb35f4 100644 --- a/distributed/http/scheduler/tests/test_semaphore_http.py +++ b/distributed/http/scheduler/tests/test_semaphore_http.py @@ -1,9 +1,8 @@ import pytest - from tornado.httpclient import AsyncHTTPClient -from distributed.utils_test import gen_cluster from distributed import Semaphore +from distributed.utils_test import gen_cluster @gen_cluster(client=True, clean_kwargs={"threads": False}) diff --git a/distributed/http/statics.py b/distributed/http/statics.py index e1c7a98e9a2..f838ac8522e 100644 --- a/distributed/http/statics.py +++ b/distributed/http/statics.py @@ -1,6 +1,7 @@ -from tornado import web import os +from tornado import web + routes = [ ( r"/statics/(.*)", diff --git a/distributed/http/tests/test_core.py b/distributed/http/tests/test_core.py index c1bffedb72e..61cb713fcf2 100644 --- a/distributed/http/tests/test_core.py +++ b/distributed/http/tests/test_core.py @@ -1,6 +1,7 @@ -from distributed.utils_test import gen_cluster from tornado.httpclient import AsyncHTTPClient +from distributed.utils_test import gen_cluster + @gen_cluster(client=True) async def test_scheduler(c, s, a, b): diff --git a/distributed/http/tests/test_routing.py b/distributed/http/tests/test_routing.py index ca7d071d256..764475ce4ed 100644 --- a/distributed/http/tests/test_routing.py +++ b/distributed/http/tests/test_routing.py @@ -1,6 +1,6 @@ +import pytest from tornado import web from tornado.httpclient import AsyncHTTPClient, HTTPClientError -import pytest from distributed.http.routing import RoutingApplication diff --git a/distributed/http/utils.py b/distributed/http/utils.py index 5977ccd5bad..c0de926c99b 100644 --- a/distributed/http/utils.py +++ b/distributed/http/utils.py @@ -2,12 +2,11 @@ import os from typing import List -from tornado import web import toolz +from tornado import web from ..utils import has_keyword - dirname = os.path.dirname(__file__) diff --git a/distributed/http/worker/prometheus.py b/distributed/http/worker/prometheus.py index b354cad3ea9..4d0c0a55e60 100644 --- a/distributed/http/worker/prometheus.py +++ b/distributed/http/worker/prometheus.py @@ -1,7 +1,7 @@ -from ..utils import RequestHandler - import logging +from ..utils import RequestHandler + class _PrometheusCollector: def __init__(self, server): diff --git a/distributed/http/worker/tests/test_worker_http.py 
b/distributed/http/worker/tests/test_worker_http.py index 5bc0a5debc9..e464c484ebe 100644 --- a/distributed/http/worker/tests/test_worker_http.py +++ b/distributed/http/worker/tests/test_worker_http.py @@ -1,6 +1,8 @@ -import pytest import json + +import pytest from tornado.httpclient import AsyncHTTPClient + from distributed.utils_test import gen_cluster diff --git a/distributed/lock.py b/distributed/lock.py index 7d1c1a4af57..7ffe8c8daf9 100644 --- a/distributed/lock.py +++ b/distributed/lock.py @@ -1,12 +1,11 @@ import asyncio -from collections import defaultdict, deque import logging import uuid +from collections import defaultdict, deque from .client import Client -from .utils import log_errors, TimeoutError +from .utils import TimeoutError, log_errors, parse_timedelta from .worker import get_worker -from .utils import parse_timedelta logger = logging.getLogger(__name__) diff --git a/distributed/locket.py b/distributed/locket.py index 65a10f195f7..906938e6085 100644 --- a/distributed/locket.py +++ b/distributed/locket.py @@ -3,9 +3,9 @@ # flake8: noqa -import time import errno import threading +import time import weakref __all__ = ["lock_file"] diff --git a/distributed/metrics.py b/distributed/metrics.py index 163a982b792..c52b40c5afd 100755 --- a/distributed/metrics.py +++ b/distributed/metrics.py @@ -1,8 +1,7 @@ import collections -from functools import wraps import sys import time as timemod - +from functools import wraps _empty_namedtuple = collections.namedtuple("_empty_namedtuple", ()) diff --git a/distributed/multi_lock.py b/distributed/multi_lock.py index 6d1df68c2f4..aaa21999a19 100644 --- a/distributed/multi_lock.py +++ b/distributed/multi_lock.py @@ -1,13 +1,12 @@ import asyncio -from collections import defaultdict import logging -from typing import Hashable, List import uuid +from collections import defaultdict +from typing import Hashable, List from .client import Client -from .utils import log_errors, TimeoutError +from .utils import TimeoutError, log_errors, parse_timedelta from .worker import get_worker -from .utils import parse_timedelta logger = logging.getLogger(__name__) diff --git a/distributed/nanny.py b/distributed/nanny.py index f93f18522ca..a62833aa32a 100644 --- a/distributed/nanny.py +++ b/distributed/nanny.py @@ -1,41 +1,40 @@ import asyncio -from contextlib import suppress import errno import logging -from multiprocessing.queues import Empty import os -import psutil import shutil import threading import uuid import warnings import weakref +from contextlib import suppress +from multiprocessing.queues import Empty import dask +import psutil from dask.system import CPU_COUNT -from tornado.ioloop import IOLoop, PeriodicCallback from tornado import gen +from tornado.ioloop import IOLoop, PeriodicCallback +from . import preloading from .comm import get_address_host, unparse_host_port from .comm.addressing import address_from_user_args -from .core import RPCClosed, CommClosedError, coerce_to_address, Status +from .core import CommClosedError, RPCClosed, Status, coerce_to_address from .metrics import time from .node import ServerNode -from . 
import preloading from .process import AsyncProcess from .proctitle import enable_proctitle_on_children from .security import Security from .utils import ( + TimeoutError, get_ip, - mp_context, - silence_logging, json_load_robust, - parse_timedelta, + mp_context, parse_ports, - TimeoutError, + parse_timedelta, + silence_logging, ) -from .worker import run, parse_memory_limit, Worker - +from .worker import Worker, parse_memory_limit, run logger = logging.getLogger(__name__) diff --git a/distributed/node.py b/distributed/node.py index 997e6a1a988..247a8d85cc2 100644 --- a/distributed/node.py +++ b/distributed/node.py @@ -1,18 +1,17 @@ -from contextlib import suppress import logging import warnings import weakref +from contextlib import suppress -from tornado.httpserver import HTTPServer -import tlz import dask +import tlz +from tornado.httpserver import HTTPServer -from .comm import get_tcp_server_address -from .comm import get_address_host +from .comm import get_address_host, get_tcp_server_address from .core import Server from .http.routing import RoutingApplication -from .versions import get_versions from .utils import DequeHandler, clean_dashboard_address +from .versions import get_versions class ServerNode(Server): diff --git a/distributed/preloading.py b/distributed/preloading.py index dbbe42d57d0..bae94a149d8 100644 --- a/distributed/preloading.py +++ b/distributed/preloading.py @@ -1,17 +1,16 @@ +import filecmp import inspect import logging import os import shutil import sys -from typing import List -from types import ModuleType -import filecmp from importlib import import_module +from types import ModuleType +from typing import List import click -from tornado.httpclient import AsyncHTTPClient - from dask.utils import tmpfile +from tornado.httpclient import AsyncHTTPClient from .utils import import_file diff --git a/distributed/process.py b/distributed/process.py index 1c11dd2e3d7..f46527b5337 100644 --- a/distributed/process.py +++ b/distributed/process.py @@ -1,18 +1,17 @@ +import asyncio import logging import os -from queue import Queue as PyQueue import re import threading import weakref -import asyncio -import dask - -from .utils import mp_context, TimeoutError +from queue import Queue as PyQueue +import dask from tornado import gen from tornado.concurrent import Future from tornado.ioloop import IOLoop +from .utils import TimeoutError, mp_context logger = logging.getLogger(__name__) diff --git a/distributed/profile.py b/distributed/profile.py index 78e1765d041..fbaf6ea6599 100644 --- a/distributed/profile.py +++ b/distributed/profile.py @@ -25,16 +25,16 @@ } """ import bisect -from collections import defaultdict, deque import linecache import sys import threading +from collections import defaultdict, deque from time import sleep import tlz as toolz from .metrics import time -from .utils import format_time, color_of, parse_timedelta +from .utils import color_of, format_time, parse_timedelta def identifier(frame): @@ -375,8 +375,8 @@ def plot_figure(data, **kwargs): -------- plot_data """ - from bokeh.plotting import ColumnDataSource, figure from bokeh.models import HoverTool + from bokeh.plotting import ColumnDataSource, figure if "states" in data: data = toolz.dissoc(data, "states") diff --git a/distributed/protocol/__init__.py b/distributed/protocol/__init__.py index be1c498c35c..36aa3f42c12 100644 --- a/distributed/protocol/__init__.py +++ b/distributed/protocol/__init__.py @@ -1,25 +1,25 @@ from contextlib import suppress -from functools import partial from distutils.version 
import LooseVersion +from functools import partial from .compression import compressions, default_compression -from .core import dumps, loads, maybe_compress, decompress, msgpack -from .cuda import cuda_serialize, cuda_deserialize +from .core import decompress, dumps, loads, maybe_compress, msgpack +from .cuda import cuda_deserialize, cuda_serialize from .serialize import ( - serialize, - deserialize, - nested_deserialize, Serialize, Serialized, - to_serialize, - register_serialization, - dask_serialize, dask_deserialize, - serialize_bytes, + dask_serialize, + deserialize, deserialize_bytes, - serialize_bytelist, - register_serialization_family, + nested_deserialize, register_generic, + register_serialization, + register_serialization_family, + serialize, + serialize_bytelist, + serialize_bytes, + to_serialize, ) diff --git a/distributed/protocol/arrow.py b/distributed/protocol/arrow.py index 1f2b4e83e9a..2850c47466e 100644 --- a/distributed/protocol/arrow.py +++ b/distributed/protocol/arrow.py @@ -1,7 +1,7 @@ -from .serialize import dask_serialize, dask_deserialize - import pyarrow +from .serialize import dask_deserialize, dask_serialize + if pyarrow.__version__ < "0.10": raise ImportError( "Need pyarrow >= 0.10 . " diff --git a/distributed/protocol/compression.py b/distributed/protocol/compression.py index b067ae0e526..5131a4f53f3 100644 --- a/distributed/protocol/compression.py +++ b/distributed/protocol/compression.py @@ -3,10 +3,10 @@ Includes utilities for determining whether or not to compress """ -from contextlib import suppress -from functools import partial import logging import random +from contextlib import suppress +from functools import partial import dask from tlz import identity @@ -22,7 +22,6 @@ from ..utils import ensure_bytes - compressions = {None: {"compress": identity, "decompress": identity}} compressions[False] = compressions[None] # alias diff --git a/distributed/protocol/core.py b/distributed/protocol/core.py index 4235dca9f9c..05a804d3b52 100644 --- a/distributed/protocol/core.py +++ b/distributed/protocol/core.py @@ -1,18 +1,18 @@ import logging + import msgpack -from .compression import compressions, maybe_compress, decompress +from .compression import compressions, decompress, maybe_compress from .serialize import ( Serialize, Serialized, + merge_and_deserialize, msgpack_decode_default, msgpack_encode_default, - merge_and_deserialize, serialize_and_split, ) from .utils import msgpack_opts - logger = logging.getLogger(__name__) diff --git a/distributed/protocol/cuda.py b/distributed/protocol/cuda.py index 44ed6a033df..572b63547f5 100644 --- a/distributed/protocol/cuda.py +++ b/distributed/protocol/cuda.py @@ -1,8 +1,8 @@ import dask +from dask.utils import typename from . 
import pickle from .serialize import ObjectDictSerializer, register_serialization_family -from dask.utils import typename cuda_serialize = dask.utils.Dispatch("cuda_serialize") cuda_deserialize = dask.utils.Dispatch("cuda_deserialize") diff --git a/distributed/protocol/h5py.py b/distributed/protocol/h5py.py index e129c166683..8a47c7abdc6 100644 --- a/distributed/protocol/h5py.py +++ b/distributed/protocol/h5py.py @@ -1,7 +1,7 @@ -from .serialize import dask_serialize, dask_deserialize - import h5py +from .serialize import dask_deserialize, dask_serialize + @dask_serialize.register(h5py.File) def serialize_h5py_file(f): diff --git a/distributed/protocol/keras.py b/distributed/protocol/keras.py index 121aa0c4700..c2c24e3992e 100644 --- a/distributed/protocol/keras.py +++ b/distributed/protocol/keras.py @@ -1,7 +1,7 @@ -from .serialize import dask_serialize, dask_deserialize, serialize, deserialize - import keras +from .serialize import dask_deserialize, dask_serialize, deserialize, serialize + @dask_serialize.register(keras.Model) def serialize_keras_model(model): diff --git a/distributed/protocol/netcdf4.py b/distributed/protocol/netcdf4.py index eb83461eddc..d3d0b1e2c0c 100644 --- a/distributed/protocol/netcdf4.py +++ b/distributed/protocol/netcdf4.py @@ -1,7 +1,7 @@ -from .serialize import dask_serialize, dask_deserialize, serialize, deserialize - import netCDF4 +from .serialize import dask_deserialize, dask_serialize, deserialize, serialize + @dask_serialize.register(netCDF4.Dataset) def serialize_netcdf4_dataset(ds): diff --git a/distributed/protocol/numpy.py b/distributed/protocol/numpy.py index 65f7e2f4076..a0a7d544064 100644 --- a/distributed/protocol/numpy.py +++ b/distributed/protocol/numpy.py @@ -1,10 +1,10 @@ import math -import numpy as np -from .serialize import dask_serialize, dask_deserialize -from . import pickle +import numpy as np from ..utils import log_errors +from . import pickle +from .serialize import dask_deserialize, dask_serialize def itemsize(dt): diff --git a/distributed/protocol/serialize.py b/distributed/protocol/serialize.py index 228f02d78ca..971e1d3821a 100644 --- a/distributed/protocol/serialize.py +++ b/distributed/protocol/serialize.py @@ -1,24 +1,17 @@ -from array import array -from functools import partial -import traceback import importlib +import traceback +from array import array from enum import Enum +from functools import partial import dask -from dask.base import normalize_token - import msgpack +from dask.base import normalize_token +from ..utils import ensure_bytes, has_keyword, typename from . 
import pickle -from ..utils import has_keyword, typename, ensure_bytes -from .compression import maybe_compress, decompress -from .utils import ( - unpack_frames, - pack_frames_prelude, - frame_split_size, - msgpack_opts, -) - +from .compression import decompress, maybe_compress +from .utils import frame_split_size, msgpack_opts, pack_frames_prelude, unpack_frames lazy_registrations = {} diff --git a/distributed/protocol/sparse.py b/distributed/protocol/sparse.py index a22d661f849..42d625b0df2 100644 --- a/distributed/protocol/sparse.py +++ b/distributed/protocol/sparse.py @@ -1,7 +1,7 @@ -from .serialize import dask_serialize, dask_deserialize, serialize, deserialize - import sparse +from .serialize import dask_deserialize, dask_serialize, deserialize, serialize + @dask_serialize.register(sparse.COO) def serialize_sparse(x): diff --git a/distributed/protocol/tests/test_arrow.py b/distributed/protocol/tests/test_arrow.py index e86bfa6f827..35d26177f03 100644 --- a/distributed/protocol/tests/test_arrow.py +++ b/distributed/protocol/tests/test_arrow.py @@ -4,8 +4,8 @@ pa = pytest.importorskip("pyarrow") import distributed -from distributed.utils_test import gen_cluster from distributed.protocol import deserialize, serialize, to_serialize +from distributed.utils_test import gen_cluster df = pd.DataFrame({"A": list("abc"), "B": [1, 2, 3]}) tbl = pa.Table.from_pandas(df, preserve_index=False) diff --git a/distributed/protocol/tests/test_collection.py b/distributed/protocol/tests/test_collection.py index fd112b6c792..f2064fda858 100644 --- a/distributed/protocol/tests/test_collection.py +++ b/distributed/protocol/tests/test_collection.py @@ -1,7 +1,8 @@ -import pytest -from distributed.protocol import serialize, deserialize -import pandas as pd import numpy as np +import pandas as pd +import pytest + +from distributed.protocol import deserialize, serialize @pytest.mark.parametrize("collection", [tuple, dict, list]) diff --git a/distributed/protocol/tests/test_collection_cuda.py b/distributed/protocol/tests/test_collection_cuda.py index e2602795782..4f3242525cd 100644 --- a/distributed/protocol/tests/test_collection_cuda.py +++ b/distributed/protocol/tests/test_collection_cuda.py @@ -1,8 +1,8 @@ +import pandas as pd import pytest - -from distributed.protocol import serialize, deserialize from dask.dataframe.utils import assert_eq -import pandas as pd + +from distributed.protocol import deserialize, serialize @pytest.mark.parametrize("collection", [tuple, dict]) diff --git a/distributed/protocol/tests/test_cupy.py b/distributed/protocol/tests/test_cupy.py index 520693fb5c1..5c684e46d62 100644 --- a/distributed/protocol/tests/test_cupy.py +++ b/distributed/protocol/tests/test_cupy.py @@ -1,6 +1,7 @@ import pickle import pytest + from distributed.protocol import deserialize, serialize cupy = pytest.importorskip("cupy") diff --git a/distributed/protocol/tests/test_h5py.py b/distributed/protocol/tests/test_h5py.py index 80eeb2c05f5..5189fc7f499 100644 --- a/distributed/protocol/tests/test_h5py.py +++ b/distributed/protocol/tests/test_h5py.py @@ -6,7 +6,6 @@ h5py = pytest.importorskip("h5py") from distributed.protocol import deserialize, serialize - from distributed.utils import tmpfile @@ -82,10 +81,9 @@ def test_raise_error_on_serialize_write_permissions(): deserialize(*serialize(f)) -from distributed.utils_test import gen_cluster - +from dask import array as da -import dask.array as da +from distributed.utils_test import gen_cluster @silence_h5py_issue775 diff --git 
a/distributed/protocol/tests/test_highlevelgraph.py b/distributed/protocol/tests/test_highlevelgraph.py index 1fcb339f721..ea0c752494b 100644 --- a/distributed/protocol/tests/test_highlevelgraph.py +++ b/distributed/protocol/tests/test_highlevelgraph.py @@ -1,14 +1,12 @@ import ast import dask +import pytest +from dask import array as da +from dask import dataframe as dd -import dask.array as da -import dask.dataframe as dd - -from distributed.utils_test import gen_cluster from distributed.diagnostics import SchedulerPlugin - -import pytest +from distributed.utils_test import gen_cluster np = pytest.importorskip("numpy") pd = pytest.importorskip("pandas") diff --git a/distributed/protocol/tests/test_keras.py b/distributed/protocol/tests/test_keras.py index da8cdf6374a..d84127d5df1 100644 --- a/distributed/protocol/tests/test_keras.py +++ b/distributed/protocol/tests/test_keras.py @@ -1,10 +1,10 @@ import numpy as np -from numpy.testing import assert_allclose import pytest +from numpy.testing import assert_allclose keras = pytest.importorskip("keras") -from distributed.protocol import serialize, deserialize, dumps, loads, to_serialize +from distributed.protocol import deserialize, dumps, loads, serialize, to_serialize def test_serialize_deserialize_model(): diff --git a/distributed/protocol/tests/test_netcdf4.py b/distributed/protocol/tests/test_netcdf4.py index 1ed78508156..4b198381328 100644 --- a/distributed/protocol/tests/test_netcdf4.py +++ b/distributed/protocol/tests/test_netcdf4.py @@ -4,7 +4,6 @@ np = pytest.importorskip("numpy") from distributed.protocol import deserialize, serialize - from distributed.utils import tmpfile @@ -75,10 +74,9 @@ def test_serialize_deserialize_group(): assert (x[:] == y[:]).all() -from distributed.utils_test import gen_cluster - +from dask import array as da -import dask.array as da +from distributed.utils_test import gen_cluster @gen_cluster(client=True) diff --git a/distributed/protocol/tests/test_numba.py b/distributed/protocol/tests/test_numba.py index 61213640715..b34d4be25ab 100644 --- a/distributed/protocol/tests/test_numba.py +++ b/distributed/protocol/tests/test_numba.py @@ -1,7 +1,9 @@ -from distributed.protocol import serialize, deserialize import pickle + import pytest +from distributed.protocol import deserialize, serialize + cuda = pytest.importorskip("numba.cuda") np = pytest.importorskip("numpy") diff --git a/distributed/protocol/tests/test_numpy.py b/distributed/protocol/tests/test_numpy.py index 9096748b5d1..9a4269e3b9c 100644 --- a/distributed/protocol/tests/test_numpy.py +++ b/distributed/protocol/tests/test_numpy.py @@ -4,20 +4,20 @@ import pytest from distributed.protocol import ( - serialize, - deserialize, decompress, + deserialize, dumps, loads, - to_serialize, msgpack, + serialize, + to_serialize, ) -from distributed.protocol.utils import BIG_BYTES_SHARD_SIZE +from distributed.protocol.compression import maybe_compress from distributed.protocol.numpy import itemsize from distributed.protocol.pickle import HIGHEST_PROTOCOL -from distributed.protocol.compression import maybe_compress +from distributed.protocol.utils import BIG_BYTES_SHARD_SIZE from distributed.system import MEMORY_LIMIT -from distributed.utils import ensure_bytes, tmpfile, nbytes +from distributed.utils import ensure_bytes, nbytes, tmpfile from distributed.utils_test import gen_cluster diff --git a/distributed/protocol/tests/test_pandas.py b/distributed/protocol/tests/test_pandas.py index a8134d7e3d0..e2037962884 100644 --- 
a/distributed/protocol/tests/test_pandas.py +++ b/distributed/protocol/tests/test_pandas.py @@ -1,20 +1,18 @@ import numpy as np import pandas as pd import pytest - from dask.dataframe.utils import assert_eq from distributed.protocol import ( - serialize, - deserialize, decompress, + deserialize, dumps, loads, + serialize, to_serialize, ) from distributed.utils import ensure_bytes - dfs = [ pd.DataFrame({}), pd.DataFrame({"x": [1, 2, 3]}), diff --git a/distributed/protocol/tests/test_pickle.py b/distributed/protocol/tests/test_pickle.py index d7a2ad6d1ad..86424f8c1e1 100644 --- a/distributed/protocol/tests/test_pickle.py +++ b/distributed/protocol/tests/test_pickle.py @@ -1,8 +1,8 @@ -from functools import partial import gc -from operator import add -import weakref import sys +import weakref +from functools import partial +from operator import add import pytest diff --git a/distributed/protocol/tests/test_protocol.py b/distributed/protocol/tests/test_protocol.py index e088f146bfb..411fcc0c4e4 100644 --- a/distributed/protocol/tests/test_protocol.py +++ b/distributed/protocol/tests/test_protocol.py @@ -1,8 +1,8 @@ import pytest -from distributed.protocol import loads, dumps, msgpack, maybe_compress, to_serialize +from distributed.protocol import dumps, loads, maybe_compress, msgpack, to_serialize from distributed.protocol.compression import compressions -from distributed.protocol.serialize import Serialize, Serialized, serialize, deserialize +from distributed.protocol.serialize import Serialize, Serialized, deserialize, serialize from distributed.system import MEMORY_LIMIT from distributed.utils import nbytes diff --git a/distributed/protocol/tests/test_rmm.py b/distributed/protocol/tests/test_rmm.py index 8b176afd877..bd9d7f4cab9 100644 --- a/distributed/protocol/tests/test_rmm.py +++ b/distributed/protocol/tests/test_rmm.py @@ -1,6 +1,7 @@ -from distributed.protocol import serialize, deserialize import pytest +from distributed.protocol import deserialize, serialize + numpy = pytest.importorskip("numpy") cuda = pytest.importorskip("numba.cuda") rmm = pytest.importorskip("rmm") diff --git a/distributed/protocol/tests/test_scipy.py b/distributed/protocol/tests/test_scipy.py index 4e5eb8423cf..0904f92002a 100644 --- a/distributed/protocol/tests/test_scipy.py +++ b/distributed/protocol/tests/test_scipy.py @@ -1,4 +1,5 @@ import pytest + from distributed.protocol import deserialize, serialize numpy = pytest.importorskip("numpy") diff --git a/distributed/protocol/tests/test_serialize.py b/distributed/protocol/tests/test_serialize.py index 735dffb5c19..d1e15cb6d44 100644 --- a/distributed/protocol/tests/test_serialize.py +++ b/distributed/protocol/tests/test_serialize.py @@ -1,35 +1,34 @@ -from array import array import copy import pickle +from array import array import msgpack import numpy as np import pytest -from tlz import identity - from dask.utils_test import inc +from tlz import identity from distributed import wait +from distributed.comm.utils import from_frames, to_frames from distributed.protocol import ( - register_serialization, - serialize, - deserialize, - nested_deserialize, Serialize, Serialized, - to_serialize, - serialize_bytes, - deserialize_bytes, - serialize_bytelist, - register_serialization_family, dask_serialize, + deserialize, + deserialize_bytes, dumps, loads, + nested_deserialize, + register_serialization, + register_serialization_family, + serialize, + serialize_bytelist, + serialize_bytes, + to_serialize, ) from distributed.protocol.serialize import 
check_dask_serializable from distributed.utils import nbytes -from distributed.utils_test import inc, gen_test -from distributed.comm.utils import to_frames, from_frames +from distributed.utils_test import gen_test, inc class MyObj: @@ -141,9 +140,10 @@ def test_nested_deserialize(): assert x == x_orig # x wasn't mutated -from distributed.utils_test import gen_cluster from dask import delayed +from distributed.utils_test import gen_cluster + @gen_cluster(client=True) async def test_object_in_graph(c, s, a, b): diff --git a/distributed/protocol/tests/test_sparse.py b/distributed/protocol/tests/test_sparse.py index 89f9da09bc2..5d971c9b1f4 100644 --- a/distributed/protocol/tests/test_sparse.py +++ b/distributed/protocol/tests/test_sparse.py @@ -1,6 +1,6 @@ import numpy as np -from numpy.testing import assert_allclose import pytest +from numpy.testing import assert_allclose sparse = pytest.importorskip("sparse") diff --git a/distributed/protocol/tests/test_torch.py b/distributed/protocol/tests/test_torch.py index efb5fa6610a..3e212c0e858 100644 --- a/distributed/protocol/tests/test_torch.py +++ b/distributed/protocol/tests/test_torch.py @@ -1,6 +1,7 @@ -from distributed.protocol import serialize, deserialize import pytest +from distributed.protocol import deserialize, serialize + np = pytest.importorskip("numpy") torch = pytest.importorskip("torch") diff --git a/distributed/protocol/torch.py b/distributed/protocol/torch.py index 3b4c6d19c8d..f8b6acb13b0 100644 --- a/distributed/protocol/torch.py +++ b/distributed/protocol/torch.py @@ -1,7 +1,7 @@ -from .serialize import serialize, dask_serialize, dask_deserialize, register_generic - -import torch import numpy as np +import torch + +from .serialize import dask_deserialize, dask_serialize, register_generic, serialize @dask_serialize.register(torch.Tensor) diff --git a/distributed/pubsub.py b/distributed/pubsub.py index 91200be06eb..5dba5b679c6 100644 --- a/distributed/pubsub.py +++ b/distributed/pubsub.py @@ -1,13 +1,13 @@ import asyncio -from collections import defaultdict, deque import logging import threading import weakref +from collections import defaultdict, deque from .core import CommClosedError from .metrics import time -from .utils import sync, TimeoutError, parse_timedelta from .protocol.serialize import to_serialize +from .utils import TimeoutError, parse_timedelta, sync logger = logging.getLogger(__name__) @@ -283,7 +283,7 @@ class Pub: def __init__(self, name, worker=None, client=None): if worker is None and client is None: - from distributed import get_worker, get_client + from distributed import get_client, get_worker try: worker = get_worker() @@ -363,7 +363,7 @@ class Sub: def __init__(self, name, worker=None, client=None): if worker is None and client is None: - from distributed.worker import get_worker, get_client + from distributed.worker import get_client, get_worker try: worker = get_worker() diff --git a/distributed/pytest_resourceleaks.py b/distributed/pytest_resourceleaks.py index 348472892d6..68246005275 100644 --- a/distributed/pytest_resourceleaks.py +++ b/distributed/pytest_resourceleaks.py @@ -4,10 +4,10 @@ """ import collections import gc -import time import os import sys import threading +import time import pytest diff --git a/distributed/queues.py b/distributed/queues.py index e368d329d03..cadaf358f0a 100644 --- a/distributed/queues.py +++ b/distributed/queues.py @@ -1,14 +1,13 @@ import asyncio -from collections import defaultdict import logging import uuid +from collections import defaultdict from dask.utils 
import stringify -from .client import Future, Client -from .utils import sync, thread_state +from .client import Client, Future +from .utils import parse_timedelta, sync, thread_state from .worker import get_client, get_worker -from .utils import parse_timedelta logger = logging.getLogger(__name__) diff --git a/distributed/recreate_exceptions.py b/distributed/recreate_exceptions.py index 7e966f270a0..6b498113b5e 100644 --- a/distributed/recreate_exceptions.py +++ b/distributed/recreate_exceptions.py @@ -1,5 +1,7 @@ import logging + from dask.utils import stringify + from .client import futures_of, wait from .utils import sync from .utils_comm import pack_data diff --git a/distributed/scheduler.py b/distributed/scheduler.py index d4e0b20dcb8..8627ee56d53 100644 --- a/distributed/scheduler.py +++ b/distributed/scheduler.py @@ -1,90 +1,85 @@ import asyncio -from collections import defaultdict, deque - -from collections.abc import Mapping, Set -from contextlib import suppress -from datetime import timedelta -from functools import partial import inspect import itertools import json import logging import math -from numbers import Number import operator import os -import sys import random +import sys import warnings import weakref +from collections import defaultdict, deque +from collections.abc import Mapping, Set +from contextlib import suppress +from datetime import timedelta +from functools import partial +from numbers import Number + +import dask import psutil import sortedcontainers - +from dask.highlevelgraph import HighLevelGraph from tlz import ( + compose, + concat, + first, + groupby, merge, - pluck, merge_sorted, - first, merge_with, - valmap, + pluck, second, - compose, - groupby, - concat, + valmap, ) from tornado.ioloop import IOLoop, PeriodicCallback -import dask -from dask.highlevelgraph import HighLevelGraph - -from . import profile +from . import preloading, profile +from . import versions as version_module from .batched import BatchedSend from .comm import ( + get_address_host, normalize_address, resolve_address, - get_address_host, unparse_host_port, ) from .comm.addressing import addresses_from_user_args -from .core import rpc, send_recv, clean_exception, CommClosedError, Status +from .core import CommClosedError, Status, clean_exception, rpc, send_recv from .diagnostics.plugin import SchedulerPlugin - +from .event import EventExtension from .http import get_handlers +from .lock import LockExtension from .metrics import time +from .multi_lock import MultiLockExtension from .node import ServerNode -from . import preloading from .proctitle import setproctitle +from .publish import PublishExtension +from .pubsub import PubSubSchedulerExtension +from .queues import QueueExtension +from .recreate_exceptions import ReplayExceptionScheduler from .security import Security +from .semaphore import SemaphoreExtension +from .stealing import WorkStealing from .utils import ( All, + TimeoutError, + empty_context, + format_bytes, + format_time, get_fileno_limit, - log_errors, key_split, - validate_key, + key_split_group, + log_errors, no_default, - parse_timedelta, parse_bytes, + parse_timedelta, shutting_down, - key_split_group, - empty_context, tmpfile, - format_bytes, - format_time, - TimeoutError, + validate_key, ) -from .utils_comm import scatter_to_workers, gather_from_workers, retry_operation -from .utils_perf import enable_gc_diagnosis, disable_gc_diagnosis -from . 
import versions as version_module - -from .publish import PublishExtension -from .queues import QueueExtension -from .semaphore import SemaphoreExtension -from .recreate_exceptions import ReplayExceptionScheduler -from .lock import LockExtension -from .multi_lock import MultiLockExtension -from .event import EventExtension -from .pubsub import PubSubSchedulerExtension -from .stealing import WorkStealing +from .utils_comm import gather_from_workers, retry_operation, scatter_to_workers +from .utils_perf import disable_gc_diagnosis, enable_gc_diagnosis from .variable import VariableExtension try: @@ -94,6 +89,8 @@ if compiled: from cython import ( + Py_hash_t, + Py_ssize_t, bint, cast, ccall, @@ -105,15 +102,11 @@ final, inline, nogil, - Py_hash_t, - Py_ssize_t, ) else: - from ctypes import ( - c_double as double, - c_ssize_t as Py_hash_t, - c_ssize_t as Py_ssize_t, - ) + from ctypes import c_double as double + from ctypes import c_ssize_t as Py_hash_t + from ctypes import c_ssize_t as Py_ssize_t bint = bool @@ -6387,16 +6380,16 @@ def profile_to_figure(state): for k in sorted(timespent.keys()): tasks_timings += f"\n
<li> {k} time: {format_time(timespent[k])} </li>" - from .diagnostics.task_stream import rectangles from .dashboard.components.scheduler import task_stream_figure + from .diagnostics.task_stream import rectangles rects = rectangles(task_stream) source, task_stream = task_stream_figure(sizing_mode="stretch_both") source.data.update(rects) from distributed.dashboard.components.scheduler import ( - BandwidthWorkers, BandwidthTypes, + BandwidthWorkers, ) bandwidth_workers = BandwidthWorkers(self, sizing_mode="stretch_both") @@ -6404,7 +6397,8 @@ def profile_to_figure(state): bandwidth_types = BandwidthTypes(self, sizing_mode="stretch_both") bandwidth_types.update() - from bokeh.models import Panel, Tabs, Div + from bokeh.models import Div, Panel, Tabs + import distributed # HTML @@ -6473,8 +6467,8 @@ def profile_to_figure(state): ] ) - from bokeh.plotting import save, output_file from bokeh.core.templates import get_env + from bokeh.plotting import output_file, save with tmpfile(extension=".html") as fn: output_file(filename=fn, title="Dask Performance Report") diff --git a/distributed/security.py index 2cfe952b397..551e29c8652 100644 --- a/distributed/security.py +++ b/distributed/security.py @@ -1,6 +1,6 @@ import datetime -import tempfile import os +import tempfile try: import ssl @@ -9,7 +9,6 @@ import dask - __all__ = ("Security",) @@ -95,8 +94,7 @@ def temporary(cls): try: from cryptography import x509 from cryptography.hazmat.backends import default_backend - from cryptography.hazmat.primitives import hashes - from cryptography.hazmat.primitives import serialization + from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.x509.oid import NameOID except ImportError: diff --git a/distributed/stealing.py index 7a336777f26..2044fe5b1f6 100644 --- a/distributed/stealing.py +++ b/distributed/stealing.py @@ -1,18 +1,17 @@ -from collections import defaultdict, deque import logging +from collections import defaultdict, deque from math import log2 from time import time +import dask +from tlz import topk from tornado.ioloop import PeriodicCallback -import dask from .comm.addressing import get_address_host from .core import CommClosedError from .diagnostics.plugin import SchedulerPlugin from .utils import log_errors, parse_timedelta -from tlz import topk - LATENCY = 10e-3 logger = logging.getLogger(__name__) diff --git a/distributed/system_monitor.py index cf305869a8c..2f8e64c6640 100644 --- a/distributed/system_monitor.py +++ b/distributed/system_monitor.py @@ -1,4 +1,5 @@ from collections import deque + import psutil from .compatibility import WINDOWS diff --git a/distributed/tests/make_tls_certs.py index 02817cbdf5e..7286b780449 100644 --- a/distributed/tests/make_tls_certs.py +++ b/distributed/tests/make_tls_certs.py @@ -5,8 +5,8 @@ import os import shutil -import tempfile import subprocess +import tempfile req_template = """ [req] diff --git a/distributed/tests/test_actor.py index 7b91b3da1c3..83697cef220 100644 --- a/distributed/tests/test_actor.py +++ b/distributed/tests/test_actor.py @@ -2,13 +2,18 @@ import operator from time import sleep +import dask import pytest -import dask -from distributed import Actor, ActorFuture, Client, Future, wait, Nanny -from distributed.utils_test import cluster, gen_cluster -from distributed.utils_test import client, cluster_fixture, loop # noqa: F401 +from
distributed import Actor, ActorFuture, Client, Future, Nanny, wait from distributed.metrics import time +from distributed.utils_test import ( # noqa: F401 + client, + cluster, + cluster_fixture, + gen_cluster, + loop, +) class Counter: @@ -425,8 +430,8 @@ def __init__(self, x, y=None): @gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 4, Worker=Nanny) async def bench_param_server(c, s, *workers): - import dask.array as da import numpy as np + from dask import array as da x = da.random.random((500000, 1000), chunks=(1000, 1000)) x = x.persist() diff --git a/distributed/tests/test_as_completed.py b/distributed/tests/test_as_completed.py index 3efa759fa36..1002259391b 100644 --- a/distributed/tests/test_as_completed.py +++ b/distributed/tests/test_as_completed.py @@ -1,17 +1,23 @@ import asyncio -from collections.abc import Iterator -from operator import add import queue import random +from collections.abc import Iterator +from operator import add from time import sleep import pytest -from distributed.client import _as_completed, as_completed, _first_completed, wait +from distributed.client import _as_completed, _first_completed, as_completed, wait from distributed.metrics import time from distributed.utils import CancelledError -from distributed.utils_test import gen_cluster, inc, throws -from distributed.utils_test import client, cluster_fixture, loop # noqa: F401 +from distributed.utils_test import ( # noqa: F401 + client, + cluster_fixture, + gen_cluster, + inc, + loop, + throws, +) @gen_cluster(client=True) diff --git a/distributed/tests/test_asyncprocess.py b/distributed/tests/test_asyncprocess.py index 288ba58be86..32c4e5c5589 100644 --- a/distributed/tests/test_asyncprocess.py +++ b/distributed/tests/test_asyncprocess.py @@ -16,7 +16,7 @@ from distributed.metrics import time from distributed.process import AsyncProcess from distributed.utils import mp_context -from distributed.utils_test import gen_test, pristine_loop, nodebug +from distributed.utils_test import gen_test, nodebug, pristine_loop def feed(in_q, out_q): diff --git a/distributed/tests/test_batched.py b/distributed/tests/test_batched.py index a1342178a71..ee84ec3224e 100644 --- a/distributed/tests/test_batched.py +++ b/distributed/tests/test_batched.py @@ -5,11 +5,11 @@ from tlz import assoc from distributed.batched import BatchedSend -from distributed.core import listen, connect, CommClosedError +from distributed.core import CommClosedError, connect, listen from distributed.metrics import time +from distributed.protocol import to_serialize from distributed.utils import All from distributed.utils_test import captured_logger -from distributed.protocol import to_serialize class EchoServer: diff --git a/distributed/tests/test_client.py b/distributed/tests/test_client.py index 7a9ca27352a..7ef6340f53c 100644 --- a/distributed/tests/test_client.py +++ b/distributed/tests/test_client.py @@ -1,102 +1,101 @@ import asyncio -from collections import deque -from contextlib import suppress -from functools import partial import gc import logging -from operator import add import os import pickle -import psutil import random import subprocess import sys import threading -from threading import Semaphore -from time import sleep import traceback import warnings import weakref import zipfile - -import pytest -from tlz import identity, isdistinct, concat, pluck, valmap, first, merge +from collections import deque +from contextlib import suppress +from functools import partial +from operator import add +from threading import Semaphore 
+from time import sleep import dask +import psutil +import pytest +from dask import bag as db from dask import delayed from dask.optimization import SubgraphCallable from dask.utils import stringify -import dask.bag as db +from tlz import concat, first, identity, isdistinct, merge, pluck, valmap + from distributed import ( - Worker, + CancelledError, + Executor, + LocalCluster, Nanny, + TimeoutError, + Worker, fire_and_forget, - LocalCluster, get_client, - secede, get_worker, - Executor, - profile, performance_report, - TimeoutError, - CancelledError, + profile, + secede, ) -from distributed.core import Status -from distributed.comm import CommClosedError from distributed.client import ( Client, Future, - wait, - as_completed, - tokenize, _get_global_client, + as_completed, default_client, futures_of, - temp_default_client, get_task_metadata, + temp_default_client, + tokenize, + wait, ) +from distributed.comm import CommClosedError from distributed.compatibility import MACOS, WINDOWS - +from distributed.core import Status from distributed.metrics import time -from distributed.scheduler import Scheduler, KilledWorker, CollectTaskMetaDataPlugin +from distributed.scheduler import CollectTaskMetaDataPlugin, KilledWorker, Scheduler from distributed.sizeof import sizeof -from distributed.utils import mp_context, sync, tmp_text, tmpfile, is_valid_xml -from distributed.utils_test import ( - cluster, - slowinc, - slowadd, - slowdec, - randominc, - inc, - dec, - div, - throws, - geninc, +from distributed.utils import is_valid_xml, mp_context, sync, tmp_text, tmpfile +from distributed.utils_test import ( # noqa: F401 + TaskStateMetadataPlugin, + a, + async_wait_for, asyncinc, - gen_cluster, - gen_test, - double, - popen, + b, captured_logger, - varying, - map_varying, - wait_for, - async_wait_for, - pristine_loop, - save_sys_modules, - TaskStateMetadataPlugin, + cleanup, ) +from distributed.utils_test import client as c # noqa: F401 +from distributed.utils_test import client_secondary as c2 # noqa: F401 from distributed.utils_test import ( # noqa: F401 - client as c, - client_secondary as c2, - cleanup, + cluster, cluster_fixture, + dec, + div, + double, + gen_cluster, + gen_test, + geninc, + inc, loop, loop_in_thread, + map_varying, nodebug, + popen, + pristine_loop, + randominc, s, - a, - b, + save_sys_modules, + slowadd, + slowdec, + slowinc, + throws, + varying, + wait_for, ) @@ -1598,7 +1597,8 @@ def g(): @gen_cluster(client=True) async def test_upload_file_egg(c, s, a, b): def g(): - import package_1, package_2 + import package_1 + import package_2 return package_1.a, package_2.b @@ -2451,7 +2451,7 @@ def test_Future_exception_sync_2(loop, capsys): @gen_cluster(timeout=60, client=True) async def test_async_persist(c, s, a, b): - from dask.delayed import delayed, Delayed + from dask.delayed import Delayed, delayed x = delayed(1) y = delayed(inc)(x) @@ -2485,7 +2485,7 @@ async def test_async_persist(c, s, a, b): @gen_cluster(client=True) async def test__persist(c, s, a, b): pytest.importorskip("dask.array") - import dask.array as da + from dask import array as da x = da.ones((10, 10), chunks=(5, 10)) y = 2 * (x + 1) @@ -2505,7 +2505,7 @@ async def test__persist(c, s, a, b): def test_persist(c): pytest.importorskip("dask.array") - import dask.array as da + from dask import array as da x = da.ones((10, 10), chunks=(5, 10)) y = 2 * (x + 1) @@ -5517,8 +5517,8 @@ async def test_client_timeout_2(): @gen_test() async def test_client_active_bad_port(): - import tornado.web import tornado.httpserver + 
 import tornado.web
 application = tornado.web.Application([(r"/", tornado.web.RequestHandler)])
 http_server = tornado.httpserver.HTTPServer(application)
@@ -6432,8 +6432,8 @@ async def test_annotations_retries(c, s, a, b):
 @gen_cluster(client=True)
 async def test_annotations_blockwise_unpack(c, s, a, b):
     da = pytest.importorskip("dask.array")
-    from dask.array.utils import assert_eq
     import numpy as np
+    from dask.array.utils import assert_eq
 
     # A flaky doubling function -- need extra args because it is called before
     # application to establish dtype/meta.
diff --git a/distributed/tests/test_client_executor.py b/distributed/tests/test_client_executor.py
index b55b86ce7c8..555bcb86fbe 100644
--- a/distributed/tests/test_client_executor.py
+++ b/distributed/tests/test_client_executor.py
@@ -1,13 +1,12 @@
 import random
 import time
-
 from concurrent.futures import (
-    TimeoutError,
-    Future,
-    wait,
-    as_completed,
     FIRST_COMPLETED,
     FIRST_EXCEPTION,
+    Future,
+    TimeoutError,
+    as_completed,
+    wait,
 )
 
 import pytest
@@ -15,16 +14,21 @@
 
 from distributed import Client
 from distributed.utils import CancelledError
-from distributed.utils_test import (
-    slowinc,
+from distributed.utils_test import (  # noqa: F401
+    a,
+    b,
+    client,
+    cluster,
+    cluster_fixture,
+    inc,
+    loop,
+    s,
     slowadd,
     slowdec,
-    inc,
+    slowinc,
     throws,
     varying,
-    cluster,
 )
-from distributed.utils_test import client, cluster_fixture, loop, s, a, b  # noqa: F401
 
 
 def number_of_processing_tasks(client):
diff --git a/distributed/tests/test_client_loop.py b/distributed/tests/test_client_loop.py
index ce50498f7e4..63a08cb1639 100644
--- a/distributed/tests/test_client_loop.py
+++ b/distributed/tests/test_client_loop.py
@@ -1,5 +1,6 @@
 import pytest
-from distributed import LocalCluster, Client
+
+from distributed import Client, LocalCluster
 from distributed.utils import LoopRunner
diff --git a/distributed/tests/test_collections.py b/distributed/tests/test_collections.py
index 022db4080dd..d372bdc2504 100644
--- a/distributed/tests/test_collections.py
+++ b/distributed/tests/test_collections.py
@@ -6,21 +6,26 @@
 pytest.importorskip("pandas")
 
 import dask
-import dask.dataframe as dd
-import dask.bag as db
-from distributed.client import wait
-from distributed.utils_test import gen_cluster
-from distributed.utils_test import client, cluster_fixture, loop  # noqa F401
 import numpy as np
 import pandas as pd
+from dask import bag as db
+from dask import dataframe as dd
+
+from distributed.client import wait
+from distributed.utils_test import (  # noqa F401
+    client,
+    cluster_fixture,
+    gen_cluster,
+    loop,
+)
 
 PANDAS_VERSION = LooseVersion(pd.__version__)
 PANDAS_GT_100 = PANDAS_VERSION >= LooseVersion("1.0.0")
 
 if PANDAS_GT_100:
-    import pandas.testing as tm  # noqa: F401
+    from pandas import testing as tm  # noqa: F401
 else:
-    import pandas.util.testing as tm  # noqa: F401
+    from pandas.util import testing as tm  # noqa: F401
 
 
 dfs = [
@@ -78,7 +83,7 @@ async def test_dataframes(c, s, a, b):
 
 @gen_cluster(client=True)
 async def test_dask_array_collections(c, s, a, b):
-    import dask.array as da
+    from dask import array as da
 
     s.validate = False
     x_dsk = {("x", i, j): np.random.random((3, 3)) for i in range(3) for j in range(2)}
diff --git a/distributed/tests/test_config.py b/distributed/tests/test_config.py
index 74b57b1f011..200493a822c 100644
--- a/distributed/tests/test_config.py
+++ b/distributed/tests/test_config.py
@@ -1,19 +1,19 @@
 import logging
+import os
 import subprocess
 import sys
 import tempfile
-import os
-import yaml
 
 import pytest
+import yaml
 
+from distributed.config import initialize_logging
 from distributed.utils_test import (
     captured_handler,
     captured_logger,
     new_config,
     new_config_file,
 )
-from distributed.config import initialize_logging
 
 
 def dump_logger_list():
diff --git a/distributed/tests/test_core.py b/distributed/tests/test_core.py
index 071b6b039c7..5a50263a383 100644
--- a/distributed/tests/test_core.py
+++ b/distributed/tests/test_core.py
@@ -2,43 +2,42 @@
 import os
 import socket
 import threading
-import weakref
 import warnings
+import weakref
 
+import dask
 import pytest
-import dask
 
 from distributed.core import (
-    pingpong,
+    ConnectionPool,
     Server,
     Status,
-    rpc,
+    coerce_to_address,
     connect,
+    pingpong,
+    rpc,
     send_recv,
-    coerce_to_address,
-    ConnectionPool,
 )
-from distributed.protocol.compression import compressions
-
 from distributed.metrics import time
 from distributed.protocol import to_serialize
+from distributed.protocol.compression import compressions
 from distributed.utils import get_ip, get_ipv6
+from distributed.utils_test import loop  # noqa F401
 from distributed.utils_test import (
-    gen_cluster,
-    has_ipv6,
     assert_can_connect,
-    assert_cannot_connect,
     assert_can_connect_from_everywhere_4,
     assert_can_connect_from_everywhere_4_6,
     assert_can_connect_from_everywhere_6,
     assert_can_connect_locally_4,
     assert_can_connect_locally_6,
-    tls_security,
+    assert_cannot_connect,
     captured_logger,
+    gen_cluster,
+    has_ipv6,
     inc,
     throws,
+    tls_security,
 )
-from distributed.utils_test import loop  # noqa F401
 
 
 def echo(comm, x):
diff --git a/distributed/tests/test_diskutils.py b/distributed/tests/test_diskutils.py
index 3b97fde1498..22c11bbbeb4 100644
--- a/distributed/tests/test_diskutils.py
+++ b/distributed/tests/test_diskutils.py
@@ -8,9 +8,9 @@
 from time import sleep
 from unittest import mock
 
+import dask
 import pytest
 
-import dask
 from distributed.compatibility import MACOS, WINDOWS
 from distributed.diskutils import WorkSpace
 from distributed.metrics import time
diff --git a/distributed/tests/test_events.py b/distributed/tests/test_events.py
index 484a01c4e3a..9f27a19a65d 100644
--- a/distributed/tests/test_events.py
+++ b/distributed/tests/test_events.py
@@ -2,8 +2,12 @@
 from datetime import timedelta
 
 from distributed import Event
-from distributed.utils_test import gen_cluster
-from distributed.utils_test import client, cluster_fixture, loop  # noqa F401
+from distributed.utils_test import (  # noqa F401
+    client,
+    cluster_fixture,
+    gen_cluster,
+    loop,
+)
 
 
 @gen_cluster(client=True, nthreads=[("127.0.0.1", 8)] * 2)
diff --git a/distributed/tests/test_failed_workers.py b/distributed/tests/test_failed_workers.py
index 717fe01c80d..ac960f0bd44 100644
--- a/distributed/tests/test_failed_workers.py
+++ b/distributed/tests/test_failed_workers.py
@@ -1,28 +1,28 @@
 import asyncio
-from contextlib import suppress
 import os
 import random
+from contextlib import suppress
 from time import sleep
 
 import pytest
-from tlz import partition_all, first
-
 from dask import delayed
+from tlz import first, partition_all
+
 from distributed import Client, Nanny, wait
 from distributed.comm import CommClosedError
 from distributed.compatibility import MACOS
 from distributed.metrics import time
-from distributed.utils import sync, CancelledError
+from distributed.utils import CancelledError, sync
+from distributed.utils_test import loop  # noqa: F401
 from distributed.utils_test import (
-    gen_cluster,
+    captured_logger,
     cluster,
-    inc,
     div,
-    slowinc,
+    gen_cluster,
+    inc,
     slowadd,
-    captured_logger,
+    slowinc,
 )
-from distributed.utils_test import loop  # noqa: F401
 
 
 def test_submit_after_failed_worker_sync(loop):
diff --git a/distributed/tests/test_ipython.py b/distributed/tests/test_ipython.py
index 51d7ae85f26..fbe5cfc8f9d 100644
--- a/distributed/tests/test_ipython.py
+++ b/distributed/tests/test_ipython.py
@@ -4,8 +4,7 @@
 from tlz import first
 
 from distributed import Client
-from distributed.utils_test import cluster, mock_ipython
-from distributed.utils_test import loop, zmq_ctx  # noqa F401
+from distributed.utils_test import cluster, loop, mock_ipython, zmq_ctx  # noqa F401
 
 
 def need_functional_ipython(func):
diff --git a/distributed/tests/test_locks.py b/distributed/tests/test_locks.py
index 80eaa06a94f..54c25c49099 100644
--- a/distributed/tests/test_locks.py
+++ b/distributed/tests/test_locks.py
@@ -1,13 +1,17 @@
 import pickle
-from time import sleep
 from datetime import timedelta
+from time import sleep
 
 import pytest
 
-from distributed import Lock, get_client, Client
+from distributed import Client, Lock, get_client
 from distributed.metrics import time
-from distributed.utils_test import gen_cluster
-from distributed.utils_test import client, cluster_fixture, loop  # noqa F401
+from distributed.utils_test import (  # noqa F401
+    client,
+    cluster_fixture,
+    gen_cluster,
+    loop,
+)
 
 
 @gen_cluster(client=True, nthreads=[("127.0.0.1", 8)] * 2)
diff --git a/distributed/tests/test_multi_locks.py b/distributed/tests/test_multi_locks.py
index c1a7cc55af5..26f4a6dbdb7 100644
--- a/distributed/tests/test_multi_locks.py
+++ b/distributed/tests/test_multi_locks.py
@@ -1,12 +1,15 @@
 import asyncio
-from distributed.multi_lock import MultiLockExtension
 from time import sleep
-
 
 from distributed import MultiLock, get_client
 from distributed.metrics import time
-from distributed.utils_test import gen_cluster
-from distributed.utils_test import client, cluster_fixture, loop  # noqa F401
+from distributed.multi_lock import MultiLockExtension
+from distributed.utils_test import (  # noqa F401
+    client,
+    cluster_fixture,
+    gen_cluster,
+    loop,
+)
 
 
 @gen_cluster(client=True, nthreads=[("127.0.0.1", 8)] * 2)
diff --git a/distributed/tests/test_nanny.py b/distributed/tests/test_nanny.py
index 1993f11a377..b00101699a9 100644
--- a/distributed/tests/test_nanny.py
+++ b/distributed/tests/test_nanny.py
@@ -1,32 +1,31 @@
 import asyncio
-from contextlib import suppress
 import gc
 import logging
+import multiprocessing as mp
 import os
 import random
 import sys
-import multiprocessing as mp
+from contextlib import suppress
 
+import dask
 import numpy as np
-
 import pytest
-from tlz import valmap, first
+from tlz import first, valmap
 from tornado.ioloop import IOLoop
 
-import dask
-from distributed.diagnostics import SchedulerPlugin
-from distributed import Nanny, rpc, Scheduler, Worker, Client, wait, worker
+from distributed import Client, Nanny, Scheduler, Worker, rpc, wait, worker
 from distributed.compatibility import MACOS
 from distributed.core import CommClosedError, Status
+from distributed.diagnostics import SchedulerPlugin
 from distributed.metrics import time
 from distributed.protocol.pickle import dumps
-from distributed.utils import tmpfile, TimeoutError, parse_ports
+from distributed.utils import TimeoutError, parse_ports, tmpfile
 from distributed.utils_test import (  # noqa: F401
+    captured_logger,
+    cleanup,
     gen_cluster,
     gen_test,
     inc,
-    captured_logger,
-    cleanup,
 )
diff --git a/distributed/tests/test_preload.py b/distributed/tests/test_preload.py
index ae13137d847..df65115bfd5 100644
--- a/distributed/tests/test_preload.py
+++ b/distributed/tests/test_preload.py
@@ -1,17 +1,14 @@
 import os
-import pytest
 import shutil
 import sys
 import tempfile
-import pytest
-
-from tornado import web
 
 import dask
-from distributed import Client, Scheduler, Worker, Nanny
-from distributed.utils_test import cluster, captured_logger
-from distributed.utils_test import loop, cleanup  # noqa F401
+import pytest
+from tornado import web
+
+from distributed import Client, Nanny, Scheduler, Worker
+from distributed.utils_test import captured_logger, cleanup, cluster, loop  # noqa F401
 
 PRELOAD_TEXT = """
 _worker_info = {}
diff --git a/distributed/tests/test_priorities.py b/distributed/tests/test_priorities.py
index d3ba8a1a026..411df10df7e 100644
--- a/distributed/tests/test_priorities.py
+++ b/distributed/tests/test_priorities.py
@@ -1,14 +1,13 @@
 import asyncio
-import pytest
-
-from dask.core import flatten
 
 import dask
+import pytest
 from dask import delayed, persist
+from dask.core import flatten
 from dask.utils import stringify
 
-from distributed.utils_test import gen_cluster, inc, slowinc, slowdec
-from distributed import wait, Worker
+from distributed import Worker, wait
+from distributed.utils_test import gen_cluster, inc, slowdec, slowinc
 
 
 @gen_cluster(client=True, nthreads=[])
diff --git a/distributed/tests/test_profile.py b/distributed/tests/test_profile.py
index 9f673e8caaf..5fd6f6f2dab 100644
--- a/distributed/tests/test_profile.py
+++ b/distributed/tests/test_profile.py
@@ -1,21 +1,22 @@
-import pytest
 import sys
+import threading
 import time
+
+import pytest
 from tlz import first
-import threading
 
-from distributed.compatibility import WINDOWS
 from distributed import metrics
+from distributed.compatibility import WINDOWS
 from distributed.profile import (
-    process,
-    merge,
-    create,
     call_stack,
+    create,
     identifier,
-    watch,
-    llprocess,
     ll_get_stack,
+    llprocess,
+    merge,
     plot_data,
+    process,
+    watch,
 )
diff --git a/distributed/tests/test_publish.py b/distributed/tests/test_publish.py
index 82bcc7c996f..043ef8df1f7 100644
--- a/distributed/tests/test_publish.py
+++ b/distributed/tests/test_publish.py
@@ -1,13 +1,19 @@
 import asyncio
-import pytest
 
+import pytest
 from dask import delayed
+
 from distributed import Client
 from distributed.client import futures_of
 from distributed.metrics import time
-from distributed.utils_test import gen_cluster, inc
-from distributed.utils_test import client, cluster_fixture, loop  # noqa F401
 from distributed.protocol import Serialized
+from distributed.utils_test import (  # noqa F401
+    client,
+    cluster_fixture,
+    gen_cluster,
+    inc,
+    loop,
+)
 
 
 @gen_cluster(client=False)
diff --git a/distributed/tests/test_pubsub.py b/distributed/tests/test_pubsub.py
index 8f8a3b734e7..64c16860126 100644
--- a/distributed/tests/test_pubsub.py
+++ b/distributed/tests/test_pubsub.py
@@ -5,9 +5,9 @@
 import pytest
 import tlz as toolz
 
-from distributed import Pub, Sub, wait, get_worker, TimeoutError
-from distributed.utils_test import gen_cluster
+from distributed import Pub, Sub, TimeoutError, get_worker, wait
 from distributed.metrics import time
+from distributed.utils_test import gen_cluster
 
 
 @gen_cluster(client=True, timeout=None)
diff --git a/distributed/tests/test_queues.py b/distributed/tests/test_queues.py
index 8f400498854..c7efa9a0d23 100644
--- a/distributed/tests/test_queues.py
+++ b/distributed/tests/test_queues.py
@@ -4,10 +4,17 @@
 
 import pytest
 
-from distributed import Client, Queue, Nanny, worker_client, wait, TimeoutError
+from distributed import Client, Nanny, Queue, TimeoutError, wait, worker_client
 from distributed.metrics import time
-from distributed.utils_test import gen_cluster, inc, div, popen
-from distributed.utils_test import client, cluster_fixture, loop  # noqa: F401
+from distributed.utils_test import (  # noqa: F401
+    client,
+    cluster_fixture,
+    div,
+    gen_cluster,
+    inc,
+    loop,
+    popen,
+)
 
 
 @gen_cluster(client=True)
diff --git a/distributed/tests/test_resources.py b/distributed/tests/test_resources.py
index 68837e30a14..55e1fe7fc72 100644
--- a/distributed/tests/test_resources.py
+++ b/distributed/tests/test_resources.py
@@ -2,15 +2,25 @@
 from time import time
 
 import dask
+import pytest
 from dask import delayed
 from dask.utils import stringify
-import pytest
 
 from distributed import Worker
 from distributed.client import wait
 from distributed.compatibility import WINDOWS
-from distributed.utils_test import inc, gen_cluster, slowinc, slowadd
-from distributed.utils_test import client, cluster_fixture, loop, s, a, b  # noqa: F401
+from distributed.utils_test import (  # noqa: F401
+    a,
+    b,
+    client,
+    cluster_fixture,
+    gen_cluster,
+    inc,
+    loop,
+    s,
+    slowadd,
+    slowinc,
+)
 
 
 @gen_cluster(client=True, nthreads=[])
diff --git a/distributed/tests/test_scheduler.py b/distributed/tests/test_scheduler.py
index 701a874fbcc..c8d87910011 100644
--- a/distributed/tests/test_scheduler.py
+++ b/distributed/tests/test_scheduler.py
@@ -9,38 +9,38 @@
 
 import cloudpickle
 import dask
-from dask import delayed
-from tlz import merge, concat, valmap, first, frequencies
-
 import pytest
+from dask import delayed
+from dask.compatibility import apply
+from tlz import concat, first, frequencies, merge, valmap
 
-from distributed import Nanny, Worker, Client, wait, fire_and_forget
+from distributed import Client, Nanny, Worker, fire_and_forget, wait
+from distributed.client import wait
 from distributed.comm import Comm
 from distributed.compatibility import MACOS
-from distributed.core import connect, rpc, ConnectionPool, Status
-from distributed.scheduler import Scheduler
-from distributed.client import wait
+from distributed.core import ConnectionPool, Status, connect, rpc
 from distributed.metrics import time
 from distributed.protocol.pickle import dumps
-from distributed.worker import dumps_function, dumps_task
-from distributed.utils import tmpfile, typename, TimeoutError
+from distributed.scheduler import Scheduler
+from distributed.utils import TimeoutError, tmpfile, typename
 from distributed.utils_test import (  # noqa: F401
     captured_logger,
     cleanup,
-    inc,
+    cluster,
     dec,
+    div,
     gen_cluster,
     gen_test,
-    slowinc,
+    inc,
+    loop,
+    nodebug,
     slowadd,
     slowdec,
-    cluster,
-    div,
-    varying,
+    slowinc,
     tls_only_security,
+    varying,
 )
-from distributed.utils_test import loop, nodebug  # noqa: F401
-from dask.compatibility import apply
+from distributed.worker import dumps_function, dumps_task
 
 if sys.version_info < (3, 8):
     try:
diff --git a/distributed/tests/test_security.py b/distributed/tests/test_security.py
index dc1aee9e9b6..c7a6b826dc7 100644
--- a/distributed/tests/test_security.py
+++ b/distributed/tests/test_security.py
@@ -5,14 +5,13 @@
 except ImportError:
     ssl = None
 
+import dask
 import pytest
 
 from distributed.comm import connect, listen
 from distributed.security import Security
 from distributed.utils_test import get_cert
 
-import dask
-
 ca_file = get_cert("tls-ca-cert.pem")
 cert1 = get_cert("tls-cert.pem")
diff --git a/distributed/tests/test_semaphore.py b/distributed/tests/test_semaphore.py
index e23864d811f..acecd5a3b79 100644
--- a/distributed/tests/test_semaphore.py
+++ b/distributed/tests/test_semaphore.py
@@ -1,27 +1,29 @@
 import asyncio
-from datetime import timedelta
+import logging
 import pickle
+from datetime import timedelta
+from time import sleep, time
+
 import dask
 import pytest
 from dask.distributed import Client
-from time import time, sleep
+
 from distributed import Semaphore, fire_and_forget
 from distributed.comm import Comm
 from distributed.compatibility import WINDOWS
 from distributed.core import ConnectionPool
 from distributed.metrics import time
 from distributed.utils_test import (  # noqa: F401
-    client,
-    cleanup,
-    cluster,
     async_wait_for,
     captured_logger,
+    cleanup,
+    client,
+    cluster,
     cluster_fixture,
     gen_cluster,
-    slowidentity,
     loop,
+    slowidentity,
 )
-import logging
 
 
 @gen_cluster(client=True)
diff --git a/distributed/tests/test_sizeof.py b/distributed/tests/test_sizeof.py
index a92d2900ae8..b5383bfd5d3 100644
--- a/distributed/tests/test_sizeof.py
+++ b/distributed/tests/test_sizeof.py
@@ -1,5 +1,6 @@
-import pytest
 import logging
+
+import pytest
 from dask.sizeof import sizeof
 
 from distributed.sizeof import safe_sizeof
diff --git a/distributed/tests/test_steal.py b/distributed/tests/test_steal.py
index ac37c0d5acf..fb1a0bac9b6 100644
--- a/distributed/tests/test_steal.py
+++ b/distributed/tests/test_steal.py
@@ -9,6 +9,8 @@
 
 import dask
 import pytest
+from tlz import concat, sliding_window
+
 from distributed import Nanny, Worker, wait, worker_client
 from distributed.config import config
 from distributed.metrics import time
@@ -24,7 +26,6 @@
     slowidentity,
     slowinc,
 )
-from tlz import concat, sliding_window
 
 # Most tests here are timing-dependent
 setup_module = nodebug_setup_module
diff --git a/distributed/tests/test_stress.py b/distributed/tests/test_stress.py
index 2cc65b29ae0..e5dafbc63b0 100644
--- a/distributed/tests/test_stress.py
+++ b/distributed/tests/test_stress.py
@@ -1,34 +1,31 @@
 import asyncio
-from contextlib import suppress
 import random
 import sys
+from contextlib import suppress
 from operator import add
 from time import sleep
 
-from dask import delayed
 import pytest
+from dask import delayed
 from tlz import concat, sliding_window
 
-from distributed import Client, wait, Nanny
+from distributed import Client, Nanny, wait
+from distributed.client import wait
 from distributed.config import config
 from distributed.metrics import time
 from distributed.utils import All, CancelledError
-from distributed.utils_test import (
-    gen_cluster,
+from distributed.utils_test import (  # noqa: F401
+    bump_rlimit,
     cluster,
+    gen_cluster,
     inc,
-    slowinc,
-    slowadd,
-    slowsum,
-    bump_rlimit,
-)
-from distributed.utils_test import (  # noqa: F401
     loop,
     nodebug_setup_module,
     nodebug_teardown_module,
+    slowadd,
+    slowinc,
+    slowsum,
 )
-from distributed.client import wait
-
 
 # All tests here are slow in some way
 setup_module = nodebug_setup_module
@@ -254,11 +251,13 @@ async def test_no_delay_during_large_transfer(c, s, w):
     x_nbytes = x.nbytes
 
     # Reset digests
-    from distributed.counter import Digest
     from collections import defaultdict
     from functools import partial
+
+    from dask.diagnostics import ResourceProfiler
+
+    from distributed.counter import Digest
 
     for server in [s, w]:
         server.digests = defaultdict(partial(Digest, loop=server.io_loop))
         server._last_tick = time()
diff --git a/distributed/tests/test_threadpoolexecutor.py b/distributed/tests/test_threadpoolexecutor.py
index 6da81e39288..ec26be4cd19 100644
--- a/distributed/tests/test_threadpoolexecutor.py
+++ b/distributed/tests/test_threadpoolexecutor.py
@@ -1,8 +1,8 @@
-from time import sleep
 import threading
+from time import sleep
 
 from distributed.metrics import time
-from distributed.threadpoolexecutor import ThreadPoolExecutor, secede, rejoin
+from distributed.threadpoolexecutor import ThreadPoolExecutor, rejoin, secede
 
 
 def test_tpe():
diff --git a/distributed/tests/test_tls_functional.py b/distributed/tests/test_tls_functional.py
index 3002b0a2c43..59e0bbb429d 100644
--- a/distributed/tests/test_tls_functional.py
+++ b/distributed/tests/test_tls_functional.py
@@ -3,21 +3,22 @@
 Most are taken from other test files and adapted.
 """
 import asyncio
+
 import pytest
 
-from distributed import Scheduler, Worker, Client, Nanny, worker_client, Queue
-from distributed.core import Status
+from distributed import Client, Nanny, Queue, Scheduler, Worker, worker_client
 from distributed.client import wait
+from distributed.core import Status
 from distributed.metrics import time
 from distributed.nanny import Nanny
 from distributed.utils_test import (  # noqa: F401
+    cleanup,
+    double,
     gen_tls_cluster,
     inc,
-    double,
-    slowinc,
     slowadd,
+    slowinc,
     tls_config,
-    cleanup,
 )
diff --git a/distributed/tests/test_utils.py b/distributed/tests/test_utils.py
index 022e9925445..0bdacbcaefe 100644
--- a/distributed/tests/test_utils.py
+++ b/distributed/tests/test_utils.py
@@ -1,55 +1,63 @@
-import asyncio
 import array
+import asyncio
 import datetime
-from functools import partial
 import io
 import os
 import queue
 import socket
 import sys
-from time import sleep
 import traceback
+from functools import partial
+from time import sleep
 
+import dask
 import numpy as np
 import pytest
 from tornado.ioloop import IOLoop
 
-import dask
 from distributed.metrics import time
 from distributed.utils import (
+    LRU,
     All,
     Log,
     Logs,
-    sync,
-    is_kernel,
-    is_valid_xml,
-    ensure_ip,
-    truncate_exception,
-    get_traceback,
+    LoopRunner,
+    TimeoutError,
     _maybe_complex,
-    read_block,
-    seek_delimiter,
-    funcname,
+    deserialize_for_cli,
     ensure_bytes,
-    open_port,
+    ensure_ip,
+    format_dashboard_link,
+    funcname,
     get_ip_interface,
+    get_traceback,
+    is_kernel,
+    is_valid_xml,
     nbytes,
-    set_thread_state,
-    thread_state,
-    LoopRunner,
+    offload,
+    open_port,
     parse_bytes,
-    parse_timedelta,
     parse_ports,
-    warn_on_duration,
-    format_dashboard_link,
-    LRU,
-    offload,
-    TimeoutError,
-    deserialize_for_cli,
+    parse_timedelta,
+    read_block,
+    seek_delimiter,
     serialize_for_cli,
+    set_thread_state,
+    sync,
+    thread_state,
+    truncate_exception,
+    warn_on_duration,
+)
+from distributed.utils_test import (  # noqa: F401
+    captured_logger,
+    div,
+    gen_test,
+    has_ipv6,
+    inc,
+    loop,
+    loop_in_thread,
+    throws,
 )
-from distributed.utils_test import loop, loop_in_thread  # noqa: F401
-from distributed.utils_test import div, has_ipv6, inc, throws, gen_test, captured_logger
 
 
 def test_All(loop):
diff --git a/distributed/tests/test_utils_comm.py b/distributed/tests/test_utils_comm.py
index 7ab793e18e4..c0bc721f51b 100644
--- a/distributed/tests/test_utils_comm.py
+++ b/distributed/tests/test_utils_comm.py
@@ -1,12 +1,12 @@
-from distributed.core import ConnectionPool
-from distributed.comm import Comm
-from distributed.utils_test import gen_cluster, loop  # noqa: F401
-from distributed.utils_comm import pack_data, subs_multiple, gather_from_workers, retry
-
 from unittest import mock
 
 import pytest
 
+from distributed.comm import Comm
+from distributed.core import ConnectionPool
+from distributed.utils_comm import gather_from_workers, pack_data, retry, subs_multiple
+from distributed.utils_test import gen_cluster, loop  # noqa: F401
+
 
 def test_pack_data():
     data = {"x": 1}
diff --git a/distributed/tests/test_utils_test.py b/distributed/tests/test_utils_test.py
index 6093d2eaea7..4bf0230548b 100755
--- a/distributed/tests/test_utils_test.py
+++ b/distributed/tests/test_utils_test.py
@@ -1,33 +1,30 @@
 import asyncio
-from contextlib import contextmanager
 import socket
 import threading
+from contextlib import contextmanager
 from time import sleep
 
 import pytest
 from tornado import gen
 
-from distributed import Scheduler, Worker, Nanny, Client, config, default_client
+from distributed import Client, Nanny, Scheduler, Worker, config, default_client
 from distributed.core import rpc
 from distributed.metrics import time
+from distributed.utils import get_ip
 from distributed.utils_test import (  # noqa: F401
     cleanup,
     cluster,
     gen_cluster,
-    inc,
     gen_test,
-    wait_for_port,
-    new_config,
-)
-
-from distributed.utils_test import (  # noqa: F401
+    inc,
     loop,
-    tls_only_security,
+    new_config,
     security,
     tls_client,
     tls_cluster,
+    tls_only_security,
+    wait_for_port,
 )
-from distributed.utils import get_ip
 
 
 def test_bare_cluster(loop):
diff --git a/distributed/tests/test_variable.py b/distributed/tests/test_variable.py
index 37b3c756be7..9763de48335 100644
--- a/distributed/tests/test_variable.py
+++ b/distributed/tests/test_variable.py
@@ -1,17 +1,25 @@
 import asyncio
+import logging
 import random
 from datetime import timedelta
-from time import sleep, monotonic
-import logging
+from time import monotonic, sleep
 
 import pytest
 from tornado.ioloop import IOLoop
 
-from distributed import Client, Variable, worker_client, Nanny, wait, TimeoutError
-from distributed.metrics import time
+from distributed import Client, Nanny, TimeoutError, Variable, wait, worker_client
 from distributed.compatibility import WINDOWS
-from distributed.utils_test import gen_cluster, inc, div, captured_logger, popen
-from distributed.utils_test import client, cluster_fixture, loop  # noqa: F401
+from distributed.metrics import time
+from distributed.utils_test import (  # noqa: F401
+    captured_logger,
+    client,
+    cluster_fixture,
+    div,
+    gen_cluster,
+    inc,
+    loop,
+    popen,
+)
 
 
 @gen_cluster(client=True)
diff --git a/distributed/tests/test_versions.py b/distributed/tests/test_versions.py
index 7b09d5299c6..345cee3a253 100644
--- a/distributed/tests/test_versions.py
+++ b/distributed/tests/test_versions.py
@@ -3,10 +3,9 @@
 
 import pytest
 
-from distributed.versions import get_versions, error_message
 from distributed import Client, Worker
 from distributed.utils_test import gen_cluster, loop  # noqa: F401
-
+from distributed.versions import error_message, get_versions
 
 # if one of the nodes reports this version, there's a mismatch
 mismatched_version = get_versions()
diff --git a/distributed/tests/test_worker.py b/distributed/tests/test_worker.py
index b15030d3599..b1ffc3ddbd3 100644
--- a/distributed/tests/test_worker.py
+++ b/distributed/tests/test_worker.py
@@ -1,61 +1,59 @@
-from concurrent.futures import ThreadPoolExecutor
+import asyncio
 import importlib
 import logging
-from numbers import Number
-from operator import add
 import os
-import psutil
 import sys
-from time import sleep
 import threading
 import traceback
+from concurrent.futures import ThreadPoolExecutor
+from numbers import Number
+from operator import add
+from time import sleep
 from unittest import mock
-import asyncio
 
 import dask
+import psutil
+import pytest
 from dask import delayed
-from dask.utils import format_bytes
 from dask.system import CPU_COUNT
-import pytest
-from tlz import pluck, sliding_window, first
+from dask.utils import format_bytes
+from tlz import first, pluck, sliding_window
 
 from distributed import (
     Client,
     Nanny,
-    get_client,
+    Reschedule,
     default_client,
+    get_client,
     get_worker,
-    Reschedule,
     wait,
 )
-from distributed.diagnostics.plugin import PipInstall
 from distributed.compatibility import MACOS, WINDOWS
-from distributed.core import rpc, CommClosedError, Status
-from distributed.scheduler import Scheduler
+from distributed.core import CommClosedError, Status, rpc
+from distributed.diagnostics.plugin import PipInstall
 from distributed.metrics import time
-from distributed.worker import Worker, error_message, logger, parse_memory_limit
-from distributed.utils import tmpfile, TimeoutError
+from distributed.scheduler import Scheduler
+from distributed.utils import TimeoutError, tmpfile
 from distributed.utils_test import (  # noqa: F401
+    TaskStateMetadataPlugin,
+    a,
+    b,
+    captured_logger,
     cleanup,
-    inc,
-    mul,
-    gen_cluster,
-    div,
+    client,
+    cluster_fixture,
     dec,
-    slowinc,
+    div,
+    gen_cluster,
     gen_test,
-    captured_logger,
-)
-from distributed.utils_test import (  # noqa: F401
-    client,
+    inc,
     loop,
+    mul,
     nodebug,
-    cluster_fixture,
     s,
-    a,
-    b,
-    TaskStateMetadataPlugin,
+    slowinc,
 )
+from distributed.worker import Worker, error_message, logger, parse_memory_limit
 
 
 @pytest.mark.asyncio
diff --git a/distributed/tests/test_worker_client.py b/distributed/tests/test_worker_client.py
index 09ae20e8f20..33c18d1c617 100644
--- a/distributed/tests/test_worker_client.py
+++ b/distributed/tests/test_worker_client.py
@@ -1,24 +1,30 @@
 import asyncio
 import random
 import threading
-from time import sleep
 import warnings
+from time import sleep
 
 import dask
-from dask import delayed
 import pytest
+from dask import delayed
 
 from distributed import (
-    worker_client,
     Client,
     as_completed,
+    get_client,
     get_worker,
     wait,
-    get_client,
+    worker_client,
 )
 from distributed.metrics import time
-from distributed.utils_test import double, gen_cluster, inc
-from distributed.utils_test import client, cluster_fixture, loop  # noqa: F401
+from distributed.utils_test import (  # noqa: F401
+    client,
+    cluster_fixture,
+    double,
+    gen_cluster,
+    inc,
+    loop,
+)
 
 
 @gen_cluster(client=True)
@@ -191,7 +197,7 @@ def mysum():
 
 
 def test_dont_override_default_get(loop):
-    import dask.bag as db
+    from dask import bag as db
 
     def f(x):
         with worker_client() as c:
diff --git a/distributed/threadpoolexecutor.py b/distributed/threadpoolexecutor.py
index cc4a9894823..2bd224ff4e6 100644
--- a/distributed/threadpoolexecutor.py
+++ b/distributed/threadpoolexecutor.py
@@ -20,13 +20,13 @@
 Copyright 2001-2016 Python Software Foundation; All Rights Reserved
 """
-from . import _concurrent_futures_thread as thread
-import os
+import itertools
 import logging
+import os
 import queue
 import threading
-import itertools
 
+from . import _concurrent_futures_thread as thread
 from .metrics import time
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/utils.py b/distributed/utils.py
index 115e9578b6b..cbc9aef390c 100644
--- a/distributed/utils.py
+++ b/distributed/utils.py
@@ -1,33 +1,34 @@
 import asyncio
-from asyncio import TimeoutError
 import atexit
-import click
-from collections import deque, OrderedDict, UserDict
-from concurrent.futures import ThreadPoolExecutor, CancelledError  # noqa: F401
-from contextlib import contextmanager, suppress
+import base64
 import functools
-from hashlib import md5
 import html
+import importlib
+import inspect
 import json
 import logging
 import multiprocessing
 import os
+import pkgutil
 import re
 import shutil
 import socket
-from time import sleep
-import importlib
-from importlib.util import cache_from_source
-import inspect
 import sys
 import tempfile
 import threading
 import warnings
 import weakref
-import pkgutil
-import base64
-import tblib.pickling_support
 import xml.etree.ElementTree
+from asyncio import TimeoutError
+from collections import OrderedDict, UserDict, deque
+from concurrent.futures import CancelledError, ThreadPoolExecutor  # noqa: F401
+from contextlib import contextmanager, suppress
+from hashlib import md5
+from importlib.util import cache_from_source
+from time import sleep
+
+import click
+import tblib.pickling_support
 
 try:
     import resource
@@ -35,18 +36,17 @@
     resource = None
 
 import dask
+import tlz as toolz
 from dask import istask
 
 # provide format_bytes here for backwards compatibility
 from dask.utils import (  # noqa
     format_bytes,
-    funcname,
     format_time,
+    funcname,
     parse_bytes,
     parse_timedelta,
 )
-
-import tlz as toolz
 from tornado import gen
 from tornado.ioloop import IOLoop
 
@@ -58,7 +58,6 @@
 from .compatibility import PYPY, WINDOWS
 from .metrics import time
 
-
 try:
     from dask.context import thread_state
 except ImportError:
@@ -87,7 +86,7 @@ def _initialize_mp_context():
         if "pkg_resources" in sys.modules:
             preload.append("pkg_resources")
 
-        from .versions import required_packages, optional_packages
+        from .versions import optional_packages, required_packages
 
         for pkg, _ in required_packages + optional_packages:
             try:
diff --git a/distributed/utils_comm.py b/distributed/utils_comm.py
index 4175e0ee495..24f3bf5e102 100644
--- a/distributed/utils_comm.py
+++ b/distributed/utils_comm.py
@@ -1,14 +1,14 @@
 import asyncio
+import logging
+import random
 from collections import defaultdict
 from functools import partial
 from itertools import cycle
-import logging
-import random
 
-from dask.optimization import SubgraphCallable
 import dask.config
+from dask.optimization import SubgraphCallable
 from dask.utils import parse_timedelta, stringify
-from tlz import merge, concat, groupby, drop
+from tlz import concat, drop, groupby, merge
 
 from .core import rpc
 from .utils import All
diff --git a/distributed/utils_perf.py b/distributed/utils_perf.py
index 3b97dd46327..c81eb013c78 100644
--- a/distributed/utils_perf.py
+++ b/distributed/utils_perf.py
@@ -1,14 +1,13 @@
-from collections import deque
 import gc
 import logging
 import threading
+from collections import deque
 
 from dask.utils import format_bytes
 
 from .compatibility import PYPY
 from .metrics import thread_time
 
-
 logger = _logger = logging.getLogger(__name__)
diff --git a/distributed/utils_test.py b/distributed/utils_test.py
index e253d7760c7..615c3851835 100644
--- a/distributed/utils_test.py
+++ b/distributed/utils_test.py
@@ -1,10 +1,8 @@
 import asyncio
 import collections
-import gc
-from contextlib import contextmanager, suppress
 import copy
 import functools
-from glob import glob
+import gc
 import io
 import itertools
 import logging
@@ -19,49 +17,50 @@
 import sys
 import tempfile
 import threading
-from time import sleep
 import uuid
 import warnings
 import weakref
+from contextlib import contextmanager, suppress
+from glob import glob
+from time import sleep
 
 try:
     import ssl
 except ImportError:
     ssl = None
 
-import pytest
-
 import dask
-from tlz import merge, memoize, assoc
+import pytest
+from tlz import assoc, memoize, merge
 from tornado import gen
 from tornado.ioloop import IOLoop
 
 from . import system
-from .client import default_client, _global_clients, Client
-from .compatibility import WINDOWS
+from .client import Client, _global_clients, default_client
 from .comm import Comm
+from .compatibility import WINDOWS
 from .config import initialize_logging
-from .core import connect, rpc, CommClosedError, Status
+from .core import CommClosedError, Status, connect, rpc
 from .deploy import SpecCluster
+from .diagnostics.plugin import WorkerPlugin
 from .metrics import time
+from .nanny import Nanny
 from .proctitle import enable_proctitle_on_children
 from .security import Security
 from .utils import (
-    log_errors,
-    mp_context,
+    DequeHandler,
+    TimeoutError,
+    _offload_executor,
     get_ip,
     get_ipv6,
-    DequeHandler,
+    iscoroutinefunction,
+    log_errors,
+    mp_context,
     reset_logger_locks,
     sync,
-    iscoroutinefunction,
     thread_state,
-    _offload_executor,
-    TimeoutError,
 )
 from .worker import Worker
-from .nanny import Nanny
-from .diagnostics.plugin import WorkerPlugin
 
 try:
     import dask.array  # register config
@@ -190,6 +189,7 @@ def pristine_loop():
 @contextmanager
 def mock_ipython():
     from unittest import mock
+
     from distributed._ipython_utils import remote_magic
 
     ip = mock.Mock()
diff --git a/distributed/variable.py b/distributed/variable.py
index 83bea566b45..6d511ce51e5 100644
--- a/distributed/variable.py
+++ b/distributed/variable.py
@@ -1,14 +1,14 @@
 import asyncio
-from collections import defaultdict
-from contextlib import suppress
 import logging
 import uuid
+from collections import defaultdict
+from contextlib import suppress
 
+from dask.utils import stringify
 from tlz import merge
-from dask.utils import stringify
 
-from .client import Future, Client
-from .utils import log_errors, TimeoutError, parse_timedelta
+from .client import Client, Future
+from .utils import TimeoutError, log_errors, parse_timedelta
 from .worker import get_client, get_worker
 
 logger = logging.getLogger(__name__)
diff --git a/distributed/versions.py b/distributed/versions.py
index a276eb418ca..d8af5eabcd0 100644
--- a/distributed/versions.py
+++ b/distributed/versions.py
@@ -1,13 +1,12 @@
 """ utilities for package version introspection """
 
-from __future__ import print_function, division, absolute_import
+from __future__ import absolute_import, division, print_function
 
+import importlib
+import os
 import platform
 import struct
-import os
 import sys
-import importlib
-
 
 required_packages = [
     ("dask", lambda p: p.__version__),
diff --git a/distributed/worker.py b/distributed/worker.py
index 7e282c2e46a..653c94597c1 100644
--- a/distributed/worker.py
+++ b/distributed/worker.py
@@ -1,77 +1,80 @@
 import asyncio
 import bisect
-
-from collections import defaultdict, deque, namedtuple
-from collections.abc import MutableMapping
-from contextlib import suppress
-from datetime import timedelta
 import errno
-from functools import partial
 import heapq
-from inspect import isawaitable
 import logging
 import os
-from pickle import PicklingError
 import random
-import threading
 import sys
+import threading
 import uuid
 import warnings
 import weakref
+from collections import defaultdict, deque, namedtuple
+from collections.abc import MutableMapping
+from contextlib import suppress
+from datetime import timedelta
+from functools import partial
+from inspect import isawaitable
+from pickle import PicklingError
 
 import dask
-from dask.core import istask
 from dask.compatibility import apply
-from dask.utils import format_bytes, funcname
+from dask.core import istask
 from dask.system import CPU_COUNT
-
-from tlz import pluck, merge, first, keymap
+from dask.utils import format_bytes, funcname
+from tlz import first, keymap, merge, pluck
 from tornado import gen
 from tornado.ioloop import IOLoop, PeriodicCallback
 
-from . import profile, comm, system
+from . import comm, preloading, profile, system
 from .batched import BatchedSend
-from .comm import get_address_host, connect
+from .comm import connect, get_address_host
 from .comm.addressing import address_from_user_args
 from .comm.utils import OFFLOAD_THRESHOLD
-from .core import error_message, CommClosedError, send_recv, pingpong, coerce_to_address
+from .core import (
+    CommClosedError,
+    Status,
+    coerce_to_address,
+    error_message,
+    pingpong,
+    send_recv,
+)
 from .diskutils import WorkSpace
 from .http import get_handlers
 from .metrics import time
 from .node import ServerNode
-from . import preloading
 from .proctitle import setproctitle
-from .protocol import pickle, to_serialize, deserialize_bytes, serialize_bytelist
+from .protocol import deserialize_bytes, pickle, serialize_bytelist, to_serialize
 from .pubsub import PubSubWorkerExtension
 from .security import Security
 from .sizeof import safe_sizeof as sizeof
-from .threadpoolexecutor import ThreadPoolExecutor, secede as tpe_secede
+from .threadpoolexecutor import ThreadPoolExecutor
+from .threadpoolexecutor import secede as tpe_secede
 from .utils import (
+    LRU,
+    TimeoutError,
+    _maybe_complex,
     get_ip,
-    typename,
     has_arg,
-    _maybe_complex,
-    log_errors,
     import_file,
-    silence_logging,
-    thread_state,
+    iscoroutinefunction,
     json_load_robust,
     key_split,
+    log_errors,
     offload,
     parse_bytes,
-    parse_timedelta,
     parse_ports,
-    iscoroutinefunction,
+    parse_timedelta,
+    silence_logging,
+    thread_state,
+    typename,
     warn_on_duration,
-    LRU,
-    TimeoutError,
 )
-from .utils_comm import pack_data, gather_from_workers, retry_operation
-from .utils_perf import ThrottledGC, enable_gc_diagnosis, disable_gc_diagnosis
+from .utils_comm import gather_from_workers, pack_data, retry_operation
+from .utils_perf import ThrottledGC, disable_gc_diagnosis, enable_gc_diagnosis
 from .versions import get_versions
-from .core import Status
-
 
 logger = logging.getLogger(__name__)
 
 LOG_PDB = dask.config.get("distributed.admin.pdb-on-err")
diff --git a/distributed/worker_client.py b/distributed/worker_client.py
index 2f6eec73c62..7ad1cb7e20b 100644
--- a/distributed/worker_client.py
+++ b/distributed/worker_client.py
@@ -1,10 +1,11 @@
-from contextlib import contextmanager
 import warnings
+from contextlib import contextmanager
 
 import dask
-from .threadpoolexecutor import secede, rejoin
-from .worker import thread_state, get_client, get_worker
+
+from .threadpoolexecutor import rejoin, secede
 from .utils import parse_timedelta
+from .worker import get_client, get_worker, thread_state
 
 
 @contextmanager
diff --git a/docs/source/conf.py b/docs/source/conf.py
index d83c6b9d2c6..201cd76b00b 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -425,12 +425,12 @@ def copy_legacy_redirects(app, docname):
                 f.write(page)
 
 
+from docutils.parsers.rst import directives
+
 # -- Configuration to keep autosummary in sync with autoclass::members ----------------------------------------------
 # Fixes issues/3693
 # See https://stackoverflow.com/questions/20569011/python-sphinx-autosummary-automated-listing-of-member-functions
-from sphinx.ext.autosummary import Autosummary
-from sphinx.ext.autosummary import get_documenter
-from docutils.parsers.rst import directives
+from sphinx.ext.autosummary import Autosummary, get_documenter
 from sphinx.util.inspect import safe_getattr
diff --git a/setup.cfg b/setup.cfg
index 0689b58795d..ef72dcbd2bf 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -27,6 +27,13 @@ ignore =
 
 max-line-length = 120
 
+[isort]
+profile = black
+skip_gitignore = true
+force_to_top = true
+default_section = THIRDPARTY
+known_first_party = distributed
+
 [versioneer]
 VCS = git
 style = pep440
diff --git a/setup.py b/setup.py
index 8aaaeee5de8..61f2193d6c3 100755
--- a/setup.py
+++ b/setup.py
@@ -2,8 +2,10 @@
 
 import os
 import sys
-from setuptools import setup, find_packages
+
+from setuptools import find_packages, setup
 from setuptools.extension import Extension
+
 import versioneer
 
 requires = open("requirements.txt").read().strip().split("\n")
diff --git a/versioneer.py b/versioneer.py
index fa805b12b0f..bf23b9a50d4 100644
--- a/versioneer.py
+++ b/versioneer.py
@@ -277,10 +277,12 @@
 """
 
 from __future__ import print_function
+
 try:
     import configparser
 except ImportError:
     import ConfigParser as configparser
+
 import errno
 import json
 import os
@@ -1557,6 +1559,7 @@ def run(self):
         if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
             from cx_Freeze.dist import build_exe as _build_exe
+
             # nczeczulin reports that py2exe won't like the pep440-style string
             # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
             # setup(console=[{