From 77e3e079db3f46dab891ba570e046ca600e01da7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 4 Feb 2025 00:07:33 +0000 Subject: [PATCH 1/3] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.2 → v0.9.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.2...v0.9.4) - [github.com/nbQA-dev/nbQA: 1.9.0 → 1.9.1](https://github.com/nbQA-dev/nbQA/compare/1.9.0...1.9.1) - [github.com/pre-commit/mirrors-eslint: v9.14.0 → v9.19.0](https://github.com/pre-commit/mirrors-eslint/compare/v9.14.0...v9.19.0) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 108606b7..89560da9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ ci: repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.2 + rev: v0.9.4 hooks: - id: ruff args: @@ -24,7 +24,7 @@ repos: # than ruff itself # but has trouble with isort rules - repo: https://github.com/nbQA-dev/nbQA - rev: 1.9.0 + rev: 1.9.1 hooks: - id: nbqa-ruff-format - id: nbqa-ruff-check @@ -44,7 +44,7 @@ repos: - id: check-executables-have-shebangs - id: requirements-txt-fixer - repo: https://github.com/pre-commit/mirrors-eslint - rev: v9.14.0 + rev: v9.19.0 hooks: - id: eslint files: \.[jt]sx?$ # *.js, *.jsx, *.ts and *.tsx From 718b85fba6d2a25692af27925984f5ec66ac6f1b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 4 Feb 2025 00:08:10 +0000 Subject: [PATCH 2/3] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- benchmarks/cluster_start.py | 2 +- ipyparallel/controller/app.py | 6 +++--- ipyparallel/controller/heartmonitor.py | 3 +-- ipyparallel/controller/hub.py | 5 ++--- ipyparallel/tests/clienttest.py | 6 +++--- ipyparallel/tests/test_asyncresult.py | 6 +++--- ipyparallel/tests/test_cluster.py | 2 +- ipyparallel/tests/test_dependency.py | 18 +++++++++--------- ipyparallel/tests/test_magics.py | 12 ++++++------ ipyparallel/tests/test_view.py | 6 +++--- 10 files changed, 32 insertions(+), 34 deletions(-) diff --git a/benchmarks/cluster_start.py b/benchmarks/cluster_start.py index aafbb5c1..de05de97 100644 --- a/benchmarks/cluster_start.py +++ b/benchmarks/cluster_start.py @@ -15,7 +15,7 @@ def start_cluster(depth, number_of_engines, path='', log_output_to_file=False): f'--HubFactory.db_class=NoDB' ) print(ipcontroller_cmd) - ipengine_cmd = f'{path}ipengine --profile=asv ' f'--cluster-id=depth_{depth} ' + ipengine_cmd = f'{path}ipengine --profile=asv --cluster-id=depth_{depth} ' ps = [ Popen( ipcontroller_cmd.split(), diff --git a/ipyparallel/controller/app.py b/ipyparallel/controller/app.py index 580076ad..8de42a02 100755 --- a/ipyparallel/controller/app.py +++ b/ipyparallel/controller/app.py @@ -712,9 +712,9 @@ def load_config_from_json(self): ccfg = json.loads(f.read()) for key in ('key', 'registration', 'pack', 'unpack', 'signature_scheme'): - assert ( - ccfg[key] == ecfg[key] - ), f"mismatch between engine and client info: {key!r}" + assert ccfg[key] == ecfg[key], ( + f"mismatch between engine and client info: {key!r}" + ) xport, ip = ccfg['interface'].split('://') diff --git a/ipyparallel/controller/heartmonitor.py b/ipyparallel/controller/heartmonitor.py index 5df78af7..2b605c55 100755 
--- a/ipyparallel/controller/heartmonitor.py +++ b/ipyparallel/controller/heartmonitor.py @@ -106,8 +106,7 @@ class HeartMonitor(LoggingConfigurable): period = Integer( 3000, config=True, - help='The frequency at which the Hub pings the engines for heartbeats ' - '(in ms)', + help='The frequency at which the Hub pings the engines for heartbeats (in ms)', ) max_heartmonitor_misses = Integer( 10, diff --git a/ipyparallel/controller/hub.py b/ipyparallel/controller/hub.py index b199bcee..1895681e 100644 --- a/ipyparallel/controller/hub.py +++ b/ipyparallel/controller/hub.py @@ -564,8 +564,7 @@ def save_broadcast_request(self, idents, msg): msg = self.session.deserialize(msg) except Exception as e: self.log.error( - f'broadcast:: client {client_id} sent invalid broadcast message:' - f' {msg}', + f'broadcast:: client {client_id} sent invalid broadcast message: {msg}', exc_info=True, ) return @@ -588,7 +587,7 @@ def save_broadcast_result(self, idents, msg): msg = self.session.deserialize(msg) except Exception as e: self.log.error( - f'broadcast::invalid broadcast result message send to {client_id}:' f'' + f'broadcast::invalid broadcast result message send to {client_id}:' ) # save the result of a completed broadcast diff --git a/ipyparallel/tests/clienttest.py b/ipyparallel/tests/clienttest.py index f0976e62..9f5fb96e 100644 --- a/ipyparallel/tests/clienttest.py +++ b/ipyparallel/tests/clienttest.py @@ -107,9 +107,9 @@ def raises_remote(etype): e.raise_exception() except error.RemoteError as e: tb = '\n'.join(e.render_traceback()) - assert ( - expected_ename == e.ename - ), f"Should have raised {expected_ename}, but raised {e.ename}:\n{tb}" + assert expected_ename == e.ename, ( + f"Should have raised {expected_ename}, but raised {e.ename}:\n{tb}" + ) else: pytest.fail("should have raised a RemoteError") diff --git a/ipyparallel/tests/test_asyncresult.py b/ipyparallel/tests/test_asyncresult.py index 0621754d..d7f57a73 100644 --- a/ipyparallel/tests/test_asyncresult.py +++ b/ipyparallel/tests/test_asyncresult.py @@ -242,9 +242,9 @@ def test_hubresult_timestamps(self): hr = rc2.get_result(ar.msg_ids) assert hr.elapsed > 0.0, f"got bad elapsed: {hr.elapsed}" hr.get(1) - assert ( - hr.wall_time < ar.wall_time + 0.2 - ), f"got bad wall_time: {hr.wall_time} > {ar.wall_time}" + assert hr.wall_time < ar.wall_time + 0.2, ( + f"got bad wall_time: {hr.wall_time} > {ar.wall_time}" + ) assert hr.serial_time == ar.serial_time finally: rc2.close() diff --git a/ipyparallel/tests/test_cluster.py b/ipyparallel/tests/test_cluster.py index c1f065d6..fb9ee759 100644 --- a/ipyparallel/tests/test_cluster.py +++ b/ipyparallel/tests/test_cluster.py @@ -168,7 +168,7 @@ async def test_restart_engines(Cluster): await asyncio.sleep(0.1) if time.monotonic() > deadline: raise TimeoutError( - f"timeout waiting for engines 0-{n-1} to unregister, {rc.ids=}" + f"timeout waiting for engines 0-{n - 1} to unregister, {rc.ids=}" ) # wait for register rc.wait_for_engines(n, timeout=_timeout) diff --git a/ipyparallel/tests/test_dependency.py b/ipyparallel/tests/test_dependency.py index 95615ba4..e2b0232a 100644 --- a/ipyparallel/tests/test_dependency.py +++ b/ipyparallel/tests/test_dependency.py @@ -38,19 +38,19 @@ def assertMet(self, dep): assert dep.check(self.succeeded, self.failed), "Dependency should be met" def assertUnmet(self, dep): - assert not dep.check( - self.succeeded, self.failed - ), "Dependency should not be met" + assert not dep.check(self.succeeded, self.failed), ( + "Dependency should not be met" + ) def 
assertUnreachable(self, dep): - assert dep.unreachable( - self.succeeded, self.failed - ), "Dependency should be unreachable" + assert dep.unreachable(self.succeeded, self.failed), ( + "Dependency should be unreachable" + ) def assertReachable(self, dep): - assert not dep.unreachable( - self.succeeded, self.failed - ), "Dependency should be reachable" + assert not dep.unreachable(self.succeeded, self.failed), ( + "Dependency should be reachable" + ) def cancan(self, f): """decorator to pass through canning into self.user_ns""" diff --git a/ipyparallel/tests/test_magics.py b/ipyparallel/tests/test_magics.py index dfe796fc..ca3588b7 100644 --- a/ipyparallel/tests/test_magics.py +++ b/ipyparallel/tests/test_magics.py @@ -324,9 +324,9 @@ def test_cellpx_stream(self): # Check that all expected lines are in the output self._check_expected_lines_unordered(expected, lines) - assert ( - len(expected) - len(v) <= len(lines) <= len(expected) - ), f"expected {len(expected)} lines, got: {io.stdout}" + assert len(expected) - len(v) <= len(lines) <= len(expected), ( + f"expected {len(expected)} lines, got: {io.stdout}" + ) # Do the same for stderr print(io.stderr, file=sys.stderr) @@ -343,9 +343,9 @@ def test_cellpx_stream(self): * len(v) ) self._check_expected_lines_unordered(expected, lines) - assert ( - len(expected) - len(v) <= len(lines) <= len(expected) - ), f"expected {len(expected)} lines, got: {io.stderr}" + assert len(expected) - len(v) <= len(lines) <= len(expected), ( + f"expected {len(expected)} lines, got: {io.stderr}" + ) def test_px_nonblocking(self): ip = get_ipython() diff --git a/ipyparallel/tests/test_view.py b/ipyparallel/tests/test_view.py index fa0bcea3..f247a145 100644 --- a/ipyparallel/tests/test_view.py +++ b/ipyparallel/tests/test_view.py @@ -933,9 +933,9 @@ def test_cloudpickle_require(self): view = self.client[:] # enable cloudpickle view.use_cloudpickle() - assert ( - 'types' not in globals() - ), "Test condition isn't met if types is already imported" + assert 'types' not in globals(), ( + "Test condition isn't met if types is already imported" + ) @ipp.require("types") @ipp.interactive From a573c95eb4ba00092c5990897f6494a95e97cff1 Mon Sep 17 00:00:00 2001 From: Min RK Date: Tue, 4 Feb 2025 11:03:41 +0100 Subject: [PATCH 3/3] address UP031 lint requires manual fixes because it refuses to apply these automatically --- docs/source/conf.py | 3 +- docs/source/examples/Futures.ipynb | 4 +- .../Monitoring an MPI Simulation - 1.ipynb | 156 +++++++++++++----- .../Monitoring an MPI Simulation - 2.ipynb | 81 ++++++--- docs/source/examples/Using Dill.ipynb | 104 +----------- docs/source/examples/customresults.py | 6 +- .../examples/daVinci Word Count/pwordfreq.py | 8 +- docs/source/examples/dagdeps.py | 2 +- .../examples/interengine/communicator.py | 4 +- docs/source/examples/itermapresult.py | 16 +- docs/source/examples/pi/parallelpi.py | 6 +- docs/source/examples/task_profiler.py | 9 +- .../source/examples/wave2D/RectPartitioner.py | 4 +- docs/source/examples/wave2D/communicator.py | 4 +- .../examples/wave2D/parallelwave-mpi.py | 6 +- docs/source/examples/wave2D/parallelwave.py | 6 +- docs/source/examples/wave2D/wavesolver.py | 9 +- ipyparallel/client/client.py | 2 +- ipyparallel/controller/hub.py | 2 +- ipyparallel/controller/sqlitedb.py | 2 +- ipyparallel/engine/app.py | 6 +- ipyparallel/engine/log.py | 2 +- ipyparallel/error.py | 10 +- ipyparallel/tests/test_view.py | 2 +- ipyparallel/util.py | 2 +- 25 files changed, 225 insertions(+), 231 deletions(-) diff --git 
a/docs/source/conf.py b/docs/source/conf.py index 69511e38..87293bae 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -92,10 +92,9 @@ master_doc = 'index' # General information about the project. -from datetime import date project = 'ipyparallel' -copyright = '%04d, The IPython Development Team' % date.today().year +copyright = 'The IPython Development Team' author = 'The IPython Development Team' # The version info for the project you're documenting, acts as replacement for diff --git a/docs/source/examples/Futures.ipynb b/docs/source/examples/Futures.ipynb index a5af5cf7..c089edba 100644 --- a/docs/source/examples/Futures.ipynb +++ b/docs/source/examples/Futures.ipynb @@ -186,7 +186,7 @@ ], "source": [ "f = rc[-1].apply(os.getpid)\n", - "f.add_done_callback(lambda _: print(\"I got PID: %i\" % _.result()))\n", + "f.add_done_callback(lambda _: print(f\"I got PID: {_.result()}\"))\n", "f.result()" ] }, @@ -602,7 +602,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.11.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git a/docs/source/examples/Monitoring an MPI Simulation - 1.ipynb b/docs/source/examples/Monitoring an MPI Simulation - 1.ipynb index 2b149205..8ec0ccb1 100644 --- a/docs/source/examples/Monitoring an MPI Simulation - 1.ipynb +++ b/docs/source/examples/Monitoring an MPI Simulation - 1.ipynb @@ -127,7 +127,7 @@ "bcast = mpi.bcast\n", "barrier = mpi.barrier\n", "rank = mpi.rank\n", - "print(\"MPI rank: %i/%i\" % (mpi.rank, mpi.size))" + "print(f\"MPI rank: {mpi.rank}/{mpi.size}\")" ] }, { @@ -300,7 +300,7 @@ " nx, nyt, j, nsteps = view.pull(['nx', 'nyt', 'j', 'nsteps'], targets=0, block=True)\n", " fig, ax = plt.subplots()\n", " ax.contourf(Z)\n", - " ax.set_title('Mesh: %i x %i, step %i/%i' % (nx, nyt, j + 1, nsteps))\n", + " ax.set_title(f\"Mesh: {nx} x {nyt}, step {j + 1}/{nsteps}\")\n", " plt.axis('off')\n", " # We clear the notebook output before plotting this if in-place plot updating is requested\n", " if in_place:\n", @@ -566,193 +566,267 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.11.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { "state": { "02f2d94254d542bdbd3cff7f9bc035e7": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "FloatProgressModel", "state": { + "_view_name": "ErrorWidgetView", "bar_style": "success", + "error": {}, "layout": "IPY_MODEL_4cdefd8ae5764be8a8bf1f3c374492f0", "max": 4, + "msg": "Failed to load model class 'FloatProgressModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_d101b978f358462fbcc0ad51c4be1ee1", "value": 4 } }, "0ab41c84c30445dfa2c77e0e2014dd46": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "11097de7cc114cc88d99cd17a693d510": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "20d1375897eb4fdcb87fda73573ad678": { 
"model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HBoxModel", "state": { + "_view_name": "ErrorWidgetView", "children": [ "IPY_MODEL_7a50e71a222340a4ba51e70ef3abce47", "IPY_MODEL_02f2d94254d542bdbd3cff7f9bc035e7", "IPY_MODEL_876517d46fa6488f8fb61db3ed689b96" ], - "layout": "IPY_MODEL_0ab41c84c30445dfa2c77e0e2014dd46" + "error": {}, + "layout": "IPY_MODEL_0ab41c84c30445dfa2c77e0e2014dd46", + "msg": "Failed to load model class 'HBoxModel' from module '@jupyter-widgets/controls'" } }, "212854fc12af402eb3a57438a851dfac": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "DescriptionStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'DescriptionStyleModel' from module '@jupyter-widgets/controls'" } }, "2231573544774560984193c60e6370cb": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "4205fec62aea485c8ffaf31ca98859aa": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "4cdefd8ae5764be8a8bf1f3c374492f0": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "5143e78c3dcc439a95b7ec24ea37c2be": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "DescriptionStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'DescriptionStyleModel' from module '@jupyter-widgets/controls'" } }, "6817e30d6c824bd194e102e20a9e7cda": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "DescriptionStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'DescriptionStyleModel' from module '@jupyter-widgets/controls'" } }, "6be3837161754e08a715ac0c78fa1080": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "7a50e71a222340a4ba51e70ef3abce47": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { + "_view_name": "ErrorWidgetView", + "error": {}, "layout": "IPY_MODEL_2231573544774560984193c60e6370cb", + "msg": "Failed to load model class 'HTMLModel' 
from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_6817e30d6c824bd194e102e20a9e7cda", "value": "100%" } }, "7afda4d37ef841c684bb624ff6b19267": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "DescriptionStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'DescriptionStyleModel' from module '@jupyter-widgets/controls'" } }, "811e5930f3bc42899a5e700d9b275bdb": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "876517d46fa6488f8fb61db3ed689b96": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { + "_view_name": "ErrorWidgetView", + "error": {}, "layout": "IPY_MODEL_11097de7cc114cc88d99cd17a693d510", + "msg": "Failed to load model class 'HTMLModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_7afda4d37ef841c684bb624ff6b19267", "value": " 4/4 [00:05<00:00, 5.54s/engine]" } }, "88e7ff230d284b368ace25166967a93d": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { + "_view_name": "ErrorWidgetView", + "error": {}, "layout": "IPY_MODEL_6be3837161754e08a715ac0c78fa1080", + "msg": "Failed to load model class 'HTMLModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_5143e78c3dcc439a95b7ec24ea37c2be", "value": "100%" } }, "8b14b3b6f0cb4a67be4645c8274c4074": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HBoxModel", "state": { + "_view_name": "ErrorWidgetView", "children": [ "IPY_MODEL_88e7ff230d284b368ace25166967a93d", "IPY_MODEL_eee11ad7360e4cc2b6b2f196ff4a1d45", "IPY_MODEL_9d0e878210c24c93bc764ec9ec53b84e" ], - "layout": "IPY_MODEL_a15b2b7fec55486689859c321b2ca7e3" + "error": {}, + "layout": "IPY_MODEL_a15b2b7fec55486689859c321b2ca7e3", + "msg": "Failed to load model class 'HBoxModel' from module '@jupyter-widgets/controls'" } }, "9d0e878210c24c93bc764ec9ec53b84e": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { + "_view_name": "ErrorWidgetView", + "error": {}, "layout": "IPY_MODEL_811e5930f3bc42899a5e700d9b275bdb", + "msg": "Failed to load model class 'HTMLModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_212854fc12af402eb3a57438a851dfac", "value": " 4/4 [00:06<00:00, 6.12s/engine]" } }, "a15b2b7fec55486689859c321b2ca7e3": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "afa1055878c444c49c4db6767994e04b": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "ProgressStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + 
"description_width": "", + "error": {}, + "msg": "Failed to load model class 'ProgressStyleModel' from module '@jupyter-widgets/controls'" } }, "d101b978f358462fbcc0ad51c4be1ee1": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "ProgressStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'ProgressStyleModel' from module '@jupyter-widgets/controls'" } }, "eee11ad7360e4cc2b6b2f196ff4a1d45": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "FloatProgressModel", "state": { + "_view_name": "ErrorWidgetView", "bar_style": "success", + "error": {}, "layout": "IPY_MODEL_4205fec62aea485c8ffaf31ca98859aa", "max": 4, + "msg": "Failed to load model class 'FloatProgressModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_afa1055878c444c49c4db6767994e04b", "value": 4 } diff --git a/docs/source/examples/Monitoring an MPI Simulation - 2.ipynb b/docs/source/examples/Monitoring an MPI Simulation - 2.ipynb index 8c0c7d91..2a2dfa30 100644 --- a/docs/source/examples/Monitoring an MPI Simulation - 2.ipynb +++ b/docs/source/examples/Monitoring an MPI Simulation - 2.ipynb @@ -119,7 +119,7 @@ "bcast = mpi.bcast\n", "barrier = mpi.barrier\n", "rank = mpi.rank\n", - "print(\"MPI rank: %i/%i\" % (mpi.rank, mpi.size))" + "print(f\"MPI rank: {mpi.rank}/{mpi.size}\")" ] }, { @@ -270,7 +270,7 @@ " else:\n", " fig, ax = plt.subplots()\n", " ax.contourf(Z)\n", - " ax.set_title(\"Mesh: %i x %i, step %i/%i\" % (nx, nyt, j + 1, nsteps))\n", + " ax.set_title(f\"Mesh: {nx} x {nyt}, step {j + 1}/{nsteps}\")\n", " plt.axis(\"off\")\n", " # We clear the notebook output before plotting this if in-place\n", " # plot updating is requested\n", @@ -420,102 +420,139 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.11.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { "state": { "31b6b4a0105148e187aadfc70e0fe1c7": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { + "_view_name": "ErrorWidgetView", + "error": {}, "layout": "IPY_MODEL_c872eb3aedc4492eb5a7d3b3c24249bb", + "msg": "Failed to load model class 'HTMLModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_7b50ee6a201f48f796c337a68ba8730c", "value": " 4/4 [00:05<00:00, 5.23s/engine]" } }, "401e2a9344964c7082435def275db2c8": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "650e652f16044e4f816006c8f334eadb": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { + "_view_name": "ErrorWidgetView", + "error": {}, "layout": "IPY_MODEL_9a56d508e10842299d798de5d18aa84a", + "msg": "Failed to load model class 'HTMLModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_fe5f5baeb0b24e13bfb0aa8e3d411b0b", "value": "100%" } }, "7b50ee6a201f48f796c337a68ba8730c": { "model_module": "@jupyter-widgets/controls", - 
"model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "DescriptionStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'DescriptionStyleModel' from module '@jupyter-widgets/controls'" } }, "7d5e98d4de4b472080ce751a65546968": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "FloatProgressModel", "state": { + "_view_name": "ErrorWidgetView", "bar_style": "success", + "error": {}, "layout": "IPY_MODEL_401e2a9344964c7082435def275db2c8", "max": 4, + "msg": "Failed to load model class 'FloatProgressModel' from module '@jupyter-widgets/controls'", "style": "IPY_MODEL_a577270a23bf46fa933a41e32d1a5a79", "value": 4 } }, "9a56d508e10842299d798de5d18aa84a": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "a577270a23bf46fa933a41e32d1a5a79": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "ProgressStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'ProgressStyleModel' from module '@jupyter-widgets/controls'" } }, "bba28e827ace46e9b233476330b76938": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "c872eb3aedc4492eb5a7d3b3c24249bb": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "_view_name": "ErrorWidgetView", + "error": {}, + "msg": "Failed to load model class 'LayoutModel' from module '@jupyter-widgets/base'" + } }, "da09992ca3fd438eadf96a6599d5384e": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HBoxModel", "state": { + "_view_name": "ErrorWidgetView", "children": [ "IPY_MODEL_650e652f16044e4f816006c8f334eadb", "IPY_MODEL_7d5e98d4de4b472080ce751a65546968", "IPY_MODEL_31b6b4a0105148e187aadfc70e0fe1c7" ], - "layout": "IPY_MODEL_bba28e827ace46e9b233476330b76938" + "error": {}, + "layout": "IPY_MODEL_bba28e827ace46e9b233476330b76938", + "msg": "Failed to load model class 'HBoxModel' from module '@jupyter-widgets/controls'" } }, "fe5f5baeb0b24e13bfb0aa8e3d411b0b": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "DescriptionStyleModel", "state": { - "description_width": "" + "_view_name": "ErrorWidgetView", + "description_width": "", + "error": {}, + "msg": "Failed to load model class 'DescriptionStyleModel' from module '@jupyter-widgets/controls'" } } }, diff --git a/docs/source/examples/Using Dill.ipynb b/docs/source/examples/Using Dill.ipynb index 75e6b515..26d794a2 100644 --- a/docs/source/examples/Using Dill.ipynb +++ b/docs/source/examples/Using Dill.ipynb @@ -44,7 +44,7 @@ "\n", " 
def has_closure(b):\n", " product = a * b\n", - " f.write(\"%i: %g\\n\" % (os.getpid(), product))\n", + " f.write(f\"{os.getpid()}: {product:g}\\n\")\n", " f.flush()\n", " return product\n", "\n", @@ -596,109 +596,11 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.11.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { - "state": { - "1086f7640c6148b7bb5cd4202d114ba3": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "37585be691034dba9d04f0c82d2d8a51": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "610913dc61674fd5ac61f9f8fa714f47": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "61aff84b5838480c92987db3608a698d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "86f27c1b3bf840c8b0b5c0323f0d6264": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_b2ded84395ea4d53891f184694793eff", - "max": 2, - "style": "IPY_MODEL_f458c2a21bc24550b2a0dd2b7f014046", - "value": 2 - } - }, - "8bdc6187bd964e89acfe57c818fccec6": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_e2955a27d1a3468582d3cd1466737ba7", - "style": "IPY_MODEL_61aff84b5838480c92987db3608a698d", - "value": "100%" - } - }, - "a6d41c9031314b94851b72e499e8afbe": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_37585be691034dba9d04f0c82d2d8a51", - "style": "IPY_MODEL_1086f7640c6148b7bb5cd4202d114ba3", - "value": " 2/2 [00:00<00:00,  1.18engine/s]" - } - }, - "b2ded84395ea4d53891f184694793eff": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "d2ece3d105ca4c488a76246625ef8962": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_8bdc6187bd964e89acfe57c818fccec6", - "IPY_MODEL_86f27c1b3bf840c8b0b5c0323f0d6264", - "IPY_MODEL_a6d41c9031314b94851b72e499e8afbe" - ], - "layout": "IPY_MODEL_610913dc61674fd5ac61f9f8fa714f47" - } - }, - "e2955a27d1a3468582d3cd1466737ba7": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "f458c2a21bc24550b2a0dd2b7f014046": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - } - }, + "state": {}, "version_major": 2, "version_minor": 0 } diff --git a/docs/source/examples/customresults.py b/docs/source/examples/customresults.py index 7dfed2aa..778168de 100644 --- a/docs/source/examples/customresults.py +++ b/docs/source/examples/customresults.py @@ -30,7 +30,7 @@ def sleep_here(count, t): import sys import time - print("hi from engine %i" % id) + 
print(f"hi from engine {id}") sys.stdout.flush() time.sleep(t) return count, t @@ -52,7 +52,7 @@ def sleep_here(count, t): for msg_id in finished: # we know these are done, so don't worry about blocking ar = rc.get_result(msg_id) - print("job id %s finished on engine %i" % (msg_id, ar.engine_id)) + print(f"job id {msg_id} finished on engine {ar.engine_id}") print("with stdout:") print(' ' + ar.stdout.replace('\n', '\n ').rstrip()) print("and results:") @@ -60,4 +60,4 @@ def sleep_here(count, t): # note that each job in a map always returns a list of length chunksize # even if chunksize == 1 for count, t in ar.get(): - print(" item %i: slept for %.2fs" % (count, t)) + print(f" item {count}: slept for {t:.2f}s") diff --git a/docs/source/examples/daVinci Word Count/pwordfreq.py b/docs/source/examples/daVinci Word Count/pwordfreq.py index 09fe0a39..d449be6b 100644 --- a/docs/source/examples/daVinci Word Count/pwordfreq.py +++ b/docs/source/examples/daVinci Word Count/pwordfreq.py @@ -57,7 +57,7 @@ def pwordfreq(view, fnames): freqs = wordfreq(text) toc = time.time() print_wordfreq(freqs, 10) - print("Took %.3f s to calculate" % (toc - tic)) + print(f"Took {toc - tic:.3f}s to calculate") # The parallel version print("\nParallel word frequency count:") @@ -68,18 +68,18 @@ def pwordfreq(view, fnames): block = nlines // n for i in range(n): chunk = lines[i * block : i * (block + 1)] - with open('davinci%i.txt' % i, 'w', encoding='utf8') as f: + with open(f'davinci{i}.txt', 'w', encoding='utf8') as f: f.write('\n'.join(chunk)) try: # python2 cwd = os.path.abspath(os.getcwdu()) except AttributeError: # python3 cwd = os.path.abspath(os.getcwd()) - fnames = [os.path.join(cwd, 'davinci%i.txt' % i) for i in range(n)] + fnames = [os.path.join(cwd, f'davinci{i}.txt') for i in range(n)] tic = time.time() pfreqs = pwordfreq(view, fnames) toc = time.time() print_wordfreq(freqs) - print("Took %.3f s to calculate on %i engines" % (toc - tic, len(view.targets))) + print(f"Took {toc - tic:.3f} s to calculate on {len(view.targets)} engines") # cleanup split files map(os.remove, fnames) diff --git a/docs/source/examples/dagdeps.py b/docs/source/examples/dagdeps.py index afbc4b42..6fb50d2d 100644 --- a/docs/source/examples/dagdeps.py +++ b/docs/source/examples/dagdeps.py @@ -100,7 +100,7 @@ def main(nodes, edges): client = parallel.Client() view = client.load_balanced_view() - print("submitting %i tasks with %i dependencies" % (nodes, edges)) + print(f"submitting {nodes} tasks with {edges} dependencies") results = submit_jobs(view, G, jobs) print("waiting for results") client.wait_interactive() diff --git a/docs/source/examples/interengine/communicator.py b/docs/source/examples/interengine/communicator.py index d8ea2126..2f3e31ac 100644 --- a/docs/source/examples/interengine/communicator.py +++ b/docs/source/examples/interengine/communicator.py @@ -21,8 +21,8 @@ def __init__(self, interface='tcp://*', identity=None): # bind to ports port = self.socket.bind_to_random_port(interface) pub_port = self.pub.bind_to_random_port(interface) - self.url = interface + ":%i" % port - self.pub_url = interface + ":%i" % pub_port + self.url = f"{interface}:{port}" + self.pub_url = f"{interface}:{pub_port}" # guess first public IP from socket self.location = socket.gethostbyname_ex(socket.gethostname())[-1][0] self.peers = {} diff --git a/docs/source/examples/itermapresult.py b/docs/source/examples/itermapresult.py index 2e96a7e0..b9e080b7 100644 --- a/docs/source/examples/itermapresult.py +++ 
b/docs/source/examples/itermapresult.py @@ -34,10 +34,10 @@ # create a Reference to `id`. This will be a different value on each engine ref = ipp.Reference('id') print("sleeping for `id` seconds on each engine") -tic = time.time() +tic = time.perf_counter() ar = dv.apply(time.sleep, ref) for i, r in enumerate(ar): - print("%i: %.3f" % (i, time.time() - tic)) + print(f"{i}: {time.perf_counter() - tic:.3f}") def sleep_here(t): @@ -50,22 +50,22 @@ def sleep_here(t): # one call per task print("running with one call per task") amr = v.map(sleep_here, [0.01 * t for t in range(100)]) -tic = time.time() +tic = time.perf_counter() for i, r in enumerate(amr): - print("task %i on engine %i: %.3f" % (i, r[0], time.time() - tic)) + print(f"task {i} on engine {r[0]}: {time.perf_counter() - tic:.3f}") print("running with four calls per task") # with chunksize, we can have four calls per task amr = v.map(sleep_here, [0.01 * t for t in range(100)], chunksize=4) -tic = time.time() +tic = time.perf_counter() for i, r in enumerate(amr): - print("task %i on engine %i: %.3f" % (i, r[0], time.time() - tic)) + print(f"task {i} on engine {r[0]}: {time.perf_counter() - tic:.3f}") print("running with two calls per task, with unordered results") # We can even iterate through faster results first, with ordered=False amr = v.map( sleep_here, [0.01 * t for t in range(100, 0, -1)], ordered=False, chunksize=2 ) -tic = time.time() +tic = time.perf_counter() for i, r in enumerate(amr): - print("slept %.2fs on engine %i: %.3f" % (r[1], r[0], time.time() - tic)) + print(f"slept {r[1]:.2f}s on engine {r[0]}: {time.perf_counter() - tic:.3f}") diff --git a/docs/source/examples/pi/parallelpi.py b/docs/source/examples/pi/parallelpi.py index e480109e..d52a011b 100644 --- a/docs/source/examples/pi/parallelpi.py +++ b/docs/source/examples/pi/parallelpi.py @@ -42,7 +42,7 @@ v = c[:] v.block = True # fetch the pi-files -print("downloading %i files of pi" % n) +print(f"downloading {n} files of pi") v.map(fetch_pi_file, files[:n]) # noqa: F821 print("done") @@ -60,10 +60,10 @@ freqs150m = reduce_freqs(freqs_all) t2 = clock() digits_per_second8 = n * 10.0e6 / (t2 - t1) -print("Digits per second (%i engines, %i0m digits): " % (n, n), digits_per_second8) +print(f"Digits per second ({n} engines, {n}0m digits): ", digits_per_second8) print("Speedup: ", digits_per_second8 / digits_per_second1) plot_two_digit_freqs(freqs150m) -plt.title("2 digit sequences in %i0m digits of pi" % n) +plt.title(f"2 digit sequences in {n}0m digits of pi") plt.show() diff --git a/docs/source/examples/task_profiler.py b/docs/source/examples/task_profiler.py index 5ecdca1b..0df7d15d 100644 --- a/docs/source/examples/task_profiler.py +++ b/docs/source/examples/task_profiler.py @@ -60,10 +60,7 @@ def main(): ] stime = sum(times) - print( - "executing %i tasks, totalling %.1f secs on %i engines" - % (opts.n, stime, nengines) - ) + print(f"executing {opts.n} tasks, totalling {stime:.1f} secs on {nengines} engines") time.sleep(1) start = time.perf_counter() amr = view.map(time.sleep, times) @@ -74,8 +71,8 @@ def main(): scale = stime / ptime print(f"executed {stime:.1f} secs in {ptime:.1f} secs") - print("%.3fx parallel performance on %i engines" % (scale, nengines)) - print("%.1f%% of theoretical max" % (100 * scale / nengines)) + print(f"{scale:3f}x parallel performance on {nengines} engines") + print(f"{scale / nengines:.0%} of theoretical max") if __name__ == '__main__': diff --git a/docs/source/examples/wave2D/RectPartitioner.py 
b/docs/source/examples/wave2D/RectPartitioner.py index 61ac4b57..b2d10bf1 100755 --- a/docs/source/examples/wave2D/RectPartitioner.py +++ b/docs/source/examples/wave2D/RectPartitioner.py @@ -61,7 +61,7 @@ def prepare_communication(self): nsd_ = self.nsd if nsd_ < 1: - print('Number of space dimensions is %d, nothing to do' % nsd_) + print(f'Number of space dimensions is {nsd_}, nothing to do') return self.subd_rank = [-1, -1, -1] @@ -93,7 +93,7 @@ def prepare_communication(self): self.subd_rank[1] = (my_id % offsets[2]) / self.num_parts[0] self.subd_rank[2] = my_id / offsets[2] - print("my_id=%d, subd_rank: " % my_id, self.subd_rank) + print(f"my_id={my_id}, subd_rank: {self.subd_rank}") if my_id == 0: print("offsets=", offsets) diff --git a/docs/source/examples/wave2D/communicator.py b/docs/source/examples/wave2D/communicator.py index 6b526fae..5a7a7914 100644 --- a/docs/source/examples/wave2D/communicator.py +++ b/docs/source/examples/wave2D/communicator.py @@ -27,8 +27,8 @@ def __init__(self, interface='tcp://*', identity=None): northport = self.north.bind_to_random_port(interface) eastport = self.east.bind_to_random_port(interface) - self.north_url = interface + ":%i" % northport - self.east_url = interface + ":%i" % eastport + self.north_url = f"{interface}:{northport}" + self.east_url = f"{interface}:{eastport}" # guess first public IP from socket self.location = socket.gethostbyname_ex(socket.gethostname())[-1][0] diff --git a/docs/source/examples/wave2D/parallelwave-mpi.py b/docs/source/examples/wave2D/parallelwave-mpi.py index 27271086..71dfc168 100755 --- a/docs/source/examples/wave2D/parallelwave-mpi.py +++ b/docs/source/examples/wave2D/parallelwave-mpi.py @@ -120,11 +120,7 @@ def wave_saver(u, x, y, t): partition = [1, num_procs] assert partition[0] * partition[1] == num_procs, ( - "can't map partition %s to %i engines" - % ( - partition, - num_procs, - ) + f"can't map partition {partition} to {num_procs} engines" ) view = rc[:] diff --git a/docs/source/examples/wave2D/parallelwave.py b/docs/source/examples/wave2D/parallelwave.py index 102e3ac6..cafda397 100755 --- a/docs/source/examples/wave2D/parallelwave.py +++ b/docs/source/examples/wave2D/parallelwave.py @@ -127,11 +127,7 @@ def wave_saver(u, x, y, t): num_procs = min(num_procs, partition[0] * partition[1]) assert partition[0] * partition[1] == num_procs, ( - "can't map partition %s to %i engines" - % ( - partition, - num_procs, - ) + f"can't map partition {partition} to {num_procs} engines" ) # construct the View: diff --git a/docs/source/examples/wave2D/wavesolver.py b/docs/source/examples/wave2D/wavesolver.py index 1e6686e6..c49ddd49 100755 --- a/docs/source/examples/wave2D/wavesolver.py +++ b/docs/source/examples/wave2D/wavesolver.py @@ -303,14 +303,7 @@ def solve(self, tstop, dt=-1, user_action=None, verbose=False, final_test=False) t1 = time.time() print( - 'my_id=%2d, dt=%g, %s version, slice_copy=%s, net Wtime=%g' - % ( - partitioner.my_id, - dt, - implementation['inner'], - partitioner.slice_copy, - t1 - t0, - ) + f'my_id={partitioner.my_id:2}, dt={dt:g}, {implementation["inner"]} version, slice_copy={partitioner.slice_copy}, net Wtime={t1 - t0:g}' ) # save the us self.us = u, u_1, u_2 diff --git a/ipyparallel/client/client.py b/ipyparallel/client/client.py index 893b5169..4236cd5a 100644 --- a/ipyparallel/client/client.py +++ b/ipyparallel/client/client.py @@ -153,7 +153,7 @@ def __repr__(self): if len(text_out) > 32: text_out = text_out[:29] + '...' 
- return "" % (self.execution_count, text_out) + return f"" def _plaintext(self): execute_result = self.metadata['execute_result'] or {'data': {}} diff --git a/ipyparallel/controller/hub.py b/ipyparallel/controller/hub.py index 1895681e..b166fbce 100644 --- a/ipyparallel/controller/hub.py +++ b/ipyparallel/controller/hub.py @@ -1245,7 +1245,7 @@ def purge_results(self, client_id, msg): for eid in eids: if eid not in self.engines: try: - raise IndexError("No such engine: %i" % eid) + raise IndexError(f"No such engine: {eid}") except Exception: reply = error.wrap_exception() self.log.exception("Error dropping records") diff --git a/ipyparallel/controller/sqlitedb.py b/ipyparallel/controller/sqlitedb.py index 8f76306a..9ba04888 100644 --- a/ipyparallel/controller/sqlitedb.py +++ b/ipyparallel/controller/sqlitedb.py @@ -274,7 +274,7 @@ def _init_db(self): i = 0 while not self._check_table(): i += 1 - self.table = first_table + '_%i' % i + self.table = f"{first_table}_{i}" self.log.warning( f"Table {previous_table} exists and doesn't match db format, trying {self.table}" ) diff --git a/ipyparallel/engine/app.py b/ipyparallel/engine/app.py index d855d8f4..7f27dcaa 100755 --- a/ipyparallel/engine/app.py +++ b/ipyparallel/engine/app.py @@ -612,7 +612,7 @@ async def complete_registration(self, msg, connect, maybe_tunnel): def url(key): """get zmq url for given channel""" - return str(info["interface"] + ":%i" % info[key]) + return f"{info['interface']}:{info[key]}" def urls(key): return [f'{info["interface"]}:{port}' for port in info[key]] @@ -777,7 +777,7 @@ def send_with_metadata( content['hb_period'], identity, ) - self.log.info("Completed registration with id %i" % self.id) + self.log.info("Completed registration with id %i", self.id) def start_nanny(self, control_url): self.log.info("Starting nanny") @@ -809,7 +809,7 @@ def start_heartbeat(self, hb_ping, hb_pong, hb_period, identity): self._hb_listener = zmqstream.ZMQStream(mon, self.loop) self._hb_listener.on_recv(self._report_ping) - hb_monitor = "tcp://%s:%i" % (localhost(), mport) + hb_monitor = f"tcp://{localhost()}:{mport}" heart = Heart( hb_ping, diff --git a/ipyparallel/engine/log.py b/ipyparallel/engine/log.py index c45dc286..cdad377d 100644 --- a/ipyparallel/engine/log.py +++ b/ipyparallel/engine/log.py @@ -15,6 +15,6 @@ def root_topic(self): """this is a property, in case the handler is created before the engine gets registered with an id""" if isinstance(getattr(self.engine, 'id', None), int): - return "engine.%i" % self.engine.id + return f"engine.{self.engine.id}" else: return "engine" diff --git a/ipyparallel/error.py b/ipyparallel/error.py index fc26405c..d09144e4 100644 --- a/ipyparallel/error.py +++ b/ipyparallel/error.py @@ -161,11 +161,11 @@ def __str__(self): engine_str = self._get_engine_str(ei) s = s + '\n' + engine_str + en + ': ' + str(ev) if len(self.elist) > self.tb_limit: - s = s + '\n.... %i more exceptions ...' % (len(self.elist) - self.tb_limit) + s = s + f'\n.... {len(self.elist) - self.tb_limit} more exceptions ...' return s def __repr__(self): - return "CompositeError(%i)" % len(self.elist) + return f"CompositeError({len(self.elist)})" def render_traceback(self, excid=None): """render one or all of my tracebacks to a list of lines""" @@ -177,13 +177,13 @@ def render_traceback(self, excid=None): lines.append('') if len(self.elist) > self.tb_limit: lines.append( - '... %i more exceptions ...' % (len(self.elist) - self.tb_limit) + f'... {len(self.elist) - self.tb_limit} more exceptions ...' 
) else: try: en, ev, etb, ei = self.elist[excid] except Exception: - raise IndexError("an exception with index %i does not exist" % excid) + raise IndexError(f"an exception with index {excid} does not exist") else: lines.append(self._get_engine_str(ei) + ":") lines.extend((etb or 'No traceback available').splitlines()) @@ -197,7 +197,7 @@ def raise_exception(self, excid=0): try: en, ev, etb, ei = self.elist[excid] except Exception: - raise IndexError("an exception with index %i does not exist" % excid) + raise IndexError(f"an exception with index {excid} does not exist") else: raise RemoteError(en, ev, etb, ei) diff --git a/ipyparallel/tests/test_view.py b/ipyparallel/tests/test_view.py index f247a145..87439015 100644 --- a/ipyparallel/tests/test_view.py +++ b/ipyparallel/tests/test_view.py @@ -553,7 +553,7 @@ def test_execute_reply(self): e0.block = True ar = e0.execute("5", silent=False) er = ar.get() - assert str(er) == "" % er.execution_count + assert str(er) == f"" assert er.execute_result['data']['text/plain'] == '5' def test_execute_reply_rich(self): diff --git a/ipyparallel/util.py b/ipyparallel/util.py index 8e6fb259..d4a953f4 100644 --- a/ipyparallel/util.py +++ b/ipyparallel/util.py @@ -364,7 +364,7 @@ def signal_children(children): def terminate_children(sig, frame): log = get_logger() - log.critical("Got signal %i, terminating children..." % sig) + log.critical("Got signal %i, terminating children...", sig) for child in children: child.terminate()
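For reference, ruff's UP031 (printf-string-formatting) rule flags the percent-style interpolation removed in patch 3; the commit message notes ruff refuses to apply these fixes automatically, so they were converted by hand. Below is a minimal sketch of the rewrite pattern, using made-up variable names rather than any code from this series:

    # UP031 flags printf-style "%" interpolation like this:
    engine_id, elapsed = 3, 1.25
    old = "engine %i finished in %.3f s" % (engine_id, elapsed)

    # The manual fix converts it to an equivalent f-string,
    # keeping the same format spec after the colon:
    new = f"engine {engine_id} finished in {elapsed:.3f} s"

    assert old == new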