Commit 0f19954

Attempt JSON serialization before pickling and fix bug with save_all

1 parent: 604395e

8 files changed: +26 -14 lines

README.md

Lines changed: 4 additions & 4 deletions

@@ -69,13 +69,13 @@ if __name__ == "__main__":
                  description='This is part 1 of a test') # Description

         # Upload the code
-        run.save('training.py', 'code')
+        run.save_file('training.py', 'code')

         # Upload an input file
-        run.save('params.in', 'input')
+        run.save_file('params.in', 'input')

         # Add an alert (the alert definition will be created if necessary)
-        run.add_alert(name='loss-too-high', # Name
+        run.create_alert(name='loss-too-high', # Name
                          source='metrics', # Source
                          rule='is above', # Rule
                          metric='loss', # Metric
@@ -96,7 +96,7 @@ if __name__ == "__main__":
         ...

         # Upload an output file
-        run.save('output.cdf', 'output')
+        run.save_file('output.cdf', 'output')

         # If we weren't using a context manager we'd need to end the run
         # run.close()
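
The README hunks above amount to two API renames: run.save() becomes run.save_file() and run.add_alert() becomes run.create_alert(). A minimal sketch of the updated usage follows, keeping the context-manager pattern the README already uses; the Run import, the init name argument, and the trailing threshold parameter of create_alert are illustrative assumptions, not part of this diff.

    from simvue import Run

    with Run() as run:
        run.init(name='readme-example',                   # assumed name
                 description='This is part 1 of a test')  # Description

        # Upload the code and an input file (renamed from save to save_file)
        run.save_file('training.py', 'code')
        run.save_file('params.in', 'input')

        # Alert creation is now create_alert rather than add_alert
        run.create_alert(name='loss-too-high',  # Name
                         source='metrics',      # Source
                         rule='is above',       # Rule
                         metric='loss',         # Metric
                         threshold=10)          # assumed: not shown in this hunk

        # ... training ...

        # Upload an output file (also renamed)
        run.save_file('output.cdf', 'output')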

examples/GeometryOptimisation/bluemira_simvue_geometry_optimisation.py

Lines changed: 1 addition & 1 deletion

@@ -171,5 +171,5 @@ def my_minimise_length(vector, grad, parameterisation, ad_args=None):

 # Here we're minimising the length, within the bounds of our PrincetonD parameterisation,
 # so we'd expect that x1 goes to its upper bound, and x2 goes to its lower bound.
-run.save("bluemira_simvue_geometry_optimisation.py", "code")
+run.save_file("bluemira_simvue_geometry_optimisation.py", "code")
 run.close()

examples/PyTorch/main.py

Lines changed: 1 addition & 1 deletion

@@ -205,7 +205,7 @@ def main():
         scheduler.step()

     if args.save_model:
-        run.save(model.state_dict(), "output", name="mnist_cnn.pt")
+        run.save_file(model.state_dict(), "output", name="mnist_cnn.pt")

     run.close()

examples/SU2/SU2.py

Lines changed: 2 additions & 2 deletions

@@ -56,7 +56,7 @@
     filetype = None
     if input_file.endswith(".cfg"):
         filetype = "text/plain"
-    run.save(input_file, "input", filetype)
+    run.save_file(input_file, "input", filetype)

 running = True
 latest = []
@@ -106,6 +106,6 @@

 # Save output files
 for output_file in OUTPUT_FILES:
-    run.save(output_file, "output")
+    run.save_file(output_file, "output")

 run.close()

examples/Tensorflow/dynamic_rnn.py

Lines changed: 1 addition & 1 deletion

@@ -45,7 +45,7 @@
     "computation over sequences with variable length. This example is using a toy dataset to "
     "classify linear sequences. The generated sequences have variable length.",
 )
-run.save("dynamic_rnn.py", "code")
+run.save_file("dynamic_rnn.py", "code")

 # ====================
 # TOY DATA GENERATOR

simvue/executor.py

Lines changed: 4 additions & 4 deletions

@@ -167,10 +167,10 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None:
         )

         if script:
-            self._runner.save(filename=script, category="code")
+            self._runner.save_file(filename=script, category="code")

         if input_file:
-            self._runner.save(filename=input_file, category="input")
+            self._runner.save_file(filename=input_file, category="input")

         _command: typing.List[str] = []

@@ -284,11 +284,11 @@ def _save_output(self) -> None:
         for proc_id in self._exit_codes.keys():
             # Only save the file if the contents are not empty
             if self._std_err[proc_id]:
-                self._runner.save(
+                self._runner.save_file(
                     f"{self._runner.name}_{proc_id}.err", category="output"
                 )
             if self._std_out[proc_id]:
-                self._runner.save(
+                self._runner.save_file(
                     f"{self._runner.name}_{proc_id}.out", category="output"
                 )

simvue/run.py

Lines changed: 1 addition & 1 deletion

@@ -1142,7 +1142,7 @@ def save_all(

         for item in items:
             if item.is_file():
-                save_file = self.save(f"{item}", category, filetype, preserve_path)
+                save_file = self.save_file(item, category, filetype, preserve_path)
             elif item.is_dir():
                 save_file = self.save_directory(item, category, filetype, preserve_path)
             else:
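
The run.py hunk is the save_all fix named in the commit message: the old code stringified each path and routed it through the removed save() method, while the new code hands the pathlib item directly to save_file(). A rough usage sketch follows; the full save_all signature is not visible in this hunk, so the second positional argument (the category) is inferred from how it is used inside the loop, and the init arguments are assumptions.

    from pathlib import Path
    from simvue import Run

    with Run() as run:
        run.init(name='save-all-example')  # assumed init arguments

        # Files are routed to save_file, directories to save_directory,
        # mirroring the is_file()/is_dir() branches in the hunk above.
        run.save_all([Path('params.in'), Path('results')], 'output')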

simvue/serialization.py

Lines changed: 12 additions & 0 deletions

@@ -8,6 +8,7 @@
 import typing
 import pickle
 import pandas
+import json
 import numpy

 from io import BytesIO
@@ -82,6 +83,8 @@ def serialize_object(
             return _serialize_matplotlib(data)
         except ImportError:
             pass
+    elif serialized := _serialize_json(data):
+        return serialized

     if allow_pickle:
         return _serialize_pickle(data)
@@ -155,6 +158,15 @@ def _serialize_torch_tensor(data: typing.Any) -> typing.Optional[tuple[str, str]]:
     return data, mimetype


+def _serialize_json(data: typing.Any) -> typing.Optional[tuple[str, str]]:
+    mimetype = "application/json"
+    try:
+        data = json.dumps(data)
+    except TypeError:
+        return None
+    return data, mimetype
+
+
 def _serialize_pickle(data: typing.Any) -> typing.Optional[tuple[str, str]]:
     mimetype = "application/octet-stream"
     data = pickle.dumps(data)
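
The serialization change is the other half of the commit message: serialize_object now attempts a JSON encoding before falling back to pickle, so objects that json.dumps accepts are stored as human-readable application/json and only the rest are pickled, and only when allow_pickle is set. Below is a simplified, self-contained sketch of that ordering; it deliberately omits the earlier type-specific branches (pandas, numpy, matplotlib, torch) that appear in the real serialize_object.

    import json
    import pickle
    import typing


    def _serialize_json(data: typing.Any) -> typing.Optional[tuple[str, str]]:
        # Mirrors the helper added in this commit: return None when the
        # object cannot be represented as JSON.
        try:
            return json.dumps(data), "application/json"
        except TypeError:
            return None


    def serialize_sketch(data: typing.Any, allow_pickle: bool = False):
        # JSON first, pickle only as a last resort.
        if serialized := _serialize_json(data):
            return serialized
        if allow_pickle:
            return pickle.dumps(data), "application/octet-stream"
        return None


    print(serialize_sketch({"loss": 0.1}))        # ('{"loss": 0.1}', 'application/json')
    print(serialize_sketch({0.1, 0.2}, True)[1])  # a set is not JSON-serializable -> pickled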
