|
36 | 36 | print(df) |
37 | 37 |
|
38 | 38 |
|
39 | | -# Dask example: process large datasets |
| 39 | +# Particles |
40 | 40 | if found_dask: |
41 | 41 | df = electrons.to_dask() |
42 | 42 | print(df) |
|
54 | 54 | weights=df["weighting"].to_dask_array()) |
55 | 55 | print(h.compute()) |
56 | 56 |
|
57 | | - # example3: save all data data to parquet files |
| 57 | + # example3: longitudinal phase space (dask 2021.04.0+) |
| 58 | + z_min = df["position_z"].min().compute() |
| 59 | + z_max = df["position_z"].max().compute() |
| 60 | + |
| 61 | + z_pz, z_pz_bins = da.histogramdd( |
| 62 | + df[['position_z', 'momentum_z']].to_dask_array(), |
| 63 | + bins=[80, 80], |
| 64 | + range=[[z_min, z_max], [-8.0e-23, 8.0e-23]], |
| 65 | + weights=df["weighting"].to_dask_array() |
| 66 | + ) |
| 67 | + print(z_pz.compute()) |
| 68 | + |
| 69 | + # example4: save all data to parquet files |
58 | 70 | delayed_save = delayed(df.to_parquet("electrons.parquet")) |
59 | 71 | delayed_save.compute() |
60 | 72 |
|
61 | | -# Dask example 2: meshes |
| 73 | +# Meshes |
62 | 74 | if found_dask: |
63 | | - E_x = s.iterations[400].meshes["E"]["x"] |
64 | | - E_y = s.iterations[400].meshes["E"]["y"] |
65 | | - E_z = s.iterations[400].meshes["E"]["z"] |
| 75 | + E = s.iterations[400].meshes["E"] |
| 76 | + E_x = E["x"] |
| 77 | + E_y = E["y"] |
| 78 | + E_z = E["z"] |
66 | 79 | darr_x = E_x.to_dask_array() |
67 | 80 | darr_y = E_y.to_dask_array() |
68 | 81 | darr_z = E_z.to_dask_array() |
|
71 | 84 | Intensity = darr_x * darr_x + darr_y * darr_y + darr_z * darr_z |
72 | 85 | Intensity_max = Intensity.max().compute() |
73 | 86 | idx_max = da.argwhere(Intensity == Intensity_max).compute()[0] |
74 | | - print("maximum intensity I={} at index={}".format(Intensity_max, idx_max)) |
| 87 | + pos_max = E.grid_unit_SI * 1.0e6 * ( |
| 88 | + idx_max * E.grid_spacing + E.grid_global_offset) |
| 89 | + print("maximum intensity I={} at index={} z={}mu".format( |
| 90 | + Intensity_max, idx_max, pos_max[2])) |
0 commit comments