Skip to content

Commit 0ea9be1

Browse files
committed
Further reduce initial data size to speed up benchmark
1 parent 6eda215 commit 0ea9be1

File tree

2 files changed

+4
-4
lines changed

2 files changed

+4
-4
lines changed

benchmarks/domains/easom_tl_noise.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -158,9 +158,9 @@ def easom_tl_noise(settings: ConvergenceBenchmarkSettings) -> pd.DataFrame:
158158
results = []
159159

160160
def sample_initial_data():
161-
p = 0.0001
161+
p = 0.00005
162162
upsample_max_thr = 0.5
163-
n_upsample_max = 3
163+
n_upsample_max = 1
164164
data_sub = pd.concat(
165165
[
166166
# Sample specific fraction of initial data

benchmarks/domains/michalewicz_tl_noise.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -144,9 +144,9 @@ def michalewicz_tl_noise(settings: ConvergenceBenchmarkSettings) -> pd.DataFrame
144144
results = []
145145

146146
def sample_initial_data():
147-
p = 0.0005
147+
p = 0.0001
148148
upsample_max_thr = 3
149-
n_upsample_max = 3
149+
n_upsample_max = 1
150150
data_sub = pd.concat(
151151
[
152152
# Sample specific fraction of initial data

0 commit comments

Comments (0)