diff --git a/libensemble/gen_funcs/persistent_ax_multitask.py b/libensemble/gen_funcs/persistent_ax_multitask.py
index 451f14ad9a..a2d1aaebc5 100644
--- a/libensemble/gen_funcs/persistent_ax_multitask.py
+++ b/libensemble/gen_funcs/persistent_ax_multitask.py
@@ -102,9 +102,7 @@ def get_MTGP(
     """
 
     if isinstance(experiment, MultiTypeExperiment):
-        trial_index_to_type = {
-            t.index: t.trial_type for t in experiment.trials.values()
-        }
+        trial_index_to_type = {t.index: t.trial_type for t in experiment.trials.values()}
         transforms = MT_MTGP_trans
         transform_configs = {
             "TrialAsTask": {"trial_level_map": {"trial_type": trial_index_to_type}},
@@ -275,9 +273,7 @@ def persistent_gp_mt_ax_gen_f(H, persis_info, gen_specs, libE_info):
     if not os.path.exists("model_history"):
         os.mkdir("model_history")
     # Register metric and runner in order to be able to save to json.
-    _, encoder_registry, decoder_registry = register_metrics(
-        {AxMetric: None}
-    )
+    _, encoder_registry, decoder_registry = register_metrics({AxMetric: None})
     _, encoder_registry, decoder_registry = register_runner(
         AxRunner,
         encoder_registry=encoder_registry,
diff --git a/libensemble/tests/regression_tests/test_persistent_aposmm_ibcdfo_pounders.py b/libensemble/tests/regression_tests/test_persistent_aposmm_ibcdfo_pounders.py
index dd0a86b5ba..b204ec6499 100644
--- a/libensemble/tests/regression_tests/test_persistent_aposmm_ibcdfo_pounders.py
+++ b/libensemble/tests/regression_tests/test_persistent_aposmm_ibcdfo_pounders.py
@@ -52,10 +52,6 @@
     sys.exit("Ensure https://github.com/POptUS/minq has been cloned and that minq/py/minq5/ is on the PYTHONPATH")
 
 
-def sum_squared(x):
-    return np.sum(np.power(x, 2))
-
-
 def synthetic_beamline_mapping(H, _, sim_specs):
     x = H["x"][0]
     assert len(x) == 4, "Assuming 4 inputs to this function"
@@ -76,7 +72,7 @@ def synthetic_beamline_mapping(H, _, sim_specs):
 
     nworkers, is_manager, libE_specs, _ = parse_args()
 
-    assert nworkers == 2, "This test is just for two workers"
+    assert nworkers == 2, "This test is just for two workers, as only one localopt run is being performed"
 
     for inst in range(2):
         if inst == 0:
@@ -109,9 +105,9 @@ def synthetic_beamline_mapping(H, _, sim_specs):
        "persis_in": ["f", "fvec"] + [n[0] for n in gen_out],
        "out": gen_out,
        "user": {
-            "initial_sample_size": 1,
-            "stop_after_k_runs": 1,
-            "max_active_runs": 1,
+            "initial_sample_size": 1,  # The initial sampled point will be the starting point
+            "stop_after_k_runs": 1,  # Only one local optimization run will be performed
+            "max_active_runs": 1,  # Only one local optimization run will be performed
             "sample_points": np.atleast_2d(0.1 * (np.arange(n) + 1)),
             "localopt_method": "ibcdfo_pounders",
             "run_max_eval": 100 * (n + 1),
diff --git a/libensemble/tests/regression_tests/test_persistent_gp_multitask_ax.py b/libensemble/tests/regression_tests/test_persistent_gp_multitask_ax.py
index 478b42fcc5..242dfc1038 100644
--- a/libensemble/tests/regression_tests/test_persistent_gp_multitask_ax.py
+++ b/libensemble/tests/regression_tests/test_persistent_gp_multitask_ax.py
@@ -50,7 +50,7 @@ def run_simulation(H, persis_info, sim_specs, libE_info):
         z = 8
     elif task == "cheap_model":
         z = 1
-    print('in sim', task)
+    print("in sim", task)
 
     libE_output = np.zeros(1, dtype=sim_specs["out"])
     calc_status = WORKER_DONE