Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion libensemble/libE.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,9 +242,11 @@ def libE(
]
exit_criteria = specs_dump(ensemble.exit_criteria, by_alias=True, exclude_none=True)

# Restore the generator object (don't use serialized version)
# Restore objects that don't survive serialization via model_dump
if hasattr(ensemble.gen_specs, "generator") and ensemble.gen_specs.generator is not None:
gen_specs["generator"] = ensemble.gen_specs.generator
if hasattr(ensemble.gen_specs, "vocs") and ensemble.gen_specs.vocs is not None:
gen_specs["vocs"] = ensemble.gen_specs.vocs

# Extract platform info from settings or environment
platform_info = get_platform(libE_specs)
Expand Down
10 changes: 10 additions & 0 deletions libensemble/specs.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,6 +216,16 @@ class GenSpecs(BaseModel):
batch sizes via ``gen_specs["user"]`` or other methods.
"""

initial_sample_method: str | None = None
"""
Method for producing initial sample points before starting the generator.
If None (default), the generator is responsible for producing its own initial
sample via ``suggest()``. Set to ``"uniform"`` to have libEnsemble generate
uniform random samples from VOCS bounds, evaluate them, and ingest the results
into the generator before optimization begins. The number of sample points is
determined by ``initial_batch_size``.
"""

threaded: bool | None = False
"""
Instruct Worker process to launch user function to a thread.
Expand Down
87 changes: 87 additions & 0 deletions libensemble/tests/regression_tests/test_xopt_EI_initial_sample.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
"""
Tests libEnsemble with Xopt ExpectedImprovementGenerator using
initial_sample_method="uniform" to produce initial sample points.

EI requires pre-evaluated data before it can suggest points. This test
verifies that setting initial_sample_method="uniform" in GenSpecs causes
libEnsemble to generate uniform random samples, evaluate them through
the sim, and ingest results into the generator before optimization begins.

Execute via one of the following commands (e.g. 4 workers):
mpiexec -np 5 python test_xopt_EI_initial_sample.py
python test_xopt_EI_initial_sample.py -n 4

"""

# Do not change these lines - they are parsed by run-tests.sh
# TESTSUITE_COMMS: local
# TESTSUITE_NPROCS: 4
# TESTSUITE_EXTRA: true
# TESTSUITE_EXCLUDE: true

import numpy as np
from gest_api.vocs import VOCS
from xopt.generators.bayesian.expected_improvement import ExpectedImprovementGenerator

from libensemble import Ensemble
from libensemble.alloc_funcs.start_only_persistent import only_persistent_gens as alloc_f
from libensemble.specs import AllocSpecs, ExitCriteria, GenSpecs, LibeSpecs, SimSpecs


def xtest_sim(H, persis_info, sim_specs, _):
    """Trivial simulator used to exercise the initial-sample path: y1 = x2, c1 = x1.

    Parameters
    ----------
    H : numpy structured array
        Input history with fields ``"x1"`` and ``"x2"``.
    persis_info : dict
        Worker persistent state; returned unchanged.
    sim_specs : dict
        Uses ``sim_specs["out"]`` as the dtype of the output array.
    _ :
        Unused (libE_info slot of the standard sim_f signature).

    Returns
    -------
    (H_o, persis_info) : the filled output array and the unchanged state dict.
    """
    H_o = np.zeros(len(H), dtype=sim_specs["out"])
    # Vectorized field copies replace the per-row loop; also correct for an empty batch.
    H_o["y1"] = H["x2"]
    H_o["c1"] = H["x1"]
    return H_o, persis_info


if __name__ == "__main__":

    batch_size = 4

    # Generator runs on the manager process; one worker per point in the batch.
    libE_specs = LibeSpecs(gen_on_manager=True, nworkers=batch_size)
    libE_specs.reuse_output_dir = True

    # VOCS defines the search space: two bounded variables, one objective to
    # minimize, one inequality constraint, and a constant.
    vocs = VOCS(
        variables={"x1": [0, 1.0], "x2": [0, 10.0]},
        objectives={"y1": "MINIMIZE"},
        constraints={"c1": ["GREATER_THAN", 0.5]},
        constants={"constant1": 1.0},
    )

    # EI needs evaluated data before it can suggest points (see module docstring).
    gen = ExpectedImprovementGenerator(vocs=vocs)

    # NO pre-ingested data — libEnsemble handles initial sampling.
    # initial_sample_method="uniform" makes libEnsemble draw initial_batch_size
    # uniform samples from the VOCS bounds and ingest the results into `gen`.
    gen_specs = GenSpecs(
        generator=gen,
        initial_batch_size=batch_size,
        initial_sample_method="uniform",
        batch_size=batch_size,
        vocs=vocs,
    )

    sim_specs = SimSpecs(
        sim_f=xtest_sim,
        vocs=vocs,
    )

    # Persistent-generator allocation; stop after 20 completed simulations.
    alloc_specs = AllocSpecs(alloc_f=alloc_f)
    exit_criteria = ExitCriteria(sim_max=20)

    workflow = Ensemble(
        libE_specs=libE_specs,
        sim_specs=sim_specs,
        alloc_specs=alloc_specs,
        gen_specs=gen_specs,
        exit_criteria=exit_criteria,
    )

    H, _, _ = workflow.run()

    if workflow.is_manager:
        print(f"Completed {len(H)} simulations")
        # presumably 4 initial samples + at least one EI batch of 4 — TODO confirm threshold
        assert len(H) >= 8, f"Expected at least 8 sims, got {len(H)}"
        print("Test passed")
47 changes: 39 additions & 8 deletions libensemble/utils/runners.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,19 @@ def _start_generator_loop(self, tag, Work, H_in):
self._convert_initial_ingest(H_in)
return self._loop_over_gen(tag, Work, H_in)

def _create_initial_sample(self, sample_method, num_points):
    """Create ``num_points`` initial sample points with the named sampling method.

    Parameters
    ----------
    sample_method: str
        Key selecting the sampler class. Currently only ``"uniform"``.
    num_points: int
        Number of points to request from the sampler.

    Returns
    -------
    The sampler's ``suggest(num_points)`` output (list of point dicts).

    Raises
    ------
    ValueError
        If ``sample_method`` is not a supported method, or no ``vocs`` is
        available in the specs to sample bounds from.
    """
    # Local import avoids a module-level dependency cycle — TODO confirm that
    # is the reason it was function-scoped originally.
    from libensemble.gen_classes.sampling import UniformSample

    samplers = {
        "uniform": UniformSample,
    }
    sampler_cls = samplers.get(sample_method)
    if sampler_cls is None:
        raise ValueError(f"Unknown initial_sample_method: {sample_method!r}. Supported: {list(samplers)}")
    vocs = self.specs.get("vocs")
    if vocs is None:
        # Uniform sampling draws from VOCS variable bounds; fail clearly rather
        # than letting the sampler constructor raise an opaque error.
        raise ValueError("initial_sample_method requires 'vocs' to be set in GenSpecs.")
    return sampler_cls(vocs=vocs).suggest(num_points)

def _persistent_result(self, calc_in, persis_info, libE_info):
"""Setup comms with manager, setup gen, loop gen to completion, return gen's results"""
self.ps = PersistentSupport(libE_info, EVAL_GEN_TAG)
Expand All @@ -166,14 +179,32 @@ def _persistent_result(self, calc_in, persis_info, libE_info):
if calc_in is not None and len(calc_in) > 0:
self._convert_initial_ingest(calc_in)

# libE gens will hit the following line, but list_dicts_to_np will passthrough if the output is a numpy array
H_out = list_dicts_to_np(
self._get_initial_suggest(libE_info),
dtype=self.specs.get("out"),
mapping=getattr(self.gen, "variables_mapping", {}),
)
tag, Work, H_in = self.ps.send_recv(H_out) # evaluate the initial sample
final_H_out = self._start_generator_loop(tag, Work, H_in)
sample_method = self.specs.get("initial_sample_method")
if sample_method is not None:
# libEnsemble produces the initial sample, evaluates it, and
# ingests results into the generator before optimization begins.
initial_batch = self.specs.get("initial_batch_size")
if not initial_batch:
raise ValueError("initial_sample_method requires initial_batch_size to be set in GenSpecs.")
H_sample = list_dicts_to_np(
self._create_initial_sample(sample_method, initial_batch),
dtype=self.specs.get("out"),
mapping=getattr(self.gen, "variables_mapping", {}),
)
tag, Work, H_in = self.ps.send_recv(H_sample)
self._convert_initial_ingest(H_in)
# Generator now has evaluated data — enter the normal loop
final_H_out = self._loop_over_gen(tag, Work, H_in)
else:
# Generator handles its own initial sampling
H_out = list_dicts_to_np(
self._get_initial_suggest(libE_info),
dtype=self.specs.get("out"),
mapping=getattr(self.gen, "variables_mapping", {}),
)
tag, Work, H_in = self.ps.send_recv(H_out) # evaluate the initial sample
final_H_out = self._start_generator_loop(tag, Work, H_in)

self.gen.finalize()
return final_H_out, FINISHED_PERSISTENT_GEN_TAG

Expand Down
Loading