Merge pull request #62 from MSDLLCpapers/main

Sync plotting branch with main

xuyuting authored Sep 16, 2024
2 parents 82039c0 + ca96f31 commit 36890c3
Showing 34 changed files with 888 additions and 370 deletions.
20 changes: 20 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,25 @@
# Changelog

## [Untracked Changes]

## [0.8.5]
### Added
- More optional outputs for verbose settings
- Parameters in ParamSpace can also be indexed by name
- Parameters now have a search_space property, used to restrict the optimizer search space relative to the full space
- Continuous parameters have search_min/search_max; discrete parameters have search_categories
- Constraints are now defined by a Constraint class
- Input constraints can now be included in ParamSpace, and serialized from there
- Output constraints can now be included in Campaign, and serialized from there
- New interface class IParamSpace to address circular import issues between ParamSpace and Constraint

### Modified
- Optimizer and Campaign X_space attributes are now assigned using a setter
- Optimizer.maximize() now correctly recognizes the fixed_var argument

### Removed
- Torch device references and options (GPU compatibility may be re-added)

## [0.8.4]
### Added
- Campaign X_best method
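
As a quick orientation to the 0.8.5 additions listed above, the following is a minimal sketch of the new search-space features. The Param_Continuous class, its constructor signature, and the import paths are assumptions about the wider obsidian API rather than part of this diff; only search_min/search_max and name-based indexing come from the changelog itself.

```python
# Hedged sketch of the 0.8.5 search-space additions; Param_Continuous and the
# import paths are assumptions -- only search_min/search_max and name-based
# indexing are taken from the changelog entries above.
from obsidian.parameters import Param_Continuous, ParamSpace

X_space = ParamSpace([Param_Continuous('Temperature', 20, 100)])

# Parameters in ParamSpace can now be indexed by name
temperature = X_space['Temperature']

# Restrict the optimizer search space without altering the full parameter range
temperature.search_min = 40
temperature.search_max = 80
```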
Binary file added docs/_static/tutorials/demo_app.png
Binary file added docs/_static/tutorials/demo_factoreffect.png
Binary file added docs/_static/tutorials/demo_optimprogress.png
Binary file added docs/_static/tutorials/demo_pdp_ice.png
Binary file added docs/_static/tutorials/demo_shap.png
Binary file added docs/_static/tutorials/demo_surface.png
5 changes: 4 additions & 1 deletion docs/conf.py
@@ -101,7 +101,10 @@
"icon": "fa-brands fa-python",
"type": "fontawesome",
},
]
],
"analytics": {
"google_analytics_id": "G-BPMFV2DMZE",
},
}

# Add any paths that contain custom static files (such as style sheets) here,
10 changes: 10 additions & 0 deletions docs/index.rst
@@ -1,3 +1,13 @@
.. raw:: html

<div style="visibility: hidden;">
Home
====

.. raw:: html

</div>

.. toctree::
:hidden:
:maxdepth: 2
2 changes: 1 addition & 1 deletion obsidian/__init__.py
@@ -1,5 +1,5 @@
"""obsidian: Automated experiment design and black-box optimization"""
__version__ = '0.8.4'
__version__ = '0.8.5'

# Import key objects
from obsidian.campaign import Campaign
159 changes: 102 additions & 57 deletions obsidian/campaign/campaign.py
@@ -4,7 +4,9 @@
from obsidian.optimizer import Optimizer, BayesianOptimizer
from obsidian.experiment import ExpDesigner
from obsidian.objectives import Objective, Objective_Sequence, obj_class_dict
from obsidian.constraints import Output_Constraint, const_class_dict
from obsidian.exceptions import IncompatibleObjectiveError
from obsidian.utils import tensordict_to_dict
import obsidian

import pandas as pd
@@ -42,12 +44,13 @@ class Campaign():
def __init__(self,
X_space: ParamSpace,
target: Target | list[Target],
constraints: Output_Constraint | list[Output_Constraint] | None = None,
optimizer: Optimizer | None = None,
designer: ExpDesigner | None = None,
objective: Objective | None = None,
seed: int | None = None):

self.X_space = X_space
self.set_X_space(X_space)
self.data = pd.DataFrame()

optimizer = BayesianOptimizer(X_space, seed=seed) if optimizer is None else optimizer
@@ -59,6 +62,9 @@ def __init__(self,
self.set_target(target)
self.set_objective(objective)

self.output_constraints = None
self.constrain_outputs(constraints)

# Non-object attributes
self.iter = 0
self.seed = seed
@@ -101,6 +107,15 @@ def clear_data(self):
self.data = pd.DataFrame()
self.iter = 0

@property
def X_space(self) -> ParamSpace:
"""Campaign ParamSpace"""
return self._X_space

def set_X_space(self, X_space: ParamSpace):
"""Sets the campaign ParamSpace"""
self._X_space = X_space

@property
def optimizer(self) -> Optimizer:
"""Campaign Optimizer"""
@@ -260,59 +275,6 @@ def X(self) -> pd.DataFrame:
"""
return self.data[list(self.X_space.X_names)]

def save_state(self) -> dict:
"""
Saves the state of the Campaign object as a dictionary.
Returns:
dict: A dictionary containing the saved state of the Campaign object.
"""
obj_dict = {}
obj_dict['X_space'] = self.X_space.save_state()
obj_dict['optimizer'] = self.optimizer.save_state()
obj_dict['data'] = self.data.to_dict()
obj_dict['target'] = [t.save_state() for t in self.target]
if self.objective:
obj_dict['objective'] = self.objective.save_state()
obj_dict['seed'] = self.seed

return obj_dict

@classmethod
def load_state(cls,
obj_dict: dict):
"""
Loads the state of the campaign from a dictionary.
Args:
cls (Campaign): The class object.
obj_dict (dict): A dictionary containing the campaign state.
Returns:
Campaign: A new campaign object with the loaded state.
"""

if 'objective' in obj_dict:
if obj_dict['objective']['name'] == 'Objective_Sequence':
new_objective = Objective_Sequence.load_state(obj_dict['objective'])
else:
obj_class = obj_class_dict[obj_dict['objective']['name']]
new_objective = obj_class.load_state(obj_dict['objective'])
else:
new_objective = None

new_campaign = cls(X_space=ParamSpace.load_state(obj_dict['X_space']),
target=[Target.load_state(t_dict) for t_dict in obj_dict['target']],
optimizer=BayesianOptimizer.load_state(obj_dict['optimizer']),
objective=new_objective,
seed=obj_dict['seed'])
new_campaign.data = pd.DataFrame(obj_dict['data'])
new_campaign.data.index = new_campaign.data.index.astype('int')

new_campaign.iter = new_campaign.data['Iteration'].astype('int').max()

return new_campaign

def __repr__(self):
"""String representation of object"""
return f"obsidian Campaign for {getattr(self,'y_names', None)}; {getattr(self,'m_exp', 0)} observations"
@@ -342,7 +304,11 @@ def suggest(self, **optim_kwargs):
"""
if self.optimizer.is_fit:
try:
X, eval = self.optimizer.suggest(objective=self.objective, **optim_kwargs)
# In case X_space has changed, re-set the optimizer X_space
self.optimizer.set_X_space(self.X_space)
X, eval = self.optimizer.suggest(objective=self.objective,
out_constraints=self.output_constraints,
**optim_kwargs)
return (X, eval)
except Exception:
warnings.warn('Optimization failed')
@@ -371,11 +337,11 @@ def _profile_hv(self):
for i in iters:
iter_index = self.data.query(f'Iteration <= {i}').index
out_iter = self.out.loc[iter_index, :]
out_iter = torch.tensor(out_iter.values).to(self.optimizer.device)
out_iter = torch.tensor(out_iter.values)
hv[i] = self.optimizer.hypervolume(out_iter)

self.data['Hypervolume (iter)'] = self.data.apply(lambda x: hv[x['Iteration']], axis=1)
self.data['Pareto Front'] = self.optimizer.pareto(torch.tensor(self.out.values).to(self.optimizer.device))
self.data['Pareto Front'] = self.optimizer.pareto(torch.tensor(self.out.values))

return

@@ -419,3 +385,82 @@ def _analyze(self):
)

return

def constrain_outputs(self,
constraints: Output_Constraint | list[Output_Constraint] | None) -> None:
"""
Sets optional output constraints for the campaign.
"""
if constraints is not None:
if isinstance(constraints, Output_Constraint):
constraints = [constraints]
self.output_constraints = constraints

return

def clear_output_constraints(self):
"""Clears output constraints"""
self.output_constraints = None

def save_state(self) -> dict:
"""
Saves the state of the Campaign object as a dictionary.
Returns:
dict: A dictionary containing the saved state of the Campaign object.
"""
obj_dict = {}
obj_dict['X_space'] = self.X_space.save_state()
obj_dict['optimizer'] = self.optimizer.save_state()
obj_dict['data'] = self.data.to_dict()
obj_dict['target'] = [t.save_state() for t in self.target]
if self.objective:
obj_dict['objective'] = self.objective.save_state()
obj_dict['seed'] = self.seed

if getattr(self, 'output_constraints', None):
obj_dict['output_constraints'] = [{'class': const.__class__.__name__,
'state': tensordict_to_dict(const.state_dict())}
for const in self.output_constraints]

return obj_dict

@classmethod
def load_state(cls,
obj_dict: dict):
"""
Loads the state of the campaign from a dictionary.
Args:
cls (Campaign): The class object.
obj_dict (dict): A dictionary containing the campaign state.
Returns:
Campaign: A new campaign object with the loaded state.
"""

if 'objective' in obj_dict:
if obj_dict['objective']['name'] == 'Objective_Sequence':
new_objective = Objective_Sequence.load_state(obj_dict['objective'])
else:
obj_class = obj_class_dict[obj_dict['objective']['name']]
new_objective = obj_class.load_state(obj_dict['objective'])
else:
new_objective = None

new_campaign = cls(X_space=ParamSpace.load_state(obj_dict['X_space']),
target=[Target.load_state(t_dict) for t_dict in obj_dict['target']],
optimizer=BayesianOptimizer.load_state(obj_dict['optimizer']),
objective=new_objective,
seed=obj_dict['seed'])
new_campaign.data = pd.DataFrame(obj_dict['data'])
new_campaign.data.index = new_campaign.data.index.astype('int')

new_campaign.iter = new_campaign.data['Iteration'].astype('int').max()

if 'output_constraints' in obj_dict:
for const_dict in obj_dict['output_constraints']:
const = const_class_dict[const_dict['class']](new_campaign.target, **const_dict['state'])
new_campaign.constrain_outputs(const)

return new_campaign
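
To show how the new Campaign-level output constraints fit together with serialization, here is a hedged sketch. The Blank_Constraint signature (targets as the first argument) is inferred from load_state above, and the import paths plus the Target/Param_Continuous constructors are assumptions rather than part of this diff.

```python
# Hedged sketch of the new output-constraint workflow on Campaign; the
# Blank_Constraint signature is inferred from load_state, which rebuilds
# constraints as const_class(target, **state). Import paths are assumptions.
from obsidian.campaign import Campaign
from obsidian.parameters import Param_Continuous, ParamSpace, Target
from obsidian.constraints import Blank_Constraint

X_space = ParamSpace([Param_Continuous('Temperature', 20, 100)])
campaign = Campaign(X_space, Target('Yield', aim='max'), seed=0)

# Attach an output constraint after construction (or pass constraints=... to __init__)
campaign.constrain_outputs(Blank_Constraint(campaign.target))

# suggest() forwards self.output_constraints to the optimizer once it is fit,
# and save_state()/load_state() now round-trip them via const_class_dict.

# Constraints can be cleared at any time
campaign.clear_output_constraints()
```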
2 changes: 2 additions & 0 deletions obsidian/constraints/__init__.py
@@ -1,4 +1,6 @@
"""Constraints: Restrict the recommended space during optimization"""

from .base import *
from .input import *
from .output import *
from .config import *
20 changes: 20 additions & 0 deletions obsidian/constraints/base.py
@@ -0,0 +1,20 @@
"""Base class for obsidian constraints"""


from abc import abstractmethod, ABC
from torch.nn import Module


class Constraint(ABC, Module):
"""
Base class for constraints, which restrict the input or output space
of a model or optimization problem
"""

def __init__(self) -> None:
super().__init__()
return

@abstractmethod
def forward(self):
pass # pragma: no cover
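
Since Constraint is an abstract torch Module, custom constraints subclass it and implement forward(). Below is a hypothetical example, not part of this commit; the convention that non-positive values mean "feasible" follows BoTorch-style output constraints and is an assumption here.

```python
# Hypothetical Constraint subclass (not part of this commit); assumes the
# BoTorch-style convention that values <= 0 indicate a satisfied constraint.
import torch
from obsidian.constraints import Constraint


class Sum_Constraint(Constraint):
    """Constrain the sum over the last dimension of samples to be <= bound"""
    def __init__(self, bound: float) -> None:
        super().__init__()
        self.bound = bound

    def forward(self, samples: torch.Tensor) -> torch.Tensor:
        # Non-positive return values indicate a satisfied constraint
        return samples.sum(dim=-1) - self.bound
```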
17 changes: 17 additions & 0 deletions obsidian/constraints/config.py
@@ -0,0 +1,17 @@
"""Method pointers and config for constraints"""

from .input import (
Linear_Constraint,
BatchVariance_Constraint
)

from .output import (
Blank_Constraint,
L1_Constraint
)

const_class_dict = {'Linear_Constraint': Linear_Constraint,
'BatchVariance_Constraint': BatchVariance_Constraint,
'Blank_Constraint': Blank_Constraint,
'L1_Constraint': L1_Constraint
}
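
The const_class_dict mapping is what Campaign.load_state uses to rebuild serialized constraints by name. A small helper mirroring that round trip (the helper itself is hypothetical; state_dict comes from torch.nn.Module and tensordict_to_dict from obsidian.utils, as in campaign.py above):

```python
# Hypothetical helper mirroring Campaign.save_state/load_state above:
# serialize a constraint to a plain dict and rebuild it via const_class_dict.
from obsidian.constraints import const_class_dict
from obsidian.utils import tensordict_to_dict


def reload_constraint(const, target):
    """Round-trip a single output constraint through its serialized form"""
    const_dict = {'class': const.__class__.__name__,
                  'state': tensordict_to_dict(const.state_dict())}
    return const_class_dict[const_dict['class']](target, **const_dict['state'])
```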