Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add PythonicWorkflow #8151

Merged
merged 44 commits into from
Nov 27, 2024
Merged
Show file tree
Hide file tree
Changes from 42 commits
Commits
Show all changes
44 commits
Select commit Hold shift + click to select a range
44752e6
Removed CPU randn() from schedulers
borisfom Oct 12, 2024
1bf3edd
Merge branch 'dev' into fix-schedulers
borisfom Oct 14, 2024
df213e6
Merge remote-tracking branch 'origin/dev' into fix-schedulers
borisfom Oct 14, 2024
f5fa56b
Merge branch 'fix-schedulers' of github.com:borisfom/MONAI into fix-s…
borisfom Oct 14, 2024
1893375
workaround for #8149
KumoLiu Oct 15, 2024
1c126a9
add `PythonicWorkflow`
KumoLiu Oct 15, 2024
f431f6c
Merge remote-tracking branch 'origin/dev' into pythonicworkflow
KumoLiu Oct 15, 2024
7228989
minor update
KumoLiu Oct 15, 2024
cf31a38
support for multipy properties file
KumoLiu Oct 17, 2024
b94b2a0
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 17, 2024
7248c2b
minor fix
KumoLiu Oct 18, 2024
800dd4e
Merge branch 'pythonicworkflow' of https://github.com/KumoLiu/MONAI i…
KumoLiu Oct 18, 2024
17da2ae
Merge branch 'dev' into pythonicworkflow
KumoLiu Nov 7, 2024
260cb63
Merge remote-tracking branch 'yunl/pythonicworkflow' into pythonicwor…
KumoLiu Nov 7, 2024
1b39551
Merge remote-tracking branch 'origin/dev' into pythonicworkflow
KumoLiu Nov 11, 2024
f99763d
add test case
KumoLiu Nov 11, 2024
6441466
fix format
KumoLiu Nov 11, 2024
c2e2dad
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 11, 2024
17e4f32
add docstring
KumoLiu Nov 11, 2024
d1678b0
Merge branch 'pythonicworkflow' of https://github.com/KumoLiu/MONAI i…
KumoLiu Nov 11, 2024
6b2817e
fix format
KumoLiu Nov 11, 2024
6eed10d
update docstring
KumoLiu Nov 11, 2024
f31e39d
fix mypy
KumoLiu Nov 11, 2024
7f5c43b
Update monai/bundle/workflows.py
KumoLiu Nov 13, 2024
9c3155f
address comments
KumoLiu Nov 13, 2024
ef41603
fix format
KumoLiu Nov 13, 2024
891b9e3
Merge branch 'dev' into pythonicworkflow
KumoLiu Nov 13, 2024
4857445
Merge branch 'dev' into pythonicworkflow
KumoLiu Nov 14, 2024
9acad6b
fix ci
KumoLiu Nov 14, 2024
78d6da7
Merge remote-tracking branch 'yunl/pythonicworkflow' into pythonicwor…
KumoLiu Nov 14, 2024
99af7fd
address comments
KumoLiu Nov 14, 2024
f933263
address comments
KumoLiu Nov 15, 2024
4724f68
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 15, 2024
50b1a50
address comments
KumoLiu Nov 15, 2024
150a60a
address comments
KumoLiu Nov 15, 2024
b0acb17
Merge branch 'pythonicworkflow' of https://github.com/KumoLiu/MONAI i…
KumoLiu Nov 15, 2024
c026441
fix format
KumoLiu Nov 15, 2024
5ffc03d
Merge branch 'dev' into pythonicworkflow
KumoLiu Nov 15, 2024
9f9ebcd
Merge remote-tracking branch 'yunl/pythonicworkflow' into pythonicwor…
KumoLiu Nov 15, 2024
ec202e0
update `compute_capabilities_after`
KumoLiu Nov 15, 2024
70dc9b5
Merge branch 'dev' into pythonicworkflow
KumoLiu Nov 25, 2024
d40ec95
Merge branch 'dev' into pythonicworkflow
KumoLiu Nov 27, 2024
42d5d0b
fix docstring
KumoLiu Nov 27, 2024
1f136f9
Merge branch 'pythonicworkflow' of https://github.com/KumoLiu/MONAI i…
KumoLiu Nov 27, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion monai/bundle/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,4 +43,4 @@
MACRO_KEY,
load_bundle_config,
)
from .workflows import BundleWorkflow, ConfigWorkflow
from .workflows import BundleWorkflow, ConfigWorkflow, PythonicWorkflow
190 changes: 170 additions & 20 deletions monai/bundle/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,12 +44,18 @@ class BundleWorkflow(ABC):
workflow_type: specifies the workflow type: "train" or "training" for a training workflow,
or "infer", "inference", "eval", "evaluation" for an inference workflow,
other unsupported string will raise a ValueError.
default to `train` for train workflow.
default to `None` for only using meta properties.
workflow: specifies the workflow type: "train" or "training" for a training workflow,
or "infer", "inference", "eval", "evaluation" for an inference workflow,
other unsupported string will raise a ValueError.
default to `None` for common workflow.
properties_path: the path to the JSON file of properties.
properties_path: the path to the JSON file of properties. If `workflow_type` is specified, properties will be
loaded from the file based on the provided `workflow_type` and meta. If no `workflow_type` is specified,
properties will default to loading from "meta". If the specified file is unavailable, default properties
will be sourced from "monai/bundle/properties.py" based on the workflow_type:
For a training workflow, properties load from `TrainProperties` and `MetaProperties`.
For an inference workflow, properties load from `InferProperties` and `MetaProperties`.
For workflow_type = None : only `MetaProperties` will be loaded.
Nic-Ma marked this conversation as resolved.
Show resolved Hide resolved
meta_file: filepath of the metadata file, if this is a list of file paths, their contents will be merged in order.
logging_file: config file for `logging` module in the program. for more details:
https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig.
Expand Down Expand Up @@ -97,29 +103,50 @@ def __init__(
meta_file = None

workflow_type = workflow if workflow is not None else workflow_type
if workflow_type is None and properties_path is None:
self.properties = copy(MetaProperties)
self.workflow_type = None
self.meta_file = meta_file
return
if workflow_type is not None:
if workflow_type.lower() in self.supported_train_type:
workflow_type = "train"
elif workflow_type.lower() in self.supported_infer_type:
workflow_type = "infer"
else:
raise ValueError(f"Unsupported workflow type: '{workflow_type}'.")

if properties_path is not None:
properties_path = Path(properties_path)
if not properties_path.is_file():
raise ValueError(f"Property file {properties_path} does not exist.")
with open(properties_path) as json_file:
self.properties = json.load(json_file)
self.workflow_type = None
self.meta_file = meta_file
return
if workflow_type.lower() in self.supported_train_type: # type: ignore[union-attr]
self.properties = {**TrainProperties, **MetaProperties}
self.workflow_type = "train"
elif workflow_type.lower() in self.supported_infer_type: # type: ignore[union-attr]
self.properties = {**InferProperties, **MetaProperties}
self.workflow_type = "infer"
try:
properties = json.load(json_file)
self.properties: dict = {}
if workflow_type is not None and workflow_type in properties:
self.properties = properties[workflow_type]
if "meta" in properties:
self.properties.update(properties["meta"])
elif workflow_type is None:
if "meta" in properties:
self.properties = properties["meta"]
logger.info(
"No workflow type specified, default to load meta properties from property file."
)
else:
logger.warning("No 'meta' key found in properties while workflow_type is None.")
except KeyError as e:
raise ValueError(f"{workflow_type} not found in property file {properties_path}") from e
except json.JSONDecodeError as e:
raise ValueError(f"Error decoding JSON from property file {properties_path}") from e
else:
raise ValueError(f"Unsupported workflow type: '{workflow_type}'.")
if workflow_type == "train":
self.properties = {**TrainProperties, **MetaProperties}
elif workflow_type == "infer":
self.properties = {**InferProperties, **MetaProperties}
elif workflow_type is None:
self.properties = copy(MetaProperties)
logger.info("No workflow type and property file specified, default to 'meta' properties.")
else:
raise ValueError(f"Unsupported workflow type: '{workflow_type}'.")

self.workflow_type = workflow_type
self.meta_file = meta_file

@abstractmethod
Expand Down Expand Up @@ -226,6 +253,124 @@ def check_properties(self) -> list[str] | None:
return [n for n, p in self.properties.items() if p.get(BundleProperty.REQUIRED, False) and not hasattr(self, n)]


class PythonicWorkflow(BundleWorkflow):
    """
    Base class for the pythonic workflow specification in bundle; it can be a training, evaluation or inference workflow.
    It defines the basic interfaces for the bundle workflow behavior: `initialize`, `finalize`, etc.
    This also provides the interface to get / set public properties to interact with a bundle workflow through
    defined `get_<property>` accessor methods or directly defining members of the object.
    For how to set the properties, users can define the `_set_<property>` methods or directly set the members of the object.
    The `initialize` method is called to set up the workflow before running. This method sets up internal state
    and prepares properties. If properties are modified after the workflow has been initialized, `self._is_initialized`
    is set to `False`. Before running the workflow again, `initialize` should be called to ensure that the workflow is
    properly set up with the new property values.

    Args:
        workflow_type: specifies the workflow type: "train" or "training" for a training workflow,
            or "infer", "inference", "eval", "evaluation" for an inference workflow,
            other unsupported string will raise a ValueError.
            default to `None` for only using meta properties.
        properties_path: the path to the JSON file of properties. If `workflow_type` is specified, properties will be
            loaded from the file based on the provided `workflow_type` and meta. If no `workflow_type` is specified,
            properties will default to loading from "meta". If the specified file is unavailable, default properties
            will be sourced from "monai/bundle/properties.py" based on the workflow_type:
            For a training workflow, properties load from `TrainProperties` and `MetaProperties`.
            For an inference workflow, properties load from `InferProperties` and `MetaProperties`.
            For workflow_type = None: only `MetaProperties` will be loaded.
        config_file: path to the config file, typically used to store hyperparameters.
        meta_file: filepath of the metadata file, if this is a list of file paths, their contents will be merged in order.
        logging_file: config file for `logging` module in the program. for more details:
            https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig.

    """

    supported_train_type: tuple = ("train", "training")
    supported_infer_type: tuple = ("infer", "inference", "eval", "evaluation")

    def __init__(
        self,
        workflow_type: str | None = None,
        properties_path: PathLike | None = None,
        config_file: str | Sequence[str] | None = None,
        meta_file: str | Sequence[str] | None = None,
        logging_file: str | None = None,
        **override: Any,
    ):
        # default to "<cwd>/metadata.json" so a bundle executed from its root needs no explicit meta_file
        meta_file = str(Path(os.getcwd()) / "metadata.json") if meta_file is None else meta_file
        super().__init__(
            workflow_type=workflow_type, properties_path=properties_path, meta_file=meta_file, logging_file=logging_file
        )
        self._props_vals: dict = {}  # cache of lazily generated property values, reset by `initialize`
        self._set_props_vals: dict = {}  # values explicitly set by the user; preserved across `initialize`
        self.parser = ConfigParser()
        if config_file is not None:
            self.parser.read_config(f=config_file)
        if self.meta_file is not None:
            self.parser.read_meta(f=self.meta_file)

        # the rest key-values in the args are used to override config content
        self.parser.update(pairs=override)
        self._is_initialized: bool = False

    def initialize(self, *args: Any, **kwargs: Any) -> Any:
        """
        Initialize the bundle workflow before running: clear the cache of generated property
        values and mark the workflow as initialized. User-set values in `self._set_props_vals`
        are intentionally preserved.
        """
        self._props_vals = {}
        self._is_initialized = True

    def _get_property(self, name: str, property: dict) -> Any:
        """
        With specified property name and information, get the expected property value.
        If the property is already generated, return from the bucket directly.
        If user explicitly set the property, return it directly.
        Otherwise, generate the expected property as a class private property with prefix "_".

        Args:
            name: the name of target property.
            property: other information for the target property, defined in `TrainProperties` or `InferProperties`.

        Raises:
            RuntimeError: if called before `initialize`.
            ValueError: if a required property has no `get_<name>` provider method.
        """
        if not self._is_initialized:
            raise RuntimeError("Please execute 'initialize' before getting any properties.")
        value = None
        if name in self._set_props_vals:
            # values explicitly set by the user take precedence
            value = self._set_props_vals[name]
        elif name in self._props_vals:
            # already generated during a previous access
            value = self._props_vals[name]
        elif name in self.parser.config[self.parser.meta_key]:  # type: ignore[index]
            # property appears in the metadata section of the parsed config; resolve it by its config id.
            # NOTE(review): assumes `name` is also a key of `self.properties` — confirm, a missing entry raises here.
            prop_id = self.properties.get(name, None).get(BundlePropertyConfig.ID, None)
            value = self.parser[prop_id]
        else:
            try:
                value = getattr(self, f"get_{name}")()
            except AttributeError as e:
                if property[BundleProperty.REQUIRED]:
                    raise ValueError(
                        f"unsupported property '{name}' is required in the bundle properties, "
                        f"need to implement a method 'get_{name}' to provide the property."
                    ) from e
        self._props_vals[name] = value
        return value

    def _set_property(self, name: str, property: dict, value: Any) -> Any:
        """
        With specified property name and information, set value for the expected property.
        Stores user-reset initialized objects that should not be re-initialized and marks the workflow as not initialized.

        Args:
            name: the name of target property.
            property: other information for the target property, defined in `TrainProperties` or `InferProperties`.
            value: value to set for the property.

        """
        self._set_props_vals[name] = value
        # require `initialize` to run again so dependent cached properties are regenerated
        self._is_initialized = False


class ConfigWorkflow(BundleWorkflow):
"""
Specification for the config-based bundle workflow.
Expand Down Expand Up @@ -262,7 +407,13 @@ class ConfigWorkflow(BundleWorkflow):
or "infer", "inference", "eval", "evaluation" for an inference workflow,
other unsupported string will raise a ValueError.
default to `None` for common workflow.
properties_path: the path to the JSON file of properties.
properties_path: the path to the JSON file of properties. If `workflow_type` is specified, properties will be
loaded from the file based on the provided `workflow_type` and meta. If no `workflow_type` is specified,
properties will default to loading from "train". If the specified file is unavailable, default properties
will be sourced from "monai/bundle/properties.py" based on the workflow_type:
For a training workflow, properties load from `TrainProperties` and `MetaProperties`.
For an inference workflow, properties load from `InferProperties` and `MetaProperties`.
For workflow_type = None : only `MetaProperties` will be loaded.
override: id-value pairs to override or add the corresponding config content.
e.g. ``--net#input_chns 42``, ``--net %/data/other.json#net_arg``

Expand Down Expand Up @@ -324,7 +475,6 @@ def __init__(
self.parser.read_config(f=config_file)
if self.meta_file is not None:
self.parser.read_meta(f=self.meta_file)

# the rest key-values in the _args are to override config content
self.parser.update(pairs=override)
self.init_id = init_id
Expand Down
6 changes: 3 additions & 3 deletions monai/utils/module.py
Original file line number Diff line number Diff line change
Expand Up @@ -649,7 +649,7 @@ def compute_capabilities_after(major: int, minor: int = 0, current_ver_string: s
current_ver_string: if None, the current system GPU CUDA compute capability will be used.

Returns:
True if the current system GPU CUDA compute capability is greater than the specified version.
True if the current system GPU CUDA compute capability is greater than or equal to the specified version.
"""
if current_ver_string is None:
cuda_available = torch.cuda.is_available()
Expand All @@ -667,11 +667,11 @@ def compute_capabilities_after(major: int, minor: int = 0, current_ver_string: s

ver, has_ver = optional_import("packaging.version", name="parse")
if has_ver:
return ver(".".join((f"{major}", f"{minor}"))) < ver(f"{current_ver_string}") # type: ignore
return ver(".".join((f"{major}", f"{minor}"))) <= ver(f"{current_ver_string}") # type: ignore
parts = f"{current_ver_string}".split("+", 1)[0].split(".", 2)
while len(parts) < 2:
parts += ["0"]
c_major, c_minor = parts[:2]
c_mn = int(c_major), int(c_minor)
mn = int(major), int(minor)
return c_mn >= mn
return c_mn > mn
64 changes: 62 additions & 2 deletions tests/nonconfig_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

import torch

from monai.bundle import BundleWorkflow
from monai.bundle import BundleWorkflow, PythonicWorkflow
from monai.data import DataLoader, Dataset
from monai.engines import SupervisedEvaluator
from monai.inferers import SlidingWindowInferer
Expand All @@ -26,8 +26,9 @@
LoadImaged,
SaveImaged,
ScaleIntensityd,
ScaleIntensityRanged,
)
from monai.utils import BundleProperty, set_determinism
from monai.utils import BundleProperty, CommonKeys, set_determinism


class NonConfigWorkflow(BundleWorkflow):
Expand Down Expand Up @@ -176,3 +177,62 @@ def _set_property(self, name, property, value):
self._numpy_version = value
elif property[BundleProperty.REQUIRED]:
raise ValueError(f"unsupported property '{name}' is required in the bundle properties.")


class PythonicWorkflowImpl(PythonicWorkflow):
    """
    Test class simulates the bundle workflow defined by Python script directly.
    """

    def __init__(
        self,
        workflow_type: str = "inference",
        config_file: str | None = None,
        properties_path: str | None = None,
        meta_file: str | None = None,
    ):
        super().__init__(
            workflow_type=workflow_type, properties_path=properties_path, config_file=config_file, meta_file=meta_file
        )
        # single-item data bucket: callers place the input image here and read predictions back from it
        self.dataflow: dict = {}

    def initialize(self):
        """Build the network, transforms and dataset used by `run`."""
        self._props_vals = {}
        self._is_initialized = True
        pre_transforms = Compose(
            [
                EnsureChannelFirstd(keys=["image"]),
                ScaleIntensityd(keys="image"),
                ScaleIntensityRanged(keys="image", a_min=-57, a_max=164, b_min=0.0, b_max=1.0, clip=True),
            ]
        )
        self.dataset = Dataset(data=[self.dataflow], transform=pre_transforms)
        self.net = UNet(
            spatial_dims=3,
            in_channels=1,
            out_channels=2,
            channels=(16, 32, 64, 128),
            strides=(2, 2, 2),
            num_res_units=2,
        ).to(self.device)
        self.postprocessing = Compose([Activationsd(keys="pred", softmax=True), AsDiscreted(keys="pred", argmax=True)])

    def run(self):
        """Run inference on the single item held in `self.dataflow` and store the prediction back into it."""
        sample = self.dataset[0]
        image = sample[CommonKeys.IMAGE].unsqueeze(0).to(self.device)
        self.net.eval()
        with torch.no_grad():
            sample[CommonKeys.PRED] = self.inferer(image, self.net)
        self.dataflow.update({CommonKeys.PRED: self.postprocessing(sample)[CommonKeys.PRED]})

    def finalize(self):
        """Nothing to release for this test workflow."""
        pass

    def get_bundle_root(self):
        """Bundle root for the test workflow is the current directory."""
        return "."

    def get_device(self):
        """Pick GPU when available, otherwise fall back to CPU."""
        backend = "cuda" if torch.cuda.is_available() else "cpu"
        return torch.device(backend)

    def get_inferer(self):
        """Sliding-window inferer; `roi_size` comes from the parsed bundle config."""
        return SlidingWindowInferer(roi_size=self.parser.roi_size, sw_batch_size=1, overlap=0)
2 changes: 1 addition & 1 deletion tests/test_bundle_trt_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
@skip_if_windows
@skip_if_no_cuda
@skip_if_quick
@SkipIfBeforeComputeCapabilityVersion((7, 0))
@SkipIfBeforeComputeCapabilityVersion((7, 5))
class TestTRTExport(unittest.TestCase):

def setUp(self):
Expand Down
Loading
Loading