From 6362f51315605c232bf5d4341e098a6c6c142b8a Mon Sep 17 00:00:00 2001
From: Lopa10ko
Date: Mon, 27 Jan 2025 14:03:28 +0300
Subject: [PATCH] fix: correct EXAMPLES_DATA_PATH import

---
 .../specific_strategy/LoRa_example.py   | 110 ++++++++----------
 .../ts_anomaly_detection_skab_bench.py  |   2 +-
 .../pipelines/abstract_pipeline.py      |   2 +-
 3 files changed, 51 insertions(+), 63 deletions(-)

diff --git a/examples/automl_example/custom_strategy/specific_strategy/LoRa_example.py b/examples/automl_example/custom_strategy/specific_strategy/LoRa_example.py
index 5d61bf933..361f78e35 100644
--- a/examples/automl_example/custom_strategy/specific_strategy/LoRa_example.py
+++ b/examples/automl_example/custom_strategy/specific_strategy/LoRa_example.py
@@ -2,78 +2,66 @@
 import torchvision.transforms as transforms
 
 from fedot_ind.core.architecture.pipelines.abstract_pipeline import ApiTemplate
 from fedot_ind.core.repository.config_repository import DEFAULT_COMPUTE_CONFIG, \
-    DEFAULT_AUTOML_LEARNING_CONFIG
+    DEFAULT_AUTOML_LEARNING_CONFIG, DEFAULT_CLF_AUTOML_CONFIG
+from fedot_ind.tools.serialisation.path_lib import EXAMPLES_DATA_PATH
 
-transform = transforms.Compose([
-    transforms.ToTensor(),
-    transforms.Normalize((0.1307,), (0.3081,))
-])
+if __name__ == '__main__':
+    import ssl
+    ssl._create_default_https_context = ssl._create_unverified_context
 
-# Load the MNIST train and test dataset
-train_data = (datasets.MNIST(
-    root="./examples/data",
-    train=True,
-    download=True,
-    transform=transform), 'torchvision_dataset')
+    transform = transforms.Compose([
+        transforms.ToTensor(),
+        transforms.Normalize((0.1307,), (0.3081,))
+    ])
 
-test_data = (datasets.MNIST(
-    root="./examples/data",
-    train=False,
-    download=True,
-    transform=transform), 'torchvision_dataset')
+    # Load the MNIST train and test dataset
+    train_data = (datasets.MNIST(
+        root=EXAMPLES_DATA_PATH,
+        train=True,
+        download=True,
+        transform=transform), 'torchvision_dataset')
 
-metric_names = ('f1', 'accuracy', 'precision', 'roc_auc')
+    test_data = (datasets.MNIST(
+        root=EXAMPLES_DATA_PATH,
+        train=False,
+        download=True,
+        transform=transform), 'torchvision_dataset')
 
-lora_params = dict(rank=2,
-                   sampling_share=0.5,
-                   lora_init='random',
-                   epochs=1,
-                   batch_size=10
-                   )
+    METRIC_NAMES = ('f1', 'accuracy', 'precision', 'roc_auc')
 
-api_config = dict(problem='classification',
-                  metric='accuracy',
-                  timeout=0.1,
-                  with_tuning=False,
-                  industrial_strategy='lora_strategy',
-                  industrial_strategy_params=lora_params,
-                  logging_level=20)
-AUTOML_LEARNING_STRATEGY = DEFAULT_AUTOML_LEARNING_CONFIG
-COMPUTE_CONFIG = DEFAULT_COMPUTE_CONFIG
-AUTOML_CONFIG = {'task': 'classification',
-                 'use_automl': True,
-                 'optimisation_strategy': {'optimisation_strategy': {'mutation_agent': 'bandit',
-                                                                     'mutation_strategy': 'growth_mutation_strategy'},
-                                           'optimisation_agent': 'Industrial'}}
+    DEFAULT_AUTOML_LEARNING_CONFIG['timeout'] = 0.1
+    AUTOML_LEARNING_STRATEGY = DEFAULT_AUTOML_LEARNING_CONFIG
+    COMPUTE_CONFIG = DEFAULT_COMPUTE_CONFIG
+    AUTOML_CONFIG = DEFAULT_CLF_AUTOML_CONFIG
 
-LEARNING_CONFIG = {'learning_strategy': 'from_scratch',
-                   'learning_strategy_params': AUTOML_LEARNING_STRATEGY,
-                   'optimisation_loss': {'quality_loss': 'accuracy'}}
+    LEARNING_CONFIG = {'learning_strategy': 'from_scratch',
+                       'learning_strategy_params': AUTOML_LEARNING_STRATEGY,
+                       'optimisation_loss': {'quality_loss': 'accuracy'}}
 
-INDUSTRIAL_PARAMS = {'rank': 2,
-                     'sampling_share': 0.5,
-                     'lora_init': 'random',
-                     'epochs': 1,
-                     'batch_size': 10,
-                     'data_type': 'tensor'
-                     }
+    INDUSTRIAL_PARAMS = {'rank': 2,
+                         'sampling_share': 0.5,
+                         'lora_init': 'random',
+                         'epochs': 1,
+                         'batch_size': 10,
+                         'data_type': 'tensor'
+                         }
 
-INDUSTRIAL_CONFIG = {'problem': 'classification',
-                     'strategy': 'lora_strategy',
-                     'strategy_params': INDUSTRIAL_PARAMS
-                     }
+    INDUSTRIAL_CONFIG = {'problem': 'classification',
+                         'strategy': 'lora_strategy',
+                         'strategy_params': INDUSTRIAL_PARAMS
+                         }
 
-API_CONFIG = {'industrial_config': INDUSTRIAL_CONFIG,
-              'automl_config': AUTOML_CONFIG,
-              'learning_config': LEARNING_CONFIG,
-              'compute_config': COMPUTE_CONFIG}
+    API_CONFIG = {'industrial_config': INDUSTRIAL_CONFIG,
+                  'automl_config': AUTOML_CONFIG,
+                  'learning_config': LEARNING_CONFIG,
+                  'compute_config': COMPUTE_CONFIG}
 
-dataset = dict(test_data=test_data, train_data=train_data)
+    dataset_dict = dict(test_data=(test_data[0].data.numpy(), test_data[0].targets.numpy()),
+                        train_data=(train_data[0].data.numpy(), train_data[0].targets.numpy()))
 
-industrial = ApiTemplate(api_config=API_CONFIG,
-                         metric_list=metric_names).eval(dataset=dataset)
-industrial.fit(train_data)
-predict = industrial.predict(test_data)
-_ = 1
+    industrial = ApiTemplate(api_config=API_CONFIG,
+                             metric_list=METRIC_NAMES).eval(dataset=dataset_dict)
+    industrial.fit(train_data)
+    predict = industrial.predict(test_data)
 
diff --git a/examples/real_world_examples/benchmark_example/detection/ts_anomaly_detection_skab_bench.py b/examples/real_world_examples/benchmark_example/detection/ts_anomaly_detection_skab_bench.py
index 7b7ec0960..e05065c15 100644
--- a/examples/real_world_examples/benchmark_example/detection/ts_anomaly_detection_skab_bench.py
+++ b/examples/real_world_examples/benchmark_example/detection/ts_anomaly_detection_skab_bench.py
@@ -2,7 +2,7 @@
 
 from fedot_ind.core.architecture.pipelines.abstract_pipeline import ApiTemplate
 from fedot_ind.core.repository.config_repository import DEFAULT_COMPUTE_CONFIG, DEFAULT_CLF_AUTOML_CONFIG
-from tools.test_load_data import EXAMPLES_DATA_PATH
+from fedot_ind.tools.serialisation.path_lib import EXAMPLES_DATA_PATH
 
 
 def prepare_skab_benchmark():
diff --git a/fedot_ind/core/architecture/pipelines/abstract_pipeline.py b/fedot_ind/core/architecture/pipelines/abstract_pipeline.py
index 30f9b764d..d0c89abf7 100644
--- a/fedot_ind/core/architecture/pipelines/abstract_pipeline.py
+++ b/fedot_ind/core/architecture/pipelines/abstract_pipeline.py
@@ -14,7 +14,7 @@
 from fedot_ind.core.repository.initializer_industrial_models import IndustrialModels
 from fedot_ind.core.repository.model_repository import NEURAL_MODEL
 from fedot_ind.tools.loader import DataLoader
-from tools.test_load_data import EXAMPLES_DATA_PATH
+from fedot_ind.tools.serialisation.path_lib import EXAMPLES_DATA_PATH
 
 BENCHMARK = 'M4'