diff --git a/src/deepsparse/__init__.py b/src/deepsparse/__init__.py index 46c49b236d..a4f5ba1e66 100644 --- a/src/deepsparse/__init__.py +++ b/src/deepsparse/__init__.py @@ -34,7 +34,6 @@ from .pipeline_config import * from .tasks import * from .pipeline import * -from .loggers import * from .version import __version__, is_release from .analytics import deepsparse_analytics as _analytics from .subgraph_execute import * diff --git a/src/deepsparse/legacy/base_pipeline.py b/src/deepsparse/legacy/base_pipeline.py index c5d006fc80..08f49c4e03 100644 --- a/src/deepsparse/legacy/base_pipeline.py +++ b/src/deepsparse/legacy/base_pipeline.py @@ -19,10 +19,10 @@ from pydantic import BaseModel from deepsparse import Context +from deepsparse.legacy.loggers.base_logger import BaseLogger +from deepsparse.legacy.loggers.build_logger import logger_from_config +from deepsparse.legacy.loggers.constants import validate_identifier from deepsparse.legacy.tasks import SupportedTasks, dynamic_import_task -from deepsparse.loggers.base_logger import BaseLogger -from deepsparse.loggers.build_logger import logger_from_config -from deepsparse.loggers.constants import validate_identifier __all__ = [ diff --git a/src/deepsparse/legacy/loggers/__init__.py b/src/deepsparse/legacy/loggers/__init__.py new file mode 100644 index 0000000000..1919452186 --- /dev/null +++ b/src/deepsparse/legacy/loggers/__init__.py @@ -0,0 +1,31 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# flake8: noqa +# isort: skip_file + +# base modules +from .base_logger import * +from .constants import * + + +# logger implementations +from .async_logger import * +from .function_logger import * +from .multi_logger import * +from .prometheus_logger import * +from .python_logger import * + +# functions for creating complex loggers +from .build_logger import * diff --git a/src/deepsparse/loggers/async_logger.py b/src/deepsparse/legacy/loggers/async_logger.py similarity index 97% rename from src/deepsparse/loggers/async_logger.py rename to src/deepsparse/legacy/loggers/async_logger.py index 892751f759..acc9700fc8 100644 --- a/src/deepsparse/loggers/async_logger.py +++ b/src/deepsparse/legacy/loggers/async_logger.py @@ -21,7 +21,7 @@ from concurrent.futures import Executor, ThreadPoolExecutor from typing import Any -from deepsparse.loggers import BaseLogger, MetricCategories +from deepsparse.legacy.loggers import BaseLogger, MetricCategories __all__ = ["AsyncLogger"] diff --git a/src/deepsparse/loggers/base_logger.py b/src/deepsparse/legacy/loggers/base_logger.py similarity index 100% rename from src/deepsparse/loggers/base_logger.py rename to src/deepsparse/legacy/loggers/base_logger.py diff --git a/src/deepsparse/loggers/build_logger.py b/src/deepsparse/legacy/loggers/build_logger.py similarity index 98% rename from src/deepsparse/loggers/build_logger.py rename to src/deepsparse/legacy/loggers/build_logger.py index 4fd3877415..bd905d0748 100644 --- a/src/deepsparse/loggers/build_logger.py +++ b/src/deepsparse/legacy/loggers/build_logger.py @@ -25,7 +25,7 @@ import yaml -from deepsparse.loggers import ( +from deepsparse.legacy.loggers import ( FROM_PREDEFINED, AsyncLogger, BaseLogger, @@ -34,14 +34,14 @@ PrometheusLogger, PythonLogger, ) -from deepsparse.loggers.config import ( +from deepsparse.legacy.loggers.config import ( MetricFunctionConfig, 
PipelineLoggingConfig, SystemLoggingConfig, SystemLoggingGroup, ) -from deepsparse.loggers.helpers import get_function_and_function_name -from deepsparse.loggers.metric_functions.registry import DATA_LOGGING_REGISTRY +from deepsparse.legacy.loggers.helpers import get_function_and_function_name +from deepsparse.legacy.loggers.metric_functions.registry import DATA_LOGGING_REGISTRY __all__ = [ diff --git a/src/deepsparse/legacy/loggers/config.py b/src/deepsparse/legacy/loggers/config.py new file mode 100644 index 0000000000..e878fe69db --- /dev/null +++ b/src/deepsparse/legacy/loggers/config.py @@ -0,0 +1,140 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, Field, validator + + +""" +Implements schemas for the configs pertaining to logging +""" + +__all__ = [ + "MetricFunctionConfig", + "SystemLoggingGroup", + "SystemLoggingConfig", + "PipelineLoggingConfig", +] + + +class MetricFunctionConfig(BaseModel): + """ + Holds logging configuration for a metric function + """ + + func: str = Field( + description="The name that specifies the metric function to be applied. " + "It can be: " + "1) a built-in function name " + "2) a dynamic import function of the form " + "':' " + "3) a framework function (e.g. 
np.mean or torch.mean)" + ) + + frequency: int = Field( + description="Specifies how often the function should be applied" + "(measured in numbers of inference calls).", + default=1, + ) + + target_loggers: List[str] = Field( + default=[], + description="Overrides the global logger configuration." + "If not an empty list, this configuration stops logging data " + "to globally specified loggers, and will only use " + "the subset of loggers (specified here by a list of their names).", + ) + + @validator("frequency") + def non_zero_frequency(cls, frequency: int) -> int: + if frequency <= 0: + raise ValueError( + f"Passed frequency: {frequency}, but " + "frequency must be a positive integer greater equal 1" + ) + return frequency + + +class SystemLoggingGroup(BaseModel): + """ + Holds the configuration for a single system logging group. + """ + + enable: bool = Field( + default=False, + description="Whether to enable the system logging group. Defaults to False", + ) + + target_loggers: List[str] = Field( + default=[], + description="The list of target loggers to log to. " + "If None, logs to all the available loggers", + ) + + +class SystemLoggingConfig(BaseModel): + # Global Logging Config + enable: bool = Field( + default=True, description="Whether to enable system logging. Defaults to True" + ) + + +class PipelineSystemLoggingConfig(SystemLoggingConfig): + """ + Holds the configuration for the system logging + in the context of a single pipeline + """ + + # Pipeline System Logging Groups + inference_details: SystemLoggingGroup = Field( + default=SystemLoggingGroup(enable=False), + description="The configuration group for the inference details " + "logging group. By default this group is disabled.", + ) + prediction_latency: SystemLoggingGroup = Field( + default=SystemLoggingGroup(enable=True), + description="The configuration group for the prediction latency " + "logging group. 
By default this group is enabled.", + ) + + +class PipelineLoggingConfig(BaseModel): + """ + Holds the complete configuration for the logging + in the context of a single pipeline + """ + + loggers: Dict[str, Optional[Dict[str, Any]]] = Field( + default={}, + description=( + "Optional dictionary of logger integration names to initialization kwargs." + "Set to {} for no loggers. Default is {}." + ), + ) + + system_logging: PipelineSystemLoggingConfig = Field( + default=PipelineSystemLoggingConfig(), + description="A model that holds the system logging configuration. " + "If not specified explicitly in the yaml config, the " + "default SystemLoggingConfig model is used.", + ) + + data_logging: Optional[Dict[str, List[MetricFunctionConfig]]] = Field( + default=None, + description="Specifies the rules for the data logging. " + "It relates a key (name of the logging target) " + "to a list of metric functions that are to be applied" + "to this target prior to logging.", + ) diff --git a/src/deepsparse/legacy/loggers/constants.py b/src/deepsparse/legacy/loggers/constants.py new file mode 100644 index 0000000000..9ba1b13459 --- /dev/null +++ b/src/deepsparse/legacy/loggers/constants.py @@ -0,0 +1,70 @@ +""" +Holds logging-related objects with constant values +""" +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dataclasses import dataclass +from enum import Enum + + +__all__ = [ + "MetricCategories", + "validate_identifier", + "SystemGroups", + "FROM_PREDEFINED", +] + +UNSUPPORTED_IDENTIFIER_CHARS = {".", "[", "]"} +FROM_PREDEFINED = "predefined" + + +class MetricCategories(Enum): + """ + Metric Taxonomy [for reference] + CATEGORY - category of metric (System/Data) + GROUP - logical group of metrics + METRIC - individual metric + """ + + # Categories + SYSTEM = "system" + DATA = "data" + + +@dataclass(frozen=True) +class SystemGroups: + # Pipeline System Groups + INFERENCE_DETAILS: str = "inference_details" + PREDICTION_LATENCY: str = "prediction_latency" + # Server System Groups + REQUEST_DETAILS: str = "request_details" + RESOURCE_UTILIZATION: str = "resource_utilization" + + +def validate_identifier(identifier: str): + """ + Makes sure that the identifier does not contain any + of the characters that would introduce ambiguity + when parsing the identifier + + :param identifier: a string that is used + to identify a log + """ + for char in UNSUPPORTED_IDENTIFIER_CHARS: + if char in identifier: + raise ValueError( + f"Logging identifier: {identifier} " + f"contains unsupported character {char}" + ) diff --git a/src/deepsparse/loggers/function_logger.py b/src/deepsparse/legacy/loggers/function_logger.py similarity index 95% rename from src/deepsparse/loggers/function_logger.py rename to src/deepsparse/legacy/loggers/function_logger.py index b0ed9b3129..4c6313a953 100644 --- a/src/deepsparse/loggers/function_logger.py +++ b/src/deepsparse/legacy/loggers/function_logger.py @@ -18,8 +18,12 @@ import textwrap from typing import Any, Callable -from deepsparse.loggers import BaseLogger, MetricCategories -from deepsparse.loggers.helpers import NO_MATCH, finalize_identifier, match_and_extract +from deepsparse.legacy.loggers import BaseLogger, MetricCategories +from deepsparse.legacy.loggers.helpers import ( + NO_MATCH, + finalize_identifier, + match_and_extract, +) 
__all__ = ["FunctionLogger"] diff --git a/src/deepsparse/loggers/helpers.py b/src/deepsparse/legacy/loggers/helpers.py similarity index 98% rename from src/deepsparse/loggers/helpers.py rename to src/deepsparse/legacy/loggers/helpers.py index fbf4c7ce7d..1de58f6591 100644 --- a/src/deepsparse/loggers/helpers.py +++ b/src/deepsparse/legacy/loggers/helpers.py @@ -24,9 +24,9 @@ import numpy -import deepsparse.loggers.metric_functions as built_ins -from deepsparse.loggers import MetricCategories -from deepsparse.loggers.metric_functions.utils import BatchResult +import deepsparse.legacy.loggers.metric_functions as built_ins +from deepsparse.legacy.loggers import MetricCategories +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult __all__ = [ diff --git a/src/deepsparse/loggers/metric_functions/__init__.py b/src/deepsparse/legacy/loggers/metric_functions/__init__.py similarity index 100% rename from src/deepsparse/loggers/metric_functions/__init__.py rename to src/deepsparse/legacy/loggers/metric_functions/__init__.py diff --git a/src/deepsparse/loggers/metric_functions/built_ins.py b/src/deepsparse/legacy/loggers/metric_functions/built_ins.py similarity index 95% rename from src/deepsparse/loggers/metric_functions/built_ins.py rename to src/deepsparse/legacy/loggers/metric_functions/built_ins.py index 480f930415..b0f855b883 100644 --- a/src/deepsparse/loggers/metric_functions/built_ins.py +++ b/src/deepsparse/legacy/loggers/metric_functions/built_ins.py @@ -16,10 +16,10 @@ """ from typing import Any, List, Union -from deepsparse.loggers.metric_functions.registry import ( +from deepsparse.legacy.loggers.metric_functions.registry import ( register as register_metric_function, ) -from deepsparse.loggers.metric_functions.utils import BatchResult +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult __all__ = ["identity", "predicted_classes", "predicted_top_score"] diff --git 
a/src/deepsparse/loggers/metric_functions/computer_vision/__init__.py b/src/deepsparse/legacy/loggers/metric_functions/computer_vision/__init__.py similarity index 100% rename from src/deepsparse/loggers/metric_functions/computer_vision/__init__.py rename to src/deepsparse/legacy/loggers/metric_functions/computer_vision/__init__.py diff --git a/src/deepsparse/loggers/metric_functions/computer_vision/built_ins.py b/src/deepsparse/legacy/loggers/metric_functions/computer_vision/built_ins.py similarity index 98% rename from src/deepsparse/loggers/metric_functions/computer_vision/built_ins.py rename to src/deepsparse/legacy/loggers/metric_functions/computer_vision/built_ins.py index 6711219b65..7078a9341e 100644 --- a/src/deepsparse/loggers/metric_functions/computer_vision/built_ins.py +++ b/src/deepsparse/legacy/loggers/metric_functions/computer_vision/built_ins.py @@ -19,10 +19,10 @@ import numpy -from deepsparse.loggers.metric_functions.registry import ( +from deepsparse.legacy.loggers.metric_functions.registry import ( register as register_metric_function, ) -from deepsparse.loggers.metric_functions.utils import BatchResult +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult __all__ = [ diff --git a/src/deepsparse/loggers/metric_functions/helpers/__init__.py b/src/deepsparse/legacy/loggers/metric_functions/helpers/__init__.py similarity index 100% rename from src/deepsparse/loggers/metric_functions/helpers/__init__.py rename to src/deepsparse/legacy/loggers/metric_functions/helpers/__init__.py diff --git a/src/deepsparse/loggers/metric_functions/helpers/config_generation.py b/src/deepsparse/legacy/loggers/metric_functions/helpers/config_generation.py similarity index 96% rename from src/deepsparse/loggers/metric_functions/helpers/config_generation.py rename to src/deepsparse/legacy/loggers/metric_functions/helpers/config_generation.py index c8bc977d75..d5fba75e42 100644 --- a/src/deepsparse/loggers/metric_functions/helpers/config_generation.py 
+++ b/src/deepsparse/legacy/loggers/metric_functions/helpers/config_generation.py @@ -25,9 +25,9 @@ import yaml -from deepsparse.loggers.build_logger import parse_out_predefined_function_groups -from deepsparse.loggers.config import MetricFunctionConfig -from deepsparse.loggers.metric_functions.registry import DATA_LOGGING_REGISTRY +from deepsparse.legacy.loggers.build_logger import parse_out_predefined_function_groups +from deepsparse.legacy.loggers.config import MetricFunctionConfig +from deepsparse.legacy.loggers.metric_functions.registry import DATA_LOGGING_REGISTRY _WHITESPACE = " " diff --git a/src/deepsparse/loggers/metric_functions/natural_language_processing/__init__.py b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/__init__.py similarity index 100% rename from src/deepsparse/loggers/metric_functions/natural_language_processing/__init__.py rename to src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/__init__.py diff --git a/src/deepsparse/loggers/metric_functions/natural_language_processing/built_ins.py b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/built_ins.py similarity index 93% rename from src/deepsparse/loggers/metric_functions/natural_language_processing/built_ins.py rename to src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/built_ins.py index cfcb3260cf..40cd885fd3 100644 --- a/src/deepsparse/loggers/metric_functions/natural_language_processing/built_ins.py +++ b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/built_ins.py @@ -16,10 +16,10 @@ """ from typing import List, Union -from deepsparse.loggers.metric_functions.registry import ( +from deepsparse.legacy.loggers.metric_functions.registry import ( register as register_metric_function, ) -from deepsparse.loggers.metric_functions.utils import BatchResult +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult __all__ = ["string_length", 
"percent_unknown_tokens"] diff --git a/src/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/__init__.py b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/__init__.py similarity index 100% rename from src/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/__init__.py rename to src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/__init__.py diff --git a/src/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/built_ins.py b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/built_ins.py similarity index 93% rename from src/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/built_ins.py rename to src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/built_ins.py index 42d712fbb8..3f609973ed 100644 --- a/src/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/built_ins.py +++ b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/built_ins.py @@ -15,10 +15,10 @@ Set of functions for logging metrics from the question answering pipeline """ -from deepsparse.loggers.metric_functions.natural_language_processing import ( +from deepsparse.legacy.loggers.metric_functions.natural_language_processing import ( string_length, ) -from deepsparse.loggers.metric_functions.registry import ( +from deepsparse.legacy.loggers.metric_functions.registry import ( register as register_metric_function, ) diff --git a/src/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/__init__.py b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/__init__.py similarity index 100% rename from 
src/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/__init__.py rename to src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/__init__.py diff --git a/src/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py similarity index 95% rename from src/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py rename to src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py index c47c6bece6..efbbb1c0a5 100644 --- a/src/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py +++ b/src/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py @@ -18,10 +18,10 @@ import numpy -from deepsparse.loggers.metric_functions.registry import ( +from deepsparse.legacy.loggers.metric_functions.registry import ( register as register_metric_function, ) -from deepsparse.loggers.metric_functions.utils import BatchResult +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult __all__ = ["mean_score", "percent_zero_labels"] diff --git a/src/deepsparse/loggers/metric_functions/registry.py b/src/deepsparse/legacy/loggers/metric_functions/registry.py similarity index 100% rename from src/deepsparse/loggers/metric_functions/registry.py rename to src/deepsparse/legacy/loggers/metric_functions/registry.py diff --git a/src/deepsparse/loggers/metric_functions/utils.py b/src/deepsparse/legacy/loggers/metric_functions/utils.py similarity index 100% rename from src/deepsparse/loggers/metric_functions/utils.py rename to src/deepsparse/legacy/loggers/metric_functions/utils.py diff --git a/src/deepsparse/loggers/multi_logger.py b/src/deepsparse/legacy/loggers/multi_logger.py 
similarity index 96% rename from src/deepsparse/loggers/multi_logger.py rename to src/deepsparse/legacy/loggers/multi_logger.py index 0ba54782fe..a6fe58cb67 100644 --- a/src/deepsparse/loggers/multi_logger.py +++ b/src/deepsparse/legacy/loggers/multi_logger.py @@ -19,7 +19,7 @@ import textwrap from typing import Any, List -from deepsparse.loggers import BaseLogger, MetricCategories +from deepsparse.legacy.loggers import BaseLogger, MetricCategories __all__ = ["MultiLogger"] diff --git a/src/deepsparse/loggers/prometheus_logger.py b/src/deepsparse/legacy/loggers/prometheus_logger.py similarity index 98% rename from src/deepsparse/loggers/prometheus_logger.py rename to src/deepsparse/legacy/loggers/prometheus_logger.py index 9c1d99c42f..ad54bd4024 100644 --- a/src/deepsparse/loggers/prometheus_logger.py +++ b/src/deepsparse/legacy/loggers/prometheus_logger.py @@ -22,8 +22,8 @@ from collections import defaultdict from typing import Any, Dict, Optional, Type, Union -from deepsparse.loggers import BaseLogger, MetricCategories, SystemGroups -from deepsparse.loggers.helpers import unwrap_logged_value +from deepsparse.legacy.loggers import BaseLogger, MetricCategories, SystemGroups +from deepsparse.legacy.loggers.helpers import unwrap_logged_value try: diff --git a/src/deepsparse/loggers/python_logger.py b/src/deepsparse/legacy/loggers/python_logger.py similarity index 95% rename from src/deepsparse/loggers/python_logger.py rename to src/deepsparse/legacy/loggers/python_logger.py index 0eb90e1252..8d4bec15f7 100644 --- a/src/deepsparse/loggers/python_logger.py +++ b/src/deepsparse/legacy/loggers/python_logger.py @@ -18,7 +18,7 @@ from datetime import datetime from typing import Any -from deepsparse.loggers import BaseLogger, MetricCategories +from deepsparse.legacy.loggers import BaseLogger, MetricCategories __all__ = ["PythonLogger"] diff --git a/src/deepsparse/legacy/pipeline.py b/src/deepsparse/legacy/pipeline.py index 318433f250..ca25869cc7 100644 --- 
a/src/deepsparse/legacy/pipeline.py +++ b/src/deepsparse/legacy/pipeline.py @@ -34,8 +34,8 @@ BasePipeline, SupportedTasks, ) -from deepsparse.loggers.base_logger import BaseLogger -from deepsparse.loggers.constants import MetricCategories, SystemGroups +from deepsparse.legacy.loggers.base_logger import BaseLogger +from deepsparse.legacy.loggers.constants import MetricCategories, SystemGroups from deepsparse.pipeline_config import PipelineConfig from deepsparse.utils import ( InferenceStages, diff --git a/src/deepsparse/loggers/__init__.py b/src/deepsparse/loggers/__init__.py index 1919452186..e7873e4fe9 100644 --- a/src/deepsparse/loggers/__init__.py +++ b/src/deepsparse/loggers/__init__.py @@ -13,19 +13,16 @@ # limitations under the License. # flake8: noqa -# isort: skip_file -# base modules -from .base_logger import * -from .constants import * - - -# logger implementations -from .async_logger import * -from .function_logger import * -from .multi_logger import * -from .prometheus_logger import * -from .python_logger import * - -# functions for creating complex loggers -from .build_logger import * +from .async_executor import AsyncExecutor +from .config import LoggingConfig +from .filters import FrequencyFilter, is_match_found +from .logger_factory import LoggerFactory +from .logger_manager import LoggerManager +from .root_logger import ( + LogType, + MetricLogger, + PerformanceLogger, + RootLogger, + SystemLogger, +) diff --git a/src/deepsparse/loggers/async_executor.py b/src/deepsparse/loggers/async_executor.py new file mode 100644 index 0000000000..734d7ffba6 --- /dev/null +++ b/src/deepsparse/loggers/async_executor.py @@ -0,0 +1,48 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import threading +from concurrent.futures import Executor, Future, ThreadPoolExecutor, wait +from typing import Callable, Optional + + +class AsyncExecutor: + def __init__(self, max_workers: int = 1, *args, **kwargs): + super().__init__(*args, **kwargs) + self._job_pool: Executor = ThreadPoolExecutor(max_workers=max_workers) + self._job_futures: list[Future] = [] + self._lock = threading.Lock() + + def submit( + self, func: Callable, callback: Optional[Callable] = None, /, *args, **kwargs + ): + job_future = self._job_pool.submit( + func, + *args, + **kwargs, + ) + with self._lock: + self._job_futures.append(job_future) + if callback is not None: + job_future.add_done_callback(callback) + + def wait_for_completion(self): + with self._lock: + + # Wait for all submitted jobs to complete + wait(self._job_futures) + + # Clear the list of job futures + self._job_futures.clear() diff --git a/src/deepsparse/loggers/config.py b/src/deepsparse/loggers/config.py index e878fe69db..15244ae85f 100644 --- a/src/deepsparse/loggers/config.py +++ b/src/deepsparse/loggers/config.py @@ -12,129 +12,102 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, List, Optional +from typing import Dict, List, Optional -from pydantic import BaseModel, Field, validator +import yaml +from pydantic import BaseModel, Extra, Field, validator -""" -Implements schemas for the configs pertaining to logging -""" - -__all__ = [ - "MetricFunctionConfig", - "SystemLoggingGroup", - "SystemLoggingConfig", - "PipelineLoggingConfig", -] +class LoggerConfig(BaseModel): + class Config: + extra = Extra.allow + name: str = Field( + default="PythonLogger", + description=( + "Path (/path/to/file:FooLogger) or name of loggers in " + "deepsparse/loggers/registry/__init__ path" + ), + ) + handler: Optional[Dict] = None -class MetricFunctionConfig(BaseModel): - """ - Holds logging configuration for a metric function - """ +class TargetConfig(BaseModel): func: str = Field( - description="The name that specifies the metric function to be applied. " - "It can be: " - "1) a built-in function name " - "2) a dynamic import function of the form " - "':' " - "3) a framework function (e.g. np.mean or torch.mean)" + default="identity", + description=( + ( + "Callable to apply to 'value' for dimensionality reduction. " + "func can be a path /path/to/file:func) or name of func in " + "deepsparse/loggers/registry/__init__ path" + ) + ), ) - frequency: int = Field( - description="Specifies how often the function should be applied" - "(measured in numbers of inference calls).", + freq: int = Field( default=1, + description="The rate to log. Log every N occurances", ) + uses: List[str] = Field(default=["default"], description="") - target_loggers: List[str] = Field( - default=[], - description="Overrides the global logger configuration." 
- "If not an empty list, this configuration stops logging data " - "to globally specified loggers, and will only use " - "the subset of loggers (specified here by a list of their names).", - ) - - @validator("frequency") - def non_zero_frequency(cls, frequency: int) -> int: - if frequency <= 0: - raise ValueError( - f"Passed frequency: {frequency}, but " - "frequency must be a positive integer greater equal 1" - ) - return frequency - - -class SystemLoggingGroup(BaseModel): - """ - Holds the configuration for a single system logging group. - """ - enable: bool = Field( - default=False, - description="Whether to enable the system logging group. Defaults to False", +class MetricTargetConfig(TargetConfig): + capture: Optional[List[str]] = Field( + None, + description=( + "Key of the output dict. Corresponding value will be logged. " + "The value can be a regex pattern" + ), ) - target_loggers: List[str] = Field( - default=[], - description="The list of target loggers to log to. " - "If None, logs to all the available loggers", - ) +class LoggingConfig(BaseModel): -class SystemLoggingConfig(BaseModel): - # Global Logging Config - enable: bool = Field( - default=True, description="Whether to enable system logging. Defaults to True" + loggers: Dict[str, LoggerConfig] = Field( + default=dict(default=LoggerConfig()), + description="Loggers to be Used", ) - -class PipelineSystemLoggingConfig(SystemLoggingConfig): - """ - Holds the configuration for the system logging - in the context of a single pipeline - """ - - # Pipeline System Logging Groups - inference_details: SystemLoggingGroup = Field( - default=SystemLoggingGroup(enable=False), - description="The configuration group for the inference details " - "logging group. By default this group is disabled.", - ) - prediction_latency: SystemLoggingGroup = Field( - default=SystemLoggingGroup(enable=True), - description="The configuration group for the prediction latency " - "logging group. 
By default this group is enabled.", + system: Dict[str, List[TargetConfig]] = Field( + default={"re:.*": [TargetConfig()]}, + description="Default python logging module logger", ) - -class PipelineLoggingConfig(BaseModel): - """ - Holds the complete configuration for the logging - in the context of a single pipeline - """ - - loggers: Dict[str, Optional[Dict[str, Any]]] = Field( - default={}, - description=( - "Optional dictionary of logger integration names to initialization kwargs." - "Set to {} for no loggers. Default is {}." - ), + performance: Dict[str, List[TargetConfig]] = Field( + default={"cpu": [TargetConfig()]}, + description="Performance level config", ) - system_logging: PipelineSystemLoggingConfig = Field( - default=PipelineSystemLoggingConfig(), - description="A model that holds the system logging configuration. " - "If not specified explicitly in the yaml config, the " - "default SystemLoggingConfig model is used.", + metric: Dict[str, List[MetricTargetConfig]] = Field( + default={"re:(?i)operator": [MetricTargetConfig()]}, + description="Metric level config", ) - data_logging: Optional[Dict[str, List[MetricFunctionConfig]]] = Field( - default=None, - description="Specifies the rules for the data logging. 
" - "It relates a key (name of the logging target) " - "to a list of metric functions that are to be applied" - "to this target prior to logging.", - ) + @validator("loggers", always=True) + def always_include_python_logger(cls, value): + if "default" not in value: + value["default"] = LoggerConfig() + return value + + @classmethod + def from_yaml(cls, yaml_path: str): + """Load from yaml file""" + with open(yaml_path, "r") as file: + yaml_content = yaml.safe_load(file) + return cls(**yaml_content) + + @classmethod + def from_str(cls, stringified_yaml: str): + """Load from stringified yaml""" + yaml_content = yaml.safe_load(stringified_yaml) + + return cls(**yaml_content) + + @classmethod + def from_config(cls, config: Optional[str] = None): + """Helper to load from file or string""" + if config: + if config.endswith(".yaml"): + return cls.from_yaml(config) + return cls.from_str(config) + return LoggingConfig() diff --git a/src/deepsparse/loggers/filters/__init__.py b/src/deepsparse/loggers/filters/__init__.py new file mode 100644 index 0000000000..5fccad84a9 --- /dev/null +++ b/src/deepsparse/loggers/filters/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# flake8: noqa
+
+from .frequency_filter import FrequencyFilter
+from .pattern import is_match_found, unravel_value_as_generator
diff --git a/src/deepsparse/loggers/filters/frequency_filter.py b/src/deepsparse/loggers/filters/frequency_filter.py
new file mode 100644
index 0000000000..9ce5cc1b79
--- /dev/null
+++ b/src/deepsparse/loggers/filters/frequency_filter.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import defaultdict
+from threading import Lock
+
+
+class FrequencyFilter:
+    def __init__(self):
+        self._lock = Lock()
+        self.counter = defaultdict(int)
+
+    def inc(self, tag: str, func: str) -> None:
+        """
+        Increment the counter with respect to tag and func
+
+        :param tag: Tag from the config file
+        :param func: Name of the func from the config file
+
+        """
+        stub = f"{tag}.{func}"
+        with self._lock:
+            self.counter[stub] += 1
+
+    def should_execute_on_frequency(self, tag: str, func: str, freq: int) -> bool:
+        """
+        Check if the given tag, func and freq satisfies the criteria to execute.
+        If the counter with respect to tag and func is a multiple of freq, then
+        execute
+
+        :param tag: Tag from the config file
+        :param func: Name of the func from the config file
+        :param freq: The rate to log from the config file
+
+        """
+
+        stub = f"{tag}.{func}"
+        with self._lock:
+            counter = self.counter[stub]
+
+        return counter % freq == 0
diff --git a/src/deepsparse/loggers/filters/pattern.py b/src/deepsparse/loggers/filters/pattern.py
new file mode 100644
index 0000000000..0211bbcc75
--- /dev/null
+++ b/src/deepsparse/loggers/filters/pattern.py
@@ -0,0 +1,133 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+from enum import Enum
+from typing import Any, Dict, Generator, List, Optional, Tuple
+
+import numpy
+
+
+SCALAR_TYPES_TUPLE = (
+    int,
+    float,
+    bool,
+    str,
+)
+
+
+def is_match_found(
+    pattern: str,
+    string: Optional[str] = None,
+):
+    """
+    Check if a given pattern matches a string.
+
+    - If the `pattern` starts with "re:", it treats the `pattern` as a regular
+     expression and searches for a match within the `string`.
+
+    :param pattern: (str): The pattern to match, which can be a simple string or a
+        regular expression (if it starts with "re:").
+    :param string: (str, optional): The string to test against the pattern.
+        Defaults to None.
+    :return: bool: True if a match is found, False otherwise.
+ + Examples: + >>> is_match_found("apple", "apple") + True + + >>> is_match_found("apple", "apple pie") + False + + >>> is_match_found("cherry", "apple pie") + False + + >>> is_match_found(r"re:\d{3}-\d{2}-\d{4}", "123-45-6789") # noqa + True + + >>> is_match_found(r"re:\d{3}-\d{2}-\d{4}", "abc-def-ghij") # noqa + False + """ + if string is not None: + if pattern.startswith("re:"): + comp = re.compile(pattern[3:]) + if comp.search(string) is not None: + return True + else: + if pattern == string: + return True + return False + + +def unravel_value_as_generator( + value: Any, capture: str = "" +) -> Generator[Tuple[str, Any], None, None]: + """ + Recursively unravel a nested data structure and yield tuples of capture paths + and corresponding values. + + :param value: The input value to be unraveled. + :param capture: A string representing the current capture path. + Defaults to an empty string. + + Yields: + Generator[Tuple[str, Any], None, None]: A generator that yields tuples + containing a capture path (string) and the corresponding value. + + Examples: + >>> data = {'a': [1, 2, {'b': 3}], 'c': 4} + >>> for path, val in unravel_value_as_generator(data): + ... 
print(f"Capture Path: {path}, Value: {val}") + Capture Path: ['a'], Value: [1, 2, {'b': 3}] + Capture Path: ['a'][0], Value: 1 + Capture Path: ['a'][1], Value: 2 + Capture Path: ['a'][2], Value: {'b': 3} + Capture Path: ['a'][2]['b'], Value: 3 + Capture Path: ['c'], Value: 4 + """ + + if isinstance(value, Dict): + for key, val in value.items(): + new_capture = capture + f"['{key}']" + yield from unravel_value_as_generator(val, new_capture) + + elif isinstance(value, numpy.ndarray): + yield (capture, value) + + elif isinstance(value, Tuple) and not isinstance(value, SCALAR_TYPES_TUPLE): + for idx, val in enumerate(value): + new_capture = capture + f"[{idx}]" + yield from unravel_value_as_generator(val, new_capture) + + elif isinstance(value, List): + for idx, val in enumerate(value): + new_capture = capture + f"[{idx}]" + yield from unravel_value_as_generator(val, new_capture) + + elif isinstance(value, Enum): + yield (capture.lstrip("."), value.value) + + elif isinstance(value, object) and not isinstance(value, SCALAR_TYPES_TUPLE): + + if hasattr(value, "__dict__"): + for prop, val in vars(value).items(): + new_capture = capture + f".{prop}" + yield from unravel_value_as_generator(val, new_capture) + + else: # None type only + yield (capture, None) + + else: + # scalars: (int, float, bool, str) + yield (capture, value) diff --git a/src/deepsparse/loggers/logger_factory.py b/src/deepsparse/loggers/logger_factory.py new file mode 100644 index 0000000000..8dfc2dddad --- /dev/null +++ b/src/deepsparse/loggers/logger_factory.py @@ -0,0 +1,114 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from typing import Any, Dict + +from deepsparse.loggers.registry.loggers.base_logger import BaseLogger +from deepsparse.loggers.root_logger import ( + LogType, + MetricLogger, + PerformanceLogger, + SystemLogger, +) +from deepsparse.loggers.utils import import_from_path, import_from_registry + + +ROOT_LOGGER_DICT = { + "system": SystemLogger, + "performance": PerformanceLogger, + "metric": MetricLogger, +} + + +class LoggerFactory: + """ + Factory to obtain root logger entrypoints given config file + + self.leaf_logger # dict{key=logger_id, value=instantiated logger obj} + self.root_logger_factory # dict{key=str, value=RootLogger} + self.logger # dict{key=LOG_TYPE.enum, value=RootLogger} + + """ + + def __init__(self, config: Dict[str, Dict]): + self.config = config + + self.leaf_logger = {} + self.root_logger_factory = {} + self.logger = {} + + self.build_leaf_logger() + self.build_root_logger() + + self.create() + + def build_leaf_logger( + self, + ) -> None: + """ + Build the leaf logegr singleton + + Notes: + name is the uuid of the logger, ex. 
default for + PythonLogger (specified by the user) + + """ + logger_config = self.config.get("loggers") + for name, init_args in logger_config.items(): + self.leaf_logger[name] = self.instantiate_logger( + name=init_args.pop("name"), + init_args=init_args, + ) + + def build_root_logger(self) -> None: + """ + Build the root logger factory instantiating the + root loggers with the leaf logger singleton and + its section of the config file + + """ + + for log_type, logger in ROOT_LOGGER_DICT.items(): + log_type_args = self.config.get(log_type) + if log_type_args is not None: + self.root_logger_factory[log_type] = logger( + config=self.config[log_type], + leaf_logger=self.leaf_logger, + ) + + def create(self) -> None: + """Create the entrypoints to access the root loggers""" + + self.logger = { + LogType.SYSTEM: self.root_logger_factory.get("system"), + LogType.PERFORMANCE: self.root_logger_factory.get("performance"), + LogType.METRIC: self.root_logger_factory.get("metric"), + } + + def instantiate_logger( + self, name: str, init_args: Dict[str, Any] = {} + ) -> BaseLogger: + """ + Instiate the logger from `name`, a path or the name of BaseLogger + in the registry. Path example: path/to/file.py:LoggerName + + """ + if ":" in name: + # Path example: path/to/file.py:LoggerName + logger = import_from_path(path=name) + return logger(**init_args) + + logger = import_from_registry(name) + return logger(**init_args) diff --git a/src/deepsparse/loggers/logger_manager.py b/src/deepsparse/loggers/logger_manager.py new file mode 100644 index 0000000000..adfad3a39a --- /dev/null +++ b/src/deepsparse/loggers/logger_manager.py @@ -0,0 +1,116 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from concurrent.futures import Future +from typing import Any + +from deepsparse.loggers.async_executor import AsyncExecutor +from deepsparse.loggers.config import LoggingConfig +from deepsparse.loggers.logger_factory import ( + LoggerFactory, + LogType, + MetricLogger, + PerformanceLogger, + SystemLogger, +) + + +ROOT_LOGGER = { + "system": SystemLogger, + "performance": PerformanceLogger, + "metric": MetricLogger, +} + + +class LoggerManager(AsyncExecutor, LoggerFactory): + + """ + Initialize loggers for Pipeline and create entrypoints to log + + Lifecycle of instantiation: + 1. Pydantic validation/parser + 2. In LoggerFactory, instantiate leaf logger as singleton and + use them to instantiate for system, performance + and metric root loggers + 3. 
In root logger instantiation, for each tag, func, freq, + generate a default dict to organize the params from the config to + facilliate filter rule matching (by tag, by freq, by capture) + + Entrypoints: + * .log -> log async to the root logger, selected by log_type + * .system -> log async to the system logger + * .performance -> log async to the performance logger + * .metric -> log async to the metric logger + + Note: + * To access what leaf loggers are being used + .root_logger_factory["system" or "performance" or "metric"] + * To access the frequency filter counters + .root_logger_factory[...].counter + + :param config: Path to yaml or stringified yaml + + """ + + def __init__(self, config: str = ""): + self.config = LoggingConfig.from_config(config).dict() + super().__init__(config=self.config) + + def log( + self, + *args, + **kwargs, + ): + self.submit( + self.run, + self.callback, + *args, + **kwargs, + ) + + def run(self, value: Any, tag: str, log_type: str, *args, **kwargs): + log_type = log_type.upper() + if log_type in LogType.__members__: + logger = self.logger.get(LogType[log_type]) + if logger: + logger.log(value=value, tag=tag, *args, **kwargs) + + def callback(self, future: Future): + exception = future.exception() + if exception is not None: + logging.error( + value=f"Exception occurred during async logging job: {repr(exception)}", + ) + + def system(self, *args, **kwargs): + self.log( + log_type="system", + *args, + **kwargs, + ) + + def performance(self, *args, **kwargs): + self.log( + log_type="performance", + *args, + **kwargs, + ) + + def metric(self, *args, **kwargs): + self.log( + log_type="metric", + *args, + **kwargs, + ) diff --git a/src/deepsparse/loggers/registry/__init__.py b/src/deepsparse/loggers/registry/__init__.py new file mode 100644 index 0000000000..cfd813d5de --- /dev/null +++ b/src/deepsparse/loggers/registry/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# flake8: noqa + +from .functions import average, identity, max +from .loggers import PrometheusLogger, PythonLogger diff --git a/src/deepsparse/loggers/registry/functions.py b/src/deepsparse/loggers/registry/functions.py new file mode 100644 index 0000000000..09fb9df789 --- /dev/null +++ b/src/deepsparse/loggers/registry/functions.py @@ -0,0 +1,60 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Callable, Iterable + +import numpy + + +__all__ = [ + "average", + "identity", + "max", +] + + +def identity(value: Any): + return value + + +def max(lst: Any): + return _apply_function_to_iterable(lst, numpy.max) + + +def average(lst: Any): + return _apply_function_to_iterable(lst, numpy.mean) + + +def _apply_function_to_iterable(iterable: Iterable, func: Callable) -> Any: + """ + Apply a callable to apply to an iterable. 
Used for dimensionality reduction
+    to output a scalar
+
+    :param iterable: An iterable
+    :param func: the function to apply to the iterable to return a scalar
+
+    Example:
+    # Apply numpy.mean to an iterable
+    _apply_function_to_iterable(iterable, numpy.mean)
+
+    """
+    if isinstance(iterable, Iterable) and len(iterable) > 0:
+        if not isinstance(iterable, numpy.ndarray):
+            iterable = numpy.array(iterable)
+
+        if numpy.can_cast(iterable.dtype, numpy.number):
+            arr = func(iterable)
+            return arr.item()
+
+    return iterable
diff --git a/src/deepsparse/loggers/registry/loggers/__init__.py b/src/deepsparse/loggers/registry/loggers/__init__.py
new file mode 100644
index 0000000000..69426db3d8
--- /dev/null
+++ b/src/deepsparse/loggers/registry/loggers/__init__.py
@@ -0,0 +1,19 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# flake8: noqa
+
+from .base_logger import BaseLogger
+from .prometheus_logger import PrometheusLogger
+from .python_logger import PythonLogger
diff --git a/src/deepsparse/loggers/registry/loggers/base_logger.py b/src/deepsparse/loggers/registry/loggers/base_logger.py
new file mode 100644
index 0000000000..d1667c3b6b
--- /dev/null
+++ b/src/deepsparse/loggers/registry/loggers/base_logger.py
@@ -0,0 +1,43 @@
+"""
+Base implementation of the logger
+"""
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from typing import Any, Optional + + +class BaseLogger(ABC): + """ + Generic BaseLogger abstract class meant to define interfaces + for the loggers that support various monitoring services APIs. + """ + + @abstractmethod + def log(self, tag: str, value: Any, log_type: Optional[str] = None, **kwargs): + """ + The main method to collect information from the pipeline + and then possibly process the information and pass it to + the monitoring service + + :param tag: The name of the item that is being logged. + :param value: The data structure that is logged + :param log_type: The root logger type + :param kwargs: Additional keyword arguments to pass to the logger + """ + raise NotImplementedError() + + def __str__(self): + return f"{self.__class__.__name__}" diff --git a/src/deepsparse/loggers/registry/loggers/prometheus_logger.py b/src/deepsparse/loggers/registry/loggers/prometheus_logger.py new file mode 100644 index 0000000000..476ae6e878 --- /dev/null +++ b/src/deepsparse/loggers/registry/loggers/prometheus_logger.py @@ -0,0 +1,257 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Implementation of the Prometheus Logger +""" +import logging +import os +import re +import warnings +from collections import defaultdict +from typing import Any, Dict, Optional, Type, Union + +from deepsparse.loggers.constants import SystemGroups +from deepsparse.loggers.registry.loggers.base_logger import BaseLogger + + +try: + from prometheus_client import ( + REGISTRY, + CollectorRegistry, + Counter, + Gauge, + Histogram, + Summary, + start_http_server, + write_to_textfile, + ) + + prometheus_import_error = None +except Exception as prometheus_import_err: + REGISTRY = None + Summary = None + Histogram = None + Gauge = None + Counter = None + CollectorRegistry = None + start_http_server = None + write_to_textfile = None + prometheus_import_error = prometheus_import_err + + +__all__ = ["PrometheusLogger"] + +_LOGGER = logging.getLogger(__name__) + +_NAMESPACE = "deepsparse" +_PrometheusMetric = Union[Histogram, Gauge, Summary, Counter] +_tag_TO_METRIC_TYPE = { + "prediction_latency": Histogram, + SystemGroups.RESOURCE_UTILIZATION: Gauge, + f"{SystemGroups.REQUEST_DETAILS}/successful_request": Counter, + f"{SystemGroups.REQUEST_DETAILS}/input_batch_size": Histogram, +} +_SUPPORTED_DATA_TYPES = (int, float) +_DESCRIPTION = "{metric_name} metric for tag: {tag} | log_type: {log_type}" + + +class PrometheusLogger(BaseLogger): + """ + DeepSparse logger that continuously exposes the collected logs over the + Prometheus python client at the specified port. + + :param port: the port used by the client. 
Default is 6100 + :param text_log_save_frequency: the frequency of saving the text log + files. E.g. if `text_log_save_frequency` = 10, text logs are + exported after every tenth forward pass. Default set to 10 + :param text_log_save_dir: the directory where the text log files + are saved. By default, the python working directory + :param text_log_file_name: the name of the text log file. + Default: `prometheus_logs.prom` + """ + + def __init__( + self, + port: int = 9090, + text_log_save_dir: str = os.getcwd(), + text_log_file_name: Optional[str] = None, + **_ignore_args, + ): + _check_prometheus_import() + + self.port = port + self.text_log_save_dir = text_log_save_dir + self.text_log_file_path = os.path.join( + text_log_save_dir, text_log_file_name or "prometheus_logs.prom" + ) + self._prometheus_metrics = defaultdict(str) + + self._setup_client() + + def log( + self, + tag: str, + value: Any, + log_type: str, + capture: Optional[str] = None, + **kwargs, + ): + """ + Collect information from the pipeline and pipe it them to the stdout + + :param tag: The name of the thing that is being logged. + :param value: The data structure that the logger is logging + :param log_type: The metric log_type that the log belongs to + :param kwargs: Additional keyword arguments to pass to the logger + """ + + pipeline_name = tag + prometheus_metric = self._get_prometheus_metric( + capture or tag, log_type, **kwargs + ) + if prometheus_metric is None: + warnings.warn( + f"The tag {tag} cannot be matched with any " + f"of the Prometheus metrics and will be ignored." 
+ ) + return + if pipeline_name: + prometheus_metric.labels(pipeline_name=pipeline_name).observe( + self._validate(value) + ) + else: + prometheus_metric.observe(self._validate(value)) + self._export_metrics_to_textfile() + + def _get_prometheus_metric( + self, + tag: str, + log_type: str, + **kwargs, + ) -> Optional[_PrometheusMetric]: + saved_metric = self._prometheus_metrics.get(tag) + if saved_metric is None: + return self._add_metric_to_registry(tag, log_type, **kwargs) + return saved_metric + + def _add_metric_to_registry( + self, + tag: str, + log_type: str, + **kwargs, + ) -> Optional[_PrometheusMetric]: + prometheus_metric = get_prometheus_metric(tag, log_type, REGISTRY, **kwargs) + self._prometheus_metrics[tag] = prometheus_metric + return prometheus_metric + + def __str__(self): + logger_info = f" port: {self.port}" + return f"{self.__class__.__name__}:\n{logger_info}" + + def _export_metrics_to_textfile(self): + # export the metrics to a text file with + # the specified frequency + os.makedirs(self.text_log_save_dir, exist_ok=True) + write_to_textfile(self.text_log_file_path, REGISTRY) + + def _setup_client(self): + # starts the Prometheus client + start_http_server(port=self.port) + _LOGGER.info(f"Prometheus client: started. Using port: {self.port}.") + + def _validate(self, value: Any) -> Any: + # make sure we are passing a value that is + # a valid metric by prometheus client's standards + if not isinstance(value, _SUPPORTED_DATA_TYPES): + raise ValueError( + "Prometheus logger expects the incoming values " + f"to be one of the type: {_SUPPORTED_DATA_TYPES}, " + f"but received: {type(value)}" + ) + return value + + +def get_prometheus_metric( + tag: str, log_type: str, registry: CollectorRegistry, **kwargs +) -> Optional["MetricWrapperBase"]: # noqa: F821 + """ + Get a Prometheus metric object for the given tag and log_type. + + :param tag: The name of the thing that is being logged. 
+ :param log_type: The metric log_type that the log belongs to + :param registry: The Prometheus registry to which the metric should be added + :return: The Prometheus metric object or None if the tag not supported + """ + + if log_type == "system": + metric = _get_metric_from_the_mapping(tag) + else: + metric = Summary + + if metric is None: + return None + + pipeline_name = tag + return metric( + name=format_tag(tag), + documentation=_DESCRIPTION.format( + metric_name=metric._type, tag=tag, log_type=log_type + ), + labelnames=["pipeline_name"] if pipeline_name else [], + registry=registry, + ) + + +def _get_metric_from_the_mapping( + tag: str, metric_type_mapping: Dict[str, str] = _tag_TO_METRIC_TYPE +) -> Optional[Type["MetricWrapperBase"]]: # noqa: F821 + for system_group_name, metric_type in metric_type_mapping.items(): + """ + Attempts to get the metric type given the tag and system_group_name. + There are two cases: + Case 1) If system_group_name contains both the group name and the tag, + e.g. "request_details/successful_request", the match requires the tag + to end with the system_group_name, + e.g. "pipeline_name/request_details/successful_request". + Case 2) If system_group_name contains only the group name, + e.g. "prediction_latency", + the match requires the system_group_name to be + contained within the tag + e.g. 
prediction_latency/pipeline_inputs + """ + if ("/" in system_group_name and tag.endswith(system_group_name)) or ( + system_group_name in tag + ): + return metric_type + + +def format_tag(tag: str, namespace: str = _NAMESPACE) -> str: + """ + Replace forbidden characters with `__` so that the tag + digested by prometheus adheres to + https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels + :param tag: The tag to be formatted + :return: The formatted tag + """ + return f"{namespace}_{re.sub(r'[^a-zA-Z0-9_]+', '__', tag).lower()}" + + +def _check_prometheus_import(): + if prometheus_import_error is not None: + _LOGGER.error( + "Attempting to instantiate a PrometheusLogger object but unable to import " + "prometheus. Check that prometheus requirements have been installed" + ) + raise prometheus_import_error diff --git a/src/deepsparse/loggers/registry/loggers/python_logger.py b/src/deepsparse/loggers/registry/loggers/python_logger.py new file mode 100644 index 0000000000..57e24f0aea --- /dev/null +++ b/src/deepsparse/loggers/registry/loggers/python_logger.py @@ -0,0 +1,73 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import logging +import os +from enum import Enum +from logging.handlers import RotatingFileHandler +from typing import Any, Dict, Optional + +from deepsparse.loggers.registry.loggers.base_logger import BaseLogger + + +class LoggerType(Enum): + STREAM = logging.StreamHandler + FILE = logging.FileHandler + ROTATING = RotatingFileHandler + + +def create_file_if_not_exists(filename): + if not os.path.exists(filename): + open(filename, "a").close() + + +class PythonLogger(BaseLogger): + def __init__( + self, + handler: Optional[Dict] = None, + ): + self.handler = handler + self.logger = logging.getLogger() # Root loggger + self.set_hander() + + def set_hander(self): + ... + + def log( + self, + value: Any, + tag: str, + log_type: str, + func: Optional[str] = None, + level: str = "info", + **kwargs, + ): + placeholders = f"[{log_type}.{tag}.{str(func)}]" + if (run_time := kwargs.get("time")) is not None: + placeholders += f"[⏱️{run_time}]" + + logger = getattr(self.logger, level) + logger(f"{placeholders}: {value}") + + +class CustomFormatter(logging.Formatter): + def format(self, record): + # Add your custom placeholders to the log record + record.placeholders = f"[{record.log_type}.{record.tag}.{str(record.func)}]" + if hasattr(record, "run_time"): + record.placeholders += f"[⏱️{record.run_time}]" + + # Use the original formatter to format the log message + return super().format(record) diff --git a/src/deepsparse/loggers/root_logger.py b/src/deepsparse/loggers/root_logger.py new file mode 100644 index 0000000000..f739b13aeb --- /dev/null +++ b/src/deepsparse/loggers/root_logger.py @@ -0,0 +1,311 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from collections import defaultdict
+from enum import Enum
+from typing import Any, Dict, List
+
+from deepsparse.loggers.filters.frequency_filter import FrequencyFilter
+from deepsparse.loggers.filters.pattern import (
+    is_match_found,
+    unravel_value_as_generator,
+)
+from deepsparse.loggers.utils import import_from_registry
+
+
+class LogType(Enum):
+    SYSTEM = "SYSTEM"
+    PERFORMANCE = "PERFORMANCE"
+    METRIC = "METRIC"
+
+
+class RootLogger(FrequencyFilter):
+    """
+    Child class for SystemLogger, PerformanceLogger, MetricLogger
+    All classes instantiated with RootLogger will have
+    its own FrequencyFilter
+
+    :param config: config with respect to
+        the log_type (LoggerConfig().dict().get(log_type))
+    :param leaf_logger: leaf logger singleton shared among other RootLogger
+
+    """
+
+    def __init__(self, config: Dict, leaf_logger: Dict):
+        super().__init__()
+        self.config = config
+        self.leaf_logger = leaf_logger
+        self.run_args = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
+        self.create()
+
+    def create(self):
+        """
+        Organize the config to facilitate .log call. Populate self.run_args
+
+        Note:
+
+        self.run_args = {
+            tag: {
+                func: {
+                    freq: [
+                        ([loggers], [capture]),
+                        ([loggers2], [capture2]),
+                        ...
+ ] + }, + func2: {...} + }, + tag2: {...} + } + + """ + for tag, func_args in self.config.items(): + for func_arg in func_args: + func = func_arg.get("func") + + leaf_loggers = [] + for logger_id in func_arg.get("uses", []): + leaf_loggers.append(self.leaf_logger[logger_id]) + + self.run_args[tag][func][func_arg.get("freq", 1)].append( + (leaf_loggers, func_arg.get("capture", [])) + ) + + def log( + self, + value: Any, + log_type: str, + tag: str, + *args, + **kwargs, + ): + """ + Send args to the leaf loggers if the given tag, func, freq are accpeted. Need to + pass three filters to be accepted. + + 1. Tag filter: the provided tag must be a subset or regex match with the + tags in the root logger config file + 2. The number of calls to the current self.log(...) must be a multiple of + freq from the config file wrt tag and func + 3. If capture is speficied in the config file (only for metric log), it must be + a subset or have a regex match + + If accepted, value=func(value) if func is provided, and pass this value to the + leaf loggers + + :param value: Any value to log, may be dimentionally reduced by func + :param log_type: String representing the root logger level + :param tag: Candidate id that will be used to filter out only the wanted log + :param capture: The property or dict key to record if match exists. 
+ If set to None no capture filter will be applied even if set in config + + """ + for tag_from_config, tag_run_args in self.run_args.items(): + self._execute_on_match_filter( + tag=tag, + value=value, + log_type=log_type, + tag_from_config=tag_from_config, + tag_run_args=tag_run_args, + *args, + **kwargs, + ) + + def _execute_on_match_filter( + self, tag: str, tag_from_config: str, tag_run_args: defaultdict, *args, **kwargs + ): + if is_match_found(tag_from_config, tag): + + # key: func_name, value: {freq: {...}} + for func_from_config, func_execute_args in tag_run_args.items(): + self._unwrap_execute_args( + tag_from_config=tag_from_config, + func_from_config=func_from_config, + func_execute_args=func_execute_args, + tag=tag, + *args, + **kwargs, + ) + + def _unwrap_execute_args( + self, + func_execute_args: defaultdict, + tag_from_config: str, + func_from_config: str, + *args, + **kwargs, + ): + """ + Increment the counter with respect to the matching tag and func from + the config file, and then unwrap func_execute_args. + + Args: + :param func_execute_args: defaultdict with frequency as key and tuple of + list loggers and list capture. Capture is set to None for non-metric log_type + + func_execute_args = + func: { + freq: [ + ([loggers], [capture]), + ([loggers2], [capture2]), + ... + ] + }, + :param tag_from_config: Tag from the config file + :param func_from_config: Func from the config file + + """ + + # increment the counter before iterating over each frequency + self.inc(tag_from_config, func_from_config) + + # key: freq, value = [ ([loggers], [capture]), ...
] + for freq_from_config, execute_args in func_execute_args.items(): + + # execute_arg = ([loggers], [capture]) + for execute_arg in execute_args: + + leaf_loggers, captures_from_config = execute_arg + self._execute_on_frequency_fitler( + tag_from_config=tag_from_config, + leaf_loggers=leaf_loggers, + captures_from_config=captures_from_config, + freq_from_config=freq_from_config, + func_from_config=func_from_config, + *args, + **kwargs, + ) + + def _execute_on_frequency_fitler( + self, + tag_from_config: str, + func_from_config: str, + freq_from_config: int, + captures_from_config: List[str], + value: Any, + *args, + **kwargs, + ): + """ + Proceed if + 1. Frequency filter is satisifed + 2. Capture filter is satisfied, if capture was given as + input arg to .log. + + :param tag_from_config: Tag from the config file + :param func_from_config: Func from the config file + :param freq_from_config: Freq from the config file + :param captures_from_config: Capture from the config file + :param capture: Input arg to .log, capture any matching attr/dict key + based on capture_from_config + :param value: Any value to log + """ + + # check if the given tag.func is a multiple of the counter + if self.should_execute_on_frequency( + tag_from_config, func_from_config, freq_from_config + ): + # Capture filter (filter by class prop or dict key) + if captures_from_config is None or len(captures_from_config) == 0: + self._apply_func_and_log( + value=value, + func_from_config=func_from_config, + *args, + **kwargs, + ) + + else: + for capture_from_config in captures_from_config: + for ( + capture, + value, + ) in unravel_value_as_generator(value): + # capture match cannot be exact match, bc value might be from + # a dict of a dict, + # so value["key1"]["key2"], -> capture=["key1"]["key2"] + if capture_from_config in capture or is_match_found( + capture_from_config, capture + ): + self._apply_func_and_log( + value=value, + func_from_config=func_from_config, + capture=capture, + *args, + 
**kwargs, + ) + + def _apply_func_and_log( + self, + value: Any, + func_from_config: str, + leaf_loggers: list, + *args, + **kwargs, + ): + """ + Call the root loggers after applying func to the + value + + :param value: Designated value to log + :param func_from_config: Func to use to reduce dimensionality + from the config file + :param leaf_loggers: Loggers to use from the config file + """ + if func_from_config is not None: + func_callable = import_from_registry(func_from_config) + value = func_callable(value) + + for leaf_logger in leaf_loggers: + leaf_logger.log( + value=value, + func=func_from_config, + *args, + **kwargs, + ) + + +class SystemLogger(RootLogger): + """ + Create Python level logging with handles + """ + + LOG_TYPE = "system" + + def log(self, *args, **kwargs): + super().log(log_type=self.LOG_TYPE, *args, **kwargs) + + +class PerformanceLogger(RootLogger): + """ + Create performance level (in-line pipeline) + logging with handles + """ + + LOG_TYPE = "performance" + + def log(self, *args, **kwargs): + super().log(log_type=self.LOG_TYPE, *args, **kwargs) + + +class MetricLogger(RootLogger): + """ + Create metric level (logged in LoggerMiddleware) + logging with handles + """ + + LOG_TYPE = "metric" + + def log(self, *args, **kwargs): + super().log(log_type=self.LOG_TYPE, *args, **kwargs) diff --git a/src/deepsparse/loggers/utils.py b/src/deepsparse/loggers/utils.py new file mode 100644 index 0000000000..de6c62c4e4 --- /dev/null +++ b/src/deepsparse/loggers/utils.py @@ -0,0 +1,64 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import importlib +import re +from typing import Any, Type + + +LOGGER_REGISTRY = "deepsparse.loggers.registry.__init__" + + +def import_from_registry(name: str) -> Type[Any]: + """ + Import `name` from the LOGGER_REGISTRY + + :param name: name of the function or class name in LOGGER_REGISTRY + :return: Function or class object + """ + module = importlib.import_module(LOGGER_REGISTRY) + try: + return getattr(module, name) + except AttributeError: + raise AttributeError( + f"Cannot import class/func with name '{name}' from {LOGGER_REGISTRY}" + ) + + +def import_from_path(path: str) -> Type[Any]: + """ + Import the module and the name of the function/class separated by : + + Examples: + path = "/path/to/file.py:func_name" + path = "/path/to/file:class_name" + + :param path: path including the file path and object name + :return Function or class object + + """ + path, class_name = path.split(":") + _path = path + + path = path.split(".py")[0] + path = re.sub(r"/+", ".", path) + try: + module = importlib.import_module(path) + except ImportError: + raise ImportError(f"Cannot find module with path {_path}") + + try: + return getattr(module, class_name) + except AttributeError: + raise AttributeError(f"Cannot find {class_name} in {_path}") diff --git a/src/deepsparse/middlewares/__init__.py b/src/deepsparse/middlewares/__init__.py index 0d4c186962..b8b5f73a29 100644 --- a/src/deepsparse/middlewares/__init__.py +++ b/src/deepsparse/middlewares/__init__.py @@ -14,5 +14,6 @@ # flake8: noqa +from .logger_middleware import LoggerMiddleware from .middleware import 
MiddlewareCallable, MiddlewareManager, MiddlewareSpec -from .timer_middleware import * +from .timer_middleware import IS_NESTED_KEY, NAME_KEY, TimerMiddleware diff --git a/src/deepsparse/middlewares/logger_middleware.py b/src/deepsparse/middlewares/logger_middleware.py new file mode 100644 index 0000000000..fa950377c1 --- /dev/null +++ b/src/deepsparse/middlewares/logger_middleware.py @@ -0,0 +1,47 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any + +from deepsparse.middlewares.middleware import MiddlewareCallable + + +NAME_KEY = "name" + + +class LoggerMiddleware(MiddlewareCallable): + def __init__( + self, + call_next: MiddlewareCallable, + identifier: str = "LoggerMiddleware", + ): + self.identifier: str = identifier + self.call_next: MiddlewareCallable = call_next + + def __call__(self, *args, **kwargs) -> Any: + + tag = kwargs.get(NAME_KEY) + + inference_state = kwargs.get("inference_state") + if inference_state and hasattr(inference_state, "logger"): + logger = inference_state.logger # metric logger + rtn = self.call_next(*args, **kwargs) + logger( + value=rtn, + tag=tag, + ) + + return rtn + + return self.call_next(*args, **kwargs) diff --git a/src/deepsparse/middlewares/timer_middleware.py b/src/deepsparse/middlewares/timer_middleware.py index 0d68e79a08..9cc6b97b32 100644 --- a/src/deepsparse/middlewares/timer_middleware.py +++ b/src/deepsparse/middlewares/timer_middleware.py @@ -17,10 +17,9 @@ from deepsparse.middlewares.middleware import MiddlewareCallable -__all__ = ["TimerMiddleware", "IS_NESTED_KEY", "NAME_KEY"] - IS_NESTED_KEY = "is_nested" NAME_KEY = "name" +INFERENCE_STATE_KEY = "inference_state" class TimerMiddleware(MiddlewareCallable): @@ -34,7 +33,9 @@ def __call__(self, *args, **kwargs) -> Any: name = kwargs.get(NAME_KEY) is_nested = kwargs.pop(IS_NESTED_KEY, False) - inference_state = kwargs.get("inference_state") - timer = inference_state.timer - with timer.time(id=name, enabled=not is_nested): - return self.call_next(*args, **kwargs) + inference_state = kwargs.get(INFERENCE_STATE_KEY) + if inference_state and hasattr(inference_state, "timer"): + timer = inference_state.timer + with timer.time(id=name, enabled=not is_nested): + return self.call_next(*args, **kwargs) + return self.call_next(*args, **kwargs) diff --git a/src/deepsparse/pipeline.py b/src/deepsparse/pipeline.py index 23ff3a2810..1c0c324f24 100644 --- a/src/deepsparse/pipeline.py +++ 
b/src/deepsparse/pipeline.py @@ -17,6 +17,7 @@ from pathlib import Path from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional, Union +from deepsparse.loggers.logger_manager import LoggerManager from deepsparse.middlewares import IS_NESTED_KEY, NAME_KEY, MiddlewareManager from deepsparse.operators import EngineOperator, Operator from deepsparse.pipeline_config import PipelineConfig @@ -83,6 +84,7 @@ def __init__( pipeline_state: Optional[PipelineState] = None, middleware_manager: Optional[MiddlewareManager] = None, timer_manager: Optional[TimerManager] = None, + logger_manager: Optional[LoggerManager] = None, benchmark: bool = False, ): @@ -94,6 +96,7 @@ def __init__( self._continuous_batching_scheduler = continuous_batching_scheduler self.middleware_manager = middleware_manager self.timer_manager = timer_manager or TimerManager() + self.logger_manager = logger_manager or LoggerManager() self.validate() self._scheduler_group = SchedulerGroup(self.schedulers) @@ -199,6 +202,11 @@ async def run_async(self, *args, inference_state: InferenceState, **kwargs): ): timer = self.timer_manager.get_new_timer() inference_state.set_timer(timer) + if ( + not hasattr(inference_state, "logger") + or getattr(inference_state, "logger") is None + ): + inference_state.set_logger(self.logger_manager.metric) with inference_state.time(id=InferenceStages.TOTAL_INFERENCE): while next_step != self.router.END_ROUTE: @@ -429,13 +437,13 @@ def __call__(self, *args, **kwargs): inference_state = kwargs.pop("inference_state") else: inference_state = InferenceState() - if self.timer_manager is not None: - timer = self.timer_manager.get_new_timer() inference_state.create_state({}) - inference_state.set_timer(timer) timer = self.timer_manager.get_new_timer() inference_state.set_timer(timer) + + inference_state.set_logger(self.logger_manager.metric) + is_nested = False kwargs["inference_state"] = inference_state diff --git a/src/deepsparse/server/config.py 
b/src/deepsparse/server/config.py index a42eb00059..fee869f278 100644 --- a/src/deepsparse/server/config.py +++ b/src/deepsparse/server/config.py @@ -17,13 +17,13 @@ from pydantic import BaseModel, Field, validator -from deepsparse.legacy.tasks import SupportedTasks -from deepsparse.loggers.config import ( +from deepsparse.legacy.loggers.config import ( MetricFunctionConfig, PipelineSystemLoggingConfig, SystemLoggingConfig, SystemLoggingGroup, ) +from deepsparse.legacy.tasks import SupportedTasks from deepsparse.operators.engine_operator import DEEPSPARSE_ENGINE from deepsparse.pipeline_config import PipelineConfig diff --git a/src/deepsparse/server/helpers.py b/src/deepsparse/server/helpers.py index 331f6931db..5c5103d039 100644 --- a/src/deepsparse/server/helpers.py +++ b/src/deepsparse/server/helpers.py @@ -15,13 +15,13 @@ from http import HTTPStatus from typing import Dict, List, Optional -from deepsparse import ( +from deepsparse.legacy.loggers import ( BaseLogger, build_logger, get_target_identifier, system_logging_config_to_groups, ) -from deepsparse.loggers.config import MetricFunctionConfig, SystemLoggingGroup +from deepsparse.legacy.loggers.config import MetricFunctionConfig, SystemLoggingGroup from deepsparse.server.config import EndpointConfig, ServerConfig from deepsparse.server.protocol import ErrorResponse from fastapi.responses import JSONResponse diff --git a/src/deepsparse/server/system_logging.py b/src/deepsparse/server/system_logging.py index ff01fc08fd..987a9e43aa 100644 --- a/src/deepsparse/server/system_logging.py +++ b/src/deepsparse/server/system_logging.py @@ -18,7 +18,7 @@ from typing import Any, Dict, List, Optional, Union import psutil -from deepsparse.loggers import BaseLogger, MetricCategories, SystemGroups +from deepsparse.legacy.loggers import BaseLogger, MetricCategories, SystemGroups from deepsparse.server.config import SystemLoggingConfig, SystemLoggingGroup from fastapi import FastAPI, Request, Response from 
starlette.middleware.base import BaseHTTPMiddleware diff --git a/src/deepsparse/transformers/pipelines/text_generation/pipeline.py b/src/deepsparse/transformers/pipelines/text_generation/pipeline.py index 64c0c64a51..acc8f40099 100644 --- a/src/deepsparse/transformers/pipelines/text_generation/pipeline.py +++ b/src/deepsparse/transformers/pipelines/text_generation/pipeline.py @@ -15,6 +15,7 @@ import logging from typing import List, Optional +from deepsparse.loggers.logger_manager import LoggerManager from deepsparse.middlewares import MiddlewareManager from deepsparse.operators import EngineOperator from deepsparse.operators.registry import OperatorRegistry @@ -116,6 +117,7 @@ def __init__( generation_config=None, benchmark: bool = False, middleware_manager: Optional[MiddlewareManager] = None, + logger_manager: Optional[LoggerManager] = None, **engine_kwargs, ): """ @@ -327,6 +329,7 @@ def __init__( pipeline_state=pipeline_state, continuous_batching_scheduler=continuous_batching_scheduler, middleware_manager=middleware_manager, + logger_manager=logger_manager, benchmark=benchmark, ) diff --git a/src/deepsparse/utils/state.py b/src/deepsparse/utils/state.py index 23bce02022..d4c616bbb9 100644 --- a/src/deepsparse/utils/state.py +++ b/src/deepsparse/utils/state.py @@ -17,6 +17,7 @@ from copy import deepcopy from typing import Any, Union +from deepsparse.loggers.logger_factory import MetricLogger from deepsparse.utils.time import Timer @@ -50,7 +51,24 @@ def create_state(self, new_state: dict): self._current_state = new_state -class TimerState: +class LoggerState: + def __init__(self): + super().__init__() + self._logger = None + + def set_logger(self, logger: MetricLogger): + self._logger = logger + + @property + def logger(self): + return self._logger + + @logger.setter + def logger(self, logger: MetricLogger): + self._logger = logger + + +class TimerState(LoggerState): """TimerState shared among all InferenceState""" def __init__(self): @@ -58,10 +76,22 @@ def 
__init__(self): self._timer = None @contextmanager - def time(self, id: str, enabled: bool = True): + def time( + self, + id: str, + enabled: bool = True, + ): if self._timer is not None: with self.timer.time(id=id, enabled=enabled): yield + + # log the runtime if logger found + if self.logger is not None: + run_time = self.timer.measurements[id][-1] + self.logger( + value={"time": run_time}, + tag=id, + ) else: yield # null context @@ -104,7 +134,7 @@ def get_state(self, key: str): if key in self.current_state: return self.current_state[key] - def copy_state(self, props=["timer"]): + def copy_state(self, props=["timer", "logger"]): """copy everything except the attrs in props""" original_values = { diff --git a/tests/deepsparse/loggers/__init__.py b/tests/deepsparse/legacy/loggers/__init__.py similarity index 100% rename from tests/deepsparse/loggers/__init__.py rename to tests/deepsparse/legacy/loggers/__init__.py diff --git a/tests/deepsparse/loggers/helpers.py b/tests/deepsparse/legacy/loggers/helpers.py similarity index 98% rename from tests/deepsparse/loggers/helpers.py rename to tests/deepsparse/legacy/loggers/helpers.py index a0adda1435..5d93897285 100644 --- a/tests/deepsparse/loggers/helpers.py +++ b/tests/deepsparse/legacy/loggers/helpers.py @@ -21,7 +21,7 @@ from time import sleep from typing import Any -from deepsparse.loggers import ( +from deepsparse.legacy.loggers import ( AsyncLogger, BaseLogger, FunctionLogger, diff --git a/tests/deepsparse/loggers/metric_functions/__init__.py b/tests/deepsparse/legacy/loggers/metric_functions/__init__.py similarity index 100% rename from tests/deepsparse/loggers/metric_functions/__init__.py rename to tests/deepsparse/legacy/loggers/metric_functions/__init__.py diff --git a/tests/deepsparse/loggers/metric_functions/computer_vision/__init__.py b/tests/deepsparse/legacy/loggers/metric_functions/computer_vision/__init__.py similarity index 100% rename from 
tests/deepsparse/loggers/metric_functions/computer_vision/__init__.py rename to tests/deepsparse/legacy/loggers/metric_functions/computer_vision/__init__.py diff --git a/tests/deepsparse/loggers/metric_functions/computer_vision/test_built_ins.py b/tests/deepsparse/legacy/loggers/metric_functions/computer_vision/test_built_ins.py similarity index 99% rename from tests/deepsparse/loggers/metric_functions/computer_vision/test_built_ins.py rename to tests/deepsparse/legacy/loggers/metric_functions/computer_vision/test_built_ins.py index 28b18a1c7b..03409de764 100644 --- a/tests/deepsparse/loggers/metric_functions/computer_vision/test_built_ins.py +++ b/tests/deepsparse/legacy/loggers/metric_functions/computer_vision/test_built_ins.py @@ -15,7 +15,7 @@ import numpy import pytest -from deepsparse.loggers.metric_functions import ( +from deepsparse.legacy.loggers.metric_functions import ( detected_classes, fraction_zeros, image_shape, diff --git a/tests/deepsparse/loggers/metric_functions/helpers/__init__.py b/tests/deepsparse/legacy/loggers/metric_functions/helpers/__init__.py similarity index 100% rename from tests/deepsparse/loggers/metric_functions/helpers/__init__.py rename to tests/deepsparse/legacy/loggers/metric_functions/helpers/__init__.py diff --git a/tests/deepsparse/loggers/metric_functions/helpers/test_config_generation.py b/tests/deepsparse/legacy/loggers/metric_functions/helpers/test_config_generation.py similarity index 96% rename from tests/deepsparse/loggers/metric_functions/helpers/test_config_generation.py rename to tests/deepsparse/legacy/loggers/metric_functions/helpers/test_config_generation.py index 7cf6ad0c07..1d54d16319 100644 --- a/tests/deepsparse/loggers/metric_functions/helpers/test_config_generation.py +++ b/tests/deepsparse/legacy/loggers/metric_functions/helpers/test_config_generation.py @@ -19,15 +19,15 @@ import yaml import pytest -from deepsparse.loggers.config import MetricFunctionConfig, PipelineLoggingConfig -from 
deepsparse.loggers.metric_functions.helpers.config_generation import ( +from deepsparse.legacy.loggers.config import MetricFunctionConfig, PipelineLoggingConfig +from deepsparse.legacy.loggers.metric_functions.helpers.config_generation import ( _loggers_to_config_string, _metric_function_config_to_string, _metric_functions_configs_to_string, _nested_dict_to_lines, data_logging_config_from_predefined, ) -from deepsparse.loggers.metric_functions.registry import DATA_LOGGING_REGISTRY +from deepsparse.legacy.loggers.metric_functions.registry import DATA_LOGGING_REGISTRY DATA_LOGGING_REGISTRY_W_DUMMY_GROUP = DATA_LOGGING_REGISTRY.copy() diff --git a/tests/deepsparse/loggers/metric_functions/helpers/test_data_config_from_predefined.py b/tests/deepsparse/legacy/loggers/metric_functions/helpers/test_data_config_from_predefined.py similarity index 93% rename from tests/deepsparse/loggers/metric_functions/helpers/test_data_config_from_predefined.py rename to tests/deepsparse/legacy/loggers/metric_functions/helpers/test_data_config_from_predefined.py index 1274903e73..c0169ffda4 100644 --- a/tests/deepsparse/loggers/metric_functions/helpers/test_data_config_from_predefined.py +++ b/tests/deepsparse/legacy/loggers/metric_functions/helpers/test_data_config_from_predefined.py @@ -17,8 +17,8 @@ import yaml import pytest -from deepsparse.loggers.config import PipelineLoggingConfig -from deepsparse.loggers.metric_functions.helpers import ( +from deepsparse.legacy.loggers.config import PipelineLoggingConfig +from deepsparse.legacy.loggers.metric_functions.helpers import ( data_logging_config_from_predefined, ) diff --git a/tests/deepsparse/loggers/metric_functions/natural_language_processing/__init__.py b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/__init__.py similarity index 100% rename from tests/deepsparse/loggers/metric_functions/natural_language_processing/__init__.py rename to 
tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/__init__.py diff --git a/tests/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/__init__.py b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/__init__.py similarity index 100% rename from tests/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/__init__.py rename to tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/__init__.py diff --git a/tests/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/test_built_ins.py b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/test_built_ins.py similarity index 95% rename from tests/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/test_built_ins.py rename to tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/test_built_ins.py index 46e44a8ba2..4859a8e8bd 100644 --- a/tests/deepsparse/loggers/metric_functions/natural_language_processing/question_answering/test_built_ins.py +++ b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/question_answering/test_built_ins.py @@ -13,7 +13,7 @@ # limitations under the License. 
import pytest -from deepsparse.loggers.metric_functions.natural_language_processing import ( +from deepsparse.legacy.loggers.metric_functions.natural_language_processing import ( answer_found, answer_length, answer_score, diff --git a/tests/deepsparse/loggers/metric_functions/natural_language_processing/test_built_ins.py b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/test_built_ins.py similarity index 87% rename from tests/deepsparse/loggers/metric_functions/natural_language_processing/test_built_ins.py rename to tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/test_built_ins.py index 016be63b3f..a91c4199bc 100644 --- a/tests/deepsparse/loggers/metric_functions/natural_language_processing/test_built_ins.py +++ b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/test_built_ins.py @@ -13,10 +13,10 @@ # limitations under the License. import pytest -from deepsparse.loggers.metric_functions.natural_language_processing import ( +from deepsparse.legacy.loggers.metric_functions.natural_language_processing import ( string_length, ) -from deepsparse.loggers.metric_functions.utils import BatchResult +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult @pytest.mark.parametrize( diff --git a/tests/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/__init__.py b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/__init__.py similarity index 100% rename from tests/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/__init__.py rename to tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/__init__.py diff --git a/tests/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py 
b/tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py similarity index 100% rename from tests/deepsparse/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py rename to tests/deepsparse/legacy/loggers/metric_functions/natural_language_processing/token_classification/built_ins.py diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/image_classification.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/image_classification.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/image_classification.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/image_classification.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/object_detection.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/object_detection.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/object_detection.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/object_detection.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/question_answering.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/question_answering.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/question_answering.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/question_answering.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/segmentation.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/segmentation.txt similarity index 100% rename from 
tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/segmentation.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/segmentation.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_1.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_1.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_1.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_1.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_2.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_2.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_2.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/sentiment_analysis_2.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/text_classification.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/text_classification.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/text_classification.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/text_classification.txt diff --git 
a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/token_classification.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/token_classification.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/token_classification.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/token_classification.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/token_classification_1.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/token_classification_1.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/token_classification_1.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/token_classification_1.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification_1.txt b/tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification_1.txt similarity index 100% rename from tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification_1.txt rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/zero_shot_text_classification_1.txt diff --git a/tests/deepsparse/loggers/metric_functions/predefined/test_predefined.py 
b/tests/deepsparse/legacy/loggers/metric_functions/predefined/test_predefined.py similarity index 96% rename from tests/deepsparse/loggers/metric_functions/predefined/test_predefined.py rename to tests/deepsparse/legacy/loggers/metric_functions/predefined/test_predefined.py index 6db27ff0bf..317709b5d1 100644 --- a/tests/deepsparse/loggers/metric_functions/predefined/test_predefined.py +++ b/tests/deepsparse/legacy/loggers/metric_functions/predefined/test_predefined.py @@ -21,8 +21,8 @@ import pytest from deepsparse import Pipeline -from deepsparse.loggers.build_logger import logger_from_config -from tests.deepsparse.loggers.helpers import fetch_leaf_logger +from deepsparse.legacy.loggers.build_logger import logger_from_config +from tests.deepsparse.legacy.loggers.helpers import fetch_leaf_logger from tests.utils import mock_engine @@ -127,7 +127,7 @@ def test_group_name(mock_engine, group_name, pipeline_name, inputs, optional_ind yaml_config = """ loggers: list_logger: - path: tests/deepsparse/loggers/helpers.py:ListLogger + path: tests/deepsparse/legacy/loggers/helpers.py:ListLogger data_logging: predefined: - func: {group_name}""" diff --git a/tests/deepsparse/loggers/metric_functions/test_built_ins.py b/tests/deepsparse/legacy/loggers/metric_functions/test_built_ins.py similarity index 89% rename from tests/deepsparse/loggers/metric_functions/test_built_ins.py rename to tests/deepsparse/legacy/loggers/metric_functions/test_built_ins.py index 89c7aef9b4..3c0f71fc4e 100644 --- a/tests/deepsparse/loggers/metric_functions/test_built_ins.py +++ b/tests/deepsparse/legacy/loggers/metric_functions/test_built_ins.py @@ -13,8 +13,11 @@ # limitations under the License. 
import pytest -from deepsparse.loggers.metric_functions import predicted_classes, predicted_top_score -from deepsparse.loggers.metric_functions.utils import BatchResult +from deepsparse.legacy.loggers.metric_functions import ( + predicted_classes, + predicted_top_score, +) +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult @pytest.mark.parametrize( diff --git a/tests/deepsparse/loggers/test_async_logger.py b/tests/deepsparse/legacy/loggers/test_async_logger.py similarity index 95% rename from tests/deepsparse/loggers/test_async_logger.py rename to tests/deepsparse/legacy/loggers/test_async_logger.py index 5604a1bb50..657f346bae 100644 --- a/tests/deepsparse/loggers/test_async_logger.py +++ b/tests/deepsparse/legacy/loggers/test_async_logger.py @@ -20,8 +20,8 @@ import numpy import pytest -from deepsparse.loggers import AsyncLogger, FunctionLogger, MetricCategories -from tests.deepsparse.loggers.helpers import ( +from deepsparse.legacy.loggers import AsyncLogger, FunctionLogger, MetricCategories +from tests.deepsparse.legacy.loggers.helpers import ( ErrorLogger, FileLogger, NullLogger, diff --git a/tests/deepsparse/loggers/test_build_logger.py b/tests/deepsparse/legacy/loggers/test_build_logger.py similarity index 93% rename from tests/deepsparse/loggers/test_build_logger.py rename to tests/deepsparse/legacy/loggers/test_build_logger.py index 1854c127f6..d918133d2c 100644 --- a/tests/deepsparse/loggers/test_build_logger.py +++ b/tests/deepsparse/legacy/loggers/test_build_logger.py @@ -15,20 +15,23 @@ import yaml import pytest -from deepsparse import ( +from deepsparse.legacy.loggers import ( AsyncLogger, MultiLogger, PythonLogger, default_logger, logger_from_config, ) -from deepsparse.loggers.build_logger import ( +from deepsparse.legacy.loggers.build_logger import ( build_logger, build_system_loggers, system_logging_config_to_groups, ) -from deepsparse.loggers.config import MetricFunctionConfig, PipelineSystemLoggingConfig -from 
tests.deepsparse.loggers.helpers import ListLogger, fetch_leaf_logger +from deepsparse.legacy.loggers.config import ( + MetricFunctionConfig, + PipelineSystemLoggingConfig, +) +from tests.deepsparse.legacy.loggers.helpers import ListLogger, fetch_leaf_logger from tests.helpers import find_free_port from tests.utils import mock_engine @@ -89,7 +92,7 @@ yaml_config_7 = """ loggers: custom_logger: - path: tests/deepsparse/loggers/helpers.py:CustomLogger + path: tests/deepsparse/legacy/loggers/helpers.py:CustomLogger arg1: 1 arg2: some_string data_logging: @@ -232,7 +235,7 @@ def test_kwargs(): ), loggers_config={ "kwargs_logger": { - "path": "tests/deepsparse/loggers/helpers.py:KwargsLogger" + "path": "tests/deepsparse/legacy/loggers/helpers.py:KwargsLogger" } }, ) diff --git a/tests/deepsparse/loggers/test_end_to_end.py b/tests/deepsparse/legacy/loggers/test_end_to_end.py similarity index 89% rename from tests/deepsparse/loggers/test_end_to_end.py rename to tests/deepsparse/legacy/loggers/test_end_to_end.py index 7bc73ac928..838e3e69af 100644 --- a/tests/deepsparse/loggers/test_end_to_end.py +++ b/tests/deepsparse/legacy/loggers/test_end_to_end.py @@ -15,15 +15,16 @@ import time import pytest -from deepsparse import Pipeline, logger_from_config -from tests.deepsparse.loggers.helpers import fetch_leaf_logger +from deepsparse import Pipeline +from deepsparse.legacy.loggers import logger_from_config +from tests.deepsparse.legacy.loggers.helpers import fetch_leaf_logger from tests.utils import mock_engine YAML_CONFIG = """ loggers: list_logger: - path: tests/deepsparse/loggers/helpers.py:ListLogger + path: tests/deepsparse/legacy/loggers/helpers.py:ListLogger system_logging: enable: true prediction_latency: diff --git a/tests/deepsparse/loggers/test_function_logger.py b/tests/deepsparse/legacy/loggers/test_function_logger.py similarity index 95% rename from tests/deepsparse/loggers/test_function_logger.py rename to tests/deepsparse/legacy/loggers/test_function_logger.py 
index 3c2088ae0b..5cc21618fe 100644 --- a/tests/deepsparse/loggers/test_function_logger.py +++ b/tests/deepsparse/legacy/loggers/test_function_logger.py @@ -14,7 +14,8 @@ import math import pytest -from deepsparse import FunctionLogger, Pipeline, PythonLogger +from deepsparse import Pipeline +from deepsparse.legacy.loggers import FunctionLogger, PythonLogger from tests.utils import mock_engine diff --git a/tests/deepsparse/loggers/test_helpers.py b/tests/deepsparse/legacy/loggers/test_helpers.py similarity index 96% rename from tests/deepsparse/loggers/test_helpers.py rename to tests/deepsparse/legacy/loggers/test_helpers.py index 9b1276e2bd..c9c069463c 100644 --- a/tests/deepsparse/loggers/test_helpers.py +++ b/tests/deepsparse/legacy/loggers/test_helpers.py @@ -18,15 +18,15 @@ from pydantic import BaseModel import pytest -from deepsparse.loggers.helpers import ( +from deepsparse.legacy.loggers.helpers import ( access_nested_value, check_identifier_match, get_function_and_function_name, possibly_extract_value, unwrap_logged_value, ) -from deepsparse.loggers.metric_functions import identity -from deepsparse.loggers.metric_functions.utils import BatchResult +from deepsparse.legacy.loggers.metric_functions import identity +from deepsparse.legacy.loggers.metric_functions.utils import BatchResult from tests.test_data.metric_functions import user_defined_identity diff --git a/tests/deepsparse/loggers/test_python_logger.py b/tests/deepsparse/legacy/loggers/test_python_logger.py similarity index 95% rename from tests/deepsparse/loggers/test_python_logger.py rename to tests/deepsparse/legacy/loggers/test_python_logger.py index 1b1281756a..ddd589ac25 100644 --- a/tests/deepsparse/loggers/test_python_logger.py +++ b/tests/deepsparse/legacy/loggers/test_python_logger.py @@ -14,7 +14,8 @@ import pytest -from deepsparse import MetricCategories, Pipeline, PythonLogger +from deepsparse import Pipeline +from deepsparse.legacy.loggers import MetricCategories, PythonLogger from 
tests.utils import mock_engine diff --git a/tests/deepsparse/loggers/test_str.py b/tests/deepsparse/legacy/loggers/test_str.py similarity index 98% rename from tests/deepsparse/loggers/test_str.py rename to tests/deepsparse/legacy/loggers/test_str.py index 53dcdea1db..6a2eda338d 100644 --- a/tests/deepsparse/loggers/test_str.py +++ b/tests/deepsparse/legacy/loggers/test_str.py @@ -18,7 +18,7 @@ import numpy as np import pytest -from deepsparse import ( +from deepsparse.legacy.loggers import ( AsyncLogger, FunctionLogger, MultiLogger, diff --git a/tests/deepsparse/loggers/filters/test_frequency_filter.py b/tests/deepsparse/loggers/filters/test_frequency_filter.py new file mode 100644 index 0000000000..ad91609443 --- /dev/null +++ b/tests/deepsparse/loggers/filters/test_frequency_filter.py @@ -0,0 +1,126 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import defaultdict + +import pytest +from deepsparse.loggers.filters import FrequencyFilter + + +@pytest.mark.parametrize( + "tag, func, freq, expected_counter, iter", # From config file + [ + ("re:(?i)operator", "max", 2, 6, 12), + ("re:(?i)operator", "max", 3, 4, 12), + ("re:(?i)operator", "max", 5, 2, 12), + ], +) +def test_frequency_filter(tag, func, freq, expected_counter, iter): + """basic filtering test by frequency""" + freq_filter = FrequencyFilter() + + counter = 0 + for _ in range(iter): + freq_filter.inc(tag, func) + + if freq_filter.should_execute_on_frequency( + tag=tag, + func=func, + freq=freq, + ): + counter += 1 + stub = f"{tag}.{func}" + assert counter == expected_counter + assert freq_filter.counter[stub] == iter + + +@pytest.mark.parametrize( + "tag_freq_func, iter, expected_counter_calls", # From config file + [ + ( # unique tag, same func + [ + ("tag1", 1, "func"), + ("tag2", 3, "func"), + ("tag3", 7, "func"), + ], + 15, + { + "tag1.func": 15, + "tag2.func": 5, + "tag3.func": 2, + }, + ), + ( # duplicated tag1.func + [ + ("tag1", 1, "func"), + ("tag1", 3, "func"), + ("tag3", 7, "func"), + ], + 15, + { + "tag1.func": 15 + 5, + "tag3.func": 2, + }, + ), + ( # duplicated tag1 + [ + ("tag1", 3, "func"), + ("tag1", 3, "func2"), + ("tag3", 7, "func3"), + ], + 15, + { + "tag1.func": 5, + "tag1.func2": 5, + "tag3.func3": 2, + }, + ), + ( # tag, func being shared + [ + ("tag1", 3, "func"), + ("tag1", 3, "func2"), + ("tag3", 7, "func"), + ("tag3", 5, "func3"), + ], + 15, + { + "tag1.func": 5, + "tag1.func2": 5, + "tag3.func": 2, + "tag3.func3": 3, + }, + ), + ], +) +def test_frequency_filter_with_tag_freq_func_combination( + tag_freq_func, iter, expected_counter_calls +): + """Test to check the regex number of matches with respect to the input tag""" + + freq_filter = FrequencyFilter() + counter = defaultdict(int) + + for tag, freq, func in tag_freq_func: + stub = f"{tag}.{func}" + + for _ in range(iter): + freq_filter.inc(tag, 
func) + + if freq_filter.should_execute_on_frequency( + tag=tag, + func=func, + freq=freq, + ): + counter[stub] += 1 + assert counter == expected_counter_calls diff --git a/tests/deepsparse/loggers/filters/test_pattern.py b/tests/deepsparse/loggers/filters/test_pattern.py new file mode 100644 index 0000000000..5cf109fcee --- /dev/null +++ b/tests/deepsparse/loggers/filters/test_pattern.py @@ -0,0 +1,73 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from deepsparse.loggers.filters import is_match_found, unravel_value_as_generator + + +class MockValue: + def __init__(self, data, **kwargs): + self.data = data + for key, value in kwargs.items(): + setattr(self, key, value) + + +@pytest.mark.parametrize( + "pattern, string, truth", + [ + ("re:.*", "foo", True), # matches everything + ("re:(?i)operator", "foo", False), + ("re:(?i)operator", "AddOneOperator", True), + ("operator", "AddOneOperator", False), + ("Operator", "AddOneOperator", False), + ("AddOneOperator", "AddOneOperator", True), + ], +) +def test_is_match_found(pattern, string, truth): + assert truth == is_match_found(pattern, string) + + +@pytest.mark.parametrize( + "data, expected_output", + [ + ( + {"a": {"b": 1, "c": {"d": 2}}, "e": 3}, + [("['a']['b']", 1), ("['a']['c']['d']", 2), ("['e']", 3)], + ), + ( + [1, [2, [3, 4]], 5], + [("[0]", 1), ("[1][0]", 2), ("[1][1][0]", 3), ("[1][1][1]", 4), ("[2]", 5)], + ), + ( + {"a": 1, "b": "hello", "c": True, "d": 3.14}, + [("['a']", 1), ("['b']", "hello"), ("['c']", True), ("['d']", 3.14)], + ), + ( + MockValue( + {"a": 1, "b": "hello", "c": True, "d": 3.14}, foo=42, bar="example" + ), + [ + (".data['a']", 1), + (".data['b']", "hello"), + (".data['c']", True), + (".data['d']", 3.14), + (".foo", 42), + (".bar", "example"), + ], + ), + ], +) +def test_unravel_value(data, expected_output): + + assert list(unravel_value_as_generator(data)) == expected_output diff --git a/tests/deepsparse/loggers/registry/loggers/list_logger.py b/tests/deepsparse/loggers/registry/loggers/list_logger.py new file mode 100644 index 0000000000..f454de7e09 --- /dev/null +++ b/tests/deepsparse/loggers/registry/loggers/list_logger.py @@ -0,0 +1,37 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from typing import Any, Callable + + +class ListLogger: + def __init__(self, **_ignore): + self.logs = [] + + def log( + self, + value: Any, + tag: str, + func: Callable, + log_type: str, + **kwargs, + ): + placeholders = f"[{log_type}.{tag}.{str(func)}]" + if (run_time := kwargs.get("run_time")) is not None: + placeholders += f"[⏱️{run_time}]" + if (capture := kwargs.get("capture")) is not None: + placeholders += f" {func}({capture})" + + self.logs.append(f"{placeholders}: {value}") diff --git a/tests/deepsparse/loggers/registry/loggers/test_prometheus_loggers.py b/tests/deepsparse/loggers/registry/loggers/test_prometheus_loggers.py new file mode 100644 index 0000000000..a6f0c86278 --- /dev/null +++ b/tests/deepsparse/loggers/registry/loggers/test_prometheus_loggers.py @@ -0,0 +1,170 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import requests + +import pytest +from deepsparse.legacy.loggers import MetricCategories +from deepsparse.loggers.filters import unravel_value_as_generator +from deepsparse.loggers.registry.loggers.prometheus_logger import ( + PrometheusLogger, + get_prometheus_metric, +) +from prometheus_client import REGISTRY, Counter, Gauge, Histogram, Summary +from tests.helpers import find_free_port +from tests.utils import mock_engine + + +@pytest.mark.parametrize( + "tag, log_type, registry, expected_metric", + [ + ("dummy_tag", "metric", REGISTRY, Summary), + ("dummy_tag", "system", REGISTRY, None), + ( + "prediction_latency/dummy_tag", + "system", + REGISTRY, + Histogram, + ), + ( + "resource_utilization/dummy_tag", + "system", + REGISTRY, + Gauge, + ), + ( + "request_details/successful_request", + "system", + REGISTRY, + Counter, + ), + ( + "request_details/input_batch_size", + "system", + REGISTRY, + Histogram, + ), + ( + "request_details/response_message", + "system", + REGISTRY, + None, + ), + ], +) +def test_get_prometheus_metric(tag, log_type, registry, expected_metric): + metric = get_prometheus_metric(tag, log_type, registry) + + if metric is None: + assert metric is expected_metric + return + assert isinstance(metric, expected_metric) + assert ( + metric._documentation + == "{metric_type} metric for tag: {tag} | log_type: {log_type}".format( # noqa: E501 + metric_type=metric._type, tag=tag, log_type=log_type + ) + ) + + +@pytest.mark.parametrize( + "tag, no_iterations, value,", + [ + ("dummy_pipeline/dummy.tag_1", 2, 999.0), + ("dummy_pipeline/dummy.tag_2", 20, 1234), + ], +) +@mock_engine(rng_seed=0) +def test_prometheus_logger( + engine, + tmp_path, + tag, + no_iterations, + value, +): + port = find_free_port() + logger = PrometheusLogger( + port=port, + text_log_save_dir=tmp_path, + ) + + for _ in range(no_iterations): + logger.log(tag, value, "metric") + + response = requests.get(f"http://0.0.0.0:{port}").text + request_log_lines = response.split("\n") + + 
# index -5 is where we get '{tag}_count {no_iterations}' + count_request_request = float(request_log_lines[-6].split(" ")[1]) + + with open(logger.text_log_file_path) as f: + text_log_lines = f.readlines() + count_request_text = float(text_log_lines[-5].split(" ")[1]) + + assert count_request_request == count_request_text == no_iterations + + +@pytest.mark.parametrize( + "tag, value, expected_logs", + [ + ( + "dummy_tag", + {"foo": {"alice": 1, "bob": [1, 2, 3]}, "bar": 5}, + { + 'deepsparse_dummy_tag__foo__alice___count{pipeline_name="dummy_tag"} 1.0', # noqa: E501 + 'deepsparse_dummy_tag__foo__bob__2___count{pipeline_name="dummy_tag"} 1.0', # noqa: E501 + 'deepsparse_dummy_tag__foo__bob__2___sum{pipeline_name="dummy_tag"} 3.0', # noqa: E501 + 'deepsparse_dummy_tag__bar___sum{pipeline_name="dummy_tag"} 5.0', # noqa: E501 + }, + ), + ], +) +@mock_engine(rng_seed=0) +def test_nested_value_inputs(engine, tag, value, expected_logs): + port = find_free_port() + logger = PrometheusLogger(port=port) + for capture, val in unravel_value_as_generator(value, tag): + logger.log(tag=tag, value=val, log_type="metric", capture=capture) + + response = requests.get(f"http://0.0.0.0:{port}").text + request_log_lines = response.split("\n") + assert set(request_log_lines).issuperset(expected_logs) + + +@pytest.mark.parametrize( + "tag, additional_args, expected_logs", + [ + ( + "some_dummy_tag", + {"pipeline_name": "dummy_pipeline"}, + { + 'deepsparse_some_dummy_tag_count{pipeline_name="some_dummy_tag"} 1.0', # noqa: E501 + 'deepsparse_some_dummy_tag_sum{pipeline_name="some_dummy_tag"} 1.0', # noqa: E501 + }, + ), + ], +) +@mock_engine(rng_seed=0) +def test_using_labels(engine, tag, additional_args, expected_logs): + port = find_free_port() + logger = PrometheusLogger(port=port) + logger.log( + tag=tag, + value=1.0, + log_type=MetricCategories.DATA, + **additional_args, + ) + response = requests.get(f"http://0.0.0.0:{port}").text + request_log_lines = response.split("\n") + assert 
set(request_log_lines).issuperset(expected_logs) diff --git a/tests/deepsparse/loggers/test_config.py b/tests/deepsparse/loggers/test_config.py new file mode 100644 index 0000000000..ebf6b8a89a --- /dev/null +++ b/tests/deepsparse/loggers/test_config.py @@ -0,0 +1,73 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import yaml + +from deepsparse.loggers.config import LoggerConfig, LoggingConfig + + +def test_config_generates_default_json(): + """Check the default LoggingConfig""" + + expected_config = """ + loggers: + default: + name: PythonLogger + handler: null # None in python + system: + "re:.*": + - func: identity + freq: 1 + uses: + - default + performance: + cpu: + - func: identity + freq: 1 + uses: + - default + metric: + "re:(?i)operator": + - func: identity + freq: 1 + uses: + - default + capture: null + + """ + expected_dict = yaml.safe_load(expected_config) + default_dict = LoggingConfig().dict() + assert expected_dict == default_dict + + +def test_logger_config_accepts_kwargs(): + expected_config = """ + name: PythonLogger + foo: 1 + bar: "2024" + baz: + one: 1 + two: 2 + boston: + - one + - two + """ + config = LoggerConfig(**yaml.safe_load(expected_config)).dict() + + assert config["name"] == "PythonLogger" + assert config["handler"] is None + assert config["baz"] == dict(one=1, two=2) + assert config["foo"] == 1 + assert config["boston"] == ["one", "two"] + assert 
config["bar"] == "2024" diff --git a/tests/deepsparse/loggers/test_loggers_pipeline.py b/tests/deepsparse/loggers/test_loggers_pipeline.py new file mode 100644 index 0000000000..cf443d03f1 --- /dev/null +++ b/tests/deepsparse/loggers/test_loggers_pipeline.py @@ -0,0 +1,451 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from typing import Dict + +import requests +from pydantic import BaseModel + +from deepsparse import Pipeline +from deepsparse.loggers.logger_manager import LoggerManager +from deepsparse.operators import Operator +from deepsparse.routers import LinearRouter +from deepsparse.schedulers import OperatorScheduler + + +class IntSchema(BaseModel): + value: int + + +class AddOneOperator(Operator): + input_schema = IntSchema + output_schema = IntSchema + + def run(self, inp: IntSchema, **kwargs) -> Dict: + return {"value": inp.value + 1} + + +class AddTwoOperator(Operator): + input_schema = IntSchema + output_schema = IntSchema + + def run(self, inp: IntSchema, **kwargs) -> Dict: + return {"value": inp.value + 2} + + +def test_pipeline_loggers(): + """basic logging test""" + + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + system: + "tag": # uses exact match. 
For regex, use "re:tag" + - func: max + freq: 1 + uses: + - list + + """ + + class LoggerPipeline(Pipeline): + def __call__(self, *args, **kwargs): + self.logger_manager.log( + value=1, + tag="tag", + log_type="system", + ) + return super().__call__(*args, **kwargs) + + AddThreePipeline = LoggerPipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + list_logs = AddThreePipeline.logger_manager.leaf_logger["list"].logs + + assert len(list_logs) == 1 + + +def test_pipeline_loggers_with_frequency(): + """one root logger, one tag, frequency of 2""" + + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + system: + "tag": + - func: max + freq: 2 + uses: + - list + + - func: identity + freq: 1 + uses: + - list + + """ + + class LoggerPipeline(Pipeline): + def __call__(self, *args, **kwargs): + for _ in range(3): + self.logger_manager.log(value=1, log_type="system", tag="tag") + return super().__call__(*args, **kwargs) + + AddThreePipeline = LoggerPipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + list_logs = AddThreePipeline.logger_manager.leaf_logger["list"].logs + + # three identity, one max + assert len(list_logs) == 4 + + # Check if there are three "[system.tag.identity]" and one "[system.tag.max]" + identity_count = list_logs.count("[system.tag.identity]: 1") + max_count = list_logs.count("[system.tag.max]: 1") + + # Check if there are three "[system.tag.identity]" and one "[system.tag.max]" + assert ( + 
identity_count == 3 + ), "Expected three occurrences of '[system.tag.identity]: 1'" + assert max_count == 1, "Expected one occurrence of '[system.tag.max]: 1'" + + +def test_pipeline_loggers_with_frequency_multiple_tags(): + """one logger multiple tag, exact match""" + + config = """ + loggers: + default: + name: PythonLogger + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + system: + tag1: + - func: identity + freq: 1 + uses: + - list + - func: identity + freq: 2 + uses: + - list + tag2: + - func: identity + freq: 1 + uses: + - list + - func: identity + freq: 2 + uses: + - list + """ + + class LoggerPipeline(Pipeline): + def __call__(self, *args, **kwargs): + + # record once + self.logger_manager.system("one", tag="tag2", level="warning") + + # record twice + self.logger_manager.system("two", tag="tag2", level="warning") + + # record once + self.logger_manager.system("three", tag="tag2", level="warning") + + # record once + self.logger_manager.system("four", tag="tag1", level="warning") + + # record twice + self.logger_manager.system("five", tag="tag2", level="warning") + + # record once + self.logger_manager.system("six", tag="tag2", level="warning") + + # record twice + self.logger_manager.system("tag1 seven", tag="tag1", level="warning") + return super().__call__(*args, **kwargs) + + AddThreePipeline = LoggerPipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + list_logs = AddThreePipeline.logger_manager.leaf_logger["list"].logs + # three identity, one max + assert len(list_logs) == 10 + + # Create a list of numbers to count + numbers_to_count = ["one", "two", "three", "four", "five", "six"] + + # Initialize a dictionary to store counts + number_counts = {} + + # Iterate through 
the list and count the numbers + for number in numbers_to_count: + count = sum( + 1 for tag in list_logs if f"[system.tag2.identity]: {number}" in tag + ) + number_counts[number] = count + + # Check if the counts match the expected values + for number in numbers_to_count: + assert number_counts[number] == list_logs.count( + f"[system.tag2.identity]: {number}" + ), ( + f"Expected {number_counts[number]} occurrences of " + "'[system.tag2.identity]: {number}'" + ) + + +def test_pipeline_loggers_with_two_log_types(): + # one logger multiple tag, regex + """one root logger, one tag, frequency of 2""" + + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + system: + "tag": + - func: max + freq: 2 + uses: + - list + - func: identity + freq: 1 + uses: + - list + metric: + "tag": + - func: max + freq: 2 + uses: + - list + - func: identity + freq: 1 + uses: + - list + """ + + class LoggerPipeline(Pipeline): + def __call__(self, *args, **kwargs): + for _ in range(3): + self.logger_manager.log(value=1, log_type="system", tag="tag") + for _ in range(3): + self.logger_manager.log(value=1, log_type="metric", tag="tag") + return super().__call__(*args, **kwargs) + + AddThreePipeline = LoggerPipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + list_logs = AddThreePipeline.logger_manager.leaf_logger["list"].logs + + # three identity, one max + assert len(list_logs) == 4 + 4 + + # Check if there are three "[system.tag.identity]" and one "[system.tag.max]" + system_identity_count = list_logs.count("[system.tag.identity]: 1") + system_max_count = list_logs.count("[system.tag.max]: 1") + + # Check if there are three "[system.tag.identity]" and one "[system.tag.max]" + assert ( + 
system_identity_count == 3 + ), "Expected three occurrences of '[system.tag.identity]: 1'" + assert system_max_count == 1, "Expected one occurrence of '[system.tag.max]: 1'" + + metric_identity_count = list_logs.count("[metric.tag.identity]: 1") + metric_max_count = list_logs.count("[metric.tag.max]: 1") + + # Check if there are three "[metric.tag.identity]" and one "[metric.tag.max]" + assert ( + metric_identity_count == 3 + ), "Expected three occurrences of '[metric.tag.identity]: 1'" + assert metric_max_count == 1, "Expected one occurrence of '[metric.tag.max]: 1'" + + +def test_pipeline_loggers_no_tag_match(): + """Skip logs if no tag match""" + + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + system: + "tag": + - func: max + freq: 2 + uses: + - list + + - func: identity + freq: 1 + uses: + - list + metric: + "ta": + - func: max + freq: 2 + uses: + - list + + """ + + class LoggerPipeline(Pipeline): + def __call__(self, *args, **kwargs): + for _ in range(3): + self.logger_manager.log(value=1, log_type="system", tag="tag3") + self.logger_manager.log(value=1, log_type="system", tag="ta") + return super().__call__(*args, **kwargs) + + AddThreePipeline = LoggerPipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + list_logs = AddThreePipeline.logger_manager.leaf_logger["list"].logs + + # tag shouldnt match with tag3, ta shouldnt match with tag3 + # and ta shouldnt match wit metric log + assert len(list_logs) == 0 + + +def test_pipeline_loggers_with_frequency_tags_multiple_capture(): + """Metric logger with config""" + + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + metric: + "tag": # uses 
exact match. For regex, use "re:tag" + - func: max + freq: 1 + uses: + - list + capture: + - "re:.*" # capture all keys and class prop + + """ + + class LoggerPipeline(Pipeline): + def __call__(self, *args, **kwargs): + self.logger_manager.log(value=1, log_type="metric", tag="tag") + return super().__call__(*args, **kwargs) + + AddThreePipeline = LoggerPipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + list_logs = AddThreePipeline.logger_manager.leaf_logger["list"].logs + + assert len(list_logs) == 1 + + +def test_pipeline_with_prometheus(): + """Test with prometheus logger""" + + config = """ + loggers: + prom: + name: PrometheusLogger + + performance: + "tag": # uses exact match. For regex, use "re:tag" + - func: max + freq: 1 + uses: + - prom + + """ + + class LoggerPipeline(Pipeline): + def __call__(self, *args, **kwargs): + self.logger_manager.log(value=1, log_type="performance", tag="tag") + return super().__call__(*args, **kwargs) + + AddThreePipeline = LoggerPipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + prometheus_logger = AddThreePipeline.logger_manager.leaf_logger["prom"] + response = requests.get(f"http://0.0.0.0:{prometheus_logger.port}").text + request_log_lines = response.split("\n") + + assert request_log_lines[-5] == 'deepsparse_tag_sum{pipeline_name="tag"} 1.0' + assert request_log_lines[-6] == 'deepsparse_tag_count{pipeline_name="tag"} 1.0' diff --git a/tests/deepsparse/loggers/test_prometheus_logger.py 
b/tests/deepsparse/loggers/test_prometheus_logger.py deleted file mode 100644 index 689b5163af..0000000000 --- a/tests/deepsparse/loggers/test_prometheus_logger.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import shutil - -import requests - -import pytest -from deepsparse import PrometheusLogger -from deepsparse.loggers import MetricCategories -from deepsparse.loggers.metric_functions.utils import BatchResult -from deepsparse.loggers.prometheus_logger import get_prometheus_metric -from prometheus_client import REGISTRY, Counter, Gauge, Histogram, Summary -from tests.helpers import find_free_port -from tests.utils import mock_engine - - -@pytest.mark.parametrize( - "identifier, category, registry, expected_metric", - [ - ("dummy_identifier", MetricCategories.DATA, REGISTRY, Summary), - ("dummy_identifier", MetricCategories.SYSTEM, REGISTRY, None), - ( - "prediction_latency/dummy_identifier", - MetricCategories.SYSTEM, - REGISTRY, - Histogram, - ), - ( - "resource_utilization/dummy_identifier", - MetricCategories.SYSTEM, - REGISTRY, - Gauge, - ), - ( - "request_details/successful_request", - MetricCategories.SYSTEM, - REGISTRY, - Counter, - ), - ( - "request_details/input_batch_size", - MetricCategories.SYSTEM, - REGISTRY, - Histogram, - ), - ( - "request_details/response_message", - MetricCategories.SYSTEM, - REGISTRY, - None, - ), - ], -) -def 
test_get_prometheus_metric(identifier, category, registry, expected_metric): - metric = get_prometheus_metric(identifier, category, registry) - if metric is None: - assert metric is expected_metric - return - assert isinstance(metric, expected_metric) - assert ( - metric._documentation - == "{metric_type} metric for identifier: {identifier} | Category: {category}".format( # noqa: E501 - metric_type=metric._type, identifier=identifier, category=category - ) - ) - - -@pytest.mark.parametrize( - "identifier, no_iterations, value, text_log_save_frequency, should_fail", - [ - ("dummy_pipeline/dummy.identifier_1", 20, 1.0, 1, False), - ("dummy_pipeline/dummy.identifier_2", 20, 1, 5, False), - ("dummy_pipeline/dummy.identifier_3", 20, [1.0], 10, True), - ], -) -@mock_engine(rng_seed=0) -def test_prometheus_logger( - engine, - tmp_path, - identifier, - no_iterations, - value, - text_log_save_frequency, - should_fail, -): - port = find_free_port() - logger = PrometheusLogger( - port=port, - text_log_save_frequency=text_log_save_frequency, - text_log_save_dir=tmp_path, - ) - - for idx in range(no_iterations): - if should_fail: - with pytest.raises(ValueError): - logger.log(identifier, value, MetricCategories.DATA) - return - return - logger.log(identifier, value, MetricCategories.DATA) - - response = requests.get(f"http://0.0.0.0:{port}").text - request_log_lines = response.split("\n") - # line 38 is where we get '{identifier}_count {no_iterations}' - count_request_request = float(request_log_lines[98].split(" ")[1]) - - with open(logger.text_log_file_path) as f: - text_log_lines = f.readlines() - count_request_text = float(text_log_lines[98].split(" ")[1]) - - assert count_request_request == count_request_text == no_iterations - shutil.rmtree(tmp_path) - - -@pytest.mark.parametrize( - "identifier, value, expected_logs", - [ - ( - "dummy_identifier", - {"foo": {"alice": 1, "bob": BatchResult([1, 2, 3])}, "bar": 5}, - { - "deepsparse_dummy_identifier__foo__alice_count 1.0", - 
"deepsparse_dummy_identifier__foo__bob_count 3.0", - "deepsparse_dummy_identifier__foo__bob_sum 6.0", - "deepsparse_dummy_identifier__bar_count 1.0", - }, - ), - ], -) -@mock_engine(rng_seed=0) -def test_nested_value_inputs(engine, identifier, value, expected_logs): - port = find_free_port() - logger = PrometheusLogger(port=port) - logger.log(identifier, value, MetricCategories.DATA) - response = requests.get(f"http://0.0.0.0:{port}").text - request_log_lines = response.split("\n") - assert set(request_log_lines).issuperset(expected_logs) - - -@pytest.mark.parametrize( - "identifier, additional_args, expected_logs", - [ - ( - "some_dummy_identifier", - {"pipeline_name": "dummy_pipeline"}, - { - 'deepsparse_some_dummy_identifier_count{pipeline_name="dummy_pipeline"} 1.0', # noqa: E501 - 'deepsparse_some_dummy_identifier_sum{pipeline_name="dummy_pipeline"} 1.0', # noqa: E501 - }, - ), - ], -) -@mock_engine(rng_seed=0) -def test_using_labels(engine, identifier, additional_args, expected_logs): - port = find_free_port() - logger = PrometheusLogger(port=port) - logger.log( - identifier=identifier, - value=1.0, - category=MetricCategories.DATA, - **additional_args, - ) - response = requests.get(f"http://0.0.0.0:{port}").text - request_log_lines = response.split("\n") - assert set(request_log_lines).issuperset(expected_logs) diff --git a/tests/deepsparse/loggers/test_root_logger.py b/tests/deepsparse/loggers/test_root_logger.py new file mode 100644 index 0000000000..a679238d7e --- /dev/null +++ b/tests/deepsparse/loggers/test_root_logger.py @@ -0,0 +1,76 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from unittest.mock import Mock + +from deepsparse.loggers.root_logger import RootLogger + + +def test_log_method(): + + mock_leaf_1 = Mock() + mock_leaf_1.log = Mock() + + mock_leaf_2 = Mock() + mock_leaf_2.log = Mock() + + mock_leaf_logger = { + "logger_id_1": mock_leaf_1, + "logger_id_2": mock_leaf_2, + } + + mock_config = { + "tag1": [{"func": "identity", "freq": 2, "uses": ["logger_id_1"]}], + "tag2": [ + {"func": "identity", "freq": 3, "uses": ["logger_id_2", "logger_id_1"]} + ], + } + + root_logger = RootLogger(mock_config, mock_leaf_logger) + + root_logger.log("log_value", "log_type", "tag1") + assert mock_leaf_1.log.call_count == 0 + + root_logger.log("log_value", "log_type", "tag1") + assert mock_leaf_1.log.call_count == 1 + + mock_leaf_logger["logger_id_1"].log.assert_called_with( + value="log_value", + tag="tag1", + func="identity", + log_type="log_type", + ) + + root_logger.log("log_value", "log_type", "tag2") + root_logger.log("log_value", "log_type", "tag2") + assert mock_leaf_2.log.call_count == 0 + + root_logger.log("log_value", "log_type", "tag2") + assert mock_leaf_2.log.call_count == 1 + assert mock_leaf_1.log.call_count == 2 + + mock_leaf_logger["logger_id_1"].log.assert_called_with( + value="log_value", + tag="tag2", + func="identity", + log_type="log_type", + ) + + mock_leaf_logger["logger_id_2"].log.assert_called_with( + value="log_value", + tag="tag2", + func="identity", + log_type="log_type", + ) diff --git a/tests/deepsparse/loggers/test_util.py b/tests/deepsparse/loggers/test_util.py new file mode 100644 index 
0000000000..1c71adabbf --- /dev/null +++ b/tests/deepsparse/loggers/test_util.py @@ -0,0 +1,54 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import pytest +from deepsparse.loggers.utils import ( + LOGGER_REGISTRY, + import_from_path, + import_from_registry, +) + + +@pytest.mark.parametrize( + "name, is_successful", + [ + ("PythonLogger", True), + ("max", True), + ("blah", False), + ], +) +def test_import_from_registry(name, is_successful): + if is_successful: + assert import_from_registry(name) is not None + else: + with pytest.raises(AttributeError): + import_from_registry(name) + + +@pytest.mark.parametrize( + "path, is_successful", + [ + (f"{LOGGER_REGISTRY}.py:PythonLogger", True), + (f"{LOGGER_REGISTRY}:PythonLogger", True), + ("foo/bar:blah", False), + (f"{LOGGER_REGISTRY}:blah", False), + ], +) +def test_import_from_path(path, is_successful): + if is_successful: + assert import_from_path(path) is not None + else: + with pytest.raises((AttributeError, ImportError)): + import_from_path(path) diff --git a/tests/deepsparse/middlewares/test_logger_middleware.py b/tests/deepsparse/middlewares/test_logger_middleware.py new file mode 100644 index 0000000000..cbfa1fe51e --- /dev/null +++ b/tests/deepsparse/middlewares/test_logger_middleware.py @@ -0,0 +1,267 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by call_nextlicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import math +from collections import defaultdict +from typing import List + +import pytest +from deepsparse import TextGeneration +from deepsparse.loggers.logger_manager import LoggerManager +from deepsparse.middlewares import ( + LoggerMiddleware, + MiddlewareManager, + MiddlewareSpec, + TimerMiddleware, +) +from deepsparse.pipeline import Pipeline +from deepsparse.routers import LinearRouter +from deepsparse.schedulers import ContinuousBatchingScheduler, OperatorScheduler +from deepsparse.utils.state import InferenceState +from tests.deepsparse.pipelines.test_basic_pipeline import ( + AddOneOperator, + AddTwoOperator, + IntSchema, +) +from tests.deepsparse.utils.wrappers import asyncio_run + + +PROMPT = "How to make banana bread?" 
+GENERATION_CONFIG = {"max_new_tokens": 10} + + +@pytest.fixture +def text_generation_instance(frequency: int = 1): + config = f""" + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + metric: + "re:.*": # regex match all + - func: identity + freq: {frequency} + uses: + - list + + """ + + middlewares = [ + MiddlewareSpec(LoggerMiddleware), + ] + + model = TextGeneration( + model_path="hf:mgoin/TinyStories-1M-ds", + middleware_manager=MiddlewareManager(middlewares), + logger_manager=LoggerManager(config), + ) + + text = model(PROMPT, **GENERATION_CONFIG).generations[0].text + + # wait for async loggers to finish + model.logger_manager.wait_for_completion() + + assert text is not None + + return model + + +def test_logger_middleware_logs_saved_in_list_logger(): + """Check metric logs in LoggerMiddleware are logged as expected""" + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + metric: + "re:(?i)operator": # regex match with non case sensitive Operator + - func: identity + freq: 1 + uses: + - list + """ + + middlewares = [ + MiddlewareSpec(LoggerMiddleware), # for timer + ] + + ops = [AddOneOperator(), AddTwoOperator()] + + AddThreePipeline = Pipeline( + ops=ops, + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + continuous_batching_scheduler=ContinuousBatchingScheduler, + middleware_manager=MiddlewareManager(middlewares), + logger_manager=LoggerManager(config), + ) + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + assert pipeline_output.value == 8 + + AddThreePipeline.logger_manager.wait_for_completion() + + # check list logger logs + list_log = AddThreePipeline.logger_manager.leaf_logger["list"].logs + assert len(list_log) == 2 + + expected_logs = set( + [ + "[metric.AddOneOperator.identity]: value=6", + "[metric.AddTwoOperator.identity]: value=8", + ] + ) + for tag in list_log: + 
expected_logs.remove(tag) + assert len(expected_logs) == 0 + + +@pytest.mark.parametrize( + "frequency", + [ + 2, + 3, + 4, + ], +) +def test_text_generation_pipeline_trigger_logger_with_run_time_with_frequency_filter( + frequency, text_generation_instance +): + """Check logger with frequency filter and timer middleware""" + + config = f""" + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + metric: + "re:.*": # regex match all + - func: identity + freq: {frequency} + uses: + - list + + """ + + middlewares = [ + MiddlewareSpec(LoggerMiddleware), + ] + + model = TextGeneration( + model_path="hf:mgoin/TinyStories-1M-ds", + middleware_manager=MiddlewareManager(middlewares), + logger_manager=LoggerManager(config), + ) + + text = model(PROMPT, **GENERATION_CONFIG).generations[0].text + + # wait for async loggers to finish + model.logger_manager.wait_for_completion() + + assert text is not None + list_log = model.logger_manager.leaf_logger["list"].logs + + max_expected_len_list_logs = ( + len(text_generation_instance.logger_manager.leaf_logger["list"].logs) + / frequency + ) + assert math.floor(len(list_log)) <= math.floor(max_expected_len_list_logs) + + +@asyncio_run +async def test_timer_middleware_loggings_and_timings_async(): + """Check middlewares in async_run using timer and logger""" + + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + metric: + "re:.*": + - func: identity + freq: 1 + uses: + - list + capture: + - "re:.*" + """ + + middlewares = [ + MiddlewareSpec(LoggerMiddleware), + MiddlewareSpec(TimerMiddleware), # for timer + ] + + ops = [AddOneOperator(), AddTwoOperator()] + + AddThreePipeline = Pipeline( + ops=ops, + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + continuous_batching_scheduler=ContinuousBatchingScheduler, + middleware_manager=MiddlewareManager(middlewares), + logger_manager=LoggerManager(config), + ) + + 
inference_state = InferenceState() + inference_state.create_state({}) + + pipeline_input = IntSchema(value=5) + + pipeline_output = await AddThreePipeline.run_async( + pipeline_input, inference_state=inference_state + ) + + assert pipeline_output.value == 8 + + pipeline_measurements: List[ + defaultdict + ] = AddThreePipeline.timer_manager.measurements + measurements = pipeline_measurements[0] + + # Pipeline, AddOneOperator, AddTwoOperator should have one measurement each + assert len(measurements) == len(ops) + 1 + + # assert pipeline time is more than the sum of two ops + pipeline_time: List[float] = measurements["total_inference"] + add_one_operator_time, add_two_operator_time = ( + measurements["AddOneOperator"], + measurements["AddTwoOperator"], + ) + + assert pipeline_time > add_one_operator_time + add_two_operator_time + + # check list logger logs + list_log = AddThreePipeline.logger_manager.leaf_logger["list"].logs + + # wait for submitted jobs to complete + AddThreePipeline.logger_manager.wait_for_completion() + + # two logs and one timer + assert len(list_log) == 3 diff --git a/tests/server/test_helpers.py b/tests/server/test_helpers.py index 926ca580b1..259fada050 100644 --- a/tests/server/test_helpers.py +++ b/tests/server/test_helpers.py @@ -18,10 +18,10 @@ from pydantic import BaseModel import pytest -from deepsparse.loggers import AsyncLogger, MultiLogger, PythonLogger +from deepsparse.legacy.loggers import AsyncLogger, MultiLogger, PythonLogger from deepsparse.server.config import ServerConfig from deepsparse.server.helpers import server_logger_from_config -from tests.deepsparse.loggers.helpers import fetch_leaf_logger +from tests.deepsparse.legacy.loggers.helpers import fetch_leaf_logger from tests.helpers import find_free_port @@ -123,7 +123,7 @@ class DummyOutputSchema(BaseModel): yaml_config_8 = """ loggers: custom_logger: - path: tests/deepsparse/loggers/helpers.py:CustomLogger + path: tests/deepsparse/legacy/loggers/helpers.py:CustomLogger 
arg1: 1 arg2: some_string endpoints: diff --git a/tests/server/test_loggers.py b/tests/server/test_legacy_loggers.py similarity index 96% rename from tests/server/test_loggers.py rename to tests/server/test_legacy_loggers.py index e7dd33722d..e52e6fc4d9 100644 --- a/tests/server/test_loggers.py +++ b/tests/server/test_legacy_loggers.py @@ -16,8 +16,11 @@ from collections import Counter from unittest import mock -from deepsparse import PythonLogger -from deepsparse.loggers.config import PipelineSystemLoggingConfig, SystemLoggingGroup +from deepsparse.legacy.loggers import PythonLogger +from deepsparse.legacy.loggers.config import ( + PipelineSystemLoggingConfig, + SystemLoggingGroup, +) from deepsparse.server.config import ( EndpointConfig, MetricFunctionConfig, @@ -28,13 +31,13 @@ from deepsparse.server.helpers import server_logger_from_config from fastapi.testclient import TestClient from flaky import flaky -from tests.deepsparse.loggers.helpers import fetch_leaf_logger +from tests.deepsparse.legacy.loggers.helpers import fetch_leaf_logger from tests.helpers import find_free_port from tests.test_data.server_test_data import SAMPLE_LOGS_DICT from tests.utils import mock_engine -logger_identifier = "tests/deepsparse/loggers/helpers.py:ListLogger" +logger_identifier = "tests/deepsparse/legacy/loggers/helpers.py:ListLogger" stub = "zoo:distilbert-sst2_wikipedia_bookcorpus-pruned90" # noqa E501 task = "text-classification" name = "endpoint_name" @@ -95,7 +98,7 @@ def test_data_logging_from_predefined(): calls = fetch_leaf_logger(server_logger).calls data_logging_logs = [call for call in calls if "DATA" in call] with open( - "tests/deepsparse/loggers/metric_functions/predefined/predefined_logs/text_classification.txt", # noqa E501 + "tests/deepsparse/legacy/loggers/metric_functions/predefined/predefined_logs/text_classification.txt", # noqa E501 "r", ) as f: expected_logs = f.read().splitlines() diff --git a/tests/server/test_system_logging.py 
b/tests/server/test_legacy_system_logging.py similarity index 96% rename from tests/server/test_system_logging.py rename to tests/server/test_legacy_system_logging.py index c76fd1df3a..8f13003466 100644 --- a/tests/server/test_system_logging.py +++ b/tests/server/test_legacy_system_logging.py @@ -17,7 +17,7 @@ import pydantic import pytest -from deepsparse.loggers.config import SystemLoggingGroup +from deepsparse.legacy.loggers.config import SystemLoggingGroup from deepsparse.server.config import ( EndpointConfig, ServerConfig, @@ -27,11 +27,11 @@ from deepsparse.server.helpers import server_logger_from_config from deepsparse.server.system_logging import log_resource_utilization from fastapi.testclient import TestClient -from tests.deepsparse.loggers.helpers import ListLogger +from tests.deepsparse.legacy.loggers.helpers import ListLogger from tests.utils import mock_engine -logger_identifier = "tests/deepsparse/loggers/helpers.py:ListLogger" +logger_identifier = "tests/deepsparse/legacy/loggers/helpers.py:ListLogger" stub = "zoo:nlp/text_classification/distilbert-none/pytorch/huggingface/qqp/pruned80_quant-none-vnni" # noqa E501 task = "text-classification" name = "endpoint_name" diff --git a/tests/utils/test_basic_logger.py b/tests/utils/test_basic_logger.py new file mode 100644 index 0000000000..98a8944ee0 --- /dev/null +++ b/tests/utils/test_basic_logger.py @@ -0,0 +1,95 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Simple example and test of a dummy pipeline +""" + +import time +from collections import defaultdict +from typing import Dict + +from pydantic import BaseModel + +from deepsparse import Pipeline +from deepsparse.loggers.logger_manager import LoggerManager +from deepsparse.operators import Operator +from deepsparse.routers import LinearRouter +from deepsparse.schedulers import OperatorScheduler + + +class IntSchema(BaseModel): + value: int + + +class AddOneOperator(Operator): + input_schema = IntSchema + output_schema = IntSchema + + def run(self, inp: IntSchema, **kwargs) -> Dict: + inference_state = kwargs.get("inference_state") + with inference_state.time(id="AddOneOperator"): + time.sleep(0.2) + return {"value": inp.value + 1} + + +class AddTwoOperator(Operator): + input_schema = IntSchema + output_schema = IntSchema + + def run(self, inp: IntSchema, **kwargs) -> Dict: + inference_state = kwargs.get("inference_state") + with inference_state.time(id="AddTwoOperator"): + time.sleep(0.5) + return {"value": inp.value + 2} + + +def test_pipeline_fine_grained_timer_record_operator_run_times(): + config = """ + loggers: + list: + name: tests/deepsparse/loggers/registry/loggers/list_logger.py:ListLogger + + metric: + "re:.*": + - func: identity + freq: 1 + uses: + - list + capture: + - "re:.*" + """ + AddThreePipeline = Pipeline( + ops=[AddOneOperator(), AddTwoOperator()], + router=LinearRouter(end_route=2), + schedulers=[OperatorScheduler()], + logger_manager=LoggerManager(config), + ) + pipeline_input = IntSchema(value=5) + pipeline_output = AddThreePipeline(pipeline_input) + + assert pipeline_output.value == 8 + + measurements: defaultdict[list] = AddThreePipeline.timer_manager.measurements[0] + + assert len(measurements) == 2 + expected_keys = {"AddTwoOperator", "AddOneOperator"} + for key in measurements.keys(): + expected_keys.remove(key) + assert 
len(expected_keys) == 0 + + AddThreePipeline.logger_manager.wait_for_completion() + list_log = AddThreePipeline.logger_manager.leaf_logger["list"].logs + + assert len(list_log) == 2