Damian's Cherry Pick #1611

Merged: 1 commit, Feb 20, 2024
@@ -25,7 +25,10 @@
 from deepsparse import Pipeline
 from deepsparse.evaluation.registry import EvaluationRegistry
 from deepsparse.evaluation.results import Dataset, Evaluation, Metric, Result
-from deepsparse.evaluation.utils import LM_EVALUATION_HARNESS
+from deepsparse.evaluation.utils import (
+    LM_EVALUATION_HARNESS,
+    LM_EVALUATION_HARNESS_ALIASES,
+)
 from deepsparse.utils.data import numpy_log_softmax
 from lm_eval import evaluator, tasks, utils
 from lm_eval.api.instance import Instance
@@ -39,7 +42,9 @@
 __all__ = ["integration_eval"]


-@EvaluationRegistry.register(name=LM_EVALUATION_HARNESS, alias="lm-eval-harness")
+@EvaluationRegistry.register(
+    name=LM_EVALUATION_HARNESS, alias=LM_EVALUATION_HARNESS_ALIASES
+)
 def integration_eval(
     pipeline: Pipeline,
     datasets: Union[List[str], str],
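
The registration change above swaps the single hard-coded alias string for the shared LM_EVALUATION_HARNESS_ALIASES list, so one decorator call registers the integration under its canonical name and every alias. Below is a minimal, self-contained sketch of that pattern; SimpleRegistry, resolve, and integration_eval_stub are illustrative names for this sketch, not the actual deepsparse/sparsezoo registry API.

from typing import Callable, Dict, List, Optional, Union


class SimpleRegistry:
    _registry: Dict[str, Callable] = {}

    @classmethod
    def register(cls, name: str, alias: Optional[Union[str, List[str]]] = None):
        aliases = [alias] if isinstance(alias, str) else list(alias or [])

        def decorator(fn: Callable) -> Callable:
            # register the canonical name and every alias against the same callable
            for key in (name, *aliases):
                cls._registry[key] = fn
            return fn

        return decorator

    @classmethod
    def resolve(cls, name: str) -> Callable:
        return cls._registry[name]


@SimpleRegistry.register(
    name="lm-evaluation-harness", alias=["lm-eval-harness", "lm-eval"]
)
def integration_eval_stub():
    return "running lm-evaluation-harness"


# any registered spelling resolves to the same integration function
assert SimpleRegistry.resolve("lm-eval") is integration_eval_stub

With this shape, supporting a new spelling only requires appending to the alias list in utils.py; the decorator call does not need to change.
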
src/deepsparse/evaluation/utils.py: 12 changes (10 additions & 2 deletions)
@@ -18,6 +18,7 @@

 from deepsparse import Pipeline
 from deepsparse.operators.engine_operator import DEEPSPARSE_ENGINE
+from sparsezoo.utils.registry import standardize_lookup_name


 __all__ = [
@@ -29,20 +30,27 @@
 _LOGGER = logging.getLogger(__name__)

 LM_EVALUATION_HARNESS = "lm-evaluation-harness"
+LM_EVALUATION_HARNESS_ALIASES = ["lm-eval-harness", "lm-eval"]
 PERPLEXITY = "perplexity"


 def potentially_check_dependency_import(integration_name: str) -> bool:
     """
     Check if the `integration_name` requires importing a dependency.
+    Checking involves comparing the `integration_name` to the known
+    integrations (e.g. 'lm-evaluation-harness') or their aliases.
     If so, check if the dependency is installed and return True if it is.
     Otherwise, return False.

-    :param integration_name: The name of the integration to check
+    :param integration_name: The name of the integration to check. The name
+        is standardized using `standardize_lookup_name` before checking.
     :return: True if the dependency is installed, False otherwise
     """
+    integration_name = standardize_lookup_name(integration_name)

-    if integration_name == LM_EVALUATION_HARNESS:
+    if integration_name == LM_EVALUATION_HARNESS or any(
+        integration_name == alias for alias in LM_EVALUATION_HARNESS_ALIASES
+    ):
         from deepsparse.evaluation.integrations import try_import_lm_evaluation_harness

         try_import_lm_evaluation_harness()
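
The utils.py change routes every lookup through sparsezoo's standardize_lookup_name before comparing against the canonical name and its aliases. The sketch below is a hypothetical approximation of that flow: _standardize is an assumed stand-in for whatever standardize_lookup_name actually does (lowercasing and hyphenating here), and is_lm_evaluation_harness restates the PR's equality/any() comparison as a set-membership test.

# Hypothetical sketch: `_standardize` is an assumed approximation of
# sparsezoo's `standardize_lookup_name`, not its actual implementation.
LM_EVALUATION_HARNESS = "lm-evaluation-harness"
LM_EVALUATION_HARNESS_ALIASES = ["lm-eval-harness", "lm-eval"]


def _standardize(name: str) -> str:
    # normalize case and separators so "LM_Eval" and "lm-eval" compare equal
    return name.strip().lower().replace("_", "-").replace(" ", "-")


def is_lm_evaluation_harness(integration_name: str) -> bool:
    name = _standardize(integration_name)
    # same outcome as the PR's `==` / `any(...)` comparison, as set membership
    return name in {LM_EVALUATION_HARNESS, *LM_EVALUATION_HARNESS_ALIASES}


print(is_lm_evaluation_harness("LM_Eval"))                # True
print(is_lm_evaluation_harness("lm-evaluation-harness"))  # True
print(is_lm_evaluation_harness("perplexity"))             # False

Under that assumption, user-supplied spellings such as "LM_Eval" or "lm-eval" resolve to the same integration, which is the point of adding the alias list.
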