[CPU][DEBUG_CAPS] Enable verbose mode for ov data types (#28747)
### Details:
Not all OV element types are supported by oneDNN, so an exception is thrown when the CPU graph contains tensors of such data types.
This PR enables verbose mode to dump the descriptors of such tensors as well.
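For illustration: the change splits the conversion into a non-throwing probe and the original throwing form, so the verbose dumper can check support up front instead of catching an exception. A caller-side fragment assuming the DnnlExtensionUtils declarations from the diff below (prec, desc and the two helpers are hypothetical placeholders, not part of this commit):

// Hypothetical caller; only the DnnlExtensionUtils names come from this PR.
auto maybe_dt = DnnlExtensionUtils::ElementTypeToDataType(prec, DnnlExtensionUtils::nothrow_tag{});
if (maybe_dt) {
    use_onednn_path(*maybe_dt);        // precision is representable in oneDNN
} else {
    dump_ov_precision_and_dims(desc);  // fall back to OV-side formatting, no exception
}
// Existing call sites are unchanged: the defaulted throw_tag keeps the throwing behavior.
dnnl::memory::data_type dt = DnnlExtensionUtils::ElementTypeToDataType(prec);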

### Tickets:
N/A
maxnick authored Feb 3, 2025
1 parent 8ff7f58 commit 7ed80f9
Showing 3 changed files with 45 additions and 14 deletions.
13 changes: 11 additions & 2 deletions src/plugins/intel_cpu/src/dnnl_extension_utils.cpp
@@ -47,7 +47,9 @@ uint8_t DnnlExtensionUtils::sizeOfDataType(dnnl::memory::data_type dataType) {
}
}

dnnl::memory::data_type DnnlExtensionUtils::ElementTypeToDataType(const ov::element::Type& elementType) {
std::optional<dnnl::memory::data_type> DnnlExtensionUtils::ElementTypeToDataType(
const ov::element::Type& elementType,
DnnlExtensionUtils::nothrow_tag) noexcept {
switch (elementType) {
case ov::element::f32:
return memory::data_type::f32;
@@ -81,11 +83,18 @@ dnnl::memory::data_type DnnlExtensionUtils::ElementTypeToDataType(const ov::elem
case ov::element::undefined:
return memory::data_type::undef;
default: {
OPENVINO_THROW("CPU plugin does not support ", elementType.to_string(), " for use with oneDNN.");
return {};
}
}
}

dnnl::memory::data_type DnnlExtensionUtils::ElementTypeToDataType(const ov::element::Type& elementType,
DnnlExtensionUtils::throw_tag) {
auto&& result = ElementTypeToDataType(elementType, nothrow_tag{});
OPENVINO_ASSERT(result, "CPU plugin does not support ", elementType.to_string(), " for use with oneDNN.");
return result.value();
}

ov::element::Type DnnlExtensionUtils::DataTypeToElementType(const dnnl::memory::data_type& dataType) {
switch (dataType) {
case memory::data_type::f32:
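The layering above can be sketched in isolation: the non-throwing overload owns the switch and reports failure through std::optional, while the throwing overload is a thin wrapper that turns an empty result into an error. A minimal self-contained sketch with hypothetical names, using int to stand in for dnnl::memory::data_type:

#include <cstdio>
#include <optional>
#include <stdexcept>
#include <string>

struct throw_tag {};
struct nothrow_tag {};

// Non-throwing lookup: the single source of truth for the mapping.
std::optional<int> to_backend_type(const std::string& type, nothrow_tag) noexcept {
    if (type == "f32") {
        return 1;
    }
    if (type == "i8") {
        return 2;
    }
    return std::nullopt;  // unsupported type: reported via the return value, no exception
}

// Throwing wrapper: reuses the lookup above and turns "no mapping" into an error.
int to_backend_type(const std::string& type, throw_tag = throw_tag{}) {
    auto result = to_backend_type(type, nothrow_tag{});
    if (!result) {
        throw std::runtime_error("unsupported type: " + type);
    }
    return *result;
}

int main() {
    std::printf("f32 -> %d\n", to_backend_type("f32"));    // defaulted throw_tag, as at existing call sites
    if (auto r = to_backend_type("f64", nothrow_tag{})) {  // non-throwing probe, as in the verbose dumper
        std::printf("f64 -> %d\n", *r);
    } else {
        std::printf("f64 has no backend mapping\n");
    }
    return 0;
}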
12 changes: 11 additions & 1 deletion src/plugins/intel_cpu/src/dnnl_extension_utils.h
@@ -8,6 +8,7 @@
*/
#pragma once

#include <optional>
#include <string>

#include "common/c_types_map.hpp"
@@ -25,9 +26,18 @@ class Shape;
class IMemory;

class DnnlExtensionUtils {
public:
struct throw_tag {};
struct nothrow_tag {};

public:
static uint8_t sizeOfDataType(dnnl::memory::data_type dataType);
static dnnl::memory::data_type ElementTypeToDataType(const ov::element::Type& elementType);
static dnnl::memory::data_type ElementTypeToDataType(const ov::element::Type& elementType,
throw_tag tag = throw_tag{});

static std::optional<dnnl::memory::data_type> ElementTypeToDataType(const ov::element::Type& elementType,
nothrow_tag) noexcept;

static ov::element::Type DataTypeToElementType(const dnnl::memory::data_type& dataType);
static Dim convertToDim(const dnnl::memory::dim& dim);
static dnnl::memory::dim convertToDnnlDim(const Dim& dim);
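A note on the header shape: using empty tag types rather than a boolean flag lets the two overloads differ in return type and noexcept specification, and the defaulted throw_tag keeps existing call sites compiling unchanged. The standard library uses the same tag idea for the non-throwing form of operator new; a small self-contained reminder:

#include <cstdio>
#include <new>

int main() {
    // new (std::nothrow) reports failure through its return value instead of throwing std::bad_alloc.
    int* data = new (std::nothrow) int[16];
    if (data == nullptr) {
        std::printf("allocation failed\n");
        return 1;
    }
    std::printf("allocation succeeded\n");
    delete[] data;
    return 0;
}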
34 changes: 23 additions & 11 deletions src/plugins/intel_cpu/src/utils/verbose.cpp
@@ -105,36 +105,48 @@ void Verbose::printInfo() {
written_total += size;
};

auto formatMemDesc = [&](const dnnl_memory_desc_t& desc, std::string& prefix) {
auto formatMemDesc = [&](const MemoryDescPtr& desc, std::string& prefix) {
prefix = colorize(BLUE, prefix);
written = snprintf(portsInfo + written_total, CPU_VERBOSE_DAT_LEN - written_total, " ");
shift(written);
written = snprintf(portsInfo + written_total, CPU_VERBOSE_DAT_LEN - written_total, "%s", prefix.c_str());
shift(written);
std::string fmt_str = dnnl::impl::md2fmt_str("", desc, dnnl::impl::format_kind_t::dnnl_format_kind_undef);
std::string fmt_str = {};
std::string dim_str = {};
if (DnnlExtensionUtils::ElementTypeToDataType(desc->getPrecision(), DnnlExtensionUtils::nothrow_tag{})) {
if (auto dnnl_desc = MemoryDescUtils::convertToDnnlMemoryDesc(desc)->getDnnlDesc()) {
fmt_str = dnnl::impl::md2fmt_str("", dnnl_desc.get(), dnnl::impl::format_kind_t::dnnl_format_kind_undef);
dim_str = dnnl::impl::md2dim_str(dnnl_desc.get());
} else {
fmt_str = "empty";
}
} else {
fmt_str = desc->getPrecision().to_string();
if (const auto& dims = desc->getShape().getDims(); !dims.empty()) {
dim_str = dim2str(dims.front());
std::for_each(++(dims.begin()), dims.end(), [&dim_str](size_t dim) {
dim_str.append("x" + std::to_string(dim));
});
}
}
written = snprintf(portsInfo + written_total, CPU_VERBOSE_DAT_LEN - written_total, "%s", fmt_str.c_str());
shift(written);
written = snprintf(portsInfo + written_total, CPU_VERBOSE_DAT_LEN - written_total, ":");
shift(written);
std::string dim_str = dnnl::impl::md2dim_str(desc);
written = snprintf(portsInfo + written_total, CPU_VERBOSE_DAT_LEN - written_total, "%s", dim_str.c_str());
shift(written);
};

for (size_t i = 0; i < node->getParentEdges().size(); i++) {
std::string prefix("src:" + std::to_string(i) + ':');
formatMemDesc(MemoryDescUtils::convertToDnnlMemoryDesc(node->getParentEdgeAt(i)->getMemory().getDesc().clone())
->getDnnlDesc()
.get(),
prefix);
const auto& desc = node->getParentEdgeAt(i)->getMemory().getDescPtr();
formatMemDesc(desc, prefix);
}

for (size_t i = 0; i < node->getChildEdges().size(); i++) {
std::string prefix("dst:" + std::to_string(i) + ':');
formatMemDesc(MemoryDescUtils::convertToDnnlMemoryDesc(node->getChildEdgeAt(i)->getMemory().getDesc().clone())
->getDnnlDesc()
.get(),
prefix);
const auto& desc = node->getChildEdgeAt(i)->getMemory().getDescPtr();
formatMemDesc(desc, prefix);
}

std::string post_ops;
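The verbose.cpp change above is what makes the dump work for precisions oneDNN does not know: when the non-throwing conversion reports no oneDNN counterpart, the descriptor is rendered from the OV side as the precision name followed by the dims joined with 'x'. A minimal self-contained sketch of that fallback formatting, with a hypothetical helper name and std::to_string standing in for the plugin's dim2str:

#include <cstddef>
#include <cstdio>
#include <string>
#include <vector>

// Hypothetical helper mirroring the else-branch above: "<precision>:<d0>x<d1>x...".
std::string format_fallback(const std::string& precision, const std::vector<size_t>& dims) {
    std::string dim_str;
    if (!dims.empty()) {
        dim_str = std::to_string(dims.front());
        for (size_t i = 1; i < dims.size(); ++i) {
            dim_str.append("x" + std::to_string(dims[i]));
        }
    }
    return precision + ":" + dim_str;
}

int main() {
    // Prints "f64:1x3x224x224".
    std::printf("%s\n", format_fallback("f64", {1, 3, 224, 224}).c_str());
    return 0;
}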
