Skip to content

Commit

Permalink
[JS API] Fix inference for outputs with missing names
Browse files Browse the repository at this point in the history
Signed-off-by: Kirill Suvorov <kirill.suvorov@intel.com>
  • Loading branch information
Retribution98 committed Jan 27, 2025
1 parent 59cc460 commit 93d1fff
Show file tree
Hide file tree
Showing 5 changed files with 87 additions and 9 deletions.
16 changes: 14 additions & 2 deletions src/bindings/js/node/src/infer_request.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,13 @@ Napi::Value InferRequestWrap::get_output_tensors(const Napi::CallbackInfo& info)
auto tensor = _infer_request.get_tensor(node);
auto new_tensor = ov::Tensor(tensor.get_element_type(), tensor.get_shape());
tensor.copy_to(new_tensor);
outputs_obj.Set(node.get_any_name(), TensorWrap::wrap(info.Env(), new_tensor));
std::string name;
if (node.get_names().empty()) {
name = node.get_node()->get_name();
} else {
name = node.get_any_name();
}
outputs_obj.Set(name, TensorWrap::wrap(info.Env(), new_tensor));
}
return outputs_obj;
}
Expand Down Expand Up @@ -215,7 +221,13 @@ void performInferenceThread(TsfnContext* context) {
const auto& tensor = context->_ir->get_tensor(node);
auto new_tensor = ov::Tensor(tensor.get_element_type(), tensor.get_shape());
tensor.copy_to(new_tensor);
outputs.insert({node.get_any_name(), new_tensor});
std::string name;
if (node.get_names().empty()) {
name = node.get_node()->get_name();
} else {
name = node.get_any_name();
}
outputs.insert({name, new_tensor});
}

context->result = outputs;
Expand Down
17 changes: 15 additions & 2 deletions src/bindings/js/node/src/node_output.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,14 @@ Napi::Value Output<ov::Node>::get_partial_shape(const Napi::CallbackInfo& info)
}

// Returns a JS string naming this output port. Uses the tensor's
// any_name when the port has tensor names; otherwise falls back to the
// producing node's friendly name (ports can lack tensor names, e.g.
// after the model IR was edited).
Napi::Value Output<ov::Node>::get_any_name(const Napi::CallbackInfo& info) {
    const bool has_tensor_names = !_output.get_names().empty();
    const std::string name = has_tensor_names ? _output.get_any_name() : _output.get_node()->get_name();
    return Napi::String::New(info.Env(), name);
}

Output<const ov::Node>::Output(const Napi::CallbackInfo& info)
Expand Down Expand Up @@ -88,5 +95,11 @@ Napi::Value Output<const ov::Node>::get_partial_shape(const Napi::CallbackInfo&
}

// Const-node counterpart of Output<ov::Node>::get_any_name: returns the
// tensor's any_name when available, otherwise the node's friendly name,
// so callers always receive a non-throwing, usable identifier.
Napi::Value Output<const ov::Node>::get_any_name(const Napi::CallbackInfo& info) {
    const bool has_tensor_names = !_output.get_names().empty();
    const std::string name = has_tensor_names ? _output.get_any_name() : _output.get_node()->get_name();
    return Napi::String::New(info.Env(), name);
}
44 changes: 44 additions & 0 deletions src/bindings/js/node/tests/unit/infer_request.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -300,3 +300,47 @@ describe('ov.InferRequest tests', () => {
});
});
});

// Verifies inference works when an output port has no tensor names:
// the bindings must fall back to the node's friendly name as the key
// of the results object.
describe('ov.InferRequest tests with missing outputs names', () => {
  const modelV3Small = testModels.modelV3Small;
  let compiledModel = null;
  let tensorData = null;
  let tensor = null;
  let inferRequest = null;

  before(async () => {
    await isModelAvailable(modelV3Small);

    const fs = require('fs');
    const core = new ov.Core();

    // Strip the output tensor name from the IR so the output port ends
    // up with an empty names set, reproducing the "missing names" case.
    let model_data = fs.readFileSync(getModelPath(modelV3Small).xml, 'utf8');
    const weights = fs.readFileSync(getModelPath(modelV3Small).bin);
    model_data = model_data.replace("names=\"MobilenetV3/Predictions/Softmax:0\"", "");
    const model = core.readModelSync(Buffer.from(model_data, 'utf8'), weights);

    compiledModel = core.compileModelSync(model, 'CPU');
    inferRequest = compiledModel.createInferRequest();

    // 150528 = 1 * 224 * 224 * 3 elements, matching modelV3Small.inputShape.
    tensorData = Float32Array.from(
      { length: 150528 },
      () => Math.random() + epsilon,
    );
    tensor = new ov.Tensor(ov.element.f32, modelV3Small.inputShape, tensorData);
  });

  it('Test infer(inputData: Tensor[])', () => {
    const outputLayer = compiledModel.outputs[0];
    const result = inferRequest.infer([tensor]);
    // The result key must equal the fallback name reported by the port.
    assert.deepStrictEqual(Object.keys(result), [outputLayer.toString()]);
    assert.ok(result[outputLayer] instanceof ov.Tensor);
  });

  // FIX: the original test invoked inferAsync(...).then(...) without
  // returning or awaiting the promise, so mocha ended the test before
  // the assertions ran and any failure was silently swallowed. Awaiting
  // inside an async callback makes assertion failures actually fail.
  it('Test inferAsync(inputData: Tensor[])', async () => {
    const result = await inferRequest.inferAsync([tensor]);
    const outputLayer = compiledModel.outputs[0];
    assert.deepStrictEqual(Object.keys(result), [outputLayer.toString()]);
    assert.ok(result[outputLayer] instanceof ov.Tensor);
  });
});
1 change: 1 addition & 0 deletions src/bindings/js/node/tests/unit/setup.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@ if (require.main === module) {

// Downloads every model the unit tests depend on, one after another.
async function main() {
  const requiredModels = [testModels.testModelFP32, testModels.modelV3Small];
  for (const model of requiredModels) {
    await downloadTestModel(model);
  }
}
18 changes: 13 additions & 5 deletions src/bindings/js/node/tests/unit/utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,16 @@ const testModels = {
binURL:
'https://media.githubusercontent.com/media/openvinotoolkit/testdata/master/models/test_model/test_model_fp32.bin',
},
  // MobileNet V3 Small (TF-converted, FP32), fetched from the OpenVINO
  // notebooks model storage. Used by the infer_request tests that
  // exercise outputs with missing tensor names.
  // NOTE(review): the "mobelinet" spelling below matches the remote
  // path as published — do not "correct" it.
  modelV3Small: {
    xml: 'v3-small_224_1.0_float.xml',
    bin: 'v3-small_224_1.0_float.bin',
    // NHWC input; presumably 1001 classes (ImageNet + background) — matches outputShape.
    inputShape: [1, 224, 224, 3],
    outputShape: [1, 1001],
    xmlURL:
      'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/mobelinet-v3-tf/FP32/v3-small_224_1.0_float.xml',
    binURL:
      'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/mobelinet-v3-tf/FP32/v3-small_224_1.0_float.bin',
  },
};

module.exports = {
Expand Down Expand Up @@ -59,12 +69,10 @@ function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}

function getModelPath(isFP16 = false) {
const modelName = `test_model_fp${isFP16 ? 16 : 32}`;

// Resolves the on-disk .xml/.bin locations under modelDir for a
// test-model descriptor (defaults to the FP32 test model).
function getModelPath(model = testModels.testModelFP32) {
  const inModelDir = (fileName) => path.join(modelDir, fileName);

  return {
    xml: inModelDir(model.xml),
    bin: inModelDir(model.bin),
  };
}

Expand Down

0 comments on commit 93d1fff

Please sign in to comment.