forked from openvinotoolkit/openvino
-
Notifications
You must be signed in to change notification settings - Fork 0
/
infer_request.cpp
151 lines (133 loc) · 5.68 KB
/
infer_request.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "infer_request.hpp"
#include <ie_blob.h>
#include <ie_layouts.h>
#include <cassert>
#include <description_buffer.hpp>
#include <ie_algorithm.hpp>
#include <map>
#include <string>
#include "itt.hpp"
using namespace HeteroPlugin;
using namespace InferenceEngine;
using namespace InferenceEngine::details;
// Constructs a hetero infer request over pre-created per-subgraph requests
// (ov::Node-based I/O variant). All wiring of blobs between subgraphs is
// delegated to CreateInferRequest().
//
// inputs/outputs                    - network-level input/output nodes
// inferRequests                     - one entry per compiled subgraph
// subgraphInputToOutputBlobNames    - maps a consumer subgraph's input blob name
//                                     to the producer subgraph's output blob name
HeteroInferRequest::HeteroInferRequest(
const std::vector<std::shared_ptr<const ov::Node>>& inputs,
const std::vector<std::shared_ptr<const ov::Node>>& outputs,
const SubRequestsList& inferRequests,
const std::unordered_map<std::string, std::string>& subgraphInputToOutputBlobNames)
: IInferRequestInternal(inputs, outputs),
_inferRequests(inferRequests) {
CreateInferRequest(subgraphInputToOutputBlobNames);
}
// Constructs a hetero infer request over pre-created per-subgraph requests
// (legacy InputsDataMap/OutputsDataMap variant). All wiring of blobs between
// subgraphs is delegated to CreateInferRequest().
HeteroInferRequest::HeteroInferRequest(
InferenceEngine::InputsDataMap networkInputs,
InferenceEngine::OutputsDataMap networkOutputs,
const SubRequestsList& inferRequests,
const std::unordered_map<std::string, std::string>& subgraphInputToOutputBlobNames)
: IInferRequestInternal(networkInputs, networkOutputs),
_inferRequests(inferRequests) {
CreateInferRequest(subgraphInputToOutputBlobNames);
}
// Creates one infer request per subnetwork and wires their blobs together:
//  - blobs that are also network-level inputs/outputs are registered in
//    _subRequestFromBlobName so SetBlob/GetBlob can be forwarded to the
//    sub-request that owns them;
//  - intermediate blobs (produced by one subgraph, consumed by another) are
//    cached in _blobs under the producer-side name and shared with consumers.
//
// subgraphInputToOutputBlobNames - maps a consumer subgraph's input blob name
//                                  to the producer subgraph's output blob name.
// Throws if the network has no declared inputs or outputs.
void HeteroInferRequest::CreateInferRequest(
    const std::unordered_map<std::string, std::string>& subgraphInputToOutputBlobNames) {
    if (_networkOutputs.empty() || _networkInputs.empty()) {
        IE_THROW() << "Internal error: no information about network's output/input";
    }
    auto requestBlob([&](const std::string& blobName, InferenceEngine::SoIInferRequestInternal& r, bool output) {
        // Map a consumer-side input name to the producer-side output name, if any.
        std::string intermediateBlobName = blobName;
        auto itName = subgraphInputToOutputBlobNames.find(blobName);
        if (itName != subgraphInputToOutputBlobNames.end()) {
            intermediateBlobName = itName->second;
        }
        if (output) {
            if (InferenceEngine::details::contains(_networkOutputs, blobName)) {
                _subRequestFromBlobName.emplace(blobName, r);
            } else {
                // Fetch the producer's blob exactly once and cache it for the
                // consumer side. (Previously GetBlob() was called twice here and
                // the first result was stored in an unused local.)
                _blobs.emplace(intermediateBlobName, r->GetBlob(blobName));
            }
        } else {
            if (InferenceEngine::details::contains(_networkInputs, blobName)) {
                _subRequestFromBlobName.emplace(blobName, r);
            } else {
                // Intermediate input: reuse the blob cached by the producer above.
                r->SetBlob(blobName, _blobs.at(intermediateBlobName));
            }
        }
    });
    // Create requests and register all subgraph OUTPUTS first, so intermediate
    // blobs exist in _blobs before any consumer looks them up.
    for (auto&& desc : _inferRequests) {
        desc._request = {desc._network->CreateInferRequest(), desc._network._so};
        desc._request->setModelInputsOutputs(desc._network->getInputs(), desc._network->getOutputs());
        for (auto&& outputInfo : desc._network->GetOutputsInfo()) {
            requestBlob(outputInfo.first, desc._request, true);
        }
    }
    // Then wire all subgraph INPUTS: network-level ones are exposed, the rest
    // are connected to the already-cached producer blobs.
    for (auto&& desc : _inferRequests) {
        for (auto&& inputInfo : desc._network->GetInputsInfo()) {
            requestBlob(inputInfo.first, desc._request, false);
        }
    }
}
// Forwards the user-provided blob to the sub-request that owns the
// network-level input/output with this name; throws if no such request exists.
void HeteroInferRequest::SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr& blob) {
    const auto found = _subRequestFromBlobName.find(name);
    if (found == _subRequestFromBlobName.end()) {
        IE_THROW() << "There is no infer requests binded to blob with name: " << name;
    }
    found->second->SetBlob(name, blob);
}
// Retrieves the blob from the sub-request that owns the network-level
// input/output with this name; throws if no such request exists.
InferenceEngine::Blob::Ptr HeteroInferRequest::GetBlob(const std::string& name) {
    const auto found = _subRequestFromBlobName.find(name);
    if (found == _subRequestFromBlobName.end()) {
        IE_THROW() << "There is no infer requests binded to blob with name: " << name;
    }
    // Keep the sub-plugin's shared library loaded while the returned blob is alive.
    setPointerToSo(found->second._so);
    return found->second->GetBlob(name);
}
// Forwards the blob together with its pre-processing info to the sub-request
// that owns the network-level input with this name; throws if none exists.
void HeteroInferRequest::SetBlob(const std::string& name, const Blob::Ptr& blob, const PreProcessInfo& info) {
    const auto found = _subRequestFromBlobName.find(name);
    if (found == _subRequestFromBlobName.end()) {
        IE_THROW() << "There is no infer requests binded to blob with name: " << name;
    }
    found->second->SetBlob(name, blob, info);
}
// Returns the pre-processing info held by the sub-request that owns the
// network-level input with this name; throws if no such request exists.
const InferenceEngine::PreProcessInfo& HeteroInferRequest::GetPreProcess(const std::string& name) const {
    const auto found = _subRequestFromBlobName.find(name);
    if (found == _subRequestFromBlobName.end()) {
        IE_THROW() << "There is no infer requests binded to blob with name: " << name;
    }
    return found->second->GetPreProcess(name);
}
// Runs every per-subgraph request synchronously, in the order they are stored
// in _inferRequests; intermediate blobs shared at construction time carry the
// data between subgraphs.
void HeteroInferRequest::InferImpl() {
    for (auto&& desc : _inferRequests) {
        OV_ITT_SCOPED_TASK(itt::domains::HeteroPlugin, desc._profilingTask);
        assert(desc._request);
        desc._request->Infer();
    }
}
// Collects the variable states of all sub-requests into a single flat list.
// The aggregated list is rebuilt from scratch on every call.
std::vector<std::shared_ptr<InferenceEngine::IVariableStateInternal>> HeteroInferRequest::QueryState() {
    memoryStates.clear();
    for (auto&& desc : _inferRequests) {
        assert(desc._request);
        auto subStates = desc._request->QueryState();
        memoryStates.insert(memoryStates.end(), subStates.begin(), subStates.end());
    }
    return memoryStates;
}
// Merges the performance counters of all sub-requests into a single map,
// prefixing every entry with "subgraph<index>: " so entries from different
// subgraphs never collide.
std::map<std::string, InferenceEngineProfileInfo> HeteroInferRequest::GetPerformanceCounts() const {
    std::map<std::string, InferenceEngineProfileInfo> result;
    size_t subgraphIndex = 0;
    for (auto&& desc : _inferRequests) {
        const std::string prefix = "subgraph" + std::to_string(subgraphIndex++) + ": ";
        for (auto&& entry : desc._request->GetPerformanceCounts()) {
            result[prefix + entry.first] = entry.second;
        }
    }
    return result;
}