saved_model_utils.h
/*
==============================================================================
MIT License
Copyright 2022 Institute for Automotive Engineering of RWTH Aachen University.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================
*/

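/**
 * @file saved_model_utils.h
 * @brief Utility functions for loading and inspecting TensorFlow SavedModels.
 */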
#pragma once

#include <algorithm>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

#include <tensorflow/cc/saved_model/loader.h>
#include <tensorflow/cc/saved_model/tag_constants.h>
#include <tensorflow_cpp/utils.h>  // provides makeSessionOptions() (include path assumed)


namespace tensorflow_cpp {


namespace tf = tensorflow;


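/**
 * @brief Loads a TensorFlow SavedModel from a directory into a new session.
 *
 * Minimal usage sketch ("/path/to/saved_model" is a placeholder path):
 * @code{.cpp}
 * tf::SavedModelBundleLite model = loadSavedModel("/path/to/saved_model");
 * tf::Session* session = model.GetSession();
 * @endcode
 *
 * @param  dir                              SavedModel directory
 * @param  allow_growth                     whether to dynamically grow GPU memory
 * @param  per_process_gpu_memory_fraction  GPU memory fraction to allocate per process
 * @param  visible_device_list              list of visible GPU devices
 * @return loaded SavedModel bundle
 * @throws std::runtime_error if the SavedModel cannot be loaded
 */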
inline tf::SavedModelBundleLite loadSavedModel(
    const std::string& dir, const bool allow_growth = true,
    const double per_process_gpu_memory_fraction = 0,
    const std::string& visible_device_list = "") {

  tf::SavedModelBundleLite saved_model;
  tf::SessionOptions session_options = makeSessionOptions(
      allow_growth, per_process_gpu_memory_fraction, visible_device_list);
  tf::Status status =
      tf::LoadSavedModel(session_options, tf::RunOptions(), dir,
                         {tf::kSavedModelTagServe}, &saved_model);
  if (!status.ok())
    throw std::runtime_error("Failed to load SavedModel: " + status.ToString());

  return saved_model;
}


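/**
 * @brief Loads a TensorFlow SavedModel from a directory into a new session.
 *
 * @param  dir                              SavedModel directory
 * @param  allow_growth                     whether to dynamically grow GPU memory
 * @param  per_process_gpu_memory_fraction  GPU memory fraction to allocate per process
 * @param  visible_device_list              list of visible GPU devices
 * @return session that the SavedModel is loaded in
 */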
inline tf::Session* loadSavedModelIntoNewSession(
    const std::string& dir, const bool allow_growth = true,
    const double per_process_gpu_memory_fraction = 0,
    const std::string& visible_device_list = "") {

  tf::SavedModelBundleLite saved_model = loadSavedModel(
      dir, allow_growth, per_process_gpu_memory_fraction, visible_device_list);
  tf::Session* session = saved_model.GetSession();

  return session;
}


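/**
 * @brief Returns the session that a SavedModel is loaded in.
 *
 * @param  saved_model  SavedModel bundle
 * @return session that the SavedModel is loaded in
 */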
inline tf::Session* getSessionFromSavedModel(
    const tf::SavedModelBundleLite& saved_model) {

  return saved_model.GetSession();
}


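/**
 * @brief Determines the node name from a SavedModel layer name.
 *
 * Usage sketch, assuming `model` was returned by loadSavedModel() and
 * "input_layer" is a hypothetical key in the model's serving signature:
 * @code{.cpp}
 * std::string node_name = getSavedModelNodeByLayerName(model, "input_layer");
 * @endcode
 *
 * @param  saved_model  SavedModel bundle
 * @param  layer_name   layer name (signature key)
 * @param  signature    signature to search, defaults to "serving_default"
 * @return node name, empty if the layer is not found
 */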
inline std::string getSavedModelNodeByLayerName(
    const tf::SavedModelBundleLite& saved_model, const std::string& layer_name,
    const std::string& signature = "serving_default") {

  std::string node_name;
  const tf::SignatureDef& model_def = saved_model.GetSignatures().at(signature);
  auto inputs = model_def.inputs();
  auto outputs = model_def.outputs();
  auto& nodes = inputs;
  nodes.insert(outputs.begin(), outputs.end());
  for (const auto& node : nodes) {
    const std::string& key = node.first;
    const tf::TensorInfo& info = node.second;
    if (key == layer_name) {
      node_name = info.name();
      break;
    }
  }

  return node_name;
}


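/**
 * @brief Determines the layer name from a SavedModel node name.
 *
 * @param  saved_model  SavedModel bundle
 * @param  node_name    node name
 * @param  signature    signature to search, defaults to "serving_default"
 * @return layer name (signature key), empty if the node is not found
 */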
inline std::string getSavedModelLayerByNodeName(
    const tf::SavedModelBundleLite& saved_model, const std::string& node_name,
    const std::string& signature = "serving_default") {

  std::string layer_name;
  const tf::SignatureDef& model_def = saved_model.GetSignatures().at(signature);
  auto inputs = model_def.inputs();
  auto outputs = model_def.outputs();
  auto& nodes = inputs;
  nodes.insert(outputs.begin(), outputs.end());
  for (const auto& node : nodes) {
    const std::string& key = node.first;
    const tf::TensorInfo& info = node.second;
    if (info.name() == node_name) {
      layer_name = key;
      break;
    }
  }

  return layer_name;
}


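/**
 * @brief Determines the names of the SavedModel input nodes.
 *
 * Usage sketch, assuming `model` was returned by loadSavedModel():
 * @code{.cpp}
 * std::vector<std::string> input_nodes  = getSavedModelInputNames(model);
 * std::vector<std::string> input_layers = getSavedModelInputNames(model, true);
 * @endcode
 *
 * @param  saved_model  SavedModel bundle
 * @param  layer_names  if true, return layer names (signature keys) instead of node names
 * @param  signature    signature to search, defaults to "serving_default"
 * @return input node names (or layer names), sorted by node name
 */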
inline std::vector<std::string> getSavedModelInputNames(
    const tf::SavedModelBundleLite& saved_model, const bool layer_names = false,
    const std::string& signature = "serving_default") {

  std::vector<std::string> names;
  const tf::SignatureDef& model_def = saved_model.GetSignatures().at(signature);
  for (const auto& node : model_def.inputs()) {
    const std::string& key = node.first;
    const tf::TensorInfo& info = node.second;
    names.push_back(info.name());
  }
  std::sort(names.begin(), names.end());

  if (layer_names) {
    std::vector<std::string> node_names = names;
    names = {};
    for (const auto& node_name : node_names)
      names.push_back(
          getSavedModelLayerByNodeName(saved_model, node_name, signature));
  }

  return names;
}


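/**
 * @brief Determines the names of the SavedModel output nodes.
 *
 * @param  saved_model  SavedModel bundle
 * @param  layer_names  if true, return layer names (signature keys) instead of node names
 * @param  signature    signature to search, defaults to "serving_default"
 * @return output node names (or layer names), sorted by node name
 */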
inline std::vector<std::string> getSavedModelOutputNames(
    const tf::SavedModelBundleLite& saved_model, const bool layer_names = false,
    const std::string& signature = "serving_default") {

  std::vector<std::string> names;
  const tf::SignatureDef& model_def = saved_model.GetSignatures().at(signature);
  for (const auto& node : model_def.outputs()) {
    const std::string& key = node.first;
    const tf::TensorInfo& info = node.second;
    names.push_back(info.name());
  }
  std::sort(names.begin(), names.end());

  if (layer_names) {
    std::vector<std::string> node_names = names;
    names = {};
    for (const auto& node_name : node_names)
      names.push_back(
          getSavedModelLayerByNodeName(saved_model, node_name, signature));
  }

  return names;
}


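/**
 * @brief Determines the shape of a given SavedModel node.
 *
 * Usage sketch, assuming `model` and `node_name` come from the functions above;
 * an unknown (e.g. dynamic batch) dimension is reported as -1:
 * @code{.cpp}
 * std::vector<int> shape = getSavedModelNodeShape(model, node_name);
 * @endcode
 *
 * @param  saved_model  SavedModel bundle
 * @param  node_name    node name
 * @param  signature    signature to search, defaults to "serving_default"
 * @return node shape, empty if the node is not found
 */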
inline std::vector<int> getSavedModelNodeShape(
    const tf::SavedModelBundleLite& saved_model, const std::string& node_name,
    const std::string& signature = "serving_default") {

  std::vector<int> node_shape;
  const tf::SignatureDef& model_def = saved_model.GetSignatures().at(signature);
  auto inputs = model_def.inputs();
  auto outputs = model_def.outputs();
  auto& nodes = inputs;
  nodes.insert(outputs.begin(), outputs.end());
  for (const auto& node : nodes) {
    const std::string& key = node.first;
    const tf::TensorInfo& info = node.second;
    if (info.name() == node_name) {
      const auto& shape = info.tensor_shape();
      for (int d = 0; d < shape.dim_size(); d++)
        node_shape.push_back(shape.dim(d).size());
      break;
    }
  }

  return node_shape;
}


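/**
 * @brief Determines the datatype of a given SavedModel node.
 *
 * @param  saved_model  SavedModel bundle
 * @param  node_name    node name
 * @param  signature    signature to search, defaults to "serving_default"
 * @return node datatype, tf::DT_INVALID if the node is not found
 */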
inline tf::DataType getSavedModelNodeType(
    const tf::SavedModelBundleLite& saved_model, const std::string& node_name,
    const std::string& signature = "serving_default") {

  tf::DataType type = tf::DT_INVALID;
  const tf::SignatureDef& model_def = saved_model.GetSignatures().at(signature);
  auto inputs = model_def.inputs();
  auto outputs = model_def.outputs();
  auto& nodes = inputs;
  nodes.insert(outputs.begin(), outputs.end());
  for (const auto& node : nodes) {
    const std::string& key = node.first;
    const tf::TensorInfo& info = node.second;
    if (info.name() == node_name) {
      type = info.dtype();
      break;
    }
  }

  return type;
}


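/**
 * @brief Creates a human-readable string describing the signatures, inputs,
 * and outputs of a SavedModel.
 *
 * Usage sketch, assuming `model` was returned by loadSavedModel():
 * @code{.cpp}
 * std::string info = getSavedModelInfoString(model);
 * @endcode
 *
 * @param  saved_model  SavedModel bundle
 * @return info string
 */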
inline std::string getSavedModelInfoString(
    const tf::SavedModelBundleLite& saved_model) {

  std::stringstream ss;
  ss << "SavedModel Info:" << std::endl;

  ss << "Signatures:" << std::endl;
  const auto& signatures = saved_model.GetSignatures();
  for (const auto& sig : signatures) {

    ss << " " << sig.first << std::endl;
    const auto& def = sig.second;

    ss << " Inputs: " << def.inputs_size() << std::endl;
    for (const auto& node : def.inputs()) {
      ss << " " << node.first << ": " << node.second.name() << std::endl;
      ss << " Shape: [ ";
      for (int d = 0; d < node.second.tensor_shape().dim_size(); d++) {
        ss << node.second.tensor_shape().dim(d).size() << ", ";
      }
      ss << "]" << std::endl;
      ss << " DataType: " << tf::DataTypeString(node.second.dtype())
         << std::endl;
    }

    ss << " Outputs: " << def.outputs_size() << std::endl;
    for (const auto& node : def.outputs()) {
      ss << " " << node.first << ": " << node.second.name() << std::endl;
      ss << " Shape: [ ";
      for (int d = 0; d < node.second.tensor_shape().dim_size(); d++) {
        ss << node.second.tensor_shape().dim(d).size() << ", ";
      }
      ss << "]" << std::endl;
      ss << " DataType: " << tf::DataTypeString(node.second.dtype())
         << std::endl;
    }
  }

  return ss.str();
}


}  // namespace tensorflow_cpp