
Commit 60a841c

l-bat authored and alalek committed
Merge pull request opencv#14255 from l-bat:networks_visualization
* Add networks visualization
* Disable CXX11
* Fixed multi-input support
* Added output shapes
* Added color for DLIE/CPU
* Fixed graph colors
1 parent a246d8d commit 60a841c

File tree: 2 files changed, +210 -0 lines

modules/dnn/include/opencv2/dnn/dnn.hpp

Lines changed: 10 additions & 0 deletions
@@ -385,6 +385,16 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         /** Returns true if there are no layers in the network. */
         CV_WRAP bool empty() const;
 
+        /** @brief Dump net to String
+         *  @returns String with structure, hyperparameters, backend, target and fusion
+         *  To see correct backend, target and fusion run after forward().
+         */
+        CV_WRAP String dump();
+        /** @brief Dump net structure, hyperparameters, backend, target and fusion to dot file
+         *  @param path   path to output file with .dot extension
+         *  @see dump()
+         */
+        CV_WRAP void dumpToFile(const String& path);
         /** @brief Adds new layer to the net.
          *  @param name   unique name of the adding layer.
          *  @param type   typename of the adding layer (type must be registered in LayerRegister).
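For context (not part of the patch), here is a minimal usage sketch of the two new methods. The model file names, input shape and output path are placeholders; any network readable by cv::dnn::readNet should work the same way. As the doc comment above notes, the backend, target and fusion information reported by dump() is only final after forward() has run.

    #include <iostream>
    #include <opencv2/dnn.hpp>

    int main()
    {
        using namespace cv;
        // Placeholder model files -- substitute any network supported by readNet.
        dnn::Net net = dnn::readNet("bvlc_googlenet.caffemodel", "bvlc_googlenet.prototxt");
        net.setPreferableBackend(dnn::DNN_BACKEND_OPENCV);
        net.setPreferableTarget(dnn::DNN_TARGET_CPU);

        // Dummy 1x3x224x224 input blob, just so the network can run once:
        // the backend/target/fusion reported by dump() is final only after forward().
        int shape[] = {1, 3, 224, 224};
        Mat blob(4, shape, CV_32F, Scalar(0));
        net.setInput(blob);
        net.forward();

        std::cout << net.dump() << std::endl;   // graph as a Graphviz DOT string
        net.dumpToFile("net.dot");              // same content written to a .dot file
        return 0;
    }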

modules/dnn/src/dnn.cpp

Lines changed: 200 additions & 0 deletions
@@ -47,6 +47,7 @@
 #include <algorithm>
 #include <iostream>
 #include <sstream>
+#include <fstream>
 #include <iterator>
 #include <numeric>
 #include <opencv2/dnn/shape_utils.hpp>
@@ -2921,6 +2922,205 @@ int Net::getLayerId(const String &layer)
     return impl->getLayerId(layer);
 }
 
+String Net::dump()
+{
+    CV_Assert(!empty());
+    std::ostringstream out;
+    std::map<int, LayerData>& map = impl->layers;
+    int prefBackend = impl->preferableBackend;
+    std::vector<std::vector<int> > skippedLayers;
+    std::vector<int> skipId;
+    std::vector<int> allLayers(map.size(), -1);
+    int idPrev = -1;
+    Ptr<BackendNode> prevNode;
+    for (std::map<int, LayerData>::reverse_iterator rit = map.rbegin(); rit != map.rend(); ++rit)
+    {
+        std::map<int, Ptr<BackendNode> >::iterator itBackend = rit->second.backendNodes.find(prefBackend);
+        if (prefBackend == DNN_BACKEND_OPENCV || itBackend == rit->second.backendNodes.end() ||
+            itBackend->second.empty())
+        {
+            if (rit->second.skip)
+                skipId.push_back(rit->first);
+            else if (!skipId.empty())
+            {
+                if (prefBackend == DNN_BACKEND_OPENCV || prevNode.empty())
+                    skipId.push_back(rit->first);
+                else if (idPrev != -1)
+                    skipId.push_back(idPrev);
+
+                std::sort(skipId.begin(), skipId.end());
+                for (int i = 0; i < skipId.size(); i++) {
+                    allLayers[skipId[i]] = skippedLayers.size();
+                }
+                skippedLayers.push_back(skipId);
+                skipId.clear();
+            }
+        }
+        else
+        {
+            if (itBackend->second == prevNode)
+                skipId.push_back(idPrev);
+            else if (!skipId.empty())
+            {
+                skipId.push_back(idPrev);
+                std::sort(skipId.begin(), skipId.end());
+                for (int i = 0; i < skipId.size(); i++) {
+                    allLayers[skipId[i]] = skippedLayers.size();
+                }
+                skippedLayers.push_back(skipId);
+                skipId.clear();
+            }
+            idPrev = rit->first;
+            prevNode = itBackend->second;
+        }
+    }
+    String colors[] = {"#ffffb3", "#fccde5", "#8dd3c7", "#bebada", "#80b1d3", "#fdb462"};
+    String backend;
+    switch (prefBackend) {
+        case DNN_BACKEND_DEFAULT: backend = "DEFAULT/"; break;
+        case DNN_BACKEND_HALIDE: backend = "HALIDE/"; break;
+        case DNN_BACKEND_INFERENCE_ENGINE: backend = "DLIE/"; break;
+        case DNN_BACKEND_OPENCV: backend = "OCV/"; break;
+    }
+    out << "digraph G {" << '\n';
+    // Add nodes
+    for (std::map<int, LayerData>::iterator it = map.begin(); it != map.end(); ++it)
+    {
+        String name = it->second.params.name;
+        if (allLayers[it->first] == -1 && !name.empty()) {
+            out << " " << "\"" << name << "\"" << " [label=\"";
+            skipId.clear();
+            skipId.push_back(it->first);
+        }
+        else if (name.empty() || it->first != skippedLayers[allLayers[it->first]][0])
+            continue;
+        else { // first node in cluster : it->first == skippedLayers[allLayers[it->first]][0]
+            int cluster = allLayers[it->first];
+            out << " " << "\"" << "cluster_" << cluster << "\"" << " [label=\"{";
+            skipId = skippedLayers[allLayers[it->first]]; // vertices in current cluster
+        }
+        for (int i = 0; i < skipId.size(); i++)
+        {
+            LayerParams& lp = map[skipId[i]].params;
+            if (!lp.name.empty()) {
+                if (i > 0) {
+                    out << " | ";
+                }
+                out << lp.name << "\\n" << lp.type << "\\n";
+                if (lp.has("kernel_size")) {
+                    DictValue size = lp.get("kernel_size");
+                    out << "kernel (HxW): " << size << " x " << size << "\\l";
+                } else if (lp.has("kernel_h") && lp.has("kernel_w")) {
+                    DictValue h = lp.get("kernel_h");
+                    DictValue w = lp.get("kernel_w");
+                    out << "kernel (HxW): " << h << " x " << w << "\\l";
+                }
+                if (lp.has("stride")) {
+                    DictValue stride = lp.get("stride");
+                    out << "stride (HxW): " << stride << " x " << stride << "\\l";
+                } else if (lp.has("stride_h") && lp.has("stride_w")) {
+                    DictValue h = lp.get("stride_h");
+                    DictValue w = lp.get("stride_w");
+                    out << "stride (HxW): " << h << " x " << w << "\\l";
+                }
+                if (lp.has("dilation")) {
+                    DictValue dilation = lp.get("dilation");
+                    out << "dilation (HxW): " << dilation << " x " << dilation << "\\l";
+                } else if (lp.has("dilation_h") && lp.has("dilation_w")) {
+                    DictValue h = lp.get("dilation_h");
+                    DictValue w = lp.get("dilation_w");
+                    out << "dilation (HxW): " << h << " x " << w << "\\l";
+                }
+                if (lp.has("pad")) {
+                    DictValue pad = lp.get("pad");
+                    out << "pad (LxTxRxB): " << pad << " x " << pad << " x " << pad << " x " << pad << "\\l";
+                } else if (lp.has("pad_l") && lp.has("pad_t") && lp.has("pad_r") && lp.has("pad_b")) {
+                    DictValue l = lp.get("pad_l");
+                    DictValue t = lp.get("pad_t");
+                    DictValue r = lp.get("pad_r");
+                    DictValue b = lp.get("pad_b");
+                    out << "pad (LxTxRxB): " << l << " x " << t << " x " << r << " x " << b << "\\l";
+                }
+                else if (lp.has("pooled_w") || lp.has("pooled_h")) {
+                    DictValue h = lp.get("pooled_h");
+                    DictValue w = lp.get("pooled_w");
+                    out << "pad (HxW): " << h << " x " << w << "\\l";
+                }
+                if (lp.has("pool")) {
+                    out << "pool: " << lp.get("pool") << "\\l";
+                }
+                if (lp.has("global_pooling")) {
+                    out << "global_pooling: " << lp.get("global_pooling") << "\\l";
+                }
+                if (lp.has("group")) {
+                    out << "group: " << lp.get("group") << "\\l";
+                }
+            }
+        }
+        if (!it->second.outputBlobs.empty())
+            out << "output: " << it->second.outputBlobs[0].size << "\\l";
+
+        Ptr<BackendNode> layerBackend = it->second.backendNodes[prefBackend];
+        out << (!layerBackend.empty() ? backend : "OCV/");
+        int colorId = 0;
+        switch (it->second.layerInstance->preferableTarget) {
+            case DNN_TARGET_CPU: out << "CPU\\n"; colorId = layerBackend.empty() ? 0 : 5; break;
+            case DNN_TARGET_OPENCL: out << "OCL\\n"; colorId = 1; break;
+            case DNN_TARGET_OPENCL_FP16: out << "OCL_FP16\\n"; colorId = 2; break;
+            case DNN_TARGET_MYRIAD: out << "MYRIAD\\n"; colorId = 3; break;
+            case DNN_TARGET_FPGA: out << "FPGA\\n"; colorId = 4; break;
+        }
+        out << ((skipId.size() == 1)? "\" " : " }\" ");
+        out << "fillcolor=\"" << colors[colorId] << "\" ";
+        out << "style=filled ";
+        out << "shape=" << ((skipId.size() == 1)? "box" : "record") << "]" << '\n';
+    }
+    out << '\n';
+    // Add edges
+    int inputsSize = impl->netInputLayer->outNames.size();
+    for (std::map<int, LayerData>::iterator it = map.begin(); it != map.end(); ++it)
+    {
+        if (allLayers[it->first] == -1) // node
+        {
+            for (int i = 0; i < it->second.consumers.size(); i++)
+            {
+                int outId = it->second.consumers[i].lid;
+                if (it == map.begin() && inputsSize > 1)
+                    out << " " << "\"" << it->second.name << "_" << i << "\"" << " -> ";
+                else
+                    out << " " << "\"" << it->second.name << "\"" << " -> ";
+                if (allLayers[outId] == -1) // node
+                    out << "\"" << map[outId].name << "\"" << '\n';
+                else // cluster
+                    out << "\"" << "cluster_" << allLayers[outId] << "\"" << '\n';
+            }
+        }
+        else if (it->first == skippedLayers[allLayers[it->first]].back()) // edges from last layer in cluster
+        {
+            for (int i = 0; i < it->second.consumers.size(); i++)
+            {
+                int outId = it->second.consumers[i].lid;
+                if (allLayers[outId] == -1) { // node
+                    out << " " << "\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
+                    out << "\"" << map[outId].name << "\"" << '\n';
+                }
+                else if (allLayers[outId] != allLayers[it->first]) { // another cluster
+                    out << " " << "\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
+                    out << "\"" << "cluster_" << allLayers[outId] << "\"" << '\n';
+                }
+            }
+        }
+    }
+    out << "}";
+    return out.str();
+}
+
+void Net::dumpToFile(const String& path) {
+    std::ofstream file(path.c_str());
+    file << dump();
+    file.close();
+}
+
 Ptr<Layer> Net::getLayer(LayerId layerId)
 {
     LayerData &ld = impl->getLayerData(layerId);
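Editorial note, not part of the patch: dump() emits a plain Graphviz digraph in which each layer, or each group of fused/skipped layers, becomes one box or record node colored by its backend and target, so rendering the .dot file written by dumpToFile() gives a quick picture of which parts of the network were fused and where they execute. Below is a small sketch of one way to render that file, assuming the Graphviz dot executable is installed and on PATH; the helper name and file names are illustrative only.

    #include <cstdlib>
    #include <string>
    #include <opencv2/dnn.hpp>

    // Hypothetical helper: dump the net and render it to PNG with Graphviz.
    // Assumes the dot executable from Graphviz is installed and on PATH.
    void dumpAndRender(cv::dnn::Net& net, const std::string& basename)
    {
        net.dumpToFile(basename + ".dot");
        std::string cmd = "dot -Tpng " + basename + ".dot -o " + basename + ".png";
        std::system(cmd.c_str());   // return code ignored in this sketch
    }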
