diff --git a/inference-engine/src/cldnn_engine/cldnn_graph.cpp b/inference-engine/src/cldnn_engine/cldnn_graph.cpp
index 49d5212d37e..945e795e7d8 100644
--- a/inference-engine/src/cldnn_engine/cldnn_graph.cpp
+++ b/inference-engine/src/cldnn_engine/cldnn_graph.cpp
@@ -4,6 +4,7 @@
 #include
 #include
+#include "cldnn/runtime/debug_configuration.hpp"
 #include "cldnn_graph.h"
 #include "simple_math.h"
@@ -82,6 +83,13 @@ void CLDNNGraph::Build() {
     }
 
     UpdateImplementationsMap();
+
+    GPU_DEBUG_GET_INSTANCE(debug_config);
+    GPU_DEBUG_IF(!debug_config->dry_run_path.empty()) {
+        CNNNetwork net(GetExecGraphInfo());
+        net.serialize(debug_config->dry_run_path);
+        exit(0);
+    }
 }
 
 std::shared_ptr<cldnn::network> CLDNNGraph::BuildNetwork(std::shared_ptr<cldnn::program> program) {
diff --git a/inference-engine/thirdparty/clDNN/api/cldnn/runtime/debug_configuration.hpp b/inference-engine/thirdparty/clDNN/api/cldnn/runtime/debug_configuration.hpp
index 819e1b957b6..ebbbe911ef1 100644
--- a/inference-engine/thirdparty/clDNN/api/cldnn/runtime/debug_configuration.hpp
+++ b/inference-engine/thirdparty/clDNN/api/cldnn/runtime/debug_configuration.hpp
@@ -32,6 +32,7 @@ public:
     std::string dump_sources;        // Dump opencl sources
     std::string dump_layers_path;    // Enable dumping intermediate buffers and set the dest path
     std::string dump_layers;         // Dump intermediate buffers of specified layers only, separated by space
+    std::string dry_run_path;        // Dry run and serialize execution graph into the specified path
     int dump_layers_dst_only;        // Dump only output of layers
     static const debug_configuration *get_instance();
 };
diff --git a/inference-engine/thirdparty/clDNN/runtime/debug_configuration.cpp b/inference-engine/thirdparty/clDNN/runtime/debug_configuration.cpp
index 82196b9a554..debfc05f12e 100644
--- a/inference-engine/thirdparty/clDNN/runtime/debug_configuration.cpp
+++ b/inference-engine/thirdparty/clDNN/runtime/debug_configuration.cpp
@@ -103,6 +103,7 @@ debug_configuration::debug_configuration()
     , dump_layers_path(std::string())
     , dump_layers(std::string())
     , dump_layers_dst_only(0)
+    , dry_run_path(std::string())
     , disable_onednn(0) {
 #ifdef GPU_DEBUG_CONFIG
     get_common_debug_env_var("Verbose", verbose);
@@ -114,6 +115,7 @@ debug_configuration::debug_configuration()
     get_gpu_debug_env_var("DumpLayers", dump_layers);
     get_gpu_debug_env_var("DumpLayersDstOnly", dump_layers_dst_only);
     get_gpu_debug_env_var("DisableOnednn", disable_onednn);
+    get_gpu_debug_env_var("DryRunPath", dry_run_path);
 
     if (dump_layers_path.length() > 0 && !disable_usm) {
         disable_usm = 1;
diff --git a/inference-engine/thirdparty/clDNN/src/impls/ocl/primitive_base.hpp b/inference-engine/thirdparty/clDNN/src/impls/ocl/primitive_base.hpp
index 1fcbccfabf3..5255be02258 100644
--- a/inference-engine/thirdparty/clDNN/src/impls/ocl/primitive_base.hpp
+++ b/inference-engine/thirdparty/clDNN/src/impls/ocl/primitive_base.hpp
@@ -9,7 +9,6 @@
 #include "primitive_inst.h"
 #include "cldnn/graph/program.hpp"
 #include "cldnn/runtime/error_handler.hpp"
-#include "cldnn/runtime/debug_configuration.hpp"
 #include "kernel_selector_helper.h"
 #include "cldnn/graph/network.hpp"
 #include "register.hpp"
diff --git a/inference-engine/thirdparty/clDNN/src/program.cpp b/inference-engine/thirdparty/clDNN/src/program.cpp
index 8565f10acb2..c92f5a0995d 100644
--- a/inference-engine/thirdparty/clDNN/src/program.cpp
+++ b/inference-engine/thirdparty/clDNN/src/program.cpp
@@ -423,9 +423,17 @@ void program::build_program(bool is_internal) {
     { pre_optimize_graph(is_internal); }
     run_graph_compilation();
     { post_optimize_graph(is_internal); }
-    prepare_memory_dependencies();
-    compile();
-    init_kernels();
+
+    GPU_DEBUG_GET_INSTANCE(debug_config);
+#ifdef GPU_DEBUG_CONFIG
+    if (debug_config->dry_run_path.empty()) {
+#else
+    {
+#endif
+        prepare_memory_dependencies();
+        compile();
+        init_kernels();
+    }
     if (!is_internal) {
         prim_info = get_current_stage_info();
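
For reference, a minimal sketch of how the new dry-run switch might be exercised from an application. This is not part of the patch; it assumes a GPU plugin built with GPU_DEBUG_CONFIG (otherwise GPU_DEBUG_IF and the DryRunPath variable are compiled out), the "OV_GPU_" prefix that get_gpu_debug_env_var() is expected to apply, the 2021.x InferenceEngine::Core API, and placeholder paths model.xml / exec_graph.xml.

```cpp
// Hypothetical usage sketch (not part of this patch). Assumes GPU_DEBUG_CONFIG
// is enabled in the clDNN build and that get_gpu_debug_env_var() reads the
// variable with the "OV_GPU_" prefix; paths are placeholders.
#include <cstdlib>
#include <inference_engine.hpp>

int main() {
    // Request a dry run: the GPU plugin serializes the execution graph to the
    // given path and calls exit(0) instead of compiling and running kernels.
    setenv("OV_GPU_DryRunPath", "exec_graph.xml", 1);  // POSIX; use _putenv_s on Windows

    InferenceEngine::Core core;
    InferenceEngine::CNNNetwork network = core.ReadNetwork("model.xml");

    // CLDNNGraph::Build() runs inside LoadNetwork(); with DryRunPath set the
    // process terminates here, right after the execution graph is written.
    core.LoadNetwork(network, "GPU");
    return 0;
}
```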