Commit 8d74aaad authored by Dmitry Kurtaev, committed by Alexander Alekhin

Merge pull request #1006 from dkurt:get_layer_inputs

Getting layer inputs (#1006)
parent f39f415f
@@ -182,6 +182,9 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
     /** @brief Returns pointer to layer with specified name which the network use. */
     CV_WRAP Ptr<Layer> getLayer(LayerId layerId);
 
+    /** @brief Returns pointers to input layers of specific layer. */
+    CV_WRAP std::vector<Ptr<Layer> > getLayerInputs(LayerId layerId);
+
     /** @brief Delete layer for the network (not implemented yet) */
     CV_WRAP void deleteLayer(LayerId layer);
@@ -568,10 +568,25 @@ Ptr<Layer> Net::getLayer(LayerId layerId)
 {
     LayerData &ld = impl->getLayerData(layerId);
     if (!ld.layerInstance)
-        CV_Error(Error::StsNullPtr, format("Requseted layer \"%s\" was not initialized", ld.name.c_str()));
+        CV_Error(Error::StsNullPtr, format("Requested layer \"%s\" was not initialized", ld.name.c_str()));
     return ld.layerInstance;
 }
 
+std::vector<Ptr<Layer> > Net::getLayerInputs(LayerId layerId)
+{
+    LayerData &ld = impl->getLayerData(layerId);
+    if (!ld.layerInstance)
+        CV_Error(Error::StsNullPtr, format("Requested layer \"%s\" was not initialized", ld.name.c_str()));
+
+    std::vector<Ptr<Layer> > inputLayers;
+    inputLayers.reserve(ld.inputLayersId.size());
+    std::set<int>::iterator it;
+    for (it = ld.inputLayersId.begin(); it != ld.inputLayersId.end(); ++it) {
+        inputLayers.push_back(getLayer(*it));
+    }
+    return inputLayers;
+}
+
 std::vector<String> Net::getLayerNames() const
 {
     std::vector<String> res;
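
A minimal usage sketch of the new accessor (not part of the commit): it assumes a build that includes this change, and the model files and the layer name "fc8" are hypothetical placeholders. LayerId is a DictValue, so either a layer name or a numeric id can be passed.

// Sketch only: list the input layers of a named layer.
#include <opencv2/dnn.hpp>
#include <iostream>

int main()
{
    // Placeholder model files; any importer that produces a Net would do.
    cv::dnn::Net net = cv::dnn::readNetFromCaffe("model.prototxt", "model.caffemodel");

    // Query the layers feeding into "fc8" (hypothetical layer name).
    std::vector<cv::Ptr<cv::dnn::Layer> > inputs = net.getLayerInputs("fc8");
    for (size_t i = 0; i < inputs.size(); ++i)
        std::cout << inputs[i]->name << " (" << inputs[i]->type << ")" << std::endl;
    return 0;
}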