
Caffe feature extraction: a Caffe C++ interface

//CaffeExFeat.h
#ifndef CAFFEEXFEAT_H 
#define CAFFEEXFEAT_H
 
#include "caffe/caffe.hpp"
#include <string>
#include <vector>
#include "opencv2/opencv.hpp"
 // source: https://blog.csdn.net/AP1005834/article/details/79820947
 
using namespace caffe;
 
class CaffeExFeat
{
    public:
         explicit CaffeExFeat(std::string proto,std::string model,char* nameLayer,std::string meanFile,float scale=-1);
         explicit CaffeExFeat(std::string proto,std::string model,char* nameLayer,float v1=0.0,float v2=0.0,float v3=0.0,float scale=-1);
         ~CaffeExFeat();
 
        double* extractFeat(const cv::Mat& img);
        double calSimilarity(const cv::Mat& img1 ,const cv::Mat& img2);
    private:
        unsigned int blob_id_;
        boost::shared_ptr< Net<float> > net_;
        cv::Size input_geometry_;
        int num_channels_;
        cv::Mat mean_;
        Blob<float>* input_blobs_;
        unsigned int featNum_;
        float scale_;
 
        void init(std::string proto,std::string model,float scale);
        void getMeanData(std::string mean_file);
        void getMeanData(float v1,float v2,float v3 );
        unsigned int get_blob_index( char *query_blob_name);
        void wrapInputLayer(std::vector<cv::Mat>* input_channels);
        void preprocess(const cv::Mat& img,std::vector<cv::Mat>* input_channels);
 
};
 
#endif
//CaffeExFeat.cpp
#include "CaffeExFeat.h"
#include <cmath>    // std::sqrt
#include <cblas.h>  // cblas_ddot -- assumes Caffe is linked against a CBLAS implementation (e.g. ATLAS/OpenBLAS); with MKL the header name differs
 
CaffeExFeat::CaffeExFeat(std::string proto,std::string model, char* nLayer,std::string meanFile,
    float scale)
{
    init(proto,model,scale);
    getMeanData(meanFile);
    blob_id_ = get_blob_index(nLayer);
}
 
CaffeExFeat::CaffeExFeat(std::string proto,std::string model, char* nLayer,float v1,float v2,float v3,
    float scale)
{
    init(proto,model,scale);
    getMeanData(v1,v2,v3);
    blob_id_ = get_blob_index(nLayer);
}
 
void CaffeExFeat::init(std::string proto,std::string model,float scale)
{
    scale_=scale;
    Phase phase = TEST;
    Caffe::set_mode(Caffe::CPU);
    net_ = boost::shared_ptr< Net<float> >(new caffe::Net<float>(proto, phase));
    net_->CopyTrainedLayersFrom(model);
 
    input_blobs_ = net_->input_blobs()[0];
    num_channels_ = input_blobs_->channels();
    input_geometry_ = cv::Size(input_blobs_->width(),input_blobs_->height());
    input_blobs_->Reshape(1, num_channels_, input_geometry_.height, input_geometry_.width); // batch of 1; presumably more images could be fed by enlarging this dimension
    
    net_->Reshape();  // propagate the changed input dimensions through the whole net
    
}
 
CaffeExFeat::~CaffeExFeat()
{
    
}
 
double* CaffeExFeat::extractFeat(const cv::Mat& img)
{
    std::vector<cv::Mat> input_channels;
    wrapInputLayer(&input_channels);
    preprocess(img, &input_channels);
 
    net_->ForwardPrefilled();  // forward pass over the data preprocess() copied into the input blob (legacy API; newer Caffe uses Forward())
    boost::shared_ptr<Blob<float> > featBlob = net_->blobs()[blob_id_];
    featNum_ = featBlob->count();
    const float *featData = (const float *) featBlob->cpu_data();
    
    double* out = new double[featNum_];
    for(int k=0;k<featNum_;++k) out[k]=featData[k];
 
    return out;
}
 
double CaffeExFeat::calSimilarity(const cv::Mat& img1 ,const cv::Mat& img2)
{
    double* feat_1 = extractFeat(img1);
    double* feat_2 = extractFeat(img2);
 
    // Cosine similarity: dot(f1, f2) / (||f1|| * ||f2||)
    double sim = cblas_ddot(featNum_, feat_1, 1, feat_2, 1)
                 / (std::sqrt(cblas_ddot(featNum_, feat_1, 1, feat_1, 1)) * std::sqrt(cblas_ddot(featNum_, feat_2, 1, feat_2, 1)));
    delete []feat_1;
    delete []feat_2;
 
    return sim;
}
 
void CaffeExFeat::getMeanData(std::string mean_file)
{
  BlobProto blob_proto;  
  ReadProtoFromBinaryFileOrDie(mean_file.c_str(), &blob_proto);  
  
  /* Convert from BlobProto to Blob<float> */  
  Blob<float> mean_blob;  
  mean_blob.FromProto(blob_proto);  
  
  /* The format of the mean file is planar 32-bit float BGR or grayscale. */  
  std::vector<cv::Mat> channels;  
  float* data = mean_blob.mutable_cpu_data();  
  for (int i = 0; i < num_channels_; ++i) {  
    /* Extract an individual channel. */  
    cv::Mat channel(mean_blob.height(), mean_blob.width(), CV_32FC1, data);  
    channels.push_back(channel);  
    data += mean_blob.height() * mean_blob.width();  
  }  
  
  /* Merge the separate channels into a single image. */  
  cv::Mat mean;  
  cv::merge(channels, mean);  
  
  /* Compute the global mean pixel value and create a mean image 
   * filled with this value. */  
  cv::Scalar channel_mean = cv::mean(mean);  
  mean_ = cv::Mat(input_geometry_, mean.type(), channel_mean);  
}
 
void CaffeExFeat::getMeanData(float v1,float v2, float v3)
{
    cv::Scalar channel_mean(v1,v2,v3);
    mean_ = cv::Mat(input_geometry_,CV_32FC3,channel_mean );
}
 
unsigned int CaffeExFeat::get_blob_index( char *query_blob_name)
{
    std::string str_query(query_blob_name);    
    vector< string > const & blob_names = net_->blob_names();
    for( unsigned int i = 0; i != blob_names.size(); ++i ) 
    { 
        if( str_query == blob_names[i] ) 
        { 
            return i;
        } 
    }
    LOG(FATAL) << "Unknown blob name: " << str_query;
    return 0;  // unreachable (LOG(FATAL) aborts), but satisfies the non-void return type
}
 
void CaffeExFeat::wrapInputLayer(std::vector<cv::Mat>* input_channels) {  
  /* Wrap each channel of the network's input blob in a cv::Mat header, so that writing
   * into these Mats (via cv::split in preprocess) fills the blob's memory directly. */
  Blob<float>* input_layer = net_->input_blobs()[0];  
  
  int width = input_layer->width();  
  int height = input_layer->height();  
  float* input_data = input_layer->mutable_cpu_data();  
  for (int i = 0; i < input_layer->channels(); ++i) {  
    cv::Mat channel(height, width, CV_32FC1, input_data);  
    input_channels->push_back(channel);  
    input_data += width * height;  
  }  
}
 
void CaffeExFeat::preprocess(const cv::Mat& img,std::vector<cv::Mat>* input_channels)
{
  cv::Mat sample;  
  if (img.channels() == 3 && num_channels_ == 1)  
    cv::cvtColor(img, sample, CV_BGR2GRAY);  
  else if (img.channels() == 4 && num_channels_ == 1)  
    cv::cvtColor(img, sample, CV_BGRA2GRAY);  
  else if (img.channels() == 4 && num_channels_ == 3)  
    cv::cvtColor(img, sample, CV_BGRA2BGR);  
  else if (img.channels() == 1 && num_channels_ == 3)  
    cv::cvtColor(img, sample, CV_GRAY2BGR);  
  else  
    sample = img; 
 
  cv::Mat sample_resized;
  if (sample.size() != input_geometry_)  
    cv::resize(sample, sample_resized, input_geometry_);  
  else  
    sample_resized = sample;
 
  cv::Mat sample_float;
  if (num_channels_ == 3)  
    sample_resized.convertTo(sample_float, CV_32FC3);  
  else  
    sample_resized.convertTo(sample_float, CV_32FC1);
 
  cv::Mat sample_normalized;  
  cv::subtract(sample_float, mean_, sample_normalized);
  
  if (scale_ != -1) {
      sample_normalized *= scale_;  // optional global scaling of the normalized input
  }

  /* cv::split writes the separated channels straight into the cv::Mat headers created by
   * wrapInputLayer, i.e. directly into the network's input blob memory. */
  cv::split(sample_normalized, *input_channels);
}
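
A minimal usage sketch of the class above (the prototxt / caffemodel / mean-file names, the "fc7" blob name and the image paths are placeholders, not files that ship with this code):

//main_exfeat.cpp
#include "CaffeExFeat.h"
#include <iostream>

int main()
{
    char layer[] = "fc7";  // blob whose data is taken as the feature vector (placeholder name)
    CaffeExFeat extractor("deploy.prototxt", "model.caffemodel", layer, "mean.binaryproto");

    cv::Mat img1 = cv::imread("face1.jpg");
    cv::Mat img2 = cv::imread("face2.jpg");

    double* feat = extractor.extractFeat(img1);   // caller owns the returned buffer
    std::cout << "first feature value: " << feat[0] << std::endl;
    delete [] feat;

    std::cout << "cosine similarity: " << extractor.calSimilarity(img1, img2) << std::endl;
    return 0;
}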

 

 

Reference: 《深度學習21天實戰caffe》, p. 136; the Boost version required is boost_1_58_0.

Write the file net_demo.cpp and save it under /home/sf/demo:

#include <vector>
#include <iostream>
#include "caffe/net.hpp"
using namespace caffe;
using namespace std;


int main(void)
{
    std::string proto("deploy.prototxt");
    Net<float> nn(proto, caffe::TEST);
    vector<string> bn = nn.blob_names();
    for (int i = 0; i < bn.size(); i++)
    {
        cout << "Blob #" << i << " : " << bn[i] << endl;
    }
    return 0;
}
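
To build net_demo.cpp, something along the following lines is typically needed; the include/library paths and the exact library list are assumptions that depend on how and where Caffe was built:

g++ net_demo.cpp -o net_demo -I$CAFFE_ROOT/include -I$CAFFE_ROOT/build/src -L$CAFFE_ROOT/build/lib -lcaffe -lglog -lboost_system -lprotobuf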

--------------------- 
Author: woneil 
Source: CSDN 
Original article: https://blog.csdn.net/ahbbshenfeng/article/details/52077605 

 

In Caffe, a Net represents a complete CNN model and contains a number of Layer instances. The classic network architectures defined by the various prototxt files seen earlier, such as LeNet and AlexNet, are each instantiated in Caffe code as a Net object.

1. Basic usage of Net
#include <caffe/net.hpp>

Add the following inside main():

std::string proto("deploy.prototxt");
Net<float> nn(proto, caffe::TEST);
vector<string> bn = nn.blob_names();  // get the names of all blob objects in the Net
vector<string> ln = nn.layer_names();
for (int i = 0; i < bn.size(); i++)
{
    cout << "Blob #" << i << ":" << bn[i] << endl;
}
for (int j = 0; j < ln.size(); j++)
{
    cout << "Layer #" << j << ":" << ln[j] << endl;
}
The output of running this is as follows:
F0103 13:36:44.474290  5904 cudnn_conv_layer.cpp:53] Check failed: status == CUDNN_STATUS_SUCCESS (6 vs. 0)  CUDNN_STATUS_ARCH_MISMATCH
*** Check failure stack trace: ***
Because my GPU hardware does not support CUDA compute capability 3.0 or above (hence the CUDNN_STATUS_ARCH_MISMATCH error), I turned off USE_CUDNN in the preprocessor settings (by modifying the corresponding option in the CommonSettings.props file). The result then looks like this:

I0103 13:54:55.282647 13792 net.cpp:283] Network initialization done.
Blob #0:data
Blob #1:conv1
Blob #2:pool1
Blob #3:norm1
Blob #4:conv2
Blob #5:pool2
Blob #6:norm2
Blob #7:conv3
Blob #8:conv4
Blob #9:conv5
Blob #10:pool5
Blob #11:fc6
Blob #12:fc7
Blob #13:fc8
Blob #14:prob
Layer #0:data
Layer #1:conv1
Layer #2:relu1
Layer #3:pool1
Layer #4:norm1
Layer #5:conv2
Layer #6:relu2
Layer #7:pool2
Layer #8:norm2
Layer #9:conv3
Layer #10:relu3
Layer #11:conv4
Layer #12:relu4
Layer #13:conv5
Layer #14:relu5
Layer #15:pool5
Layer #16:fc6
Layer #17:relu6
Layer #18:drop6
Layer #19:fc7
Layer #20:relu7
Layer #21:drop7
Layer #22:fc8
Layer #23:prob
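
As a side note, besides scanning blob_names() by index (which is what get_blob_index() in CaffeExFeat does), Caffe's Net can also look a blob up directly by name via has_blob()/blob_by_name(). A small sketch in the same style as net_demo.cpp, where "fc8" is just an assumed blob name:

#include <iostream>
#include "caffe/net.hpp"
using namespace caffe;
using namespace std;

int main(void)
{
    std::string proto("deploy.prototxt");
    Net<float> nn(proto, caffe::TEST);
    if (nn.has_blob("fc8")) {                                      // guard against a missing name
        boost::shared_ptr<Blob<float> > b = nn.blob_by_name("fc8");
        cout << "fc8 holds " << b->count() << " values" << endl;   // total number of elements in the blob
    }
    return 0;
}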
--------------------- 
Author: 阿爾法旺旺 
Source: CSDN 
Original article: https://blog.csdn.net/yingwei13mei/article/details/53997389 