caffe使用MemoryDataLayer从内存中加载数据

最近在搞caffe的应用,因为很多时候我们需要在服务器上进行特征的抽取,所以我们需要很方便地将单张图片丢入caffe的网络进行一次前向传递,这样就诞生了一个从内存中加载数据进入caffe的需求,这里我直接贴出代码来先:

#include <boost/make_shared.hpp>

// these need to be included after boost on OS X
#include <fstream>  // NOLINT
#include <stdexcept>  // NOLINT
#include <string>  // NOLINT(build/include_order)
#include <vector>  // NOLINT(build/include_order)
#include <iostream>  // NOLINT

#include "caffe/caffe.hpp"
#include <opencv.hpp>

// Throws std::runtime_error if `filename` cannot be opened for reading.
// Used to fail fast before handing the paths to caffe, whose own error
// reporting on missing files is less direct.
static void CheckFile(const std::string& filename) {
	std::ifstream f(filename.c_str());
	if (!f.good()) {
		// No manual close needed: std::ifstream releases the handle in its
		// destructor (RAII), including when the exception unwinds.
		throw std::runtime_error("Could not open file " + filename);
	}
}

// Builds a caffe::Net from a deploy prototxt and loads pretrained weights
// from a .caffemodel file.
//
// param_file            - path to the deploy .prototxt describing the net.
// pretrained_param_file - path to the .caffemodel with trained weights.
// phase                 - caffe::TRAIN or caffe::TEST.
//
// Returns an OWNING raw pointer: the caller is responsible for `delete`-ing
// the returned net. Throws std::runtime_error if either file is unreadable.
template <typename Dtype>
caffe::Net<Dtype>* Net_Init_Load(
	const std::string& param_file, const std::string& pretrained_param_file, caffe::Phase phase)
{
	// Fail fast with a clear message before caffe touches the files.
	CheckFile(param_file);
	CheckFile(pretrained_param_file);

	caffe::Net<Dtype>* net(new caffe::Net<Dtype>(param_file, phase));

	net->CopyTrainedLayersFrom(pretrained_param_file, 0);
	return net;
}
#define NetF float

int main()
{
	cv::Mat src1;
	src1 = cv::imread("test.png");

	cv::Mat rszimage;

	//// The mean file image size is 256x256, need to resize the input image to 256x256
	cv::resize(src1, rszimage, cv::Size(227, 227));
	std::vector<cv::Mat> dv = { rszimage }; // image is a cv::Mat, as I'm using #1416
	std::vector<int> dvl = { 0 };

	caffe::Datum data;
	caffe::ReadFileToDatum("D:/work/DestImage/crop/CH0005-00-0019/00028.png", &data);

	caffe::Net<NetF>* _net = Net_Init_Load<NetF>("deploy_Test.prototxt", "bvlc_alexnet.caffemodel", caffe::TEST);
	caffe::MemoryDataLayer<NetF> *m_layer_ = (caffe::MemoryDataLayer<NetF> *)_net->layers()[0].get();
	m_layer_->AddMatVector(dv, dvl);

	/*float loss = 0.0;
	std::vector<caffe::Blob<float>*> results = _net->ForwardPrefilled(&loss);*/
	int end_ind = _net->layers().size();
	std::vector<caffe::Blob<NetF>*> input_vec;
	_net->Forward(input_vec);
	boost::shared_ptr<caffe::Blob<NetF>> outPool5 = _net->blob_by_name("pool5");
	std::cout << outPool5->shape()[0] << std::endl;
	std::cout << outPool5->shape()[1] << std::endl;
	std::cout << outPool5->shape()[2] << std::endl;
	std::cout << outPool5->shape()[3] << std::endl;

	std::cout << outPool5->num() << std::endl;
	std::cout << outPool5->channels() << std::endl;
	std::cout << outPool5->width() << std::endl;
	std::cout << outPool5->height() << std::endl;
	std::cout << outPool5->data_at(0, 0, 0, 0) << std::endl;
	std::cout << outPool5->data_at(0, 0, 1, 1) << std::endl;
	std::cout << outPool5->data_at(0, 95, 5, 5) << std::endl;

	const NetF* pstart = outPool5->cpu_data();
	std::cout << m_layer_->width() << std::endl;

	return 0;
}

然后是配置文件:

name: "CaffeNet"

layers
{
  name: "data"
  type: MEMORY_DATA
  top: "data"
  top: "label"
  memory_data_param
  {
    batch_size: 1
    channels: 3
    height: 227
    width: 227
  }
  transform_param
  {
    crop_size: 227
    mirror: false
    #mean_file:"imagenet_mean.binaryproto"
	mean_value: 104
	mean_value: 117
    mean_value: 123
  }
}

layers {
  name: "conv1"
  type: CONVOLUTION
  bottom: "data"
  top: "conv1"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
  }
}
layers {
  name: "relu1"
  type: RELU
  bottom: "conv1"
  top: "conv1"
}
layers {
  name: "pool1"
  type: POOLING
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm1"
  type: LRN
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layers {
  name: "conv2"
  type: CONVOLUTION
  bottom: "norm1"
  top: "conv2"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
  }
}
layers {
  name: "relu2"
  type: RELU
  bottom: "conv2"
  top: "conv2"
}
layers {
  name: "pool2"
  type: POOLING
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm2"
  type: LRN
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layers {
  name: "conv3"
  type: CONVOLUTION
  bottom: "norm2"
  top: "conv3"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layers {
  name: "relu3"
  type: RELU
  bottom: "conv3"
  top: "conv3"
}
layers {
  name: "conv4"
  type: CONVOLUTION
  bottom: "conv3"
  top: "conv4"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layers {
  name: "relu4"
  type: RELU
  bottom: "conv4"
  top: "conv4"
}
layers {
  name: "conv5"
  type: CONVOLUTION
  bottom: "conv4"
  top: "conv5"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layers {
  name: "relu5"
  type: RELU
  bottom: "conv5"
  top: "conv5"
}
layers {
  name: "pool5"
  type: POOLING
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "fc6"
  type: INNER_PRODUCT
  bottom: "pool5"
  top: "fc6"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  inner_product_param {
    num_output: 4096
  }
}
layers {
  name: "relu6"
  type: RELU
  bottom: "fc6"
  top: "fc6"
}
layers {
  name: "drop6"
  type: DROPOUT
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc7"
  type: INNER_PRODUCT
  bottom: "fc6"
  top: "fc7"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  inner_product_param {
    num_output: 4096
  }
}
layers {
  name: "relu7"
  type: RELU
  bottom: "fc7"
  top: "fc7"
}
layers {
  name: "drop7"
  type: DROPOUT
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc8"
  type: INNER_PRODUCT
  bottom: "fc7"
  top: "fc8"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  inner_product_param {
    num_output: 1000
  }
}

layers
{
  name: "prob"
  type: SOFTMAX
  bottom: "fc8"
  top: "prob"
}

layers
{
  name: "output"
  type: ARGMAX
  bottom: "prob"
  top: "output"
}

我的模型使用的是alexnet,例子是用来抽取一个图片在pool5那一层的特征。这样大家使用这个例子可以利用caffe的任意模型抽取任意图片的特征。

版权声明:本文为博主原创文章,未经博主允许不得转载。

时间: 2024-10-20 17:10:37

caffe使用MemoryDataLayer从内存中加载数据的相关文章

从内存中加载DLL DELPHI版

//从内存中加载DLL DELPHI版 unit MemLibrary; interface uses Windows; function memLoadLibrary(pLib: Pointer): DWord; function memGetProcAddress(dwLibHandle: DWord; pFunctionName: PChar): Pointer; stdcall; function memFreeLibrary(dwHandle: DWord): Boolean; imp

从内存中加载DLL Delphi版(转)

源:从内存中加载DLL DELPHI版 原文 : http://www.2ccc.com/article.asp?articleid=5784 MemLibrary.pas //从内存中加载DLL DELPHI版 unit MemLibrary; interface uses Windows; function memLoadLibrary(pLib: Pointer): DWord; function memGetProcAddress(dwLibHandle: DWord; pFunctio

Flexigrid从对象中加载数据

Flexigrid是用来动态加载数据的一种比较好(老)的Jquery表插件,然后有些时候,我们需要其从本地或者jQuery对象中加载数据,比如有这么个需求,页面显示中有两个表格A和B,其中A表格从服务器端加载数据,B表格需要根据用户的操作来动态更新,例如当用户勾选住A表格的某些项时,B表格就把这些项显示在其中.对于这么个请求,可以如下解决: 1)建立一个Jquery对象C,用于保存B表格的信息: 2)当用户选择A表格中的某些项时,C更新它所保存的内容: 3)当C的内容发生变化时,B表格更新其显示

七. 从文件中加载数据

从文件中加载数据有两种方法,一种是利用内置的csv模块,一种是利用第三方模块numpy import matplotlib.pyplot as plt import csv import numpy as np # csv 方法 x = [] y = [] with open('example.txt','r') as csvfile: plots = csv.reader(csvfile, delimiter=',') for row in plots: x.append(int(row[0]

ArcGIS Engine中加载数据

ArcGIS Engine中加载数据 http://blog.csdn.net/gisstar/article/details/4206822 分类: AE开发积累2009-05-21 16:491118人阅读评论(0)收藏举报 database数据库serveruser工作class 1.加载Shapefile数据 1 IWorkspaceFactory pWorkspaceFactory;2 IFeatureWorkspace pFeatureWorkspace;3 IFeatureLaye

如何在uboot上实现从网络下载版本镜像并直接在内存中加载之?

这是作者近期项目上遇到的一个需求,描述如下: 一块MT7620N的路由器单板,Flash中已存放一个版本并可以通过uboot正常加载并启动.现在需要:在uboot上电启动过程中,通过外部按键触发干涉,使得uboot可以从网络上下载一个临时版本,并直接在内存中启动之.即保证Flash中的原有版本不被更改 解决思路如下: 1. 利用单板上的Factory Reset按键,实现外部按键触发. 在uboot的board_init_r函数中,在调用main_loop死循环之前,检测Factory Rese

从内存中加载DLL

开发工程 https://github.com/fancycode/MemoryModule 原理介绍 http://blog.csdn.net/xrain_zh/article/details/44728321 使用 Api 从内存加载DLL http://bbs.pediy.com/showthread.php?t=141006

使用IEX在内存中加载此脚本,执行以下命令,脚本将进行所有的检查。

原文地址:https://www.cnblogs.com/SWQ12/p/11723724.html

DHTMLX 前端框架 建立你的一个应用程序 教程(六)-- 表格加载数据

从数据库加载数据 这篇我们介绍从MySQL数据库中加载数据到表格 我们使用 MySql的数据库dhtmlx_tutorial 和表contacts 示例使用的是PHP平台和dhtmlxConnector 帮助库  因为这是实现服务器端逻辑最简单的方法   数据以XML格式输出. 环境自己搭建  相信C#的朋友 从数据库获取数据转化盛XML也不是难事. 加载数据到表格: 1.找到”db.sql“文件将表导入到本地数据库 2.在codebase 文件中添加一个php文件codebase 3.下载dh