
Implemented addMatVectorMultiLabel
- Changed the addMatVector tests to exercise addMatVectorMultiLabel.
They need to be restored to how they were before.
mtamburrano committed Nov 6, 2015
1 parent 0ec116e commit df83be6
Showing 3 changed files with 56 additions and 9 deletions.
4 changes: 3 additions & 1 deletion include/caffe/data_layers.hpp
@@ -280,6 +280,8 @@ class MemoryDataLayer : public BaseDataLayer<Dtype> {
#ifdef USE_OPENCV
virtual void AddMatVector(const vector<cv::Mat>& mat_vector,
const vector<int>& labels);
+ virtual void AddMatVectorMultilabel(const vector<cv::Mat>& mat_vector,
+     const vector< vector < int > >& labels);
#endif // USE_OPENCV

// Reset should accept const pointers, but can't, because the memory
@@ -296,7 +298,7 @@ class MemoryDataLayer : public BaseDataLayer<Dtype> {
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);

- int batch_size_, channels_, height_, width_, size_;
+ int batch_size_, channels_, height_, width_, data_size_, label_size_;
Dtype* data_;
Dtype* labels_;
int n_;
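For reference, here is a minimal usage sketch of the new entry point. It is not part of the commit: the layer instance, image size, and label values are invented for illustration, and assume the layer was set up with matching channels/height/width and a batch_size that divides the number of mats.

#include <vector>
#include <opencv2/core/core.hpp>
#include "caffe/data_layers.hpp"

// Feed a MemoryDataLayer<float> four random images, two labels each.
void FeedMultilabelBatch(caffe::MemoryDataLayer<float>& layer) {
  std::vector<cv::Mat> mats;
  std::vector<std::vector<int> > labels(4);
  for (int i = 0; i < 4; ++i) {
    cv::Mat m(24, 24, CV_8UC3);  // must match the layer's configured shape
    cv::randu(m, cv::Scalar::all(0), cv::Scalar::all(255));
    mats.push_back(m);
    labels[i].push_back(i);       // first label of item i
    labels[i].push_back(i * 10);  // second label of item i
  }
  // All inner vectors must have the same length; the layer takes
  // label_size_ from labels[0] and CHECKs the rest against it.
  layer.AddMatVectorMultilabel(mats, labels);
}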
46 changes: 41 additions & 5 deletions src/caffe/layers/memory_data_layer.cpp
@@ -15,8 +15,9 @@ void MemoryDataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
channels_ = this->layer_param_.memory_data_param().channels();
height_ = this->layer_param_.memory_data_param().height();
width_ = this->layer_param_.memory_data_param().width();
- size_ = channels_ * height_ * width_;
- CHECK_GT(batch_size_ * size_, 0) <<
+ data_size_ = channels_ * height_ * width_;
+ label_size_ = 1;
+ CHECK_GT(batch_size_ * data_size_, 0) <<
"batch_size, channels, height, and width must be specified and"
" positive in memory_data_param";
vector<int> label_shape(1, batch_size_);
@@ -77,6 +78,41 @@ void MemoryDataLayer<Dtype>::AddMatVector(const vector<cv::Mat>& mat_vector,
Reset(top_data, top_label, num);
has_new_data_ = true;
}

+ template <typename Dtype>
+ void MemoryDataLayer<Dtype>::AddMatVectorMultilabel(const vector<cv::Mat>& mat_vector,
+     const vector< vector < int > >& labels) {
+   size_t num = mat_vector.size();
+   CHECK(!has_new_data_) <<
+       "Can't add mat until current data has been consumed.";
+   CHECK_GT(num, 0) << "There is no mat to add";
+   CHECK_EQ(num % batch_size_, 0) <<
+       "The added data must be a multiple of the batch size.";
+   CHECK_EQ(num, labels.size()) <<
+       "Added mat and labels must have the same size";
+   label_size_ = labels[0].size();
+   for(int i = 1; i < num; ++i) {
+     CHECK_EQ(labels[i].size(), label_size_) <<
+         "All labels must have the same dimension";
+   }
+   added_data_.Reshape(num, channels_, height_, width_);
+   added_label_.Reshape(num, label_size_, 1, 1);
+   // Apply data transformations (mirror, scale, crop...)
+   this->data_transformer_->Transform(mat_vector, &added_data_);
+   // Copy Labels
+   Dtype* top_label = added_label_.mutable_cpu_data();
+   int index_label;
+   for (int item_id = 0; item_id < num; ++item_id) {
+     for (int label_id = 0; label_id < label_size_; ++label_id) {
+       index_label = (item_id * label_size_) + label_id;
+       top_label[index_label] = labels[item_id][label_id];
+     }
+   }
+   // num_images == batch_size_
+   Dtype* top_data = added_data_.mutable_cpu_data();
+   Reset(top_data, top_label, num);
+   has_new_data_ = true;
+ }
#endif // USE_OPENCV

template <typename Dtype>
@@ -101,16 +137,16 @@ void MemoryDataLayer<Dtype>::set_batch_size(int new_size) {
"Can't change batch_size until current data has been consumed.";
batch_size_ = new_size;
added_data_.Reshape(batch_size_, channels_, height_, width_);
- added_label_.Reshape(batch_size_, 1, 1, 1);
+ added_label_.Reshape(batch_size_, label_size_, 1, 1);
}

template <typename Dtype>
void MemoryDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
CHECK(data_) << "MemoryDataLayer needs to be initalized by calling Reset";
top[0]->Reshape(batch_size_, channels_, height_, width_);
- top[1]->Reshape(batch_size_, 1, 1, 1);
- top[0]->set_cpu_data(data_ + pos_ * size_);
+ top[1]->Reshape(batch_size_, label_size_, 1, 1);
+ top[0]->set_cpu_data(data_ + pos_ * data_size_);
top[1]->set_cpu_data(labels_ + pos_);
pos_ = (pos_ + batch_size_) % n_;
if (pos_ == 0)
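The copy loop above flattens the labels row-major, one contiguous block of label_size_ values per item, at offset item_id * label_size_ + label_id. A tiny standalone sketch of that layout (values invented):

#include <cassert>
#include <vector>

int main() {
  const int num = 2;
  const int label_size = 3;
  const int src[2][3] = { {7, 8, 9}, {4, 5, 6} };  // two items, three labels each
  std::vector<int> flat(num * label_size);
  for (int item_id = 0; item_id < num; ++item_id)
    for (int label_id = 0; label_id < label_size; ++label_id)
      flat[item_id * label_size + label_id] = src[item_id][label_id];
  assert(flat[1 * label_size + 1] == 5);  // item 1, second label
  return 0;
}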
15 changes: 12 additions & 3 deletions src/caffe/test/test_memory_data_layer.cpp
@@ -190,7 +190,10 @@ TYPED_TEST(MemoryDataLayerTest, AddMatVectorDefaultTransform) {
label_vector[i] = i;
cv::randu(mat_vector[i], cv::Scalar::all(0), cv::Scalar::all(255));
}
- layer.AddMatVector(mat_vector, label_vector);
+ vector < vector < int > > multilabel_vector;
+ for(int i = 0; i< mat_vector.size(); ++i)
+   multilabel_vector.push_back(label_vector);
+ layer.AddMatVectorMultilabel(mat_vector, multilabel_vector);

int data_index;
const size_t count = this->channels_ * this->height_ * this->width_;
@@ -235,7 +238,10 @@ TYPED_TEST(MemoryDataLayerTest, TestSetBatchSize) {
label_vector[i] = i;
cv::randu(mat_vector[i], cv::Scalar::all(0), cv::Scalar::all(255));
}
- layer.AddMatVector(mat_vector, label_vector);
+ vector < vector < int > > multilabel_vector;
+ for(int i = 0; i< mat_vector.size(); ++i)
+   multilabel_vector.push_back(label_vector);
+ layer.AddMatVectorMultilabel(mat_vector, multilabel_vector);
// then consume the data
int data_index;
const size_t count = this->channels_ * this->height_ * this->width_;
@@ -270,7 +276,10 @@ TYPED_TEST(MemoryDataLayerTest, TestSetBatchSize) {
label_vector[i] = i;
cv::randu(mat_vector[i], cv::Scalar::all(0), cv::Scalar::all(255));
}
- layer.AddMatVector(mat_vector, label_vector);
+ multilabel_vector.clear();
+ for(int i = 0; i< mat_vector.size(); ++i)
+   multilabel_vector.push_back(label_vector);
+ layer.AddMatVectorMultilabel(mat_vector, multilabel_vector);

// finally consume new data and check if everything is fine
for (int iter = 0; iter < num_iter; ++iter) {
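A follow-up assertion one could append after feeding the data, to confirm the label top blob picks up the new shape; this is a sketch, not part of the commit, and the fixture members (label_blob_, blob_bottom_vec_, blob_top_vec_, batch_size_) are assumed to mirror the existing tests:

  // After one forward pass, top[1] should be (batch_size, label_size, 1, 1)
  // with each item's labels stored contiguously.
  const int label_size = multilabel_vector[0].size();
  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
  EXPECT_EQ(this->label_blob_->channels(), label_size);
  const TypeParam* top_labels = this->label_blob_->cpu_data();
  for (int i = 0; i < this->batch_size_; ++i) {
    for (int j = 0; j < label_size; ++j) {
      EXPECT_EQ(top_labels[i * label_size + j], multilabel_vector[i][j]);
    }
  }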
