Caffe
contrastive_loss_layer.hpp
#ifndef CAFFE_CONTRASTIVE_LOSS_LAYER_HPP_
#define CAFFE_CONTRASTIVE_LOSS_LAYER_HPP_

#include <vector>

#include "caffe/blob.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"

#include "caffe/layers/loss_layer.hpp"

namespace caffe {

/**
 * @brief Computes the contrastive loss
 *        @f$ E = \frac{1}{2N} \sum_{n=1}^{N} \left[ y_n d_n^2
 *        + (1 - y_n) \max(\mathit{margin} - d_n, 0)^2 \right] @f$,
 *        where @f$ d_n = \left\| a_n - b_n \right\|_2 @f$.
 *        This can be used to train siamese networks.
 */
template <typename Dtype>
class ContrastiveLossLayer : public LossLayer<Dtype> {
 public:
  explicit ContrastiveLossLayer(const LayerParameter& param)
      : LossLayer<Dtype>(param), diff_() {}
  /// @brief Does layer-specific setup; implemented along with Reshape.
  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);

  /// Exactly three bottom blobs are required: the two feature blobs
  /// @f$ a @f$ and @f$ b @f$, plus the binary similarity label.
  virtual inline int ExactNumBottomBlobs() const { return 3; }
  /// @brief Returns the layer type.
  virtual inline const char* type() const { return "ContrastiveLoss"; }
  /**
   * Unlike most loss layers, we can backpropagate to the first two inputs;
   * force_backward is not allowed for the similarity label (bottom index 2).
   */
  virtual inline bool AllowForceBackward(const int bottom_index) const {
    return bottom_index != 2;
  }

 protected:
  /// @brief Computes the contrastive loss (see the class description).
  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  /// @brief Using the GPU device, compute the layer output.
  ///        Falls back to Forward_cpu() if unavailable.
  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);

  /// @brief Computes the contrastive error gradient w.r.t. the inputs.
  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
  /// @brief Using the GPU device, compute the gradients for any parameters
  ///        and for the bottom blobs if propagate_down is set.
  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);

  Blob<Dtype> diff_;        // cached for backward pass
  Blob<Dtype> dist_sq_;     // cached for backward pass
  Blob<Dtype> diff_sq_;     // tmp storage for gpu forward pass
  Blob<Dtype> summer_vec_;  // tmp storage for gpu forward pass
};

}  // namespace caffe

#endif  // CAFFE_CONTRASTIVE_LOSS_LAYER_HPP_
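For reference, the loss documented in the class brief can be written out directly. Below is a minimal, standalone C++ sketch of that math, E = 1/(2N) * sum_n [ y_n * d_n^2 + (1 - y_n) * max(margin - d_n, 0)^2 ] with d_n = ||a_n - b_n||_2. It is only an illustration, not Caffe's implementation: the helper name contrastive_loss and its std::vector interface are hypothetical, and it assumes the non-legacy formulation (Euclidean distance inside the margin hinge).

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Hypothetical standalone helper (not part of Caffe): computes the batch
// contrastive loss from feature vectors a, b and binary labels y
// (y[i] == 1 means the i-th pair is similar).
float contrastive_loss(const std::vector<std::vector<float> >& a,
                       const std::vector<std::vector<float> >& b,
                       const std::vector<int>& y, float margin) {
  const std::size_t n = a.size();
  float loss = 0.f;
  for (std::size_t i = 0; i < n; ++i) {
    // Squared Euclidean distance between the i-th pair of feature vectors.
    float dist_sq = 0.f;
    for (std::size_t c = 0; c < a[i].size(); ++c) {
      const float diff = a[i][c] - b[i][c];
      dist_sq += diff * diff;
    }
    if (y[i]) {
      // Similar pair: penalize the squared distance.
      loss += dist_sq;
    } else {
      // Dissimilar pair: penalize only pairs closer than the margin.
      const float gap = std::max(margin - std::sqrt(dist_sq), 0.f);
      loss += gap * gap;
    }
  }
  return loss / (2.f * static_cast<float>(n));
}

Calling this with margin = 1 mirrors the default margin of Caffe's ContrastiveLossParameter; treat that default as an assumption if your proto version differs.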
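Backward_cpu is documented above as computing the contrastive error gradient w.r.t. the inputs. Under the same non-legacy formulation, the per-pair gradients follow from the chain rule; the derivation below is a sketch of that math, not a line-by-line description of contrastive_loss_layer.cpp (Caffe's legacy_version option changes the dissimilar-pair term):

\[
\frac{\partial E}{\partial a_n} =
\begin{cases}
  \dfrac{1}{N}\,(a_n - b_n), & y_n = 1, \\[1ex]
  -\dfrac{1}{N}\,\dfrac{\mathit{margin} - d_n}{d_n}\,(a_n - b_n), & y_n = 0 \text{ and } 0 < d_n < \mathit{margin}, \\[1ex]
  0, & \text{otherwise,}
\end{cases}
\qquad
\frac{\partial E}{\partial b_n} = -\frac{\partial E}{\partial a_n}.
\]

As with other Caffe loss layers, the loss weight carried on the top blob scales these gradients during Backward.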