
Commit b4ca9a7

Junli authored and committed
Uncomment the batched GPU convolution paths
1 parent fb3c2db commit b4ca9a7

File tree

1 file changed: +6 −6 lines changed


src/caffe/layers/conv_layer.cpp

+6 −6
@@ -74,19 +74,19 @@ void ConvolutionLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
 template <typename Dtype>
 void ConvolutionLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
     const vector<Blob<Dtype>*>& top) {
-  //if (!this->is_1x1_ && use_packing_scheme && global_packing_N > 1)
-  //Forward_gpu_batched(bottom, top);
-  //else
+  if (!this->is_1x1_ && use_packing_scheme && global_packing_N > 1)
+    Forward_gpu_batched(bottom, top);
+  else
     Forward_gpu_org(bottom, top);
 }
 
 template <typename Dtype>
 void ConvolutionLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
     const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
-  //if (!this->is_1x1_ && use_packing_scheme && global_packing_N > 1)
+  if (!this->is_1x1_ && use_packing_scheme && global_packing_N > 1)
     Backward_gpu_batched(top, propagate_down, bottom);
-  //else
-  //Backward_gpu_org(top, propagate_down, bottom);
+  else
+    Backward_gpu_org(top, propagate_down, bottom);
 }
 
 template <typename Dtype>
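For context, this commit re-enables a simple dispatch pattern: take the batched (packed) GPU path only when the layer is not 1x1 and the packing scheme is enabled with a packing factor greater than one, otherwise fall back to the original per-image path. Below is a minimal, self-contained C++ sketch of that pattern. The names use_packing_scheme, global_packing_N, and the *_gpu_batched / *_gpu_org methods come from the diff; the ConvStub struct and the main() driver are hypothetical stand-ins for illustration, not Caffe code.

// Minimal sketch of the dispatch this commit re-enables (illustrative only).
#include <cstdio>

static bool use_packing_scheme = true;  // assumed global knob (named in the diff): enable packed path
static int  global_packing_N   = 16;    // assumed global knob (named in the diff): images packed per batch

struct ConvStub {                       // hypothetical stand-in for ConvolutionLayer<Dtype>
  bool is_1x1_ = false;

  void Forward_gpu_batched() { std::printf("batched (packed) forward\n"); }
  void Forward_gpu_org()     { std::printf("original per-image forward\n"); }

  // Same condition as the uncommented Forward_gpu: prefer the batched path
  // only for non-1x1 convolutions when packing is enabled with N > 1.
  void Forward_gpu() {
    if (!is_1x1_ && use_packing_scheme && global_packing_N > 1)
      Forward_gpu_batched();
    else
      Forward_gpu_org();
  }
};

int main() {
  ConvStub conv;
  conv.Forward_gpu();   // takes the batched path
  conv.is_1x1_ = true;
  conv.Forward_gpu();   // 1x1 layers fall back to the original path
  return 0;
}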
