Skip to content

Commit

Permalink
向外提取conv/pooling等循环中的循环不变量（hoist loop-invariant variables out of conv/pooling loops）
Browse files Browse the repository at this point in the history
  • Loading branch information
zjhellofss committed Jul 21, 2023
1 parent e93b6bd commit 3c69741
Show file tree
Hide file tree
Showing 4 changed files with 33 additions and 28 deletions.
2 changes: 1 addition & 1 deletion source/data/tensor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ void Tensor<float>::Fill(const std::vector<float>& values, bool row_major) {
const uint32_t channels = this->data_.n_slices;

for (uint32_t i = 0; i < channels; ++i) {
auto& channel_data = this->data_.slice(i);
arma::fmat& channel_data = this->data_.slice(i);
arma::fmat channel_data_t((float*)values.data() + i * planes,
this->cols(), this->rows(), false, true);
channel_data = channel_data_t.t();
Expand Down
7 changes: 4 additions & 3 deletions source/layer/details/adaptive_avgpooling.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -116,18 +116,19 @@ InferStatus AdaptiveAveragePoolingLayer::Forward(
const arma::fmat& input_channel = input_data->slice(ic);
arma::fmat& output_channel = output_data->slice(ic);
for (uint32_t c = 0; c < input_w - pooling_w + 1; c += stride_w) {
int output_col = int(c / stride_w);
for (uint32_t r = 0; r < input_h - pooling_h + 1; r += stride_h) {
int output_row = int(r / stride_h);
float mean_value = 0.f;
float* output_channel_ptr = output_channel.colptr(int(c / stride_w));
float* output_channel_ptr = output_channel.colptr(output_col);
for (uint32_t w = 0; w < pooling_w; ++w) {
const float* col_ptr = input_channel.colptr(c + w) + r;
for (uint32_t h = 0; h < pooling_h; ++h) {
float current_value = *(col_ptr + h);
mean_value = mean_value + current_value;
}
}
*(output_channel_ptr + int(r / stride_h)) =
mean_value / float(pooling_size);
*(output_channel_ptr + output_row) = mean_value / float(pooling_size);
}
}
}
Expand Down
38 changes: 20 additions & 18 deletions source/layer/details/convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -195,26 +195,26 @@ arma::fmat ConvolutionLayer::Im2Col(sftensor input, uint32_t kernel_w,
for (uint32_t ic = 0; ic < input_c_group; ++ic) {
float* input_channel_ptr =
input->matrix_raw_ptr(ic + group * input_c_group);
uint32_t input_channel_height = input_h;
int current_col = 0;
uint32_t current_col = 0;
uint32_t channel_row = ic * row_len;
for (uint32_t w = 0; w < input_padded_w - kernel_w + 1; w += stride_w_) {
for (uint32_t r = 0; r < input_padded_h - kernel_h + 1; r += stride_h_) {
float* input_matrix_c_ptr =
input_matrix.colptr(current_col) + ic * row_len;
float* input_matrix_ptr =
input_matrix.colptr(current_col) + channel_row;
current_col += 1;
for (uint32_t kw = 0; kw < kernel_w; ++kw) {
const uint32_t region_w = input_h * (w + kw - padding_w_);
for (uint32_t kh = 0; kh < kernel_h; ++kh) {
if ((kh + r >= padding_h_ && kw + w >= padding_w_) &&
(kh + r < input_h + padding_h_ &&
kw + w < input_w + padding_w_)) {
float* region_ptr = input_channel_ptr +
input_channel_height * (w + kw - padding_w_) +
r + kh - padding_h_;
*input_matrix_c_ptr = *region_ptr;
float* region_ptr =
input_channel_ptr + region_w + (r + kh - padding_h_);
*input_matrix_ptr = *region_ptr;
} else {
*input_matrix_c_ptr = padding_value; // only support zero mode
*input_matrix_ptr = padding_value; // only support zero mode
}
input_matrix_c_ptr += 1;
input_matrix_ptr += 1;
}
}
}
Expand Down Expand Up @@ -339,7 +339,7 @@ ParseParameterAttrStatus ConvolutionLayer::GetInstance(
return ParseParameterAttrStatus::kParameterMissingOutChannel;
}

auto out_channel =
auto out_channel =
std::dynamic_pointer_cast<RuntimeParameterInt>(params.at("out_channels"));
if (!out_channel) {
LOG(ERROR) << "Can not find the out channel parameter";
Expand All @@ -362,7 +362,8 @@ ParseParameterAttrStatus ConvolutionLayer::GetInstance(
LOG(ERROR) << "Can not find the bias parameter";
return ParseParameterAttrStatus::kParameterMissingUseBias;
}
auto use_bias = std::dynamic_pointer_cast<RuntimeParameterBool>(params.at("bias"));
auto use_bias =
std::dynamic_pointer_cast<RuntimeParameterBool>(params.at("bias"));
if (!use_bias) {
LOG(ERROR) << "Can not find the bias parameter";
return ParseParameterAttrStatus::kParameterMissingUseBias;
Expand All @@ -372,7 +373,7 @@ ParseParameterAttrStatus ConvolutionLayer::GetInstance(
LOG(ERROR) << "Can not find the stride parameter";
return ParseParameterAttrStatus::kParameterMissingStride;
}
auto stride =
auto stride =
std::dynamic_pointer_cast<RuntimeParameterIntArray>(params.at("stride"));
if (!stride) {
LOG(ERROR) << "Can not find the stride parameter";
Expand All @@ -383,16 +384,16 @@ ParseParameterAttrStatus ConvolutionLayer::GetInstance(
LOG(ERROR) << "Can not find the kernel parameter";
return ParseParameterAttrStatus::kParameterMissingKernel;
}
auto kernel =
std::dynamic_pointer_cast<RuntimeParameterIntArray>(params.at("kernel_size"));
auto kernel = std::dynamic_pointer_cast<RuntimeParameterIntArray>(
params.at("kernel_size"));
if (!kernel) {
LOG(ERROR) << "Can not find the kernel parameter";
return ParseParameterAttrStatus::kParameterMissingKernel;
}

if (params.find("padding_mode") != params.end()) {
auto padding_mode =
std::dynamic_pointer_cast<RuntimeParameterString>(params.at("padding_mode"));
auto padding_mode = std::dynamic_pointer_cast<RuntimeParameterString>(
params.at("padding_mode"));
if (padding_mode == nullptr) {
LOG(ERROR) << "Can not find the padding parameter";
return ParseParameterAttrStatus::kParameterMissingPaddingMode;
Expand All @@ -408,7 +409,8 @@ ParseParameterAttrStatus ConvolutionLayer::GetInstance(
return ParseParameterAttrStatus::kParameterMissingPaddingMode;
}

auto groups = std::dynamic_pointer_cast<RuntimeParameterInt>(params.at("groups"));
auto groups =
std::dynamic_pointer_cast<RuntimeParameterInt>(params.at("groups"));
if (!groups) {
LOG(ERROR) << "Can not find the groups parameter";
return ParseParameterAttrStatus::kParameterMissingGroups;
Expand Down
14 changes: 8 additions & 6 deletions source/layer/details/maxpooling.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -133,25 +133,27 @@ InferStatus MaxPoolingLayer::Forward(
const arma::fmat& input_channel = input_data->slice(ic);
arma::fmat& output_channel = output_data->slice(ic);
for (uint32_t c = 0; c < input_padded_w - pooling_w + 1; c += stride_w_) {
int output_col = int(c / stride_w_);
for (uint32_t r = 0; r < input_padded_h - pooling_h + 1;
r += stride_h_) {
float* output_channel_ptr = output_channel.colptr(int(c / stride_w_));
int output_row = int(r / stride_h_);
float* output_channel_ptr = output_channel.colptr(output_col);
float max_value = std::numeric_limits<float>::lowest();
for (uint32_t w = 0; w < pooling_w; ++w) {
const float* col_ptr = input_channel.colptr(c + w - padding_w_);
for (uint32_t h = 0; h < pooling_h; ++h) {
float current_value = 0.f;
if ((h + r >= padding_h_ && w + c >= padding_w_) &&
(h + r < input_h + padding_h_ &&
w + c < input_w + padding_w_)) {
const float* col_ptr = input_channel.colptr(c + w - padding_w_);
current_value = *(col_ptr + r + h - padding_h_);
} else {
current_value = std::numeric_limits<float>::lowest();
}
max_value = max_value > current_value ? max_value : current_value;
}
}
*(output_channel_ptr + int(r / stride_h_)) = max_value;
*(output_channel_ptr + output_row) = max_value;
}
}
}
Expand Down Expand Up @@ -182,7 +184,7 @@ ParseParameterAttrStatus MaxPoolingLayer::GetInstance(
return ParseParameterAttrStatus::kParameterMissingPadding;
}

auto padding =
auto padding =
std::dynamic_pointer_cast<RuntimeParameterIntArray>(params.at("padding"));
if (!padding) {
LOG(ERROR) << "Can not find the padding parameter";
Expand All @@ -194,8 +196,8 @@ ParseParameterAttrStatus MaxPoolingLayer::GetInstance(
return ParseParameterAttrStatus::kParameterMissingKernel;
}

auto kernel_size =
std::dynamic_pointer_cast<RuntimeParameterIntArray>(params.at("kernel_size"));
auto kernel_size = std::dynamic_pointer_cast<RuntimeParameterIntArray>(
params.at("kernel_size"));
if (!kernel_size) {
LOG(ERROR) << "Can not find the kernel size parameter";
return ParseParameterAttrStatus::kParameterMissingKernel;
Expand Down

0 comments on commit 3c69741

Please sign in to comment.