
Commit 22e9847

Migrate #3736 from master. (#3745)
* Use Eigen kernel for REFLECT mode Pad.

* Do not call is_optimized_et.
ayzhuang authored and diyessi committed Oct 10, 2019
1 parent a68a3fa commit 22e9847
Showing 5 changed files with 137 additions and 35 deletions.
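
The change teaches the CPU backend's Eigen pad kernel to handle ngraph::op::PadMode::REFLECT in addition to PadMode::CONSTANT. REFLECT padding mirrors the tensor across each edge without repeating the edge element, so along a dimension of length n the mirrored pattern repeats with period 2n - 2. As a minimal standalone sketch of that indexing rule (not code from this commit), using the same example the commit adds to pad.hpp:

// Minimal standalone sketch of REFLECT-pad indexing (not from this commit).
#include <cstdio>
#include <vector>

// Map a possibly out-of-range coordinate i onto [0, n) by mirroring
// across the edges without repeating the edge elements.
long reflect_index(long i, long n)
{
    long p = 2 * n - 2;         // reflection period
    long m = ((i % p) + p) % p; // wrap into [0, p)
    return m < n ? m : p - m;   // fold the mirrored half back
}

int main()
{
    std::vector<float> in{1, 2, 3, 4};
    const long n = 4, pad_below = 6, pad_above = 13;
    // Prints: 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3
    for (long o = 0; o < pad_below + n + pad_above; o++)
    {
        std::printf("%g ", in[reflect_index(o - pad_below, n)]);
    }
    std::printf("\n");
}
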
32 changes: 19 additions & 13 deletions src/ngraph/runtime/cpu/builder/pad.cpp
@@ -50,7 +50,8 @@ namespace ngraph
auto padding_above = pad->get_padding_above();
auto pad_mode = pad->get_pad_mode();

- if (pad_mode == ngraph::op::PadMode::CONSTANT)
+ if (pad_mode == ngraph::op::PadMode::CONSTANT ||
+     pad_mode == ngraph::op::PadMode::REFLECT)
{
std::function<decltype(runtime::cpu::kernel::pad_and_slice<float, 1>)> kernel;

@@ -65,6 +66,7 @@ namespace ngraph
out_shape,
padding_below,
padding_above,
+ pad_mode,
arg_buffer_index,
padding_value_index,
out_buffer_index](CPURuntimeContext* ctx,
@@ -76,6 +78,7 @@
out_shape,
CoordinateDiff(padding_below.begin(), padding_below.end()),
CoordinateDiff(padding_above.begin(), padding_above.end()),
+ pad_mode,
ectx->arena);
};
functors.emplace_back(functor);
@@ -123,7 +126,8 @@ namespace ngraph
auto padding_above = pad->get_padding_above();
auto pad_mode = pad->get_pad_mode();

- if (pad_mode == ngraph::op::PadMode::CONSTANT)
+ if (pad_mode == ngraph::op::PadMode::CONSTANT ||
+     pad_mode == ngraph::op::PadMode::REFLECT)
{
std::function<decltype(runtime::cpu::kernel::pad_and_slice<float, 1>)> kernel;

@@ -132,17 +136,19 @@
arg_shape.size(),
runtime::cpu::kernel::pad_and_slice);

- auto functor = [kernel, arg_shape, out_shape, padding_below, padding_above](
-     const std::vector<void*>& inputs, std::vector<void*>& outputs) {
-     kernel(inputs[0],
-            outputs[0],
-            inputs[1],
-            arg_shape,
-            out_shape,
-            CoordinateDiff(padding_below.begin(), padding_below.end()),
-            CoordinateDiff(padding_above.begin(), padding_above.end()),
-            0);
- };
+ auto functor =
+     [kernel, arg_shape, out_shape, padding_below, padding_above, pad_mode](
+         const std::vector<void*>& inputs, std::vector<void*>& outputs) {
+         kernel(inputs[0],
+                outputs[0],
+                inputs[1],
+                arg_shape,
+                out_shape,
+                CoordinateDiff(padding_below.begin(), padding_below.end()),
+                CoordinateDiff(padding_above.begin(), padding_above.end()),
+                pad_mode,
+                0);
+     };
return functor;
}
else
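
The builder change is mechanical: pad_mode is now captured by value alongside the shapes and forwarded to pad_and_slice, and REFLECT is routed through the same code path as CONSTANT in both the functor-building and the codegen-functor overloads. A rough sketch of the capture-by-value pattern, with hypothetical names (the kernel-selection macro and CPURuntimeContext plumbing are elided):

#include <functional>
#include <vector>

enum class PadMode { CONSTANT, EDGE, REFLECT, SYMMETRIC };

using Functor = std::function<void(const std::vector<void*>&, std::vector<void*>&)>;

Functor make_pad_functor(void (*kernel)(void*, void*, PadMode), PadMode pad_mode)
{
    // pad_mode is copied into the closure at graph-compile time, so the
    // stored functor no longer needs the Pad op when it later executes.
    return [kernel, pad_mode](const std::vector<void*>& inputs,
                              std::vector<void*>& outputs) {
        kernel(inputs[0], outputs[0], pad_mode);
    };
}
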
40 changes: 22 additions & 18 deletions src/ngraph/runtime/cpu/cpu_emitter.cpp
@@ -3107,8 +3107,26 @@ namespace ngraph
auto arg0_shape = args[0].get_shape();
auto result_shape = out[0].get_shape();

+ std::string pad_mode_string;
+ switch (pad->get_pad_mode())
+ {
+ case ngraph::op::PadMode::CONSTANT:
+     pad_mode_string = "ngraph::op::PadMode::CONSTANT";
+     break;
+ case ngraph::op::PadMode::EDGE:
+     pad_mode_string = "ngraph::op::PadMode::EDGE";
+     break;
+ case ngraph::op::PadMode::REFLECT:
+     pad_mode_string = "ngraph::op::PadMode::REFLECT";
+     break;
+ case ngraph::op::PadMode::SYMMETRIC:
+     pad_mode_string = "ngraph::op::PadMode::SYMMETRIC";
+     break;
+ }

if (arg0_shape.size() == 4 && args[0].get_element_type() == element::f32 &&
- pad->get_pad_mode() == ngraph::op::PadMode::CONSTANT)
+ (pad->get_pad_mode() == ngraph::op::PadMode::CONSTANT ||
+  pad->get_pad_mode() == ngraph::op::PadMode::REFLECT))
{
writer << "cpu::kernel::pad_4d_float32(" << args[0].get_name() << ",\n"
<< " " << out[0].get_name() << ",\n"
@@ -3118,26 +3136,12 @@ namespace ngraph
<< " {" << join(pad->get_padding_below())
<< "},\n"
<< " {" << join(pad->get_padding_above())
- << "}, 0);\n";
+ << "}, \n"
+ << " " << pad_mode_string << ",\n"
+ << " 0);\n";
}
else
{
- std::string pad_mode_string;
- switch (pad->get_pad_mode())
- {
- case ngraph::op::PadMode::CONSTANT:
-     pad_mode_string = "ngraph::op::PadMode::CONSTANT";
-     break;
- case ngraph::op::PadMode::EDGE:
-     pad_mode_string = "ngraph::op::PadMode::EDGE";
-     break;
- case ngraph::op::PadMode::REFLECT:
-     pad_mode_string = "ngraph::op::PadMode::REFLECT";
-     break;
- case ngraph::op::PadMode::SYMMETRIC:
-     pad_mode_string = "ngraph::op::PadMode::SYMMETRIC";
-     break;
- }
writer << "reference::pad<" << out[0].get_type() << ">(" << args[0].get_name()
<< ",\n";
writer << " " << args[1].get_name() << ",\n";
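
cpu_emitter.cpp generates C++ source text rather than invoking the kernel directly, so the PadMode value must be spelled as a source-level string in the emitted call. The switch that produced pad_mode_string only for the reference::pad fallback is hoisted above the if so that the pad_4d_float32 fast path can emit it too. A simplified sketch of this codegen pattern (nGraph's writer is approximated here with an ostringstream):

#include <iostream>
#include <sstream>

enum class PadMode { CONSTANT, EDGE, REFLECT, SYMMETRIC };

// Spell an enum value the way the generated source file must spell it.
const char* pad_mode_source(PadMode mode)
{
    switch (mode)
    {
    case PadMode::CONSTANT: return "ngraph::op::PadMode::CONSTANT";
    case PadMode::EDGE: return "ngraph::op::PadMode::EDGE";
    case PadMode::REFLECT: return "ngraph::op::PadMode::REFLECT";
    case PadMode::SYMMETRIC: return "ngraph::op::PadMode::SYMMETRIC";
    }
    return "";
}

int main()
{
    std::ostringstream writer;
    writer << "cpu::kernel::pad_4d_float32(arg0, out0, pad_value,\n"
           << "    " << pad_mode_source(PadMode::REFLECT) << ",\n"
           << "    0);\n";
    std::cout << writer.str(); // text that is compiled later, not executed now
}
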
3 changes: 3 additions & 0 deletions src/ngraph/runtime/cpu/cpu_kernels.hpp
@@ -21,6 +21,8 @@
#include <random>
#include <vector>

+ #include "ngraph/op/pad.hpp"

// CBLAS types and wrappers

namespace cblas
@@ -146,6 +148,7 @@ namespace ngraph
const Shape& output_shape,
const CoordinateDiff& padding_below,
const CoordinateDiff& padding_above,
+ const ngraph::op::PadMode pad_mode,
int arena);

void reduce_sum_all_1d_float32(float* input,
2 changes: 2 additions & 0 deletions src/ngraph/runtime/cpu/kernel/pad.cpp
@@ -31,6 +31,7 @@ namespace ngraph
const Shape& output_shape,
const CoordinateDiff& padding_below,
const CoordinateDiff& padding_above,
+ const ngraph::op::PadMode pad_mode,
int arena)
{
pad_and_slice<float, 4>(input,
@@ -40,6 +41,7 @@
output_shape,
padding_below,
padding_above,
+ pad_mode,
arena);
}
}
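
kernel/pad.cpp only threads the new argument through pad_4d_float32, the fixed-signature entry point that pins ElementType = float and Rank = 4 so generated code can call a plain, non-template symbol. Schematically (simplified signatures, hypothetical names):

// Sketch of the wrapper pattern: a concrete entry point over a template.
template <typename ElementType, int Rank>
void pad_and_slice_sketch(void* input, void* output, int pad_mode, int arena)
{
    // the rank- and type-generic Eigen implementation lives here
}

// Generated code links against this plain function instead of the template.
void pad_4d_float32_sketch(void* input, void* output, int pad_mode, int arena)
{
    pad_and_slice_sketch<float, 4>(input, output, pad_mode, arena);
}
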
95 changes: 91 additions & 4 deletions src/ngraph/runtime/cpu/kernel/pad.hpp
@@ -67,15 +67,19 @@ namespace ngraph
const Shape& output_shape,
const CoordinateDiff& padding_below,
const CoordinateDiff& padding_above,
+ const ngraph::op::PadMode pad_mode,
int arena)
{
- Eigen::array<Eigen::Index, Rank> out_dims, in_dims;
+ Eigen::array<Eigen::Index, Rank> out_dims, in_dims, temp_dims;
Eigen::array<Eigen::IndexPair<size_t>, Rank> padding;
Eigen::array<Eigen::Index, Rank> indices;

+ bool has_negative_below_padding = false;

for (int i = 0; i < Rank; i++)
{
out_dims[i] = output_shape[i];
+ temp_dims[i] = output_shape[i];
in_dims[i] = input_shape[i];

padding[i] = {
@@ -88,6 +92,8 @@
{
NGRAPH_CHECK(padding_below[i] > INT_MIN);
indices[i] = -padding_below[i];
+ temp_dims[i] -= padding_below[i];
+ has_negative_below_padding = true;
}
else
{
@@ -97,12 +103,93 @@

Eigen::TensorMap<Eigen::Tensor<ElementType, Rank, Eigen::RowMajor>> out(
static_cast<ElementType*>(output), out_dims);
+ Eigen::TensorMap<Eigen::Tensor<ElementType, Rank, Eigen::RowMajor>> temp(
+     static_cast<ElementType*>(output), temp_dims);
Eigen::TensorMap<Eigen::Tensor<ElementType, Rank, Eigen::RowMajor>> in(
static_cast<ElementType*>(input), in_dims);

- out.device(ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(arena)) =
-     in.pad(padding, *static_cast<ElementType*>(pad_value))
-         .slice(indices, out_dims);
+ if (pad_mode == ngraph::op::PadMode::CONSTANT)
+ {
+     out.device(ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(
+         arena)) = in.pad(padding, *static_cast<ElementType*>(pad_value))
+                       .slice(indices, out_dims);
+ }
+ else
+ {
+     // clang-format off
+     // PadMode::REFLECT
+     // We should have dim >= 2 for each dim.
+     // Example:
+     //
+     // Input shape: [4]
+     // Padding: 6 below, 13 above
+     // Output shape: [23]
+     //
+     // Input: 1 2 3 4
+     // Expected output: 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3
+     // Pattern: ... | original n elements | middle (n - 2) elements of original n in reverse order |
+     //          original n elements | middle (n - 2) elements of original n in reverse order | ...
+     //          | 1 2 3 4 | 3 2 | 1 2 3 4 | 3 2 | 1 2 3 4 | 3 2 | 1 2 3 4 | 3
+     // clang-format on
+     auto generator =
+         [&](const Eigen::array<Eigen::DenseIndex, Rank>& out_index) {
+             Eigen::array<Eigen::DenseIndex, Rank> in_index;
+             for (size_t i = 0; i < Rank; i++)
+             {
+                 auto origin_length = in_dims[i];
+                 auto p_below = padding_below[i] >= 0 ? padding_below[i] : 0;
+                 if (out_index[i] < p_below)
+                 {
+                     // padding below
+                     auto reverse = p_below - out_index[i];
+                     auto res = reverse % (origin_length * 2 - 2);
+                     if (res <= origin_length - 2)
+                     {
+                         // copy one of the middle n-2 items
+                         in_index[i] = res;
+                     }
+                     else
+                     {
+                         // copy one of the n items
+                         in_index[i] = origin_length * 2 - 2 - res;
+                     }
+                 }
+                 else if (out_index[i] < in_dims[i] + p_below)
+                 {
+                     // original
+                     in_index[i] = out_index[i] - p_below;
+                 }
+                 else
+                 {
+                     // padding above
+                     auto pos = out_index[i] - in_dims[i] - p_below;
+                     auto res = pos % (origin_length * 2 - 2);
+                     if (res < origin_length - 2)
+                     {
+                         // copy one of the middle n-2 items
+                         in_index[i] = origin_length - 2 - res;
+                     }
+                     else
+                     {
+                         // copy one of the n items
+                         in_index[i] = res - (origin_length - 2);
+                     }
+                 }
+             }
+             return in(in_index);
+         };
+
+     if (has_negative_below_padding)
+     {
+         out.device(ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(
+             arena)) = temp.generate(generator).slice(indices, out_dims);
+     }
+     else
+     {
+         out.device(ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(
+             arena)) = out.generate(generator);
+     }
+ }
}

template <typename ElementType>
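
The substance of the change is in pad_and_slice. CONSTANT padding keeps the old in.pad(...).slice(...) expression, while REFLECT builds the output through an Eigen generator that maps every output coordinate back to a source coordinate, one dimension at a time. Negative below-padding (pad-then-slice) is handled by generating at the pre-slice extent temp_dims over the same output buffer and then slicing down to out_dims, which is what the has_negative_below_padding flag selects; otherwise the generator writes the final shape directly. The lambda's two modular branches (below and above the original extent) fold into a single reflect formula, as in this self-contained 1-D sketch that reproduces the example from the comment block (assuming Eigen's unsupported Tensor module is on the include path):

#include <unsupported/Eigen/CXX11/Tensor>
#include <iostream>

int main()
{
    using Tensor1f = Eigen::Tensor<float, 1, Eigen::RowMajor>;
    Tensor1f in(4);
    in.setValues({1, 2, 3, 4});

    const Eigen::DenseIndex n = 4, p_below = 6, p_above = 13;
    Tensor1f out(p_below + n + p_above);

    // generate() calls the functor once per output coordinate and stores the
    // returned element there, mirroring the kernel's generator lambda.
    out = out.generate([&](const Eigen::array<Eigen::DenseIndex, 1>& out_index) {
        Eigen::DenseIndex i = out_index[0] - p_below; // position relative to input
        Eigen::DenseIndex p = 2 * n - 2;              // reflection period
        Eigen::DenseIndex m = ((i % p) + p) % p;      // wrap into [0, p)
        return in(m < n ? m : p - m);                 // fold the mirrored half back
    });

    for (Eigen::DenseIndex j = 0; j < out.dimension(0); j++)
    {
        std::cout << out(j) << " "; // 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3
    }
    std::cout << "\n";
}

For Rank > 1 the kernel applies this mapping independently per dimension inside its loop over i < Rank, which is why REFLECT requires every padded dimension to have at least two elements.
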
