64 changes: 32 additions & 32 deletions include/tvm/relax/attrs/nn.h
@@ -31,9 +31,9 @@ namespace relax {

/*! \brief Attributes used in Conv1d operator */
struct Conv1DAttrs : public AttrsNodeReflAdapter<Conv1DAttrs> {
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> dilation;
int groups;
ffi::String data_layout;
ffi::String kernel_layout;
@@ -75,9 +75,9 @@ struct Conv1DAttrs : public AttrsNodeReflAdapter<Conv1DAttrs> {

/*! \brief Attributes used in Conv2d operator */
struct Conv2DAttrs : public AttrsNodeReflAdapter<Conv2DAttrs> {
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> dilation;
int groups;
ffi::String data_layout;
ffi::String kernel_layout;
@@ -121,9 +121,9 @@ struct Conv2DAttrs : public AttrsNodeReflAdapter<Conv2DAttrs> {

/*! \brief Attributes used in Conv3d operator */
struct Conv3DAttrs : public AttrsNodeReflAdapter<Conv3DAttrs> {
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> dilation;
int groups;
ffi::String data_layout;
ffi::String kernel_layout;
@@ -169,10 +169,10 @@ struct Conv3DAttrs : public AttrsNodeReflAdapter<Conv3DAttrs> {

/*! \brief Attributes used in Conv1DTranspose operator */
struct Conv1DTransposeAttrs : public AttrsNodeReflAdapter<Conv1DTransposeAttrs> {
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> output_padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> output_padding;
+ ffi::Array<int64_t> dilation;
int groups;
ffi::String data_layout;
ffi::String kernel_layout;
@@ -218,10 +218,10 @@ struct Conv1DTransposeAttrs : public AttrsNodeReflAdapter<Conv1DTransposeAttrs>

/*! \brief Attributes used in Conv2d operator */
struct Conv2DTransposeAttrs : public AttrsNodeReflAdapter<Conv2DTransposeAttrs> {
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> output_padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> output_padding;
+ ffi::Array<int64_t> dilation;
int groups;
ffi::String data_layout;
ffi::String kernel_layout;
@@ -269,10 +269,10 @@ struct Conv2DTransposeAttrs : public AttrsNodeReflAdapter<Conv2DTransposeAttrs>

/*! \brief Attributes used in max_pool1d and avg_pool1d operator */
struct Pool1DAttrs : public AttrsNodeReflAdapter<Pool1DAttrs> {
- ffi::Array<IntImm> pool_size;
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> pool_size;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> dilation;
bool ceil_mode;
bool count_include_pad;
ffi::String layout;
@@ -310,10 +310,10 @@ struct Pool1DAttrs : public AttrsNodeReflAdapter<Pool1DAttrs> {

/*! \brief Attributes used in max_pool2d and avg_pool2d operator */
struct Pool2DAttrs : public AttrsNodeReflAdapter<Pool2DAttrs> {
- ffi::Array<IntImm> pool_size;
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> pool_size;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> dilation;
bool ceil_mode;
bool count_include_pad;
ffi::String layout;
@@ -353,10 +353,10 @@ struct Pool2DAttrs : public AttrsNodeReflAdapter<Pool2DAttrs> {

/*! \brief Attributes used in max_pool3d and avg_pool3d operator */
struct Pool3DAttrs : public AttrsNodeReflAdapter<Pool3DAttrs> {
- ffi::Array<IntImm> pool_size;
- ffi::Array<IntImm> strides;
- ffi::Array<IntImm> padding;
- ffi::Array<IntImm> dilation;
+ ffi::Array<int64_t> pool_size;
+ ffi::Array<int64_t> strides;
+ ffi::Array<int64_t> padding;
+ ffi::Array<int64_t> dilation;
bool ceil_mode;
bool count_include_pad;
ffi::String layout;
@@ -396,7 +396,7 @@ struct Pool3DAttrs : public AttrsNodeReflAdapter<Pool3DAttrs> {

/*! \brief Attributes for 1d adaptive pool operator */
struct AdaptivePool1DAttrs : public AttrsNodeReflAdapter<AdaptivePool1DAttrs> {
- ffi::Optional<ffi::Array<IntImm>> output_size;
+ ffi::Optional<ffi::Array<int64_t>> output_size;
ffi::String layout;
ffi::String out_layout;

@@ -421,7 +421,7 @@ struct AdaptivePool1DAttrs : public AttrsNodeReflAdapter<AdaptivePool1DAttrs> {

/*! \brief Attributes for 2d adaptive pool operator */
struct AdaptivePool2DAttrs : public AttrsNodeReflAdapter<AdaptivePool2DAttrs> {
- ffi::Optional<ffi::Array<IntImm>> output_size;
+ ffi::Optional<ffi::Array<int64_t>> output_size;
ffi::String layout;
ffi::String out_layout;

@@ -446,7 +446,7 @@ struct AdaptivePool2DAttrs : public AttrsNodeReflAdapter<AdaptivePool2DAttrs> {

/*! \brief Attributes for 3d adaptive pool operator */
struct AdaptivePool3DAttrs : public AttrsNodeReflAdapter<AdaptivePool3DAttrs> {
- ffi::Optional<ffi::Array<IntImm>> output_size;
+ ffi::Optional<ffi::Array<int64_t>> output_size;
ffi::String layout;
ffi::String out_layout;

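
Note on the header change above: strides, padding, dilation, pool_size and output_size are now arrays of plain int64_t rather than boxed IntImm nodes, so callers build them directly from integer literals. A minimal sketch of populating Conv2DAttrs after this change, mirroring the updated RewriteMatmul in src/contrib/msc/framework/tensorrt/transform_tensorrt.cc further down in this diff (the wrapper function and includes here are illustrative only, not part of the PR):

// Illustrative sketch; assumes the post-PR header tvm/relax/attrs/nn.h.
#include <tvm/relax/attrs/nn.h>

void BuildConv2DAttrsExample() {
  using namespace tvm;
  auto conv_attrs = ffi::make_object<relax::Conv2DAttrs>();
  // Before this PR: ffi::Array<IntImm>{Integer(1), Integer(1)} and similar.
  // After: the arrays carry raw int64_t values.
  conv_attrs->strides = ffi::Array<int64_t>{1, 1};
  conv_attrs->padding = ffi::Array<int64_t>{0, 0, 0, 0};
  conv_attrs->dilation = ffi::Array<int64_t>{1, 1};
  conv_attrs->groups = 1;
  conv_attrs->data_layout = "NCHW";
  conv_attrs->kernel_layout = "OIHW";
  (void)conv_attrs;  // normally attached to a relax.nn.conv2d call via Attrs
}
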
4 changes: 2 additions & 2 deletions python/tvm/relax/dpl/pattern.py
@@ -656,8 +656,8 @@ def __len__(self):

@register_df_node
class AttrPattern(DFPattern):
"""Get match an expression with a certain attributes.
Currently only supports Op Attributes, not call Attributes.
"""Match an expression with certain attributes.
Supports Op attributes, Call attributes, and Function attributes.

Parameters
----------
2 changes: 1 addition & 1 deletion python/tvm/relax/frontend/onnx/onnx_frontend.py
@@ -2567,7 +2567,7 @@ def _impl_v1(cls, bb, inputs, attr, params):
pads = []
if cls.name == "avg_pool":
for axis in range(len(input_shape) - 2):
- axis_shape = input_shape[2 + axis]
+ axis_shape = int(input_shape[2 + axis])
stride = strides[axis]
kernel = kernel_shape[axis]
pad = cls.get_pad_pair(axis_shape, kernel, stride, auto_pad)
2 changes: 1 addition & 1 deletion python/tvm/relax/op/_op_gradient.py
@@ -1202,7 +1202,7 @@ def conv2d_grad(
out_h = (grad_h - 1) * stride_h - pad_top - pad_bottom + filter_h
out_w = (grad_w - 1) * stride_w - pad_left - pad_right + filter_w

- output_padding = (in_h - out_h, in_w - out_w)
+ output_padding = (int(in_h - out_h), int(in_w - out_w))

data_grad = conv2d_transpose( # type: ignore
output_grad,
4 changes: 3 additions & 1 deletion src/contrib/msc/core/utils.cc
@@ -275,11 +275,13 @@ const ffi::String StringUtils::ToString(const ffi::Any& obj) {
obj_string = *opt_str;
} else if (const auto* n = obj.as<IntImmNode>()) {
obj_string = std::to_string(n->value);
+ } else if (obj.type_index() == kTVMFFIInt) {
+   obj_string = std::to_string(obj.cast<int64_t>());
} else if (const auto* n = obj.as<FloatImmNode>()) {
obj_string = std::to_string(n->value);
} else if (const auto* n = obj.as<ffi::ArrayObj>()) {
for (size_t i = 0; i < n->size(); i++) {
- obj_string = obj_string + ToString((*n)[i].cast<ObjectRef>());
+ obj_string = obj_string + ToString((*n)[i]);
if (n->size() == 1 || i < n->size() - 1) {
obj_string = obj_string + ",";
}
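
The new branch above is needed because elements taken out of an ffi::Array<int64_t> arrive in the ffi::Any as plain integers (type index kTVMFFIInt) rather than as IntImmNode objects, and the array loop now forwards each element Any as-is instead of casting it to ObjectRef, which would fail for unboxed ints. A small usage sketch of the intended behaviour, assuming the updated StringUtils::ToString and that an ffi::Array<int64_t> can be stored in an ffi::Any (namespaces and includes abbreviated; not part of the PR):

// Sketch only.
ffi::Array<int64_t> strides{2, 2};
ffi::Any value = strides;                         // array object held in an Any
ffi::String text = StringUtils::ToString(value);  // expected result: "2,2"
// Note: a single-element array such as {3} renders as "3," because the loop
// appends a trailing comma when n->size() == 1.
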
13 changes: 6 additions & 7 deletions src/contrib/msc/framework/tensorrt/transform_tensorrt.cc
@@ -429,10 +429,9 @@ Expr RewriteConv1d(BlockBuilder builder, const Var& var, const Call& src_call,
// change to conv2d
static const Op& conv2d_op = Op::Get("relax.nn.conv2d");
auto conv_attrs = ffi::make_object<Conv2DAttrs>();
- conv_attrs->strides = ffi::Array<IntImm>{src_attrs->strides[0], Integer(1)};
- conv_attrs->padding =
-     ffi::Array<IntImm>{Integer(0), src_attrs->padding[0], Integer(0), src_attrs->padding[1]};
- conv_attrs->dilation = ffi::Array<IntImm>{src_attrs->dilation[0], Integer(1)};
+ conv_attrs->strides = ffi::Array<int64_t>{src_attrs->strides[0], 1};
+ conv_attrs->padding = ffi::Array<int64_t>{0, src_attrs->padding[0], 0, src_attrs->padding[1]};
+ conv_attrs->dilation = ffi::Array<int64_t>{src_attrs->dilation[0], 1};
conv_attrs->groups = src_attrs->groups;
conv_attrs->data_layout = "NCHW";
conv_attrs->kernel_layout = "OIHW";
@@ -706,9 +705,9 @@ Expr RewriteMatmul(BlockBuilder builder, const Var& var, const Call& src_call,
// to conv2d
static const Op& conv2d_op = Op::Get("relax.nn.conv2d");
auto conv_attrs = ffi::make_object<Conv2DAttrs>();
- conv_attrs->strides = ffi::Array<IntImm>{Integer(1), Integer(1)};
- conv_attrs->padding = ffi::Array<IntImm>{Integer(0), Integer(0), Integer(0), Integer(0)};
- conv_attrs->dilation = ffi::Array<IntImm>{Integer(1), Integer(1)};
+ conv_attrs->strides = ffi::Array<int64_t>{1, 1};
+ conv_attrs->padding = ffi::Array<int64_t>{0, 0, 0, 0};
+ conv_attrs->dilation = ffi::Array<int64_t>{1, 1};
conv_attrs->groups = 1;
conv_attrs->data_layout = "NCHW";
conv_attrs->kernel_layout = "OIHW";
6 changes: 5 additions & 1 deletion src/relax/backend/contrib/codegen_json/codegen_json.h
@@ -115,8 +115,12 @@ class OpAttrExtractor {
if (const auto* an = (*value).as<ffi::ArrayObj>()) {
std::vector<std::string> attr;
for (size_t i = 0; i < an->size(); ++i) {
- if (const auto* im = (*an)[i].as<IntImmNode>()) {
+ if (auto opt_int = (*an)[i].try_cast<int64_t>()) {
+   attr.push_back(std::to_string(opt_int.value()));
+ } else if (const auto* im = (*an)[i].as<IntImmNode>()) {
attr.push_back(std::to_string(im->value));
+ } else if (auto opt_float = (*an)[i].try_cast<double>()) {
+   attr.push_back(Fp2String(opt_float.value()));
} else if (const auto* fm = (*an)[i].as<FloatImmNode>()) {
attr.push_back(Fp2String(fm->value));
} else if (auto opt_str = (*an)[i].as<ffi::String>()) {
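
Since array-typed attributes can now carry unboxed int64_t and double elements alongside the older IntImm and FloatImm nodes, the extractor tries the plain casts first and keeps the node checks as a fallback. A standalone sketch of that per-element dispatch (the helper name is hypothetical, the element is assumed to be convertible to ffi::Any as in StringUtils::ToString above, and std::to_string stands in for Fp2String):

// Sketch only; mirrors the dispatch order added in OpAttrExtractor above.
std::string AttrElementToString(const tvm::ffi::Any& elem) {
  if (auto opt_int = elem.try_cast<int64_t>()) {
    return std::to_string(opt_int.value());      // unboxed integer (new in this PR)
  } else if (const auto* im = elem.as<tvm::IntImmNode>()) {
    return std::to_string(im->value);            // boxed IntImm (legacy path)
  } else if (auto opt_float = elem.try_cast<double>()) {
    return std::to_string(opt_float.value());    // unboxed double (new in this PR)
  } else if (const auto* fm = elem.as<tvm::FloatImmNode>()) {
    return std::to_string(fm->value);            // boxed FloatImm (legacy path)
  }
  return "";  // strings, nested arrays, etc. are handled elsewhere in the extractor
}
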
22 changes: 5 additions & 17 deletions src/relax/backend/contrib/nnapi/codegen.cc
@@ -107,20 +107,15 @@ class CollectFromCompositeFunctionBody : public ExprVisitor {
std::vector<std::string> strides;
if (!conv2d_attr->strides.empty()) {
for (auto stride : conv2d_attr->strides) {
- const auto* stride_val = stride.as<IntImmNode>();
- ICHECK(stride_val) << "convertion failed";
-
- strides.push_back(std::to_string(stride_val->value));
+ strides.push_back(std::to_string(stride));
}
} else {
strides = {"1", "1"};
}

std::vector<std::string> padding;
for (auto pad : conv2d_attr->padding) {
- const auto* padding_val = pad.as<IntImmNode>();
-
- padding.push_back(std::to_string(padding_val->value));
+ padding.push_back(std::to_string(pad));
}

std::vector<std::string> groups;
@@ -147,10 +142,7 @@ class CollectFromCompositeFunctionBody : public ExprVisitor {
std::vector<std::string> strides;
if (!max_pool_2d_attr->strides.empty()) {
for (auto stride : max_pool_2d_attr->strides) {
- const auto* stride_val = stride.as<IntImmNode>();
- ICHECK(stride_val) << "convertion failed";
-
- strides.push_back(std::to_string(stride_val->value));
+ strides.push_back(std::to_string(stride));
}
} else {
strides.push_back("1");
@@ -159,16 +151,12 @@ class CollectFromCompositeFunctionBody : public ExprVisitor {

std::vector<std::string> padding;
for (auto pad : max_pool_2d_attr->padding) {
- const auto* padding_val = pad.as<IntImmNode>();
-
- padding.push_back(std::to_string(padding_val->value));
+ padding.push_back(std::to_string(pad));
}

std::vector<std::string> pool_size;
for (auto size : max_pool_2d_attr->pool_size) {
- const auto* pooling_val = size.as<IntImmNode>();
-
- pool_size.push_back(std::to_string(pooling_val->value));
+ pool_size.push_back(std::to_string(size));
}

std::vector<dmlc::any> strides_attr;