fix code styles
wenming2014 committed Sep 11, 2020
1 parent d9ac068 commit ec8ab4c
Showing 5 changed files with 21 additions and 17 deletions.
cinn/common/ir_util.h (4 changes: 2 additions & 2 deletions)
@@ -97,13 +97,13 @@ Expr make_const(Type t, T v) {
}

template <typename FuncOp>
-Expr FoldExpr(FuncOp funcOp, const std::vector<Expr> &values) {
+Expr FoldExpr(FuncOp func_op, const std::vector<Expr> &values) {
Expr init_value;
for (const Expr &val : values) {
if (!init_value.defined()) {
init_value = val;
} else {
-init_value = funcOp(val, init_value);
+init_value = func_op(val, init_value);
}
}
return init_value;
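The rename above is purely stylistic (funcOp becomes func_op); the helper itself is a left fold that seeds the accumulator with the first element of the list. A minimal self-contained sketch of the same pattern, with plain ints standing in for CINN's Expr and a hypothetical FoldValues/add pair standing in for FoldExpr and its functor:

```cpp
#include <iostream>
#include <vector>

// Stand-in for CINN's FoldExpr: fold func_op over values, seeding the
// accumulator with the first element. int replaces ir::Expr so the sketch
// compiles on its own.
template <typename FuncOp>
int FoldValues(FuncOp func_op, const std::vector<int> &values) {
  bool initialized = false;
  int init_value = 0;
  for (int val : values) {
    if (!initialized) {
      init_value = val;  // first element becomes the seed
      initialized = true;
    } else {
      init_value = func_op(val, init_value);  // accumulate the rest
    }
  }
  return init_value;
}

int main() {
  auto add = [](int a, int b) { return a + b; };
  std::cout << FoldValues(add, {1, 2, 3, 4}) << "\n";  // prints 10
  return 0;
}
```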
cinn/hlir/op/broadcast.cc (4 changes: 2 additions & 2 deletions)
@@ -23,7 +23,7 @@ std::shared_ptr<OpStrategy> StrategyForElementwiseAdd(const framework::NodeAttr
framework::CINNCompute add_compute([&attrs](lang::Args args, lang::RetValue *ret) {
CHECK(!args.empty()) << "The input argument of add compute is empty! Please check.\n";
CINNValuePack a = args[0];
-CHECK_GE(a.size(), 2U) << "at least 2 input tensors for add compute\n";
+CHECK_EQ(a.size(), 2U) << "input tensor size for add compute should be 2\n";
Expr A_expr = a[0];
Expr B_expr = a[1];
CHECK(A_expr.as_tensor());
@@ -67,7 +67,7 @@ std::shared_ptr<OpStrategy> StrategyForElementwiseMul(const framework::NodeAttr
framework::CINNCompute mul_compute([&attrs](lang::Args args, lang::RetValue *ret) {
CHECK(!args.empty()) << "The input argument of elementwise_mul compute is empty! Please check.\n";
CINNValuePack a = args[0];
-CHECK_GE(a.size(), 2U) << "at least 2 input tensors for elementwise_mul compute\n";
+CHECK_EQ(a.size(), 2U) << "input tensor size for elementwise_mul compute should be 2\n";
Expr A_expr = a[0];
Expr B_expr = a[1];
CHECK(A_expr.as_tensor());
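The change above tightens the arity check for the elementwise ops from "at least two inputs" (CHECK_GE) to "exactly two inputs" (CHECK_EQ), so a surplus, silently ignored tensor now fails fast. A rough sketch of the difference using plain asserts in place of the glog-style CHECK macros (CheckBinaryArity is a hypothetical stand-in):

```cpp
#include <cassert>
#include <vector>

// Hypothetical stand-in for the CINNValuePack arity checks: an elementwise
// binary op should reject surplus inputs, not just missing ones.
void CheckBinaryArity(const std::vector<int> &inputs) {
  // Old behaviour: CHECK_GE(a.size(), 2U) accepted extra, ignored inputs.
  assert(inputs.size() >= 2 && "at least 2 input tensors");
  // New behaviour: CHECK_EQ(a.size(), 2U) rejects a third input outright.
  assert(inputs.size() == 2 && "input tensor size should be exactly 2");
}

int main() {
  CheckBinaryArity({1, 2});       // passes both checks
  // CheckBinaryArity({1, 2, 3}); // would pass the old check but fail the new one
  return 0;
}
```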
cinn/hlir/op/nn.cc (22 changes: 11 additions & 11 deletions)
@@ -116,7 +116,7 @@ std::shared_ptr<OpStrategy> StrategyForConv2d(const framework::NodeAttr &attrs,
framework::CINNCompute conv2d_compute([=](lang::Args args, lang::RetValue *ret) {
CHECK(!args.empty()) << "The input argument of conv2d compute is empty! Please check.\n";
CINNValuePack a = args[0];
-CHECK_GE(a.size(), 2U) << "at least 2 input tensors for conv2d compute\n";
+CHECK_EQ(a.size(), 2U) << "input tensor size for conv2d compute should be 2\n";
Expr A = a[0];
Expr B = a[1];
CHECK(A.as_tensor());
@@ -207,7 +207,7 @@ std::shared_ptr<OpStrategy> StrategyForBatchNorm(const framework::NodeAttr &attr
framework::CINNCompute batchnorm_compute([=](lang::Args args, lang::RetValue *ret) {
CHECK(!args.empty()) << "The input argument of batchnorm compute is empty! Please check.\n";
CINNValuePack a = args[0];
-CHECK_GE(a.size(), 2U) << "at least 2 input tensors for batchnorm compute\n";
+CHECK_EQ(a.size(), 2U) << "input tensor size for batchnorm compute should be 2\n";
Expr A = a[0];
Expr B = a[1];
CHECK(A.as_tensor());
@@ -285,9 +285,9 @@ std::shared_ptr<OpStrategy> StrategyForPool1d(const framework::NodeAttr &attrs,
LOG(ERROR) << "unsupported attr: " << iter.first << std::endl;
}
}
-CHECK(!kernel_size.empty());
-CHECK(!stride_size.empty());
-CHECK(!padding_size.empty());
+CHECK(!kernel_size.empty()) << "kernel_size for pool1d is empty. Please check.\n";
+CHECK(!stride_size.empty()) << "stride_size for pool1d is empty. Please check.\n";
+CHECK(!padding_size.empty()) << "padding_size for pool1d is empty. Please check.\n";
auto out = pe::Pool1d(A.as_tensor_ref(),
kernel_size,
stride_size,
@@ -416,9 +416,9 @@ std::shared_ptr<OpStrategy> StrategyForPool2d(const framework::NodeAttr &attrs,
LOG(ERROR) << "unsupported attr: " << iter.first << std::endl;
}
}
-CHECK(!kernel_size.empty());
-CHECK(!stride_size.empty());
-CHECK(!padding_size.empty());
+CHECK(!kernel_size.empty()) << "kernel_size for pool2d is empty. Please check.\n";
+CHECK(!stride_size.empty()) << "stride_size for pool2d is empty. Please check.\n";
+CHECK(!padding_size.empty()) << "padding_size for pool2d is empty. Please check.\n";
auto out = pe::Pool2d(A.as_tensor_ref(),
kernel_size,
stride_size,
@@ -558,9 +558,9 @@ std::shared_ptr<OpStrategy> StrategyForPool3d(const framework::NodeAttr &attrs,
LOG(ERROR) << "unsupported attr: " << iter.first << std::endl;
}
}
-CHECK(!kernel_size.empty());
-CHECK(!stride_size.empty());
-CHECK(!padding_size.empty());
+CHECK(!kernel_size.empty()) << "kernel_size for pool3d is empty. Please check.\n";
+CHECK(!stride_size.empty()) << "stride_size for pool3d is empty. Please check.\n";
+CHECK(!padding_size.empty()) << "padding_size for pool3d is empty. Please check.\n";
auto out = pe::Pool3d(A.as_tensor_ref(),
kernel_size,
stride_size,
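The conv2d and batchnorm hunks apply the same CHECK_EQ tightening, and the pool1d/pool2d/pool3d hunks attach explicit messages to the previously bare CHECKs on kernel_size, stride_size, and padding_size. A hedged sketch of that validate-before-compute pattern, with a hypothetical ValidatePoolAttrs helper and exceptions standing in for the CHECK macros:

```cpp
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical helper mirroring the pool1d/2d/3d checks: every size vector
// must be non-empty before the pooling primitive runs, and the error message
// should name both the missing attribute and the op.
void ValidatePoolAttrs(const std::string &op_name,
                       const std::vector<int> &kernel_size,
                       const std::vector<int> &stride_size,
                       const std::vector<int> &padding_size) {
  auto require_nonempty = [&](const std::vector<int> &v, const std::string &name) {
    if (v.empty()) {
      throw std::invalid_argument(name + " for " + op_name + " is empty. Please check.");
    }
  };
  require_nonempty(kernel_size, "kernel_size");
  require_nonempty(stride_size, "stride_size");
  require_nonempty(padding_size, "padding_size");
}

int main() {
  try {
    // Missing padding triggers the descriptive error.
    ValidatePoolAttrs("pool2d", /*kernel_size=*/{2, 2}, /*stride_size=*/{2, 2},
                      /*padding_size=*/{});
  } catch (const std::invalid_argument &e) {
    std::cerr << e.what() << "\n";  // padding_size for pool2d is empty. Please check.
  }
  return 0;
}
```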
cinn/hlir/op/transform.cc (2 changes: 1 addition & 1 deletion)
@@ -100,7 +100,7 @@ std::vector<std::vector<int>> InferShapeForMul(const std::vector<std::vector<int
output_shape.insert(output_shape.begin(), shape1_new.begin(), shape1_new.begin() + x_num_col_dims);
output_shape.insert(output_shape.end(), shape2_new.begin() + y_num_col_dims, shape2_new.end());

-if (output_shape.empty()) return {{1}};
+CHECK(!output_shape.empty()) << "infer shape for mul turns to be empty. Please check\n";
std::vector<std::vector<int>> res{output_shape};
return res;
}
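Here the silent fallback to a {1} output shape is replaced by a hard failure when the inferred shape for mul comes out empty. A self-contained approximation of the shape rule in this hunk (the preprocessing that produces shape1_new and shape2_new from the raw inputs is elided, so InferMulOutputShape below is illustrative, not CINN's exact implementation):

```cpp
#include <cassert>
#include <iostream>
#include <vector>

// Sketch of the shape rule: the result keeps the first x_num_col_dims dims of
// the left operand and the dims of the right operand after y_num_col_dims.
// An empty result is now an error instead of being silently replaced by {1}.
std::vector<int> InferMulOutputShape(const std::vector<int> &shape1_new,
                                     const std::vector<int> &shape2_new,
                                     int x_num_col_dims, int y_num_col_dims) {
  std::vector<int> output_shape;
  output_shape.insert(output_shape.begin(), shape1_new.begin(),
                      shape1_new.begin() + x_num_col_dims);
  output_shape.insert(output_shape.end(), shape2_new.begin() + y_num_col_dims,
                      shape2_new.end());
  // Mirrors CHECK(!output_shape.empty()) from the diff above.
  assert(!output_shape.empty() && "infer shape for mul turns out to be empty");
  return output_shape;
}

int main() {
  // e.g. [8, 16] x [16, 32] with split points 1 and 1 -> [8, 32]
  for (int d : InferMulOutputShape({8, 16}, {16, 32}, 1, 1)) std::cout << d << " ";
  std::cout << "\n";
  return 0;
}
```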
cinn/pybind/framework.cc (6 changes: 5 additions & 1 deletion)
@@ -51,7 +51,11 @@ void BindFramework(pybind11::module *m) {
.def_readwrite("attr_store", &NodeAttr::attr_store)
.def("set_attr",
[](NodeAttr &self, const std::string &key, NodeAttr::attr_t value) { self.attr_store[key] = value; })
.def("get_attr", [](NodeAttr &self, const std::string &key) { return self.attr_store[key]; })
.def("get_attr",
[](NodeAttr &self, const std::string &key) {
CHECK_EQ(self.attr_store.count(key), 1) << "Didn't find value with key [" << key << "].";
return self.attr_store[key];
})
.def("__str__", [](NodeAttr &self) { return utils::GetStreamCnt(self); });
} // namespace frontend
} // namespace cinn::pybind
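The get_attr binding previously indexed attr_store with operator[], which default-constructs and inserts a value for an unknown key; the new CHECK_EQ on count(key) fails loudly instead. A minimal stand-alone sketch of the same guard on a plain std::map, no pybind11 required (AttrStore and GetAttr are hypothetical stand-ins):

```cpp
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

// Stand-in for NodeAttr::attr_store (the real attr_t is a richer attribute
// type; a string is enough to show the lookup guard).
using AttrStore = std::map<std::string, std::string>;

std::string GetAttr(AttrStore &attr_store, const std::string &key) {
  // Mirrors CHECK_EQ(self.attr_store.count(key), 1): fail loudly instead of
  // letting operator[] silently insert a default-constructed value.
  if (attr_store.count(key) != 1) {
    throw std::out_of_range("Didn't find value with key [" + key + "].");
  }
  return attr_store[key];
}

int main() {
  AttrStore attrs{{"data_format", "NCHW"}};
  std::cout << GetAttr(attrs, "data_format") << "\n";  // NCHW
  try {
    GetAttr(attrs, "groups");  // unknown key now raises instead of inserting ""
  } catch (const std::out_of_range &e) {
    std::cerr << e.what() << "\n";
  }
  return 0;
}
```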
