@daj
Created October 7, 2017 22:59
Example ops_to_register.h file for the TensorFlow MNIST model used in AndroidTensorFlowMNISTExample (https://github.com/daj/AndroidTensorFlowMNISTExample/commit/6e1b15ff6d3182be0e665c0456f356ffc2f62514).
// This file was autogenerated by print_selective_registration_header.py
#ifndef OPS_TO_REGISTER
#define OPS_TO_REGISTER
namespace {
  // Returns a pointer to the first non-space character of x (or to the
  // terminating null), so that comparisons can ignore whitespace.
  constexpr const char* skip(const char* x) {
    return (*x) ? (*x == ' ' ? skip(x + 1) : x) : x;
  }

  // Compile-time, whitespace-insensitive string equality.
  constexpr bool isequal(const char* x, const char* y) {
    return (*skip(x) && *skip(y))
               ? (*skip(x) == *skip(y) && isequal(skip(x) + 1, skip(y) + 1))
               : (!*skip(x) && !*skip(y));
  }

  // Compile-time linear search: true if x matches any of the N strings in y.
  template<int N>
  struct find_in {
    static constexpr bool f(const char* x, const char* const y[N]) {
      return isequal(x, y[0]) || find_in<N - 1>::f(x, y + 1);
    }
  };

  // Base case: nothing matches an empty list.
  template<>
  struct find_in<0> {
    static constexpr bool f(const char* x, const char* const y[]) {
      return false;
    }
  };
}  // end namespace
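// (Editorial sketch, not part of the generated output; safe to delete.)
// The helpers above give a whitespace-insensitive compile-time string match,
// which is why the stray space in "MatMulOp<CPUDevice, float, false >" below
// is harmless. For example, both of these checks hold at compile time:
static_assert(isequal("MatMulOp<CPUDevice, float, false >",
                      "MatMulOp<CPUDevice,float,false>"),
              "spaces are ignored when comparing kernel class names");
static_assert(!isequal("Conv2DOp<CPUDevice, float>",
                       "Conv2DOp<CPUDevice, double>"),
              "different template arguments do not match");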
// Kernel classes needed by the ops in the MNIST graph; only kernels whose
// class names appear in this list are registered.
constexpr const char* kNecessaryOpKernelClasses[] = {
  "BinaryOp<CPUDevice, functor::add<float>>",
  "BiasOp<CPUDevice, float>",
  "ConstantOp",
  "Conv2DOp<CPUDevice, float>",
  "MatMulOp<CPUDevice, float, false >",
  "MaxPoolingOp<CPUDevice, float>",
  "NoOp",
  "PlaceholderOp",
  "ReluOp<CPUDevice, float>",
  "ReshapeOp",
  "SoftmaxOp<CPUDevice, float>",
  "RecvOp",
  "SendOp",
};
#define SHOULD_REGISTER_OP_KERNEL(clz) (find_in<sizeof(kNecessaryOpKernelClasses) / sizeof(*kNecessaryOpKernelClasses)>::f(clz, kNecessaryOpKernelClasses))
constexpr inline bool ShouldRegisterOp(const char op[]) {
  return false
      || isequal(op, "Add")
      || isequal(op, "BiasAdd")
      || isequal(op, "Const")
      || isequal(op, "Conv2D")
      || isequal(op, "MatMul")
      || isequal(op, "MaxPool")
      || isequal(op, "NoOp")
      || isequal(op, "Placeholder")
      || isequal(op, "Relu")
      || isequal(op, "Reshape")
      || isequal(op, "Softmax")
      || isequal(op, "_Recv")
      || isequal(op, "_Send")
      ;
}
#define SHOULD_REGISTER_OP(op) ShouldRegisterOp(op)
#define SHOULD_REGISTER_OP_GRADIENT false
#endif
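
For reference, here is a minimal compile-time sanity check: a hypothetical, illustrative test file (not part of this gist or of TensorFlow) that includes the header above. Because every predicate in the header is constexpr, the registration decisions can be verified with static_assert.

// check_ops_to_register.cc -- hypothetical, illustrative only.
#include "ops_to_register.h"

// Ops that appear in the MNIST graph are kept...
static_assert(SHOULD_REGISTER_OP("Conv2D"), "Conv2D is used by the MNIST graph");
static_assert(SHOULD_REGISTER_OP("Softmax"), "Softmax is used by the MNIST graph");
// ...while any op not listed in the header is rejected.
static_assert(!SHOULD_REGISTER_OP("Sub"), "Sub is not used by this model");

// Kernel class names are matched with whitespace ignored.
static_assert(SHOULD_REGISTER_OP_KERNEL("MaxPoolingOp<CPUDevice, float>"),
              "the CPU float MaxPooling kernel is kept");
static_assert(!SHOULD_REGISTER_OP_KERNEL("MaxPoolingOp<CPUDevice, double>"),
              "unused kernel instantiations are rejected");

// No gradient registrations are needed for inference.
static_assert(!SHOULD_REGISTER_OP_GRADIENT, "gradients are compiled out");

int main() { return 0; }

In the TensorFlow versions this gist targets, the header takes effect when TensorFlow itself is rebuilt with selective registration enabled (the SELECTIVE_REGISTRATION build define), so that only the ops and kernels listed above end up registered in the resulting binary.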