std::true_type,
is_any_of<T, Tail...>>::type
{};
+ // underlying_type<T>: identity mapping for non-enum types; for enum
+ // types, resolves to the enum's underlying integral type. This lets
+ // denc_traits (below) accept an enum whenever its underlying integral
+ // type is one of the supported fixed-width types.
+ template<typename T, typename=void> struct underlying_type {
+ using type = T;
+ };
+ // partial specialization selected via SFINAE only when T is an enum
+ template<typename T> struct underlying_type<
+ T, typename std::enable_if<std::is_enum<T>::value>::type> {
+ using type = typename std::underlying_type<T>::type;
+ };
}
struct denc_traits<
T,
typename std::enable_if<
- _denc::is_any_of<T, ceph_le64, ceph_le32, ceph_le16, uint8_t
+ _denc::is_any_of<typename _denc::underlying_type<T>::type,
+ ceph_le64, ceph_le32, ceph_le16, uint8_t
#ifndef _CHAR_IS_SIGNED
, int8_t
#endif
ASSERT_TRUE(false);
}
+namespace {
+ // search `underlying_type` in denc.h for supported underlying types
+ // Colour's underlying type is int8_t, which denc_traits supports.
+ enum class Colour : int8_t { R,G,B };
+ // stream inserter so the test framework can print readable Colour
+ // values in failure messages instead of raw integers
+ ostream& operator<<(ostream& os, Colour c) {
+ switch (c) {
+ case Colour::R:
+ return os << "Colour::R";
+ case Colour::G:
+ return os << "Colour::G";
+ case Colour::B:
+ return os << "Colour::B";
+ default:
+ return os << "Colour::???";
+ }
+ }
+}
+
TEST(EncodingRoundTrip, Integers) {
// int types
{
i = 42;
test_encode_and_decode(i);
}
+ // enum: round-trips because Colour's fixed underlying type (int8_t)
+ // is in denc_traits' supported list — see underlying_type in denc.h
+ {
+ test_encode_and_decode(Colour::R);
+ // this should not build, as the size of unsigned is not the same on
+ // different archs, that's why denc_traits<> intentionally leaves
+ // `int` and `unsigned int` out of supported types.
+ //
+ // enum E { R, G, B };
+ // test_encode_and_decode(R);
+ }
}
const char* expected_what[] = {