This question could be rephrased more generally as, "how do I select and execute compile-time generated code using a runtime value?"
The basic answer is to build a compile-time array of pointers to the templatised functions that do the work, and then index into that array at runtime. The example below uses std::index_sequence, so it needs C++14 or later.
#include <algorithm>   // std::max
#include <array>
#include <bitset>
#include <cstddef>     // std::size_t
#include <stdexcept>   // std::logic_error
#include <string>
#include <utility>     // std::index_sequence
// define the parameters of supported lengths
struct traits
{
    static constexpr std::size_t lowest  = 1;
    static constexpr std::size_t highest = 64;
    static constexpr std::size_t extent  = (highest - lowest) + 1;
};
//
// use a bitset of some length to decode the string
// (std::bitset's string constructor throws std::invalid_argument
// if the string contains characters other than '0' and '1')
//
template<std::size_t Bits>
unsigned long long decode_impl(std::string const& s)
{
    std::bitset<Bits> bs(s);
    return bs.to_ullong();
}
//
// build an array of pointers to the decode functions, one for each Is
//
template<std::size_t... Is>
constexpr std::array<unsigned long long (*)(std::string const&), traits::extent>
decoders(std::index_sequence<Is...>)
{
    return {{
        &decode_impl<Is + traits::lowest>...
    }};
}
//
// build all pointers
//
constexpr std::array<unsigned long long (*)(std::string const&), traits::extent>
decoders()
{
    return decoders(std::make_index_sequence<traits::extent>());
}
//
// use a runtime value (the string's length) to select the compile-time construct
//
unsigned long long decode(std::string const& s)
{
    static constexpr auto my_decoders = decoders();
    // an empty string is clamped to the 1-bit decoder; anything longer
    // than 64 characters cannot be represented in an unsigned long long
    auto sz = std::max(s.size(), traits::lowest);
    if (sz > traits::highest)
        throw std::logic_error("unsupported length");
    return my_decoders[sz - traits::lowest](s);
}
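
A minimal usage sketch (the sample strings and expected values are only illustrative, not part of the original code):

#include <cassert>
#include <iostream>

int main()
{
    assert(decode("10110") == 22);                      // 5-character string -> decode_impl<5>
    assert(decode("1") == 1);                           // 1-character string -> decode_impl<1>
    std::cout << decode(std::string(64, '1')) << '\n';  // prints 18446744073709551615
    // decode(std::string(65, '1'));                    // would throw std::logic_error
}

Because the table of function pointers is a constexpr static, all sixty-four decode_impl instantiations are generated at compile time; the only runtime work in decode is the bounds check and an indirect call through the selected pointer.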