11

Consider the following simple map:

// Maps narrow-string keys to narrow-string values; the entries are
// hard-coded in the constructor.
// NOTE(review): unordered_map<const char*, const char*> hashes and
// compares the POINTERS, not the characters. With string literals
// this often "works" because identical literals are usually pooled,
// but that is not guaranteed — confirm lookups only ever use the
// inserted literal pointers.
class MyCoolMap : public unordered_map<const char *, const char *>
{
public:
  // Fix: the constructor must carry the class name. The original
  // snippet spelled it ProtoTypeMap(), which declares a member
  // function with no return type and does not compile.
  MyCoolMap()
  {
    insert(value_type("in1", "out1"));
    insert(value_type("in2", "out2"));
    ...
    insert(value_type("inN", "outN"));
  }
};

Now, suppose I need to make this map available both for char and wchar_t strings. So, I rewrite it as follows:

// Attempted generalization over the character type C (char or
// wchar_t). As written it only works for C = char: the literals
// "in1" etc. are narrow (const char*), so for C = wchar_t the
// value_type construction fails — this is exactly the problem the
// question is about.
// NOTE(review): a strictly conforming compiler would also require
// qualifying the dependent-base members (this->insert, typename
// MyCoolMap::value_type); VS2010 accepts the unqualified forms.
template<class C>
class MyCoolMap : public unordered_map<const C *, const C *>
{
public:
  MyCoolMap()
  {
    insert(value_type("in1", "out1"));
    insert(value_type("in2", "out2"));
    ...
    insert(value_type("inN", "outN"));
  }
};

And, of course, this does not work for C=wchar_t. The problem is that I do not know how to template the difference between char literals and wchar_t literals. Right now I see two solutions, both ugly.

Solution 1 - specialize MyCoolMap by wchar_t:

// Solution 1: full specialization for wchar_t with wide (L"...")
// literals. Construction logic is identical to the primary template;
// the drawback is that every entry must be duplicated by hand.
template<>
class MyCoolMap<wchar_t> : public unordered_map<const wchar_t *, const wchar_t *>
{
public:
  MyCoolMap()
  {
    insert(value_type(L"in1", L"out1"));
    insert(value_type(L"in2", L"out2"));
    ...
    insert(value_type(L"inN", L"outN"));
  }
};

This is bad, because the whole logic is duplicated.

Solution 2 - a traits like solution:

// Widening helper: TOWSTRING(x) expands x first (the extra level of
// indirection exists so x may itself be a macro), then pastes the L
// prefix to form a wide literal.
// NOTE(review): _TOWSTRING starts with an underscore followed by an
// uppercase letter — such identifiers are reserved to the
// implementation; rename if this code is kept.
#define _TOWSTRING(x) L##x
#define TOWSTRING(x) _TOWSTRING(x)

// Primary template, deliberately undefined: one explicit
// specialization is generated per (character type, string id) pair.
template <class C, int> struct special_string;
// Declares a unique integer id for STR (via __LINE__) and defines the
// narrow and wide specializations returning #STR and L#STR.
#define DECL_SPECIAL_STRING(STR) \
const int ss_##STR = __LINE__; \
template<> struct special_string<char, ss_##STR> { static const char *get_value() { return #STR; } }; \
template<> struct special_string<wchar_t, ss_##STR> { static const wchar_t *get_value() { return TOWSTRING(#STR); } };

// One declaration per distinct literal the map uses.
DECL_SPECIAL_STRING(in1)
DECL_SPECIAL_STRING(out1)
DECL_SPECIAL_STRING(in2)
DECL_SPECIAL_STRING(out2)
...
DECL_SPECIAL_STRING(inN)
DECL_SPECIAL_STRING(outN)

// Solution 2: character-type-generic map. Each literal is fetched
// through special_string<C, id>::get_value(), so a single constructor
// body serves both char and wchar_t instantiations.
template<class C>
class MyCoolMap : public unordered_map<const C *, const C *>
{
public:
  MyCoolMap()
  {
// Local helper: inserts the (in, out) pair spelled in the character
// type C. #undef'd immediately after use to limit its scope.
#define INSERT_MAPPING(in, out) insert(value_type(special_string<C, ss_##in>::get_value(), special_string<C, ss_##out>::get_value()))
    INSERT_MAPPING(in1, out1);
    INSERT_MAPPING(in2, out2);
    ...
    INSERT_MAPPING(inN, outN);
#undef INSERT_MAPPING
  }
};

This way I do not need to replicate the logic, but this is so verbose and relies heavily on macros.

There must be a better way; I just do not see it.

I am using VS2010.

EDIT

I am glad that a much simpler solution is proposed - the credits go to https://stackoverflow.com/users/5987/mark-ransom. I had to make minor fixes to make it compile, though:

// TOWSTRING("abc") yields the wide literal L"abc".
// Reworked from the original two-level token paste (L##x):
//  - _TOWSTRING began with underscore + uppercase, a name reserved to
//    the implementation ([lex.name]);
//  - pasting the L prefix onto an already-quoted literal is rejected
//    by gcc/clang. String-literal concatenation (L"" "abc" -> L"abc")
//    is the conforming equivalent and MSVC accepts it too.
#define TOWSTRING(x) L"" x

// Compile-time choice between the narrow and wide spelling of the
// same string: ChooseCW<char> returns the first argument,
// ChooseCW<wchar_t> the second. The specializations are inline so
// their definitions obey the ODR if this lands in a header.
template<typename C> const C * ChooseCW(const char * c, const wchar_t * w);
template<> inline const char * ChooseCW<char>(const char * c, const wchar_t * w)
{
  return c;
}
template<> inline const wchar_t *ChooseCW<wchar_t>(const char * c, const wchar_t * w)
{
  return w;
}

// CW(C, in1) yields "in1" when C = char and L"in1" when C = wchar_t.
#define CW(C, STR) ChooseCW<C>(#STR, TOWSTRING(#STR))

Thanks again.

Community
  • 1
  • 1
mark
  • 59,016
  • 79
  • 296
  • 580
  • 2
    Your problem is that while the two lists of strings *resemble* each other, they're not really the same. – egrunin Jan 27 '12 at 21:29
  • Here is a refinement of Mark's idea that retains the array[count] type of the result, so the result can be used in sizeof() just like the original string literal: stackoverflow.com/a/63888331/1046167 – Louis Semprini Sep 14 '20 at 16:31

2 Answers

12

Use a macro to generate both forms of the string, and a template function to choose which to use.

// Generate both the narrow and the wide form of a literal and let a
// small template function pick the one matching the character type C.
template<typename C>
const C * ChooseCW(const char * c, const wchar_t * w);

// inline: explicit function specializations are not implicitly
// inline, and these definitions may end up in a header (ODR).
template<>
inline const char * ChooseCW<char>(const char * c, const wchar_t * w)
{
    return c;
}

template<>
inline const wchar_t * ChooseCW<wchar_t>(const char * c, const wchar_t * w)
{
    return w;
}

// CW(C, "in1") yields "in1" for C = char and L"in1" for C = wchar_t.
// Uses string-literal concatenation (L"" STR) instead of token
// pasting (L##STR): pasting the L prefix onto a quoted literal does
// not form a valid preprocessing token on gcc/clang.
#define CW(C, STR) ChooseCW<C>(STR, L"" STR)

insert(value_type(CW(C, "in1"), CW(C, "out1")));
Mark Ransom
  • 299,747
  • 42
  • 398
  • 622
  • Simple, as all that is genius. Your code snippet requires some tweaks to make it compile - posting them as an edit to my question. But otherwise - simply marvelous. – mark Jan 27 '12 at 22:51
  • @mark, thanks for the kudos and thanks for pointing out errors in my code. I hope this compiles now, even though it isn't identical to yours. – Mark Ransom Jan 28 '12 at 04:31
  • Here is a refinement of your idea that retains the array[count] type of the result, so the result can be used in sizeof() just like the original string literal: stackoverflow.com/a/63888331/1046167 – Louis Semprini Sep 14 '20 at 16:30
-1

Make all the string constants static members, something like this:

#include <stddef.h>
#include <stdio.h>
// Demo: the per-character-type string constant lives in a static
// member, and only its DEFINITION is specialized, so the class logic
// is written once.
template<class C>
class String
{
  public:
    // Wraps a non-owning pointer; defaults to the type-specific
    // constant defVal (defined per specialization below).
    String(const C* value = defVal) : mValue(value) {}
    const C* valueOf() { return mValue; }
  private:
    const C* mValue;          // non-owning; caller keeps storage alive
    static const C defVal[];
};
// Fix: explicit specialization of a class-template member requires
// the template<> prefix; without it this is ill-formed.
template<> const char String<char>::defVal[] = "char";
template<> const wchar_t String<wchar_t>::defVal[] = L"wchar_t";
int main(int argc, char **argv)
{
  String<char> c(*argv);
  String<wchar_t> w;
  // Fix: %S is a Microsoft extension; %ls is the standard conversion
  // specifier for a wide string (MSVC's CRT supports it as well).
  return printf("%ls\n", w.valueOf());
}

You could probably macroise the definitions to avoid duplicating them.

Neil
  • 54,642
  • 8
  • 60
  • 72