I'm trying to average two colors.
My original (horrible) implementation is as follows:
// color is a union
int ColorAverage(int c1, int c2) {
    color C1(c1);
    color C2(c2);
    return color(
        (unsigned char)(0.5f * C1.a + 0.5f * C2.a),
        (unsigned char)(0.5f * C1.r + 0.5f * C2.r),
        (unsigned char)(0.5f * C1.g + 0.5f * C2.g),
        (unsigned char)(0.5f * C1.b + 0.5f * C2.b)
    ).c;
}
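For context, color is a union along these lines; treat this as a rough sketch rather than the exact definition (it relies on the common anonymous-struct extension and assumes little-endian 0xAARRGGBB packing):

// Sketch of the color union used above (exact layout approximate).
// Relies on the common anonymous-struct extension and type punning
// through the union; little-endian 0xAARRGGBB packing is assumed.
union color {
    int c;                         // the packed 32-bit value
    struct {
        unsigned char b, g, r, a;  // matches 0xAARRGGBB on little-endian
    };
    color(int value) : c(value) {}
    color(unsigned char a_, unsigned char r_, unsigned char g_, unsigned char b_) {
        a = a_; r = r_; g = g_; b = b_;
    }
};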
My current solution, which performs considerably better, is as follows:
int ColorAverage(int c1, int c2) {
    unsigned char* b1 = reinterpret_cast<unsigned char*>(&c1);
    unsigned char* b2 = reinterpret_cast<unsigned char*>(&c2);
    int value;
    unsigned char* bv = reinterpret_cast<unsigned char*>(&value);
    bv[0] = (b1[0] + b2[0]) / 2;
    bv[1] = (b1[1] + b2[1]) / 2;
    bv[2] = (b1[2] + b2[2]) / 2;
    bv[3] = (b1[3] + b2[3]) / 2;
    return value;
}
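As a quick sanity check (reading the ints as 0xAARRGGBB purely for illustration), the byte-wise version does what I expect:

// Sanity check for the ColorAverage defined above; the 0xAARRGGBB
// reading of the values is just for illustration.
#include <cassert>

int main() {
    // A = (0x40 + 0x00) / 2 = 0x20, R = (0xFF + 0x00) / 2 = 0x7F,
    // G = (0x00 + 0xFF) / 2 = 0x7F, B = 0x00
    assert(ColorAverage(0x40FF0000, 0x0000FF00) == 0x207F7F00);
    return 0;
}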
However, it's still quite slow (about 3% of my frame time).
I did find a solution for 24-bit color, but it does not apply to 32-bit, because the alpha byte is not masked and ends up corrupting the result:
#define AVERAGE(a, b) ( ((((a) ^ (b)) & 0xfffefefeL) >> 1) + ((a) & (b)) )
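For example (assuming 0xAARRGGBB packing), the 24-bit mask leaves the low bit of the top byte intact, so the shift pushes it into the red channel instead of dropping it:

// Illustration of why the 24-bit macro breaks on 32-bit ARGB values.
#include <cassert>

#define AVERAGE(a, b) ( ((((a) ^ (b)) & 0xfffefefeL) >> 1) + ((a) & (b)) )

int main() {
    // Averaging alpha 0x01 with alpha 0x00 should give 0x00000000,
    // but the stray alpha bit ends up as 0x80 in the red byte.
    assert(AVERAGE(0x01000000, 0x00000000) == 0x00800000);
    return 0;
}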