I ran the following test.
charspeed.c
#include <stdio.h>
#include <time.h>

/* Number of characters in the test alphabet. */
#define CHAR_COUNT 26
/* String literal used through a macro: every use expands to the literal
 * itself, which the compiler places in the read-only data segment. */
#define CHAR_LIST "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
/* Pointer to an identical string literal; without optimization each access
 * goes through one extra load of the pointer variable. */
static const char *CHAR_ARRAY = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
#define RUN_COUNT 1000000000
/* Parenthesized so the expansion is safe inside any surrounding expression.
 * double (not float) keeps full precision of large clock() tick counts —
 * float's 24-bit mantissa rounds them, skewing the measured times. */
#define GET_CLOCK ((double)clock() / CLOCKS_PER_SEC)

int main(void)
{
    long long int sum = 0; /* accumulated so the loops cannot be elided */
    double start, end;

    /* Time access through the macro-expanded string literal. */
    start = GET_CLOCK;
    for (size_t i = 0; i < RUN_COUNT; i++)
    {
        char test = CHAR_LIST[i % CHAR_COUNT];
        sum += test; // Force the loop to run!
    }
    end = GET_CLOCK;
    printf("#define Time: %f\n", end - start);

    /* Time access through the pointer to the static string. */
    start = GET_CLOCK;
    for (size_t i = 0; i < RUN_COUNT; i++)
    {
        char test = CHAR_ARRAY[i % CHAR_COUNT];
        sum += test; // Must be the same as first loop!
    }
    end = GET_CLOCK;
    printf("static const *CHAR_ARRAY Time: %f\n", end - start);

    printf("sum = %lld\n", sum); // Must access "sum" after loops!
    return 0;
}
Its output is:
#define Time: 1.741000
static const *CHAR_ARRAY Time: 1.868000
Why is the string literal used via the #define directive faster than a pre-initialised static char array? Where exactly is the string literal stored, and why is it faster to access within block scope?
The compiler command used is `gcc -o charspeed charspeed.c` (no optimization flags).