I am writing the following C code to measure the time taken by a simple operation, using getitimer() and setitimer().
#include <sys/time.h>
#include <stdlib.h>
#include <stdio.h>

#define INTERVAL 1 /* number of seconds before the timer goes off */

int main(void) {
    double sum = 0;
    struct itimerval initial, updated;

    /* Arm the timer with INTERVAL seconds plus 999999 microseconds,
       and reload it with the same interval when it expires. */
    initial.it_value.tv_sec = INTERVAL;
    initial.it_value.tv_usec = 999999;
    initial.it_interval = initial.it_value;

    printf("%ld\n", initial.it_value.tv_usec);

    if (setitimer(ITIMER_VIRTUAL, &initial, NULL) == -1) {
        perror("error calling setitimer()");
        exit(1);
    }

    /* The simple operation being timed; start i at 1 to avoid
       dividing by zero. */
    for (unsigned int i = 1; i < 100; i++)
        sum += 1. / i;

    /* Read the remaining time to work out how long the loop took. */
    if (getitimer(ITIMER_REAL, &updated) == -1) {
        perror("error calling getitimer()");
        exit(1);
    }

    printf("Time started = %ld; Time ended = %ld; Time taken = %ld\n",
           initial.it_value.tv_usec, updated.it_value.tv_usec,
           initial.it_value.tv_usec - updated.it_value.tv_usec);

    return 0;
}
I have compiled with:
$ gcc -o timer -std=c99 -Wall -pedantic getitimer.c -lrt -O3
However, the time taken always comes out as 999999 (I have tried raising and lowering the 100):
$ ./timer
999999
Time started = 999999; Time ended = 0; Time taken = 999999
What is my error? Also, I wanted to ask: what is the highest precision I can get from a program like this?
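For comparison, here is a minimal sketch of the higher-resolution approach I was considering as an alternative; I am assuming clock_gettime() with CLOCK_MONOTONIC is the right tool for that (it reports nanoseconds, and clock_getres() should show the clock's actual resolution on a given system):

#define _POSIX_C_SOURCE 199309L /* for clock_gettime() under -std=c99 */
#include <stdio.h>
#include <time.h>

int main(void) {
    struct timespec res, start, end;
    double sum = 0;

    /* Report the clock's advertised resolution. */
    if (clock_getres(CLOCK_MONOTONIC, &res) == 0)
        printf("clock resolution: %ld ns\n", res.tv_nsec);

    clock_gettime(CLOCK_MONOTONIC, &start);

    /* The same simple operation as above. */
    for (unsigned int i = 1; i < 100; i++)
        sum += 1. / i;

    clock_gettime(CLOCK_MONOTONIC, &end);

    /* Printing sum also stops the compiler optimising the loop away. */
    long long ns = (long long)(end.tv_sec - start.tv_sec) * 1000000000LL
                 + (end.tv_nsec - start.tv_nsec);
    printf("sum = %f, elapsed = %lld ns\n", sum, ns);
    return 0;
}

(On older glibc this needs -lrt, which I am already linking.)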
Thanks very much!