#include <stdlib.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>

typedef unsigned int uint32;
uint32 a;

int main()
{
    struct timespec start;

    if (clock_gettime(CLOCK_REALTIME, &start) == -1)
    {
        perror("clock_gettime");
    }

    /* I am calculating the clock granularity here; the granularity is basically
     * how long the timer interrupt lasts while it is processing the background task. */
    // microseconds output
    a = (1000000 * start.tv_sec + start.tv_nsec / 1000);

    printf("%u\n", a);
    return EXIT_SUCCESS;
}

I created a timer to get a timestamp at any place, so the above is a free-running timer for taking timestamps. I tried to get the output in microseconds and I am getting the value 2847675807. Is this right? I expect the value in microseconds, but I think I am getting much bigger values. Can someone please help me?

#include <stdlib.h>
#include <stdio.h>
#include <time.h>

typedef unsigned int uint64;
typedef unsigned int uint32;
uint32 a;

uint64 timestamp()
{
    struct timespec start;

    if (clock_gettime(CLOCK_REALTIME, &start) == -1)
    {
        perror("clock_gettime");
    }

    /* I am calculating the clock granularity here; the granularity is basically
     * how long the timer interrupt lasts while it is processing the background task. */
    // microseconds output
    a = (uint32)(1e6 * start.tv_sec + start.tv_nsec * 1e-3);

    printf("%u\n", a);
    return a;
}

int main()
{
    timestamp();
    return EXIT_SUCCESS;
}

I modified it as above, but I still get the same kind of big number.

  • A `uint32_t` may well overflow - use a `uint64_t` or a `double`. – Paul R May 21 '14 at 07:25
  • Note that the number of seconds since the epoch (January 1 1970) is well over a billion. Multiplying a billion with a million will overflow a 32-bit integer. – Some programmer dude May 21 '14 at 07:25
  • I want the output in microseconds in a uint32! What shall I do now? – user3635707 May 21 '14 at 07:26
  • [How to measure time in milliseconds using ANSI C?](http://stackoverflow.com/questions/361363/how-to-measure-time-in-milliseconds-using-ansi-c) – Rocoder May 21 '14 at 07:38
  • You still have only a 32-bit integer to store a value *much* larger than 4 billion (which is the maximum value a 32-bit unsigned integer can handle). Include the [`<stdint.h>`](http://en.cppreference.com/w/c/types/integer) header file and use the standardized `uint64_t` type (see the sketch below). – Some programmer dude May 21 '14 at 08:05
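
Here is a minimal sketch of the fix the comments describe, assuming POSIX `clock_gettime` and the standard `<stdint.h>` types; the `now_us` helper name is just for illustration:

#include <inttypes.h>  /* PRIu64, for printing a uint64_t portably */
#include <stdint.h>    /* uint64_t: a real 64-bit type, unlike `typedef unsigned int uint64` */
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

/* Hypothetical helper: microseconds since the epoch, in 64 bits.
 * Roughly 1.4e9 seconds * 1e6 = ~1.4e15 microseconds, far above the
 * 32-bit maximum of 4294967295, so the math must be done in 64 bits. */
static uint64_t now_us(void)
{
    struct timespec ts;
    if (clock_gettime(CLOCK_REALTIME, &ts) == -1)
    {
        perror("clock_gettime");
        exit(EXIT_FAILURE);
    }
    /* widen BEFORE multiplying so the product cannot overflow */
    return (uint64_t)ts.tv_sec * 1000000u + (uint64_t)(ts.tv_nsec / 1000);
}

int main(void)
{
    printf("%" PRIu64 "\n", now_us());
    return EXIT_SUCCESS;
}

Because the cast happens before the multiplication, the arithmetic is done in 64 bits; storing the result in a `uint32` instead wraps it modulo 2^32, which is where a seemingly arbitrary value like 2847675807 comes from.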

1 Answer


Maybe you can try this. It's not real time, and it's less accurate than gettimeofday.

#include <time.h>
#include <stdio.h>

int main(int ac, char **av)
{
  clock_t start, end;
  double  duration = 0;

  start = clock();              /* clock() measures CPU time, not wall-clock time */
  for (int i = 0; i < 100000000; i++)
    {}                          /* busy loop (may be optimized away at -O2) */
  end = clock();
  duration = (double)(end - start) * 1000 / CLOCKS_PER_SEC;
  printf("%f ms\n", duration);
  return 0;
}
– nyudebian
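
For comparison, here is a minimal sketch of the `gettimeofday` approach the answer alludes to; it returns wall-clock time and, like `clock_gettime`, needs 64-bit arithmetic to hold microseconds since the epoch:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <sys/time.h>   /* gettimeofday */

int main(void)
{
    struct timeval tv;
    if (gettimeofday(&tv, NULL) == -1)
    {
        perror("gettimeofday");
        return 1;
    }
    /* tv_sec * 1e6 overflows 32 bits, so widen before multiplying */
    uint64_t us = (uint64_t)tv.tv_sec * 1000000u + (uint64_t)tv.tv_usec;
    printf("%" PRIu64 "\n", us);
    return 0;
}

Note that `gettimeofday` only has microsecond resolution, whereas `clock_gettime` exposes nanoseconds, which is one reason the question's original approach is still preferable once the overflow is fixed.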