I have written the following code to measure the time it takes to sort some data. I am getting weird results, such as a negative time in some cases, and I am not getting consistent results for the same data set (I understand it won't be exactly the same on every run). Please let me know what is wrong, or how I can measure the time properly.
#include<stdio.h>
#include<stdlib.h>   /* for rand() */
#include<sys/time.h>
/* Bubble sort with an early-exit flag: stop when a full pass makes no swaps. */
void bubble(int a[],int n)
{
    int i,j,flag;
    for(i=0;i<n-1;i++)
    {
        flag=0;
        for(j=0;j<n-i-1;j++)
        {
            if(a[j]>a[j+1])
            {
                flag=1;
                /* swap adjacent elements without a temporary,
                   using addition/subtraction */
                a[j]=a[j]+a[j+1];
                a[j+1]=a[j]-a[j+1];
                a[j]=a[j]-a[j+1];
            }
        }
        if(flag==0)
            break;
    }
}
int main()
{
    int n,a[100000],i;
    printf("Enter the size of array:");
    scanf("%d",&n);
    for(i=0;i<n;i++)
        a[i]=rand()%100000;   //average case
        //a[i]=i;             //best case
        //a[i]=n-1-i;         //worst case
    /*
    printf("The UNsorted array is:\n");
    for(i=0;i<n;i++)
        printf("%d ",a[i]);
    printf("\n\n");
    */
    int st,et;
    struct timezone tz;
    struct timeval tv;
    gettimeofday(&tv,NULL);
    st=tv.tv_usec;            /* only the microsecond field is stored */
    bubble(a,n);
    gettimeofday(&tv,NULL);
    et=tv.tv_usec;            /* only the microsecond field is stored */
    printf("Sorting time: %d micro seconds\n",et-st);
    /*
    printf("\nThe sorted array is:\n");
    for(i=0;i<n;i++)
        printf("%d ",a[i]);
    printf("\n");
    */
    return 0;
}
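
Is the problem that I am only reading tv_usec? Below is a minimal sketch of what I think the combined computation would look like (the elapsed_usec helper and the usleep placeholder are only for illustration, they are not part of my actual program):

#include<stdio.h>
#include<sys/time.h>
#include<unistd.h>

/* Illustrative helper (not in my program): elapsed microseconds between two
   timevals, combining the seconds and microseconds fields so the difference
   stays positive even across a second boundary. */
long elapsed_usec(struct timeval start, struct timeval end)
{
    return (end.tv_sec - start.tv_sec) * 1000000L
         + (end.tv_usec - start.tv_usec);
}

int main()
{
    struct timeval t1,t2;
    gettimeofday(&t1,NULL);
    usleep(250000);   /* placeholder for the work being timed */
    gettimeofday(&t2,NULL);
    printf("elapsed: %ld micro seconds\n", elapsed_usec(t1,t2));
    return 0;
}

Is that the right idea, or is there a better way to time the sort?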