I need running times for the following numbers of data:
- 1000
- 5000
- 10000
- 15000
- 20000, and so on.
I am using the Quick Sort algorithm and want to verify its time complexity from a "No. of Data vs. Time" graph. However, I keep getting a time of zero seconds, both for 1000 items and for 20000 items. Even when I measure in milliseconds or nanoseconds, the time is still zero (the sketch just below shows the kind of measurement I mean). Is there any way to find an approximate or comparative time for the different numbers of data?
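By millisecond/nanosecond measurement I mean something along these lines (a rough, self-contained sketch using std::chrono; std::sort stands in here for my qSort below just to keep the snippet short, and the numbers are only examples):

    #include <bits/stdc++.h>
    using namespace std;
    using namespace std::chrono;

    int main()
    {
        // Fill an array with random values, then time a single sort of it.
        int n = 20000;
        vector<int> a(n);
        for(int k = 0; k < n; k++)
            a[k] = rand() % 100000;

        auto t1 = high_resolution_clock::now();
        sort(a.begin(), a.end());               // stands in for my qSort
        auto t2 = high_resolution_clock::now();

        cout << duration_cast<nanoseconds>(t2 - t1).count() << " ns\n"
             << duration_cast<milliseconds>(t2 - t1).count() << " ms\n";
        return 0;
    }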
My Quick Sort code is here:
    #include <bits/stdc++.h>
    using namespace std;

    int A[50000];

    // Lomuto-style partition: the pivot A[P] is moved to the right end,
    // elements <= pivot are swapped to the front, and the pivot's final
    // position is returned.
    int part(int left, int right, int P)
    {
        int V = A[P];
        swap(A[P], A[right]);
        int store = left;
        for(int i = left; i < right; i++)
        {
            if(A[i] <= V)
            {
                swap(A[i], A[store]);
                store++;
            }
        }
        swap(A[store], A[right]);
        return store;
    }

    void qSort(int left, int right)
    {
        if(left < right)
        {
            // The pivot index must be a local variable; a shared global
            // would be overwritten by the recursive calls.
            int j = part(left, right, left);
            qSort(left, j - 1);
            qSort(j + 1, right);
        }
    }

    int main()
    {
        int nData, k, minValue, maxValue;
        cout << "No. of Data: ";
        cin >> nData;
        cout << "\nRange (min, max): ";
        cin >> minValue >> maxValue;

        // Fill A with nData random values in [minValue, maxValue].
        for(k = 0; k < nData; k++)
        {
            A[k] = minValue + (rand() % (maxValue - minValue + 1));
        }

        clock_t t1 = clock();
        qSort(0, nData - 1);
        clock_t t2 = clock();
        cout << "\n\nTime: " << (double)(t2 - t1) / CLOCKS_PER_SEC << endl;
        return 0;
    }
[N.B.: My operating system is Windows.]
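One idea I am considering, to get a measurable interval at all, is to repeat the sort many times on identical copies of the unsorted data and divide the total time by the repetition count. A rough, untested sketch of what I mean (it would replace the timing lines in main() above; repeat and original are just illustrative names):

    // Sketch only: repeat the sort on identical copies and report the average.
    // The copy back into A is also inside the timed region, but it is O(n),
    // which is cheap compared to the sort itself.
    int repeat = 1000;                          // illustrative repetition count
    vector<int> original(A, A + nData);         // keep the unsorted input
    clock_t t1 = clock();
    for(int r = 0; r < repeat; r++)
    {
        copy(original.begin(), original.end(), A);   // restore unsorted input
        qSort(0, nData - 1);
    }
    clock_t t2 = clock();
    cout << "Average time per sort: "
         << (double)(t2 - t1) / CLOCKS_PER_SEC / repeat << " s" << endl;

Would this kind of averaging give a valid comparative time for the different numbers of data, or is there a better way?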