For the program below I'm getting different results on Windows (VS 17) compared to a Linux machine (gcc 4.8).
#include "CrossDevelopment.h"
using namespace std;
int main()
{
for (auto i = 0; i < 3; i++)
{
//chrono::high_resolution_clock::time_point start_time = chrono::high_resolution_clock::now();
chrono::system_clock::time_point start_time = chrono::system_clock::now();
for (auto i = 0; i < 50; i++) {
int a = 10;
int b = 5;
int c = a + b;
c += 10;
c *= a;
a *= b;
}
//chrono::high_resolution_clock::time_point end_time = chrono::high_resolution_clock::now();
chrono::system_clock::time_point end_time = chrono::system_clock::now();
auto elapsed_time = chrono::duration<double, micro>(end_time - start_time);
cout << "Difference of time " << elapsed_time.count() << " " << (end_time - start_time).count()
<< " " << (chrono::duration_cast<chrono::nanoseconds>(end_time - start_time)).count() << endl;
}
getchar();
return 0;
}
Output on the Windows machine:
Difference of time 1 10 1000
Difference of time 0.7 7 700
Difference of time 0.7 7 700
Output on the Linux machine:
Difference of time 0.806 806 806
Difference of time 0.6 600 600
Difference of time 0.542 542 542
If you compare the last two columns you will see the difference: on Windows the raw count() of end_time - start_time differs from the nanosecond count by a factor of 100, while on Linux the two values are identical. This discrepancy does not appear when I use high_resolution_clock instead. Why do the two platforms behave differently?
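For reference, here is a minimal standalone sketch (standard library only; CrossDevelopment.h is not needed) that prints each clock's tick period. system_clock::period is an implementation-defined std::ratio, so the raw count() of a system_clock duration is measured in clock-specific ticks rather than a portable unit, which would account for the numbers above:

    #include <chrono>
    #include <iostream>

    int main()
    {
        using namespace std;

        // system_clock::period is an implementation-defined std::ratio<num, den>
        // giving the tick length in seconds. MSVC uses 100 ns ticks (1/10000000)
        // while libstdc++ uses 1 ns ticks (1/1000000000), so the same interval
        // produces raw count() values that differ by a factor of 100.
        cout << "system_clock tick: "
             << chrono::system_clock::period::num << "/"
             << chrono::system_clock::period::den << " s" << endl;

        // Both implementations report 1 ns ticks for high_resolution_clock,
        // which matches the observation that it shows no discrepancy.
        cout << "high_resolution_clock tick: "
             << chrono::high_resolution_clock::period::num << "/"
             << chrono::high_resolution_clock::period::den << " s" << endl;

        return 0;
    }

In other words, only values obtained via duration_cast to a fixed unit (as in the third column) are directly comparable across platforms; the raw count() in the second column is in implementation-defined ticks.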