I was writing a program that processes values up to 1 million. When I used the datatype 'int' for my variables, the run time was extremely high; the program never finished running, so I was not able to note down the run time. Code before:
#include <stdio.h>

int main()
{
    int n, m, i, maxt = 0, maxn = 0;

    /* For every starting value n, count how many steps the 3n+1 sequence takes to reach 1. */
    for (n = 2; n <= 1000000; n++) {
        m = n;
        for (i = 0; m != 1; i++) {
            if (m % 2 == 0)
                m = m / 2;
            else
                m = 3 * m + 1;
        }
        /* Remember the starting value with the longest sequence. */
        if (i > maxt) {
            maxt = i;
            maxn = n;
        }
    }
    printf("%d %d\n", maxn, maxt);
    return 0;
}
But then, while juggling with the code, I changed the datatype from 'int' to 'long long int', and surprisingly the run time decreased drastically (down to milliseconds). Can anyone explain what the reason behind this may be? Code after:
#include <stdio.h>

int main()
{
    long long int n, m, i, maxt = 0, maxn = 0;

    /* Same loop as before, only the datatype has changed. */
    for (n = 2; n <= 1000000; n++) {
        m = n;
        for (i = 0; m != 1; i++) {
            if (m % 2 == 0)
                m = m / 2;
            else
                m = 3 * m + 1;
        }
        if (i > maxt) {
            maxt = i;
            maxn = n;
        }
    }
    printf("%lld %lld\n", maxn, maxt);
    return 0;
}
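
In case it matters, this is roughly how the run time can be compared (a minimal sketch using clock() from <time.h>; the exact timing method shouldn't change the question, only the loop body from the programs above is meant to go in the marked spot):

#include <stdio.h>
#include <time.h>

int main(void)
{
    clock_t start = clock();

    /* ... the search loop from the program above goes here ... */

    clock_t end = clock();
    /* Elapsed CPU time, converted to milliseconds. */
    printf("took %.0f ms\n", 1000.0 * (double)(end - start) / CLOCKS_PER_SEC);
    return 0;
}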