I wrote a simple insertion sort implementation to try to knock the rust off and begin what I hope is a better understanding of algorithms in general. The input file holds 20 million random numbers. The code is below:
#include <fstream>
#include <cstdio>   // printf
#include <ctime>    // time_t, time, ctime
#include <cstdlib>  // exit
using namespace std;
void insertionSort(double numbers[], int array_size);
int main() {
    int count;
    double twentyMMNumbers[20000000]; // 20 million doubles as a local array
    ifstream inputFile;
    time_t now;

    inputFile.open("20KRandomsNumbers.data"); // opens the input file
    if (inputFile.fail())
    {
        printf("Cannot open inputFile\n");
        exit(1);
    }

    count = 0;
    // Read until the file runs out or the array is full; without the
    // count check the loop spins forever once all 20 million slots fill.
    while (count < 20000000 && inputFile >> twentyMMNumbers[count])
        count++;
    printf("%d\n", count);              // how many values were read
    printf("%f\n", twentyMMNumbers[0]); // sanity-check the first one
    inputFile.close();

    time(&now);
    printf("%s\n", ctime(&now)); // BEFORE
    insertionSort(twentyMMNumbers, count); // insertion sort the values actually read
    time(&now);                  // refresh, or BEFORE and AFTER print the same time
    printf("%s\n", ctime(&now)); // AFTER

    for (int i = 0; i < count; i++)
        printf("%f\n", twentyMMNumbers[i]);
}
void insertionSort(double numbers[], int array_size)
{
    int i, j;
    double index; // must be double, not int, or each value gets truncated
    for (i = 1; i < array_size; i++)
    {
        index = numbers[i]; // value being inserted this pass
        j = i;
        while ((j > 0) && (numbers[j - 1] > index))
        {
            numbers[j] = numbers[j - 1]; // shift larger values right
            j = j - 1;
        }
        numbers[j] = index; // drop the value into its final slot
    }
}
The code worked fine when the file only had 20,000 entries, but now it gives me:
Segmentation fault: 11
Was this caused by my increasing the size of the array? P.S. If you have any tips on optimizing this, feel free to point them out.
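For what it's worth, I also sketched a version that keeps the data on the heap with std::vector instead of a local array, in case the allocation itself is the problem. This is just my guess at an alternative, not something I've tested on the full 20 million values (same filename as above; I only sketched the reading part):

#include <fstream>
#include <vector>
#include <cstdio>
using namespace std;

int main() {
    ifstream inputFile("20KRandomsNumbers.data");
    if (!inputFile)
    {
        printf("Cannot open inputFile\n");
        return 1;
    }
    // A vector stores its elements on the heap, so the 20 million
    // doubles never have to fit inside main's stack frame.
    vector<double> numbers;
    numbers.reserve(20000000); // avoid repeated regrowth while reading
    double value;
    while (inputFile >> value)
        numbers.push_back(value);
    printf("read %zu values\n", numbers.size());
}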