I have included my working code below. It reads a .txt file of roughly 1.6 million lines, one word per line, sorts the words alphabetically with qsort, and writes them to an output file. The problem, from what I understand, is a size limit on the array: up to roughly 1 million lines it works and finishes in 2-3 seconds, but beyond that it crashes with a segmentation fault.
I have done a lot of reading and seen malloc mentioned, but I don't understand how to use it to fill an array of strings, or how to pass the file contents to qsort without such an array. Any pointers on how to get around this? The 16 in the line array is because no word in the file exceeds 15 characters (15 chars + 1 for the null terminator). I've also put a rough sketch of the malloc approach I'm picturing at the end of this post, in case that clarifies what I'm asking.
#include <stdio.h>
#include <string.h>
#include <stdlib.h>

static void sort_a(void *array, unsigned n);
static int cmpr(const void *a, const void *b);

int main(void)
{
    static const char filename[] = "input.txt";
    static const char filename_out[] = "output.txt";
    int i = 0;
    int j = 0;
    FILE *file = fopen(filename, "r");
    FILE *file_write = fopen(filename_out, "w");
    char line[16];              // no word in the file exceeds 15 chars + '\0'
    char *line_array[1000001];  // fixed-size pointer array; works below ~1 million lines

    if (file != NULL) {
        while (fgets(line, sizeof line, file) != NULL) {
            // Trim the newline character
            line[strcspn(line, "\n")] = '\0';
            // Stop processing if line_array is full
            if (i < sizeof line_array / sizeof *line_array) {
                line_array[i++] = strdup(line);
            }
            else {
                break;
            }
        }
        fclose(file);

        sort_a(line_array, i);
        for (j = 0; j < i; j++) {
            fprintf(file_write, "%s\n", line_array[j]);
        }
        fclose(file_write);

        // Clean up memory
        for (j = 0; j < i; j++) {
            free(line_array[j]);
        }
    }
    else {
        perror(filename);
    }
    return 0;
}

int cmpr(const void *a, const void *b) {
    return strcmp(*(const char **)a, *(const char **)b);
}

void sort_a(void *array, unsigned n) {
    qsort(array, n, sizeof(const char *), cmpr);
}