At the moment I am using `char *lines[1000+1]` to hold a file's lines, and I was wondering whether there is a more efficient way to handle files as small as 1 line or as large as 5000 lines without hard-coding the array size.
Here is the code I am using:
/*
 * Split `data` in place on the characters in `delimter`, storing a
 * pointer to each token into `result` in order.
 *
 * Returns the number of tokens stored.
 *
 * Notes:
 *  - strtok() mutates `data` (writes '\0' over delimiters), and runs of
 *    consecutive delimiters count as a single separator, so empty
 *    tokens are never produced.
 *  - No bounds check is performed on `result`; the caller must supply
 *    an array large enough for every token.
 *  - strtok() keeps static state, so this is not reentrant/thread-safe.
 */
int tokenize(char *result[], char *data, char *delimter) {
    int count = 0;
    for (char *tok = strtok(data, delimter); tok != NULL; tok = strtok(NULL, delimter)) {
        result[count++] = tok;
    }
    return count;
}
/* Heap buffer that will hold the entire file contents.
 * NOTE(review): readfile() is defined elsewhere — presumably it
 * malloc()s the buffer (it must outlive this scope since lines[]
 * points into it); confirm who free()s it. */
char *filebuffer = NULL;
/* Fixed-capacity table of pointers to the start of each line.
 * NOTE(review): tokenize() does no bounds checking, so a file with
 * more than 1001 lines overflows this array — this is the hard-coded
 * limit the surrounding question is about. A dynamically grown
 * (realloc'd) array, or counting '\n' first, would remove the limit. */
char *lines[1000+1];
/* Read the file */
filebuffer = readfile(argv[1]);
if(filebuffer == NULL) {
printf("Failed to read file.\n");
return EXIT_FAILURE;
}
/* Split the buffer into lines in place: strtok (inside tokenize)
 * overwrites each '\n' with '\0', and lines[] points into filebuffer,
 * so filebuffer must not be freed while lines[] is still in use.
 * Runs of blank lines are collapsed (strtok yields no empty tokens). */
linecount = tokenize(lines, filebuffer, "\n");