I am correctly tokenizing single words from a line of strings; however, inserting them into a 2d array cuts off parts of the token. I also have a problem with NULL, and the code results in a segfault.
#include <stdio.h>
#include <stdlib.h>
#include <string.h> // strtok
#define MAX_FILE_LENGTH 30
#define MAX_COURSE_LENGTH 30
#define MAX_LINE_LENGTH 1000
void trim(char* str) {
int l = strlen(str);
if (str[l - 1] == '\n') {
str[l - 1] = 0;
}
}
int main() {
char filename[MAX_FILE_LENGTH];
char arr[MAX_COURSE_LENGTH][MAX_COURSE_LENGTH];
const char delim[] = " ";
char* token;
int course = 0;
char c;
FILE* fp;
int N = 0; // number of lines in file
printf("This program will read, from a file, a list of courses and their prerequisites and will print the list in which to take courses.\n");
printf("Enter filename: ");
scanf("%s%c", filename, &c);
fp = fopen(filename, "r");
if (fp == NULL) {
printf("Could not open file %s. Exit\n", filename);
printf("\nFailed to read from file. Program will terminate.\n");
return -1;
}
while (!feof(fp) && !ferror(fp)) {
int i = 0;
if (fgets(arr[N], MAX_LINE_LENGTH, fp)) {
trim(arr[N]);
printf("Full line: |%s|\n", arr[N]);
token = strtok(arr[N], delim);
arr[N][i] = *token;
printf("N = %d, i = %d, token = %s arr[%d][%d]: %s\n", N, i, token, N, i, &arr[N][i]);
while (token != NULL) {
i++;
token = strtok(NULL, " \n");
printf("token at arr[%d][%i]: %s value at arr[%d][%d]: %s\n", N, i, token, N, i, &arr[N][i]);
arr[N][i] = *token;
printf("N = %d, i = %d, token = %s arr[%d][%d]: %s\n", N, i, token, N, i, &arr[N][i]);
}
N++;
}
}
fclose(fp);
return 0;
}
The output I'm getting reads:
Full line: |c100 c200|
N = 0, i = 0, token = c100 arr[0][0]: c100
token at arr[0][1]: c200 value at arr[0][1]: 100
N = 0, i = 1, token = c200 arr[0][1]: c00
token at arr[0][2]: (null) value at arr[0][2]: 00
zsh: segmentation fault ./a.out
My file is a list of courses and I am to build an adjacency matrix with the list of prerequisite courses.
c100 c200
c300 c200 c100
c200 c100
I tried to reset each index to NULL or '\0' before inserting the tokens, but the same result occurred. Inserting the first word in the [N][0]th index of the inner array works, but there is something I'm missing when inserting into other indexes of the inner array.