I've written a program to solve the Frobenius coin problem.
#define _CRT_SECURE_NO_WARNINGS
int numberOfPences;
printf("Enter pences quantity: ");
scanf("%d", &numberOfPences);
int k=8;
int coins[8] = {1, 2, 5, 10, 20, 50, 100, 200};
printf("Start memory allocation with size %d.\n", sizeof(long long));
long long ** computed = (long long **)malloc((k+1)*sizeof(long long *) + (k+1)*(numberOfPences+1)*sizeof(long long ));
for(int i = 0; i <= k; ++i)
{
computed[i] = (long long *)malloc((numberOfPences+1)*sizeof(long long));
}
free(computed);
for(int i=0; i<k+1; i++)
{
for(int j=1; j<numberOfPences+1; j++)
{
computed[i][j]= 0LL;
}
}
computed[0][1]=1LL;
long long s = number_of_dec(numberOfPences, coins, k, computed);
printf("Number of variants : %d.\n", s);
return(0);
}
I cannot understand why it sometimes works and sometimes crashes with "Segmentation fault (core dumped)", even before stepping into the number_of_dec function. Can you please help me figure out what I am doing wrong? I suspect the problem is with the memory allocation, since the same algorithm works fine in Python.