The values in my input testcase files were such that at some point in the code, values would exceed the capacity of int, so I figured I'd change the datatype of this particular array holding this value greater than INT_MAX from int to long long and change maximum values in the code to LLONG_MAX from INT_MAX so that comparisons during runtime don't yield a wrong answer.
However, now the code seems to get stuck with a runtime error even before arriving at the mentioned testcase. It now fails at a case that it used to pass when the values were all int oriented. I don't understand how this is possible.
The testcase which passes with int but fails with ll is:
100 50
1 23 133
1 87 16
2 9 78
3 12 117
3 39 19
5 25 219
5 47 130
5 97 157
6 50 114
9 11 25
9 39 227
10 45 187
10 77 120
12 19 85
13 43 247
14 16 4
15 33 223
16 33 1
19 69 204
20 35 119
20 43 213
20 86 19
22 40 233
23 33 61
23 79 152
26 89 213
27 57 129
28 42 220
31 68 84
31 69 183
32 39 145
32 100 117
33 49 198
34 48 78
37 66 200
37 91 77
39 44 235
41 70 109
42 92 33
44 74 196
48 73 26
51 57 216
53 70 158
63 98 220
66 72 148
80 93 150
81 99 54
83 84 129
83 89 177
95 100 16
Below is the code that gives an error at this tc.
#include<bits/stdc++.h>
using namespace std;
# define ll long long int
ll update, previous;
set<pair<ll, int>> dist;
auto it=dist.begin();
int ind=0, n, i, j;
pair<ll, int>p;
void dij(vector<pair<int, ll>> tree[], bool decided[], ll d[], int path[]) {
ind=0;
while(!dist.empty()) {
it=dist.begin();
if(it==dist.end()) return;
ind=it->second;
dist.erase(it);
decided[ind]=1;
for(j=0; j<tree[ind].size(); j++) {
update=d[ind]+tree[ind][j].second;
previous=d[tree[ind][j].first];
if(update<previous) {
p=make_pair(previous, tree[ind][j].first);
dist.erase(dist.find(p));
p=make_pair(update, tree[ind][j].first);
dist.insert(p);
path[tree[ind][j].first]=ind;
}
d[tree[ind][j].first]=min(update, previous);
}
}
}
int main()
{
ll edges;
ll x, y, weight;
cin>>n>>edges;
vector<pair<int, ll>> graph[n];
for(i=0; i<edges; i++) {
cin>>x>>y>>weight;
x--; y--;
graph[x].push_back({y, weight});
graph[y].push_back({x, weight});
}
int src=1;
src--;
ll d[n];
for(i=0; i<n; i++) {
if(src==i) {
dist.insert({0, i});
d[i]=0;
}
else {
dist.insert({LLONG_MAX, i});
d[i]=LLONG_MAX;
}
}
bool decided[n]={0};
int path[n]={-1};
for(int i=1; i<n; i++) path[i]=-2;
dij(graph, decided, d, path);
if(path[n-1]==-2) cout<<-1;
else {
vector<int> s;
int final=n-1;
while (final!=-1) {
s.push_back(final);
final=path[final];
}
reverse(s.begin(), s.end());
for(auto pi:s) cout<<pi+1<<" ";
}
cout<<endl;
}
Below is the code that produces a correct output for this tc.
#include<bits/stdc++.h>
using namespace std;
# define ll long long int
ll update, previous;
set<pair<ll, int>> dist;
auto it=dist.begin();
int ind=0, n, i, j;
pair<ll, int>p;
void dij(vector<pair<int, ll>> tree[], bool decided[], int d[], int path[]) {
ind=0;
while(!dist.empty()) {
it=dist.begin();
if(it==dist.end()) return;
ind=it->second;
dist.erase(it);
decided[ind]=1;
for(j=0; j<tree[ind].size(); j++) {
update=d[ind]+tree[ind][j].second;
previous=d[tree[ind][j].first];
if(update<previous) {
p=make_pair(previous, tree[ind][j].first);
dist.erase(dist.find(p));
p=make_pair(update, tree[ind][j].first);
dist.insert(p);
path[tree[ind][j].first]=ind;
}
d[tree[ind][j].first]=min(update, previous);
}
}
}
int main()
{
ll edges;
ll x, y, weight;
cin>>n>>edges;
vector<pair<int, ll>> graph[n];
for(i=0; i<edges; i++) {
cin>>x>>y>>weight;
x--; y--;
graph[x].push_back({y, weight});
graph[y].push_back({x, weight});
}
int src=1;
src--;
int d[n];
for(i=0; i<n; i++) {
if(src==i) {
dist.insert({0, i});
d[i]=0;
}
else {
dist.insert({INT_MAX, i});
d[i]=INT_MAX;
}
}
bool decided[n]={0};
int path[n]={-1};
for(int i=1; i<n; i++) path[i]=-2;
dij(graph, decided, d, path);
if(path[n-1]==-2) cout<<-1;
else {
vector<int> s;
int final=n-1;
while (final!=-1) {
s.push_back(final);
final=path[final];
}
reverse(s.begin(), s.end());
for(auto pi:s) cout<<pi+1<<" ";
}
cout<<endl;
}
The only difference in the 2 codes are the following lines:
void dij(vector<pair<int, ll>> tree[], bool decided[], ll d[], int path[])
void dij(vector<pair<int, ll>> tree[], bool decided[], int d[], int path[])
ll d[n];
int d[n];
dist.insert({LLONG_MAX, i})
dist.insert({INT_MAX, i})
d[i]=LLONG_MAX
d[i]=INT_MAX
Could someone please point out how is this creating the following error which I read is related to "allocating memory where I should not" or "attempting to execute delete with a pointer value that was not obtained from new". What is causing this problem and how should I resolve it?
free(): invalid pointer
Aborted (core dumped)