This is a predominantly theoretical question, as I guess I would not use the fetched data outside of the .then() method. I was just checking the behaviour of fetch().then() out of curiosity, as I like to test and understand the principles of the languages I learn.
As expected
setTimeout(()=>{
console.log("myGlobVar in setTimeOut");
console.log(myGlobVar)}, 1000);
logs the fetched object, provided I use a long enough delay in setTimeout (about 40 ms in this case).
But I'm curious why the
console.log("myGlobVar after for loop");
console.log(myGlobVar); //returns undefined
does not work.
My rationale here was that running the loop long enough would give fetch() enough time to retrieve the data and assign it to myGlobVar — which doesn't happen, no matter how long the loop runs.
Actually — counterintuitively — for extremely high iteration counts (e.g. 1000000000), even the
setTimeout(()=>{
console.log("myGlobVar in setTimeOut");
console.log(myGlobVar)}, 1000);
returns undefined.
<script>
// Fix: terminate the strict-mode directive explicitly instead of
// relying on automatic semicolon insertion.
"use strict";

// Holds the fetched peaks object; stays `undefined` until the fetch
// promise chain resolves on a later event-loop turn.
let myGlobVar;
// Kicks off an asynchronous fetch of the peaks JSON. The resolved,
// parsed value is stored in the module-level `myGlobVar` — but only
// once the network round trip completes, i.e. on a later event-loop
// turn, never during the current synchronous script run.
function loadPeaks() {
  console.log("loadPeaks");

  // Reject non-2xx responses, otherwise hand back the parsed JSON body.
  const checkStatus = (res) => {
    if (!res.ok) {
      throw new Error("HTTP error: " + res.status);
    }
    return res.json();
  };

  // Publish the parsed peaks into the module-level variable.
  const storePeaks = (data) => {
    // wavesurfer.load('soundofChengdu.mp3', data.data);
    myGlobVar = data;
  };

  fetch('soundofChengdu.json')
    .then(checkStatus)
    .then(storePeaks)
    .catch((err) => {
      console.log({errorIs: err});
      // console.error('error', err);
    });
}
// Start the fetch. This only *queues* the request; the .then callbacks
// can never run while this script's synchronous code still occupies the
// call stack — promise reactions are dispatched from the microtask
// queue only after the current task finishes.
loadPeaks();
// Same tick as loadPeaks(): the fetch cannot have resolved yet.
console.log("myGlobVar without waiting");
console.log(myGlobVar); //returns undefined
// The timer callback runs on a later event-loop turn; by ~1000 ms the
// fetch .then has normally already assigned myGlobVar (observed to need
// roughly 40 ms here).
setTimeout(()=>{console.log("myGlobVar in setTimeOut"); console.log(myGlobVar)}, 1000); //returns undefined under +-40 ms, works above.
// Busy-wait: this loop BLOCKS the main thread. It does not "give fetch
// time" — the event loop can only service the fetch's completion after
// the whole synchronous script (including this loop) has finished.
// NOTE(review): b starts undefined, so b=b+a yields NaN every
// iteration — presumably the sum is irrelevant and this is purely a
// time-burner.
let b;
console.log("loop running");
for (let a=0; a<100000000; a++) {
b=b+a;
}
// Still undefined: the loop ran inside the same synchronous task, so
// the fetch callback has not yet had a turn on the event loop.
console.log("myGlobVar after for loop");
console.log(myGlobVar); //returns undefined
</script>