I am reading the contents of a directory, and returning the filename and some metadata for each file. I need to save this in an object and then do some filtering before I append it to the page. The problem is that the function ends before the asynchronous call adds all the items to the object. Maybe there is something I don't get about asynchronous javascript, but it's a pain in the butt.
function getAr(dir) {
var ar = {} || [];
ar.files = [];
ar.files.posts = [];
ar.files.pages = [];
ar.set = function(params) {
s3.headObject(params, function(err, data) {
if (err) console.log(err, err.stack);
else {
console.log(data);
if (params['Key'].match('/page/')) {
var size = ar['files']['pages'].length;
ar.files.pages[size] = {'id': data.Metadata.id, 'name': params['Key']};
}
else if (params['Key'].match('/post/')) {
var size = ar['files']['pages'].length;
ar.files.posts[size] = {'id': data.Metadata.id, 'name': params['Key']};
}
}
});
};
ar.ls = function(path) {
var params = {
Bucket: bucket,
EncodingType: 'url',
Prefix: path
};
s3.listObjects(params, function(err, data) {
if (err) {console.log(err, err.stack);}
else {
var file = null;
for (var num in data.Contents) {
params = {};
file = data.Contents[num]['Key'];
param = {
Bucket: bucket,
Key: file
};
ar.set(params);
}
//I need to do some sorting of ar.files here, but the script continues on without waiting for the asynchronous calls to finish.
//console.log(ar.files);
}
});
//ar.files returns null because the scripts haven't finished
return ar.files;
};
return ar.ls(dir);
}
var path = '/_admin/src/pages/';
console.log(getAr(path));