Trying to iterate through a Wikipedia API response where the page keys are all unique. I'm building a cleaned-up result object from it so the data is actually useful. I'm also working in noodl.net, hence the Inputs and Outputs.
// query.pages is an object keyed by pageid, not an array,
// so convert it to an array of page objects before looping.
let resultPages = Object.values(Response.content.query.pages);

let wikiPagesArray = [];
for (let i = 0; i < resultPages.length; i++) {
    let page = resultPages[i];
    let obj = Noodl.Object.create({
        name: page.title,
        // ?curid= is Wikipedia's permalink format for a pageid
        url: "https://en.wikipedia.org/?curid=" + page.pageid,
        description: page.extract
        // imageUrl isn't in this response; see the request sketch below
    });
    wikiPagesArray.push(obj);
}

// Output the objects we just built, not the raw pages
Outputs.resultPages = wikiPagesArray;
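The response below only returns pageid, title, and extract per page, so there is nothing to map imageUrl (or a ready-made URL) from. If the request itself can be changed, the MediaWiki API can include both. This is a rough sketch of such a request; the extra prop values and the "deep web" search term are assumptions on my part, not something taken from the script above:

```js
// Sketch: generator=search plus extra props so each page also carries
// fullurl (from inprop=url) and thumbnail.source (from pageimages).
const params = new URLSearchParams({
    action: "query",
    generator: "search",
    gsrsearch: "deep web",   // placeholder search term
    prop: "extracts|pageimages|info",
    exintro: "1",            // only the intro paragraph
    explaintext: "1",        // plain text, no HTML
    inprop: "url",           // adds page.fullurl
    pithumbsize: "200",      // adds page.thumbnail.source
    format: "json",
    origin: "*"              // CORS for browser requests
});

fetch("https://en.wikipedia.org/w/api.php?" + params)
    .then(r => r.json())
    .then(data => {
        for (const page of Object.values(data.query.pages)) {
            console.log(page.title, page.fullurl, page.thumbnail?.source);
        }
    });
```

With inprop=url each page gains a fullurl field, and pageimages adds thumbnail.source when the article has a lead image, which would let the loop fill url and imageUrl directly.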
Current response:
"batchcomplete": "",
"continue": {
"gsroffset": 10,
"continue": "gsroffset||"
},
"query": {
"pages": {
"454403": {
"pageid": 454403,
"ns": 0,
"title": "Deep web",
"index": 3,
"extract": "The deep web, invisible web, or hidden web are parts of the World Wide Web whose contents are not indexed by standard web search-engines."
},
"3422674": {
"pageid": 3422674,
"ns": 0,
"title": "Form (HTML)",
"index": 9,
"extract": "A webform, web form or HTML form on a web page allows a user to enter data that is sent to a server for processing."```