I have a SQL table that contains 16742 records. Using jQuery I do a GET and retrieve all the data. Then, for each row, I check it against every other row, and if my condition matches I save that row to another table using a POST. This all works; we timed it at about 6 seconds to iterate through the table for each row, so we calculated the script would run for about 27 hours (16742 rows × 6 s ≈ 100,452 s ≈ 27.9 hours).
Now, when the application runs, it always stops after 8190 rows have been saved to the other table.
It seems like the server times out. Is there something that can just keep the script running?
Thank you for your input. Here is the code I have so far.
<script type="text/javascript">
    $.getJSON("/Home/GetAllAusPostCodes", function (data) {
        // alert('All Data Collected');
        var firstlat;
        var firstlng;
        var origionalpostcode;
        var auspostid;
        var orsuburb;
        var savedCount = 0; // counts successful posts (replaces the unassigned 'i' I had before)
        $.each(data, function (index, item) {
            // alert(item.ID + " " + item.Postcode + " " + item.Suburb + " " + item.latitude + " " + item.longitude);
            firstlat = item.latitude;
            firstlng = item.longitude;
            auspostid = item.ID;
            origionalpostcode = item.Postcode;
            orsuburb = item.Suburb;
            // alert('lat: ' + firstlat + ' ' + 'lng: ' + firstlng);
            // p1 only depends on the outer row, so build it once per outer iteration
            var p1 = new LatLon(Geo.parseDMS(firstlat), Geo.parseDMS(firstlng));
            $.each(data, function (innerIndex, innerItem) { // renamed so the outer item isn't shadowed
                var p2 = new LatLon(Geo.parseDMS(innerItem.latitude), Geo.parseDMS(innerItem.longitude));
                var distance = p1.distanceTo(p2); // compute once instead of three times
                if (distance > 0 && distance < 30) {
                    // encode text values so suburb names with spaces or '&' don't break the URL
                    var url = "/Home/SaveDistancePostCode?AusPostCodeID=" + auspostid
                        + "&PostCode=" + innerItem.Postcode
                        + "&OrigionalSuburb=" + encodeURIComponent(orsuburb)
                        + "&SuburbName=" + encodeURIComponent(innerItem.Suburb)
                        + "&lat=" + innerItem.latitude
                        + "&lng=" + innerItem.longitude
                        + "&state=" + innerItem.State
                        + "&distance=" + distance;
                    $.post(url, function () {
                        savedCount++;
                    });
                }
            });
        });
        // note: $.post is asynchronous, so this fires before the posts have finished
        alert('complete');
    });
</script>
This current script still stops at 8190 records, even on a much better PC than in my first tests. I will continue trying to get it working. Thanks for your input.
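For reference, here is a minimal sketch of one idea for keeping the browser responsive: split the outer loop into small batches with setTimeout, so control returns to the browser between batches instead of running one enormous synchronous loop. The batch size and the processRow callback below are illustrative assumptions, not part of my current code:

function processInBatches(data, batchSize, processRow) {
    // 'data' is assumed to be the same array returned by /Home/GetAllAusPostCodes;
    // processRow is a hypothetical stand-in for the inner comparison/post logic above
    var index = 0;
    function nextBatch() {
        var end = Math.min(index + batchSize, data.length);
        for (; index < end; index++) {
            processRow(data[index], index);
        }
        if (index < data.length) {
            setTimeout(nextBatch, 0); // yield to the browser, then continue with the next batch
        } else {
            alert('complete');
        }
    }
    nextBatch();
}

// e.g. processInBatches(data, 50, function (item) { /* pairwise check + $.post here */ });

This doesn't reduce the total amount of work; it just spreads it out so the page isn't locked inside one long-running script.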