The code I was given saves web-scraped data to Google Sheets; however, every scheduled scrape appends the same entries again, so the sheet fills up with duplicates of data from previous runs. Is there a way to change the code so that it only saves unique rows and also brings in edited/updated info? I have added the code below:
// imports the latest ParseHub run into cell A1 as CSV
function myFunction() {
  SpreadsheetApp.getActiveSheet().getRange('A1').setValue('=importdata("https://www.parsehub.com/api/v2/projects/tZOywrNXQ3Q4/last_ready_run/data?api_key=tn6CGEyTTVxE&format=csv")');
}
// custom menu function
function onOpen() {
  var ui = SpreadsheetApp.getUi();
  ui.createMenu('Custom Menu').addItem('Save Data', 'saveData').addToUi();
}
// function to save data
function saveData() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sh4 = ss.getSheetByName('Zapier Tax Leads');
  var sh3 = ss.getSheetByName('Current Tax Leads');
  // copies everything from 'Current Tax Leads' and appends it below the existing rows in 'Zapier Tax Leads'
  var data = sh3.getRange(1, 1, sh3.getLastRow(), 33).getValues();
  sh4.getRange(sh4.getLastRow() + 1, 1, data.length, data[0].length).setValues(data);
}
function removeDuplicates() {
  var sheet = SpreadsheetApp.getActive().getSheetByName('Zapier Tax Leads');
  var data = sheet.getDataRange().getValues();
  var newData = [];
  // keep a row only if an identical row hasn't been seen already
  for (var i = 0; i < data.length; i++) {
    var row = data[i];
    var duplicate = false;
    for (var j = 0; j < newData.length; j++) {
      if (row.join() == newData[j].join()) {
        duplicate = true;
      }
    }
    if (!duplicate) {
      newData.push(row);
    }
  }
  sheet.clearContents();
  sheet.getRange(1, 1, newData.length, newData[0].length).setValues(newData);
}
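I did wonder whether the nested loop in removeDuplicates could be replaced with an object used as a set of already-seen rows, so each row is only checked once. This is just a sketch of what I mean (the name removeDuplicatesWithSet is mine), and it still only catches rows where every cell matches exactly, so it wouldn't handle the edited/updated info on its own:

function removeDuplicatesWithSet() {
  var sheet = SpreadsheetApp.getActive().getSheetByName('Zapier Tax Leads');
  var data = sheet.getDataRange().getValues();
  var seen = {};      // joined row -> true, used as a simple set
  var newData = [];
  for (var i = 0; i < data.length; i++) {
    // join with an unlikely separator so two different rows can't produce the same key
    var key = data[i].join('\u0001');
    if (!seen[key]) {
      seen[key] = true;
      newData.push(data[i]);
    }
  }
  sheet.clearContents();
  sheet.getRange(1, 1, newData.length, newData[0].length).setValues(newData);
}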
Edit:
The tab labeled "Zapier Tax Leads" is where the code copies the data to, and where the duplicates pile up.
In the "Current Tax Leads" tab, the only columns that get updated are columns H through O.
Spreadsheet link: https://docs.google.com/spreadsheets/d/1G1PLs46cnQ-CyJWBI0ibKNmYosSEQRcrAGE8Qx2MArU/edit?usp=sharing
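What I'm really after is something like the sketch below. It assumes column A in both tabs holds a unique identifier for each lead (I'd need to confirm that), looks each 'Current Tax Leads' row up in 'Zapier Tax Leads', overwrites only columns H through O when the lead already exists, and appends the whole row when it doesn't. The function name upsertLeads and the column-A key are my own assumptions, not part of the existing code; the 33-column width is the same one saveData already uses:

function upsertLeads() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var source = ss.getSheetByName('Current Tax Leads');
  var target = ss.getSheetByName('Zapier Tax Leads');
  var sourceData = source.getRange(1, 1, source.getLastRow(), 33).getValues();
  var targetData = target.getDataRange().getValues();

  // map each existing key (assumed to be in column A) to its 1-based row number in the target tab
  var rowByKey = {};
  for (var i = 0; i < targetData.length; i++) {
    rowByKey[targetData[i][0]] = i + 1;
  }

  var newRows = [];
  for (var j = 0; j < sourceData.length; j++) {
    var row = sourceData[j];
    var key = row[0];
    if (rowByKey[key]) {
      // existing lead: refresh only columns H through O (sheet columns 8-15)
      target.getRange(rowByKey[key], 8, 1, 8).setValues([row.slice(7, 15)]);
    } else {
      // new lead: append the whole row
      newRows.push(row);
    }
  }
  if (newRows.length > 0) {
    target.getRange(target.getLastRow() + 1, 1, newRows.length, newRows[0].length).setValues(newRows);
  }
}

If something like this is the right direction, I'd run removeDuplicates once to clean out the rows that have already piled up, then point the 'Save Data' menu item at the new function instead of saveData.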