1

I created a script that imports a CSV file from a URL into a Google Sheet. I have used Utilities.parseCsv(csv) in the past with no problems. However, this particular CSV file contains a field that has multiple line returns (ALT+Enter). This causes the parsed file to create extra rows when it encounters these fields. I think the best approach is to sanitize the CSV file before parsing, but I do not know how to do this.

The problem field is 'Description' (Column G).

Current Script:

/**
 * Imports a remote CSV report into the dashboard sheet.
 * Fetches the file over HTTPS, parses it with Utilities.parseCsv,
 * and writes the resulting 2-D array starting at cell A1.
 */
function importCampaignReport() {
  var reportUrl = "https://s3.amazonaws.com/redacted.csv";
  var reportText = UrlFetchApp.fetch(reportUrl).getContentText();
  var rows = Utilities.parseCsv(reportText);
  var targetSheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('Perm URL Campaign Dashboard Report');
  // Size the output range to exactly match the parsed data.
  targetSheet.getRange(1, 1, rows.length, rows[0].length).setValues(rows);
}

Sample csvContent:

"Campaign Status","Billing Status",ID,Mon,Customer,Campaign,Description,"Customer Type",CID,CPL,Archived,"Start date","End date","Sales rep","Total Days Remaining","Monthly Days Left",Pace,"Month Delivered","Month Accepted","Month Remaining","% Delivered","% Accepted","Campaign Goal","Campaign Delivered","Campaign Remaining","% Campaign delivered","Campaign Undelivered","Campaign Awaiting Verification","Campaign Unverified","% Campaign Possible”,”CL Goal”,”CL Delivered”,”CL Remaining”,”CL Undelivered”,”CL Awaiting Verification”,”CL Unverified","#1 Partner Code","#1 Partner Goal","#1 Partner Cost","#2 Partner Code","#2 Partner Goal","#2 Partner Cost","#3 Partner Code","#3 Partner Goal","#3 Partner Cost”,”CL Mon Delivered","#1 Partner Mon Delivered","#2 Partner Mon Delivered","#3 Partner Mon Delivered","#4 Partner Mon Delivered","#5 Partner Mon Delivered","#6 Partner Mon Delivered","#4 Partner Code","#4 Partner Goal","#4 Partner Cost","#5 Partner Code","#5 Partner Goal","#5 Partner Cost","#6 Partner Code","#6 Partner Goal","#6 Partner Cost"
Completed,Pending,4607,"Mar 2018”,XYZ,”Big Software","IT Staff+, 500+, UK, France, Netherlands, CQs",1,,27.50,no,03/01/2018,03/24/2018,RD,0,0,87,97,87,0,111,89,87,97,0,111,0,0,0,111,0,2,0,0,0,0,STW001,87,13.00,,,,,,,2,95,,,,,,,,,,,,,,
Completed,Pending,4938,"Jan 2018",TestCustomer,TestCampaign,"Description Line 1
Description Line 2
Description Line 3",1,,0.00,no,04/09/2017,04/09/2018,RD,0,0,1,0,0,1,0,0,10,0,10,0,0,0,2,20,0,0,0,0,0,0,LC,10,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Completed,Pending,4938,"Feb 2018",TestCustomer,TestCampaign,"Description Line 1
Description Line 2
Description Line 3",1,,0.00,no,04/09/2017,04/09/2018,RD,0,0,1,0,0,1,0,0,10,0,10,0,0,0,2,20,0,0,0,0,0,0,LC,10,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Completed,Pending,4938,"Mar 2018",TestCustomer,TestCampaign,"Description Line 1
Description Line 2
Description Line 3",1,,0.00,no,04/09/2017,04/09/2018,RD,0,0,1,0,0,1,0,0,10,0,10,0,0,0,2,20,0,0,0,0,0,0,LC,10,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Completed,Pending,4938,"Apr 2018",TestCustomer,TestCampaign,"Description Line 1
Description Line 2
Description Line 3",1,,0.00,no,04/09/2017,04/09/2018,RD,0,0,1,0,0,1,0,0,10,0,10,0,0,0,2,20,0,0,0,0,0,0,LC,10,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Active,Pending,4944,"Jan 2018",TestCustomer1,Aberdeen,"Description Line 1
Description Line 2",1,,0.00,no,04/24/2017,04/24/2018,MN,0,0,9,0,0,9,0,0,100,0,100,0,0,1,9,10,0,0,0,0,0,0,ABD,100,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Active,Pending,4944,"Feb 2018",TestCustomer1,Aberdeen,"Description Line 1
Description Line 2",1,,0.00,no,04/24/2017,04/24/2018,MN,0,0,8,0,0,8,0,0,100,0,100,0,0,1,9,10,0,0,0,0,0,0,ABD,100,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Active,Pending,4944,"Mar 2018",TestCustomer1,Aberdeen,"Description Line 1
Description Line 2",1,,0.00,no,04/24/2017,04/24/2018,MN,0,0,9,0,0,9,0,0,100,0,100,0,0,1,9,10,0,0,0,0,0,0,ABD,100,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Active,Pending,4944,"Apr 2018",TestCustomer1,Aberdeen,"Description Line 1
Description Line 2",1,,0.00,no,04/24/2017,04/24/2018,MN,0,0,7,0,0,7,0,0,100,0,100,0,0,1,9,10,0,0,0,0,0,0,ABD,100,0.00,,,,,,,0,0,,,,,,,,,,,,,,
Active,Pending,5035,"Jan 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,254,0,0,254,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Feb 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,229,0,0,229,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Mar 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,254,0,0,254,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Apr 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,246,0,0,246,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"May 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,254,0,0,254,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Jun 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,246,0,0,246,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Jul 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,254,0,0,254,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Aug 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,254,0,0,254,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Sep 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,247,0,0,247,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Oct 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Nov 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,247,0,0,247,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Dec 2018",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Jan 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Feb 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,230,0,0,230,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Mar 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Apr 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,247,0,0,247,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"May 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Jun 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,247,0,0,247,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Jul 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,0,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Aug 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,12,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Sep 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,30,247,0,0,247,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Active,Pending,5035,"Oct 2019",TestCustomer,TestCampaign,"Description Line 1",1,,0.00,no,05/04/2017,05/04/2020,MN,259,31,255,0,0,255,0,0,9002,0,9002,0,0,0,0,0,9000,0,9000,0,0,0,Redacted,1,0.00,B2M001,1,0.00,,,,0,0,0,,,,,,,,,,,,,
Rubén
  • 34,714
  • 9
  • 70
  • 166
ob1
  • 23
  • 4
  • 1
    Yeah, this can be done with regex as long as you know the field number, _AND_ there is no delimiter-as-data within that particular field. Post a sample of the file and you'll get some help. –  Aug 16 '19 at 19:56
  • 1
    Oh, need to see the raw csv file, not via some _viewer_. Can regex be run on a viewer, not sure. –  Aug 16 '19 at 20:43
  • You should be able to download the file from the website. – ob1 Aug 16 '19 at 21:02
  • @ob1 Thank you for replying and adding the script. From your script, I could understand about the language of your script and the issue of your script. So I proposed a modified script. Could you please confirm it? If I misunderstood your question and that was not the direction you want, I apologize. By the way, I added a tag. – Tanaike Aug 17 '19 at 07:07

2 Answers2

2
  • You want to put the CSV data to a Spreadsheet.
  • CSV data includes the values which have the line breaks.
    • The values which have the line breaks are enclosed by the double quotes.
    • Such values can be seen at the column "G".
  • You want to put the values including the line breaks.
  • You want to achieve this using Google Apps Script.

If my understanding is correct, how about this answer? Please think of this as just one of several answers.

Issue:

Unfortunately, at the current stage, it seems that Utilities.parseCsv() cannot be used directly on data whose cell values contain line breaks. In order to avoid this issue, I would like to propose the following 2 patterns.

Pattern 1:

In this pattern, the following flow is used.

Flow:

  1. So that the data can be parsed by Utilities.parseCsv(), the values enclosed in double quotes are first replaced by a temporary placeholder string.
  2. The values are parsed by Utilities.parseCsv().
  3. Replace the temporary placeholder strings with the original values.
  4. Put the data to Spreadsheet.

Modified script:

When your script is modified, please modify as follows.

From:
var csvData = Utilities.parseCsv(csvContent);
To:
// Step 1: replace every double-quoted value with a temporary placeholder so
// that embedded line breaks cannot split records during parsing. The original
// (unquoted) text of each value is remembered in order of appearance.
var temp = [];
var tempStr = "###temp###";
var t = csvContent.replace(/\"[\w\s\S]+?\"/g, function(m) {
  // [\w\s\S] matches any character, including newlines, so multi-line
  // quoted fields are captured whole; the quotes themselves are stripped.
  temp.push(m.replace(/\"/g, ""));
  return tempStr;
});

// Step 2: parse the sanitized text as CSV data.
var csvData = Utilities.parseCsv(t);

// Step 3: replace the temporary placeholder strings with the original values.
// NOTE: scan every cell of each row — the previous version used
// e.indexOf(tempStr), which restores only the FIRST placeholder in a row and
// leaves the counter out of sync when a row contains several quoted fields.
var h = 0;
csvData.forEach(function(row, i) {
  row.forEach(function(cell, j) {
    if (cell === tempStr) {
      csvData[i][j] = temp[h];
      h++;
    }
  });
});

Pattern 2:

In this pattern, the following flow is used.

Flow:

  1. Convert the CSV file to Google Spreadsheet.
    • This converted Spreadsheet is used as the temporal Spreadsheet.
  2. Retrieve the values from the converted Spreadsheet.
  3. Delete the temporal Spreadsheet.
  4. Put the values to the destination Spreadsheet.

Sample script:

Before you use this script, please enable Drive API at Advanced Google services.

/**
 * Imports the CSV report by letting Drive convert it to a temporary
 * Google Spreadsheet, reading the converted values, then deleting the
 * temporary file and writing the values to the dashboard sheet.
 * Requires the Drive advanced service to be enabled.
 */
function importCampaignReport() {
  var csvUrl = "https://s3.amazonaws.com/redacted.csv";
  var csvBlob = UrlFetchApp.fetch(csvUrl).getBlob(); // Modified

  // I added below script.
  // Convert the CSV blob into a throwaway spreadsheet via the Drive API.
  var tempFileId = Drive.Files.insert({title: "temporalSpreadsheet", mimeType: MimeType.GOOGLE_SHEETS}, csvBlob).id;
  // Drive's conversion handles quoted multi-line fields correctly.
  var values = SpreadsheetApp.openById(tempFileId).getSheets()[0].getDataRange().getValues();
  Drive.Files.remove(tempFileId);

  var destinationSheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('Perm URL Campaign Dashboard Report');
  destinationSheet.getRange(1, 1, values.length, values[0].length).setValues(values);
}

Note:

  • In this modified script, I could confirm that the script worked using your shared CSV data.
  • When the CSV file is manually imported to Spreadsheet, I could confirm that the result was the same with the above modified script.

References:

Added 1:

I noticed that when the Sheets API is used, the values retrieved from the URL are parsed and put into the Spreadsheet directly. So, as one more pattern, I would like to propose this. The sample script is as follows. In this case, no temporary file is used, and the processing cost will likely be lower than that of the above 2 patterns.

Sample script:

Before you use this script, please enable Sheets API at Advanced Google services.

/**
 * Imports the CSV report by handing the raw text to the Sheets API
 * pasteData request, which parses quoted multi-line fields natively.
 * Requires the Sheets advanced service to be enabled.
 */
function importCampaignReport() {
  var csvUrl = "https://s3.amazonaws.com/redacted.csv";
  var csvContent = UrlFetchApp.fetch(csvUrl).getContentText();

  var activeSpreadsheet = SpreadsheetApp.getActiveSpreadsheet();
  var targetSheet = activeSpreadsheet.getSheetByName('Perm URL Campaign Dashboard Report');
  // One batchUpdate call pastes and parses the whole CSV in a single request.
  var pasteRequest = {pasteData: {data: csvContent, coordinate: {sheetId: targetSheet.getSheetId()}, delimiter: ","}};
  Sheets.Spreadsheets.batchUpdate({requests: [pasteRequest]}, activeSpreadsheet.getId());
}

Reference:

Added 2:

  • When the value is put to the Spreadsheet, you want to remove the line breaks from the values.

I could understand like above. If my understanding is correct, how about the following sample script?

Before you use this script, please enable Sheets API at Advanced Google services.

Sample script:

/**
 * Imports the CSV report via the Sheets API and then strips line breaks
 * from column G in the same batchUpdate call: first a pasteData request
 * parses the CSV, then a findReplace request rewrites "\n" to a space
 * within columns index 6..7 (i.e. column G).
 * Requires the Sheets advanced service to be enabled.
 */
function importCampaignReport() {
  var csvUrl = "https://s3.amazonaws.com/redacted.csv";
  var data = UrlFetchApp.fetch(csvUrl).getContentText();

  var activeSpreadsheet = SpreadsheetApp.getActiveSpreadsheet();
  var targetSheet = activeSpreadsheet.getSheetByName('Perm URL Campaign Dashboard Report');
  var gridId = targetSheet.getSheetId();
  // Requests run in order: paste first, then clean up column G.
  var pasteRequest = {pasteData: {data: data, coordinate: {sheetId: gridId}, delimiter: ","}};
  var cleanupRequest = {findReplace: {range: {sheetId: gridId, startColumnIndex: 6, endColumnIndex: 7}, find: "\n", replacement: " ", searchByRegex: true}};
  Sheets.Spreadsheets.batchUpdate({requests: [pasteRequest, cleanupRequest]}, activeSpreadsheet.getId());
}
  • In your comment, about the 5th column (G), I think that the 5th column is the column "E". But you say the column "G". I confuse about this. So in the current script, the column "G" can be processed. If you want to do this for the column "E", please set startColumnIndex: 4, endColumnIndex: 5.
  • If an error occurs, please provide your script for replicating the issue. By this, I would like to confirm it.
Tanaike
  • 181,128
  • 11
  • 97
  • 165
  • Thanks for your thoughtful and detailed response. Unfortunately, none of your solutions worked. Perhaps I confused things by stating "Unfortunately, the field is not wrapped in double quotes. " This has to do with the database that generates the CSV file. The problem occurs when the data processor enters ALT+Enter in a field. The problem can be averted if the data processor wraps the field entry in double quotes. The database exports the CSV file and ignores any line return that is wrapped in double quotes. I am trying to figure out a way to catch this if the data processor forgets to do this – ob1 Aug 18 '19 at 14:23
  • @ob1 Thank you for replying. I apologize for the inconvenience. I used your shared CSV file. In the CSV file, the column "G" was enclosed by the double quotes. So I used this. Is my understanding correct? If to use the shared CSV file is not correct, can you provide the value of `csvContent` of `var csvContent = UrlFetchApp.fetch(csvUrl).getContentText();`? Of course, please remove your personal information. – Tanaike Aug 18 '19 at 22:05
  • I added a sample of csvContent. Please note that fields are wrapped in double quotes when the database encounters a space or comma within a field when exporting the CSV file. I apologize for not being clearer in the original post. – ob1 Aug 19 '19 at 03:56
  • @ob1 Thank you for replying and adding the values. In your additional value, the double quotes are used. But in your latest replying, you said `the field is not wrapped in double quotes.`. So I cannot understand about your situation. Can you explain about this? By the way, if your additional value (`Sample csvContent:`) is the correct value, the 3rd script in my answer can be used. If in your environment, it didn't work, could you please provide the whole script for replicating your situation? By this, I would like to confirm it. – Tanaike Aug 19 '19 at 04:26
  • The following is one record and should be displayed on one line: – ob1 Aug 19 '19 at 15:12
  • The following is part of one record and should be displayed on one line: "Description Line 1 Description Line 2 Description Line 3", The entire field is wrapped in single quotes, but the parser doesn't ignore the 'hidden' line returns between Description Line 1 Description Line 2 Description Line 3. I think the field needs to be wrapped in double quotes (e.g: ""Description Line 1 Description Line 2 Description Line 3"") for the parser to work properly. I copied your 3rd script exactly and replaced the redacted part. Returned error: TypeError: Cannot call method "batchUpdate" of undefined. – ob1 Aug 19 '19 at 15:41
  • @ob1 Thank you for replying. I added one more sample script. Could you please confirm it? If that was not the result you want, I apologize. – Tanaike Aug 19 '19 at 22:22
  • I only want to remove any line breaks that are in the 5th column (G). I copied your 4th script exactly and replaced the redacted part. ReferenceError: "data" is not defined. – ob1 Aug 20 '19 at 15:29
  • @ob1 Thank you for replying. I apologize for the inconvenience. I noticed that I had mistaken my copy paste. This is due to my poor skill. I deeply apologize for my poor skill. I updated my answer. Could you please confirm it? – Tanaike Aug 20 '19 at 22:26
  • I tried your latest script and it works. It also runs substantially faster than the script that used the parseCSV utility. Thanks so much for all the time and effort you put into this solution. – ob1 Aug 21 '19 at 06:19
  • @ob1 Thank you for replying. I'm glad your issue was resolved. Thank you, too. – Tanaike Aug 21 '19 at 22:03
0

Just a simple regex replace with a callback is all you need.
It simply matches the quoted fields and removes the line breaks inside them.

The regex and callback:

// Multi-line mode so ^ anchors at the start of every record; matches one CSV
// field (with its leading comma or line start) that contains a quoted section.
var rx = /(?:,|^)([^,"]*(?:"[^"]*")[^,"]*)(?=,|$)/gm;
// Strip any CR/LF characters out of each matched field.
var csv_new = csv.replace(rx, function (field) {
  return field.replace(/\r?\n/g, '');
});

Regex expanded for readability:

 (?: , | ^ )
 (                             # (1 start)
      [^,"]*  
      (?: " [^"]* " )
      [^,"]*  
 )                             # (1 end)
 (?= , | $ )

JS Sample:

// Matches one CSV field containing a double-quoted section, together with its
// leading comma (or line start); multi-line mode anchors ^ at every record.
var rx = /(?:,|^)([^,"]*(?:"[^"]*")[^,"]*)(?=,|$)/gm;

// Sample CSV text. NOTE: the first concatenation previously read `+ +` — the
// stray unary plus coerced the following string to NaN, so the literal text
// "NaN" replaced the first data record. Fixed to a single binary `+`.
var csv = 
"Campaign Status,Billing Status,ID,Mon,Customer,Campaign,Description,Customer Type,CID,CPL,Archived,Start date,End date,Sales rep,Total Days Remaining,Monthly Days Left,Pace,Month Delivered,Month Accepted,Month Remaining,% Delivered,% Accepted,Campaign Goal,Campaign Delivered,Campaign Remaining,% Campaign delivered,Campaign Undelivered,Campaign Awaiting Verification,Campaign Unverified,% Campaign Possible\n" +
"Completed,Paid,6252,18-Jan,Google,Oracle_787_41336,\"US, CQ, Fin\",1,25808,28.5,no,1/3/18,1/22/18,MN,0,0,82,122,50,0,148,40,82,122,0,148,0,0,0,148\n" +
"Completed,Paid,6253,18-Jan,Google,Optum_790_41325,\"US, Selected C-Level, Healthcare\",1,26642,18.53,no,1/3/18,1/26/18,MN,0,0,60,64,63,0,106,98,60,64,0,106,0,0,0,106\n" +
"Completed,Paid,6254,18-Jan,Google,Cyren_791_41352,\"US, IT, Mgr, Dir, C-Level 250-1k\",1,26965,15.2,no,1/3/18,1/31/18,MN,0,0,188,191,187,0,101,97,188,191,0,101,0,0,2,102\n" +
"Completed,Paid,6255,18-Jan,Google,Cornerstone_xxx_xxxxx,\"790 - 41392 150 leads\n" +
"791 - 41391 150 leads\n" +
"US, Exec, HR, Training\",1,26978,9.5,no,1/4/18,1/31/18,MN,0,0,300,312,274,0,104,87,300,312,0,104,0,1,11,108\n" +
"Completed,Paid,6256,18-Jan,Google,Cornerstone_787_41393,\"UK, Exec, HR, Training\",1,26975,20.9,no,1/5/18,1/31/18,MN,0,0,87,99,90,0,113,90,87,99,0,113,0,0,0,113\n" +
"Completed,Paid,6257,18-Jan,Google,Cornerstone_787_41394,\"ANZ, Exec, HR, Training\",1,26976,22.8,no,1/5/18,1/31/18,MN,0,0,80,85,82,0,106,96,80,85,0,106,0,0,0,106\n" +
"Completed,Paid,6258,18-Jan,Google,CA Technology ABM_791_41434,\"ABM, Sweden, IT, Mgr, Dir, C-Level\",1,25427,26.6,no,1/5/18,1/19/18,MN,0,0,7,14,7,0,200,50,7,14,0,200,0,0,0,200\n" +
"Completed,Paid,6259,18-Jan,Google,CA Technologies ABM_791_41427,\"ABM, Spain, IT, Mgr, Dir, C-Level\",1,25424,26.6,no,1/5/18,1/19/18,MN,0,0,18,25,18,0,138,72,18,25,0,138,0,13,0,211\n" +
"Completed,Paid,6260,18-Jan,Google,CA Technologies ABM_791_41428,\"ABM, Italy, IT, Mgr, Dir, C-Level\",1,25421,26.6,no,1/6/18,1/19/18,MN,0,0,17,20,17,0,117,85,17,20,0,117,0,0,1,123\n" +
"Completed,Paid,6262,18-Jan,Google,Mimecast_791_41482,\"US, IT mgr, dir, c-level, 250-1k\",1,24247,14.06,no,1/10/18,1/31/18,MN,0,0,25,30,25,0,120,83,25,30,0,120,0,0,1,124\n" +
"Completed,Paid,6263,18-Jan,Google,Esker_791_41523,\"US, IT mgr+, 250+\",1,27024,9.5,no,1/10/18,1/31/18,MN,0,0,14,71,68,0,507,95,68,71,0,104,0,0,0,104\n" +
"Completed,Paid,6264,18-Jan,Google,F5 ABM_790_41474,\"ABM, US, CA, IT Mgr+, Selected Staffs\",1,26954,24.7,no,1/10/18,1/26/18,MN,0,0,195,203,195,0,104,96,195,203,0,104,0,1,11,110\n" +
"Completed,Paid,6265,18-Jan,Google,Symantec ABM_791_41498,\"ABM, US, Exec, IT, Mgr+, Selected Staff\",1,27048,26.6,no,1/10/18,1/31/18,MN,0,0,185,188,185,0,101,98,185,188,0,101,19,2,349,301\n" +
"Completed,Paid,6266,18-Jan,Google,Great Bay ABM_791_41520,\"ABM, US, IT - Security, C-Level, Staff, Mgr, VP\",1,27018,22.8,no,1/10/18,1/31/18,MN,0,0,26,71,46,0,273,64,26,71,0,273,2,0,0,280\n" +
"Completed,Paid,6267,18-Jan,Google,Staples_xxx_xxxxx,\"790 - 41579 - 300 leads791 - 41578 - 300 leadsUS, IT Mgr, Dir, C-Level 10-250\",1,27044,17.1,no,1/10/18,1/31/18,MN,0,0,600,623,607,0,103,97,600,623,0,103,0,0,55,112\n" +
"Completed,Pending,6270,18-Jan,Google,Opt In Page Only Test,,1,26978,0,no,1/4/18,1/31/18,MN,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\n" +
"Completed,Paid,6271,18-Jan,Google,LiveRamp_791_41590,\"France, Mkt, 1k+\",1,26963,25.64,no,1/11/18,1/31/18,MN,0,0,23,23,23,0,100,100,23,23,0,100,5,0,0,121\n" +
"Completed,Paid,6272,18-Jan,Google,Cardinal Health_791_41631,\"US, Healthcare, CEO, CFO\",1,26920,30.4,no,1/12/18,1/31/18,MN,0,0,50,51,50,0,102,98,50,51,0,102,0,29,0,160\n" +
"Completed,Paid,6273,18-Jan,Google,ePharma_791_41585,\"US, Mkt Staff+\",1,27056,22.8,no,1/12/18,1/31/18,MN,0,0,83,95,83,0,114,87,83,95,0,114,0,0,0,114\n" +
"Completed,Paid,6274,18-Jan,Google,Staples_xxx_xxxxx,\"790 - 41584 - 165 Leads791 - 41583 - 160 LeadsUS, Operations Fin\",1,27043,17.1,no,1/12/18,1/31/18,MN,0,0,325,337,325,0,103,96,325,337,0,103,0,30,0,112\n" +
"Completed,Paid,6275,18-Jan,Google,Staples_xxx_xxxxx,\"790 - 41582 - 150 Leads\n" +
"791 - 41581 - 150 Leads\n" +
"US, Operations\",1,27045,17.1,no,1/12/18,1/31/18,MN,0,0,300,309,300,0,103,97,300,309,0,103,0,0,0,103\n" +
"Completed,Paid,6276,18-Jan,Google,Envoy_791_41654,\"US, HR, Talent, 1 cq\",1,27096,19,no,1/15/18,1/31/18,MN,0,0,75,83,75,0,110,90,75,83,0,110,16,0,6,140\n" +
"Completed,Pending,6277,18-Jan,Google,Datamatics Test Campaign,Datamatics Test Campaign,1,24247,0,no,1/10/18,1/31/18,MN,0,0,20,0,0,20,0,0,20,0,20,0,0,21,6,135\n" +
"Completed,Pending,6278,18-Jan,Google,NetApp Netline Test,,1,,0,no,1/15/18,1/31/18,MN,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\n" +
"Completed,Paid,6279,18-Feb,Google,Unitrends,IT Staff+ 100-5K,1,,16,no,2/1/18,4/30/18,MN,0,0,1380,1500,1502,0,108,100,4500,4483,17,99,49,34,61,102\n" +
"Completed,Paid,6279,18-Mar,Google,Unitrends,IT Staff+ 100-5K,1,,16,no,2/1/18,4/30/18,MN,0,0,1524,1492,1508,32,97,101,4500,4483,17,99,49,34,61,102\n" +
"Completed,Paid,6279,18-Apr,Google,Unitrends,IT Staff+ 100-5K,1,,16,no,2/1/18,4/30/18,MN,0,0,1490,1491,1490,0,100,99,4500,4483,17,99,49,34,61,102\n" +
"Completed,Paid,6280,18-Jan,Google,NEC_xxx_xxx,\"790 - 41672 - 100 Leads\n" +
"791 - 41671 - 100 Leads\n" +
"US, Selected IT staff\",1,27057,13.3,no,1/16/18,1/31/18,MN,0,0,200,207,200,0,103,96,200,207,0,103,1,0,8,108\n" +
"Completed,Paid,6281,18-Jan,Google,Microsoft_787_41704,\"Germany, IT Mgr+, 50+, \n" +
"1 CQ - Answer must be True\",1,27086,30.61,no,1/16/18,1/31/18,MN,0,0,248,251,248,0,101,98,248,251,0,101,0,12,2,106\n" +
"Completed,Paid,6282,18-Jan,Google,Datavai ABMl_,\"791 - 41695\n" +
"790 - 41872\n" +
"ABM, Finance Mgr-Dir, Hyperion Mgr-Dir\",1,27078,24.7,no,1/16/18,1/31/18,MN,0,0,70,78,76,0,111,97,70,78,0,111,0,4,1,118\n" +
"Completed,Paid,6283,18-Jan,Google,Teredata ABM_791_41697,\"ABM, Business - IT and ExecMgmt Dir+\",1,23681,25.84,no,1/16/18,1/23/18,MN,0,0,7,10,7,0,142,70,7,10,0,142,0,0,0,142\n" +
"Completed,Pending,6284,18-Jan,Google,Netline Box Test,,1,,0,no,1/17/18,1/31/18,RD,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0\n" +
"Completed,Paid,6285,18-Jan,Google,Georgetown University_791_41707,\"Finance Staff+, 100+, 1 CQ\",1,27127,17.1,no,1/17/18,1/31/18,MN,0,0,200,250,200,0,125,80,200,250,0,125,0,2,9,130\n" +
"Hold,Pending,6286,18-Jan,Google,B2B Leads Corp Test,,1,,0,no,1/17/18,1/31/18,MN,0,0,50,0,0,50,0,0,50,0,50,0,0,0,0,0\n" +
"Completed,Paid,6288,18-Jan,Google,Sparton_791_41722,\"US, R&D Mgr-VP, 500+\",1,27141,19,no,1/17/18,1/26/18,MN,0,0,74,76,74,0,102,97,74,76,0,102,0,0,0,102\n" +
"Completed,Paid,6289,18-Jan,Google,One Medical_791_41717,\"HR Staff-VP, ExecMgmt Dir-VP, 251-2500, Select States\",1,26586,19,no,1/18/18,1/31/18,MN,0,0,34,37,34,0,108,91,34,37,0,108,0,0,72,320\n" +
"Hold,Pending,5994,18-Jan,Google,Valasys Test Campaign,\"US, Exec, IT Mgr+, selected staff\",1,26071,0,no,1/18/18,1/31/18,MN,0,0,40,0,0,40,0,0,40,0,40,0,0,0,18,45\n" +
"Completed,Paid,6290,18-Jan,Google,IBM_787_41755,IT Mgr+ 1k+,1,27120,10.02,no,1/18/18,1/26/18,MN,0,0,140,142,140,0,101,98,140,142,0,101,0,0,45,133\n" +
"Completed,Paid,6291,18-Jan,Google,Dell_787_41774,\"Germany, IT, Mgr, Dir, C-Level\",1,27032,30.69,no,1/19/18,1/26/18,MN,0,0,38,39,38,0,102,97,38,39,0,102,0,0,0,102\n" 
;

// Remove embedded CR/LF characters from every field that rx matched, so each
// CSV record occupies exactly one physical line.
var csv_new = csv.replace(rx, function (field) {
  return field.replace(/\r?\n/g, '');
});

console.log("Fixed CSV text: ");
console.log(csv_new);
  • Thanks for you response. Unfortunately, the sample CSV file did not convert properly when I posted it. Please use the sample that was added to my original post. Thanks. – ob1 Aug 19 '19 at 15:50
  • The following better describes the problem: The following is part of one record and should be displayed on one line: "Description Line 1 Description Line 2 Description Line 3", The entire field is wrapped in single quotes, but the parser doesn't ignore the 'hidden' line returns between "Description Line 1 Description Line 2 Description Line 3". I think the field needs to be wrapped in double quotes (e.g: ""Description Line 1 Description Line 2 Description Line 3"") for the parser to work properly. – ob1 Aug 19 '19 at 15:50