I'm writing a BlobTrigger Azure Function that takes CSV files from blob storage and inserts them into SQL Server using SqlBulkCopy.
I have about 20 predefined tables in the db.
My problem is that a lot of the tables in the database have date-typed columns. So when I try to insert my files using a DataTable, I get an exception (since the DataTable columns loaded from a CSV all come through typed as strings, I guess).
I have been googling around and all I can find are hard-coded examples using SqlBulkCopy column mappings. Since I have about 20 different files, and there may eventually be more, I really don't want to write a pile of if-statements or create 20 DTOs.
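For context, this is roughly the kind of hard-coded, per-table code I keep finding and want to avoid; the table and column names here are just made-up placeholders:

// One hand-written block like this per table, times 20 (and counting).
using (var sqlBulk = new SqlBulkCopy(connection))
{
    sqlBulk.DestinationTableName = "dbo.Orders";             // placeholder table name
    sqlBulk.ColumnMappings.Add("OrderDate", "OrderDate");    // placeholder date column
    sqlBulk.ColumnMappings.Add("CustomerId", "CustomerId");  // placeholder column
    // ...one mapping per column, repeated for every table/file
    await sqlBulk.WriteToServerAsync(dataTable);
}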
So I'm wondering: is there a clean way to handle this generically, without per-table boilerplate?
Here is my basic BlobTrigger code:
[FunctionName("BlobTriggerFunction")]
public static async Task Run([BlobTrigger("myTrigger", Connection = "AzureWebJobsStorage")] CloudBlockBlob blob, TraceWriter log)
{
    var fileName = blob.Name;
    if (!fileName.EndsWith(".csv", StringComparison.OrdinalIgnoreCase))
    {
        log.Info($"Blob '{blob.Name}' doesn't have the .csv extension. Skipping processing.");
        return;
    }

    // Read the CSV into a DataTable (every column comes through typed as string)
    var dataTable = new DataTable();
    using (var reader = new StreamReader(await blob.OpenReadAsync()))
    using (var csv = new CsvReader(reader))
    using (var dr = new CsvDataReader(csv))
    {
        dataTable.Load(dr);
    }

    // Do some stuff with the data, nothing important
    var data = new PrepareDataTable(dataTable, fileName);

    try
    {
        using (var connection = new SqlConnection(Environment.GetEnvironmentVariable("DWHConnString")))
        {
            await connection.OpenAsync();
            using (var sqlBulk = new SqlBulkCopy(connection))
            {
                sqlBulk.DestinationTableName = data.TableName;
                await sqlBulk.WriteToServerAsync(data.Table);
            }
        }
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }

    // Delete the blob once the import succeeded
    await blob.DeleteAsync();
}
I appreciate any help I can get,
Thanks!