
I have a CSV file which contains the following:

ProductName,EmployeeID,EmployeeName,ContactNo,Address
iPad,1233,Tom,89897898,34 Pitt st
iPad,1573,Jack,8978 9689,50 George st
iPad,1893,Peter,8878 8989,32 Martin st 

The following code inserts into one table. What I am trying to achieve is to insert into two tables:

Product table (parent table)
ProductId(Pk), ProductName

Employee Table (child table)
EmployeeId(Pk), ProductId(fk), EmployeeName, ContactNo, Address

So, from the CSV file, I basically need to insert the record into the Product table first and then into the Employee table.
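
In other words, for each CSV row what I am after is roughly this (just a sketch of the intent, using the tables above; dr stands for one parsed CSV row and connString for my connection string):

Guid productId = Guid.NewGuid();

using (SqlConnection conn = new SqlConnection(connString))
{
    conn.Open();

    // parent row first
    // (still need to handle the case where the product already exists)
    using (SqlCommand cmd = new SqlCommand(
        "INSERT INTO Product (ProductId, ProductName) VALUES (@ProductId, @ProductName)", conn))
    {
        cmd.Parameters.AddWithValue("@ProductId", productId);
        cmd.Parameters.AddWithValue("@ProductName", dr[0]);   // ProductName from the CSV
        cmd.ExecuteNonQuery();
    }

    // then the child row, linked by the same ProductId
    using (SqlCommand cmd = new SqlCommand(
        "INSERT INTO Employee (EmployeeId, ProductId, EmployeeName, ContactNo, Address) " +
        "VALUES (@EmployeeId, @ProductId, @EmployeeName, @ContactNo, @Address)", conn))
    {
        cmd.Parameters.AddWithValue("@EmployeeId", dr[1]);
        cmd.Parameters.AddWithValue("@ProductId", productId);
        cmd.Parameters.AddWithValue("@EmployeeName", dr[2]);
        cmd.Parameters.AddWithValue("@ContactNo", dr[3]);
        cmd.Parameters.AddWithValue("@Address", dr[4]);
        cmd.ExecuteNonQuery();
    }
}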

Controller.cs

[HttpPost]
public ActionResult Index(HttpPostedFileBase FileUpload)
{
    // Set up DataTable place holder 

    Guid ProductId = Guid.NewGuid();
    using (SqlConnection conn = new SqlConnection(connString))
    {
        conn.Open();

        using (SqlCommand cmd = new SqlCommand(
               "INSERT INTO Product VALUES(" + "@ReferralListID,  @ProductName)", conn))
        {
            //Note: the product name still needs to be read from the CSV file
            cmd.Parameters.AddWithValue("@ProductId", ProductId);
            cmd.Parameters.AddWithValue("@ProductName", ProductName); 

            int rows = cmd.ExecuteNonQuery();

            //rows = number of records inserted
        }
    }

    DataTable dt = new DataTable();

    //check we have a file 
    if (FileUpload.ContentLength > 0)
    {
        //Work out our file path
        string fileName = Path.GetFileName(FileUpload.FileName);
        string path = Path.Combine(Server.MapPath("~/App_Data/uploads"), fileName);

        //Try and upload
        try
        {
            FileUpload.SaveAs(path);
            //Process the CSV file and capture the results to our DataTable place holder
            dt = ProcessCSV(path);

            //Process the DataTable and capture the results to our SQL Bulk copy
            ViewData["Feedback"] = ProcessBulkCopy(dt);
        }
        catch (Exception ex)
        {
            //Catch errors
            ViewData["Feedback"] = ex.Message;
        }
    }
    else
    {
        //Catch errors
        ViewData["Feedback"] = "Please select a file";
    }

    //Tidy up
    dt.Dispose();

    return View("Index", ViewData["Feedback"]);
}

/// <summary>
/// Process the supplied file and load the CSV into a dynamic DataTable
/// </summary>
/// <param name="fileName">String</param>
/// <returns>DataTable</returns>
private static DataTable ProcessCSV(string fileName)
{
    //Set up our variables 
    string Feedback = string.Empty;
    string line = string.Empty;
    string[] strArray;  
    DataTable dt = new DataTable();
    DataRow row;

    // Work out where to split on a comma, but not inside a quoted value
    Regex r = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");

    //Set the filename in to our stream
    StreamReader sr = new StreamReader(fileName);

    //Read the first line and split it at each comma with our regular expression into an array
    line = sr.ReadLine();
    strArray = r.Split(line);

    //For each item in the split array, dynamically build our data columns. Saves us having to worry about it.
    Array.ForEach(strArray, s => dt.Columns.Add(new DataColumn()));


    //Read each line in the CSV file until it's empty
    while ((line = sr.ReadLine()) != null)
    {
        row = dt.NewRow();

        //add our current value to our data row
        row.ItemArray = r.Split(line);
        dt.Rows.Add(row);
    }

    //Tidy the StreamReader up
    sr.Dispose();

    //return the new DataTable
    return dt;
}

/// <summary>
/// Take the DataTable and, using WriteToServer(DataTable), send it all to the database table "Employee" in one go
/// </summary>
/// <param name="dt">DataTable</param>
/// <returns>String</returns>
private static String ProcessBulkCopy(DataTable dt)
{
    string Feedback = string.Empty;
    string connString = ConfigurationManager.ConnectionStrings["DataBaseConnectionString"].ConnectionString;

    //make our connection and dispose at the end    
    using (SqlConnection conn = new SqlConnection(connString))
    {
        //make our command and dispose at the end
        using (var copy = new SqlBulkCopy(conn))
        {
            //Open our connection
            conn.Open();

            //Set the target table and the batch size
            copy.DestinationTableName = "Employee";
            copy.BatchSize = dt.Rows.Count;
            try
            {
                //Send it to the server
                copy.WriteToServer(dt);
                Feedback = "Upload complete";
            }
            catch (Exception ex)
            {
                Feedback = ex.Message;
            }
        }
    }

    return Feedback;
}

View.aspx

<asp:Content ID="Content1" ContentPlaceHolderID="TitleContent" runat="server">
    Home Page
</asp:Content>

<asp:Content ID="Content2" ContentPlaceHolderID="MainContent" runat="server">

    <h2>CSV Bulk Upload</h2> 

    <% using (Html.BeginForm("","",FormMethod.Post, new {enctype="multipart/form-data"})){ %>

        <input type="file" name="FileUpload" />
        <input type="submit" name="Submit" id="Submit" value="Upload" />
    <% } %>

    <p><%= Html.Encode(ViewData["Feedback"]) %></p> 
</asp:Content>

Stored Procedure

USE [BULkDatabase]
GO

SET ANSI_NULLS ON
GO

SET QUOTED_IDENTIFIER OFF
GO
CREATE PROCEDURE [dbo].[InsertProdutInfo] 
(
   @ProductName varchar (50),  
   @EmployeeName varchar (50),
   @EmployeeAddress varchar (50)
)

AS


DECLARE @ProductId uniqueidentifier

BEGIN TRAN

   update [dbo].[Product]
   set    [ProductName] = @ProductName
   where  [ProductName] = @ProductName;

   -- get the product id if the product already exists
   select @ProductId = [ProductId]
   from   [dbo].[Product]
   where  [ProductName] = @ProductName;

   if @@rowcount = 0
   begin
      -- there's no such product, let's create it
      set @ProductId = NEWID();

      insert into [dbo].[Product] ([ProductId], [ProductName])
      values (@ProductId, @ProductName);
   end

   -- now that we know the product exists and we have its id, let's add the rest
   insert into [dbo].[Employee] ([EmployeeId], [EmployeeName], [Address], [ProductId])
   values (NEWID(), @EmployeeName, @EmployeeAddress, @ProductId);

COMMIT TRAN
  • What's your question? Are you asking how to insert data into multiple tables? And given the aspx file, is this really MVC? – Jonathan Wood Dec 20 '12 at 02:35
  • Yes, I am using the MVC 4 framework, where you can use the ASPX or Razor engine. Yes, I need to insert data into multiple tables. As mentioned above, I need to use the information in the CSV file and insert it into the Product and Employee tables. – Spidey Dec 20 '12 at 06:17
  • What is the error you are getting? Is your postback hitting your controller function, and if so, where does it get stuck? – Niraj Dec 20 '12 at 07:02
  • Hi Niraj, there is no issue if I try to insert into the Employee table only, but since I need to insert into the Product table first, I need to find out how I can achieve that. The information that needs to go into the Product table is the product name, with the product id generated automatically. This is the part I have a problem with. – Spidey Dec 20 '12 at 07:06
  • Spidey, I cannot see in your code where you are adding records to the Product table; I have only seen records being added to the Employee table in the ProcessBulkCopy method. You need to add records to the Product table first, unless you have allowed the Employee table to have null in the ProductId column. – Niraj Dec 20 '12 at 09:11
  • Hi Niraj, that is what I am trying to achieve. I need to somehow read only the product name and insert it into the Product table. – Spidey Dec 20 '12 at 23:49
  • I have added the insert statement for the Product table, but I still need to grab the product name from the CSV file. – Spidey Dec 20 '12 at 23:55

1 Answer


First of all, you should decouple the Controller from your database code. Simply create a new class library project and host all database access there, so you can have something like this in your Controller:

[HttpPost]
public ActionResult UploadFile(HttpPostedFileBase FileUpload)
{
    if (FileUpload.ContentLength > 0) {
        // there's a file that needs our attention
        var success = db.UploadProductFile(FileUpload);

        // was everything ok?
        if (success)
            return View("UploadSuccess");
        else
            return View("UploadFail");
    }

    return RedirectToAction("Index", new { error = "Please upload a file..." });
}

public ActionResult Index(string error)
{
    ...
}

This way the controller does not really care what you do with the uploaded file, as it is not the Controller's concern to know such a thing; its job is to delegate that work and process the result, that's it.

Please note that the action method is called UploadFile and not Index. It is not good practice to post to the same action; posting to a different one avoids the form being submitted again when the user refreshes the page.
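
If you want to take that a step further, a post-redirect-get variant could look roughly like this (only a sketch; the UploadSuccess and UploadFail action names are placeholders, not something from your project):

[HttpPost]
public ActionResult UploadFile(HttpPostedFileBase FileUpload)
{
    if (FileUpload == null || FileUpload.ContentLength == 0)
        return RedirectToAction("Index", new { error = "Please upload a file..." });

    // delegate the real work to the database class
    var success = db.UploadProductFile(FileUpload);

    // redirect after the POST so a browser refresh only re-issues a GET
    return RedirectToAction(success ? "UploadSuccess" : "UploadFail");
}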

I also suggest you use the ADO.NET Entity Framework; there are plenty of videos out there, on the ASP.NET website as well, and it will help you work with the database in a simpler and cleaner way.
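
Just as a rough illustration (the ShopContext, Product and Employee entity classes below are assumptions, not part of your project), the parent/child insert could then look like this with an Entity Framework model:

using (var ctx = new ShopContext())
{
    // parent row first
    var product = new Product { ProductId = Guid.NewGuid(), ProductName = "iPad" };
    ctx.Products.Add(product);

    // child row pointing at the parent via the FK
    ctx.Employees.Add(new Employee
    {
        EmployeeId = 1233,
        ProductId = product.ProductId,
        EmployeeName = "Tom",
        ContactNo = "89897898",
        Address = "34 Pitt st"
    });

    // both rows are saved in one call
    ctx.SaveChanges();
}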

Back to your question... Inside your database class, the method UploadProductFile should be something like the code below. Assuming you don't have more than about 200 records to process, it's better to handle the file in memory rather than spend time saving it and reading it again (for more than that, you should save the file and then process it, like you already do):

private bool UploadProductFile(HttpPostedFileBase FileUpload)
{
    System.Data.DataTable dt;

    // get the file stream in a readable way and dispose of the reader when done
    using (var reader = new StreamReader(FileUpload.InputStream))
    {
        // get a DataTable representing the uploaded CSV text
        dt = ProcessCSV(reader.ReadToEnd());
    }

    // for each row, fire the insert; one failed row marks the whole upload as failed
    bool success = true;
    foreach (System.Data.DataRow row in dt.Rows)
        success &= db.InsertProdutInfo(row);

    return success;
}
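
Note that ProcessCSV is being handed the raw CSV text (reader.ReadToEnd()) rather than a file path, so it needs an overload along these lines (only a sketch, reusing the comma-splitting regex from the question):

private static System.Data.DataTable ProcessCSV(string csvText)
{
    var dt = new System.Data.DataTable();

    // split on commas, but not on commas inside quoted values
    var r = new System.Text.RegularExpressions.Regex(
        ",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");

    // break the text into lines, ignoring empty ones
    var lines = csvText.Split(new[] { "\r\n", "\n" },
                              System.StringSplitOptions.RemoveEmptyEntries);
    if (lines.Length == 0)
        return dt;

    // the first line holds the headers; use them as column names
    foreach (var header in r.Split(lines[0]))
        dt.Columns.Add(header.Trim());

    // every remaining line becomes a data row
    for (int i = 1; i < lines.Length; i++)
        dt.Rows.Add(r.Split(lines[i]));

    return dt;
}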

The method InsertProdutInfo would fire a stored procedure that would be something like:

declare @product_key int

begin tran

   update [tbl_products] 
   set    [name] = @product_name, [last_update] = getdate()
   where  [name] = @product_name;

   -- get product id
   select @product_key = [id] 
   from   [tbl_products]
   where  [name] = @product_name;  

   if @@rowcount = 0
   begin
      -- there's no such product, let's create it
      insert into [tbl_products] (name, last_update)
      values (@product_name, getdate());

      select @product_key = SCOPE_IDENTITY()
   end

   -- now that we know we have added the product and have the id, let's add the rest
   insert into [tbl_Employees] (id, product_id, name, contact, address)
   values (@employee_id, @product_key, @employee_name,
           @employee_contact, @employee_address);

commit tran
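
On the C# side, InsertProdutInfo could then fire that stored procedure per row, roughly like this (a sketch; the stored procedure name, connection string name and parameter names are assumed from the snippets above):

private bool InsertProdutInfo(System.Data.DataRow row)
{
    string connString = System.Configuration.ConfigurationManager
        .ConnectionStrings["DataBaseConnectionString"].ConnectionString;

    using (var conn = new System.Data.SqlClient.SqlConnection(connString))
    using (var cmd = new System.Data.SqlClient.SqlCommand("InsertProdutInfo", conn))
    {
        cmd.CommandType = System.Data.CommandType.StoredProcedure;

        // the CSV column order is: ProductName, EmployeeID, EmployeeName, ContactNo, Address
        cmd.Parameters.AddWithValue("@product_name", row[0]);
        cmd.Parameters.AddWithValue("@employee_id", row[1]);
        cmd.Parameters.AddWithValue("@employee_name", row[2]);
        cmd.Parameters.AddWithValue("@employee_contact", row[3]);
        cmd.Parameters.AddWithValue("@employee_address", row[4]);

        conn.Open();

        // any affected rows means the product/employee pair went in
        return cmd.ExecuteNonQuery() > 0;
    }
}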

This way you will have everything you need.
