0

I have a C# application (ASP.NET MVC) that allows a user to upload an Excel sheet containing an account number key and amount value to be passed as parameters to a stored procedure that then validates those records. It then returns an error or warning if any are found.

My issue is that the BI team implemented a stored procedure which I cannot modify to make it accept a table type as explained in this post

table type array

The issue is that I cannot modify the stored procedure to accept an table type, so I need to optimize the stored procedure call in C# code. The performance is acceptable if there are about 30 to 50 lines the stored procedure needs to validate, but once there are more record e.g a 100 or more the application is slow, is there a way in C# that I can improve the performance of this code, please see my implementation below.

using (SqlConnection conn = new SqlConnection(sqlConnection))
{
    try
    {
        foreach (var claim in supplierClaimsData)
        {
            SqlCommand cmd = new SqlCommand();
            cmd.CommandTimeout = 60;

            SqlDataReader reader;

            cmd.Connection = conn;
            cmd.CommandText = "CRM.Supplier_Claim_Upload";
            cmd.CommandType = CommandType.StoredProcedure;

            cmd.Parameters.Add("@Invoice", SqlDbType.NVarChar).Value = claim.Line_Number;
            cmd.Parameters.Add("@Amount", SqlDbType.Decimal).Value = claim.Total_Claim;

            conn.Open();

            reader = cmd.ExecuteReader();

            while (reader.Read())
            {
                claim.Error_1 = reader.GetString(reader.GetOrdinal("Error1"));

                string lineNumberDoesNotExist = "Error: Invoice line number does not exist";

                if (claim.Error_1.StartsWith(lineNumberDoesNotExist))
                {
                    continue;
                }

                claim.Warning = reader.GetString(reader.GetOrdinal("Warning"));
                claim.Error_2 = reader.GetString(reader.GetOrdinal("Error2"));
            }

            conn.Close();
        }

        foreach (CleanSupplierClaim saveToDBClaim in supplierClaimsData)
        {
            db.CleanSupplierClaims.Attach(saveToDBClaim);

            var entry = db.Entry(saveToDBClaim);
            entry.Property(aa => aa.Line_Number).IsModified = true;
            entry.Property(aa => aa.Total_Claim).IsModified = true;
            entry.Property(aa => aa.Currency).IsModified = true;
            entry.Property(aa => aa.ClaimReference).IsModified = true;
            entry.Property(aa => aa.Action).IsModified = true;
            entry.Property(aa => aa.Domain_Username).IsModified = true;
            entry.Property(aa => aa.Error_1).IsModified = true;
            entry.Property(aa => aa.Error_2).IsModified = true;
            entry.Property(aa => aa.Warning).IsModified = true;
            entry.Property(aa => aa.ImportFlag).IsModified = true;
            entry.Property(aa => aa.ReadyForImport).IsModified = true;

            db.Entry(saveToDBClaim).State = System.Data.Entity.EntityState.Modified;

            db.SaveChanges();
        }
    }
}

I would really appreciate your input in this regard.

Community
  • 1
  • 1
Papi
  • 555
  • 13
  • 40
  • This question is better suited for [Code Review](http://codereview.stackexchange.com/) – Luis Lavieri Sep 06 '16 at 14:47
  • 1
    Look at running the records using Parallel.ForEach – ChrisBint Sep 06 '16 at 14:48
  • 3
    You don't need to open and close the connection every time you loop. – itsme86 Sep 06 '16 at 14:49
  • Make sure you wrap your Command and Reader objects in "using" blocks so they're disposed of promptly. According to the documentation, this is particularly important for the SqlDataReader class: https://msdn.microsoft.com/en-us/library/system.data.sqlclient.sqldatareader.close(v=vs.110).aspx – EJoshuaS - Stand with Ukraine Sep 06 '16 at 15:01

0 Answers0