
I am learning C# these days and need some help in judging the performance of my code. I have to read a file and extract some details from it.

The file has 4 columns:

ID, dob, size, accountno.

Problem: I have to read every line and insert the values into a database, and there are more than 50,000 entries per day.

Solution I tried: I created a class with 4 properties (ID, dob, size, accountno), then iterated through the file, converted each line of data into an object, and kept adding the objects to an ArrayList. So now I have an ArrayList with 50,000 objects, which I iterate through at the end to insert the details into the database.

Is this the correct approach?

Experts, please help.

Code:

using System;
using System.Collections;
using System.IO;
using System.Linq;

namespace testing
{
    class Program
    {
        static void Main(string[] args)
        {
            string timestamp = DateTime.Now.ToString("yyyyMMddHHmmss");
            string InputDirectory = @"My Documents\2015";
            string FileMask = "comb*.txt";

            ArrayList al = new ArrayList();

            string line;
            var Files = Directory.GetFiles(InputDirectory, FileMask, SearchOption.AllDirectories)
                                 .Select(f => Path.GetFullPath(f));
            foreach (var f in Files)
            {
                using (StreamReader reader = new StreamReader(f))
                {
                    while ((line = reader.ReadLine()) != null)
                    {
                        Datamodel dm = new Datamodel();

                        // Split the comma-separated line and trim each field.
                        string[] values = line.Split(',').Select(sValue => sValue.Trim()).ToArray();
                        dm.ID = values[0];
                        dm.dob = dm.ID.Remove(0, 4);   // dob is derived from the ID field
                        dm.size = values[1];
                        dm.accountno = values[2];

                        al.Add(dm);
                    }
                    // No explicit reader.Close() needed: the using block disposes the reader.
                }
            }
            utilityClass.Insert_Entry(al);
        }
    }
}
  • Here's a relevant article about using `SqlBulkCopy` to insert a large amount of data: http://www.codeproject.com/Tips/309564/SQL-Bulk-copy-method-to-insert-large-amount-of-dat – cbr Mar 19 '15 at 12:14
  • ArrayList is outdated, and _how_ you are reading the file might make a difference. But right now there is no real question here. You are asking for a code review without providing any code. – H H Mar 19 '15 at 12:22
  • What format is the file? CSV, XML, JSON, fixed width? – Bernd Linde Mar 19 '15 at 12:22
  • @HenkHolterman: Sir, I intend to ask for the correct approach here. I am reading the file using a StreamReader, and I was worried about the many objects that will get created. I will post the code as well if you wish to have a look. – vish1990 Mar 19 '15 at 12:26
  • @BerndLinde – the format of the file is .txt. I will share the code as well. – vish1990 Mar 19 '15 at 12:26
  • .txt is the extension of the file; I am asking about the format of the data inside the file :) If you can, show a small subsection of the content; this will help with determining the right way. – Bernd Linde Mar 19 '15 at 12:29
  • @BerndLinde: the actual content is "AKCHXPCIVSk21,20150330120045,79849458749954,8456720" – all plain text. – vish1990 Mar 19 '15 at 12:33
  • @HenkHolterman – Sir, I have copied in the code. Thanks. – vish1990 Mar 19 '15 at 12:58
  • See http://stackoverflow.com/questions/16606753/ . Load the info into a DataSet, link the DataSet to your table, and let the underlying components handle the saving to the DB. It might also be worthwhile to load the file segmentally. – Bernd Linde Mar 19 '15 at 13:01
  • Right. Note that code reviews are off topic here, but maybe you have a concrete problem? The code is kind of OK, but you may have trouble inserting thousands of records into the DB at once. Consider batching it, maybe per input file. – H H Mar 19 '15 at 13:32
  • @HenkHolterman – Thanks, Sir. I was worried about creating too many objects. I will try SqlBulkCopy as suggested above and batch the records as per your suggestion. – vish1990 Mar 19 '15 at 13:35
  • Think about what happens when something goes wrong. Do you have a way to know which files (batches) were already processed successfully? – H H Mar 19 '15 at 13:41
  • @HenkHolterman – Yes Sir, I have done exception handling and thought of a way in case such a situation comes up. Grateful for your time. – vish1990 Mar 19 '15 at 13:49

2 Answers


My solution, thanks to all of the comments above:

using System;
using System.Data;
using System.IO;
using System.Linq;

namespace Test
{
    class Program
    {
        static void Main(string[] args)
        {
            string timestamp = DateTime.Now.ToString("yyyyMMddHHmmss");
            string InputDirectory = @"My Documents\2015";
            string FileMask = "comb*.txt";

            try
            {
                string line = null;
                var Files = Directory.GetFiles(InputDirectory, FileMask, SearchOption.AllDirectories)
                                     .Select(f => Path.GetFullPath(f));
                foreach (var f in Files)
                {
                    // One DataTable per input file, so each file can be bulk-copied (and batched) on its own.
                    DataTable table = new DataTable();
                    table.TableName = f;
                    table.Columns.Add("ID", typeof(Int64));
                    table.Columns.Add("dob", typeof(string));
                    table.Columns.Add("size", typeof(string));
                    table.Columns.Add("accountno", typeof(string));

                    using (StreamReader reader = new StreamReader(f))
                    {
                        while ((line = reader.ReadLine()) != null)
                        {
                            string[] values = line.Split(',').Select(sValue => sValue.Trim()).ToArray();
                            long ID = Convert.ToInt64(values[0]);

                            table.Rows.Add(ID, values[1], values[2], values[3]);
                        }
                    }
                    utilityClass.Insert_Entry(table, env);   // env: environment name, defined elsewhere
                }
            }
            catch (Exception e)
            {
                CustomException.Write(CustomException.CreateExceptionString(e));
            }
        }
    }
}

Insert_Entry

// Inside utilityClass; 'mfsentdata' is the DataTable built above.
public static void Insert_Entry(DataTable mfsentdata, string environ)
{
    using (SqlConnection con = new SqlConnection(utilityClass.GetConnectionString(environ)))
    {
        con.Open();
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(con))
        {
            bulkCopy.DestinationTableName = "dbo.inserttablename";
            try
            {
                bulkCopy.WriteToServer(mfsentdata);
            }
            catch (SqlException e)
            {
                CustomException.Write(CustomException.CreateExceptionString(e, mfsentdata.TableName));
            }
        }
        // No explicit con.Close() needed: the using block disposes the connection.
    }
}
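
If the per-file tables grow large, `SqlBulkCopy` can also batch the copy internally and map columns explicitly rather than by ordinal position. A minimal sketch of those options, assuming the same `con` and `mfsentdata` as above (the batch size and the destination column names are placeholders):

using (SqlBulkCopy bulkCopy = new SqlBulkCopy(con))
{
    bulkCopy.DestinationTableName = "dbo.inserttablename";
    bulkCopy.BatchSize = 5000;       // rows are sent to the server in chunks of 5,000
    bulkCopy.BulkCopyTimeout = 60;   // seconds before the copy operation times out

    // Explicit mappings from source (DataTable) columns to destination columns.
    bulkCopy.ColumnMappings.Add("ID", "ID");
    bulkCopy.ColumnMappings.Add("dob", "dob");
    bulkCopy.ColumnMappings.Add("size", "size");
    bulkCopy.ColumnMappings.Add("accountno", "accountno");

    bulkCopy.WriteToServer(mfsentdata);
}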

For additional SQL performance, look into transactions. Note that `BeginTransaction` returns a `SqlTransaction`, and `Commit` is called on the transaction, not the connection:

SqlTransaction transaction = connection.BeginTransaction();
// bulk insert commands here, enlisted in the transaction
transaction.Commit();
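
By default `SqlBulkCopy` does not enlist in your transaction; to make the bulk copy commit or roll back as a unit, pass the transaction to its constructor. A minimal sketch, assuming a `connectionString` and the DataTable `table` built in the answer above:

using (SqlConnection connection = new SqlConnection(connectionString))
{
    connection.Open();
    using (SqlTransaction transaction = connection.BeginTransaction())
    {
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.Default, transaction))
        {
            bulkCopy.DestinationTableName = "dbo.inserttablename";
            bulkCopy.WriteToServer(table);
        }
        transaction.Commit();   // rows are kept only if every batch succeeded
        // If an exception is thrown before Commit, disposing the transaction rolls it back.
    }
}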