I have a very large DataTable, ~4 million rows.
I need to calculate columns in the table, and processing an entire column in one method (Go1) is faster than Go2, where I loop through the rows and call a method for each row.
I need to use the Go2 approach, because later I will add more rows to the table and update all the columns.
But why is the Go2 approach slower - is it just the overhead of calling ProcessRow() for every row?
Is there a workaround? (One idea I had is sketched after the code below.)
public static void AddSignal()
{
    foreach (DataRow row in Data.Rows)
    {
        row[x] = (invertSignal ? -1 : 1) * Math.Sign(row.Field<double>(y) - row.Field<double>(y));
    }
}
public class ByRowAddSignal
{
    DataRow row;

    public ByRowAddSignal()
    {
    }

    public void ProcessRow(int r)
    {
        row = Data.Rows[r];
        row[x] = (invertSignal ? -1 : 1) * Math.Sign(row.Field<double>(y) - row.Field<double>(y));
    }
}
public static DataTable Data;
public void Go1()
{
    Data = LoadData();
    AddSignal();
}

public void Go2()
{
    Data = LoadData();
    ByRowAddSignal byRowAddSignal = new ByRowAddSignal();
    for (int r = 0; r < Data.Rows.Count; r++)
    {
        byRowAddSignal.ProcessRow(r);
    }
}
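One idea I had (not sure it removes enough of the overhead) is to pass the DataRow itself to ProcessRow instead of a row index, so each call skips the Data.Rows[r] lookup, and to cache DataColumn references so there is no per-row column lookup either. A rough sketch, assuming x and y are column names (the cached xCol/yCol fields are hypothetical names I introduced):

public class ByRowAddSignal
{
    // Hypothetical cached column references to avoid looking the columns up per row.
    private readonly DataColumn xCol;
    private readonly DataColumn yCol;

    public ByRowAddSignal(DataTable table)
    {
        xCol = table.Columns[x];
        yCol = table.Columns[y];
    }

    public void ProcessRow(DataRow row)
    {
        // Same calculation as above, but using the cached DataColumn objects
        // and the row that was passed in, instead of indexing Data.Rows.
        row[xCol] = (invertSignal ? -1 : 1) *
                    Math.Sign(row.Field<double>(yCol) - row.Field<double>(yCol));
    }
}

public void Go2()
{
    Data = LoadData();
    var byRowAddSignal = new ByRowAddSignal(Data);
    foreach (DataRow row in Data.Rows)   // enumerate rows the same way Go1 does
    {
        byRowAddSignal.ProcessRow(row);
    }
}

This keeps the per-row method call, so if the slowdown is mostly the call overhead itself it won't help much, but it at least makes the per-row work the same as in Go1.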