EDIT: Instead of assuming this is a duplicate, please check and verify for yourself. Immediately downvoting and marking it as a duplicate doesn't help. The answers in the linked question don't address what I'm doing here, and I have already tried them.
The only difference was that the OOM now happens at around 800k records instead of 700k, so that suggestion did not help.
I am trying to retrieve a very large number of rows from a database via a stored procedure, and I keep getting an OutOfMemoryException on reader.Read() once the list of my class objects reaches a certain size:
SqlCommand cmd = new SqlCommand("usp_GetTonofData", connection);
cmd.CommandType = System.Data.CommandType.StoredProcedure;
SqlCommandBuilder.DeriveParameters(cmd);
cmd.Parameters[1].Value = "Blah";
cmd.Parameters[2].Value = "foo";
cmd.CommandTimeout = 1000;
SqlDataReader reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess);
while (reader.Read())
{
    object[] values = new object[reader.FieldCount];
    reader.GetValues(values);

    // OOM Exception occurs here, around the 700k record mark
    var category = new DataClass()
    {
        Prop1 = values[0].ToString(),
        Prop2 = Convert.ToDateTime(values[1]).ToString("MM/dd/yyyy"),
        Prop3 = Convert.ToDateTime(values[2]).ToString("MM/dd/yyyy"),
        Prop4 = Convert.ToDateTime(values[3]).ToString("MM/dd/yyyy"),
        Prop5 = values[4].ToString(),
        Prop6 = values[5].ToString(),
        Prop7 = values[28].ToString(),
        Prop8 = values[29].ToString(),
        Prop9 = values[30].ToString(),
        Prop10 = values[31].ToString(),
        Prop11 = values[32].ToString(),
        Prop12 = values[33].ToString(),
    };
    storedProcedureList.Add(category);
}
connection.Close();
connection.Dispose();
} // closes the enclosing block (its opening brace is not shown in this snippet)
return storedProcedureList;
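Is streaming the rows to the caller instead of buffering them all in storedProcedureList the only way around this? Something like this rough sketch is what I have in mind (the method name GetTonOfData and the IEnumerable&lt;DataClass&gt; return type are just placeholders; it assumes the caller can be changed to consume rows one at a time instead of needing the full List&lt;DataClass&gt;):

// Sketch only: stream rows to the caller instead of building storedProcedureList.
// Assumes the consumer can process each row as it arrives.
private IEnumerable<DataClass> GetTonOfData(SqlConnection connection)
{
    using (SqlCommand cmd = new SqlCommand("usp_GetTonofData", connection))
    {
        cmd.CommandType = System.Data.CommandType.StoredProcedure;
        SqlCommandBuilder.DeriveParameters(cmd);
        cmd.Parameters[1].Value = "Blah";
        cmd.Parameters[2].Value = "foo";
        cmd.CommandTimeout = 1000;

        using (SqlDataReader reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess))
        {
            while (reader.Read())
            {
                object[] values = new object[reader.FieldCount];
                reader.GetValues(values);

                // Each row is handed to the caller as soon as it is read,
                // so only one DataClass instance at a time is created here.
                yield return new DataClass()
                {
                    Prop1 = values[0].ToString(),
                    Prop2 = Convert.ToDateTime(values[1]).ToString("MM/dd/yyyy"),
                    // ...same mapping as in the loop above for the remaining properties
                };
            }
        }
    }
}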