I have around 400,000 records coming from a file, but after inserting roughly 30,000–33,000 of them the process throws a 'System.OutOfMemoryException'.
My system has 8 GB of RAM, which I believe should be enough for this. I am inserting the records in batches of 500 at a time using this code:
this._context.AutoDetectChangesEnabled = false;
if(Counter % 500 == 0)
_context.SaveChanges();
I have tried every change I could think of, done a lot of research, and gone through the related solutions on Stack Overflow, but nothing has helped.
I am using nopCommerce, so the context is obtained through:
private readonly IDbContext context;
Please let me know if anything is unclear!
public partial class EfRepository<T> : IRepository<T> where T : BaseEntity
{
    private IDbContext _context;
    private IDbSet<T> _entities;

    /// <summary>
    /// Gets the entity set for <typeparamref name="T"/>, resolving it from the
    /// context on first access and caching it thereafter.
    /// </summary>
    protected virtual IDbSet<T> Entities
    {
        get
        {
            if (_entities == null)
                _entities = _context.Set<T>();
            return _entities;
        }
    }

    /// <summary>
    /// Inserts a sequence of entities into the underlying set and saves them.
    /// </summary>
    /// <param name="entities">The entities to insert. Must not be null.</param>
    /// <param name="enableTrackChanges">
    /// When false, automatic change detection is suspended for the duration of the
    /// insert (a large speed-up for bulk loads) and restored in the finally block.
    /// </param>
    /// <param name="batchSize">
    /// When greater than zero, <c>SaveChanges()</c> is called after every
    /// <paramref name="batchSize"/> entities instead of once at the end. This bounds
    /// the number of pending unsaved entities held in memory and avoids the
    /// OutOfMemoryException that occurs when hundreds of thousands of adds pile up
    /// before a single SaveChanges call. The default (0) preserves the original
    /// single-save behavior for existing callers.
    /// NOTE(review): with EF6 the context still *tracks* entities after they are
    /// saved; for very large imports, also dispose and recreate the context/repository
    /// between batches — confirm against how the caller scopes this repository.
    /// </param>
    /// <exception cref="ArgumentNullException">Thrown when entities is null.</exception>
    /// <exception cref="Exception">
    /// Wraps DbEntityValidationException with the aggregated validation message text.
    /// </exception>
    public virtual void Insert(IEnumerable<T> entities, bool enableTrackChanges = true, int batchSize = 0)
    {
        try
        {
            if (entities == null)
                throw new ArgumentNullException("entities");

            if (!enableTrackChanges)
            {
                this._context.AutoDetectChangesEnabled = false;
            }

            int pending = 0;
            foreach (var entity in entities)
            {
                this.Entities.Add(entity);

                // Flush periodically so the unit of work never holds an unbounded
                // number of unsaved entities in memory.
                if (batchSize > 0 && ++pending % batchSize == 0)
                    this._context.SaveChanges();
            }

            // Final save covers the last partial batch (or everything when batchSize == 0).
            this._context.SaveChanges();
        }
        catch (DbEntityValidationException dbEx)
        {
            // Preserve the original contract: surface validation failures as a plain
            // Exception carrying the full validation text for the caller/logs.
            throw new Exception(GetFullErrorText(dbEx), dbEx);
        }
        finally
        {
            // Always restore change detection so subsequent operations on this
            // context behave normally, even if SaveChanges threw.
            if (!this._context.AutoDetectChangesEnabled)
                this._context.AutoDetectChangesEnabled = true;
        }
    }
}