If the file is very large, or you want to chunk it in different sizes, you might prefer something more generic:
/// <summary>
/// Lazily reads a delimited file and yields its values in chunks of at most
/// <paramref name="chunkSize"/> items each.
/// </summary>
/// <param name="fileName">Path of the file to read.</param>
/// <param name="separators">Characters that delimit individual values.</param>
/// <param name="chunkSize">Maximum number of values per chunk.</param>
/// <returns>A lazy sequence of chunks; the final chunk may be shorter.</returns>
IEnumerable<IEnumerable<string>> ReadFileInChunks(
    string fileName,
    char[] separators,
    int chunkSize)
{
    string[] bucket = null;
    var count = 0;
    // Fixed: original wrote "sperators", an undefined identifier.
    foreach (var item in SplitFile(fileName, separators))
    {
        if (bucket == null)
        {
            bucket = new string[chunkSize]; // Fixed: missing semicolon.
        }
        bucket[count++] = item;
        // Fixed: original compared against undefined "size" instead of chunkSize.
        if (count != chunkSize)
        {
            continue;
        }
        yield return bucket;
        bucket = null;
        count = 0;
    }
    // Emit the final, partially filled chunk, if any.
    // (Alternatively, throw here if a short trailing chunk should be an error.)
    if (bucket != null)
    {
        yield return bucket.Take(count);
    }
}
/// <summary>
/// Lazily yields the values of the file at <paramref name="FileName"/>,
/// splitting on any of <paramref name="separators"/>. Reads one character at
/// a time, so memory use is independent of file size.
/// </summary>
/// <param name="FileName">Path of the file to read.</param>
/// <param name="separators">Characters that delimit individual values.</param>
/// <returns>A lazy sequence of the delimited values.</returns>
private IEnumerable<string> SplitFile(
    string FileName,
    char[] separators)
{
    // Fixed: original wrote "seperators" and was missing a closing parenthesis.
    var check = new HashSet<int>(separators.Select(c => (int)c));
    var buffer = new StringBuilder();
    using (var reader = new StreamReader(FileName))
    {
        var next = reader.Read();
        while (next != -1)
        {
            if (check.Contains(next))
            {
                yield return buffer.ToString();
                buffer.Clear();
                // Fixed: the original 'continue'd without reading the next
                // character, so the first separator looped forever.
                next = reader.Read();
                continue;
            }
            buffer.Append((char)next);
            next = reader.Read();
        }
    }
    // Trailing value with no terminating separator.
    if (buffer.Length > 0)
    {
        yield return buffer.ToString();
    }
}
This reads your file one character at a time, which is good if the file is large and does no harm if it isn't. It lazily yields the values in chunks of the size you specify:
// Example: consume the file as chunks of 8 pipe-separated values.
foreach (var chunk in ReadFileInChunks(FileName, new[] { '|' }, 8))
{
    foreach (var value in chunk)
    {
        // ...
    }
}
Or, if you really want to re-join the values:
// Example: re-join each chunk back into a single pipe-delimited string.
var results = ReadFileInChunks(FileName, new[] { '|' }, 8)
    .Select(chunk => string.Join("|", chunk));