I'm using the following code to write a sequence of 16-bit grayscale images (an empty array for the purpose of this question) to a multi-page TIFF:
// Writes `numberOfPages` blank 16-bit grayscale frames as separate pages
// (IFDs) of a single TIFF file and times the whole write.
//
// NOTE(review) on the timing question: libtiff appends each new directory by
// walking the existing IFD chain from the start of the file, so writing N
// directories is inherently O(N^2). The super-linear growth observed is
// library behavior, not a bug in this loop.
int numberOfPages = 1000;
int width = 256;
int height = 256;
string fileName = "test.tif";
ushort[] image = new ushort[width * height];
byte[] buffer = new byte[width * height * sizeof(ushort)];
Stopwatch stopWatch = new Stopwatch();
using (Tiff output = Tiff.Open(fileName, "w"))
{
    if (output == null)
    {
        return; // Tiff.Open reports its own error; nothing to clean up.
    }
    stopWatch.Start();
    for (int i = 0; i < numberOfPages; i++)
    {
        Buffer.BlockCopy(image, 0, buffer, 0, buffer.Length);
        output.SetField(TiffTag.IMAGEWIDTH, width);
        output.SetField(TiffTag.IMAGELENGTH, height);
        output.SetField(TiffTag.SAMPLESPERPIXEL, 1);
        output.SetField(TiffTag.BITSPERSAMPLE, 16);
        output.SetField(TiffTag.ORIENTATION, Orientation.TOPLEFT);
        output.SetField(TiffTag.XRESOLUTION, 96);
        output.SetField(TiffTag.YRESOLUTION, 96);
        output.SetField(TiffTag.PLANARCONFIG, PlanarConfig.CONTIG);
        output.SetField(TiffTag.PHOTOMETRIC, Photometric.MINISBLACK);
        output.SetField(TiffTag.COMPRESSION, Compression.NONE);
        output.SetField(TiffTag.FILLORDER, FillOrder.MSB2LSB);
        // FIX: without ROWSPERSTRIP libtiff chooses a default strip height
        // (~8 KB worth of rows, i.e. 16 rows here), but the code below writes
        // only strip 0 with the whole frame. Declare one strip per page so the
        // single WriteEncodedStrip call covers the entire image.
        output.SetField(TiffTag.ROWSPERSTRIP, height);
        output.SetField(TiffTag.SUBFILETYPE, FileType.PAGE);
        // FIX: the TIFF 6.0 PageNumber tag is zero-based, so the pages are
        // numbered 0 .. numberOfPages-1 (was i + 1).
        output.SetField(TiffTag.PAGENUMBER, i, numberOfPages);
        // FIX: both calls report failure (negative byte count / false); the
        // original ignored them and could silently produce a truncated file.
        if (output.WriteEncodedStrip(0, buffer, buffer.Length) < 0)
        {
            return;
        }
        if (!output.WriteDirectory())
        {
            return;
        }
    }
    stopWatch.Stop();
}
Debug.WriteLine(stopWatch.ElapsedMilliseconds);
It works fine up to a few hundred pages, but the execution time does not seem to scale linearly with an increasing number of pages. For example:
1000 pages --- 3130 ms
2000 pages --- 11778 ms
3000 pages --- 25830 ms
I also tried using append mode inside the loop but got similar results.
Am I doing this wrong or should I expect this kind of overhead?