/// <summary>
/// Adds one file entry to an existing zip archive and commits the update.
/// NOTE(review): this adds the *first* file found in the folder containing
/// <paramref name="entryFile"/>, not <paramref name="entryFile"/> itself —
/// confirm that is the intended behavior.
/// </summary>
/// <param name="zipFileName">Path of the zip archive to update.</param>
/// <param name="entryFile">A file whose containing folder supplies the entry to add.</param>
/// <exception cref="ArgumentException">If <paramref name="entryFile"/> has no containing folder.</exception>
private void UpdateZipEntry(string zipFileName, string entryFile)
{
    string folderPath = Path.GetDirectoryName(entryFile);
    if (string.IsNullOrEmpty(folderPath))
    {
        throw new ArgumentException("Entry file has no containing folder.", nameof(entryFile));
    }

    string fileName = Directory.GetFiles(folderPath).FirstOrDefault();
    if (fileName == null)
    {
        // Empty folder: nothing to add. The original fell through to
        // ZipFile.Add(null, ...), which throws ArgumentNullException.
        return;
    }

    // 'using' guarantees the archive is closed even when BeginUpdate/Add/
    // CommitUpdate throws; the original leaked the ZipFile handle on failure.
    using (SharpZip.ZipFile zipFile = new SharpZip.ZipFile(zipFileName))
    {
        // Strip the folder prefix so the entry name is relative to folderPath.
        zipFile.NameTransform = new ZipNameTransform(folderPath);
        zipFile.BeginUpdate();
        zipFile.Add(fileName, SharpZip.CompressionMethod.Deflated);
        zipFile.CommitUpdate();
    }
}
Hi,
For small file sources (500-1000 files)
zipFile.Add(fileName, SharpZip.CompressionMethod.Deflated); takes little time. But for large sources, e.g. 5,000 to 50,000 files, processing takes ages — approximately 6 hours.
I can use the approach below instead, but zipStream.Write() causes a memory leak.
// Alternative streaming approach (fragment quoted in the question).
// NOTE(review): neither zipStream nor fStream is ever disposed here, and
// zipStream.Finish()/Close() is never called — as shown, the fragment holds
// the file handle and internal compression buffers open, which is likely the
// "memory leak" the author reports. Wrap both streams in 'using' and close
// the zip stream after the last entry.
ZipOutputStream zipStream=null;
FileStream fStream = new FileStream(ZipFilename, FileMode.Create, FileAccess.Write);
zipStream = new ZipOutputStream(fStream);
var entry = new ZipEntry(entryName);
zipStream.PutNextEntry(entry);
// Writes the whole in-memory document buffer into the current entry.
zipStream.Write(documentBytes, 0, documentBytes.Length);
zipStream.CloseEntry();
Is there any way to increase the processing speed for large sets of files?
Thanks,
Ravichandran