The compression itself is done with the GZipStream class, which lives in the System.IO.Compression namespace. We assume that the files to be compressed are large and cannot fit entirely in memory, so the data is read and processed in chunks that flow through a TPL Dataflow pipeline.

The first block of the pipeline is a buffer that receives the chunks read from the input stream:

var buffer = new BufferBlock<byte[]>();
The next block compresses each chunk it receives:

var compressor = new TransformBlock<byte[], byte[]>(bytes => Compress(bytes));
The Compress helper wraps a single chunk in a GZipStream:

private static byte[] Compress(byte[] bytes)
{
    using (var resultStream = new MemoryStream())
    {
        using (var zipStream = new GZipStream(resultStream, CompressionMode.Compress))
        using (var writer = new BinaryWriter(zipStream))
        {
            writer.Write(bytes);
        }

        // ToArray is called only after the GZipStream has been disposed,
        // so the compressed data is fully flushed into the memory stream.
        return resultStream.ToArray();
    }
}
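As a quick sanity check of a single chunk, a small helper can decompress the result and compare it with the original bytes. This Decompress method is not part of the original code; it is only an illustrative counterpart built on the same GZipStream API:

private static byte[] Decompress(byte[] gzipBytes)
{
    using (var input = new MemoryStream(gzipBytes))
    using (var zipStream = new GZipStream(input, CompressionMode.Decompress))
    using (var output = new MemoryStream())
    {
        // Inflate the gzip data back into a plain byte array.
        zipStream.CopyTo(output);
        return output.ToArray();
    }
}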
The last block writes the compressed chunks to the output stream:

var writer = new ActionBlock<byte[]>(bytes => outputStream.Write(bytes, 0, bytes.Length));
The blocks are then linked into a pipeline:

buffer.LinkTo(compressor);
compressor.LinkTo(writer);
LinkTo by itself does not propagate completion, so when one block finishes we call the Complete method of the next one:

buffer.Completion.ContinueWith(task => compressor.Complete());
compressor.Completion.ContinueWith(task => writer.Complete());
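As an aside that is not part of the original code: the same wiring can be done by LinkTo itself, by passing DataflowLinkOptions with PropagateCompletion enabled (assuming the version of the dataflow library in use supports it):

buffer.LinkTo(compressor, new DataflowLinkOptions { PropagateCompletion = true });
compressor.LinkTo(writer, new DataflowLinkOptions { PropagateCompletion = true });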
Data is pushed into the pipeline with the Post method of the block. Post returns false when the block rejects the message, so we retry until it is accepted:

while (!buffer.Post(bytes)) { }
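An alternative to this busy-waiting loop, not used in the article, is the SendAsync extension method, which returns a task that completes once the block either accepts or permanently declines the message. Inside an async method one could write, roughly:

// Waits asynchronously for free capacity instead of spinning.
bool accepted = await buffer.SendAsync(bytes);
if (!accepted)
{
    // The block has completed and will never accept more data.
}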
When the input stream has been read to the end, we complete the first block and wait for the last one to finish:

buffer.Complete();
writer.Completion.Wait();

The whole method now looks like this:
public static void Compress(Stream inputStream, Stream outputStream)
{
    var buffer = new BufferBlock<byte[]>();
    var compressor = new TransformBlock<byte[], byte[]>(bytes => Compress(bytes));
    var writer = new ActionBlock<byte[]>(bytes => outputStream.Write(bytes, 0, bytes.Length));

    buffer.LinkTo(compressor);
    buffer.Completion.ContinueWith(task => compressor.Complete());
    compressor.LinkTo(writer);
    compressor.Completion.ContinueWith(task => writer.Complete());

    var readBuffer = new byte[BufferSize];
    while (true)
    {
        int readCount = inputStream.Read(readBuffer, 0, BufferSize);
        if (readCount > 0)
        {
            var bytes = new byte[readCount];
            Buffer.BlockCopy(readBuffer, 0, bytes, 0, readCount);
            while (!buffer.Post(bytes)) { }
        }
        if (readCount != BufferSize)
        {
            buffer.Complete();
            break;
        }
    }

    writer.Completion.Wait();
}
Compressing different chunks is independent work, so it can be parallelized by setting MaxDegreeOfParallelism on the transform block:

var compressorOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4 };
var compressor = new TransformBlock<byte[], byte[]>(bytes => Compress(bytes), compressorOptions);
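Note that even with several parallel workers a TransformBlock emits its results in the order the inputs arrived, so the compressed chunks are still written to the output file in the right order. In newer versions of the dataflow library this behaviour is exposed explicitly through the EnsureOrdered option (true by default); a sketch, assuming such a version is available:

var compressorOptions = new ExecutionDataflowBlockOptions
{
    MaxDegreeOfParallelism = 4,
    EnsureOrdered = true // the default: outputs keep the order of the inputs
};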
Reading from disk is typically much faster than compressing and writing, so without a limit the buffer would keep growing and could eventually exhaust memory. To introduce backpressure we restrict the queue sizes with the BoundedCapacity property of our blocks; once a block is full, Post returns false, which is exactly why the posting loop above retries. SingleProducerConstrained additionally hints to the writer block that at most one thread at a time will be offering it messages, letting it skip some internal synchronization:

var buffer = new BufferBlock<byte[]>(new DataflowBlockOptions { BoundedCapacity = 100 });

var compressorOptions = new ExecutionDataflowBlockOptions
{
    MaxDegreeOfParallelism = 4,
    BoundedCapacity = 100
};
var compressor = new TransformBlock<byte[], byte[]>(bytes => Compress(bytes), compressorOptions);

var writerOptions = new ExecutionDataflowBlockOptions
{
    BoundedCapacity = 100,
    SingleProducerConstrained = true
};
var writer = new ActionBlock<byte[]>(bytes => outputStream.Write(bytes, 0, bytes.Length), writerOptions);

The final version of the method:
public static void Compress(Stream inputStream, Stream outputStream)
{
    var buffer = new BufferBlock<byte[]>(new DataflowBlockOptions { BoundedCapacity = 100 });

    var compressorOptions = new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = 4,
        BoundedCapacity = 100
    };
    var compressor = new TransformBlock<byte[], byte[]>(bytes => Compress(bytes), compressorOptions);

    var writerOptions = new ExecutionDataflowBlockOptions
    {
        BoundedCapacity = 100,
        SingleProducerConstrained = true
    };
    var writer = new ActionBlock<byte[]>(bytes => outputStream.Write(bytes, 0, bytes.Length), writerOptions);

    buffer.LinkTo(compressor);
    buffer.Completion.ContinueWith(task => compressor.Complete());
    compressor.LinkTo(writer);
    compressor.Completion.ContinueWith(task => writer.Complete());

    var readBuffer = new byte[BufferSize];
    while (true)
    {
        int readCount = inputStream.Read(readBuffer, 0, BufferSize);
        if (readCount > 0)
        {
            var postData = new byte[readCount];
            Buffer.BlockCopy(readBuffer, 0, postData, 0, readCount);
            while (!buffer.Post(postData)) { }
        }
        if (readCount != BufferSize)
        {
            buffer.Complete();
            break;
        }
    }

    writer.Completion.Wait();
}
A small test program reads a file from disk, compresses it into a .gz file, and measures the elapsed time:

private const int BufferSize = 16384;

static void Main(string[] args)
{
    var stopwatch = Stopwatch.StartNew();

    using (var inputStream = File.OpenRead(@"C:\file.bak"))
    using (var outputStream = File.Create(@"E:\file.gz"))
    {
        Compress(inputStream, outputStream);
    }

    stopwatch.Stop();

    Console.WriteLine();
    Console.WriteLine(string.Format("Time elapsed: {0}s", stopwatch.Elapsed.TotalSeconds));
    Console.ReadKey();
}
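For the code above to compile, the TPL Dataflow library has to be referenced; it ships as the System.Threading.Tasks.Dataflow NuGet package. The following using directives cover everything used here:

using System;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Threading.Tasks.Dataflow;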
Source: https://habr.com/ru/post/138531/