Concurrent producer/consumer using ConcurrentQueue / BlockingCollection

Asked: 2019-05-07 11:08:14

Tags: c# .net producer-consumer

At work we need to asynchronously record the JSON data we receive from various endpoints. We used to write it out in a simple, direct archiving fashion, but that turned out to be slow, so we want to switch to a producer/consumer pattern.

BlockingCollection seemed like a good fit, so I created a class that uses it, like this:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using RestSharp;

namespace Test
{
    public class JsonRecorder : IJsonRecorder, IDisposable
    {
        private String _jsonFileName { get; set; }
        private DateTime _jsonWriterDate { get; set; } = DateTime.MinValue;
        private readonly JsonSerializerSettings _jsonDateSerializerSettings = new JsonSerializerSettings {DateFormatString = "yyyy-MM-ddTHH:mm:ss.fffZ"};
        private BlockingCollection<string> _itemsToWriteQueue = new BlockingCollection<string>();
        private Boolean _disposed = false;
        private Boolean _ShouldConsumerProcessRun = false;
        private Boolean _isStarted = false;
        private Task _dequeuerTask;
        private object _syncLock = new object();

        public String Name { get; }
        public Exchange Exchange { get; }
        public string FilePath { get;  }
        public ITimeProvider TimeProvider { get; }
        private ISimpleLogService LogService { get; }

        public JsonRecorder(String name, Exchange exchange, [NotNull] ISimpleLogService simpleLogService, String filePath)
            :this(name, exchange, simpleLogService, filePath, new DefaultTimeProvider())
        {
        }

        public JsonRecorder(String name, Exchange exchange, [NotNull] ISimpleLogService simpleLogService, String filePath, [NotNull] ITimeProvider timeProvider)
        {
            Exchange = exchange;
            Name = name;
            LogService = simpleLogService ?? throw new ArgumentNullException(nameof(simpleLogService));
            FilePath = filePath;
            TimeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        }

        public Boolean InitJsonAuditFile()
        {
            try
            {
                var now = TimeProvider.DateTimeUtcNow;
                if (_jsonWriterDate.Hour == now.Hour)
                    return true;

                if (!String.IsNullOrEmpty(_jsonFileName))
                {
                    ThreadPool.QueueUserWorkItem(_ => { ZipJsonFile(_jsonFileName); });
                    //ZipFileTask.Start();
                }

                _jsonWriterDate = now;
                var directoryName = $"{FilePath}/{_jsonWriterDate:yyyyMMdd}";
                if (!Directory.Exists(directoryName))
                    Directory.CreateDirectory(directoryName);

                _jsonFileName = $@"{directoryName}/{_jsonWriterDate:yyyyMMdd_HHmmss}_{Name}.txt";
                return true;
            }
            catch (Exception ex)
            {
                LogService.LogException(this, LogCategory.GW, Exchange, ex);
            }
            return false;
        }

        public void ZipJsonFile(String fileName)
        {
            if (String.IsNullOrEmpty(fileName))
            {
                throw new ArgumentNullException(nameof(fileName));
            }
            try
            {
                using (var zip = ZipFile.Open($"{fileName}.zip", ZipArchiveMode.Create))
                {
                    zip.CreateEntryFromFile(fileName, Path.GetFileName(fileName));
                }
                File.Delete(fileName);
            }
            catch (Exception ex)
            {
                LogService.LogException(this, LogCategory.GW, Exchange, ex);
            }
        }

        public void JsonRecord(IRestClient client, Dictionary<String, String> body)
        {
            try
            {
                var record = new
                {
                    date = TimeProvider.DateTimeUtcNow,
                    url = client.BaseUrl,
                    body = body?.Select(parameter => new
                    {
                        name = parameter.Key,
                        value = parameter.Value,
                    })
                };
                _itemsToWriteQueue.Add(JsonConvert.SerializeObject(record, _jsonDateSerializerSettings));
            }
            catch (Exception)
            {
                // ignored
            }
        }



        public void JsonRecord(String stringifiedResponse)
        {
            try
            {
                _itemsToWriteQueue.Add(stringifiedResponse);
            }
            catch (Exception ex)
            {
                LogService.LogException(this, LogCategory.GW, Exchange, ex);
            }
        }


        public void Stop()
        {
            lock (_syncLock)
            {
                _itemsToWriteQueue.CompleteAdding();
                _ShouldConsumerProcessRun = false;
                _dequeuerTask?.Wait(TimeSpan.FromSeconds(5));
            }
        }

        public bool Start()
        {
            lock (_syncLock)
            {
                if (!_isStarted)
                {
                    _isStarted = true;
                    _dequeuerTask = Task.Run(() =>
                    {
                        Thread.CurrentThread.Name = "JsonDequeuerTask";
                        RunConsumerProcess();
                    });
                }
                return true;
            }
        }

        /// <inheritdoc />
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        private void RunConsumerProcess()
        {
            _ShouldConsumerProcessRun = true;
            while (_ShouldConsumerProcessRun && !_itemsToWriteQueue.IsCompleted)
            {
                InitJsonAuditFile();

                string itemToWriteToFile = null;
                try
                {
                    itemToWriteToFile = _itemsToWriteQueue.Take();
                }
                catch (InvalidOperationException) { }

                if (itemToWriteToFile != null)
                {
                    using (var stream = File.Open(_jsonFileName, FileMode.Append, FileAccess.Write))
                    {
                        using (var sw = new StreamWriter(stream))
                        {
                            sw.WriteLine(itemToWriteToFile);
                        }
                    }
                }
            }
        }

        private void Dispose(bool disposing)
        {
            if (_disposed)
                return;

            if (disposing)
            {
                Stop();
            }

            _disposed = true;
        }
    }
}

However, when we run this code on an actual VM, we see memory climb to 2 GB. I have seen "The .Net Concurrent BlockingCollection has a memory leak?", which should have been fixed in .NET 4.5 (we are running .NET 4.7.2), and I have also seen the post "ConcurrentQueue holding on to a few dequeued elements".
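
One way to check whether the 2 GB is simply a backlog of queued strings (producers outpacing the consumer), rather than the collection holding on to already-dequeued items, would be to log the queue depth periodically. A rough sketch only; QueueBacklogMonitor and the log delegate are hypothetical and not part of the class above:

using System;
using System.Collections.Concurrent;
using System.Threading;

// Hypothetical helper: periodically logs how many items are waiting in the queue, so a
// growing backlog (producers faster than the consumer) can be told apart from memory
// being held by the collection after items have already been dequeued.
public static class QueueBacklogMonitor
{
    public static Timer Start(BlockingCollection<string> queue, Action<string> log)
    {
        // Count reflects only the items not yet taken by the consumer.
        return new Timer(_ => log($"JsonRecorder backlog: {queue.Count} items"),
                         null, TimeSpan.Zero, TimeSpan.FromSeconds(30));
    }
}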

Either way, we still see a huge memory footprint.

So we switched to using the following class instead:

using System;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Threading;

public class BlockingQueueSlim<T>
{
    private readonly ConcurrentQueue<T> _queue = new ConcurrentQueue<T>();
    private readonly AutoResetEvent _autoResetEvent = new AutoResetEvent(false);
    private static readonly TimeSpan MinWait = TimeSpan.FromMilliseconds(1);


    public void Add(T item)
    {
        _queue.Enqueue(item);
        _autoResetEvent.Set();
    }

    public bool TryPeek(out T result)
    {
        return _queue.TryPeek(out result);
    }

    public T Take()
    {
        T item;
        while (!_queue.TryDequeue(out item))
            _autoResetEvent.WaitOne();
        return item;
    }

    public bool TryTake(out T item, TimeSpan patience)
    {
        if (_queue.TryDequeue(out item))
            return true;
        var stopwatch = Stopwatch.StartNew();
        while (stopwatch.Elapsed < patience)
        {
            if (_queue.TryDequeue(out item))
                return true;
            var patienceLeft = (patience - stopwatch.Elapsed);
            if (patienceLeft <= TimeSpan.Zero)
                break;
            else if (patienceLeft < MinWait)
                // otherwise the while loop will degenerate into a busy loop,
                // for the last millisecond before patience runs out
                patienceLeft = MinWait;
            _autoResetEvent.WaitOne(patienceLeft);
        }

        return false;
    }

    public int CurrentItemCount => _queue.Count;

}

Here is where I use it:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using RestSharp;

namespace Test
{
    public class JsonRecorder : IJsonRecorder, IDisposable
    {
        private String _jsonFileName { get; set; }
        private DateTime _jsonWriterDate { get; set; } = DateTime.MinValue;
        private readonly JsonSerializerSettings _jsonDateSerializerSettings = new JsonSerializerSettings {DateFormatString = "yyyy-MM-ddTHH:mm:ss.fffZ"};
        private BlockingQueueSlim<string> _itemsToWriteQueue = new BlockingQueueSlim<string>();
        private Boolean _disposed = false;
        private Boolean _ShouldConsumerProcessRun = false;
        private Boolean _isStarted = false;
        private Task _dequeuerTask;
        private object _syncLock = new object();
        private long _seqId = 0;

        public String Name { get; }
        public Exchange Exchange { get; }
        public string FilePath { get;  }
        public ITimeProvider TimeProvider { get; }
        private ISimpleLogService LogService { get; }

        public JsonRecorder(String name, Exchange exchange, [NotNull] ISimpleLogService simpleLogService, String filePath)
            :this(name, exchange, simpleLogService, filePath, new DefaultTimeProvider())
        {
        }

        public JsonRecorder(String name, Exchange exchange, [NotNull] ISimpleLogService simpleLogService, String filePath, [NotNull] ITimeProvider timeProvider)
        {
            Exchange = exchange;
            Name = name;
            LogService = simpleLogService ?? throw new ArgumentNullException(nameof(simpleLogService));
            FilePath = filePath;
            TimeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        }

        public Boolean InitJsonAuditFile()
        {
            try
            {
                var now = TimeProvider.DateTimeUtcNow;
                if (_jsonWriterDate.Hour == now.Hour)
                    return true;

                if (!String.IsNullOrEmpty(_jsonFileName))
                {
                    ThreadPool.QueueUserWorkItem(_ => { ZipJsonFile(_jsonFileName); });
                    //ZipFileTask.Start();
                }

                _jsonWriterDate = now;
                var directoryName = $"{FilePath}/{_jsonWriterDate:yyyyMMdd}";
                if (!Directory.Exists(directoryName))
                    Directory.CreateDirectory(directoryName);

                _jsonFileName = $@"{directoryName}/{_jsonWriterDate:yyyyMMdd_HHmmss}_{Name}.txt";
                return true;
            }
            catch (Exception ex)
            {
                LogService.LogException(this, LogCategory.GW, Exchange, ex);
            }
            return false;
        }

        public void ZipJsonFile(String fileName)
        {
            if (String.IsNullOrEmpty(fileName))
            {
                throw new ArgumentNullException(nameof(fileName));
            }
            try
            {
                using (var zip = ZipFile.Open($"{fileName}.zip", ZipArchiveMode.Create))
                {
                    zip.CreateEntryFromFile(fileName, Path.GetFileName(fileName));
                }
                File.Delete(fileName);
            }
            catch (Exception ex)
            {
                LogService.LogException(this, LogCategory.GW, Exchange, ex);
            }
        }


        public void JsonRecord(IRestClient client, Dictionary<String, String> body)
        {
            try
            {
                var record = new
                {
                    seqId = Interlocked.Increment(ref _seqId),
                    date = TimeProvider.DateTimeUtcNow,
                    url = client.BaseUrl,
                    body = body?.Select(parameter => new
                    {
                        name = parameter.Key,
                        value = parameter.Value,
                    })
                };
                _itemsToWriteQueue.Add(JsonConvert.SerializeObject(record, _jsonDateSerializerSettings));
            }
            catch (Exception)
            {
                // ignored
            }
        }

        public void JsonRecord(String stringifiedResponse)
        {
            try
            {
                _itemsToWriteQueue.Add(stringifiedResponse);
            }
            catch (Exception ex)
            {
                LogService.LogException(this, LogCategory.GW, Exchange, ex);
            }
        }

        public void Stop()
        {
            lock (_syncLock)
            {
                _isStarted = false;
                _ShouldConsumerProcessRun = false;
                _dequeuerTask?.Wait(TimeSpan.FromSeconds(5));
            }
        }

        public bool Start()
        {
            lock (_syncLock)
            {
                if (!_isStarted)
                {
                    _isStarted = true;
                    _dequeuerTask = Task.Run(() =>
                    {
                        Thread.CurrentThread.Name = "JsonDequeuerTask";
                        RunConsumerProcess();
                    });
                }
                return true;
            }
        }

        /// <inheritdoc />
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        private void RunConsumerProcess()
        {
            _ShouldConsumerProcessRun = true;
            while (_ShouldConsumerProcessRun)
            {
                InitJsonAuditFile();

                string itemToWriteToFile = null;
                try
                {
                    itemToWriteToFile = _itemsToWriteQueue.Take();
                }
                catch (InvalidOperationException) { }

                if (itemToWriteToFile != null)
                {
                    using (var stream = File.Open(_jsonFileName, FileMode.Append, FileAccess.Write))
                    {
                        using (var sw = new StreamWriter(stream))
                        {
                            sw.WriteLine(itemToWriteToFile);
                        }
                    }
                }
            }
        }

        private void Dispose(bool disposing)
        {
            if (_disposed)
                return;

            if (disposing)
            {
                Stop();
            }

            _disposed = true;
        }
    }
}

However, this also ended up consuming a large amount of memory, again up to 2 GB.

I have also read various posts about ConcurrentQueue having memory leaks, such as here.

I'm a bit lost now. What I need is:

  • I am able to produce values from various sources (different threads)
  • The consumer can run on a dedicated thread
  • I cannot lose data
  • I am OK if some kind of circular buffer is used (as long as I don't lose data); the consumer is fairly fast, so that should not happen anyway
  • Memory usage can be controlled with some size parameter (see the sketch just below this list)
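
For the last point, the kind of size control I have in mind is roughly the bounded-capacity constructor that BlockingCollection already exposes. A sketch only: the class name and the capacity of 100000 are placeholders, and a bound by itself would not explain the memory behaviour above.

using System.Collections.Concurrent;

// Sketch only: a BlockingCollection with a bounded capacity, so Add blocks (and TryAdd
// with a timeout returns false) once the backlog reaches the limit, instead of letting
// the queue grow without bound. The capacity of 100000 is a placeholder, not a tested value.
public class BoundedQueueSketch
{
    private readonly BlockingCollection<string> _itemsToWriteQueue =
        new BlockingCollection<string>(new ConcurrentQueue<string>(), 100000);
}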

So far, though, none of the built-in .NET classes people suggest seem to offer a viable way to meet this set of requirements.

0 Answers:

There are no answers yet.