Fila FIFO Threadsafe / Buffer

Eu preciso implementar um tipo de buffer de tarefas. Requisitos básicos são:

- Processar tarefas em um único encadeamento de segundo plano
- Receber tarefas de vários encadeamentos
- Processar todas as tarefas recebidas, ou seja, certificar-se de que o buffer seja drenado das tarefas armazenadas após o recebimento de um sinal de parada
- A ordem das tarefas recebidas por thread deve ser mantida

Eu estava pensando em implementá-lo usando uma fila como abaixo. Gostaria de receber feedback sobre a implementação. Existem outras ideias mais brilhantes para implementar uma coisa dessas?

/// <summary>
/// A thread-safe FIFO task buffer: producers enqueue from any thread,
/// a single background worker drains the queue in order. After stop()
/// the worker finishes every task already enqueued before exiting.
/// Note: tasks enqueued after stop() has been observed by the worker
/// are never processed; start() must not be called twice (it would
/// spawn a second worker).
/// </summary>
public class TestBuffer
{
    // Guards 'queue' and 'running'; also serves as the Monitor wait/pulse handle.
    private readonly object queueLock = new object();
    private readonly Queue<Task> queue = new Queue<Task>();
    // True while the buffer accepts the "keep running" state; written under queueLock.
    private bool running = false;

    public TestBuffer()
    {
    }

    /// <summary>Starts the background worker thread.</summary>
    public void start()
    {
        // BUGFIX: set the flag here, under the lock, BEFORE the thread starts.
        // The original set running = true inside run(), so a stop() arriving
        // before the worker was scheduled was silently overwritten and the
        // worker could run forever.
        lock (queueLock)
        {
            running = true;
        }

        Thread t = new Thread(new ThreadStart(run));
        t.Start();
    }

    /// <summary>Worker loop: dequeue and handle tasks until stopped and drained.</summary>
    private void run()
    {
        bool keepRunning = true;
        while (keepRunning)
        {
            Task task = null;
            // Lock queue before doing anything
            lock (queueLock)
            {
                // BUGFIX: 'while', not 'if'. Monitor.Wait can return without the
                // condition holding (spurious wakeup, or another wakeup raced us),
                // so the condition must be re-checked under the lock each time.
                while (queue.Count == 0 && running)
                {
                    Monitor.Wait(queueLock);
                }

                // The queue may still be empty here if we were woken by stop()
                // rather than by an enqueue.
                if (queue.Count > 0)
                {
                    task = queue.Dequeue();
                }
            }

            // Handle outside the lock so producers are never blocked while a
            // task executes.
            if (task != null)
            {
                handle(task);
            }

            // Re-check under the lock whether to loop again.
            // Keep going while items remain, even after stop(): this is what
            // guarantees the queue is drained before the worker exits.
            lock (queueLock)
            {
                keepRunning = queue.Count > 0 || running;
            }
        }
    }

    /// <summary>Adds a task to the buffer; safe to call from any thread.</summary>
    public void enqueue(Task toEnqueue)
    {
        lock (queueLock)
        {
            queue.Enqueue(toEnqueue);
            // PulseAll is safe with any number of waiters; with the single
            // worker here, Pulse would also suffice.
            Monitor.PulseAll(queueLock);
        }
    }

    /// <summary>Signals the worker to exit once the queue is drained.</summary>
    public void stop()
    {
        lock (queueLock)
        {
            running = false;
            // Wake the worker so it can observe the stop request.
            Monitor.PulseAll(queueLock);
        }
    }

    /// <summary>Executes one dequeued task. Runs on the worker thread.</summary>
    public void handle(Task dequeued)
    {
        dequeued.execute();
    }
}

Respostas da pergunta (5)

Sua resposta à pergunta