
Basically I'm rewriting my code from Thread to Process and have run into some 'surprises'.

How do I lock a queue that is shared by multiple processes? (For now I'm using threading.Lock.)

import multiprocessing
import queue
from threading import Lock  # for now this is a threading.Lock, as mentioned above

import cv2
import numpy as np

# get_image_filepaths is a project helper (not shown here)

class DataProviderProcess:
    def __init__(self, dataset_dir, n_images, batch_size, use_queue_lock,
                 input_img_w=299, input_img_h=299):
        img_filepaths = sorted(get_image_filepaths(dataset_dir))
        img_filepaths = img_filepaths[:n_images]
        self.img_filepath_queue = multiprocessing.Manager().Queue()
        for img_filepath in img_filepaths:
            self.img_filepath_queue.put_nowait(img_filepath)
        self.input_img_w = input_img_w
        self.input_img_h = input_img_h
        self.batch_size = batch_size
        self.use_queue_lock = use_queue_lock
        self.queue_lock = Lock()

    def get_batch(self, thread_id):    
        img_batch = []
        try:
            if self.use_queue_lock:
                self.queue_lock.acquire()

            for _ in range(self.batch_size):
                img_filepath = self.img_filepath_queue.get(block=False)
                print('DEBUG: self.img_filepath_queue.qsize()', self.img_filepath_queue.qsize(),
                      'thread_id:', thread_id)
                img = cv2.imread(img_filepath)
                img = cv2.resize(img, (self.input_img_w, self.input_img_h), interpolation=cv2.INTER_LINEAR)
                img_batch.append(img)
            img_batch = np.array(img_batch)

            if self.use_queue_lock:
                self.queue_lock.release()

            return img_batch
        except queue.Empty:
            if len(img_batch) > 0:
                img_batch = np.array(img_batch)

                if self.use_queue_lock:
                    self.queue_lock.release()

                return img_batch
            else:

                if self.use_queue_lock:
                    self.queue_lock.release()

                return None

Also, why can't I pass self._process to Process here?

And why does multiprocessing.Manager().Queue() have to be used with Process instead of queue.Queue()?

from multiprocessing import Process  # same module as DataProviderProcess above

class BatchLoaderProcess:
    def __init__(self, data_provider: DataProviderProcess, n_threads=8):
        #self.batch_queue = Queue(data_provider.batch_size * 4) # v1
        self.batch_queue = multiprocessing.Manager().Queue(data_provider.batch_size * 4)
        self.data_provider = data_provider

        self.thread_list = []
        for thread_id in range(n_threads):
            #self.thread_list.append(Process(target=self._process)) # v1
            #self.thread_list.append(Process(target=self._process, args=(self,))) # v2
            self.thread_list.append(Process(target=BatchLoaderProcess._process, args=(self, thread_id))) # v3

        for t in self.thread_list:
            t.start()

    def _process(self, thread_id):
        while True:
            img_batch = self.data_provider.get_batch(thread_id)
            if img_batch is None:
                break
            self.batch_queue.put(img_batch)

    def get_batch(self):
        try:
            img_batch = self.batch_queue.get(block=True, timeout=1)
            return img_batch
        except queue.Empty:
            return None
mrgloom

1 Answer


When using multiprocessing, its own queue and locking mechanisms should be used; see https://docs.python.org/3.5/library/multiprocessing.html. I suspect this has a lot to do with the GIL; see Multiprocessing vs Threading Python.
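
For the first question (how to lock a queue shared by multiple processes): a lock created with multiprocessing can be passed to the worker processes and synchronizes across process boundaries, which threading.Lock does not do. A minimal sketch (the names here are only illustrative, not taken from your code):

    import multiprocessing

    def worker(lock, worker_id):
        # multiprocessing.Lock is backed by an OS-level primitive,
        # so it coordinates separate processes, not just threads.
        with lock:
            print('worker', worker_id, 'holds the lock')

    if __name__ == '__main__':
        lock = multiprocessing.Lock()
        procs = [multiprocessing.Process(target=worker, args=(lock, i))
                 for i in range(4)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()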

The multiprocessing queues have their own built-in locking mechanism, so you shouldn't need to lock the queue again: each individual put() or get() is already process-safe (though a sequence of several gets, as in your get_batch(), is still not atomic as a whole).
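
For example, several processes can consume from the same Manager().Queue() with no explicit lock around the get() calls; this sketch only illustrates that point and is not your loader code:

    import multiprocessing
    import queue

    def consume(shared_queue, worker_id):
        # Each get() is process-safe by itself; no extra Lock is needed
        # as long as you only rely on single put()/get() operations.
        while True:
            try:
                item = shared_queue.get(block=False)
            except queue.Empty:
                break
            print('worker', worker_id, 'got', item)

    if __name__ == '__main__':
        shared_queue = multiprocessing.Manager().Queue()
        for i in range(20):
            shared_queue.put(i)
        workers = [multiprocessing.Process(target=consume, args=(shared_queue, i))
                   for i in range(4)]
        for w in workers:
            w.start()
        for w in workers:
            w.join()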

"Why can't I pass self._process to Process here?"

Because self is an argument to the "static" method: when you pass the plain function BatchLoaderProcess._process as the target, nothing supplies self automatically, so it has to go into args explicitly (your v3). With target=self._process the method is already bound, so args should contain only thread_id; your v2 effectively passed self twice, and v1 passed no thread_id at all.
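
A minimal sketch of the difference (class and method names are only illustrative): passing the plain function means self has to go into args, while passing the bound method already carries self.

    from multiprocessing import Process

    class Worker:
        def _process(self, worker_id):
            print('worker', worker_id, 'running')

        def spawn_unbound(self, n_workers):
            # Like v3 in the question: the plain function gets no automatic
            # 'self', so it is passed explicitly as the first argument.
            return [Process(target=Worker._process, args=(self, i))
                    for i in range(n_workers)]

        def spawn_bound(self, n_workers):
            # The bound method already carries 'self', so only the remaining
            # arguments go into args (with the 'spawn' start method the
            # instance must be picklable; with 'fork' it is simply inherited).
            return [Process(target=self._process, args=(i,))
                    for i in range(n_workers)]

    if __name__ == '__main__':
        w = Worker()
        procs = w.spawn_unbound(2) + w.spawn_bound(2)
        for p in procs:
            p.start()
        for p in procs:
            p.join()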

krissy