0

This is my code. I am loading the configuration from an app.json file: as you can see, file_load opens the file from its location, and using dictConfig I read the logging handlers, formatters, etc. I am trying to write to the same file, named log.json, from multiple instances, but I am getting a Windows permission error. If anybody knows anything, please help me.

# Standard-library imports.  All of these ship with Python, so there is
# nothing to "install"; the previous try/except printed a misleading
# message and continued, which only deferred the failure to a NameError
# the first time one of the missing names was used.  Let ImportError
# propagate with its real traceback instead.
import json
import logging
import multiprocessing
import os
import sys
import time
import traceback
from logging import config, Formatter, Handler
from multiprocessing import (
    BoundedSemaphore,
    Event,
    Lock,
    Manager,
    Process,
    Semaphore,
    SimpleQueue,
    current_process,
)
from threading import Thread


    # class logger to print the data in file calling methods.
    class Logger:
        # when create any object of logger class it will
        def __init__(self):
            # self.semaphore = Semaphore(10)
            self.queue = multiprocessing.Queue()
            file_load = open('C:/Users/desaijb/Desktop/gitpractice/gitpractice/test_log/app.json')
            config_file = json.load(file_load)
            final_config_file = config_file['logging']
            config.dictConfig(final_config_file)
            self.logger = logging.getLogger()

        def debug(self, message):
            logging.getLogger('debug_module')
            self.queue.put(message)
            # self.semaphore.acquire()
            # self.logger.debug(message)
            # self.semaphore.release()
            return True

        def info(self, message):
            logging.getLogger('info_module')
            self.logger.info(message)
            return True

        def warning(self, message):
            logging.getLogger('warning_module')
            self.logger.warning(message)

        def error(self,message):
            logging.getLogger('error_module')
            self.logger.error(message)

        def critical(self, message):
            logging.getLogger('critical_module')
            self.logger.critical(message)

        def receive(self):
            while self.queue:
                received_message = self.queue.get()


    if __name__ == "__main__":
        logg = Logger()
        proc = Process(target=logg.debug, args=('message',))
        proc.start()

The following is the error I get after running the code.

 Traceback (most recent call last):
      File "C:/Users/desaijb/Desktop/gitpractice/gitpractice/test_log/logger.py", line 63, in <module>
        proc.start()
      File "C:\Program Files\Python36\lib\multiprocessing\process.py", line 105, in start
        self._popen = self._Popen(self)
      File "C:\Program Files\Python36\lib\multiprocessing\context.py", line 223, in _Popen
        return _default_context.get_context().Process._Popen(process_obj)
      File "C:\Program Files\Python36\lib\multiprocessing\context.py", line 322, in _Popen
        return Popen(process_obj)
      File "C:\Program Files\Python36\lib\multiprocessing\popen_spawn_win32.py", line 65, in __init__
        reduction.dump(process_obj, to_child)
      File "C:\Program Files\Python36\lib\multiprocessing\reduction.py", line 60, in dump
        ForkingPickler(file, protocol).dump(obj)
    TypeError: can't pickle _thread.RLock objects
  • You should open your file on **append** mode « a+ ». See https://stackoverflow.com/a/30566011 – Laurent LAPORTE Jun 22 '18 at 22:17
  • It will not work because my log file changes every midnight, and then it throws an error. I know this. But I tried what you said and got this error: json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0) – jenil desai Jun 25 '18 at 13:41
  • Since multiple processes can write to the same file, there is no guarantee that your JSON file stays valid. In fact, this is bad practice. You can lock the file during writing and reading to make sure it stays valid, but you may introduce a bottleneck. It's better to have one file for each process. – Laurent LAPORTE Jun 25 '18 at 20:21
  • That is the requirement for the project. – jenil desai Jun 25 '18 at 20:29

0 Answers