I have over 10,000 files that I need to open, and in some of them I need to delete part of the data. I tried to do it with a thread pool, but from the time it's taking I don't think it's working.
from multiprocessing.pool import ThreadPool

def readwrite(file):
    with open(file, 'rb') as f:
        # check something
        # if the check is True, keep going; else return
        with open(new_file, 'wb') as out, open(file, 'rb') as g:
            # here I write only the lines I need from the first file
            ...

pool = ThreadPool(40)
for file in files:
    pool.apply_async(readwrite, (file,))
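
For reference, this is a minimal runnable sketch of the pattern I'm using, with close()/join() at the end so the main thread waits for the submitted tasks. The check, the output path, and the file list here are just placeholders for illustration, not my real logic:

    from multiprocessing.pool import ThreadPool
    import glob

    def readwrite(path):
        # read the whole file once and run the check on it
        with open(path, 'rb') as f:
            data = f.read()
        if b'DELETE' not in data:        # placeholder condition, my real check is different
            return

        new_path = path + '.tmp'         # placeholder output path
        with open(new_path, 'wb') as out:
            for line in data.splitlines(keepends=True):
                if not line.startswith(b'DELETE'):   # keep only the lines I need
                    out.write(line)

    files = glob.glob('data/*.txt')      # placeholder file list

    pool = ThreadPool(40)
    for path in files:
        pool.apply_async(readwrite, (path,))
    pool.close()   # no more tasks will be submitted
    pool.join()    # wait for every submitted task to finish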