I have a large numpy array of dimension (8,512,512,3) which is updated in a for loop running 8 times by calling a function on some images. Can I use multiprocessing / concurrent features to fill the numpy array in lesser time?
def myfun(arr):
#some computation
return out #Dimension of out is (512,512,50,3)
X = np.empty((8,512,512,50,3))
inp = np.ones((8,1000))
for i in range(8):
X[i] = myfun(inp[i])
Update - I tried to use multiprocessing this way but it was slower than sequential
def myfun_mp(inp,return_list):
return_list.append(myfun(inp))
manager = multiprocessing.Manager()
return_list = manager.list()
jobs = []
for i in range(8):
p = multiprocessing.Process(target = myfun_mp, args = (inp[i],return_list))
jobs.append(p)
p.start()
for p in jobs:
p.join()
X = np.array(return_list) #This takes time