I have a function that is fairly computationally expensive. I expected it to take a long time to run, but instead it appears to run out of memory. That doesn't make sense to me, since most of the variables in the function are temporary and the function only returns a single value, which is stored in a global array.
Here's the function:
function entropy(img)
    mat = Matrix{Float64}(img)
    mat *= 255                          # scale grey levels to 0:255

    # central differences along the two image axes, cropped to a common interior region
    fx = Matrix{Float64}(mat[:, 3:end] - mat[:, 1:end-2])[2:end-1, :]
    fy = Matrix{Float64}(mat[3:end, :] - mat[1:end-2, :])[:, 2:end-1]
    fx = collect(Iterators.flatten(fx))
    fy = collect(Iterators.flatten(fy))

    # one bin per integer gradient value from -range to +range
    range = maximum([abs(minimum(fx)), abs(maximum(fx)), abs(minimum(fy)), abs(maximum(fy))])
    bins = 2*range + 1

    # joint histogram of the gradient components, normalised to a probability distribution
    delDensity, xedges, yedges = hist2D(fx, fy, Int(bins))
    delDensity = delDensity ./ sum(delDensity)
    delDensity = transpose(delDensity)
    p = delDensity[delDensity .!= 0]

    # my attempt at "deallocating" the temporaries
    delDensity, mat, fx, fy, range, bins = [0, 0, 0, 0, 0, 0]

    return -0.5*sum(p .* log2.(p))
end
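For reference, here is a minimal sketch of how a single call can be timed and its allocations inspected. The 64×64 binary image is just a stand-in for my real data, and hist2D is assumed to come from PyPlot.jl (the wrapper around Matplotlib's hist2d):
using PyPlot                      # assumed source of hist2D

test_img = rand(Bool, 64, 64)     # stand-in image; 0/1 values keep Int(bins) exact
entropy(test_img)                 # warm-up call so compilation isn't part of the measurement
GC.gc()                           # collect first so the reported allocations are comparable
@time entropy(test_img)           # prints time, allocation count and allocated bytes for one call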
I then apply the function over a sweep of about 2000 parameter values:
as = 0:0.0005:1
ses = Dict(lasm => [], slmm => [])
for sys in [lasm, slmm]
    # one copy of the system per thread so the threads don't mutate shared state
    systems = [deepcopy(sys) for _ in 1:Threads.nthreads()-1]
    pushfirst!(systems, sys)
    Threads.@threads for i in eachindex(as)
        system = systems[Threads.threadid()]
        set_parameter!(system, 1, as[i])
        shuf, r, c = shuffle(img, system)   # use this thread's copy of the system
        push!(ses[sys], entropy(shuf))
    end
end
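For clarity, a stripped-down serial version of the same sweep (same set_parameter!, shuffle and entropy as above, just a much coarser parameter grid and no threads) would look roughly like this:
ses_serial = Dict(lasm => Float64[], slmm => Float64[])
for sys in [lasm, slmm]
    for a in 0:0.05:1                         # coarse sweep, no threading
        set_parameter!(sys, 1, a)
        shuf, r, c = shuffle(img, sys)
        push!(ses_serial[sys], entropy(shuf))
    end
end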
I've even tried deallocating the variables within the function, but it was of no use. Memory usage stays stable for some time and then skyrockets until the system kills the kernel.
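A rough sketch of the kind of per-iteration logging that shows the growth (Sys.maxrss and Sys.free_memory are standard Julia; the coarse serial sweep is only for illustration):
for (i, a) in enumerate(0:0.05:1)
    set_parameter!(lasm, 1, a)
    shuf, r, c = shuffle(img, lasm)
    entropy(shuf)
    # resident-set size and free RAM, reported in MiB
    println("i = $i  maxrss = $(Sys.maxrss() ÷ 2^20) MiB  free = $(Sys.free_memory() ÷ 2^20) MiB")
end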
I'm running the .ipynb file in VS Code with a Julia kernel.