If I understand the answer in the original (duplicate) question correctly, I should read the queue before attempting to join, yes? I swapped the queue-get loop with the join loop and still wound up with a stall. Figuring that reading the queue so soon would be a problem anyway, I also added a 15-second delay after launching the jobs before reading the queue; I then saw the queue reads kick off, but still no join. Is there a way to loop over my job list and keep reading the queue until all the data has been collected (roughly along the lines of the sketch after the code below)? And would multiple reads work OK, given that each worker puts just one dict with one large entry?
import multiprocessing as mp
import pprint
import csv
import time   # needed for the sleep() calls below
import paramiko

def main(argv):
    # username and password are defined elsewhere in my script (omitted here)
    routers = dict(csv.reader(open('routers-sm.csv', 'r')))
    print("Number of CPU's I have: ", mp.cpu_count())

    result_queue = mp.Queue()
    jobs = []
    for hn in routers:
        pname = "worker_" + hn
        ip = routers[hn]
        process = mp.Process(name=pname, target=ssh_gather,
                             args=(username, password, ip, result_queue))
        jobs.append(process)
        process.start()

    # crude delay so the workers have (hopefully) put their results on the queue
    time.sleep(15)

    # read one result dict per worker, then join
    results = {}
    for p in jobs:
        results.update(result_queue.get())
    for p in jobs:
        p.join()
    pprint.pprint(results)

def ssh_gather(username, password, ip, result_queue):
    ssh_dict = {}
    ssh_dict[ip] = {}
    remote_conn_pre = paramiko.SSHClient()
    remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    remote_conn_pre.connect(ip, username=username, password=password,
                            allow_agent=False, look_for_keys=False)
    remote_conn = remote_conn_pre.invoke_shell()
    ssh_dict[ip]['config'] = ""
    remote_conn.send("show running-config\n")
    time.sleep(15)
    output = remote_conn.recv(100000).decode("utf-8")
    print("Process {0} targeting IP {1} grabbed config of {2} bytes.".format(
        mp.current_process().name, ip, len(output)))
    # ssh_dict[ip]['config'] = "Fake Text that is small enough"
    ssh_dict[ip]['config'] = output
    remote_conn.close()
    result_queue.put(ssh_dict)
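For reference, here is roughly the read loop I have in mind, dropped into main() in place of the current get/join loops. It is only a sketch: the 60-second timeout and the pending counter are my own guesses, and it reuses jobs and result_queue from the code above. The idea is to pull exactly one dict per launched worker before joining, so join() never waits on a child that is still blocked flushing a large item into the queue's pipe, and a worker that died without putting anything can't hang the read forever.

import queue   # stdlib; used only for the Empty exception raised by get(timeout=...)

results = {}
pending = len(jobs)                # one dict expected per launched worker
while pending:
    try:
        # block up to 60 seconds for the next result (the timeout value is a guess)
        results.update(result_queue.get(timeout=60))
        pending -= 1
    except queue.Empty:
        print("Gave up waiting; {0} worker(s) never reported a result.".format(pending))
        break

for p in jobs:
    p.join()
pprint.pprint(results)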
What can I do to fix the issue?