I would like to copy several files with a specific, identical name from a server to my local machine using Paramiko (for a school project). I would also like the script to go through a list of servers, run the same code against each one, and detect whether each server is online. How do I do this?
There are several other, similarly named files that I do not need; I need to pull only the specific "dblatmonstat" files.
Example of a file name: pc_dblatmonstat_dpc01n1_scl000101014.log
Like: first go through...
dpc01n1.sccloud.xxx.com
And then the same code through... dpc02n1.sccloud.xxx.com
...and so on and so forth.
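For the online check, I was imagining something along these lines, just a rough sketch that treats a server as "online" if a plain TCP connection to its SSH port succeeds (the host names here are placeholders):

import socket

def is_online(host, port=22, timeout=5):
    # Treat the server as online if a TCP connection to its SSH port succeeds.
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False

for host in ['dpc01n1.sccloud.xxx.com', 'dpc02n1.sccloud.xxx.com']:
    print(host, 'is online' if is_online(host) else 'is offline')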
Here is what I have so far:
import os
import paramiko
import re

#Create log file
#paramiko.util.log_to_file('/$PMRootDir/SrcFiles/logfetcher.log')
#paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))

#Credentials
host = 'dpc01n1.sccloud.xxx.com'
port = 22
username = 'pi'
password = 'pi'

#Locations
files = re.search(r'(?<=pc_dblatmonstat_dpc01n1_)\w+', files)
print('There are files:', files)
remote_path = '/home/pi/Desktop/logs'
local_path = r'C:\Users\urale\Desktop\logs'

#Opening ssh and ftp
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host, port=port, username=username, password=password)
sftp = ssh.open_sftp()

#Getting files
for file in files:
    file_remote = remote_path + file
    file_local = local_path + file
    print(file_remote + ' >>> ' + file_local)
    #sftp.get(file_remote, file_local)
    sftp.put(file_local, file_remote)

sftp.close()
ssh.close()
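For the filename filter on its own, I was picturing something like this, just a sketch where the names list stands in for whatever listing the remote directory returns:

import re

pattern = re.compile(r'pc_dblatmonstat_\w+\.log$')

# Stand-in for a remote directory listing.
names = ['pc_dblatmonstat_dpc01n1_scl000101014.log', 'pc_other_dpc01n1.log']

matches = [name for name in names if pattern.search(name)]
print(matches)  # ['pc_dblatmonstat_dpc01n1_scl000101014.log']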
EDIT:
This version keeps downloading the same file over and over again. How can I break the loop once a file has finished downloading and move on to the next server?
I'm also trying to use the re.search function so that only the pc_dblatmonstat_xxxxxxxxxx_xxxxxxxxxxxxx.log files are downloaded. re.search should match dblatmonstat__.log, something like this...
import os
import paramiko
import re

# You could add the local_path to the function to define individual places for the
# files that you download.
Lpath = 'C:\\'
Lpath1 = 'Users'
Lpath2 = 'urale'
Lpath3 = 'Desktop'
Lpath4 = 'logs\\'
local_path = os.path.join(Lpath, Lpath1, Lpath2, Lpath3, Lpath4)

Rpath1 = 'home'
Rpath2 = 'pi'
Rpath3 = 'Desktop'
Rpath4 = 'logs'
remote_path = os.path.join(Rpath1, Rpath2, Rpath3, Rpath4)

# 1. Create function
def get_server_files(local_path, host, port, username, password, remote_path, files):
    #Opening ssh and ftp
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(host, port, username, password)
    sftp = ssh.open_sftp()

    #Getting files
    for file in files:
        file_remote = remote_path + files
        file_local = local_path + files
        print(file_remote, '>>>', file_local)
        sftp.get(file_remote, file_local)
        #sftp.put(file_local, file_remote)

    sftp.close()
    ssh.close()

# 2. list of servers
# Add new dictionary for each server to this list
list_of_servers = [
    {'host': '192.168.1.64',
     'port': 22,
     'username': 'pi',
     'password': 'pi',
     'remote_path': '/home/pi/Desktop/logs/',
     'files': 'pc_dblatmonstat_dpc01n1_scl000101014.log'
    }
]

# 3. Iterate through the list_of_servers, using the function above
for server in list_of_servers:
    get_server_files(local_path, **server)
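To show where I'm trying to get to, here is a rough sketch based on the code above: list the remote directory, keep only the dblatmonstat files, and download each one exactly once before moving on to the next server. The regex is my guess at the filename format, posixpath keeps the remote paths forward-slashed, and the hosts and credentials are placeholders:

import os
import posixpath
import re

import paramiko

PATTERN = re.compile(r'pc_dblatmonstat_\w+\.log$')

def get_server_files(local_path, host, port, username, password, remote_path):
    # Open SSH and SFTP sessions for this server.
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(host, port=port, username=username, password=password)
    sftp = ssh.open_sftp()

    # List the remote directory and keep only the dblatmonstat logs.
    for name in sftp.listdir(remote_path):
        if not PATTERN.search(name):
            continue
        file_remote = posixpath.join(remote_path, name)
        file_local = os.path.join(local_path, name)
        print(file_remote, '>>>', file_local)
        sftp.get(file_remote, file_local)  # each matching file is fetched once

    sftp.close()
    ssh.close()

list_of_servers = [
    {'host': '192.168.1.64',
     'port': 22,
     'username': 'pi',
     'password': 'pi',
     'remote_path': '/home/pi/Desktop/logs/'},
]

local_path = r'C:\Users\urale\Desktop\logs'

for server in list_of_servers:
    get_server_files(local_path, **server)

The idea is that sftp.listdir() gives the real filenames on each server, so the loop naturally finishes once every matching file has been fetched and then moves to the next server.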