Copying multiple files with the same file name from a server to local using Python Paramiko

0 votes
1 answer
855 views
Asked on 2025-06-08 05:52

I want to use paramiko to copy a few specific files, all with the same kind of name, from a server to my local machine (this is a school project). However, I would like to have a list of servers so the script works through them one by one, running the same code on each, and also detects whether each server is online. How can I do this?

There are several files with similar names that I do not need; I only want to pull the specific "dblatmonstat" files.

For example, a file name looks like this: pc_dblatmonstat_dpc01n1_scl000101014.log

Like this: first process...

dpc01n1.sccloud.xxx.com

...then run the same code against...

dpc02n1.sccloud.xxx.com

...and so on.
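
(A note on the "is the server online" part: one simple possibility, assuming a plain TCP probe of the SSH port is an acceptable reachability test, is sketched below; the is_online helper is illustrative and not part of the original code.)

import socket

def is_online(host, port=22, timeout=5):
    """Return True if a TCP connection to host:port succeeds within timeout."""
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False

# Example: only attempt the copy when the SSH port answers
if is_online('dpc01n1.sccloud.xxx.com'):
    print('Server is reachable, starting file transfer...')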

Here is my code so far:

import os
import paramiko
import re

#Create log file
#paramiko.util.log_to_file('/$PMRootDir/SrcFiles/logfetcher.log')
#paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))

#Credentials
host = 'dpc01n1.sccloud.xxx.com'
port = 22
username = 'pi'
password = 'pi'

#Locations
# Note: 'files' is not defined yet at this point; the idea is to pick out the
# dblatmonstat log names, but I am not sure where this filtering should happen.
files = re.search(r'(?<=pc_dblatmonstat_dpc01n1_)\w+', files)
print('There are files:', files)
remote_path = '/home/pi/Desktop/logs'
local_path = r'C:\Users\urale\Desktop\logs'


#Opening ssh and ftp
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# connect() takes (hostname, port, username, password) positionally, so pass keywords
ssh.connect(host, port=port, username=username, password=password)
sftp = ssh.open_sftp()

#Getting files
for file in files:
    file_remote = remote_path + '/' + file
    file_local = os.path.join(local_path, file)

    print(file_remote + ' >>> ' + file_local)

    # Download (get) rather than upload (put), since the files come from the server
    sftp.get(file_remote, file_local)
    #sftp.put(file_local, file_remote)

sftp.close()
ssh.close()

Edit:

This version keeps downloading the same file over and over again. What do I need to do so that the loop moves on to the next server once a file has finished downloading? I would also like to use re.search so that only files like pc_dblatmonstat_xxxxxxxxxx_xxxxxxxxxxxxx.log are downloaded. The re.search should match dblatmonstat_*_*.log, something like that...
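
(For reference, one way to express that kind of pattern, assuming the filenames follow the example pc_dblatmonstat_dpc01n1_scl000101014.log, is a regex like the sketch below.)

import re

# Assumed format: pc_dblatmonstat_<node>_<serial>.log
log_pattern = re.compile(r'^pc_dblatmonstat_\w+_\w+\.log$')

print(bool(log_pattern.match('pc_dblatmonstat_dpc01n1_scl000101014.log')))  # True
print(bool(log_pattern.match('pc_otherstat_dpc01n1_scl000101014.log')))     # False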

import os
import paramiko
import re

# You could add the local_path to the function to define individual places for the 
# files that you download.
Lpath = 'C:\\'
Lpath1 = 'Users'
Lpath2 = 'urale'
Lpath3 = 'Desktop'
Lpath4 = 'logs\\'
local_path = os.path.join(Lpath, Lpath1, Lpath2, Lpath3, Lpath4)

Rpath1 = 'home'
Rpath2 = 'pi'
Rpath3 = 'Desktop'
Rpath4 = 'logs'
remote_path = os.path.join(Rpath1, Rpath2, Rpath3, Rpath4)

# 1. Create function
def get_server_files(local_path, host, port, username, password, remote_path, files):
    #Opening ssh and ftp
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(host, port, username, password)
    sftp = ssh.open_sftp()

    #Getting files
    for file in files:

        file_remote = remote_path + files
        file_local = local_path + files

        print(file_remote, '>>>', file_local)

        sftp.get(file_remote, file_local)
        #sftp.put(file_local, file_remote)

    sftp.close()
    ssh.close()

# 2. list of servers
# Add new dictionary for each server to this list
list_of_servers = [
    { 'host': '192.168.1.64',
      'port': 22, 
      'username': 'pi', 
      'password': 'pi', 
      'remote_path': '/home/pi/Desktop/logs/', 
      'files':  'pc_dblatmonstat_dpc01n1_scl000101014.log'
      }
]

# 3. Iterate through the list_of_servers, using the function above
for server in list_of_servers:
    get_server_files(local_path, **server)
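
(A side note on the edit above, separate from the answer below: 'files' in list_of_servers is a single string, so "for file in files" iterates over its characters, and "remote_path + files" rebuilds the same full filename on every pass, which is why the same file keeps being downloaded. The remote path built with os.path.join will also use backslashes on Windows and lose the leading '/'. A minimal adjustment, assuming the filenames really are listed explicitly, could look like this sketch.)

# Hypothetical adjustment: keep the filenames in a list and use the loop variable
list_of_servers = [
    {'host': '192.168.1.64',
     'port': 22,
     'username': 'pi',
     'password': 'pi',
     'remote_path': '/home/pi/Desktop/logs/',               # plain string, not os.path.join
     'files': ['pc_dblatmonstat_dpc01n1_scl000101014.log']} # a list, not a string
]

# ...and inside get_server_files:
#     for file in files:
#         file_remote = remote_path + file            # 'file', not 'files'
#         file_local = os.path.join(local_path, file)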


1 Answer

1

I haven't tested the following, but it should work and give you an idea of how to approach the problem.

  1. Turn the script into a function
  2. Create a list of servers
  3. Iterate over the list with that function and fetch the files

This is reflected in the code below:

import os
import paramiko
import re

# 1. Create function
def get_server_files(local_path, host, port, username, password, remote_path, file_pattern):
    """Connects to host and searches for files matching file_pattern
    in remote_path. Downloads all matches to 'local_path'"""
    #Opening ssh and ftp
    ssh_con = paramiko.SSHClient()
    ssh_con.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh_con.connect(host, port, username, password)
    sftp_con = ssh_con.open_sftp()

    # Finding files
    all_files_in_path = sftp_con.listdir(path=remote_path)
    r = re.compile(file_pattern)
    files = list(filter(r.match, all_files_in_path))

    #Download files
    for file in files:
        file_remote = remote_path + '/' + file
        file_local = os.path.join(local_path, file)

        print(file_remote + ' >>> ' + file_local)

        sftp_con.get(file_remote, file_local)
        #sftp_con.put(file_local, file_remote)

    sftp_con.close()
    ssh_con.close()

# 2. list of servers
# Add new dictionary for each server to this list
list_of_servers = [
    { 'host': 'dpc01n1.sccloud.xxx.com',
      'port': 22, 
      'username': 'pi', 
      'password': 'pi', 
      'remote_path': '/home/pi/Desktop/logs', 
      'file_pattern': 'pc_dblatmonstat_dpc01n1'}
]

# You could add the local_path to the function to define individual places for the
# files that you download.
local_path = r'C:\Users\urale\Desktop\logs'

# 3. Iterate through the list_of_servers, using the function above
for server in list_of_servers:
    get_server_files(local_path, **server)
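
(The question also asks about detecting whether a server is online. The code above does not handle that; one hedged option, as an alternative to the plain loop in step 3, is to catch the connection errors paramiko raises and simply move on to the next server, as sketched below using the get_server_files function defined above.)

# Skip servers that are offline or refuse the connection
for server in list_of_servers:
    try:
        get_server_files(local_path, **server)
    except (paramiko.SSHException, OSError) as exc:
        print('Skipping ' + server['host'] + ': ' + str(exc))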
