一、传输单个文件到Linux服务器
# Upload a single file to a Linux server over SFTP.
import paramiko

transport = paramiko.Transport(('host', 22))
try:
    transport.connect(username='root', password='123')
    sftp = paramiko.SFTPClient.from_transport(transport)
    # Raw string: the Windows path contains backslash sequences ('\P',
    # '\i', ...) that are invalid escapes in a plain literal -- they emit
    # a DeprecationWarning today and will become a SyntaxError. The raw
    # string has the exact same value as the original literal.
    sftp.put(r'D:\Pycharm\hadoop_spark\ssh_files\id_rsa', '/usr/local/id_rsa')
finally:
    # Always release the SSH transport, even if connect() or put() fails.
    transport.close()

# D:\Pycharm\hadoop_spark\ssh_files\id_rsa -- local path (Windows)
# /usr/local/id_rsa -- remote path (Linux)
# Both sides must include the file name, not just the directory.
二、传输整个目录到Linux服务器
# Upload every file under a local directory tree to a Linux server over SFTP.
import os
import paramiko

transport = paramiko.Transport(('host', 22))
try:
    transport.connect(username='root', password='123')
    sftp = paramiko.SFTPClient.from_transport(transport)
    # The original wrapped this in `for path in dir_list:` where dir_list
    # was never defined (NameError) and `path` was never used -- removed.
    for root, dirs, files in os.walk('D:/Pycharm/hadoop_spark/hadoop_config/'):
        for name in files:
            # Join with `root`, not the base directory: the original
            # hard-coded the base path, which raised FileNotFoundError
            # for any file living in a subdirectory.
            local_path = os.path.join(root, name)
            # NOTE: the remote layout is flat -- files from nested
            # subdirectories all land directly in /usr/local/, so files
            # with the same name overwrite each other.
            sftp.put(local_path, '/usr/local/' + name)
finally:
    # Always release the SSH transport, even if a transfer fails.
    transport.close()