I'm trying to SFTP a file to a remote server in chunks, using threads and the python paramiko library. It opens a local file and sftps chunks to the remote server in different threads. I'm basically following this solution, which uses the same approach to download a large file over SFTP; I would like to send large files instead: Downloading solution
However, I'm getting the following error in write_chunks(), on the line for chunk in infile.readv(chunks):
AttributeError: '_io.BufferedReader' object has no attribute 'readv'
Could anyone help with this error? I thought infile was a file descriptor; I don't understand why it is an _io.BufferedReader object.
import threading, os, time, paramiko

MAX_RETRIES = 10

ftp_server = "server.com"
port = 22
remote_file = "/home/filecopy.bin"
local_file = "/home/file.bin"
ssh_conn = sftp_client = None
username = "none"
password = "none"

# you could make the number of threads relative to file size
NUM_THREADS = 2


def make_filepart_path(file_path, part_number):
    """creates filepart path from filepath"""
    return "%s.filepart.%s" % (file_path, part_number + 1)


def write_chunks(chunks, tnum, remote_file_part, username, password, ftp_server, max_retries):
    ssh_conn = sftp_client = None
    for retry in range(max_retries):
        try:
            ssh_conn = paramiko.Transport((ftp_server, port))
            ssh_conn.connect(username=username, password=password)
            sftp_client = paramiko.SFTPClient.from_transport(ssh_conn)
            with sftp_client.open(remote_file_part, "wb") as outfile:
                with open(local_file, "rb") as infile:
                    for chunk in infile.readv(chunks):
                        outfile.write(chunk)
            break
        except (EOFError, paramiko.ssh_exception.SSHException, OSError) as x:
            retry += 1
            print("%s %s Thread %s - > retrying %s..." % (type(x), x, tnum, retry))
            time.sleep(abs(retry) * 10)
        finally:
            if hasattr(sftp_client, "close") and callable(sftp_client.close):
                sftp_client.close()
            if hasattr(ssh_conn, "close") and callable(ssh_conn.close):
                ssh_conn.close()


start_time = time.time()

for retry in range(MAX_RETRIES):
    try:
        ssh_conn = paramiko.Transport((ftp_server, port))
        ssh_conn.connect(username=username, password=password)
        sftp_client = paramiko.SFTPClient.from_transport(ssh_conn)
        # connect to get the file's size in order to calculate chunks
        #filesize = sftp_client.stat(remote_file).st_size
        filesize = os.stat(local_file).st_size
        sftp_client.close()
        ssh_conn.close()
        chunksize = pow(2, 12)
        chunks = [(offset, chunksize) for offset in range(0, filesize, chunksize)]
        thread_chunk_size = (len(chunks) // NUM_THREADS) + 1
        # break the chunks into sub lists to hand off to threads
        thread_chunks = [chunks[i:i + thread_chunk_size] for i in range(0, len(chunks) - 1, thread_chunk_size)]
        threads = []
        fileparts = []
        for thread_num in range(len(thread_chunks)):
            remote_file_part = make_filepart_path(remote_file, thread_num)
            args = (thread_chunks[thread_num], thread_num, remote_file_part, username, password, ftp_server, MAX_RETRIES)
            threads.append(threading.Thread(target=write_chunks, args=args))
            fileparts.append(remote_file_part)
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        # join file parts into one file, remove fileparts
        with sftp_client.open(remote_file_part, "wb") as outfile:
            for filepart in fileparts:
                with open(filepart, "rb") as infile:
                    outfile.write(infile.read())
                os.remove(filepart)
        break
    except (EOFError, paramiko.ssh_exception.SSHException, OSError) as x:
        retry += 1
        print("%s %s - > retrying %s..." % (type(x), x, retry))
        time.sleep(abs(retry) * 10)
    finally:
        if hasattr(sftp_client, "close") and callable(sftp_client.close):
            sftp_client.close()
        if hasattr(ssh_conn, "close") and callable(ssh_conn.close):
            ssh_conn.close()

print("Loading File %s Took %d seconds " % (sftp_file, time.time() - start_time))
Stack trace:
Exception in thread Thread-4:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "simpleNNInference.py", line 210, in write_chunks
for chunk in infile.readv(chunks):
AttributeError: '_io.BufferedReader' object has no attribute 'readv'
Exception in thread Thread-3:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "simpleNNInference.py", line 210, in write_chunks
for chunk in infile.readv(chunks):
AttributeError: '_io.BufferedReader' object has no attribute 'readv'
1 Answer
See the following example for how to do a parallel multipart upload of one big file.
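Here is a minimal sketch of the idea, reusing the question's connection variables and its make_filepart_path() helper; the upload_part name and the 32 KiB read size are illustrative choices, not a fixed API:

import os
import threading
import paramiko

def upload_part(part_number, offset, length):
    # one SSH/SFTP connection per thread; paramiko sessions are not thread-safe
    transport = paramiko.Transport((ftp_server, port))
    transport.connect(username=username, password=password)
    sftp = paramiko.SFTPClient.from_transport(transport)
    try:
        with open(local_file, "rb") as infile, \
                sftp.open(make_filepart_path(remote_file, part_number), "wb") as outfile:
            infile.seek(offset)              # jump to this part's byte range
            remaining = length
            while remaining > 0:
                chunk = infile.read(min(32768, remaining))
                if not chunk:                # past end of file on the last part
                    break
                outfile.write(chunk)
                remaining -= len(chunk)
    finally:
        sftp.close()
        transport.close()

filesize = os.stat(local_file).st_size
part_size = (filesize // NUM_THREADS) + 1    # bytes per thread, rounded up
threads = [threading.Thread(target=upload_part, args=(i, i * part_size, part_size))
           for i in range(NUM_THREADS)]
for t in threads:
    t.start()
for t in threads:
    t.join()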
Note that most SFTP servers (including OpenSSH, until the recent 9.0) do not allow merging files remotely, so you have to revert to a shell command for that.
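A sketch of that remote merge, assuming the account also has shell access and a POSIX shell on the server; the cat/rm one-liner is illustrative:

import paramiko

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(ftp_server, port=port, username=username, password=password)
parts = " ".join(make_filepart_path(remote_file, i) for i in range(NUM_THREADS))
# concatenate the parts in order into the target file, then delete them
stdin, stdout, stderr = ssh.exec_command("cat %s > %s && rm %s" % (parts, remote_file, parts))
stdout.channel.recv_exit_status()  # block until the remote command finishes
ssh.close()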
I do not know how well this is backed by the SFTP specification, but many SFTP servers (including OpenSSH) allow parallel writes to the same file from multiple connections, so you can even skip merging the parts and upload directly into the respective ranges of the target file:
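A minimal sketch of that variant, assuming the server (e.g. OpenSSH) tolerates parallel writes to one file from several connections; upload_range and the pre-creation step are illustrative, and the connection variables are again taken from the question:

import os
import threading
import paramiko

def open_sftp():
    # one connection per thread; paramiko sessions are not thread-safe
    transport = paramiko.Transport((ftp_server, port))
    transport.connect(username=username, password=password)
    return transport, paramiko.SFTPClient.from_transport(transport)

def upload_range(offset, length):
    transport, sftp = open_sftp()
    try:
        # "r+" opens the existing remote file for updating without truncating it
        with open(local_file, "rb") as infile, sftp.open(remote_file, "r+") as outfile:
            infile.seek(offset)
            outfile.seek(offset)      # each connection writes only its own range
            remaining = length
            while remaining > 0:
                data = infile.read(min(32768, remaining))
                if not data:          # past end of file on the last part
                    break
                outfile.write(data)
                remaining -= len(data)
    finally:
        sftp.close()
        transport.close()

# create the target file once so every thread can open it with "r+"
transport, sftp = open_sftp()
sftp.open(remote_file, "w").close()
sftp.close()
transport.close()

filesize = os.stat(local_file).st_size
part_size = (filesize // NUM_THREADS) + 1
threads = [threading.Thread(target=upload_range, args=(i * part_size, part_size))
           for i in range(NUM_THREADS)]
for t in threads:
    t.start()
for t in threads:
    t.join()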