I am uploading a large file using the Python requests package, and I can’t find any way to get feedback about the progress of the upload. I have seen a number of progress meters for downloading a file, but these will not work for a file upload.
The ideal solution would be some sort of callback method such as:
def progress(percent):
    """Hypothetical callback: report upload progress as a percentage."""
    print(percent)

# NOTE(review): requests.post() has no `callback` parameter -- this is the
# ideal API the question wishes existed, not working code.  `URL` and
# `hugeFileHandle` are placeholders defined elsewhere.
r = requests.post(URL, files={'f': hugeFileHandle}, callback=progress)
Thanks in advance for your help :)
Advertisement
Answer
requests
doesn’t support upload streaming e.g.:
import os
import sys


class upload_in_chunks(object):
    """Iterable that yields a file's contents in fixed-size chunks.

    While iterating, writes a progress percentage to stderr
    (``\r``-overwriting the same line).  ``__len__`` reports the total
    file size so a consumer can set a Content-Length header.

    Note: ``readsofar`` is cumulative across iterations, so the progress
    display is only meaningful for a single pass over the file.
    """

    def __init__(self, filename, chunksize=1 << 13):
        self.filename = filename
        self.chunksize = chunksize
        self.totalsize = os.path.getsize(filename)
        self.readsofar = 0

    def __iter__(self):
        with open(self.filename, 'rb') as f:
            while True:
                data = f.read(self.chunksize)
                if not data:
                    sys.stderr.write("\n")  # finish the progress line
                    break
                self.readsofar += len(data)
                percent = self.readsofar * 1e2 / self.totalsize
                sys.stderr.write("\r{percent:3.0f}%".format(percent=percent))
                yield data

    def __len__(self):
        # Total size in bytes, not the number of chunks.
        return self.totalsize


if __name__ == "__main__":
    import requests  # pip install requests

    # XXX fails: requests does not stream from a plain iterable with
    # __len__ the way this code hopes -- kept as the demonstration of
    # the problem the answer works around below.
    r = requests.post("http://httpbin.org/post",
                      data=upload_in_chunks(__file__, chunksize=10))
btw, if you don’t need to report progress, you could use a memory-mapped file to upload a large file.
To work around it, you could create a file adapter similar to the one from urllib2 POST progress monitoring:
class IterableToFileAdapter(object):
    """Wrap an iterable of byte chunks in a minimal file-like interface.

    Exposes just enough (``read`` and ``__len__``) for requests to treat
    it as an uploadable body while the underlying iterable reports
    progress as it is consumed.
    """

    def __init__(self, iterable):
        self.iterator = iter(iterable)
        self.length = len(iterable)

    def read(self, size=-1):
        # TBD: add buffer for `len(data) > size` case
        # Returns one whole chunk per call (size is ignored);
        # b'' signals end-of-file once the iterator is exhausted.
        return next(self.iterator, b'')

    def __len__(self):
        return self.length
Example
it = upload_in_chunks(__file__, 10)
r = requests.post("http://httpbin.org/post",
                  data=IterableToFileAdapter(it))

# Pretty-print the JSON response.  In requests >= 1.0, Response.json is
# a method, so it must be called: r.json().
import json
json.dump(r.json(), sys.stdout, indent=4, ensure_ascii=False)