Not too long ago I wrote a quick article on how to upload files using boto and the multiprocessing module or the threading module. Here’s yet another variant, this time using a Queue!
AWS_KEY = "" AWS_SECRET = "" import time from boto.s3.connection import S3Connection from Queue import * from threading import Thread number_workers = 4 q = Queue() filenames = ['1.json', '2.json', '3.json', '4.json', '5.json', '6.json', '7.json', '8.json', '9.json', '10.json'] # the actual upload def upload(myfile): conn = S3Connection(aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET) bucket = conn.get_bucket("parallel_upload_tests") key = bucket.new_key(myfile).set_contents_from_string('some content') print "Uploaded %s." % myfile # each worker does this job def pull_from_queue(): while True: item = q.get() print "Found %s in queue" % item upload(item) # init the workers for i in range(number_workers): t = Thread(target=pull_from_queue) t.daemon = True t.start() # put files in the queue for fname in filenames: q.put(fname) while True: time.sleep(1) |