# 2012-02-06 17:21:23 +00:00
|
|
|
import time
|
|
|
|
import timeit
|
|
|
|
import random
|
|
|
|
import multiprocessing
|
|
|
|
|
|
|
|
import requests
|
|
|
|
|
|
|
|
"""
|
|
|
|
Fire up various threads to unglue.it / please.unglueit.com to see how much load can be handled.
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
def get_with_time(url, label=None):
|
|
|
|
print "get_with_time: %s" % (label)
|
|
|
|
time0 = time.time()
|
|
|
|
page = requests.get(url).content
|
|
|
|
time1 = time.time()
|
|
|
|
return (page, time1-time0, label)
|
|
|
|
|
|
|
|
if __name__ == '__main__':
|
|
|
|
|
|
|
|
n_calls = 100
|
2012-02-13 19:28:21 +00:00
|
|
|
n_workers = 15
|
2012-02-06 17:21:23 +00:00
|
|
|
pool = multiprocessing.Pool(n_workers)
|
|
|
|
|
|
|
|
url = "http://unglue.it/lists/popular"
|
|
|
|
|
|
|
|
results = [pool.apply_async(get_with_time, (url,k)) for k in xrange(n_calls)]
|
2012-02-13 19:28:21 +00:00
|
|
|
print [result.get(999999999)[1] for result in results]
|
2012-02-06 17:21:23 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|