download_speed/downloader.py

import requests
import datetime
import model
import config


def get_servers():
    # Build {name: {"name": ..., "url": ...}} from the "servers" section of the config.
    js = config.read_json()
    _servers = js["servers"]
    servers_dic = {}
    for s in _servers:
        servers_dic[s] = {}
        servers_dic[s]["name"] = s
        servers_dic[s]["url"] = _servers[s]["url"]
    return servers_dic
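
# get_servers() assumes config.read_json() returns a dict shaped roughly like the
# sketch below; the server name and URL here are illustrative placeholders, not
# values taken from this repository:
#
# {
#     "servers": {
#         "some-server": {
#             "url": "https://example.com/100MB.bin"
#         }
#     }
# }
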

# file_size is in bytes
# longest_time is in seconds
def download_file(chunk_size=4 * 1024, file_size=100 * 1024 * 1024, longest_time=60):
    """Download from every configured server and record the measured speed in Mbit/s."""
    execution_datetime = datetime.datetime.now()
    print(execution_datetime)
    servers = get_servers()
    for server_name in servers:
        server = servers[server_name]
        url = server["url"]
        server_name = server["name"]
        chunk_read = 0
        s_time = datetime.datetime.now()
        duration = longest_time
        try:
            with requests.get(url, stream=True, timeout=longest_time) as r:
                for chunk in r.iter_content(chunk_size):
                    if chunk:
                        chunk_read += len(chunk)
                    e_time = datetime.datetime.now()
                    duration = (e_time - s_time).total_seconds()
                    # Stop once enough data has been read or the time limit is hit.
                    if chunk_read >= file_size:
                        break
                    if duration >= longest_time:
                        print(
                            "{server} took longer than {longest_time} seconds. Stopped.".format(
                                longest_time=longest_time, server=server_name
                            )
                        )
                        break
            speed = chunk_read / duration / 1024 / 1024 * 8  # megabits per second
            model.add_record(execution_datetime, server_name, speed)
            print("{server}: {speed} Mbit/s".format(server=server_name, speed=speed))
        except requests.exceptions.Timeout as e:
            print(
                "Request timed out (timeout: {timeout} seconds). Error: {err}".format(
                    err=str(e), timeout=longest_time
                )
            )
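

# Usage sketch: assuming config.read_json() and model.add_record behave as used
# above, running this file directly performs one measurement pass over every
# configured server and records the results.
if __name__ == "__main__":
    download_file()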