1. Add logger
This commit is contained in:
parent
83b6166daf
commit
803059f4a2
|
|
@ -64,11 +64,16 @@ def get(_setting):
|
|||
return None
|
||||
|
||||
|
||||
def find_config_file_path():
|
||||
def get_main_path():
    """Return the project root: the parent directory of this module's directory.

    Uses the frame of this function (via ``inspect``) rather than ``__file__``
    so the result is stable even if the module is imported oddly.

    Returns:
        str: absolute path of the directory one level above this file's
        directory.
    """
    # Directory containing this source file, symlinks resolved.
    current_frame_file_path = os.path.dirname(
        os.path.realpath(inspect.getfile(inspect.currentframe()))
    )
    # One level up is the project root. (The original also computed an unused
    # `config_file_dir` with the identical expression; removed.)
    return os.path.dirname(current_frame_file_path)
|
||||
|
||||
|
||||
def find_config_file_path():
    """Return the path of ``config.json`` in the project root.

    The path is normalized to use forward slashes only, so the result is
    identical on Windows and POSIX systems.

    Returns:
        str: ``<main_path>/config.json`` with ``/`` separators.
    """
    candidate = get_main_path() + "/config.json"
    # Normalize Windows-style separators to forward slashes.
    return candidate.replace("\\", "/")
|
||||
|
|
|
|||
|
|
@ -1,17 +1,21 @@
|
|||
import requests
|
||||
import datetime
|
||||
from download_speed import config, model
|
||||
from download_speed import log
|
||||
|
||||
logger = log.get_logger(__name__)
|
||||
|
||||
# file_size if byte
|
||||
# longest_time seconds
|
||||
|
||||
|
||||
def download_file(chunk_size=4 * 1024, file_size=100 * 1024 * 1024, longest_time=60):
|
||||
config_file_size = config.get("download_file_size")
|
||||
if config_file_size is not None:
|
||||
file_size = config_file_size
|
||||
execution_datetime = datetime.datetime.now()
|
||||
print("Download at {time}".format(time=execution_datetime))
|
||||
print(
|
||||
logger.info("Download at {time}".format(time=execution_datetime))
|
||||
logger.info(
|
||||
"Download {MB_file_size}Mbyte file.".format(
|
||||
MB_file_size=file_size / 1024 / 1024
|
||||
)
|
||||
|
|
@ -23,7 +27,7 @@ def download_file(chunk_size=4 * 1024, file_size=100 * 1024 * 1024, longest_time
|
|||
server_name = server["name"]
|
||||
chunk_read = 0
|
||||
s_time = datetime.datetime.now()
|
||||
print("start time:{s_time}".format(s_time=s_time))
|
||||
logger.info("start time:{s_time}".format(s_time=s_time))
|
||||
duration = longest_time
|
||||
try:
|
||||
with requests.get(url, stream=True, timeout=longest_time) as r:
|
||||
|
|
@ -35,18 +39,18 @@ def download_file(chunk_size=4 * 1024, file_size=100 * 1024 * 1024, longest_time
|
|||
if chunk_read >= file_size:
|
||||
break
|
||||
if duration >= longest_time:
|
||||
print(
|
||||
logger.info(
|
||||
"{server} longer than {longest_time} seconds. Stopped.".format(
|
||||
longest_time=longest_time, server=server_name
|
||||
)
|
||||
)
|
||||
break
|
||||
print("end time:{e_time}".format(e_time=e_time))
|
||||
logger.info("end time:{e_time}".format(e_time=e_time))
|
||||
speed = chunk_read / duration / 1024 / 1024 * 8 # mega bit
|
||||
model.add_record(execution_datetime, server_name, speed)
|
||||
print("{server}:{speed} Mbit/s".format(server=server_name, speed=speed))
|
||||
logger.info("{server}:{speed} Mbit/s".format(server=server_name, speed=speed))
|
||||
except requests.exceptions.Timeout as e:
|
||||
print(
|
||||
logger.info(
|
||||
"timeout before wait {timeout} seconds. Error:{err}".format(
|
||||
err=str(e), timeout=longest_time
|
||||
)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,48 @@
|
|||
import logging
|
||||
import datetime
|
||||
import sys
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import download_speed.config as config
|
||||
|
||||
|
||||
def find_main():
    """Return the project root directory, as reported by the config module."""
    main_dir = config.get_main_path()
    return main_dir
|
||||
|
||||
|
||||
def _custom_time(*args):
|
||||
utc_dt = datetime.datetime.utcnow()
|
||||
converted = utc_dt + datetime.timedelta(hours=8)
|
||||
return converted.timetuple()
|
||||
|
||||
|
||||
import os

# Shared handlers: every logger returned by get_logger() writes both to
# stdout and to a size-rotated file under <main>/log/.
streamHd = logging.StreamHandler(sys.stdout)
_formatter = logging.Formatter("%(asctime)s : %(name)s : %(levelname)s : %(message)s")
# Render all timestamps in UTC+8 (see _custom_time).
_formatter.converter = _custom_time
streamHd.setFormatter(_formatter)

_main_dir = find_main()
_log_file_path = "{0}/log/fund_log.log".format(_main_dir)
# RotatingFileHandler opens the file at construction time; create the log
# directory first so a fresh checkout does not crash with FileNotFoundError.
os.makedirs(os.path.dirname(_log_file_path), exist_ok=True)
rotatingFd = RotatingFileHandler(_log_file_path, maxBytes=8000 * 1024, backupCount=10)
rotatingFd.setFormatter(_formatter)

# Registry of every logger handed out, so set_global_logger_level() and
# stop_logging() can act on all of them at once.
_logger_list = []
|
||||
|
||||
|
||||
def get_logger(logger_name):
    """Return a DEBUG-level logger wired to the shared handlers.

    The logger gets both the module-level rotating-file handler and the
    stdout stream handler, and is registered in ``_logger_list`` so
    ``set_global_logger_level()`` and ``stop_logging()`` can reach it later.

    Bug fix: the original assigned ``rotating_hd = streamHd``, so the
    rotating file handler was never attached — nothing reached the log file
    and stdout received every message twice.

    Args:
        logger_name: name passed through to ``logging.getLogger``.

    Returns:
        logging.Logger: the configured logger.
    """
    logger = logging.getLogger(logger_name)
    # Guard against duplicate handlers when the same name is requested twice
    # (logging.getLogger returns the same Logger object for a given name).
    if rotatingFd not in logger.handlers:
        logger.addHandler(rotatingFd)
    if streamHd not in logger.handlers:
        logger.addHandler(streamHd)
    logger.setLevel(logging.DEBUG)
    if logger not in _logger_list:
        _logger_list.append(logger)
    return logger
|
||||
|
||||
|
||||
def set_global_logger_level(level):
    """Apply *level* to every logger previously created via get_logger()."""
    for registered in _logger_list:
        registered.setLevel(level)
|
||||
|
||||
|
||||
def stop_logging(stop):
    """Disable (stop=True) or re-enable (stop=False) all registered loggers."""
    for registered in _logger_list:
        registered.disabled = stop
|
||||
|
|
@ -2,12 +2,15 @@ from download_speed import config, model
|
|||
import urllib.parse as parse
|
||||
import ping3
|
||||
import datetime
|
||||
from download_speed import log
|
||||
|
||||
logger = log.get_logger(__name__)
|
||||
|
||||
|
||||
def ping():
|
||||
servers = config.get_servers()
|
||||
execution_datetime = datetime.datetime.now()
|
||||
print("Ping at {time}".format(time=execution_datetime))
|
||||
logger.info("Ping at {time}".format(time=execution_datetime))
|
||||
for server_name in servers:
|
||||
server = servers[server_name]
|
||||
url = server["url"]
|
||||
|
|
@ -22,7 +25,7 @@ def ping():
|
|||
for i in range(4):
|
||||
ping_delay_sum += ping3.ping(addr, timeout=5) * 1000 # in millionsecond
|
||||
ping_delay = ping_delay_sum / 4
|
||||
print(
|
||||
logger.info(
|
||||
"Ping delay for {serv} is {ping_val}".format(
|
||||
serv=server_name, ping_val=ping_delay
|
||||
)
|
||||
|
|
|
|||
4
main.py
4
main.py
|
|
@ -1,7 +1,9 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
from download_speed import downloader
|
||||
from download_speed import log
|
||||
|
||||
logger = log.get_logger(__name__)
|
||||
if __name__ == "__main__":
|
||||
downloader.download_file()
|
||||
print("Finished.")
|
||||
logger.info("Finished.")
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
from download_speed import ping
|
||||
from download_speed import log
|
||||
|
||||
logger = log.get_logger(__name__)
|
||||
if __name__ == "__main__":
|
||||
ping.ping()
|
||||
print("Finished.")
|
||||
logger.info("Finished.")
|
||||
|
|
|
|||
Loading…
Reference in New Issue