1
0
mirror of https://github.com/RaidMax/IW4M-Admin.git synced 2025-06-10 15:20:48 -05:00

clean up log reader/make it output more useful message if things go wrong

add unflag as a penalty
show bans/tempbans even after they've expired on penalty list
continue making alias links great again
This commit is contained in:
RaidMax
2019-04-05 13:34:03 -05:00
parent 11dc020d7c
commit e3aa62334a
15 changed files with 226 additions and 152 deletions

View File

@@ -5,55 +5,53 @@ import time
class LogReader(object):
    """Tracks log files by path and hands back only the data appended since the last read."""

    def __init__(self):
        # per-path bookkeeping: {'length': last seen size in bytes, 'read': last read timestamp}
        self.log_file_sizes = {}
        # (if the file changes more than this, ignore) - 0.125 MB
        self.max_file_size_change = 125000
        # (if the time between checks is greater, ignore) - in seconds
        self.max_file_time_change = 10
def read_file(self, path):
    """Return the data appended to the log at *path* since the previous call.

    Returns '' the first time a path is seen, True when the accumulated
    backlog is too large/old and is discarded, False when the path is
    rejected or unreadable, and otherwise the newly appended text.
    """
    # this removes old entries that are no longer valid
    try:
        self._clear_old_logs()
    except Exception as e:
        print('could not clear old logs')
        print(e)
    # prevent traversing directories
    # NOTE: the previous pattern 'r^.+\.\.\\.+$' placed the "r" inside the
    # string, so '^' could never match and the guard was a no-op; this
    # rejects any '..' path component followed by a separator
    if re.search(r'\.\.[\\/]', path):
        return False
    # must be a valid log path and log file
    if not re.search(r'^.+[\\|\/](.+)[\\|\/].+.log$', path):
        return False
    # this is the first time the log has been requested
    if path not in self.log_file_sizes:
        self.log_file_sizes[path] = {
            'length': self.file_length(path),
            'read': time.time()
        }
        return ''
    # grab the previous values
    last_length = self.log_file_sizes[path]['length']
    last_read = self.log_file_sizes[path]['read']
    # the file is being tracked already
    new_file_size = self.file_length(path)
    # the log size was unable to be read (probably the wrong path)
    if new_file_size < 0:
        return False
    now = time.time()
    file_size_difference = new_file_size - last_length
    time_difference = now - last_read
    # update the new size and actually read the data
    self.log_file_sizes[path] = {
        'length': new_file_size,
        'read': now
    }
    # if it's been too long since we read and the amount changed is too great, discard it
    # todo: do we really want old events? maybe make this an "or"
    if file_size_difference > self.max_file_size_change or time_difference > self.max_file_time_change:
        return True
    return self.get_file_lines(path, file_size_difference)
@@ -64,13 +62,23 @@ class LogReader(object):
file_data = file_handle.read(length)
file_handle.close()
return file_data.decode('utf-8')
except:
except Exception as e:
print('could not read the log file at {0}, wanted to read {1} bytes'.format(path, length))
print(e)
return False
def _clear_old_logs(self):
expired_logs = [path for path in self.log_file_sizes if int(time.time() - self.log_file_sizes[path]['read']) > self.max_file_time_change]
for log in expired_logs:
print('removing expired log {0}'.format(log))
del self.log_file_sizes[log]
def file_length(self, path):
    """Return the size of the file at *path* in bytes, or -1 if it cannot be stat'd."""
    try:
        return os.stat(path).st_size
    except Exception as e:
        # -1 is the sentinel read_file checks for an unreadable/missing log
        print('could not get the size of the log file at {0}'.format(path))
        print(e)
        return -1
reader = LogReader()

View File

@@ -6,14 +6,10 @@ class LogResource(Resource):
def get(self, path):
    """Handle GET /log/<path>: return new log data for the base64-url-encoded path.

    Response dict: 'success' is False only when the read failed outright;
    'length' is -1 for an empty read (first read / discarded backlog),
    otherwise the length of the returned data.
    """
    path = urlsafe_b64decode(path).decode('utf-8')
    log_info = reader.read_file(path)
    # read_file returns False on failure, and True or '' when there is no new data
    empty_read = log_info is False or log_info is True
    return {
        'success': log_info is not False,
        'length': -1 if empty_read else len(log_info),
        'data': log_info
    }

View File

@@ -2,10 +2,13 @@ from flask import Flask
from flask_restful import Api
from .log_resource import LogResource
from .restart_resource import RestartResource
import logging
app = Flask(__name__)
def init():
    """Quiet werkzeug's per-request logging and register the REST endpoints."""
    # only surface errors from the built-in request logger
    werkzeug_logger = logging.getLogger('werkzeug')
    werkzeug_logger.setLevel(logging.ERROR)

    rest_api = Api(app)
    for resource, route in ((LogResource, '/log/<string:path>'),
                            (RestartResource, '/restart')):
        rest_api.add_resource(resource, route)