Skip to content
This repository has been archived by the owner on Sep 6, 2023. It is now read-only.

Commit

Permalink
add logging
Browse files Browse the repository at this point in the history
  • Loading branch information
esoadamo committed Apr 20, 2019
1 parent 114c19d commit 32405d8
Show file tree
Hide file tree
Showing 3 changed files with 62 additions and 23 deletions.
2 changes: 1 addition & 1 deletion data/config.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"scanTime": 30,
"scanTime": 60,
"updater": {
"githubOwner": "esoadamo",
"githubRepo": "simple-guardian",
Expand Down
24 changes: 21 additions & 3 deletions log_manipulator.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import time
from datetime import datetime
from hashlib import md5
from logging import Logger
from os.path import getmtime, getsize
from tempfile import TemporaryFile
from threading import Lock
Expand All @@ -14,16 +15,19 @@ class LogParser:
Class for parsing information about attacks from log files
"""

def __init__(self, file_log, rules, service_name=None): # type: (str, List[str], str) -> None
def __init__(self, file_log, rules, service_name=None, logger=None): # type: (str, List[str], str, Logger) -> None
"""
Initialize the log parser
:param file_log: path to the file with logs
:param rules: list of string filters/rules
:param service_name: optional name of the service. If not specified then found attacks are not assigned to any
service
:param logger: optional logger. If specified, LogParser emits its log messages to it
"""
self.file_log = file_log
self.rules = [rule if type(rule) == Rule else Rule(rule, service_name) for rule in rules]
self.logger = logger

self._last_file_size = 0
self._last_file_modification_date = None

Expand All @@ -41,43 +45,57 @@ def parse_attacks(self, max_age=None): # type: (float) -> dict
:param max_age: optional, in seconds. If attack is older as this then it is ignored
:return: dictionary. Key is the IP that attacked and value is list of dictionaries with data about every attack
"""
if self.logger is not None:
self.logger.debug('parsing attacks for %s' % self.file_log)

attacks = {}

curr_file_size = getsize(self.file_log)
curr_file_modification_time = getmtime(self.file_log)

if self._last_file_size == curr_file_size and curr_file_modification_time == self._last_file_modification_date:
if self.logger is not None:
self.logger.debug('nothing changed, nothing new to parse')
# it seems that the files has not changed so skip analyzing it
return attacks

continue_in_scanning = True # when set to True, only new content in file is analyzed

if self._last_file_size > curr_file_size:
# when the current file is smaller, something has happened to it. We will rescan it to be sure
if self.logger is not None:
self.logger.debug('file went smaller since las scan, rescan it')
continue_in_scanning = False
self.force_rescan()

if self._last_file_size == curr_file_size and self._last_file_modification_date != curr_file_modification_time:
# the file has the same size but was still modified, we better rescan it
if self.logger is not None:
self.logger.debug('file is the same size but it was modified,rescan it')
continue_in_scanning = False
self.force_rescan()

with open(self.file_log, 'r') as f:
if continue_in_scanning and self._last_bytes['hash'] is not None:
# check last few bytes if they are same
f.seek(self._last_file_size - self._last_bytes['len'])
f.seek(self._last_file_size - self._last_bytes['len'] - 5)
if md5(f.read(self._last_bytes['len']).encode('utf8')).hexdigest() != self._last_bytes['hash']:
# nope, last few bytes differ, something seems really odd about this file. Better rescan it
if self.logger is not None:
self.logger.debug('last few scanned bytes differ, rescan it')
self.force_rescan()

f.seek(self._last_file_size) # skip all already analyzed content
new_content = f.read()

# save last ten bytes so we can know if the file is still the same during new analyze
content_end = new_content[-256:]
content_end = new_content[-256:-5]
self._last_bytes['hash'] = md5(content_end.encode('utf8')).hexdigest()
self._last_bytes['len'] = len(content_end)

if self.logger is not None:
self.logger.debug('new part: "%s"' % new_content.replace('\n', '\\n'))

log_lines = new_content.splitlines()
del new_content, content_end

Expand Down
59 changes: 40 additions & 19 deletions simple-guardian.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
import json
import logging
import os
import sqlite3
import subprocess
Expand Down Expand Up @@ -50,6 +51,7 @@
# directory with configuration files
CONFIG_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), os.path.pardir, 'data'))

LOGGER_NAME = "SG" # the name of the logger to use
PROFILES_DIR = os.path.join(CONFIG_DIR, 'profiles') # directory with profiles
CONFIG = {} # dictionary with loaded config in main()
ONLINE_DATA = {'loggedIn': False} # type: Dict[str, any] # data about the online server,
Expand Down Expand Up @@ -273,8 +275,9 @@ class ThreadScanner(Thread):
This thread performs the scanning of the logs, looking for attacks and blocking
"""
def run(self):
logger = logging.getLogger(LOGGER_NAME)
while AppRunning.is_running():
print('scanning for attacks')
logger.info('scanning for attacks')
time_scan_start = time.time()
commit_db = False

Expand All @@ -284,7 +287,8 @@ def run(self):

for profile, profile_data in profiles_copy.items():
if 'parser' not in profile_data: # link the parser with the profile
profile_data['parser'] = log_manipulator.LogParser(profile_data['logFile'], profile_data['filters'])
profile_data['parser'] = log_manipulator.LogParser(profile_data['logFile'], profile_data['filters']
, logger=logger)
PROFILES_LOCK.acquire()
if profile in PROFILES: # propagate the change into upcoming scans
PROFILES[profile]['parser'] = profile_data['parser']
Expand Down Expand Up @@ -327,7 +331,7 @@ def run(self):
commit_db = True
if commit_db:
Database.commit()
print('scanning for attacks completed, took %.1f seconds' % (time.time() - time_scan_start))
logger.info('scanning for attacks completed, took %.1f seconds' % (time.time() - time_scan_start))
AppRunning.sleep_while_running(CONFIG['scanTime'])


Expand All @@ -337,6 +341,7 @@ class Updater:
"""
_updater = None # type: github_updater.GithubUpdater
_excluded_file_names = ["config.json", "blocker"]
_logger = logging.getLogger(LOGGER_NAME)

@staticmethod
def init():
@staticmethod
def update(restart=True):  # type: (bool) -> None
    """
    Update this program to the newest released version.
    :param restart: if set to True, this program will automatically restart to new version after copying new files
    :return: None
    """
    Updater._logger.info('starting update')
    # the directory this script lives in is the target of the update
    this_directory = os.path.abspath(os.path.join(os.path.abspath(__file__), os.path.pardir))
    Updater._updater.get_and_extract_newest_release_to_directory(this_directory, Updater._excluded_file_names)
    if restart:
        Updater._logger.info('update finished, restarting')
        # NOTE(review): exit code 42 presumably tells the supervisor/wrapper to restart us — confirm
        AppRunning.exit(42)
    Updater._logger.info('update finished')

@staticmethod
def update_master(restart=True):  # type: (bool) -> None
    """
    Update this program to the current state of the master branch.
    :param restart: if set to True, this program will automatically restart to new version after copying new files
    :return: None
    """
    Updater._logger.info('starting update to the master branch')
    # the directory this script lives in is the target of the update
    this_directory = os.path.abspath(os.path.join(os.path.abspath(__file__), os.path.pardir))
    Updater._updater.extract_master(this_directory, Updater._excluded_file_names)
    if restart:
        Updater._logger.info('update finished, restarting')
        # NOTE(review): exit code 42 presumably tells the supervisor/wrapper to restart us — confirm
        AppRunning.exit(42)
    Updater._logger.info('update finished')


def list_attacks(before=None, max_limit=None): # type: (int, int) -> List[dict]
Expand Down Expand Up @@ -437,7 +442,8 @@ def load_profiles(): # type: () -> None
Loads profiles from disc
:return: None
"""
print('Loading profiles')
logger = logging.getLogger(LOGGER_NAME)
logger.info('Loading profiles')
PROFILES_LOCK.acquire()
PROFILES.clear()
if not os.path.exists(PROFILES_DIR):
Expand All @@ -451,7 +457,7 @@ def load_profiles(): # type: () -> None
try:
loaded_profiles = json.load(f)
except json.decoder.JSONDecodeError:
print('Invalid profile - not loading (%s)' % file_profile)
logger.info('Invalid profile - not loading (%s)' % file_profile)
continue
for profile, profile_data in loaded_profiles.items():
if profile not in PROFILES:
Expand Down Expand Up @@ -488,6 +494,7 @@ def init_online(): # type: () -> None
:return: None
"""
socket = HSocket(ONLINE_DATA['server_url'], auto_connect=False)
logger = logging.getLogger(LOGGER_NAME)

class ThreadDisconnectOnProgramEnd(Thread):
"""
Expand All @@ -496,7 +503,7 @@ class ThreadDisconnectOnProgramEnd(Thread):
def run(self):
while AppRunning.is_running():
AppRunning.sleep_while_running(2)
print('disconnecting from server')
logger.info('disconnecting from server')
socket.disconnect()

ThreadDisconnectOnProgramEnd().start() # run the thread that will close the socket on program's exit
def login(ok):  # type: (bool) -> None
    """
    Handle the server's answer to our login attempt.
    :param ok: True when the server accepted our login, False when it rejected it
    :return: None
    """
    if not ok:
        # login rejected — most likely our stored credentials/session expired
        logger.info('login with server seems expired, please fix this')
        return
    logger.info('login with server ok')

def get_attacks(data): # type: (dict) -> None
"""
Expand Down Expand Up @@ -681,40 +688,54 @@ def cli():


def main():
# Initialize logging
logger = logging.getLogger(LOGGER_NAME)
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s %(name)-4s %(levelname)-4s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.info('starting up')

# Load global configs
with open(os.path.join(CONFIG_DIR, 'config.json'), 'r') as f:
CONFIG.update(json.load(f))
logger.debug('config.json loaded')

try:
if sys.argv[1] == 'client':
logger.debug('entering client mode')
cli()
exit()
except IndexError:
pass

# Load online config
if os.path.isfile(os.path.join(CONFIG_DIR, 'server.json')):
logger.debug('loading online config')
with open(os.path.join(CONFIG_DIR, 'server.json'), 'r') as f:
ONLINE_DATA.update(json.load(f))
if ONLINE_DATA['loggedIn']:
init_online()

# Load all profiles
print('Loading profiles')
logger.info('Loading profiles')
load_profiles()
print('Profiles loaded')
logger.info('Profiles loaded')

# Initialize database
Database.init(os.path.join(CONFIG_DIR, 'db.db'))

print('Blocking all IPs saved in database')
logger.info('Blocking all IPs saved in database')
IPBlocker.block_all_banned()

# Check for updates and perform automatic update if enabled and available
Updater.init()
update_available = Updater.update_available()
print('You are up to date' if not update_available
else 'There is another version on the server: %s (you have %s)' % (Updater.get_latest_name(), VERSION_TAG))
logger.info('You are up to date' if not update_available
else 'There is another version on the server: %s (you have %s)'
% (Updater.get_latest_name(), VERSION_TAG))
if update_available and CONFIG.get('updater', {}).get('autoupdate', False):
Updater.update()

Expand Down

0 comments on commit 32405d8

Please sign in to comment.