Merge pull request #244 from Data-Monkey/split_it_up

Split it up
This commit is contained in:
jokob-sk
2023-06-04 08:17:59 +10:00
committed by GitHub
7 changed files with 182 additions and 151 deletions

0
db/pialert.db Executable file → Normal file
View File

View File

@@ -13,10 +13,9 @@
#=============================================================================== #===============================================================================
# IMPORTS # IMPORTS
#=============================================================================== #===============================================================================
from __future__ import print_function #from __future__ import print_function
import sys import sys
from collections import namedtuple
import time import time
import datetime import datetime
import multiprocessing import multiprocessing
@@ -25,7 +24,7 @@ import multiprocessing
import conf import conf
from const import * from const import *
from logger import mylog from logger import mylog
from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState from helper import filePermissions, isNewVersion, timeNow, updateState
from api import update_api from api import update_api
from networkscan import process_scan, scan_network from networkscan import process_scan, scan_network
from initialise import importConfigs from initialise import importConfigs
@@ -75,6 +74,7 @@ main structure of Pi Alert
""" """
def main (): def main ():
mylog('debug', ['[MAIN] Setting up ...'])
conf.time_started = datetime.datetime.now() conf.time_started = datetime.datetime.now()
conf.cycle = "" conf.cycle = ""
@@ -109,7 +109,7 @@ def main ():
# Open DB once and keep open # Open DB once and keep open
# Opening / closing DB frequently actually causes more issues # Opening / closing DB frequently actually causes more issues
db = DB() # instance of class DB db = DB() # instance of class DB
db.openDB() db.open()
sql = db.sql # To-Do replace with the db class sql = db.sql # To-Do replace with the db class
# Upgrade DB if needed # Upgrade DB if needed
@@ -119,13 +119,12 @@ def main ():
#=============================================================================== #===============================================================================
# This is the main loop of Pi.Alert # This is the main loop of Pi.Alert
#=============================================================================== #===============================================================================
while True: while True:
# update time started # update time started
time_started = datetime.datetime.now() # not sure why we need this ... time_started = datetime.datetime.now() # not sure why we need this ...
loop_start_time = timeNow() loop_start_time = timeNow()
mylog('debug', '[MAIN] Stating loop') mylog('debug', '[MAIN] Starting loop')
# re-load user configuration and plugins # re-load user configuration and plugins
importConfigs(db) importConfigs(db)
@@ -311,4 +310,5 @@ def main ():
# BEGIN # BEGIN
#=============================================================================== #===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
mylog('debug', ['[__main__] Welcome to Pi.Alert'])
sys.exit(main()) sys.exit(main())

View File

@@ -10,7 +10,11 @@ userSubnets = []
mySchedules = [] # bad solution for global - TO-DO mySchedules = [] # bad solution for global - TO-DO
plugins = [] # bad solution for global - TO-DO plugins = [] # bad solution for global - TO-DO
tz = '' tz = ''
lastTimeImported = 0 # Time the config was last imported
# modified time of the most recently imported config file
# set to a small value to force import at first run
lastImportedConfFile = 1.1
plugins_once_run = False plugins_once_run = False
newVersionAvailable = False newVersionAvailable = False
time_started = '' time_started = ''

View File

@@ -26,7 +26,11 @@ piholeDhcpleases = '/etc/pihole/dhcp.leases'
#=============================================================================== #===============================================================================
# SQL queries # SQL queries
#=============================================================================== #===============================================================================
sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices" sql_devices_all = """select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group,
dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP,
dev_PresentLastScan, dev_LastNotification, dev_NewDevice,
dev_Network_Node_MAC_ADDR, dev_Network_Node_port,
dev_Icon from Devices"""
sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1" sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
sql_nmap_scan_all = "SELECT * FROM Nmap_Scan" sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
sql_pholus_scan_all = "SELECT * FROM Pholus_Scan" sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
@@ -36,12 +40,12 @@ sql_plugins_objects = "SELECT * FROM Plugins_Objects"
sql_language_strings = "SELECT * FROM Plugins_Language_Strings" sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
sql_plugins_events = "SELECT * FROM Plugins_Events" sql_plugins_events = "SELECT * FROM Plugins_Events"
sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC" sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices sql_new_devices = """SELECT * FROM (
WHERE eve_PendingAlertEmail = 1 SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC
AND eve_EventType = 'New Device' FROM Events_Devices
ORDER BY eve_DateTime ) t1 WHERE eve_PendingAlertEmail = 1
LEFT JOIN AND eve_EventType = 'New Device'
( ORDER BY eve_DateTime ) t1
SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices LEFT JOIN
) t2 ( SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices) t2
ON t1.dev_MAC = t2.dev_MAC_t2""" ON t1.dev_MAC = t2.dev_MAC_t2"""

View File

@@ -15,13 +15,17 @@ from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateSta
class DB(): class DB():
"""
DB Class to provide the basic database interactions.
Open / Commit / Close / read / write
"""
def __init__(self): def __init__(self):
self.sql = None self.sql = None
self.sql_connection = None self.sql_connection = None
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
def openDB (self): def open (self):
# Check if DB is open # Check if DB is open
if self.sql_connection != None : if self.sql_connection != None :
mylog('debug','openDB: databse already open') mylog('debug','openDB: databse already open')
@@ -69,57 +73,64 @@ class DB():
# Cleanup / upkeep database # Cleanup / upkeep database
#=============================================================================== #===============================================================================
def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA): def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA):
# Header """
#updateState(self,"Upkeep: Clean DB") Cleaning out old records from the tables that don't need to keep all data.
mylog('verbose', ['[', startTime, '] Upkeep Database:' ]) """
# Header
#updateState(self,"Upkeep: Clean DB")
mylog('verbose', ['[DB Cleanup] Upkeep Database:' ])
# Cleanup Online History # Cleanup Online History
mylog('verbose', [' Online_History: Delete all but keep latest 150 entries']) mylog('verbose', ['[DB Cleanup] Online_History: Delete all but keep latest 150 entries'])
self.sql.execute ("""DELETE from Online_History where "Index" not in ( SELECT "Index" from Online_History order by Scan_Date desc limit 150)""") self.sql.execute ("""DELETE from Online_History where "Index" not in (
SELECT "Index" from Online_History
order by Scan_Date desc limit 150)""")
mylog('verbose', ['[DB Cleanup] Optimize Database'])
# Cleanup Events
mylog('verbose', ['[DB Cleanup] Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
self.sql.execute ("""DELETE FROM Events
WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')""")
# Cleanup Plugin Events History
mylog('verbose', ['[DB Cleanup] Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
self.sql.execute ("""DELETE FROM Plugins_History
WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')""")
# Cleanup Pholus_Scan
if PHOLUS_DAYS_DATA != 0:
mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days'])
# improvement possibility: keep at least N per mac
self.sql.execute ("""DELETE FROM Pholus_Scan
WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')""")
mylog('verbose', [' Optimize Database']) # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table
# Cleanup Events mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all duplicates'])
mylog('verbose', [' Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) self.sql.execute ("""DELETE FROM Pholus_Scan
self.sql.execute ("DELETE FROM Events WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") WHERE rowid > (
SELECT MIN(rowid) FROM Pholus_Scan p2
WHERE Pholus_Scan.MAC = p2.MAC
AND Pholus_Scan.Value = p2.Value
AND Pholus_Scan.Record_Type = p2.Record_Type
);""")
# De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table
mylog('verbose', [' Nmap_Scan: Delete all duplicates'])
self.sql.execute ("""DELETE FROM Nmap_Scan
WHERE rowid > (
SELECT MIN(rowid) FROM Nmap_Scan p2
WHERE Nmap_Scan.MAC = p2.MAC
AND Nmap_Scan.Port = p2.Port
AND Nmap_Scan.State = p2.State
AND Nmap_Scan.Service = p2.Service
);""")
# Cleanup Plugin Events History # Shrink DB
mylog('verbose', [' Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) mylog('verbose', [' Shrink Database'])
self.sql.execute ("DELETE FROM Plugins_History WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") self.sql.execute ("VACUUM;")
self.commitDB()
# Cleanup Pholus_Scan
if PHOLUS_DAYS_DATA != 0:
mylog('verbose', [' Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days'])
self.sql.execute ("DELETE FROM Pholus_Scan WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')") # improvement possibility: keep at least N per mac
# De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table
mylog('verbose', [' Pholus_Scan: Delete all duplicates'])
self.sql.execute ("""DELETE FROM Pholus_Scan
WHERE rowid > (
SELECT MIN(rowid) FROM Pholus_Scan p2
WHERE Pholus_Scan.MAC = p2.MAC
AND Pholus_Scan.Value = p2.Value
AND Pholus_Scan.Record_Type = p2.Record_Type
);""")
# De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table
mylog('verbose', [' Nmap_Scan: Delete all duplicates'])
self.sql.execute ("""DELETE FROM Nmap_Scan
WHERE rowid > (
SELECT MIN(rowid) FROM Nmap_Scan p2
WHERE Nmap_Scan.MAC = p2.MAC
AND Nmap_Scan.Port = p2.Port
AND Nmap_Scan.State = p2.State
AND Nmap_Scan.Service = p2.Service
);""")
# Shrink DB
mylog('verbose', [' Shrink Database'])
self.sql.execute ("VACUUM;")
self.commitDB()
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
def upgradeDB(self): def upgradeDB(self):
"""
Check the current tables in the DB and upgrade them if necessary
"""
sql = self.sql #TO-DO sql = self.sql #TO-DO
# indicates, if Online_History table is available # indicates, if Online_History table is available

View File

@@ -7,8 +7,8 @@ from pathlib import Path
import datetime import datetime
import conf import conf
from const import * from const import fullConfPath
from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam from helper import collect_lang_strings, updateSubnets, initOrSetParam
from logger import mylog from logger import mylog
from api import update_api from api import update_api
from scheduler import schedule_class from scheduler import schedule_class
@@ -43,15 +43,26 @@ def importConfigs (db):
sql = db.sql sql = db.sql
lastTimeImported = 0 # get config file name
# get config file
config_file = Path(fullConfPath) config_file = Path(fullConfPath)
# Skip import if last time of import is NEWER than file age # Only import file if the file was modified since last import.
if (os.path.getmtime(config_file) < lastTimeImported) : # this avoids time zone issues as we just compare the previous timestamp to the current time stamp
mylog('debug', ['[Import Config] checking config file '])
mylog('debug', ['[Import Config] lastImportedConfFile :', conf.lastImportedConfFile])
mylog('debug', ['[Import Config] file modified time :', os.path.getmtime(config_file)])
if (os.path.getmtime(config_file) == conf.lastImportedConfFile) :
mylog('debug', ['[Import Config] skipping config file import'])
return return
conf.lastImportedConfFile = os.path.getmtime(config_file)
mylog('debug', ['[Import Config] importing config file'])
conf.mySettings = [] # reset settings conf.mySettings = [] # reset settings
conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
@@ -237,6 +248,7 @@ def read_config_file(filename):
""" """
returns dict on the config file key:value pairs returns dict on the config file key:value pairs
""" """
mylog('info', '[Config] reading config file')
# load the variables from pialert.conf # load the variables from pialert.conf
code = compile(filename.read_text(), filename.name, "exec") code = compile(filename.read_text(), filename.name, "exec")
confDict = {} # config dictionary confDict = {} # config dictionary

View File

@@ -285,7 +285,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
if conf.REPORT_WEBHOOK and check_config('webhook'): if conf.REPORT_WEBHOOK and check_config('webhook'):
updateState(db,"Send: Webhook") updateState(db,"Send: Webhook")
mylog('info', ['[Notification] Sending report by Webhook']) mylog('info', ['[Notification] Sending report by Webhook'])
send_webhook (json_final, mail_text) send_webhook (msg)
else : else :
mylog('verbose', ['[Notification] Skip webhook']) mylog('verbose', ['[Notification] Skip webhook'])
if conf.REPORT_NTFY and check_config('ntfy'): if conf.REPORT_NTFY and check_config('ntfy'):