From 6edb623b9c0d9db17e43b0703517ca9169284ec3 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sun, 4 Jun 2023 14:20:19 +1000
Subject: [PATCH 1/3] fix issue #7 plugins not executed

---
 pialert/plugin.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pialert/plugin.py b/pialert/plugin.py
index dd68f4a6..bc1ae736 100644
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -12,14 +12,14 @@ from helper import timeNow, updateState, get_file_content, write_file
 from api import update_api
 
 #-------------------------------------------------------------------------------
-def run_plugin_scripts(db, runType, plugins = conf.plugins):
+def run_plugin_scripts(db, runType):
 
     # Header
     updateState(db,"Run: Plugins")
 
     mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType])
 
-    for plugin in plugins:
+    for plugin in conf.plugins:
 
         shouldRun = False
 

From 5d0804639cc3123e5cb4ec271172a415606343b3 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Mon, 5 Jun 2023 13:31:14 +1000
Subject: [PATCH 2/3] attempt to fix issue #6 as well as db.read_one()

---
 pialert/__main__.py    |  2 +-
 pialert/const.py       |  5 ++++-
 pialert/database.py    | 28 +++++++++++++++++++++-------
 pialert/networkscan.py |  4 +---
 4 files changed, 27 insertions(+), 12 deletions(-)

diff --git a/pialert/__main__.py b/pialert/__main__.py
index 26d1173f..998a7112 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -300,7 +300,7 @@ def main ():
         else:
             # do something
             conf.cycle = ""
-            mylog('verbose', ['[MAIN] waiting to start next loop'])
+            mylog('verbose', ['[MAIN] waiting to start next loop']) #loop
 
         time.sleep(5) # wait for N seconds
 
diff --git a/pialert/const.py b/pialert/const.py
index d848a4d4..edc172e3 100644
--- a/pialert/const.py
+++ b/pialert/const.py
@@ -31,7 +31,10 @@ sql_devices_all = """select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_G
                         dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR,
                         dev_Network_Node_port, dev_Icon from Devices"""
 
-sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
+sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
+                        (select count(*) from Devices a where dev_NewDevice = 1 ) as new,
+                        (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown
+                        from Online_History order by Scan_Date desc limit 1"""
 sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
 sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
 sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
diff --git a/pialert/database.py b/pialert/database.py
index 673c8483..9c3db84d 100644
--- a/pialert/database.py
+++ b/pialert/database.py
@@ -33,11 +33,15 @@ class DB():
         mylog('none', '[Database] Opening DB' )
 
         # Open DB and Cursor
-        self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
-        self.sql_connection.execute('pragma journal_mode=wal') #
-        self.sql_connection.text_factory = str
-        self.sql_connection.row_factory = sqlite3.Row
-        self.sql = self.sql_connection.cursor()
+        try:
+            self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
+            self.sql_connection.execute('pragma journal_mode=wal') #
+            self.sql_connection.text_factory = str
+            self.sql_connection.row_factory = sqlite3.Row
+            self.sql = self.sql_connection.cursor()
+        except sqlite3.Error as e:
+            mylog('none',[ '[Database] - Open DB Error: ', e])
+
 
     #-------------------------------------------------------------------------------
     def commitDB (self):
@@ -421,9 +425,13 @@ class DB():
         mylog('debug',[ '[Database] - Read One: ', query, " params: ", args])
         rows = self.read(query, *args)
 
+        if len(rows) == 1:
+            return rows[0]
+
         if len(rows) > 1:
             mylog('none',[ '[Database] - Warning!: query returns multiple rows, only first row is passed on!', query, " params: ", args])
             return rows[0]
+        # empty result set
         return None
 
 
@@ -439,18 +447,24 @@ def get_all_devices(db):
 #-------------------------------------------------------------------------------
 
 #-------------------------------------------------------------------------------
-def insertOnlineHistory(db, cycle):
+def insertOnlineHistory(db):
     sql = db.sql #TO-DO
     startTime = timeNow()
 
     # Add to History
+    # only run this if the scans have run
+    scanCount = db.read_one("SELECT count(*) FROM CurrentScan")
+    if scanCount[0] == 0 :
+        mylog('debug',[ '[insertOnlineHistory] - nothing to do, currentScan empty'])
+        return 0
+
     History_All = db.read("SELECT * FROM Devices")
     History_All_Devices = len(History_All)
 
     History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1")
     History_Archived_Devices = len(History_Archived)
 
-    History_Online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? ", cycle)
+    History_Online = db.read("SELECT * FROM CurrentScan")
     History_Online_Devices = len(History_Online)
     History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices
 
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index 7fcf719a..54f818c0 100644
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -36,8 +36,6 @@ def scan_network (db):
 
     db.commitDB()
 
-
-
     # arp-scan command
     conf.arpscan_devices = []
     if conf.ENABLE_ARPSCAN:
@@ -117,7 +115,7 @@ def process_scan (db, arpscan_devices = conf.arpscan_devices ):
 
     # Sessions snapshot
     mylog('verbose','[Process Scan] Inserting scan results into Online_History')
-    insertOnlineHistory(db,conf.cycle)
+    insertOnlineHistory(db)
 
     # Skip repeated notifications
     mylog('verbose','[Process Scan] Skipping repeated notifications')

From 55ed3c4ae06c4dfc85ae40055afc3ef449e9a92e Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Mon, 5 Jun 2023 15:40:32 +1000
Subject: [PATCH 3/3] PiHole testing and fixing

---
 pialert/mac_vendor.py      |  2 +-
 pialert/scanners/pihole.py | 15 ++++++++++++++-
 2 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/pialert/mac_vendor.py b/pialert/mac_vendor.py
index 3d3fc83e..35359ba8 100644
--- a/pialert/mac_vendor.py
+++ b/pialert/mac_vendor.py
@@ -88,7 +88,7 @@ def query_MAC_vendor (pMAC):
             grep_output = subprocess.check_output (grep_args)
         except subprocess.CalledProcessError as e:
             # An error occured, handle it
-            mylog('none', [e.output])
+            mylog('none', ["[Mac Vendor Check] Error: ", e.output])
             grep_output = " There was an error, check logs for details"
 
     # Return Vendor
diff --git a/pialert/scanners/pihole.py b/pialert/scanners/pihole.py
index 31ce2bdc..c28fa164 100644
--- a/pialert/scanners/pihole.py
+++ b/pialert/scanners/pihole.py
@@ -27,6 +27,19 @@ def copy_pihole_network (db):
 
     try:
         sql.execute ("DELETE FROM PiHole_Network")
+        # just for reporting
+        new_devices = []
+        sql.execute ( """SELECT hwaddr, macVendor, lastQuery,
+                            (SELECT name FROM PH.network_addresses
+                                WHERE network_id = id ORDER BY lastseen DESC, ip),
+                            (SELECT ip FROM PH.network_addresses
+                                WHERE network_id = id ORDER BY lastseen DESC, ip)
+                        FROM PH.network
+                        WHERE hwaddr NOT LIKE 'ip-%'
+                        AND hwaddr <> '00:00:00:00:00:00' """)
+        new_devices = sql.fetchall()
+
+        # insert into PiAlert DB
         sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                             PH_Name, PH_IP)
                         SELECT hwaddr, macVendor, lastQuery,
@@ -47,7 +60,7 @@ def copy_pihole_network (db):
 
     db.commitDB()
 
-    mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
+    mylog('debug',[ '[PiHole Network] - completed - found ', len(new_devices), ' devices'])
 
     return str(sql.rowcount) != "0"
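
Note on the db.read_one() change in PATCH 2/3: the previous implementation only returned a row when the query matched more than one, so a single-row lookup such as the new SELECT count(*) FROM CurrentScan guard would have come back as None. Below is a minimal, standalone sketch of the patched behaviour for reviewers; it uses an in-memory sqlite3 database and print() in place of mylog(), and the helper name demo_read_one is illustrative rather than part of the PiAlert code base.

import sqlite3

def demo_read_one(cursor, query, *args):
    # Mirrors the patched DB.read_one(): exactly one row -> return it,
    # several rows -> warn and return the first, no rows -> return None.
    rows = cursor.execute(query, args).fetchall()
    if len(rows) == 1:
        return rows[0]
    if len(rows) > 1:
        print('Warning: query returned multiple rows, only the first is used:', query)
        return rows[0]
    # empty result set
    return None

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE CurrentScan (cur_MAC TEXT)')
cur = conn.cursor()

# count(*) always yields exactly one row, so the guard added to
# insertOnlineHistory() can index the result directly: (0,) for an empty table.
print(demo_read_one(cur, 'SELECT count(*) FROM CurrentScan'))  # (0,)
print(demo_read_one(cur, 'SELECT * FROM CurrentScan'))         # None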