diff --git a/.gitignore b/.gitignore
index f908738e..3ace1794 100755
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,7 @@
 .DS_Store
 config/pialert.conf
 db/*
+db/pialert.db
 front/log/*
 front/plugins/**/*.log
 **/%40eaDir/
diff --git a/README.md b/README.md
index f0666661..7ab7fa97 100755
--- a/README.md
+++ b/README.md
@@ -51,13 +51,14 @@ The system continuously scans the network for, **New devices**, **New connection
   - Theme Selection (blue, red, green, yellow, black, purple) and Light/Dark-Mode Switch
   - DB maintenance, Backup, Restore tools and CSV Export / Import
   - Simple login Support
-  - 🌟(Experimental) [Plugin system](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins)
+  - 🌟[Plugin system](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins)
     - Create custom plugins with automatically generated settings and UI.
     - Monitor anything for changes
     - Check the [instructions](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins) carefully if you are up for a challenge! Current plugins include:
       - Detecting Rogue DHCP servers via NMAP
       - Monitoring HTTP status changes of domains/URLs
       - Import devices from DHCP.leases files, a UniFi controller, or an SNMP enabled router
+      - Creation of dummy devices to visualize your [network map](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/NETWORK_TREE.md)
 
 | ![Screen 1][screen1] | ![Screen 2][screen2] | ![Screen 5][screen5] |
 |----------------------|----------------------| ----------------------|
@@ -86,7 +87,8 @@ The system continuously scans the network for, **New devices**, **New connection
   - [leiweibau](https://github.com/leiweibau/Pi.Alert): Dark mode (and much more)
   - [Macleykun](https://github.com/Macleykun): Help with Dockerfile clean-up
   - [Final-Hawk](https://github.com/Final-Hawk): Help with NTFY, styling and other fixes
-  - [TeroRERO](https://github.com/terorero): Spanish translation
+  - [TeroRERO](https://github.com/terorero): Spanish translation
+  - [Data-Monkey](https://github.com/Data-Monkey): Split-up of the pialert.py file and more
   - Please see the [Git contributors](https://github.com/jokob-sk/Pi.Alert/graphs/contributors) for a full list of people and their contributions to the project
 
 ## ☕ Support me
diff --git a/db/pialert.db b/db/pialert.db
index 50a88225..ff4e2731 100755
Binary files a/db/pialert.db and b/db/pialert.db differ
diff --git a/docker-compose.yml b/docker-compose.yml
index fd148df4..b363fe78 100755
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -7,18 +7,21 @@ services:
     network_mode: "host"
     restart: unless-stopped
     volumes:
-      - ${APP_DATA_LOCATION}/pialert2/config:/home/pi/pialert/config
+      # - ${APP_DATA_LOCATION}/pialert_dev/config:/home/pi/pialert/config
+      - ${APP_DATA_LOCATION}/pialert/config:/home/pi/pialert/config
       # - ${APP_DATA_LOCATION}/pialert/db/pialert.db:/home/pi/pialert/db/pialert.db
-      - ${APP_DATA_LOCATION}/pialert2/db:/home/pi/pialert/db
+      # - ${APP_DATA_LOCATION}/pialert_dev/db:/home/pi/pialert/db
+      - ${APP_DATA_LOCATION}/pialert/db:/home/pi/pialert/db
       # (optional) useful for debugging if you have issues setting up the container
      - ${LOGS_LOCATION}:/home/pi/pialert/front/log
       # DELETE START anyone trying to use this file: comment out / delete BELOW lines, they are only for development purposes
       - ${APP_DATA_LOCATION}/pialert/dhcp_samples/dhcp1.leases:/mnt/dhcp1.leases
       - ${APP_DATA_LOCATION}/pialert/dhcp_samples/dhcp2.leases:/mnt/dhcp2.leases
-      - ${DEV_LOCATION}/back/pialert.py:/home/pi/pialert/back/pialert.py
-      - ${DEV_LOCATION}/back/report_template.html:/home/pi/pialert/back/report_template.html
-      - ${DEV_LOCATION}/back/report_template_new_version.html:/home/pi/pialert/back/report_template_new_version.html
-      - ${DEV_LOCATION}/back/report_template.txt:/home/pi/pialert/back/report_template.txt
+      # - ${DEV_LOCATION}/back/pialert.py:/home/pi/pialert/back/pialert.py
+      - ${DEV_LOCATION}/pialert:/home/pi/pialert/pialert
+      # - ${DEV_LOCATION}/back/report_template.html:/home/pi/pialert/back/report_template.html
+      # - ${DEV_LOCATION}/back/report_template_new_version.html:/home/pi/pialert/back/report_template_new_version.html
+      # - ${DEV_LOCATION}/back/report_template.txt:/home/pi/pialert/back/report_template.txt
       - ${DEV_LOCATION}/pholus:/home/pi/pialert/pholus
       - ${DEV_LOCATION}/dockerfiles:/home/pi/pialert/dockerfiles
       - ${APP_DATA_LOCATION}/pialert/php.ini:/etc/php/7.4/fpm/php.ini
diff --git a/docs/README.md b/docs/README.md
index db845a9e..5d705d42 100755
--- a/docs/README.md
+++ b/docs/README.md
@@ -77,6 +77,16 @@ If you submit a PR please:
 4. New features code should ideally be re-usable for different purposes, not be for a very narrow use-case.
 5. New functionality should ideally be implemented via the Plugins system, if possible.
 
+Suggested test cases:
+
+- Blank setup with no DB or config
+- Existing DB / config
+- Sending a notification (e.g. delete a device and wait for a scan to run) and testing all notification gateways, especially:
+  - Email, Apprise (e.g. via Telegram), webhook (e.g. via Discord), MQTT (e.g. via HomeAssistant)
+- Saving settings
+- Test a couple of plugins
+- Check the Error log for anything unusual
+
 Some additional context:
 
 * Permanent settings/config is stored in the `pialert.conf` file
diff --git a/front/php/templates/language/en_us.php b/front/php/templates/language/en_us.php
index 4228fa50..ce25fa86 100755
--- a/front/php/templates/language/en_us.php
+++ b/front/php/templates/language/en_us.php
@@ -116,6 +116,7 @@ $lang['en_us'] = array(
 'Presence_CalHead_quarter' => 'quarter',
 'Presence_CalHead_month' => 'month',
 'Presence_CalHead_week' => 'week',
+'Presence_CalHead_day' => 'day',
 
 //////////////////////////////////////////////////////////////////
 // Events Page
diff --git a/front/presence.php b/front/presence.php
index e45b1b90..bdc28d25 100755
--- a/front/presence.php
+++ b/front/presence.php
@@ -234,7 +234,7 @@ function initializeCalendar () {
     header: {
       left   : 'prev,next today',
       center : 'title',
-      right  : 'timelineYear,timelineMonth,timelineWeek'
+      right  : 'timelineYear,timelineMonth,timelineWeek,timelineDay'
     },
     defaultView : 'timelineMonth',
     height      : 'auto',
@@ -286,6 +286,13 @@ function initializeCalendar () {
       buttonText      : '',
       slotLabelFormat : 'D',
       slotDuration    : '24:00:01'
+    },
+    timelineDay: {
+      type            : 'timeline',
+      duration        : { day: 1 },
+      buttonText      : '',
+      slotLabelFormat : 'H',
+      slotDuration    : '00:30:00'
     }
   },
 
@@ -305,6 +312,15 @@ function initializeCalendar () {
     if (date.format('YYYY-MM-DD') == moment().format('YYYY-MM-DD')) {
       cell.addClass ('fc-today');
     };
+
+    if ($('#calendar').fullCalendar('getView').name == 'timelineDay') {
+      cell.removeClass('fc-sat');
+      cell.removeClass('fc-sun');
+      cell.removeClass('fc-today');
+      if (date.format('YYYY-MM-DD HH') == moment().format('YYYY-MM-DD HH')) {
+        cell.addClass('fc-today');
+      }
+    };
   },
 
   resourceRender: function (resourceObj, labelTds, bodyTds) {
diff --git a/pialert/__main__.py b/pialert/__main__.py
index 341233a6..998a7112 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -13,10 +13,9 @@
 #===============================================================================
 # IMPORTS
 #===============================================================================
-from __future__ import print_function
+#from __future__ import print_function
 
 import sys
-from collections import namedtuple
 import time
 import datetime
 import multiprocessing
@@ -25,7 +24,7 @@ import multiprocessing
 import conf
 from const import *
 from logger import mylog
-from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState
+from helper import filePermissions, isNewVersion, timeNow, updateState
 from api import update_api
 from networkscan import process_scan, scan_network
 from initialise import importConfigs
@@ -75,7 +74,8 @@ main structure of Pi Alert
 """
 def main ():
-
+    mylog('debug', ['[MAIN] Setting up ...'])
+
     conf.time_started = datetime.datetime.now()
     conf.cycle = ""
     conf.check_report = [1, "internet_IP", "update_vendors_silent"]
@@ -109,7 +109,7 @@ def main ():
     # Open DB once and keep open
     # Opening / closing DB frequently actually casues more issues
     db = DB()  # instance of class DB
-    db.openDB()
+    db.open()
     sql = db.sql  # To-Do replace with the db class
 
     # Upgrade DB if needed
@@ -119,13 +119,12 @@ def main ():
     #===============================================================================
     # This is the main loop of Pi.Alert
     #===============================================================================
-
     while True:
 
         # update time started
         time_started = datetime.datetime.now()  # not sure why we need this ...
         loop_start_time = timeNow()
-        mylog('debug', '[MAIN] Stating loop')
+        mylog('debug', '[MAIN] Starting loop')
 
         # re-load user configuration and plugins
         importConfigs(db)
@@ -301,7 +300,7 @@ def main ():
         else:
             # do something
             conf.cycle = ""
-            mylog('verbose', ['[MAIN] waiting to start next loop'])
+            mylog('verbose', ['[MAIN] waiting to start next loop'])  #loop
 
         time.sleep(5)  # wait for N seconds
 
@@ -311,4 +310,5 @@
 # BEGIN
 #===============================================================================
 if __name__ == '__main__':
+    mylog('debug', ['[__main__] Welcome to Pi.Alert'])
     sys.exit(main())
diff --git a/pialert/conf.py b/pialert/conf.py
index d4062d91..fdb918d3 100755
--- a/pialert/conf.py
+++ b/pialert/conf.py
@@ -10,7 +10,11 @@ userSubnets = []
 mySchedules = [] # bad solution for global - TO-DO
 plugins = [] # bad solution for global - TO-DO
 tz = ''
-lastTimeImported = 0 # Time the config was last imported
+
+# modified time of the most recently imported config file
+# set to a small value to force an import at first run
+lastImportedConfFile = 1.1
+
 plugins_once_run = False
 newVersionAvailable = False
 time_started = ''
@@ -20,10 +24,12 @@ arpscan_devices = []
 # for MQTT
 mqtt_connected_to_broker = False
 mqtt_sensors = []
+client = None # mqtt client
 
 # for notifications
 changedPorts_json_struc = None
 
+
 # ACTUAL CONFIGRATION ITEMS set to defaults
 
 # General
diff --git a/pialert/const.py b/pialert/const.py
index f57aca39..edc172e3 100755
--- a/pialert/const.py
+++ b/pialert/const.py
@@ -26,8 +26,15 @@ piholeDhcpleases = '/etc/pihole/dhcp.leases'
 #===============================================================================
 # SQL queries
 #===============================================================================
-sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices"
-sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
+sql_devices_all = """select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group,
+                        dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP,
+                        dev_PresentLastScan, dev_LastNotification, dev_NewDevice,
+                        dev_Network_Node_MAC_ADDR, dev_Network_Node_port,
+                        dev_Icon from Devices"""
+sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
+                        (select count(*) from Devices a where dev_NewDevice = 1 ) as new,
+                        (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown
+                        from Online_History order by Scan_Date desc limit 1"""
 sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
 sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
 sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
@@ -36,12 +43,12 @@ sql_plugins_objects = "SELECT * FROM Plugins_Objects"
 sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
 sql_plugins_events = "SELECT * FROM Plugins_Events"
 sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
-sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices
-                WHERE eve_PendingAlertEmail = 1
-                AND eve_EventType = 'New Device'
-                ORDER BY eve_DateTime ) t1
-                LEFT JOIN
-                (
-                    SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices
-                ) t2
-                ON t1.dev_MAC = t2.dev_MAC_t2"""
\ No newline at end of file
+sql_new_devices = """SELECT * FROM (
+                        SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC
+                        FROM Events_Devices
+                        WHERE eve_PendingAlertEmail = 1
+                        AND eve_EventType = 'New Device'
+                        ORDER BY eve_DateTime ) t1
+                     LEFT JOIN
+                        ( SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices) t2
+                     ON t1.dev_MAC = t2.dev_MAC_t2"""
\ No newline at end of file
diff --git a/pialert/database.py b/pialert/database.py
index e31be40c..b3812cae 100755
--- a/pialert/database.py
+++ b/pialert/database.py
@@ -15,13 +15,17 @@ from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateSta
 
 class DB():
+    """
+    DB Class to provide the basic database interactions.
+    Open / Commit / Close / read / write
+    """
     def __init__(self):
         self.sql = None
         self.sql_connection = None
-
-    #-------------------------------------------------------------------------------
-    def openDB (self):
+
+    #-------------------------------------------------------------------------------
+    def open (self):
         # Check if DB is open
         if self.sql_connection != None :
             mylog('debug','openDB: databse already open')
             return
@@ -29,11 +33,15 @@ class DB():
         mylog('none', '[Database] Opening DB' )
 
         # Open DB and Cursor
-        self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
-        self.sql_connection.execute('pragma journal_mode=wal') #
-        self.sql_connection.text_factory = str
-        self.sql_connection.row_factory = sqlite3.Row
-        self.sql = self.sql_connection.cursor()
+        try:
+            self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
+            self.sql_connection.execute('pragma journal_mode=wal') #
+            self.sql_connection.text_factory = str
+            self.sql_connection.row_factory = sqlite3.Row
+            self.sql = self.sql_connection.cursor()
+        except sqlite3.Error as e:
+            mylog('none',[ '[Database] - Open DB Error: ', e])
+
 
     #-------------------------------------------------------------------------------
     def commitDB (self):
@@ -44,13 +52,13 @@ class DB():
         # Commit changes to DB
         self.sql_connection.commit()
         return True
-
+
     #-------------------------------------------------------------------------------
-    def get_sql_array(self, query):
+    def get_sql_array(self, query):
         if self.sql_connection == None :
             mylog('debug','getQueryArray: databse is not open')
             return
-
+
         self.sql.execute(query)
         rows = self.sql.fetchall()
         #self.commitDB()
@@ -69,81 +77,87 @@ class DB():
     #===============================================================================
     # Cleanup / upkeep database
    #===============================================================================
     def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA):
-        # Header
-        #updateState(self,"Upkeep: Clean DB")
-        mylog('verbose', ['[', startTime, '] Upkeep Database:' ])
+        """
+        Cleaning out old records from the tables that don't need to keep all data.
+ """ + # Header + #updateState(self,"Upkeep: Clean DB") + mylog('verbose', ['[DB Cleanup] Upkeep Database:' ]) - # Cleanup Online History - mylog('verbose', [' Online_History: Delete all but keep latest 150 entries']) - self.sql.execute ("""DELETE from Online_History where "Index" not in ( SELECT "Index" from Online_History order by Scan_Date desc limit 150)""") + # Cleanup Online History + mylog('verbose', ['[DB Cleanup] Online_History: Delete all but keep latest 150 entries']) + self.sql.execute ("""DELETE from Online_History where "Index" not in ( + SELECT "Index" from Online_History + order by Scan_Date desc limit 150)""") + mylog('verbose', ['[DB Cleanup] Optimize Database']) + # Cleanup Events + mylog('verbose', ['[DB Cleanup] Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) + self.sql.execute ("""DELETE FROM Events + WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')""") + # Cleanup Plugin Events History + mylog('verbose', ['[DB Cleanup] Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) + self.sql.execute ("""DELETE FROM Plugins_History + WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')""") + # Cleanup Pholus_Scan + if PHOLUS_DAYS_DATA != 0: + mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days']) + # improvement possibility: keep at least N per mac + self.sql.execute ("""DELETE FROM Pholus_Scan + WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')""") - mylog('verbose', [' Optimize Database']) - # Cleanup Events - mylog('verbose', [' Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) - self.sql.execute ("DELETE FROM Events WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") + # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table + mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all duplicates']) + self.sql.execute ("""DELETE FROM Pholus_Scan + WHERE rowid > ( + SELECT MIN(rowid) FROM Pholus_Scan p2 + WHERE Pholus_Scan.MAC = p2.MAC + AND Pholus_Scan.Value = p2.Value + AND Pholus_Scan.Record_Type = p2.Record_Type + );""") + # De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table + mylog('verbose', [' Nmap_Scan: Delete all duplicates']) + self.sql.execute ("""DELETE FROM Nmap_Scan + WHERE rowid > ( + SELECT MIN(rowid) FROM Nmap_Scan p2 + WHERE Nmap_Scan.MAC = p2.MAC + AND Nmap_Scan.Port = p2.Port + AND Nmap_Scan.State = p2.State + AND Nmap_Scan.Service = p2.Service + );""") - # Cleanup Plugin Events History - mylog('verbose', [' Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) - self.sql.execute ("DELETE FROM Plugins_History WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") - - # Cleanup Pholus_Scan - if PHOLUS_DAYS_DATA != 0: - mylog('verbose', [' Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days']) - self.sql.execute ("DELETE FROM Pholus_Scan WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')") # improvement possibility: keep at least N per mac - - # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table - mylog('verbose', [' Pholus_Scan: Delete all duplicates']) - self.sql.execute ("""DELETE FROM Pholus_Scan - WHERE rowid > ( - SELECT MIN(rowid) FROM Pholus_Scan p2 - WHERE Pholus_Scan.MAC = p2.MAC - AND Pholus_Scan.Value = p2.Value - AND Pholus_Scan.Record_Type = p2.Record_Type - );""") - - # De-Dupe (de-duplicate - remove duplicate 
-        #    entries) from the Nmap_Scan table
-        mylog('verbose', ['    Nmap_Scan: Delete all duplicates'])
-        self.sql.execute ("""DELETE FROM Nmap_Scan
-            WHERE rowid > (
-                SELECT MIN(rowid) FROM Nmap_Scan p2
-                WHERE Nmap_Scan.MAC = p2.MAC
-                AND Nmap_Scan.Port = p2.Port
-                AND Nmap_Scan.State = p2.State
-                AND Nmap_Scan.Service = p2.Service
-            );""")
-
-        # Shrink DB
-        mylog('verbose', ['    Shrink Database'])
-        self.sql.execute ("VACUUM;")
-
-        self.commitDB()
+        # Shrink DB
+        mylog('verbose', ['[DB Cleanup] Shrink Database'])
+        self.sql.execute ("VACUUM;")
+        self.commitDB()
 
     #-------------------------------------------------------------------------------
     def upgradeDB(self):
-        sql = self.sql #TO-DO
+        """
+        Check the current tables in the DB and upgrade them if necessary
+        """
 
-        # indicates, if Online_History table is available
-        onlineHistoryAvailable = sql.execute("""
+        # indicates, if Online_History table is available
+        onlineHistoryAvailable = self.sql.execute("""
             SELECT name FROM sqlite_master WHERE type='table'
-            AND name='Online_History';
+            AND name='Online_History';
         """).fetchall() != []
 
         # Check if it is incompatible (Check if table has all required columns)
         isIncompatible = False
-
+
         if onlineHistoryAvailable :
-            isIncompatible = sql.execute ("""
+            isIncompatible = self.sql.execute ("""
               SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
             """).fetchone()[0] == 0
-
+
         # Drop table if available, but incompatible
-        if onlineHistoryAvailable and isIncompatible:
+        if onlineHistoryAvailable and isIncompatible:
             mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
-            sql.execute("DROP TABLE Online_History;")
+            self.sql.execute("DROP TABLE Online_History;")
             onlineHistoryAvailable = False
 
         if onlineHistoryAvailable == False :
-            sql.execute("""
+            self.sql.execute("""
             CREATE TABLE "Online_History" (
                 "Index" INTEGER,
                 "Scan_Date" TEXT,
@@ -152,84 +166,84 @@ class DB():
                 "All_Devices" INTEGER,
                 "Archived_Devices" INTEGER,
                 PRIMARY KEY("Index" AUTOINCREMENT)
-            );
+            );
             """)
 
         # Alter Devices table
         # dev_Network_Node_MAC_ADDR column
-        dev_Network_Node_MAC_ADDR_missing = sql.execute ("""
+        dev_Network_Node_MAC_ADDR_missing = self.sql.execute ("""
           SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR'
         """).fetchone()[0] == 0
 
         if dev_Network_Node_MAC_ADDR_missing :
-            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
-            sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
+            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
+            self.sql.execute("""
+            ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
             """)
 
         # dev_Network_Node_port column
-        dev_Network_Node_port_missing = sql.execute ("""
+        dev_Network_Node_port_missing = self.sql.execute ("""
          SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port'
        """).fetchone()[0] == 0
 
        if dev_Network_Node_port_missing :
-            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
-            sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
+            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
+            self.sql.execute("""
+            ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
            """)
 
         # dev_Icon column
-        dev_Icon_missing = sql.execute ("""
+        dev_Icon_missing = self.sql.execute ("""
          SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
         """).fetchone()[0] == 0
 
        if
 dev_Icon_missing :
-            mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
-            sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Icon" TEXT
+            mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
+            self.sql.execute("""
+            ALTER TABLE "Devices" ADD "dev_Icon" TEXT
            """)
 
-        # indicates, if Settings table is available
-        settingsMissing = sql.execute("""
+        # indicates, if Settings table is available
+        settingsMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
-            AND name='Settings';
+            AND name='Settings';
        """).fetchone() == None
 
-        # Re-creating Settings table
+        # Re-creating Settings table
         mylog('verbose', ["[upgradeDB] Re-creating Settings table"])
 
-        if settingsMissing == False:
-            sql.execute("DROP TABLE Settings;")
+        if settingsMissing == False:
+            self.sql.execute("DROP TABLE Settings;")
 
-        sql.execute("""
-        CREATE TABLE "Settings" (
+        self.sql.execute("""
+        CREATE TABLE "Settings" (
         "Code_Name" TEXT,
         "Display_Name" TEXT,
-        "Description" TEXT,
+        "Description" TEXT,
         "Type" TEXT,
         "Options" TEXT,
         "RegEx" TEXT,
         "Value" TEXT,
         "Group" TEXT,
         "Events" TEXT
-        );
+        );
        """)
 
-        # indicates, if Pholus_Scan table is available
-        pholusScanMissing = sql.execute("""
+        # indicates, if Pholus_Scan table is available
+        pholusScanMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
-            AND name='Pholus_Scan';
+            AND name='Pholus_Scan';
        """).fetchone() == None
 
         # if pholusScanMissing == False:
-        #     # Re-creating Pholus_Scan table
-        #     sql.execute("DROP TABLE Pholus_Scan;")
-        #     pholusScanMissing = True
+        #     # Re-creating Pholus_Scan table
+        #     self.sql.execute("DROP TABLE Pholus_Scan;")
+        #     pholusScanMissing = True
 
         if pholusScanMissing:
             mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
-            sql.execute("""
-            CREATE TABLE "Pholus_Scan" (
+            self.sql.execute("""
+            CREATE TABLE "Pholus_Scan" (
                 "Index" INTEGER,
                "Info" TEXT,
                "Time" TEXT,
@@ -239,47 +253,47 @@ class DB():
                "Value" TEXT,
                "Extra" TEXT,
                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
+            );
            """)
 
-        # indicates, if Nmap_Scan table is available
-        nmapScanMissing = sql.execute("""
+        # indicates, if Nmap_Scan table is available
+        nmapScanMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
-            AND name='Nmap_Scan';
+            AND name='Nmap_Scan';
        """).fetchone() == None
 
         # Re-creating Parameters table
         mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
-        sql.execute("DROP TABLE Parameters;")
+        self.sql.execute("DROP TABLE Parameters;")
 
-        sql.execute("""
+        self.sql.execute("""
          CREATE TABLE "Parameters" (
            "par_ID" TEXT PRIMARY KEY,
            "par_Value" TEXT
-          );
+          );
        """)
 
         # Initialize Parameters if unavailable
         initOrSetParam(self, 'Back_App_State','Initializing')
 
         # if nmapScanMissing == False:
-        #     # Re-creating Nmap_Scan table
-        #     sql.execute("DROP TABLE Nmap_Scan;")
-        #     nmapScanMissing = True
+        #     # Re-creating Nmap_Scan table
+        #     self.sql.execute("DROP TABLE Nmap_Scan;")
+        #     nmapScanMissing = True
 
         if nmapScanMissing:
             mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
-            sql.execute("""
-            CREATE TABLE "Nmap_Scan" (
+            self.sql.execute("""
+            CREATE TABLE "Nmap_Scan" (
                "Index" INTEGER,
                "MAC" TEXT,
                "Port" TEXT,
-                "Time" TEXT,
+                "Time" TEXT,
                "State" TEXT,
-                "Service" TEXT,
+                "Service" TEXT,
                "Extra" TEXT,
                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
+            );
            """)
 
         # Plugin state
@@ -288,19 +302,19 @@ class DB():
            Plugin TEXT NOT NULL,
            Object_PrimaryID TEXT NOT NULL,
            Object_SecondaryID TEXT NOT NULL,
-            DateTimeCreated TEXT NOT NULL,
-            DateTimeChanged TEXT NOT NULL,
+            DateTimeCreated TEXT NOT NULL,
+            DateTimeChanged TEXT NOT NULL,
            Watched_Value1 TEXT NOT NULL,
            Watched_Value2 TEXT NOT NULL,
            Watched_Value3 TEXT NOT NULL,
            Watched_Value4 TEXT NOT NULL,
-            Status TEXT NOT NULL,
+            Status TEXT NOT NULL,
            Extra TEXT NOT NULL,
            UserData TEXT NOT NULL,
            ForeignKey TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
-        sql.execute(sql_Plugins_Objects)
+        self.sql.execute(sql_Plugins_Objects)
 
         # Plugin execution results
         sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
@@ -308,19 +322,19 @@ class DB():
            Plugin TEXT NOT NULL,
            Object_PrimaryID TEXT NOT NULL,
            Object_SecondaryID TEXT NOT NULL,
-            DateTimeCreated TEXT NOT NULL,
-            DateTimeChanged TEXT NOT NULL,
+            DateTimeCreated TEXT NOT NULL,
+            DateTimeChanged TEXT NOT NULL,
            Watched_Value1 TEXT NOT NULL,
            Watched_Value2 TEXT NOT NULL,
            Watched_Value3 TEXT NOT NULL,
            Watched_Value4 TEXT NOT NULL,
-            Status TEXT NOT NULL,
+            Status TEXT NOT NULL,
            Extra TEXT NOT NULL,
            UserData TEXT NOT NULL,
            ForeignKey TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
-        sql.execute(sql_Plugins_Events)
+        self.sql.execute(sql_Plugins_Events)
 
         # Plugin execution history
         sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
@@ -328,40 +342,40 @@ class DB():
            Plugin TEXT NOT NULL,
            Object_PrimaryID TEXT NOT NULL,
            Object_SecondaryID TEXT NOT NULL,
-            DateTimeCreated TEXT NOT NULL,
-            DateTimeChanged TEXT NOT NULL,
+            DateTimeCreated TEXT NOT NULL,
+            DateTimeChanged TEXT NOT NULL,
            Watched_Value1 TEXT NOT NULL,
            Watched_Value2 TEXT NOT NULL,
            Watched_Value3 TEXT NOT NULL,
            Watched_Value4 TEXT NOT NULL,
-            Status TEXT NOT NULL,
+            Status TEXT NOT NULL,
            Extra TEXT NOT NULL,
            UserData TEXT NOT NULL,
            ForeignKey TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
-        ); """
-        sql.execute(sql_Plugins_History)
+        ); """
+        self.sql.execute(sql_Plugins_History)
 
         # Dynamically generated language strings
-        # indicates, if Language_Strings table is available
-        languageStringsMissing = sql.execute("""
+        # indicates, if Language_Strings table is available
+        languageStringsMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
-            AND name='Plugins_Language_Strings';
+            AND name='Plugins_Language_Strings';
        """).fetchone() == None
 
-        if languageStringsMissing == False:
-            sql.execute("DROP TABLE Plugins_Language_Strings;")
-        sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
+        if languageStringsMissing == False:
+            self.sql.execute("DROP TABLE Plugins_Language_Strings;")
+
+        self.sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
            "Index" INTEGER,
            Language_Code TEXT NOT NULL,
            String_Key TEXT NOT NULL,
            String_Value TEXT NOT NULL,
-            Extra TEXT NOT NULL,
+            Extra TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
-        ); """)
-
-        self.commitDB()
+        ); """)
+
+        self.commitDB()
 
     #-------------------------------------------------------------------------------
     def get_table_as_json(self, sqlQuery):
@@ -369,15 +383,15 @@ class DB():
         mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
 
         try:
-            self.sql.execute(sqlQuery)
-            columnNames = list(map(lambda x: x[0], self.sql.description))
-            rows = self.sql.fetchall()
+            self.sql.execute(sqlQuery)
+            columnNames = list(map(lambda x: x[0], self.sql.description))
+            rows = self.sql.fetchall()
         except sqlite3.Error as e:
             mylog('none',[ '[Database] - SQL ERROR: ', e])
             return None
-
+
         result = {"data":[]}
-        for row in rows:
+        for row in rows:
             tmp = row_to_json(columnNames, row)
             result["data"].append(tmp)
@@ -386,10 +400,10 @@ class DB():
 
     #-------------------------------------------------------------------------------
     # referece from here: https://codereview.stackexchange.com/questions/241043/interface-class-for-sqlite-databases
-    #-------------------------------------------------------------------------------
+    #-------------------------------------------------------------------------------
     def read(self, query, *args):
         """check the query and arguments are aligned and are read only"""
-        mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
+        mylog('debug',[ '[Database] - Read All: SELECT Query: ', query, " params: ", args])
         try:
             assert query.count('?') == len(args)
             assert query.upper().strip().startswith('SELECT')
@@ -402,33 +416,57 @@ class DB():
             mylog('none',[ '[Database] - SQL ERROR: ', e])
             return None
 
+    def read_one(self, query, *args):
+        """
+        Call read() with the same arguments but only return the first row.
+        Should only be used when a single-row result is expected.
+        """
+
+        mylog('debug',[ '[Database] - Read One: ', query, " params: ", args])
+        rows = self.read(query, *args)
+        if len(rows) == 1:
+            return rows[0]
+
+        if len(rows) > 1:
+            mylog('none',[ '[Database] - Warning!: query returns multiple rows, only first row is passed on!', query, " params: ", args])
+            return rows[0]
+        # empty result set
+        return None
+
 
 #-------------------------------------------------------------------------------
 def get_device_stats(db):
     # columns = ["online","down","all","archived","new","unknown"]
-    return db.read(sql_devices_stats)
+    return db.read_one(sql_devices_stats)
 
 #-------------------------------------------------------------------------------
-def get_all_devices(db):
+def get_all_devices(db):
     return db.read(sql_devices_all)
 
 #-------------------------------------------------------------------------------
 
 #-------------------------------------------------------------------------------
-def insertOnlineHistory(db, cycle):
+def insertOnlineHistory(db):
     sql = db.sql #TO-DO
     startTime = timeNow()
     # Add to History
-
+
+    # only run this if the scans have run
+    scanCount = db.read_one("SELECT count(*) FROM CurrentScan")
+    if scanCount[0] == 0 :
+        mylog('debug',[ '[insertOnlineHistory] - nothing to do, currentScan empty'])
+        return 0
+
     History_All = db.read("SELECT * FROM Devices")
     History_All_Devices = len(History_All)
 
     History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1")
     History_Archived_Devices = len(History_Archived)
 
-    History_Online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? ", cycle)
+    History_Online = db.read("SELECT * FROM CurrentScan")
     History_Online_Devices = len(History_Online)
     History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices
-
+
     sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+
                  "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) )
    db.commitDB()
\ No newline at end of file
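A quick illustration of the `read_one()` contract introduced above — a minimal usage sketch, assuming the `DB` class is importable exactly as defined in this file (the query is the one `insertOnlineHistory()` uses):

```python
# Minimal sketch of DB.read_one() usage (illustrative, not part of the patch).
# read_one() runs the query via read(); a single row is returned as-is, multiple
# rows log a warning and only the first row is passed on, an empty result
# returns None.

db = DB()
db.open()

row = db.read_one("SELECT count(*) FROM CurrentScan")  # aggregate: always one row
if row is not None and row[0] == 0:
    print("CurrentScan is empty - nothing to insert into Online_History")
```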
", cycle) + History_Online = db.read("SELECT * FROM CurrentScan") History_Online_Devices = len(History_Online) History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices - + sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+ "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) ) db.commitDB() \ No newline at end of file diff --git a/pialert/initialise.py b/pialert/initialise.py index 38a6dd5b..61f12078 100755 --- a/pialert/initialise.py +++ b/pialert/initialise.py @@ -7,8 +7,8 @@ from pathlib import Path import datetime import conf -from const import * -from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam +from const import fullConfPath +from helper import collect_lang_strings, updateSubnets, initOrSetParam from logger import mylog from api import update_api from scheduler import schedule_class @@ -43,15 +43,26 @@ def importConfigs (db): sql = db.sql - lastTimeImported = 0 - - # get config file + # get config file name config_file = Path(fullConfPath) - # Skip import if last time of import is NEWER than file age - if (os.path.getmtime(config_file) < lastTimeImported) : + # Only import file if the file was modifed since last import. + # this avoids time zone issues as we just compare the previous timestamp to the current time stamp + mylog('debug', ['[Import Config] checking config file ']) + mylog('debug', ['[Import Config] lastImportedConfFile :', conf.lastImportedConfFile]) + mylog('debug', ['[Import Config] file modified time :', os.path.getmtime(config_file)]) + + + if (os.path.getmtime(config_file) == conf.lastImportedConfFile) : + mylog('debug', ['[Import Config] skipping config file import']) return - + + conf.lastImportedConfFile = os.path.getmtime(config_file) + + + + + mylog('debug', ['[Import Config] importing config file']) conf.mySettings = [] # reset settings conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query @@ -237,6 +248,7 @@ def read_config_file(filename): """ retuns dict on the config file key:value pairs """ + mylog('info', '[Config] reading config file') # load the variables from pialert.conf code = compile(filename.read_text(), filename.name, "exec") confDict = {} # config dictionary diff --git a/pialert/mac_vendor.py b/pialert/mac_vendor.py index 3d3fc83e..35359ba8 100755 --- a/pialert/mac_vendor.py +++ b/pialert/mac_vendor.py @@ -88,7 +88,7 @@ def query_MAC_vendor (pMAC): grep_output = subprocess.check_output (grep_args) except subprocess.CalledProcessError as e: # An error occured, handle it - mylog('none', [e.output]) + mylog('none', ["[Mac Vendor Check] Error: ", e.output]) grep_output = " There was an error, check logs for details" # Return Vendor diff --git a/pialert/networkscan.py b/pialert/networkscan.py index 7fcf719a..54f818c0 100755 --- a/pialert/networkscan.py +++ b/pialert/networkscan.py @@ -36,8 +36,6 @@ def scan_network (db): db.commitDB() - - # arp-scan command conf.arpscan_devices = [] if conf.ENABLE_ARPSCAN: @@ -117,7 +115,7 @@ def process_scan (db, arpscan_devices = conf.arpscan_devices ): # Sessions snapshot mylog('verbose','[Process Scan] Inserting scan results into Online_History') - insertOnlineHistory(db,conf.cycle) + insertOnlineHistory(db) # Skip repeated notifications mylog('verbose','[Process Scan] Skipping repeated notifications') diff --git a/pialert/plugin.py b/pialert/plugin.py index 
diff --git a/pialert/mac_vendor.py b/pialert/mac_vendor.py
index 3d3fc83e..35359ba8 100755
--- a/pialert/mac_vendor.py
+++ b/pialert/mac_vendor.py
@@ -88,7 +88,7 @@ def query_MAC_vendor (pMAC):
             grep_output = subprocess.check_output (grep_args)
         except subprocess.CalledProcessError as e:
             # An error occured, handle it
-            mylog('none', [e.output])
+            mylog('none', ["[Mac Vendor Check] Error: ", e.output])
             grep_output = " There was an error, check logs for details"
 
     # Return Vendor
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index 7fcf719a..54f818c0 100755
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -36,8 +36,6 @@ def scan_network (db):
 
     db.commitDB()
 
-
-
     # arp-scan command
     conf.arpscan_devices = []
     if conf.ENABLE_ARPSCAN:
@@ -117,7 +115,7 @@ def process_scan (db, arpscan_devices = conf.arpscan_devices ):
 
     # Sessions snapshot
     mylog('verbose','[Process Scan] Inserting scan results into Online_History')
-    insertOnlineHistory(db,conf.cycle)
+    insertOnlineHistory(db)
 
     # Skip repeated notifications
     mylog('verbose','[Process Scan] Skipping repeated notifications')
diff --git a/pialert/plugin.py b/pialert/plugin.py
index dd68f4a6..bc1ae736 100755
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -12,14 +12,14 @@ from helper import timeNow, updateState, get_file_content, write_file
 from api import update_api
 
 #-------------------------------------------------------------------------------
-def run_plugin_scripts(db, runType, plugins = conf.plugins):
+def run_plugin_scripts(db, runType):
 
     # Header
     updateState(db,"Run: Plugins")
 
     mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType])
 
-    for plugin in plugins:
+    for plugin in conf.plugins:
 
         shouldRun = False
diff --git a/pialert/publishers/mqtt.py b/pialert/publishers/mqtt.py
index 8e59d205..b955f22d 100755
--- a/pialert/publishers/mqtt.py
+++ b/pialert/publishers/mqtt.py
@@ -146,8 +146,9 @@ def mqtt_start(db):
 
     if conf.mqtt_connected_to_broker == False:
         conf.mqtt_connected_to_broker = True
-        client = mqtt_create_client()
+        conf.client = mqtt_create_client()
+    client = conf.client
 
     # General stats
 
     # Create a generic device for overal stats
@@ -175,7 +176,7 @@ def mqtt_start(db):
     # Specific devices
 
     # Get all devices
-    devices = get_all_devices()
+    devices = get_all_devices(db)
 
     sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5
diff --git a/pialert/publishers/ntfy.py b/pialert/publishers/ntfy.py
index 363da466..957657cf 100755
--- a/pialert/publishers/ntfy.py
+++ b/pialert/publishers/ntfy.py
@@ -32,6 +32,12 @@ def send (msg: noti_struc):
         # add authorization header with hash
         headers["Authorization"] = "Basic {}".format(basichash)
 
-    requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC),
-                  data=msg.html,
-                  headers=headers)
+    try:
+        requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC),
+                      data=msg.text,
+                      headers=headers)
+    except requests.exceptions.RequestException as e:
+        mylog('none', ['[NTFY] Error: ', e])
+        return -1
+
+    return 0
diff --git a/pialert/publishers/webhook.py b/pialert/publishers/webhook.py
index fc16a9c0..d575dfb5 100755
--- a/pialert/publishers/webhook.py
+++ b/pialert/publishers/webhook.py
@@ -51,7 +51,7 @@ def send (msg: noti_struc):
 
     # execute CURL call
     try:
         # try runnning a subprocess
-        mylog('debug', '[send_webhook] curlParams: '+ curlParams)
+        mylog('debug', ['[send_webhook] curlParams: ', curlParams])
         p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
         stdout, stderr = p.communicate()
diff --git a/pialert/reporting.py b/pialert/reporting.py
index 933ce65f..7a078dc0 100755
--- a/pialert/reporting.py
+++ b/pialert/reporting.py
@@ -285,7 +285,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
     if conf.REPORT_WEBHOOK and check_config('webhook'):
         updateState(db,"Send: Webhook")
         mylog('info', ['[Notification] Sending report by Webhook'])
-        send_webhook (json_final, mail_text)
+        send_webhook (msg)
     else :
         mylog('verbose', ['[Notification] Skip webhook'])
     if conf.REPORT_NTFY and check_config('ntfy'):
diff --git a/pialert/scanners/pihole.py b/pialert/scanners/pihole.py
index 31ce2bdc..c28fa164 100755
--- a/pialert/scanners/pihole.py
+++ b/pialert/scanners/pihole.py
@@ -27,6 +27,19 @@ def copy_pihole_network (db):
 
     try:
         sql.execute ("DELETE FROM PiHole_Network")
+        # just for reporting
+        new_devices = []
+        sql.execute ( """SELECT hwaddr, macVendor, lastQuery,
+                            (SELECT name FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip),
+                            (SELECT ip FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip)
+                         FROM PH.network
+                         WHERE hwaddr NOT LIKE 'ip-%'
+                         AND hwaddr <> '00:00:00:00:00:00' """)
+        new_devices = sql.fetchall()
+
+        # insert into PiAlert DB
         sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                             PH_Name, PH_IP)
                         SELECT hwaddr, macVendor, lastQuery,
@@ -47,7 +60,7 @@ def copy_pihole_network (db):
 
     db.commitDB()
 
-    mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
+    mylog('debug',[ '[PiHole Network] - completed - found ', len(new_devices), ' devices'])
 
     return str(sql.rowcount) != "0"
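One note on the final hunk: in Python's `sqlite3` module, `cursor.rowcount` is only meaningful after data-modifying statements and typically stays `-1` after a SELECT, which is presumably why the log line now counts the fetched rows instead. A minimal demonstration of the difference (in-memory DB, table name illustrative):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE network (hwaddr TEXT)")
cur.execute("INSERT INTO network VALUES ('aa:bb:cc:dd:ee:ff')")

cur.execute("SELECT hwaddr FROM network")
rows = cur.fetchall()
print(cur.rowcount)  # -1: rowcount is not populated for SELECT statements
print(len(rows))     # 1: counting the fetched rows gives the real device count
```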