diff --git a/Dockerfile b/Dockerfile index f903188e..cd800b59 100755 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ ENV USER=pi USER_ID=1000 USER_GID=1000 TZ=Europe/London PORT=20211 RUN apt-get update \ && apt-get install --no-install-recommends tini ca-certificates curl libwww-perl arp-scan perl apt-utils cron sudo nginx-light php php-cgi php-fpm php-sqlite3 php-curl sqlite3 dnsutils net-tools python3 iproute2 nmap python3-pip zip -y \ - && pip3 install requests paho-mqtt scapy cron-converter pytz \ + && pip3 install requests paho-mqtt scapy cron-converter pytz json2table \ && update-alternatives --install /usr/bin/python python /usr/bin/python3 10 \ && apt-get clean autoclean \ && apt-get autoremove \ diff --git a/back/pialert.py b/back/pialert.py index e0f8fb04..7fcbf133 100755 --- a/back/pialert.py +++ b/back/pialert.py @@ -38,6 +38,7 @@ import threading from pathlib import Path from cron_converter import Cron from pytz import timezone +from json2table import convert #=============================================================================== # SQL queries @@ -68,6 +69,7 @@ piholeDhcpleases = '/etc/pihole/dhcp.leases' debug_force_notification = False userSubnets = [] +changedPorts = [] time_started = datetime.datetime.now() cron_instance = Cron() log_timestamp = time_started @@ -289,7 +291,7 @@ def importConfig (): # Nmap global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS # API - global ENABLE_API, API_RUN, API_RUN_SCHD, API_RUN_INTERVAL + global ENABLE_API, API_RUN, API_RUN_SCHD, API_RUN_INTERVAL, API_CUSTOM_SQL mySettings = [] # reset settings # get config file @@ -312,7 +314,7 @@ def importConfig (): TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General') PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 
'readonly', '', 'General') - INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events']", 'General') + INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports']", 'General') SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General') DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General') REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') @@ -391,8 +393,9 @@ def importConfig (): # API ENABLE_API = ccd('ENABLE_API', True , c_d, 'Enable API', 'boolean', '', 'API') API_RUN = ccd('API_RUN', 'schedule' , c_d, 'API execution', 'selecttext', "['none', 'interval', 'schedule']", 'API') - API_RUN_SCHD = ccd('API_RUN_SCHD', '*/3 * * * *' , c_d, 'API schedule', 'text', '', 'API') + API_RUN_SCHD = ccd('API_RUN_SCHD', '*/3 * * * *' , c_d, 'API schedule', 'text', '', 'API') API_RUN_INTERVAL = ccd('API_RUN_INTERVAL', 10 , c_d, 'API update interval', 'integer', '', 'API') + API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API') # Insert settings into the DB sql.execute ("DELETE FROM Settings") @@ -574,10 +577,16 @@ def main (): if cycle in check_report: # Check if new devices need to be scanned with Nmap if NMAP_ACTIVE: - sql.execute ("""SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices - WHERE eve_PendingAlertEmail = 1 - AND eve_EventType = 'New Device' - ORDER BY eve_DateTime""") + sql.execute ("""SELECT * FROM + ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices + WHERE eve_PendingAlertEmail = 1 + AND eve_EventType = 'New Device' 
+                        ORDER BY eve_DateTime ) t1
+                        LEFT JOIN
+                        (
+                            SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices
+                        ) t2
+                        ON t1.dev_MAC = t2.dev_MAC_t2""")
                newDevices = sql.fetchall()
                commitDB()
@@ -1615,6 +1624,8 @@ def update_devices_names ():
 
 #-------------------------------------------------------------------------------
 def performNmapScan(devicesToScan):
+    global changedPorts
+
     if len(devicesToScan) > 0:
         timeoutSec = NMAP_TIMEOUT
@@ -1660,8 +1671,8 @@
         for line in newLines:
             append_line_to_file (logPath + '/pialert_nmap.log', line +'\n')
 
-        # collect ports
-        params = []
+        # collect ports / new Nmap Entries
+        newEntries = []
 
         index = 0
         startCollecting = False
@@ -1674,16 +1685,112 @@
                 startCollecting = True
             elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
                 startCollecting = False # end reached
-            elif startCollecting and len(line.split()) == 3:
-                params.append((device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], ''))
+            elif startCollecting and len(line.split()) == 3:
+                newEntries.append(nmap_entry(device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
             elif 'Nmap done' in line:
                 duration = line.split('scanned in ')[1]
         index += 1
 
-        if len(params) > 0:
-            sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
-            commitDB ()
+        # previous Nmap Entries
+        oldEntries = []
+
+        if len(newEntries) > 0:
+            # get all current NMAP ports from the DB
+            sql.execute(sql_nmap_scan_all)
+
+            rows = sql.fetchall()
+
+            for row in rows:
+                # NOTE: row["Time"] must be passed so the positional arguments
+                # line up with nmap_entry(mac, time, port, state, service,
+                # name, extra, index); omitting it shifts every later argument.
+                oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
+
+            # Remove all entries already available in the database.
+            # NOTE: build a new list instead of newEntries.pop(index) inside the
+            # loop - popping while iterating skips elements, and `index` is the
+            # device counter, not the position of the current entry.
+            newEntries = [entry for entry in newEntries
+                          if not any(old.hash == entry.hash for old in oldEntries)]
+
+        file_print('[', timeNow(), '] Scan: Nmap found ', len(newEntries), ' new or changed ports')
+
+        # collect new ports, find the corresponding old entry and return for notification purposes
+        # also update the DB with the new values after deleting the old ones
+        if len(newEntries) > 0:
+
+            params = []
+            indexesToDelete = ""
+
+            for newEntry in newEntries:
+
+                foundEntry = None
+
+                # Find the old entry describing the same MAC + port.
+                # NOTE: the hashes can never be equal at this point (identical
+                # hashes were filtered out above), so a changed port is detected
+                # by matching MAC + port while the hash differs.
+                for oldEntry in oldEntries:
+                    if oldEntry.mac == newEntry.mac and oldEntry.port == newEntry.port:
+
+                        indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
+
+                        foundEntry = oldEntry
+
+                # NOTE: append a tuple (list.append takes a single argument) and
+                # do it for every new entry - otherwise brand-new ports would be
+                # reported but never written to the DB.
+                params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, foundEntry.extra if foundEntry is not None else ''))
+
+                if foundEntry is not None:
+                    changedPorts.append(
+                        {
+                            'new' : {
+                                        "Name"   : foundEntry.name,
+                                        "MAC"    : newEntry.mac,
+                                        "Port"   : newEntry.port,
+                                        "State"  : newEntry.state,
+                                        "Service": newEntry.service,
+                                        "Extra"  : foundEntry.extra
+                            },
+                            'old' : {
+                                        "Name"   : foundEntry.name,
+                                        "MAC"    : foundEntry.mac,
+                                        "Port"   : foundEntry.port,
+                                        "State"  : foundEntry.state,
+                                        "Service": foundEntry.service,
+                                        "Extra"  : foundEntry.extra
+                            }
+                        }
+                    )
+                else:
+                    changedPorts.append(
+                        {
+                            'new' : {
+                                        "Name"   : "New device",
+                                        "MAC"    : newEntry.mac,
+                                        "Port"   : newEntry.port,
+                                        "State"  : newEntry.state,
+                                        "Service": newEntry.service,
+                                        "Extra"  : ""
+                            }
+                        }
+                    )
+
+            # Delete old entries if available
+            if len(indexesToDelete) > 0:
+                # NOTE: "Index" is a reserved word in SQLite and must be quoted
+                sql.execute ('DELETE FROM Nmap_Scan where "Index" in (' + indexesToDelete[:-1] + ')')
+                commitDB ()
+
+            # Insert new values into the DB
+            sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
+            commitDB ()
+
+#-------------------------------------------------------------------------------
+class nmap_entry:
+    def __init__(self, mac, time, port, state, service, name = '', extra = '', index = 0):
+        self.mac = mac
+        self.time = time
+        self.port = port
+        self.state = state
+        
self.service = service + self.name = name + self.extra = extra + self.index = index + self.hash = str(hash(str(mac) + str(port)+ str(state)+ str(service))) #------------------------------------------------------------------------------- def performPholusScan (timeoutSec): @@ -2027,9 +2134,14 @@ def skip_repeated_notifications (): json_final = [] def send_notifications (): - global mail_text, mail_html, json_final + global mail_text, mail_html, json_final, changedPorts deviceUrl = REPORT_DASHBOARD_URL + '/deviceDetails.php?mac=' + table_attributes = {"style" : "border-collapse: collapse; font-size: 12px; color:#70707", "width" : "100%", "cellspacing" : 0, "cellpadding" : "3px", "bordercolor" : "#C0C0C0", "border":"1"} + headerProps = "width='120px' style='color:blue; font-size: 12px;' bgcolor='#909090' " + thProps = "width='120px' style='color:#F0F0F0' bgcolor='#909090' " + + build_direction = "TOP_TO_BOTTOM" # Reporting section file_print(' Check if something to report') @@ -2039,6 +2151,8 @@ def send_notifications (): json_new_devices = [] json_down_devices = [] json_events = [] + json_ports = [] + # Disable reporting on events for devices where reporting is disabled based on the MAC address sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0 @@ -2073,153 +2187,180 @@ def send_notifications (): mail_text = mail_text.replace ('', socket.gethostname() ) mail_html = mail_html.replace ('', socket.gethostname() ) - - if 'internet' in INCLUDED_SECTIONS: - # Compose Internet Section - mail_section_Internet = False - mail_text_Internet = '' - mail_html_Internet = '' - text_line_template = '{} \t{}\t{}\t{}\n' - html_line_template = '\n'+ \ - ' {} \n {} \n'+ \ - ' {} \n'+ \ - ' {} \n\n' + # Compose Internet Section + text = "" - sql.execute ("""SELECT * FROM Events + json_string = get_table_as_json("""SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "Additional info" FROM Events WHERE 
eve_PendingAlertEmail = 1 AND eve_MAC = 'Internet' - ORDER BY eve_DateTime""") + ORDER BY eve_DateTime""") + if json_string["data"] == []: + html = "" + else: + html = convert(json_string, build_direction=build_direction, table_attributes=table_attributes) + + html = format_table(html, "data", headerProps, "Internet IP change") + + headers = ["MAC", "Datetime", "IP", "Event Type", "Additional info"] + + # prepare text-only message + text_line = '{}\t{}\n' + + for device in json_string["data"]: + for header in headers: + text += text_line.format ( header + ': ', device[header]) + + # Format HTML table headers + for header in headers: + html = format_table(html, header, thProps) - for eventAlert in sql : - mail_section_Internet = 'internet' in INCLUDED_SECTIONS - # collect "internet" (IP changes) for the webhook json - json_internet = add_json_list (eventAlert, json_internet) - - mail_text_Internet += text_line_template.format ( - 'Event:', eventAlert['eve_EventType'], 'Time:', eventAlert['eve_DateTime'], - 'IP:', eventAlert['eve_IP'], 'More Info:', eventAlert['eve_AdditionalInfo']) - mail_html_Internet += html_line_template.format ( - deviceUrl, eventAlert['eve_MAC'], - eventAlert['eve_EventType'], eventAlert['eve_DateTime'], - eventAlert['eve_IP'], eventAlert['eve_AdditionalInfo']) - - - format_report_section (mail_section_Internet, 'SECTION_INTERNET', - 'TABLE_INTERNET', mail_text_Internet, mail_html_Internet) + mail_text = mail_text.replace ('', text + '\n') + mail_html = mail_html.replace ('', html) + + # collect "internet" (IP changes) for the webhook json + json_internet = json_string["data"] if 'new_devices' in INCLUDED_SECTIONS: - # Compose New Devices Section - mail_section_new_devices = False - mail_text_new_devices = '' - mail_html_new_devices = '' - text_line_template = '{}\t{}\n\t{}\t{}\n\t{}\t{}\n\t{}\t{}\n\t{}\t{}\n\n' - html_line_template = '\n'+ \ - ' {} \n {} \n'+\ - ' {} \n {} \n {} \n\n' - - sql.execute ("""SELECT * FROM Events_Devices + # 
Compose New Devices Section + text = "" + + json_string = get_table_as_json("""SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'New Device' - ORDER BY eve_DateTime""") + ORDER BY eve_DateTime""") + if json_string["data"] == []: + html = "" + else: + html = convert(json_string, build_direction=build_direction, table_attributes=table_attributes) - for eventAlert in sql : - mail_section_new_devices = 'new_devices' in INCLUDED_SECTIONS - # collect "new_devices" for the webhook json - json_new_devices = add_json_list (eventAlert, json_new_devices) + html = format_table(html, "data", headerProps, "New devices") - mail_text_new_devices += text_line_template.format ( - 'Name: ', eventAlert['dev_Name'], 'MAC: ', eventAlert['eve_MAC'], 'IP: ', eventAlert['eve_IP'], - 'Time: ', eventAlert['eve_DateTime'], 'More Info: ', eventAlert['eve_AdditionalInfo']) - mail_html_new_devices += html_line_template.format ( - deviceUrl, eventAlert['eve_MAC'], eventAlert['eve_MAC'], - eventAlert['eve_DateTime'], eventAlert['eve_IP'], - eventAlert['dev_Name'], eventAlert['eve_AdditionalInfo']) - - format_report_section (mail_section_new_devices, 'SECTION_NEW_DEVICES', - 'TABLE_NEW_DEVICES', mail_text_new_devices, mail_html_new_devices) + headers = ["MAC", "Datetime", "IP", "Event Type", "Device name", "Comments"] + + # prepare text-only message + text_line = '{}\t{}\n' + text = "" + for device in json_string["data"]: + for header in headers: + text += text_line.format ( header + ': ', device[header]) + + # Format HTML table headers + for header in headers: + html = format_table(html, header, thProps) + + mail_text = mail_text.replace ('', text + '\n') + mail_html = mail_html.replace ('', html) + + # collect "new_devices" for the webhook json + json_new_devices = json_string["data"] if 'down_devices' in INCLUDED_SECTIONS: - 
# Compose Devices Down Section - mail_section_devices_down = False - mail_text_devices_down = '' - mail_html_devices_down = '' - text_line_template = '{}\t{}\n\t{}\t{}\n\t{}\t{}\n\t{}\t{}\n\n' - html_line_template = '\n'+ \ - ' {} \n {} \n'+ \ - ' {} \n {} \n\n' + # Compose Devices Down Section + text = "" - sql.execute ("""SELECT * FROM Events_Devices + json_string = get_table_as_json("""SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'Device Down' - ORDER BY eve_DateTime""") + ORDER BY eve_DateTime""") + if json_string["data"] == []: + html = "" + else: + html = convert(json_string, build_direction=build_direction, table_attributes=table_attributes) - for eventAlert in sql : - mail_section_devices_down = 'down_devices' in INCLUDED_SECTIONS - # collect "down_devices" for the webhook json - json_down_devices = add_json_list (eventAlert, json_down_devices) + html = format_table(html, "data", headerProps, "Down devices") - mail_text_devices_down += text_line_template.format ( - 'Name: ', eventAlert['dev_Name'], 'MAC: ', eventAlert['eve_MAC'], - 'Time: ', eventAlert['eve_DateTime'],'IP: ', eventAlert['eve_IP']) - mail_html_devices_down += html_line_template.format ( - deviceUrl, eventAlert['eve_MAC'], eventAlert['eve_MAC'], - eventAlert['eve_DateTime'], eventAlert['eve_IP'], - eventAlert['dev_Name']) + headers = ["MAC", "Datetime", "IP", "Event Type", "Device name", "Comments"] - format_report_section (mail_section_devices_down, 'SECTION_DEVICES_DOWN', - 'TABLE_DEVICES_DOWN', mail_text_devices_down, mail_html_devices_down) + # prepare text-only message + text_line = '{}\t{}\n' + text = "" + for device in json_string["data"]: + for header in headers: + text += text_line.format ( header + ': ', device[header]) + # Format HTML table headers + for header in headers: + html = format_table(html, header, 
thProps) + + mail_text = mail_text.replace ('', text + '\n') + mail_html = mail_html.replace ('', html) + + # collect "down_devices" for the webhook json + json_down_devices = json_string["data"] if 'events' in INCLUDED_SECTIONS: - # Compose Events Section - mail_section_events = False - mail_text_events = '' - mail_html_events = '' - text_line_template = '{}\t{}\n\t{}\t{}\n\t{}\t{}\n\t{}\t{}\n\t{}\t{}\n\t{}\t{}\n\n' - html_line_template = '\n '+ \ - ' {} \n {} \n'+ \ - ' {} \n {} \n {} \n'+ \ - ' {} \n\n' + # Compose Events Section + text = "" - sql.execute ("""SELECT * FROM Events_Devices + json_string = get_table_as_json("""SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices WHERE eve_PendingAlertEmail = 1 AND eve_EventType IN ('Connected','Disconnected', 'IP Changed') - ORDER BY eve_DateTime""") + ORDER BY eve_DateTime""") + if json_string["data"] == []: + html = "" + else: + html = convert(json_string, build_direction=build_direction, table_attributes=table_attributes) - for eventAlert in sql : - mail_section_events = 'events' in INCLUDED_SECTIONS - # collect "events" for the webhook json - json_events = add_json_list (eventAlert, json_events) - - mail_text_events += text_line_template.format ( - 'Name: ', eventAlert['dev_Name'], 'MAC: ', eventAlert['eve_MAC'], - 'IP: ', eventAlert['eve_IP'],'Time: ', eventAlert['eve_DateTime'], - 'Event: ', eventAlert['eve_EventType'],'More Info: ', eventAlert['eve_AdditionalInfo']) - mail_html_events += html_line_template.format ( - deviceUrl, eventAlert['eve_MAC'], eventAlert['eve_MAC'], - eventAlert['eve_DateTime'], eventAlert['eve_IP'], - eventAlert['eve_EventType'], eventAlert['dev_Name'], - eventAlert['eve_AdditionalInfo']) + html = format_table(html, "data", headerProps, "Events") - format_report_section (mail_section_events, 'SECTION_EVENTS', - 'TABLE_EVENTS', mail_text_events, mail_html_events) + 
headers = ["MAC", "Datetime", "IP", "Event Type", "Device name", "Comments"] + + # prepare text-only message + text_line = '{}\t{}\n' + text = "" + for device in json_string["data"]: + for header in headers: + text += text_line.format ( header + ': ', device[header]) + + # Format HTML table headers + for header in headers: + html = format_table(html, header, thProps) + + mail_text = mail_text.replace ('', text + '\n') + mail_html = mail_html.replace ('', html) + + # collect "events" for the webhook json + json_events = json_string["data"] + + if 'ports' in INCLUDED_SECTIONS: + json_ports = changedPorts + + json_string = { "data" : changedPorts } + + if json_string["data"] == []: + html = "" + else: + html = convert(json_string, build_direction=build_direction, table_attributes=table_attributes) + + html = format_table(html, "data", headerProps, "Changed or new ports") + + headers = ["Name", "MAC", "Port", "State", "Service", "Extra"] + + for header in headers: + html = format_table(html, header, thProps) + + mail_html = mail_html.replace ('', html) json_final = { "internet": json_internet, "new_devices": json_new_devices, "down_devices": json_down_devices, - "events": json_events + "events": json_events, + "ports": json_ports, } + + # Create clickable MAC links + mail_html = generate_mac_links (mail_html, deviceUrl) - # Write output emails for testing + # Write output emails for debug write_file (logPath + '/report_output.txt', mail_text) write_file (logPath + '/report_output.html', mail_html) # Send Mail - if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or debug_force_notification: + if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or debug_force_notification: update_api(True) @@ -2272,6 +2413,8 @@ def send_notifications (): """, (datetime.datetime.now(),) ) sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0 WHERE eve_PendingAlertEmail = 
1""") + + changedPorts = [] # DEBUG - print number of rows updated file_print(' Notifications: ', sql.rowcount) @@ -2326,6 +2469,26 @@ def check_config(service): +#------------------------------------------------------------------------------- +def format_table (html, thValue, props, newThValue = ''): + + if newThValue == '': + newThValue = thValue + + return html.replace(""+thValue+"", ""+newThValue+"" ) + +#------------------------------------------------------------------------------- +def generate_mac_links (html, deviceUrl): + + p = re.compile(r'(?:[0-9a-fA-F]:?){12}') + + MACs = re.findall(p, html) + + for mac in MACs: + html = html.replace('' + mac + '','' + mac + '') + + return html + #------------------------------------------------------------------------------- def format_report_section (pActive, pSection, pTable, pText, pHTML): global mail_text @@ -2968,29 +3131,30 @@ def update_api(isNotification = False): ["devices", sql_devices_all], ["nmap_scan", sql_nmap_scan_all], ["pholus_scan", sql_pholus_scan_all], - ["events_pending_alert", sql_events_pending_alert] + ["events_pending_alert", sql_events_pending_alert], + ["custom_endpoint", API_CUSTOM_SQL] ] # Save selected database tables for dsSQL in dataSourcesSQLs: - - sql.execute(dsSQL[1]) - columnNames = list(map(lambda x: x[0], sql.description)) - - rows = sql.fetchall() - - json_string = get_table_as_json(rows, columnNames) + json_string = get_table_as_json(dsSQL[1]) write_file(folder + 'table_' + dsSQL[0] + '.json' , json.dumps(json_string)) #------------------------------------------------------------------------------- -def get_table_as_json(rows, names): +def get_table_as_json(sqlQuery): + + sql.execute(sqlQuery) + + columnNames = list(map(lambda x: x[0], sql.description)) + + rows = sql.fetchall() result = {"data":[]} for row in rows: - tmp = fill_row(names, row) + tmp = fill_row(columnNames, row) result["data"].append(tmp) return result @@ -3234,6 +3398,8 @@ def isNewVersion(): buildTimestamp = 
int(f.read().strip()) f.close() + data = "" + try: url = requests.get("https://api.github.com/repos/jokob-sk/Pi.Alert/releases") text = url.text diff --git a/back/report_sample_2.html b/back/report_sample_2.html index 49c4a5be..66c7da31 100755 --- a/back/report_sample_2.html +++ b/back/report_sample_2.html @@ -1,143 +1,173 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - -
- Pi.Alert Report -
- - - - - - -
Report Date: 2021-01-01 08:00 Scan Cycle: 1 Server: pi4
-
- - - -

New Devices:

- - - - - - - - - - - - - - - - - - - - - - -
MAC Datetime IP Vendor
f8:d0:27:00:00:00 2021-01-01 08:00:00 192.168.1.20 Seiko Epson Corporation
c8:6c:3d:00:00:00 2021-01-01 08:00:00 192.168.1.181 Amazon Technologies Inc.
- -
- - - -

Devices Down:

- - - - - - - - - - - - - - - - -
MAC Datetime IP Device Name
0c:ee:99:00:00:00 2021-01-01 08:00:00 192.168.1.171 Alexa - Echo
- -
- - - -

Events:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - -
MAC Datetime IP Event Type Device Name Additional Info
94:0c:98:00:00:00 2021-01-01 08:00:00 192.168.1.132 Connected Person 1 - iPhone 11
5c:41:5a:00:00:00 2021-01-01 08:00:00 192.168.1.170 IP Changed Alexa Dot Previous IP: 192.168.1.243
- -
- - - - - - -
Puche 2021 Pi.Alert 2.50   /   2021-01-01 GNU GPLv3
-
-
- - + # Pi.Alert + # Open Source Network Guard / WIFI & LAN intrusion detector + # + # repot_template.html - Back module. Template to email reporting in HTML format + #------------------------------------------------------------------------------- + # Puche 2021 pi.alert.application@gmail.com GNU GPLv3 + #--------------------------------------------------------------------------- --> + + + + + + + + + + + + + + + + + + +
+ Pi.Alert Report +
+ 🆕 New version available 🆕 +
+ + + + +
Report Date: 2023-01-30 22:17
+
+ + + + + + + +
New devices
+ + + + + + + + + + + + + + + + + + + + + + + + + +
MACDatetimeIPEvent TypeDevice nameComments
00:00:00:ef:a5:6c2023-01-30 22:15:09192.168.1.1New Device(name not found)
00:00:00:ef:a5:6c2023-01-30 22:17:59192.168.1.82New Device(name not found)
+
+ + + + + + + +
Events
+
    +
  • + + + + + + + + + + + + + + + + + +
    MACDatetimeIPEvent TypeDevice nameComments
    00:00:00:ef:a5:6c2023-01-30 22:15:09192.168.1.92Disconnected(name not found)
    +
  • +
+
+ + + + + + + +
Changed or new ports
+ + + + + + + + + + + +
new
+ + + + + + + + + + + + + + + + + +
NameMACPortStateServiceExtra
New device00:00:00:ef:a5:6c3263/tcpopenecolor-imager
+
+ + + + + + + + + + + + + + + + + +
NameMACPortStateServiceExtra
New device00:00:00:ef:a5:6c3264/tcpopenccmail
+
+
+
+ + + + +
Pi.Alert - Synology-NAS
+
+
+ + + \ No newline at end of file diff --git a/back/report_template.txt b/back/report_template.txt index 9f5ebdad..9d161ec0 100755 --- a/back/report_template.txt +++ b/back/report_template.txt @@ -1,19 +1,15 @@ Report Date: Server: - + New Devices ---------------------- - - + Devices Down ---------------------- - - + Events ---------------------- - - + Internet ---------------------- - - \ No newline at end of file + diff --git a/back/report_template_new_version.html b/back/report_template_new_version.html index 6994c012..7d0e785c 100755 --- a/back/report_template_new_version.html +++ b/back/report_template_new_version.html @@ -21,7 +21,7 @@ - + 🆕 New version available 🆕 @@ -36,76 +36,16 @@ - -

Internet:

- - - - - - - + - -
Event Type Datetime IP Additional Info
- -
-
- - -

New Devices:

- - - - - - - - - - - -
MAC Datetime IP Device Name Vendor
- -
-
- - -

Devices Down:

- - - - - - - - - - -
MAC Datetime IP Device Name
- -
-
- - -

Events:

- - - - - - - - - - - - -
MAC Datetime IP Event Type Device Name Additional Info
-
- - + + + + + + + @@ -118,5 +58,7 @@ + + \ No newline at end of file diff --git a/back/webhook_json_sample.json b/back/webhook_json_sample.json index df7bcf7c..562ca570 100755 --- a/back/webhook_json_sample.json +++ b/back/webhook_json_sample.json @@ -17,197 +17,68 @@ "title": "Pi.Alert Notifications", "title_link": "", "text": { - "internet": [ - [ - "Internet", - "243.243.243.243", - "2022-01-06 18:32:03", - "Internet IP Changed", - "Previous Internet IP: 0.0.0.0", - 1, - null - ], - [ - "Internet", - "243.243.243.243", - "2022-01-06 18:32:03", - "New Device", - null, - 1, - null - ] - ], - "new_devices": [ - [ - "b8:b8:b8:b8:b8:b8", - "192.168.1.19", - "2023-01-06 18:32:03", - "New Device", - "Raspberry Pi Foundation", - 1, - null, - "b8:b8:b8:b8:b8:b8", - "raspberrypi", - "(unknown)", - null, - "Raspberry Pi Foundation", - 0, - null, - null, - "2021-01-06 18:32:03", - "2021-01-06 18:32:03", - "192.168.1.19", - 0, - 1, - 1, - 1, - 0, - 0, - null, - 1, - 1, - null, - 0, - null, - null - ], - [ - "b1:b8:b8:b8:b8:b8", - "192.168.1.45", - "2021-01-06 18:32:03", - "New Device", - "EliteGroup Computer Systems Co., LTD", - 1, - null, - "b1:b8:b8:b8:b8:b8", - "my-NUC", - "(unknown)", - null, - "EliteGroup Computer Systems Co., LTD", - 0, - null, - null, - "2023-01-06 18:32:03", - "2023-01-06 18:32:03", - "192.168.1.45", - 0, - 1, - 1, - 1, - 0, - 0, - null, - 1, - 1, - null, - 0, - null, - null - ] - ], - "down_devices": [ - [ - "aa:77:aa:77:aa:77", - "192.168.1.151", - "2021-01-07 14:20:53", - "Device Down", - "", - 1, - 25, - "aa:77:aa:77:aa:77", - "ttgo_tdisplay_weather", - "(unknown)", - "", - "Espressif Inc.", - 0, - "", - "", - "2021-01-06 23:13:06", - "2021-01-06 23:13:06", - "192.168.1.151", - 0, - 1, - 1, - 0, - 1, - 0, - "2021-01-06 23:34:37.067330", - 0, - 0, - "", - 0, - "", - "" - ] - ], - "events": [ - [ - "aa:77:aa:77:aa:77", - "192.168.1.151", - "2022-08-12 21:48:00", - "Connected", - "", - 1, - null, - "aa:77:aa:77:aa:77", - "ESP32 - display", - 
"House", - "", - "Espressif Inc.", - 0, - "", - "", - "2022-07-21 20:35:00", - "2022-08-12 21:48:00", - "192.168.1.151", - 0, - 1, - 1, - 1, - 0, - 0, - "2022-08-12 21:42:47.937413", - 1, - 0, - "", - 0, - "aa:77:aa:77:aa:77", - "" - ], - [ - "aa:77:aa:77:aa:77", - "192.168.1.149", - "2022-08-12 21:48:00", - "Connected", - "", - 1, - null, - "aa:77:aa:77:aa:77", - "ESP32 - 1", - "House", - "Singleboard Computer (SBC)", - "Espressif Inc.", - 0, - "", - "", - "2022-07-15 05:30:00", - "2022-08-12 21:48:00", - "192.168.1.149", - 0, - 1, - 1, - 1, - 0, - 0, - "2022-08-12 21:42:47.937413", - 1, - 1, - "", - 0, - "aa:77:aa:77:aa:77", - "" - ] - ] + "internet": [], + "new_devices": [{ + "MAC": "74:ac:74:ac:74:ac", + "Datetime": "2023-01-30 22:15:09", + "IP": "192.168.1.1", + "Event Type": "New Device", + "Device name": "(name not found)", + "Comments": null + }], + "down_devices": [], + "events": [{ + "MAC": "74:ac:74:ac:74:ac", + "Datetime": "2023-01-30 22:15:09", + "IP": "192.168.1.92", + "Event Type": "Disconnected", + "Device name": "(name not found)", + "Comments": null + }, { + "MAC": "74:ac:74:ac:74:ac", + "Datetime": "2023-01-30 22:15:09", + "IP": "192.168.1.150", + "Event Type": "Disconnected", + "Device name": "(name not found)", + "Comments": null + }], + "ports": [{ + "new": { + "Name": "New device", + "MAC": "74:ac:74:ac:74:ac", + "Port": "22/tcp", + "State": "open", + "Service": "ssh", + "Extra": "" + } + }, { + "new": { + "Name": "New device", + "MAC": "74:ac:74:ac:74:ac", + "Port": "53/tcp", + "State": "open", + "Service": "domain", + "Extra": "" + } + }, { + "new": { + "Name": "New device", + "MAC": "74:ac:74:ac:74:ac", + "Port": "80/tcp", + "State": "open", + "Service": "http", + "Extra": "" + } + }, { + "new": { + "Name": "New device", + "MAC": "74:ac:74:ac:74:ac", + "Port": "443/tcp", + "State": "open", + "Service": "https", + "Extra": "" + } + }] } } ] diff --git a/docker-compose.yml b/docker-compose.yml index baad034e..e60008d8 100755 --- 
a/docker-compose.yml +++ b/docker-compose.yml @@ -9,7 +9,7 @@ services: volumes: - ${APP_DATA_LOCATION}/pialert/config:/home/pi/pialert/config # - ${APP_DATA_LOCATION}/pialert/db/pialert.db:/home/pi/pialert/db/pialert.db - - ${APP_DATA_LOCATION}/pialert/db:/home/pi/pialert/db + - ${APP_DATA_LOCATION}/pialert/db2:/home/pi/pialert/db # (optional) useful for debugging if you have issues setting up the container - ${LOGS_LOCATION}:/home/pi/pialert/front/log # DELETE START anyone trying to use this file: comment out / delete BELOW lines, they are only for development purposes diff --git a/docs/API.md b/docs/API.md index 41125fbb..6d58be3c 100755 --- a/docs/API.md +++ b/docs/API.md @@ -29,6 +29,7 @@ You can access the following files: | `table_nmap_scan.json` | The current state of the discovered ports by the regular NMAP scans. | | `table_pholus_scan.json` | The latest state of the [pholus](https://github.com/jokob-sk/Pi.Alert/tree/main/pholus) (A multicast DNS and DNS Service Discovery Security Assessment Tool) scan results. | | `table_events_pending_alert.json` | The list of the unprocessed (pending) notification events. | + | `table_custom_endpoint.json` | A custom endpoint generated by the SQL query specified by the `API_CUSTOM_SQL` setting. | Current/latest state of the aforementioned files depends on your settings. 
diff --git a/front/deviceDetails.php b/front/deviceDetails.php index d50a686b..a2144bcd 100755 --- a/front/deviceDetails.php +++ b/front/deviceDetails.php @@ -1575,6 +1575,32 @@ function skipNotifications () { // Set cycle 0 $('#txtScanCycle').val ('no'); } +// ----------------------------------------------------------------------------- +function askDeleteDeviceEvents () { + // Check MAC + if (mac == '') { + return; + } + + // Ask delete device Events + showModalWarning ('', '', + '', '', 'deleteDeviceEvents'); +} + +function deleteDeviceEvents () { + // Check MAC + if (mac == '') { + return; + } + + // Delete device events + $.get('php/server/devices.php?action=deleteDeviceEvents&mac='+ mac, function(msg) { + showMessage (msg); + }); + + // Deactivate controls + $('#panDetails :input').attr('disabled', true); +} // ----------------------------------------------------------------------------- // Overwrite all devices of the same type with the currently selected icon diff --git a/front/devices.php b/front/devices.php index 67af8d2f..ed0d08c0 100755 --- a/front/devices.php +++ b/front/devices.php @@ -204,14 +204,26 @@ // ----------------------------------------------------------------------------- function main () { + // get from cookie if available (need to use decodeURI as saved as part of URI in PHP) + cookieColumnsVisibleStr = decodeURI(getCookie("Front_Devices_Columns_Visible")).replaceAll('%2C',',') + + defaultValue = cookieColumnsVisibleStr == "" ? 
columnsStr : cookieColumnsVisibleStr; + // get visible columns - $.get('php/server/parameters.php?action=get&expireMinutes=525600&defaultValue='+columnsStr+'¶meter=Front_Devices_Columns_Visible&skipcache', function(data) { + $.get('php/server/parameters.php?action=get&expireMinutes=525600&defaultValue='+defaultValue+'¶meter=Front_Devices_Columns_Visible&skipcache', function(data) { + // save which columns are in the Devices page visible tableColumnVisible = numberArrayFromString(data); + // get from cookie if available (need to use decodeURI as saved as part of URI in PHP) + cookieColumnsOrderStr = decodeURI(getCookie("Front_Devices_Columns_Order")).replaceAll('%2C',',') + + defaultValue = cookieColumnsOrderStr == "" ? columnsStr : cookieColumnsOrderStr; + // get the custom order specified by the user - $.get('php/server/parameters.php?action=get&expireMinutes=525600&defaultValue='+columnsStr+'¶meter=Front_Devices_Columns_Order&skipcache', function(data) { + $.get('php/server/parameters.php?action=get&expireMinutes=525600&defaultValue='+defaultValue+'¶meter=Front_Devices_Columns_Order&skipcache', function(data) { + // save the columns order in the Devices page tableColumnOrder = numberArrayFromString(data); //initialize the table headers in the correct order @@ -243,8 +255,6 @@ function main () { } $('#tableDevices tr').html(html); - - // get parameter value $.get('php/server/parameters.php?action=get&defaultValue=50¶meter='+ parTableRows, function(data) { @@ -282,14 +292,13 @@ function main () { // ----------------------------------------------------------------------------- var tableColumnHide = []; +// mapping the default order to the user specified one function mapIndx(oldIndex) { for(i=0;i