pholus 2.2

This commit is contained in:
Jokob-sk
2022-12-31 00:25:32 +11:00
parent bde96af7da
commit 73fd1ef9b4
10 changed files with 486 additions and 503 deletions

View File

@@ -57,15 +57,15 @@ fullConfPath = pialertPath + confPath
fullDbPath = pialertPath + dbPath
STOPARPSCAN = pialertPath + "/db/setting_stoparpscan"
+# Global variables
+userSubnets = []
time_started = datetime.datetime.now()
cron_instance = Cron()
log_timestamp = time_started
lastTimeImported = 0
sql_connection = None
next_schedule_timestamp = 0
#-------------------------------------------------------------------------------
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
@@ -82,15 +82,6 @@ def file_print(*args):
file.close()
-# check RW access of DB and config file
-file_print('\n Permissions check (All should be True)')
-file_print('------------------------------------------------')
-file_print( " " + confPath + " | " + " READ | " + str(os.access(fullConfPath, os.R_OK)))
-file_print( " " + confPath + " | " + " WRITE | " + str(os.access(fullConfPath, os.W_OK)))
-file_print( " " + dbPath + " | " + " READ | " + str(os.access(fullDbPath, os.R_OK)))
-file_print( " " + dbPath + " | " + " WRITE | " + str(os.access(fullDbPath, os.W_OK)))
-file_print('------------------------------------------------')
#-------------------------------------------------------------------------------
def append_file_binary (pPath, input):
file = open (pPath, 'ab')
@@ -125,6 +116,48 @@ def print_log (pText):
log_timestamp = log_timestamp2
#-------------------------------------------------------------------------------
# check RW access of DB and config file
def checkPermissionsOK():
global confR_access, confW_access, dbR_access, dbW_access
confR_access = (os.access(fullConfPath, os.R_OK))
confW_access = (os.access(fullConfPath, os.W_OK))
dbR_access = (os.access(fullDbPath, os.R_OK))
dbW_access = (os.access(fullDbPath, os.W_OK))
file_print('\n Permissions check (All should be True)')
file_print('------------------------------------------------')
file_print( " " , confPath , " | " , " READ | " , confR_access)
file_print( " " , confPath , " | " , " WRITE | " , confW_access)
file_print( " " , dbPath , " | " , " READ | " , dbR_access)
file_print( " " , dbPath , " | " , " WRITE | " , dbW_access)
file_print('------------------------------------------------')
return dbR_access and dbW_access and confR_access and confW_access
def fixPermissions():
# Try fixing access rights if needed
chmodCommands = []
if dbR_access == False or dbW_access == False:
chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', dbPath])
if confR_access == False or confW_access == False:
chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', confPath])
for com in chmodCommands:
# Execute command
file_print("[Setup] Attempting to fix permissions.")
try:
# try running a subprocess
result = subprocess.check_output (com, universal_newlines=True)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
file_print("[Setup] Fix Failed. Execute this command manually inside of the container: ", ' '.join(com))
file_print(e.output)
checkPermissionsOK()
def initialiseFile(pathToCheck, defaultFile):
# if file not readable (missing?) try to copy over the backed-up (default) one
@@ -153,10 +186,16 @@ def initialiseFile(pathToCheck, defaultFile):
#===============================================================================
# check and initialize pialert.conf
-initialiseFile(fullConfPath, "/home/pi/pialert/back/pialert.conf_bak" )
+if confR_access == False:
+    initialiseFile(fullConfPath, "/home/pi/pialert/back/pialert.conf_bak" )
# check and initialize pialert.db
-initialiseFile(fullDbPath, "/home/pi/pialert/back/pialert.db_bak")
+if dbR_access == False:
+    initialiseFile(fullDbPath, "/home/pi/pialert/back/pialert.db_bak")
+if dbR_access == False or confR_access == False:
+    if checkPermissionsOK() == False:
+        fixPermissions()
#===============================================================================
@@ -249,8 +288,8 @@ DHCP_ACTIVE = False
# Pholus settings
# ----------------------
-PHOLUS_ACTIVE = False
+PHOLUS_ACTIVE = True
-PHOLUS_TIMEOUT = 60
+PHOLUS_TIMEOUT = 180
PHOLUS_FORCE = False
PHOLUS_DAYS_DATA = 7
@@ -418,10 +457,10 @@ def importConfig ():
PHOLUS_ACTIVE = check_config_dict('PHOLUS_ACTIVE', PHOLUS_ACTIVE , config_dict)
PHOLUS_TIMEOUT = check_config_dict('PHOLUS_TIMEOUT', PHOLUS_TIMEOUT , config_dict)
PHOLUS_FORCE = check_config_dict('PHOLUS_FORCE', PHOLUS_FORCE , config_dict)
-PHOLUS_DAYS_DATA = check_config_dict('PHOLUS_DAYS_DATA', PHOLUS_DAYS_DATA , config_dict)
PHOLUS_RUN = check_config_dict('PHOLUS_RUN', PHOLUS_RUN , config_dict)
PHOLUS_RUN_TIMEOUT = check_config_dict('PHOLUS_RUN_TIMEOUT', PHOLUS_RUN_TIMEOUT , config_dict)
PHOLUS_RUN_SCHD = check_config_dict('PHOLUS_RUN_SCHD', PHOLUS_RUN_SCHD , config_dict)
+PHOLUS_DAYS_DATA = check_config_dict('PHOLUS_DAYS_DATA', PHOLUS_DAYS_DATA , config_dict)
openDB()
@@ -498,10 +537,10 @@ def importConfig ():
('PHOLUS_ACTIVE', 'Enable Pholus scans', '', 'boolean', '', '' , str(PHOLUS_ACTIVE) , 'Pholus'),
('PHOLUS_TIMEOUT', 'Pholus timeout', '', 'integer', '', '' , str(PHOLUS_TIMEOUT) , 'Pholus'),
('PHOLUS_FORCE', 'Pholus force check', '', 'boolean', '', '' , str(PHOLUS_FORCE) , 'Pholus'),
-('PHOLUS_DAYS_DATA', 'Pholus keep days', '', 'integer', '', '' , str(PHOLUS_DAYS_DATA) , 'Pholus'),
('PHOLUS_RUN', 'Pholus enable schedule', '', 'selecttext', "['none', 'once', 'schedule']", '' , str(PHOLUS_RUN) , 'Pholus'),
('PHOLUS_RUN_TIMEOUT', 'Pholus timeout schedule', '', 'integer', '', '' , str(PHOLUS_RUN_TIMEOUT) , 'Pholus'),
-('PHOLUS_RUN_SCHD', 'Pholus schedule', '', 'text', '', '' , str(PHOLUS_RUN_SCHD) , 'Pholus')
+('PHOLUS_RUN_SCHD', 'Pholus schedule', '', 'text', '', '' , str(PHOLUS_RUN_SCHD) , 'Pholus'),
+('PHOLUS_DAYS_DATA', 'Pholus keep days', '', 'integer', '', '' , str(PHOLUS_DAYS_DATA) , 'Pholus')
]
# Insert into DB
@@ -526,6 +565,9 @@ def importConfig ():
last_next_pholus_schedule = schedule.next()
last_next_pholus_schedule_used = False
+# Format and prepare the list of subnets
+updateSubnets()
file_print('[', timeNow(), '] Config: Imported new config')
#-------------------------------------------------------------------------------
@@ -550,6 +592,7 @@ last_run = now_minus_24h
last_cleanup = now_minus_24h
last_update_vendors = time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
def main ():
# Initialize global variables
global time_started, cycle, last_network_scan, last_internet_IP_scan, last_run, last_cleanup, last_update_vendors, last_pholus_scheduled_run
@@ -560,14 +603,6 @@ def main ():
sql_connection = None
sql = None
-# # create log files > I don't think this is necessary (e.g. the path was incorrect
-# # (missing / at the beginning of teh file name) and there were no issues reported)
-# write_file(logPath + 'IP_changes.log', '')
-# write_file(logPath + 'stdout.log', '')
-# write_file(logPath + 'stderr.log', '')
-# write_file(logPath + 'pialert.log', '')
-# write_file(logPath + 'pialert_pholus.log', '')
# Upgrade DB if needed
upgradeDB()
@@ -582,11 +617,9 @@ def main ():
# proceed if 1 minute passed
if last_run + datetime.timedelta(minutes=1) < time_started :
-# last time any scan or maintennace/Upkeep was run
+# last time any scan or maintenance/Upkeep was run
last_run = time_started
-reporting = False
# Header
updateState("Process: Start")
file_print('[', timeNow(), '] Process: Start')
@@ -599,7 +632,7 @@ def main ():
if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started:
cycle = 'internet_IP'
last_internet_IP_scan = time_started
-reporting = check_internet_IP()
+check_internet_IP()
# Update vendors once a week
if last_update_vendors + datetime.timedelta(days = 7) < time_started:
@@ -667,7 +700,6 @@ def main ():
# INTERNET IP CHANGE
#===============================================================================
def check_internet_IP ():
-reporting = False
# Header
updateState("Scan: Internet IP")
@@ -688,7 +720,6 @@ def check_internet_IP ():
# Get previous stored IP
file_print(' Retrieving previous IP:')
-openDB()
previous_IP = get_previous_internet_IP ()
file_print(' ', previous_IP)
@@ -697,10 +728,8 @@ def check_internet_IP ():
file_print(' Saving new IP')
save_new_internet_IP (internet_IP)
file_print(' IP updated')
-reporting = True
else :
file_print(' No changes to perform')
-closeDB()
# Get Dynamic DNS IP
if DDNS_ACTIVE :
@@ -711,7 +740,6 @@ def check_internet_IP ():
if dns_IP == "" :
file_print(' Error retrieving Dynamic DNS IP')
file_print(' Exiting...')
-return False
file_print(' ', dns_IP)
# Check DNS Change
@@ -719,13 +747,11 @@ def check_internet_IP ():
file_print(' Updating Dynamic DNS IP')
message = set_dynamic_DNS_IP ()
file_print(' ', message)
-reporting = True
else :
file_print(' No changes to perform')
else :
file_print(' Skipping Dynamic DNS update')
-return reporting
#-------------------------------------------------------------------------------
@@ -791,8 +817,10 @@ def set_dynamic_DNS_IP ():
#-------------------------------------------------------------------------------
def get_previous_internet_IP ():
# get previous internet IP stored in DB
+openDB()
sql.execute ("SELECT dev_LastIP FROM Devices WHERE dev_MAC = 'Internet' ")
previous_IP = sql.fetchone()[0]
+closeDB()
# return previous IP
return previous_IP
@@ -803,13 +831,16 @@ def save_new_internet_IP (pNewIP):
append_line_to_file (logPath + '/IP_changes.log',
'['+str(startTime) +']\t'+ pNewIP +'\n')
+prevIp = get_previous_internet_IP()
+openDB()
# Save event
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
VALUES ('Internet', ?, ?, 'Internet IP Changed',
'Previous Internet IP: '|| ?, 1) """,
-(pNewIP, startTime, get_previous_internet_IP() ) )
+(pNewIP, startTime, prevIp) )
# Save new IP
sql.execute ("""UPDATE Devices SET dev_LastIP = ?
@@ -818,6 +849,7 @@ def save_new_internet_IP (pNewIP):
# commit changes
sql_connection.commit()
+closeDB()
#-------------------------------------------------------------------------------
def check_IP_format (pIP):
@@ -1033,15 +1065,19 @@ def scan_network ():
if PIHOLE_ACTIVE :
file_print(' Pi-hole start')
openDB()
-reporting = copy_pihole_network() or reporting
+copy_pihole_network()
+closeDB()
# DHCP Leases method
if DHCP_ACTIVE :
file_print(' DHCP Leases start')
-reporting = read_DHCP_leases () or reporting
+openDB()
+read_DHCP_leases ()
+closeDB()
# Load current scan data
file_print(' Processing scan results')
+openDB()
save_scanned_devices (arpscan_devices, cycle_interval)
# Print stats
@@ -1084,6 +1120,7 @@ def scan_network ():
skip_repeated_notifications ()
# Commit changes
+openDB()
sql_connection.commit()
closeDB()
@@ -1114,13 +1151,9 @@ def execute_arpscan ():
# output of possible multiple interfaces
arpscan_output = ""
-# multiple interfaces
-if type(SCAN_SUBNETS) is list:
-for interface in SCAN_SUBNETS :
+# scan each interface
+for interface in userSubnets :
arpscan_output += execute_arpscan_on_interface (interface)
-# one interface only
-else:
-arpscan_output += execute_arpscan_on_interface (SCAN_SUBNETS)
# Search IP + MAC + Vendor as regular expression
re_ip = r'(?P<ip>((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))'
@@ -1153,17 +1186,12 @@ def execute_arpscan ():
return unique_devices
#-------------------------------------------------------------------------------
-def execute_arpscan_on_interface (SCAN_SUBNETS):
+def execute_arpscan_on_interface (interface):
-# #101 - arp-scan subnet configuration
# Prepare command arguments
-subnets = SCAN_SUBNETS.strip().split()
+subnets = interface.strip().split()
# Retry is 6 to avoid false offline devices
arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + subnets
-mask = subnets[0]
-interface = subnets[1].split('=')[1]
# Execute command
try:
# try running a subprocess
@@ -1635,82 +1663,98 @@ def update_devices_data_from_scan ():
print_log ('Update devices end')
#-------------------------------------------------------------------------------
-# Feature #43 - Resolve name for unknown devices
def update_devices_names ():
# Initialize variables
recordsToUpdate = []
+recordsNotFound = []
ignored = 0
notFound = 0
+foundDig = 0
+foundPholus = 0
# Devices without name
file_print(' Trying to resolve devices without name')
# BUGFIX #97 - Updating name of Devices w/o IP
-sql.execute ("SELECT * FROM Devices WHERE dev_Name IN ('(unknown)','') AND dev_LastIP <> '-'")
+openDB()
+sql.execute ("SELECT * FROM Devices WHERE dev_Name IN ('(unknown)','', '(name not found)') AND dev_LastIP <> '-'")
unknownDevices = sql.fetchall()
+closeDB()
# perform Pholus scan if (unknown) devices found
if PHOLUS_ACTIVE and (len(unknownDevices) > 0 or PHOLUS_FORCE):
performPholusScan(PHOLUS_TIMEOUT)
# get names from Pholus scan
-sql.execute ('SELECT * FROM Pholus_Scan where "MAC" in (select "dev_MAC" from Devices where "dev_Name" IN ("(unknown)","")) and "Record_Type"="Answer"')
-pholusResults = sql.fetchall()
+# sql.execute ('SELECT * FROM Pholus_Scan where "MAC" in (select "dev_MAC" from Devices where "dev_Name" IN ("(unknown)","")) and "Record_Type"="Answer"')
+openDB()
+sql.execute ('SELECT * FROM Pholus_Scan where "Record_Type"="Answer"')
+pholusResults = list(sql.fetchall())
+closeDB()
-# Number of entries for unknown MACs from the Pholus scan
-file_print(" Pholus entries: ", len(pholusResults))
+# Number of entries from previous Pholus scans
+file_print(" Pholus entries from prev scans: ", len(pholusResults))
for device in unknownDevices:
-# Resolve device name OLD
-newName = resolve_device_name (device['dev_MAC'], device['dev_LastIP'])
-# Resolve with Pholus scan results
+newName = -1
+# Resolve device name with DiG
+newName = resolve_device_name_dig (device['dev_MAC'], device['dev_LastIP'])
+# count
+if newName != -1:
+foundDig += 1
+# Resolve with Pholus
if newName == -1:
newName = resolve_device_name_pholus (device['dev_MAC'], device['dev_LastIP'], pholusResults)
+# count
+if newName != -1:
+foundPholus += 1
+# if still not found update name so we can distinguish the devices where we tried already
if newName == -1 :
-notFound += 1
-elif newName == -2 :
-ignored += 1
-# else :
-# recordsToUpdate.append ([newName, device['dev_MAC']])
-recordsToUpdate.append (["(name not found)", device['dev_MAC']])
+recordsNotFound.append (["(name not found)", device['dev_MAC']])
+else:
+# name was found with DiG or Pholus
+recordsToUpdate.append ([newName, device['dev_MAC']])
# Print log
-file_print(" Names updated: ", len(recordsToUpdate) )
-# DEBUG - print list of record to update
-# file_print(recordsToUpdate)
-# update devices
+file_print(" Names Found (DiG/Pholus): ", len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" )
+file_print(" Names Not Found : ", len(recordsNotFound) )
+openDB()
+# update not found devices with (name not found)
+sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound )
+# update names of devices which we were able to resolve
sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsToUpdate )
+closeDB()
# DEBUG - print number of rows updated
# file_print(sql.rowcount)
#-------------------------------------------------------------------------------
def performPholusScan (timeout):
-subnetList = []
-# handle old string setting
-if type(SCAN_SUBNETS) is not list:
-subnetList.append(SCAN_SUBNETS)
-else:
-subnetList = SCAN_SUBNETS
# scan every interface
-for subnet in subnetList:
-temp = subnet.strip().split()
-mask = temp[0]
-interface = temp[1].split('=')[1]
-file_print(" Pholus scan on interface: ", interface, " mask: " , mask)
+for subnet in userSubnets:
+temp = subnet.split("--interface=")
+if len(temp) != 2:
+file_print(" Skip interface (need subnet in format '192.168.1.0/24 --interface=eth0'), got: ", subnet)
+return
+mask = temp[0].strip()
+interface = temp[1].strip()
+file_print(" Pholus scan on [interface] ", interface, " [mask] " , mask)
updateState("Scan: Pholus")
-file_print('[', timeNow(), '] Scan: Pholus for ', str(timeout), 's ('+ str(round(int(timeout) / 60), 2) +'min)')
+file_print('[', timeNow(), '] Scan: Pholus for ', str(timeout), 's ('+ str(round(int(timeout) / 60, 1)) +'min)')
-pholus_args = ['python3', '/home/pi/pialert/pholus/pholus3.py', interface, "-rdns_scanning", mask, "-stimeout", str(timeout)]
+adjustedTimeout = str(round(int(timeout) / 2, 0)) # the scan always lasts 2x as long, so the desired user time from settings needs to be halved
+pholus_args = ['python3', '/home/pi/pialert/pholus/pholus3.py', interface, "-rdns_scanning", mask, "-stimeout", adjustedTimeout]
# Execute command
try:
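As an editorial illustration of the parsing and the timeout halving introduced above, here is a minimal standalone sketch; the subnet string and timeout value are sample inputs, not taken from the commit:

# Hypothetical, standalone illustration of the subnet parsing and timeout halving above
subnet = '192.168.1.0/24 --interface=eth0'          # documented SCAN_SUBNETS entry format
temp = subnet.split("--interface=")
mask = temp[0].strip()                              # '192.168.1.0/24'
interface = temp[1].strip()                         # 'eth0'
timeout = 180                                       # sample PHOLUS_TIMEOUT value
adjustedTimeout = str(round(int(timeout) / 2, 0))   # '90.0' - pholus3.py runs roughly twice the requested time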
@@ -1738,32 +1782,125 @@ def performPholusScan (timeout):
if len(params) > 0:
openDB ()
sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)
+closeDB ()
else:
file_print('[', timeNow(), '] Scan: Pholus FAIL - check logs')
#-------------------------------------------------------------------------------
-def resolve_device_name_pholus (pMAC, pIP, pholusResults):
-newName = -1
-for result in pholusResults:
-if pholusResults["MAC"] == pMAC:
-return pholusResults["Value"]
-return newName
+def cleanResult(str):
+# alternative str.split('.')[0]
+str = str.replace("._airplay", "")
+str = str.replace("._tcp", "")
+str = str.replace(".local", "")
+str = str.replace("._esphomelib", "")
+str = str.replace("._googlecast", "")
+str = str.replace(".lan", "")
+str = str.replace(".home", "")
+# Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77 (remove 32 chars at the end matching a regex?)
+str = str.replace(".", "")
+return str
# Disclaimer - I'm interfacing with a script I didn't write (pholus3.py) so it's possible I'm missing types of answers
# it's also possible the pholus3.py script can be adjusted to provide a better output to interface with it
# Hit me with a PR if you know how! :)
def resolve_device_name_pholus (pMAC, pIP, allRes):
pholusMatchesIndexes = []
index = 0
for result in allRes:
if result["MAC"] == pMAC and result["Record_Type"] == "Answer" and '._googlezone' not in result["Value"]:
# found entries with a matching MAC address, let's collect indexes
# pholusMatchesAll.append([list(item) for item in result])
pholusMatchesIndexes.append(index)
index += 1
# file_print('pholusMatchesIndexes:', len(pholusMatchesIndexes))
# return if nothing found
if len(pholusMatchesIndexes) == 0:
return -1
# we have some entries let's try to select the most useful one
# airplay matches contain a lot of information
# Matches for example:
# Brand Tv (50)._airplay._tcp.local. TXT Class:32769 "acl=0 deviceid=66:66:66:66:66:66 features=0x77777,0x38BCB46 rsf=0x3 fv=p20.T-FFFFFF-03.1 flags=0x204 model=XXXX manufacturer=Brand serialNumber=XXXXXXXXXXX protovers=1.1 srcvers=777.77.77 pi=FF:FF:FF:FF:FF:FF psi=00000000-0000-0000-0000-FFFFFFFFFF gid=00000000-0000-0000-0000-FFFFFFFFFF gcgl=0 pk=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
for i in pholusMatchesIndexes:
if checkIPV4(allRes[i]['IP_v4_or_v6']) and '._airplay._tcp.local. TXT Class:32769' in str(allRes[i]["Value"]) :
return allRes[i]["Value"].split('._airplay._tcp.local. TXT Class:32769')[0]
# second best - contains airplay
# Matches for example:
# _airplay._tcp.local. PTR Class:IN "Brand Tv (50)._airplay._tcp.local."
for i in pholusMatchesIndexes:
if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_airplay._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('._googlecast') not in allRes[i]["Value"]:
return cleanResult(allRes[i]["Value"].split('"')[1])
# Contains PTR Class:32769
# Matches for example:
# 3.1.168.192.in-addr.arpa. PTR Class:32769 "MyPc.local."
for i in pholusMatchesIndexes:
if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:32769' in allRes[i]["Value"]:
return cleanResult(allRes[i]["Value"].split('"')[1])
# Contains AAAA Class:IN
# Matches for example:
# DESKTOP-SOMEID.local. AAAA Class:IN "fe80::fe80:fe80:fe80:fe80"
for i in pholusMatchesIndexes:
if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'AAAA Class:IN' in allRes[i]["Value"]:
return cleanResult(allRes[i]["Value"].split('.local.')[0])
# Contains _googlecast._tcp.local. PTR Class:IN
# Matches for example:
# _googlecast._tcp.local. PTR Class:IN "Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77._googlecast._tcp.local."
for i in pholusMatchesIndexes:
if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_googlecast._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('Google-Cast-Group') not in allRes[i]["Value"]:
return cleanResult(allRes[i]["Value"].split('"')[1])
# Contains A Class:32769
# Matches for example:
# Android.local. A Class:32769 "192.168.1.6"
for i in pholusMatchesIndexes:
if checkIPV4(allRes[i]['IP_v4_or_v6']) and ' A Class:32769' in allRes[i]["Value"]:
return cleanResult(allRes[i]["Value"].split(' A Class:32769')[0])
# # Contains PTR Class:IN
# Matches for example:
# _esphomelib._tcp.local. PTR Class:IN "ceiling-light-1._esphomelib._tcp.local."
for i in pholusMatchesIndexes:
if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:IN' in allRes[i]["Value"]:
return cleanResult(allRes[i]["Value"].split('"')[1])
return -1
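To make the selection logic above easier to follow, here is a minimal, hypothetical usage sketch; the MAC, IP and record value are made-up sample data shaped like the rows the function expects (see the record examples in the comments above):

# Hypothetical usage sketch for resolve_device_name_pholus / cleanResult (sample data, not from the commit)
sampleResults = [
    {"MAC": "aa:bb:cc:dd:ee:ff", "IP_v4_or_v6": "192.168.1.6",
     "Record_Type": "Answer",
     "Value": '_esphomelib._tcp.local. PTR Class:IN "ceiling-light-1._esphomelib._tcp.local."'},
]
name = resolve_device_name_pholus("aa:bb:cc:dd:ee:ff", "192.168.1.6", sampleResults)
# The 'PTR Class:IN' branch splits on '"' and cleanResult() strips the mDNS suffixes,
# so name ends up as 'ceiling-light-1'; a MAC with no matching rows would return -1.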
#-------------------------------------------------------------------------------
-def resolve_device_name (pMAC, pIP):
-try :
-pMACstr = str(pMAC)
-# Check MAC parameter
-mac = pMACstr.replace (':','')
-# file_print( ">>>>>> DIG >>>>>")
-if len(pMACstr) != 17 or len(mac) != 12 :
-return -2
+def resolve_device_name_dig (pMAC, pIP):
+newName = ""
+try :
+# pMACstr = str(pMAC)
+# # Check MAC parameter
+# mac = pMACstr.replace (':','')
+# # file_print( ">>>>>> DIG >>>>>")
+# if len(pMACstr) != 17 or len(mac) != 12 :
+# return -2
# DEBUG
# file_print(pMAC, pIP)
@@ -1779,24 +1916,22 @@ def resolve_device_name (pMAC, pIP):
except subprocess.CalledProcessError as e:
# An error occurred, handle it
file_print(e.output)
-newName = "Error - check logs"
+# newName = "Error - check logs"
+return -1
# file_print( ">>>>>> DIG2 >>>>> Name", newName)
# Check returns
newName = newName.strip()
-if len(newName) == 0 :
-return -2
-# Eliminate local domain
-if newName.endswith('.') :
-newName = newName[:-1]
-if newName.endswith('.lan') :
-newName = newName[:-4]
-if newName.endswith('.local') :
-newName = newName[:-6]
-if newName.endswith('.home') :
-newName = newName[:-5]
+if len(newName) == 0 :
+return -1
+# Cleanup
+newName = cleanResult(newName)
+if newName == "" or len(newName) == 0:
+return -1
# Return newName
return newName
@@ -1807,6 +1942,7 @@ def resolve_device_name (pMAC, pIP):
#-------------------------------------------------------------------------------
def void_ghost_disconnections ():
+openDB()
# Void connect ghost events (disconnect event exists in last X min.)
print_log ('Void - 1 Connect ghost events')
sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
@@ -1865,6 +2001,7 @@ def void_ghost_disconnections ():
) """, ) """,
(cycle, startTime) ) (cycle, startTime) )
print_log ('Void end') print_log ('Void end')
closeDB()
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
def pair_sessions_events (): def pair_sessions_events ():
@@ -1875,6 +2012,8 @@ def pair_sessions_events ():
# WHERE eve_EventType IN ('New Device', 'Connected')
# """ )
+openDB()
# Pair Connection / New Device events
print_log ('Pair session - 1 Connections / New Devices')
sql.execute ("""UPDATE Events
@@ -1902,8 +2041,12 @@ def pair_sessions_events ():
""" ) """ )
print_log ('Pair session end') print_log ('Pair session end')
closeDB()
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
def create_sessions_snapshot (): def create_sessions_snapshot ():
openDB()
# Clean sessions snapshot # Clean sessions snapshot
print_log ('Sessions Snapshot - 1 Clean') print_log ('Sessions Snapshot - 1 Clean')
sql.execute ("DELETE FROM SESSIONS" ) sql.execute ("DELETE FROM SESSIONS" )
@@ -1927,9 +2070,15 @@ def create_sessions_snapshot ():
# SELECT * FROM Convert_Events_to_Sessions_Phase2""" )
print_log ('Sessions end')
+closeDB()
#-------------------------------------------------------------------------------
def skip_repeated_notifications ():
+openDB()
# Skip repeated notifications
# due strftime : Overflow --> use "strftime / 60"
print_log ('Skip Repeated')
@@ -1946,6 +2095,8 @@ def skip_repeated_notifications ():
""" ) """ )
print_log ('Skip Repeated end') print_log ('Skip Repeated end')
closeDB()
#=============================================================================== #===============================================================================
# REPORTING
@@ -2477,13 +2628,13 @@ def create_generic_device(client):
create_sensor(client, deviceId, deviceName, 'sensor', 'unknown', 'wifi-alert')
-# #-------------------------------------------------------------------------------
+#-------------------------------------------------------------------------------
def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):
new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon)
# check if config already in list and if not, add it, otherwise skip
-global mqtt_sensors
+global mqtt_sensors, uniqueSensorCount
is_unique = True
@@ -2512,8 +2663,6 @@ def publish_sensor(client, sensorConf):
global mqtt_sensors
-file_print(" Estimated delay:", (len(mqtt_sensors) * int(MQTT_DELAY_SEC)))
message = '{ \
"name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \
"state_topic":"system-sensors/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/state", \
@@ -2604,6 +2753,8 @@ def mqtt_start():
# Get all devices
devices = get_all_devices()
+file_print(" Estimated delay: ", (len(devices) * int(MQTT_DELAY_SEC)), 's ', '(', round((len(devices) * int(MQTT_DELAY_SEC))/60,1) , 'min)' )
for device in devices:
# Create devices in Home Assistant - send config messages
@@ -2829,7 +2980,39 @@ def to_binary_sensor(input):
#===============================================================================
# UTIL
#===============================================================================
#-------------------------------------------------------------------------------
# Make a regular expression
# for validating an Ip-address
ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$"
# Define a function to
# validate an Ip address
def checkIPV4(ip):
# pass the regular expression
# and the string in search() method
if(re.search(ipRegex, ip)):
return True
else:
return False
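A short, hypothetical usage note: the helper above is what the name-resolution code uses to keep only IPv4 answers, e.g.:

# Hypothetical quick check of the regex-based helper above
print(checkIPV4("192.168.1.6"))    # True  - plain IPv4 address
print(checkIPV4("fe80::fe80"))     # False - IPv6 (or any other string) is rejected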
#-------------------------------------------------------------------------------
def updateSubnets():
global userSubnets
# remove old list
userSubnets = []
# multiple interfaces
if type(SCAN_SUBNETS) is list:
for interface in SCAN_SUBNETS :
userSubnets.append(interface)
# one interface only
else:
userSubnets.append(SCAN_SUBNETS)
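For clarity, a minimal sketch of what this normalization produces for the two SCAN_SUBNETS styles found in pialert.conf; it assumes the function above and a module-level SCAN_SUBNETS, and the values are only examples:

# Hypothetical illustration of updateSubnets() with the two supported SCAN_SUBNETS styles
SCAN_SUBNETS = '--localnet'                          # legacy single-string form
updateSubnets()                                      # -> userSubnets == ['--localnet']

SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth1',
                '192.168.1.0/24 --interface=eth0']   # list form, one entry per interface
updateSubnets()                                      # -> userSubnets == both entries, scanned one by one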
#-------------------------------------------------------------------------------
def sanitize_string(input):
if isinstance(input, bytes):
@@ -2897,7 +3080,7 @@ def get_device_stats():
# columns = ["online","down","all","archived","new","unknown"]
sql.execute("""
-SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(unresolved)' ) as unknown from Online_History order by Scan_Date desc limit 1
+SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1
""")
row = sql.fetchone()

View File

@@ -1,113 +1,117 @@
-#-------------------------------------------------------------------------------
-# Pi.Alert
-# Open Source Network Guard / WIFI & LAN intrusion detector
-#
-# pialert.conf - Back module. Configuration file
-#-------------------------------------------------------------------------------
-# Puche 2021   pi.alert.application@gmail.com   GNU GPLv3
-#-------------------------------------------------------------------------------
+#-----------------AUTOGENERATED FILE-----------------#
+#                                                     #
+#        Generated: 2022-12-30_22-19-40               #
+#                                                     #
+#   Config file for the LAN intruder detection app:   #
+#        https://github.com/jokob-sk/Pi.Alert         #
+#                                                     #
+#-----------------AUTOGENERATED FILE-----------------#
-# GENERAL settings
-# ----------------------
-PRINT_LOG = False
-TIMEZONE = 'Europe/Berlin'
+# General
+#---------------------------
PIALERT_WEB_PROTECTION = False
PIALERT_WEB_PASSWORD = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92'
INCLUDED_SECTIONS = ['internet', 'new_devices', 'down_devices', 'events'] # Specifies which events trigger notifications.
# Remove the event type(s) you don't want to get notified on
# Overrides device-specific settings in the UI.
SCAN_CYCLE_MINUTES = 5 # delay between scans
# EMAIL settings
# ----------------------
SMTP_SERVER = 'smtp.gmail.com'
SMTP_PORT = 587
SMTP_USER = 'user@gmail.com'
SMTP_PASS = 'password'
SMTP_SKIP_TLS = False
SMTP_SKIP_LOGIN = False
REPORT_MAIL = False
REPORT_FROM = 'Pi.Alert <' + SMTP_USER +'>'
REPORT_TO = 'user@gmail.com'
REPORT_DEVICE_URL = 'http://pi.alert/deviceDetails.php?mac='
REPORT_DASHBOARD_URL = 'http://pi.alert/'
# Webhook settings
# ----------------------
REPORT_WEBHOOK = False
WEBHOOK_URL = 'http://n8n.local:5555/webhook-test/aaaaaaaa-aaaa-aaaa-aaaaa-aaaaaaaaaaaa'
WEBHOOK_PAYLOAD = 'json' # webhook payload data format for the "body > attachements > text" attribute
# in https://github.com/jokob-sk/Pi.Alert/blob/main/docs/webhook_json_sample.json
# supported values: 'json', 'html' or 'text'
# e.g.: for discord use 'html'
WEBHOOK_REQUEST_METHOD = 'GET' # POST, GET...
# Apprise settings
#-----------------------
REPORT_APPRISE = False
APPRISE_HOST = 'http://localhost:8000/notify'
APPRISE_URL = 'mailto://smtp-relay.sendinblue.com:587?from=user@gmail.com&name=apprise&user=user@gmail.com&pass=password&to=user@gmail.com'
# NTFY (https://ntfy.sh/) settings
# ----------------------
REPORT_NTFY = False
NTFY_HOST = 'https://ntfy.sh'
NTFY_TOPIC = 'replace_my_secure_topicname_91h889f28'
NTFY_USER = 'user' # set username...
NTFY_PASSWORD = 'passw0rd' # ...and password if authentication is used https://docs.ntfy.sh/config/#users-and-roles
# PUSHSAFER (https://www.pushsafer.com/) settings
# ----------------------
REPORT_PUSHSAFER = False
PUSHSAFER_TOKEN = 'ApiKey'
# MQTT settings
# ----------------------
REPORT_MQTT = False
MQTT_BROKER = '192.168.1.2'
MQTT_PORT = 1883
MQTT_USER = 'mqtt'
MQTT_PASSWORD = 'passw0rd'
MQTT_QOS = 0
MQTT_DELAY_SEC = 2 # delay in seconds, increase if not all devices are created in Home Assistant
# DynDNS
# ----------------------
# QUERY_MYIP_SERVER = 'https://diagnostic.opendns.com/myip'
QUERY_MYIP_SERVER = 'http://ipv4.icanhazip.com'
DDNS_ACTIVE = False
DDNS_DOMAIN = 'your_domain.freeddns.org'
DDNS_USER = 'dynu_user'
DDNS_PASSWORD = 'A0000000B0000000C0000000D0000000'
DDNS_UPDATE_URL = 'https://api.dynu.com/nic/update?'
# PIHOLE settings
# ----------------------
PIHOLE_ACTIVE = False # if enabled you need to map '/etc/pihole/pihole-FTL.db' in docker-compose.yml
PIHOLE_DB = '/etc/pihole/pihole-FTL.db'
DHCP_ACTIVE = False # if enabled you need to map '/etc/pihole/dhcp.leases' in docker-compose.yml
DHCP_LEASES = '/etc/pihole/dhcp.leases'
# arp-scan options & samples
# ----------------------
#
# Scan local network (default)
# SCAN_SUBNETS = '--localnet'
#
# Scan two subnets
# SCAN_SUBNETS = '192.168.11.0/24 192.168.144.0/24'
#
# Scan using interface eth0
-# SCAN_SUBNETS = '--localnet --interface=eth0'
+# SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth0']
#
# Scan multiple interfaces (eth1 and eth0):
# SCAN_SUBNETS = [ '192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0' ]
SCAN_SUBNETS=['192.168.1.0/24 --interface=eth1']
PRINT_LOG=False
TIMEZONE='Europe/Berlin'
PIALERT_WEB_PROTECTION=False
PIALERT_WEB_PASSWORD='8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92'
INCLUDED_SECTIONS=['internet','new_devices','down_devices','events']
SCAN_CYCLE_MINUTES=5
DAYS_TO_KEEP_EVENTS=90
REPORT_DASHBOARD_URL='http://pi.alert/'
-SCAN_SUBNETS = '--localnet'
-# Maintenance Task Settings
-# ----------------------
-DAYS_TO_KEEP_EVENTS = 90
+# Email
+#---------------------------
+REPORT_MAIL=False
SMTP_SERVER='smtp.gmail.com'
SMTP_PORT=587
REPORT_TO='user@gmail.com'
REPORT_FROM='Pi.Alert <user@gmail.com>'
SMTP_SKIP_LOGIN=False
SMTP_USER='user@gmail.com'
SMTP_PASS='password'
SMTP_SKIP_TLS=False
# Webhooks
#---------------------------
REPORT_WEBHOOK=False
WEBHOOK_URL='http://n8n.local:5555/webhook-test/aaaaaaaa-aaaa-aaaa-aaaaa-aaaaaaaaaaaa'
WEBHOOK_PAYLOAD='json' # webhook payload data format for the "body > attachements > text" attribute
# in https://github.com/jokob-sk/Pi.Alert/blob/main/docs/webhook_json_sample.json
# supported values: 'json', 'html' or 'text'
# e.g.: for discord use 'html'
WEBHOOK_REQUEST_METHOD='GET'
# Apprise
#---------------------------
REPORT_APPRISE=False
APPRISE_HOST='http://localhost:8000/notify'
APPRISE_URL='mailto://smtp-relay.sendinblue.com:587?from=user@gmail.com&name=apprise&user=user@gmail.com&pass=password&to=user@gmail.com'
# NTFY
#---------------------------
REPORT_NTFY=False
NTFY_HOST='https://ntfy.sh'
NTFY_TOPIC='replace_my_secure_topicname_91h889f28'
NTFY_USER='user'
NTFY_PASSWORD='passw0rd'
# PUSHSAFER
#---------------------------
REPORT_PUSHSAFER=False
PUSHSAFER_TOKEN='ApiKey'
# MQTT
#---------------------------
REPORT_MQTT=False
MQTT_BROKER='192.168.1.2'
MQTT_PORT=1883
MQTT_USER='mqtt'
MQTT_PASSWORD='passw0rd'
MQTT_QOS=0
MQTT_DELAY_SEC=2
# DynDNS
#---------------------------
DDNS_ACTIVE=False
DDNS_DOMAIN='your_domain.freeddns.org'
DDNS_USER='dynu_user'
DDNS_PASSWORD='A0000000B0000000C0000000D0000000'
DDNS_UPDATE_URL='https://api.dynu.com/nic/update?'
# PiHole
#---------------------------
# if enabled you need to map '/etc/pihole/pihole-FTL.db' in docker-compose.yml
PIHOLE_ACTIVE=False
# if enabled you need to map '/etc/pihole/dhcp.leases' in docker-compose.yml
DHCP_ACTIVE=False
# Pholus
#---------------------------
PHOLUS_ACTIVE=False
PHOLUS_TIMEOUT=20
PHOLUS_FORCE=False
PHOLUS_DAYS_DATA=7
PHOLUS_RUN='once'
PHOLUS_RUN_TIMEOUT=300
PHOLUS_RUN_SCHD='0 4 * * *'
#-------------------IMPORTANT INFO-------------------#
# This file is ingested by a python script, so if #
# modified it needs to use python syntax #
#-------------------IMPORTANT INFO-------------------#

View File

@@ -8,7 +8,7 @@ services:
volumes:
  - ${APP_DATA_LOCATION}/pialert/config:/home/pi/pialert/config
  # - ${APP_DATA_LOCATION}/pialert/db/pialert.db:/home/pi/pialert/db/pialert.db
-  - ${APP_DATA_LOCATION}/pialert/db:/home/pi/pialert/db
+  - ${APP_DATA_LOCATION}/pialert/db2:/home/pi/pialert/db
  # (optional) map an empty file with the name 'setting_darkmode' if you want to force the dark mode on container rebuild
  - ${APP_DATA_LOCATION}/pialert/db/setting_darkmode:/home/pi/pialert/db/setting_darkmode
  # (optional) useful for debugging if you have issues setting up the container
@@ -17,6 +17,7 @@ services:
  - ${DEV_LOCATION}/back/pialert.py:/home/pi/pialert/back/pialert.py
  - ${DEV_LOCATION}/back/update_vendors.sh:/home/pi/pialert/back/update_vendors.sh
  - ${DEV_LOCATION}/pholus:/home/pi/pialert/pholus
+  - ${DEV_LOCATION}/dockerfiles:/home/pi/pialert/dockerfiles
  - ${APP_DATA_LOCATION}/pialert/php.ini:/etc/php/7.4/fpm/php.ini
  - ${DEV_LOCATION}/front/css:/home/pi/pialert/front/css
  - ${DEV_LOCATION}/front/js:/home/pi/pialert/front/js

View File

@@ -3,7 +3,12 @@
# if custom variables not set we do not need to do anything
if [ -n "${TZ}" ]; then
+FILECONF=/home/pi/pialert/config/pialert.conf
+if [ -f "$FILECONF" ]; then
sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/config/pialert.conf
+else
+sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/back/pialert.conf_bak
+fi
fi
if [ -n "${PORT}" ]; then
@@ -11,13 +16,16 @@ if [ -n "${PORT}" ]; then
fi
# I hope this will fix DB permission issues going forward
-chown -R www-data:www-data /home/pi/pialert/db/pialert.db
+FILEDB=/home/pi/pialert/db/pialert.db
+if [ -f "$FILEDB" ]; then
+chown -R www-data:www-data /home/pi/pialert/db/pialert.db
+fi
chmod -R a+rw /home/pi/pialert/front/log
+chmod -R a+rw /home/pi/pialert/config
/etc/init.d/php7.4-fpm start
/etc/init.d/nginx start
# cron -f
python /home/pi/pialert/back/pialert.py

View File

@@ -1625,6 +1625,9 @@ function loadPholus()
{
// console.log(mac)
// console.log('php/server/devices.php?action=getPholus&mac='+ mac)
+$(".deviceSpecific").remove();
$.get('php/server/devices.php?action=getPholus&mac='+ mac, function(data) {
data = sanitize(data);

View File

@@ -733,11 +733,31 @@ function scrollDown()
}
function initializeTabs () {
key = "activeMaintenanceTab"
// --------------------------------------------------------
// default selection
selectedTab = "tab_Settings"
// the #target from the url
target = window.location.hash.substr(1)
// update cookie if target specified
if(target != "")
{
// console.log(target)
setCache(key, target+'_id')
}
// get the tab id from the cookie (already overridden by the target)
if(!emptyArr.includes(getCache(key)))
{
selectedTab = getCache(key);
}
// Activate panel
if(!emptyArr.includes(getCache(key)))
{

View File

@@ -410,127 +410,5 @@ $lang['de_de'] = array(
// Settings
//////////////////////////////////////////////////////////////////
//General
'SCAN_SUBNETS_name' => 'Subnets to scan',
'SCAN_SUBNETS_description' => '
The scan time itself depends on the number of IP addresses to check.
The number of Ips to check depends on the <a target="_blank" href="https://www.calculator.net/ip-subnet-calculator.html">network mask</a> you set here.
For example, a <code>/24</code> mask results in 256 IPs to check, where as a <code>/16</code>
mask checks around 65,536. Every IP takes a couple seconds to scan. This means that with an incorrect configuration
the scan will take hours to complete instead of seconds.
<ol>
<li>Specify the network mask. For example, the filter <code>192.168.1.0/24</code> covers IP ranges 192.168.1.0 to 192.168.1.255.</li>
<li>Run <code>iwconfig</code> in your container to find your interface name(s) (e.g.: <code>eth0</code>, <code>eth1</code>)</li>
</ol>
',
'PRINT_LOG_name' => 'Print additional logging',
'PRINT_LOG_description' => 'This setting will enable more verbose logging. Useful for debugging events writing into the database.',
'TIMEZONE_name' => 'Time zone',
'TIMEZONE_description' => 'Time zone to display stats correctly. Find your time zone <a target="_blank" href="https://en.wikipedia.org/wiki/List_of_tz_database_time_zones" rel="nofollow">here</a>.',
'PIALERT_WEB_PROTECTION_name' => 'Enable login',
'PIALERT_WEB_PROTECTION_description' => 'When enabled a login dialog is displayed. Read below carefully if you get locked out of your instance.',
'PIALERT_WEB_PASSWORD_name' => 'Login password',
'PIALERT_WEB_PASSWORD_description' => 'The default password is <code>123456</code>. To change password run <code>/home/pi/pialert/back/pialert-cli</code> in the container',
'INCLUDED_SECTIONS_name' => 'Notify on',
'INCLUDED_SECTIONS_description' => 'Specifies which events trigger notifications. Remove the event type(s) you don\'t want to get notified on. This setting overrides device-specific settings in the UI. (CTRL + Click to select / deselect).',
'SCAN_CYCLE_MINUTES_name' => 'Scan cycle delay',
'SCAN_CYCLE_MINUTES_description' => 'The delay between scans. If using arp-scan, the scan time itself depends on the number of IP addresses to check. This is influenced by the network mask set in the <code>SCAN_SUBNETS</code> setting at the top. Every IP takes a couple seconds to scan.',
'DAYS_TO_KEEP_EVENTS_name' => 'Delete events older than',
'DAYS_TO_KEEP_EVENTS_description' => 'This is a maintenance setting. This specifies the number of days worth of event entries that will be kept. All older events will be deleted periodically.',
'REPORT_DASHBOARD_URL_name' => 'Pi.Alert URL',
'REPORT_DASHBOARD_URL_description' => 'This URL is used as the base for generating links in the emails. Enter full URL starting with <code>http://</code> including the port number (no trailing slash <code>/</code>).',
//Email
'REPORT_MAIL_name' => 'Enable email',
'REPORT_MAIL_description' => 'If enabled an email is sent out with a list of changes you\'ve subscribed to. Please also fill out all remaining settings related to the SMTP setup below.',
'SMTP_SERVER_name' => 'SMTP server URL',
'SMTP_SERVER_description' => 'The SMTP server host URL. For example <code>smtp-relay.sendinblue.com</code>.',
'SMTP_PORT_name' => 'SMTP server PORT',
'SMTP_PORT_description' => 'Port number used for the SMTP connection.',
'SMTP_SKIP_LOGIN_name' => 'Skip authentication',
'SMTP_SKIP_LOGIN_description' => 'Don\'t use authentication when connecting to the SMTP server.',
'SMTP_USER_name' => 'SMTP user',
'SMTP_USER_description' => 'The user name used to login into the SMTP server (sometimes a full email address).',
'SMTP_PASS_name' => 'SMTP password',
'SMTP_PASS_description' => 'The SMTP server password. ',
'SMTP_SKIP_TLS_name' => 'Don\'t use TLS',
'SMTP_SKIP_TLS_description' => 'Disable TLS when connecting to your SMTP server.',
'REPORT_TO_name' => 'Send email to',
'REPORT_TO_description' => 'Email address to which the notification will be send to.',
'REPORT_FROM_name' => 'Email subject',
'REPORT_FROM_description' => 'Notification email subject line.',
//Webhooks
'REPORT_WEBHOOK_name' => 'Enable Webhooks',
'REPORT_WEBHOOK_description' => 'Enable webhooks for notifications. If enabled, configure related settings below.',
'WEBHOOK_URL_name' => 'Target URL',
'WEBHOOK_URL_description' => 'Target URL starting with <code>http://</code> or <code>https://</code>.',
'WEBHOOK_PAYLOAD_name' => 'Payload type',
'WEBHOOK_PAYLOAD_description' => 'The Webhook payload data format for the "body > attachements > text" attribute in the payload json. See an example of the payload <a target="_blank" href="https://github.com/jokob-sk/Pi.Alert/blob/main/docs/webhook_json_sample.json">here</a>. (e.g.: for discord use <code>\'html\'</code>)',
'WEBHOOK_REQUEST_METHOD_name' => 'Request method',
'WEBHOOK_REQUEST_METHOD_description' => 'The HTTP request method to be used for the webhook call.',
// Apprise
'REPORT_APPRISE_name' => 'Enable Apprise',
'REPORT_APPRISE_description' => 'Enable sending notifications via <a target="_blank" href="https://hub.docker.com/r/caronc/apprise">Apprise</a>.',
'APPRISE_HOST_name' => 'Apprise host URL',
'APPRISE_HOST_description' => 'Apprise host URL starting with <code>http://</code> or <code>https://</code>. (don\'t forget to include <code>/notify</code> at the end)',
'APPRISE_URL_name' => 'Apprise notification URL',
'APPRISE_URL_description' => 'Apprise notification target URL.',
// NTFY
'REPORT_NTFY_name' => 'Enable NTFY',
'REPORT_NTFY_description' => 'Enable sending notifications via <a target="_blank" href="https://ntfy.sh/">NTFY</a>.',
'NTFY_HOST_name' => 'NTFY host URL',
'NTFY_HOST_description' => 'NTFY host URL starting with <code>http://</code> or <code>https://</code>. You can use the hosted instance on <a target="_blank" href="https://ntfy.sh/">https://ntfy.sh</a> by simply entering <code>https://ntfy.sh</code>.',
'NTFY_TOPIC_name' => 'NTFY topic',
'NTFY_TOPIC_description' => 'Your secret topic.',
'NTFY_USER_name' => 'NTFY user',
'NTFY_USER_description' => 'Enter user if you need (host) an instance with enabled authentication.',
'NTFY_PASSWORD_name' => 'NTFY password',
'NTFY_PASSWORD_description' => 'Enter password if you need (host) an instance with enabled authentication.',
// Pushsafer
'REPORT_PUSHSAFER_name' => 'Enable Pushsafer',
'REPORT_PUSHSAFER_description' => 'Enable sending notifications via <a target="_blank" href="https://www.pushsafer.com/">Pushsafer</a>.',
'PUSHSAFER_TOKEN_name' => 'Pushsafer token',
'PUSHSAFER_TOKEN_description' => 'Your secret Pushsafer API key (token).',
// MQTT
'REPORT_MQTT_name' => 'Enable MQTT',
'REPORT_MQTT_description' => 'Enable sending notifications via <a target="_blank" href="https://www.home-assistant.io/integrations/mqtt/">MQTT</a> to your Home Assistance instance.',
'MQTT_BROKER_name' => 'MQTT broker URL',
'MQTT_BROKER_description' => 'MQTT host URL (don\'t include <code>http://</code> or <code>https://</code>).',
'MQTT_PORT_name' => 'MQTT broker port',
'MQTT_PORT_description' => 'Port number where the broker is listening. Usually <code>1883</code>.',
'MQTT_USER_name' => 'MQTT user',
'MQTT_USER_description' => 'User name used to login into your MQTT broker instance.',
'MQTT_PASSWORD_name' => 'MQTT password',
'MQTT_PASSWORD_description' => 'Password used to login into your MQTT broker instance.',
'MQTT_QOS_name' => 'MQTT Quality of Service',
'MQTT_QOS_description' => 'Quality of service setting for MQTT message sending. 0 - Low quality to 2 - High quality. The higher the quality the longer the delay.',
'MQTT_DELAY_SEC_name' => 'MQTT delay per device',
'MQTT_DELAY_SEC_description' => 'A little hack - delay adding to the queue in case the process is restarted and previous publish processes aborted (it takes ~2s to update a sensor config on the broker). Tested with 2-3 seconds of delay. This delay is only applied when devices are created (during the first notification loop). It doesn\'t affect subsequent scans or notifications.',
//DynDNS
'DDNS_ACTIVE_name' => 'Enable DynDNS',
'DDNS_ACTIVE_description' => '',
'DDNS_DOMAIN_name' => 'DynDNS domain URL',
'DDNS_DOMAIN_description' => '',
'DDNS_USER_name' => 'DynDNS user',
'DDNS_USER_description' => '',
'DDNS_PASSWORD_name' => 'DynDNS password',
'DDNS_PASSWORD_description' => '',
'DDNS_UPDATE_URL_name' => 'DynDNS update URL',
'DDNS_UPDATE_URL_description' => 'Update URL starting with <code>http://</code> or <code>https://</code>.',
// PiHole
'PIHOLE_ACTIVE_name' => 'Enable PiHole mapping',
'PIHOLE_ACTIVE_description' => 'If enabled, you need to map <code>:/etc/pihole/pihole-FTL.db</code> in your <code>docker-compose.yml</code> file.',
'DHCP_ACTIVE_name' => 'Enable PiHole DHCP',
'DHCP_ACTIVE_description' => 'If enabled, you need to map <code>:/etc/pihole/dhcp.leases</code> in your <code>docker-compose.yml</code> file.',
); );
?> ?>
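Once the PiHole mapping above is in place, the FTL database can be read like any SQLite file. The sketch below is only an illustration: the table and column names reflect the commonly documented pihole-FTL.db schema and may vary between Pi-hole versions, and this is not how Pi.Alert itself queries the database.

import sqlite3

# Path as it appears inside the container after the docker-compose mapping
PIHOLE_DB = "/etc/pihole/pihole-FTL.db"

con = sqlite3.connect(f"file:{PIHOLE_DB}?mode=ro", uri=True)  # open read-only
# The network / network_addresses tables hold MAC, IP and (where known) host names
for hwaddr, ip, name in con.execute(
        """SELECT n.hwaddr, na.ip, na.name
           FROM network n JOIN network_addresses na ON na.network_id = n.id"""):
    print(hwaddr, ip, name)
con.close()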

View File

@@ -544,20 +544,20 @@ the arp-scan will take hours to complete instead of seconds.
// PiHole // PiHole
'PIHOLE_ACTIVE_name' => 'Enable PiHole mapping', 'PIHOLE_ACTIVE_name' => 'Enable PiHole mapping',
'PIHOLE_ACTIVE_description' => 'If enabled you need to map <code>:/etc/pihole/pihole-FTL.db</code> in your <code>docker-compose.yml</code> file.', 'PIHOLE_ACTIVE_description' => 'You need to map <code>:/etc/pihole/pihole-FTL.db</code> in the <code>docker-compose.yml</code> file if you enable this setting.',
'DHCP_ACTIVE_name' => 'Enable PiHole DHCP', 'DHCP_ACTIVE_name' => 'Enable PiHole DHCP',
'DHCP_ACTIVE_description' => 'If enabled you need to map <code>:/etc/pihole/dhcp.leases</code> in your <code>docker-compose.yml</code> file.', 'DHCP_ACTIVE_description' => 'You need to map <code>:/etc/pihole/dhcp.leases</code> in the <code>docker-compose.yml</code> file if you enable this setting.',
// Pholus // Pholus
'PHOLUS_ACTIVE_name' => 'Enable Pholus scan', 'PHOLUS_ACTIVE_name' => 'Cycle run',
'PHOLUS_ACTIVE_description' => '<a href="https://github.com/jokob-sk/Pi.Alert/tree/main/pholus" target="_blank" >Pholus</a> is a sniffing tool to discover additional information about the devices on the network, including the device name. Please be aware it can spam the network with unnecessary traffic. Depends on the <a href="#SCAN_SUBNETS"><code>SCAN_SUBNETS</code> setting</a>.', 'PHOLUS_ACTIVE_description' => 'If enabled, Pholus will execute during every network scan cycle until there are no <code>(unknown)</code> or <code>(name not found)</code> devices. <a href="https://github.com/jokob-sk/Pi.Alert/tree/main/pholus" target="_blank" >Pholus</a> is a sniffing tool to discover additional information about the devices on the network, including the device name. Please be aware it can spam the network with unnecessary traffic. Depends on the <a href="#SCAN_SUBNETS"><code>SCAN_SUBNETS</code> setting</a>.',
'PHOLUS_TIMEOUT_name' => 'Pholus timeout', 'PHOLUS_TIMEOUT_name' => 'Cycle run timeout',
'PHOLUS_TIMEOUT_description' => 'How long in seconds should Pholus be sniffing on each interface. Only used if an <code>(unknown)</code> device is found. The longer you leave it on, the more likely devices would broadcast more info. This timeout adds to the time it takes to perform an arp-scan on your network.', 'PHOLUS_TIMEOUT_description' => 'How long, in seconds, Pholus should sniff on each interface if the above condition is fulfilled. The longer it runs, the more likely devices are to broadcast additional info. This timeout adds to the time it takes to perform an arp-scan on your network.',
'PHOLUS_FORCE_name' => 'Force scan', 'PHOLUS_FORCE_name' => 'Cycle force scan',
'PHOLUS_FORCE_description' => 'Force scan every network scan, even if there are no <code>(unknown)</code> devices. Be careful enabling this as the sniffing can easily flood your network.', 'PHOLUS_FORCE_description' => 'Force a Pholus scan on every network scan cycle, even if there are no <code>(unknown)</code> or <code>(name not found)</code> devices. Be careful when enabling this, as the sniffing can easily flood your network.',
'PHOLUS_DAYS_DATA_name' => 'Data retention', 'PHOLUS_DAYS_DATA_name' => 'Data retention',
'PHOLUS_DAYS_DATA_description' => 'How many days of Pholus scan entries should be kept (globally, not device specific!). The <a href="/maintenance.php#tab_Logging_id">pialert_pholus.log</a> file is not touched.', 'PHOLUS_DAYS_DATA_description' => 'How many days of Pholus scan entries should be kept (globally, not device specific!). The <a href="/maintenance.php#tab_Logging">pialert_pholus.log</a> file is not touched.',
'PHOLUS_RUN_name' => 'Scheduled scan', 'PHOLUS_RUN_name' => 'Scheduled run',
'PHOLUS_RUN_description' => 'Enable a regular Pholus scan / sniff on your network. The scheduling settings can be found below.', 'PHOLUS_RUN_description' => 'Enable a regular Pholus scan / sniff on your network. The scheduling settings can be found below.',
'PHOLUS_RUN_TIMEOUT_name' => 'Scheduled run timeout', 'PHOLUS_RUN_TIMEOUT_name' => 'Scheduled run timeout',
'PHOLUS_RUN_TIMEOUT_description' => 'The timeout in seconds for the scheduled Pholus scan. Same notes regarding the duration apply as on the <a href="#PHOLUS_TIMEOUT"><code>PHOLUS_TIMEOUT</code> setting</a>. A scheduled scan doesn\'t check if there are <code>(unknown)</code> devices, the scan is executed either way.', 'PHOLUS_RUN_TIMEOUT_description' => 'The timeout in seconds for the scheduled Pholus scan. Same notes regarding the duration apply as on the <a href="#PHOLUS_TIMEOUT"><code>PHOLUS_TIMEOUT</code> setting</a>. A scheduled scan doesn\'t check if there are <code>(unknown)</code> devices, the scan is executed either way.',

View File

@@ -415,126 +415,5 @@ $lang['es_es'] = array(
// Settings // Settings
////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////
//General
'SCAN_SUBNETS_name' => 'Subnets to scan',
'SCAN_SUBNETS_description' => '
The scan time itself depends on the number of IP addresses to check.
The number of IPs to check depends on the <a target="_blank" href="https://www.calculator.net/ip-subnet-calculator.html">network mask</a> you set here.
For example, a <code>/24</code> mask results in 256 IPs to check, whereas a <code>/16</code>
mask checks around 65,536. Every IP takes a couple of seconds to scan. This means that with an incorrect configuration
the scan will take hours to complete instead of seconds.
<ol>
<li>Specify the network mask. For example, the filter <code>192.168.1.0/24</code> covers IP ranges 192.168.1.0 to 192.168.1.255.</li>
<li>Run <code>iwconfig</code> in your container to find your interface name(s) (e.g.: <code>eth0</code>, <code>eth1</code>)</li>
</ol>
',
'PRINT_LOG_name' => 'Print additional logging',
'PRINT_LOG_description' => 'This setting will enable more verbose logging. Useful for debugging events writing into the database.',
'TIMEZONE_name' => 'Time zone',
'TIMEZONE_description' => 'Time zone to display stats correctly. Find your time zone <a target="_blank" href="https://en.wikipedia.org/wiki/List_of_tz_database_time_zones" rel="nofollow">here</a>.',
'PIALERT_WEB_PROTECTION_name' => 'Enable login',
'PIALERT_WEB_PROTECTION_description' => 'When enabled a login dialog is displayed. Read below carefully if you get locked out of your instance.',
'PIALERT_WEB_PASSWORD_name' => 'Login password',
'PIALERT_WEB_PASSWORD_description' => 'The default password is <code>123456</code>. To change password run <code>/home/pi/pialert/back/pialert-cli</code> in the container',
'INCLUDED_SECTIONS_name' => 'Notify on',
'INCLUDED_SECTIONS_description' => 'Specifies which events trigger notifications. Remove the event type(s) you don\'t want to get notified on. This setting overrides device-specific settings in the UI. (CTRL + Click to select / deselect).',
'SCAN_CYCLE_MINUTES_name' => 'Scan cycle delay',
'SCAN_CYCLE_MINUTES_description' => 'The delay between scans. If using arp-scan, the scan time itself depends on the number of IP addresses to check. This number depends on the network mask set in the <code>SCAN_SUBNETS</code> setting at the top. Every IP takes a couple seconds to scan.',
'DAYS_TO_KEEP_EVENTS_name' => 'Delete events older than',
'DAYS_TO_KEEP_EVENTS_description' => 'This is a maintenance setting. This specifies the number of days worth of event entries that will be kept. All older events will be deleted periodically.',
'REPORT_DASHBOARD_URL_name' => 'Pi.Alert URL',
'REPORT_DASHBOARD_URL_description' => 'This URL is used as the base for generating links in the emails. Enter the full URL starting with <code>http://</code>, including the port number (no trailing slash <code>/</code>).',
//Email
'REPORT_MAIL_name' => 'Enable email',
'REPORT_MAIL_description' => 'If enabled an email is sent out with a list of changes you\'ve subscribed to. Please also fill out all remaining settings related to the SMTP setup below.',
'SMTP_SERVER_name' => 'SMTP server URL',
'SMTP_SERVER_description' => 'The SMTP server host URL. For example <code>smtp-relay.sendinblue.com</code>.',
'SMTP_PORT_name' => 'SMTP server PORT',
'SMTP_PORT_description' => 'Port number used for the SMTP connection.',
'SMTP_SKIP_LOGIN_name' => 'Skip authentication',
'SMTP_SKIP_LOGIN_description' => 'Don\'t use authentication when connecting to the SMTP server.',
'SMTP_USER_name' => 'SMTP user',
'SMTP_USER_description' => 'The user name used to login into the SMTP server (sometimes a full email address).',
'SMTP_PASS_name' => 'SMTP password',
'SMTP_PASS_description' => 'The SMTP server password. ',
'SMTP_SKIP_TLS_name' => 'Don\'t use TLS',
'SMTP_SKIP_TLS_description' => 'Disable TLS when connecting to your SMTP server.',
'REPORT_TO_name' => 'Send email to',
'REPORT_TO_description' => 'Email address to which the notification will be sent.',
'REPORT_FROM_name' => 'Email subject',
'REPORT_FROM_description' => 'Notification email subject line.',
//Webhooks
'REPORT_WEBHOOK_name' => 'Enable Webhooks',
'REPORT_WEBHOOK_description' => 'Enable webhooks for notifications. If enabled, configure related settings below.',
'WEBHOOK_URL_name' => 'Target URL',
'WEBHOOK_URL_description' => 'Target URL starting with <code>http://</code> or <code>https://</code>.',
'WEBHOOK_PAYLOAD_name' => 'Payload type',
'WEBHOOK_PAYLOAD_description' => 'The Webhook payload data format for the "body > attachements > text" attribute in the payload json. See an example of the payload <a target="_blank" href="https://github.com/jokob-sk/Pi.Alert/blob/main/docs/webhook_json_sample.json">here</a>. (e.g.: for Discord use <code>\'html\'</code>)',
'WEBHOOK_REQUEST_METHOD_name' => 'Request method',
'WEBHOOK_REQUEST_METHOD_description' => 'The HTTP request method to be used for the webhook call.',
// Apprise
'REPORT_APPRISE_name' => 'Enable Apprise',
'REPORT_APPRISE_description' => 'Enable sending notifications via <a target="_blank" href="https://hub.docker.com/r/caronc/apprise">Apprise</a>.',
'APPRISE_HOST_name' => 'Apprise host URL',
'APPRISE_HOST_description' => 'Apprise host URL starting with <code>http://</code> or <code>https://</code>. (don\'t forget to include <code>/notify</code> at the end)',
'APPRISE_URL_name' => 'Apprise notification URL',
'APPRISE_URL_description' => 'Apprise notification target URL.',
// NTFY
'REPORT_NTFY_name' => 'Enable NTFY',
'REPORT_NTFY_description' => 'Enable sending notifications via <a target="_blank" href="https://ntfy.sh/">NTFY</a>.',
'NTFY_HOST_name' => 'NTFY host URL',
'NTFY_HOST_description' => 'NTFY host URL starting with <code>http://</code> or <code>https://</code>. You can use the hosted instance on <a target="_blank" href="https://ntfy.sh/">https://ntfy.sh</a> by simply entering <code>https://ntfy.sh</code>.',
'NTFY_TOPIC_name' => 'NTFY topic',
'NTFY_TOPIC_description' => 'Your secret topic.',
'NTFY_USER_name' => 'NTFY user',
'NTFY_USER_description' => 'Enter the user name if you host or use an instance with authentication enabled.',
'NTFY_PASSWORD_name' => 'NTFY password',
'NTFY_PASSWORD_description' => 'Enter the password if you host or use an instance with authentication enabled.',
// Pushsafer
'REPORT_PUSHSAFER_name' => 'Enable Pushsafer',
'REPORT_PUSHSAFER_description' => 'Enable sending notifications via <a target="_blank" href="https://www.pushsafer.com/">Pushsafer</a>.',
'PUSHSAFER_TOKEN_name' => 'Pushsafer token',
'PUSHSAFER_TOKEN_description' => 'Your secret Pushsafer API key (token).',
// MQTT
'REPORT_MQTT_name' => 'Enable MQTT',
'REPORT_MQTT_description' => 'Enable sending notifications via <a target="_blank" href="https://www.home-assistant.io/integrations/mqtt/">MQTT</a> to your Home Assistant instance.',
'MQTT_BROKER_name' => 'MQTT broker URL',
'MQTT_BROKER_description' => 'MQTT host URL (don\'t include <code>http://</code> or <code>https://</code>).',
'MQTT_PORT_name' => 'MQTT broker port',
'MQTT_PORT_description' => 'Port number where the broker is listening. Usually <code>1883</code>.',
'MQTT_USER_name' => 'MQTT user',
'MQTT_USER_description' => 'User name used to login into your MQTT broker instance.',
'MQTT_PASSWORD_name' => 'MQTT password',
'MQTT_PASSWORD_description' => 'Password used to login into your MQTT broker instance.',
'MQTT_QOS_name' => 'MQTT Quality of Service',
'MQTT_QOS_description' => 'Quality of service setting for MQTT message sending. 0 - Low quality to 2 - High quality. The higher the quality the longer the delay.',
'MQTT_DELAY_SEC_name' => 'MQTT delay per device',
'MQTT_DELAY_SEC_description' => 'A little hack - delay adding to the queue in case the process is restarted and previous publish processes aborted (it takes ~<code>2</code>s to update a sensor config on the broker). Tested with <code>2</code>-<code>3</code> seconds of delay. This delay is only applied when devices are created (during the first notification loop). It doesn\'t affect subsequent scans or notifications.',
//DynDNS
'DDNS_ACTIVE_name' => 'Enable DynDNS',
'DDNS_ACTIVE_description' => '',
'DDNS_DOMAIN_name' => 'DynDNS domain URL',
'DDNS_DOMAIN_description' => '',
'DDNS_USER_name' => 'DynDNS user',
'DDNS_USER_description' => '',
'DDNS_PASSWORD_name' => 'DynDNS password',
'DDNS_PASSWORD_description' => '',
'DDNS_UPDATE_URL_name' => 'DynDNS update URL',
'DDNS_UPDATE_URL_description' => 'Update URL starting with <code>http://</code> or <code>https://</code>.',
// PiHole
'PIHOLE_ACTIVE_name' => 'Enable PiHole mapping',
'PIHOLE_ACTIVE_description' => 'If enabled you need to map <code>:/etc/pihole/pihole-FTL.db</code> in your <code>docker-compose.yml</code> file.',
'DHCP_ACTIVE_name' => 'Enable PiHole DHCP',
'DHCP_ACTIVE_description' => 'If enabled you need to map <code>:/etc/pihole/dhcp.leases</code> in your <code>docker-compose.yml</code> file.'
); );
?> ?>
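For the SMTP settings listed above, a minimal sketch of what the email dispatch could look like using only the Python standard library. All values are placeholders and the actual Pi.Alert email code may be structured differently.

import smtplib
import ssl
from email.message import EmailMessage

# Illustrative values for the SMTP settings referenced above
SMTP_SERVER = "smtp-relay.sendinblue.com"
SMTP_PORT = 587
SMTP_USER = "user@example.com"
SMTP_PASS = "secret"
SMTP_SKIP_LOGIN = False
SMTP_SKIP_TLS = False
REPORT_TO = "you@example.com"
REPORT_FROM = "Pi.Alert report"

msg = EmailMessage()
msg["Subject"] = REPORT_FROM          # used as the subject line, per the setting description
msg["From"] = SMTP_USER
msg["To"] = REPORT_TO
msg.set_content("New devices were detected on your network.")

with smtplib.SMTP(SMTP_SERVER, SMTP_PORT, timeout=30) as smtp:
    if not SMTP_SKIP_TLS:
        smtp.starttls(context=ssl.create_default_context())   # upgrade to TLS unless skipped
    if not SMTP_SKIP_LOGIN:
        smtp.login(SMTP_USER, SMTP_PASS)                       # authenticate unless skipped
    smtp.send_message(msg)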

View File

@@ -602,7 +602,14 @@ def ext_handler(packets,queue,unidns,show_ttl,print_res,dos_ttl,conflict,ttl,int
rclass="Class:IN" rclass="Class:IN"
else: else:
rclass="Class:"+str(dnsrr.rclass) rclass="Class:"+str(dnsrr.rclass)
res = res0 + " Auth_NS: "+dnsrr.rrname + " " + dns_type[dnsrr.type]+" " + rclass + ' "' +dnsrr.rdata+'"'
str_res0 = str(b_to_str(res0)) + ""
str_rrname = str(b_to_str(dnsrr.rrname)) + ""
str_type = str(b_to_str(dns_type[dnsrr.type])) + ""
str_rdata = str(b_to_str(dnsrr.rdata)) + ""
str_rclass = str(b_to_str(rclass)) + ""
res = str_res0 + " Auth_NS: "+str_rrname + " " +str_type+" " + str_rclass + ' "' +str_rdata+'"'
if show_ttl: if show_ttl:
res = res + " TTL:"+str(dnsrr.ttl) res = res + " TTL:"+str(dnsrr.ttl)
if print_res==1: if print_res==1:
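The new lines above convert the DNS resource-record fields through b_to_str() before concatenation, because scapy returns these fields as bytes under Python 3. The helper itself is defined elsewhere in pholus; the sketch below is only an assumption of what it does, not the shipped implementation.

def b_to_str(value):
    # Assumed behaviour: decode bytes to str, pass anything else through unchanged
    if isinstance(value, bytes):
        return value.decode("utf-8", errors="replace")
    return value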