ARPSCAN to plugin rewrite

Jokob-sk
2023-08-07 08:23:39 +10:00
parent d848e18bc0
commit 9a13133a5f
18 changed files with 119 additions and 147 deletions

View File

@@ -36,8 +36,8 @@
{
"function": "RUN",
"type": "text.select",
"default_value":"disabled",
"options": ["disabled", "once", "schedule", "scan_cycle", "always_after_scan", "on_new_device"],
"default_value":"schedule",
"options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"],
"localized": ["name", "description"],
"name" :[{
"language_code":"en_us",

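This hunk switches the ARPSCAN plugin's RUN default from "disabled" to "schedule" and drops the "scan_cycle" option. As a rough sketch (not the project's actual dispatcher; the helper name and boolean flags are illustrative stand-ins), a RUN value like this typically gates plugin execution in the main loop:

# Rough sketch, not the project's dispatcher: how a RUN value like the one above
# could gate plugin execution. Helper name and flags are illustrative stand-ins.
def should_run(run_value, plugins_once_run=False, schedule_is_due=False,
               scan_just_finished=False, new_device_found=False):
    if run_value == "disabled":
        return False
    if run_value == "once":
        return not plugins_once_run        # only on the first loop pass
    if run_value == "schedule":
        return schedule_is_due             # cron-style schedule has elapsed
    if run_value == "always_after_scan":
        return scan_just_finished
    if run_value == "on_new_device":
        return new_device_found
    return False

print(should_run("schedule", schedule_is_due=True))   # True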
View File

@@ -24,7 +24,7 @@ import multiprocessing
import conf
from const import *
from logger import mylog
from helper import filePermissions, isNewVersion, timeNow, updateState
from helper import filePermissions, isNewVersion, timeNowTZ, updateState
from api import update_api
from networkscan import process_scan, scan_network
from initialise import importConfigs
@@ -74,28 +74,10 @@ main structure of Pi Alert
"""
def main ():
mylog('debug', ['[MAIN] Setting up ...'])
mylog('none', ['[MAIN] Setting up ...']) # has to be level 'none' as user config not loaded yet
conf.time_started = datetime.datetime.now()
conf.cycle = ""
conf.check_report = [1, "internet_IP", "update_vendors_silent"]
conf.plugins_once_run = False
mylog('none', [f'[conf.tz] Setting up ...{conf.tz}'])
# to be deleted if not used
conf.log_timestamp = conf.time_started
#cron_instance = Cron()
# timestamps of last execution times
startTime = conf.time_started
now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)
# set these times to the past to force the first run
last_network_scan = now_minus_24h
last_internet_IP_scan = now_minus_24h
last_scan_run = now_minus_24h
last_cleanup = now_minus_24h
last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
last_version_check = now_minus_24h
# indicates, if a new version is available
conf.newVersionAvailable = False
@@ -120,17 +102,18 @@ def main ():
while True:
# update time started
loop_start_time = timeNow()
# re-load user configuration and plugins
importConfigs(db)
# update time started
conf.loop_start_time = timeNowTZ()
loop_start_time = conf.loop_start_time # TODO fix
# check if new version is available / only check once an hour
if last_version_check + datetime.timedelta(hours=1) < loop_start_time :
if conf.last_version_check + datetime.timedelta(hours=1) < loop_start_time :
# if newVersionAvailable is already true the function does nothing and returns true again
mylog('debug', [f"[Version check] Last version check timestamp: {last_version_check}"])
last_version_check = loop_start_time
mylog('debug', [f"[Version check] Last version check timestamp: {conf.last_version_check}"])
conf.last_version_check = loop_start_time
conf.newVersionAvailable = isNewVersion(conf.newVersionAvailable)
# Handle plugins executed ONCE
@@ -210,7 +193,7 @@ def main ():
run = nmapSchedule.runScheduleCheck()
if run:
nmapSchedule.last_run = timeNow()
nmapSchedule.last_run = timeNowTZ()
performNmapScan(db, get_all_devices(db))
# todo replace the scans with plugins
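The reworked loop reloads the config and plugins on every pass and keeps its timestamps on the shared conf module; the version check itself is throttled to once an hour. A minimal standalone sketch of that throttle pattern (plain locals stand in for the conf attributes, and the real check is isNewVersion()):

import datetime

last_version_check = datetime.datetime.now() - datetime.timedelta(hours=24)  # backdated to force the first check

def loop_iteration():
    global last_version_check
    loop_start_time = datetime.datetime.now()
    if last_version_check + datetime.timedelta(hours=1) < loop_start_time:
        last_version_check = loop_start_time
        # the real loop calls isNewVersion() here
        return True
    return False

print(loop_iteration())   # True on the first pass, False for the next hour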

View File

@@ -92,5 +92,5 @@ class api_endpoint_class:
# update hash
apiEndpoints[changedIndex].hash = self.hash
else:
mylog('info', [f'[API] ERROR Updating {self.fileName}'])
mylog('minimal', [f'[API] ERROR Updating {self.fileName}'])

View File

@@ -18,9 +18,17 @@ lastImportedConfFile = 1.1
plugins_once_run = False
newVersionAvailable = False
time_started = ''
startTime = ''
last_network_scan = ''
last_internet_IP_scan = ''
last_scan_run = ''
last_cleanup = ''
last_update_vendors = ''
last_version_check = ''
check_report = []
log_timestamp = 0
arpscan_devices = []
# for MQTT
mqtt_connected_to_broker = False
mqtt_sensors = []
@@ -28,8 +36,6 @@ client = None # mqtt client
# for notifications
changedPorts_json_struc = None
# ACTUAL CONFIGURATION ITEMS set to defaults
# General
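These new module-level names turn conf.py into app-wide shared state: the timestamps that used to be locals in main() become attributes any module can read and write after "import conf". A rough stand-in for the pattern (SimpleNamespace replaces the real conf module so the snippet runs on its own):

from types import SimpleNamespace

# Stand-in for "import conf": one mutable object whose attributes every module
# reads and writes, which is what the module-level names above provide.
conf_stub = SimpleNamespace(last_version_check='', newVersionAvailable=False)

def version_checker(now):
    conf_stub.last_version_check = now       # written by one part of the app ...
    conf_stub.newVersionAvailable = True

version_checker('2023-08-07 08:23:39')
print(conf_stub.last_version_check, conf_stub.newVersionAvailable)   # ... read by another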

View File

@@ -6,7 +6,7 @@ import sqlite3
from const import fullDbPath, sql_devices_stats, sql_devices_all
from logger import mylog
from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState
from helper import json_struc, initOrSetParam, row_to_json, timeNowTZ #, updateState
@@ -478,7 +478,7 @@ def get_all_devices(db):
#-------------------------------------------------------------------------------
def insertOnlineHistory(db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Add to History
# only run this if the scans have run

View File

@@ -5,7 +5,7 @@
import subprocess
import conf
from helper import timeNow
from helper import timeNowTZ
from plugin import get_setting_value
from scanners.internet import check_IP_format, get_internet_IP
from logger import mylog, print_log
@@ -21,20 +21,10 @@ def save_scanned_devices (db):
# mylog('debug', ['[ARP Scan] Detected devices:', len(p_arpscan_devices)])
# handled by the ARPSCAN plugin
# # Delete previous scan data
# sql.execute ("DELETE FROM CurrentScan")
# if len(p_arpscan_devices) > 0:
# # Insert new arp-scan devices
# sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+
# " cur_IP, cur_Vendor, cur_ScanMethod) "+
# "VALUES (1, :mac, :ip, :hw, 'arp-scan')",
# p_arpscan_devices)
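For reference, a standalone sketch of the insert the commented-out block performed and which the ARPSCAN plugin is now expected to handle. The table and column names come from the comment above; the in-memory database and sample row are illustrative only.

import sqlite3

# Illustrative only: the same insert against an in-memory database; the plugin
# writes into the real pialert.db instead.
con = sqlite3.connect(":memory:")
con.execute("""CREATE TABLE CurrentScan (cur_ScanCycle INTEGER, cur_MAC TEXT,
               cur_IP TEXT, cur_Vendor TEXT, cur_ScanMethod TEXT)""")
arpscan_devices = [  # sample row shaped like the old arp-scan parser output
    {"mac": "aa:bb:cc:dd:ee:ff", "ip": "192.168.1.10", "hw": "Example Vendor"},
]
con.executemany("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) "
                "VALUES (1, :mac, :ip, :hw, 'arp-scan')", arpscan_devices)
print(con.execute("SELECT * FROM CurrentScan").fetchall())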
# ------------------------ TO CONVERT INTO PLUGIN
# # Insert Pi-hole devices
# startTime = timeNow()
# startTime = timeNowTZ()
# sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC,
# cur_IP, cur_Vendor, cur_ScanMethod)
# SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole'
@@ -144,7 +134,7 @@ def print_scan_stats (db):
#-------------------------------------------------------------------------------
def create_new_devices (db):
sql = db.sql # TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# arpscan - Insert events for new devices
mylog('debug','[New Devices] New devices - 1 Events')
@@ -289,7 +279,7 @@ def create_new_devices (db):
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Update Last Connection
mylog('debug','[Update Devices] 1 Last Connection')
sql.execute (f"""UPDATE Devices SET dev_LastConnection = '{startTime}',

View File

@@ -18,14 +18,13 @@ from const import *
from logger import mylog, logResult
#-------------------------------------------------------------------------------
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
#-------------------------------------------------------------------------------
def timeNowTZ():
return datetime.datetime.now(conf.tz).replace(microsecond=0)
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
#-------------------------------------------------------------------------------
def updateState(db, newState):
@@ -213,7 +212,7 @@ def isNewVersion(newVersion: bool):
text = url.text
data = json.loads(text)
except requests.exceptions.ConnectionError as e:
mylog('info', [" Couldn't check for new release."])
mylog('minimal', [" Couldn't check for new release."])
data = ""
# make sure we received a valid response and not an API rate limit exceeded message
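Earlier in this helper.py hunk, timeNowTZ() is added alongside the naive timeNow(). A small comparison of the two, assuming the timezone() used to build conf.tz in initialise.py is pytz's and using this commit's default zone:

import datetime
from pytz import timezone    # assumed: the project's timezone() helper comes from pytz

tz = timezone('Europe/Berlin')            # default TIMEZONE value in this commit

naive = datetime.datetime.now().replace(microsecond=0)      # old timeNow()
aware = datetime.datetime.now(tz).replace(microsecond=0)    # new timeNowTZ()

print(naive.tzinfo)   # None: the value silently follows the container clock
print(aware.tzinfo)   # Europe/Berlin tzinfo: the UTC offset travels with the value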

View File

@@ -77,12 +77,13 @@ def importConfigs (db):
conf.mySettings = [] # reset settings
conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
# User values loaded from here on
c_d = read_config_file(config_file)
# Import setting if found in the dictionary
# General
conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run'])
conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General')
conf.LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'text.select', "['none', 'minimal', 'verbose', 'debug']", 'General')
conf.TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
conf.ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General')
@@ -98,6 +99,10 @@ def importConfigs (db):
conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
conf.HRS_TO_KEEP_NEWDEV = ccd('HRS_TO_KEEP_NEWDEV', 0 , c_d, 'Keep new devices for', 'integer', "0", 'General')
# ARPSCAN (+ other settings provided by the ARPSCAN plugin)
conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'ARPSCAN', ['run'])
conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'ARPSCAN')
# Email
conf.REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test'])
conf.SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email')
@@ -176,6 +181,31 @@ def importConfigs (db):
# Init timezone in case it changed
conf.tz = timezone(conf.TIMEZONE)
# TODO cleanup later ----------------------------------------------------------------------------------
# init all time values now that we have the timezone - all of this should be moved into plugin/plugin settings
conf.time_started = datetime.datetime.now(conf.tz)
conf.cycle = ""
conf.check_report = [1, "internet_IP", "update_vendors_silent"]
conf.plugins_once_run = False
# to be deleted if not used
conf.log_timestamp = conf.time_started
#cron_instance = Cron()
# timestamps of last execution times
conf.startTime = conf.time_started
now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)
# set these times to the past to force the first run
conf.last_network_scan = now_minus_24h
conf.last_internet_IP_scan = now_minus_24h
conf.last_scan_run = now_minus_24h
conf.last_cleanup = now_minus_24h
conf.last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
conf.last_version_check = now_minus_24h
# TODO cleanup later ----------------------------------------------------------------------------------
# global mySchedules
# reset schedules
conf.mySchedules = []
@@ -265,7 +295,7 @@ def importConfigs (db):
#TO DO this creates a circular reference between API and HELPER !
mylog('info', '[Config] Imported new config')
mylog('minimal', '[Config] Imported new config')
@@ -274,7 +304,7 @@ def read_config_file(filename):
"""
returns a dict of the config file's key:value pairs
"""
mylog('info', '[Config] reading config file')
mylog('minimal', '[Config] reading config file')
# load the variables from pialert.conf
code = compile(filename.read_text(), filename.name, "exec")
confDict = {} # config dictionary
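The ccd() helper itself is not part of this diff, but its call sites above suggest the shape: return the user's value when the key exists in the parsed config dict, otherwise the supplied default, while registering the setting metadata (display name, type, options, group, UI events). A hedged reconstruction based only on those call sites:

# Hedged reconstruction of ccd() from its call sites above; not the project's code.
def ccd(key, default, config_dict, name, inputtype, options, group, events=None):
    value = config_dict.get(key, default)    # the user's value wins over the default
    # the real helper also records (key, name, inputtype, options, group, events, value)
    # in conf.mySettings / conf.mySettingsSQLsafe so the UI and API can expose it
    return value

c_d = {'SCAN_SUBNETS': ['10.0.0.0/24 --interface=eth0']}     # parsed pialert.conf stand-in
subnets = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1'], c_d,
              'Subnets to scan', 'subnets', '', 'ARPSCAN')
print(subnets)   # the user-supplied list, because the key exists in c_d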

View File

@@ -9,7 +9,10 @@ from const import *
#-------------------------------------------------------------------------------
# duplicated from helper to avoid a circular import
#-------------------------------------------------------------------------------
def timeNow():
def timeNowTZ():
if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0)
else:
return datetime.datetime.now().replace(microsecond=0)
@@ -36,7 +39,7 @@ def mylog(requestedDebugLevel, n):
#-------------------------------------------------------------------------------
def file_print (*args):
result = timeNow().strftime ('%H:%M:%S') + ' '
result = timeNowTZ().strftime ('%H:%M:%S') + ' '
for arg in args:
result += str(arg)

View File

@@ -3,7 +3,7 @@ import subprocess
import conf
from const import pialertPath, vendorsDB
from helper import timeNow, updateState
from helper import timeNowTZ, updateState
from logger import mylog
@@ -17,7 +17,7 @@ def update_devices_MAC_vendors (db, pArg = ''):
sql = db.sql # TO-DO
# Header
updateState(db,"Upkeep: Vendors")
mylog('verbose', ['[', timeNow(), '] Upkeep - Update HW Vendors:' ])
mylog('verbose', ['[', timeNowTZ(), '] Upkeep - Update HW Vendors:' ])
# Update vendors DB (iab oui)
mylog('verbose', [' Updating vendors DB (iab & oui)'])

View File

@@ -5,7 +5,7 @@ import conf
from scanners.pihole import copy_pihole_network, read_DHCP_leases
from database import insertOnlineHistory
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
from helper import timeNow
from helper import timeNowTZ
from logger import mylog
from reporting import skip_repeated_notifications
@@ -25,15 +25,6 @@ def scan_network (db):
# updateState(db,"Scan: Network")
mylog('verbose', ['[Network Scan] Scan Devices:' ])
# Query ScanCycle properties
scanCycle_data = query_ScanCycle_Data (db, True)
if scanCycle_data is None:
mylog('none', ['\n'])
mylog('none', ['[Network Scan]*************** ERROR ***************'])
mylog('none', ['[Network Scan] ScanCycle %s not found' % conf.cycle ])
mylog('none', ['[Network Scan] Exiting...\n'])
return False
db.commitDB()
# Pi-hole method
@@ -52,21 +43,7 @@ def scan_network (db):
def process_scan (db):
# Query ScanCycle properties
scanCycle_data = query_ScanCycle_Data (db, True)
if scanCycle_data is None:
mylog('none', ['\n'])
mylog('none', ['[Process Scan]*************** ERROR ***************'])
mylog('none', ['[Process Scan] ScanCycle %s not found' % conf.cycle ])
mylog('none', ['[Process Scan] Exiting...\n'])
return False
db.commitDB()
# ScanCycle data
cycle_interval = scanCycle_data['cic_EveryXmin']
# Load current scan data
# Load current scan data
mylog('verbose','[Process Scan] Processing scan results')
save_scanned_devices (db)
@@ -114,34 +91,16 @@ def process_scan (db):
mylog('verbose','[Process Scan] Skipping repeated notifications')
skip_repeated_notifications (db)
# Clear current scan as processed
db.sql.execute ("DELETE FROM CurrentScan")
# Clear current scan as processed TODO uncomment
# db.sql.execute ("DELETE FROM CurrentScan")
# Commit changes
db.commitDB()
# moved plugin execution to main loop
# if ENABLE_PLUGINS:
# run_plugin_scripts(db,'always_after_scan')
#-------------------------------------------------------------------------------
def query_ScanCycle_Data (db, pOpenCloseDB = False, cycle = 1):
# Query Data
db.sql.execute ("""SELECT cic_arpscanCycles, cic_EveryXmin
FROM ScanCycles
WHERE cic_ID = ? """, (cycle,))
sqlRow = db.sql.fetchone()
# Return Row
return sqlRow
#-------------------------------------------------------------------------------
def void_ghost_disconnections (db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Void connect ghost events (disconnect event exists in last X min.)
mylog('debug','[Void Ghost Con] - 1 Connect ghost events')
sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
@@ -256,7 +215,7 @@ def create_sessions_snapshot (db):
#-------------------------------------------------------------------------------
def insert_events (db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Check device down
mylog('debug','[Events] - 1 - Devices down')

View File

@@ -8,7 +8,7 @@ from collections import namedtuple
import conf
from const import pluginsPath, logPath
from logger import mylog
from helper import timeNow, updateState, get_file_content, write_file
from helper import timeNowTZ, updateState, get_file_content, write_file
from api import update_api
#-------------------------------------------------------------------------------
@@ -38,7 +38,7 @@ def run_plugin_scripts(db, runType):
shouldRun = schd.runScheduleCheck()
if shouldRun:
# note the last time the scheduled plugin run was executed
schd.last_run = timeNow()
schd.last_run = timeNowTZ()
if shouldRun:
@@ -102,8 +102,8 @@ def get_setting(key):
result = set
if result is None:
mylog('info', [' Error - setting_missing - Setting not found for key: ', key])
mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
mylog('minimal', [' Error - setting_missing - Setting not found for key: ', key])
mylog('minimal', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))
return result
@@ -165,14 +165,14 @@ def execute_plugin(db, plugin):
resolved = get_setting(param["value"])
if resolved != None:
resolved = plugin_param_from_glob_set(resolved)
resolved = passable_string_from_setting(resolved)
# Get Sql result
if param["type"] == "sql":
resolved = flatten_array(db.get_sql_array(param["value"]))
if resolved == None:
mylog('none', ['[Plugins] The parameter "name":"', param["name"], '" was resolved as None'])
mylog('none', [f'[Plugins] The parameter "name":"{param["name"]}" for "value": {param["value"]} was resolved as None'])
else:
params.append( [param["name"], resolved] )
@@ -286,14 +286,14 @@ def handle_empty(value):
#-------------------------------------------------------------------------------
# Flattens a setting to make it passable to a script
def plugin_param_from_glob_set(globalSetting):
def passable_string_from_setting(globalSetting):
setVal = globalSetting[6] # setting value
setTyp = globalSetting[3] # setting type
noConversion = ['text', 'string', 'integer', 'boolean', 'password', 'readonly', 'integer.select', 'text.select', 'integer.checkbox' ]
arrayConversion = ['text.multiselect', 'list']
arrayConversion = ['text.multiselect', 'list', 'subnets']
jsonConversion = ['.template']
if setTyp in noConversion:
@@ -306,6 +306,8 @@ def plugin_param_from_glob_set(globalSetting):
if setTyp.endswith(item):
return json.dumps(setVal)
mylog('none', ['[Plugins]: ERROR: Parameter not converted.'])
#-------------------------------------------------------------------------------
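Adding 'subnets' to arrayConversion means a SCAN_SUBNETS-style list now takes the array branch of passable_string_from_setting() instead of falling through to the "Parameter not converted" error. A hedged illustration of the branch selection; the join used for the array case is only a stand-in, since the real flattening is not shown in this hunk:

# Bucket lists copied from the diff; the ','.join() is only a stand-in for the
# project's real array flattening.
noConversion    = ['text', 'string', 'integer', 'boolean', 'password', 'readonly',
                   'integer.select', 'text.select', 'integer.checkbox']
arrayConversion = ['text.multiselect', 'list', 'subnets']

setTyp = 'subnets'
setVal = ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0']

if setTyp in noConversion:
    resolved = setVal
elif setTyp in arrayConversion:
    resolved = ','.join(setVal)     # before this commit: no branch matched, so the error was logged
else:
    resolved = None

print(resolved)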

View File

@@ -48,7 +48,7 @@ def publish_mqtt(client, topic, message):
status = result[0]
if status != 0:
mylog('info', ["Waiting to reconnect to MQTT broker"])
mylog('minimal', ["Waiting to reconnect to MQTT broker"])
time.sleep(0.1)
return True
@@ -180,7 +180,7 @@ def mqtt_start(db):
sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5
mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
mylog('minimal', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
for device in devices:

View File

@@ -12,7 +12,7 @@ from json2table import convert
# pialert modules
import conf
from const import pialertPath, logPath, apiPath
from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, updateState, get_file_content, write_file
from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNowTZ, hide_email, updateState, get_file_content, write_file
from logger import logResult, mylog, print_log
@@ -139,7 +139,7 @@ def send_notifications (db):
template_file.close()
# Report Header & footer
timeFormated = timeNow().strftime ('%Y-%m-%d %H:%M')
timeFormated = timeNowTZ().strftime ('%Y-%m-%d %H:%M')
mail_text = mail_text.replace ('<REPORT_DATE>', timeFormated)
mail_html = mail_html.replace ('<REPORT_DATE>', timeFormated)
@@ -271,43 +271,43 @@ def send_notifications (db):
msg = noti_struc(json_final, mail_text, mail_html)
mylog('info', ['[Notification] Updating API files'])
mylog('minimal', ['[Notification] Updating API files'])
send_api()
if conf.REPORT_MAIL and check_config('email'):
updateState(db,"Send: Email")
mylog('info', ['[Notification] Sending report by Email'])
mylog('minimal', ['[Notification] Sending report by Email'])
send_email (msg )
else :
mylog('verbose', ['[Notification] Skip email'])
if conf.REPORT_APPRISE and check_config('apprise'):
updateState(db,"Send: Apprise")
mylog('info', ['[Notification] Sending report by Apprise'])
mylog('minimal', ['[Notification] Sending report by Apprise'])
send_apprise (msg)
else :
mylog('verbose', ['[Notification] Skip Apprise'])
if conf.REPORT_WEBHOOK and check_config('webhook'):
updateState(db,"Send: Webhook")
mylog('info', ['[Notification] Sending report by Webhook'])
mylog('minimal', ['[Notification] Sending report by Webhook'])
send_webhook (msg)
else :
mylog('verbose', ['[Notification] Skip webhook'])
if conf.REPORT_NTFY and check_config('ntfy'):
updateState(db,"Send: NTFY")
mylog('info', ['[Notification] Sending report by NTFY'])
mylog('minimal', ['[Notification] Sending report by NTFY'])
send_ntfy (msg)
else :
mylog('verbose', ['[Notification] Skip NTFY'])
if conf.REPORT_PUSHSAFER and check_config('pushsafer'):
updateState(db,"Send: PUSHSAFER")
mylog('info', ['[Notification] Sending report by PUSHSAFER'])
mylog('minimal', ['[Notification] Sending report by PUSHSAFER'])
send_pushsafer (msg)
else :
mylog('verbose', ['[Notification] Skip PUSHSAFER'])
# Update MQTT entities
if conf.REPORT_MQTT and check_config('mqtt'):
updateState(db,"Send: MQTT")
mylog('info', ['[Notification] Establishing MQTT thread'])
mylog('minimal', ['[Notification] Establishing MQTT thread'])
mqtt_start(db)
else :
mylog('verbose', ['[Notification] Skip MQTT'])
@@ -318,7 +318,7 @@ def send_notifications (db):
sql.execute ("""UPDATE Devices SET dev_LastNotification = ?
WHERE dev_MAC IN (SELECT eve_MAC FROM Events
WHERE eve_PendingAlertEmail = 1)
""", (datetime.datetime.now(),) )
""", (datetime.datetime.now(conf.tz),) )
sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1""")
@@ -328,7 +328,7 @@ def send_notifications (db):
conf.changedPorts_json_struc = None
# DEBUG - print number of rows updated
mylog('info', ['[Notification] Notifications changes: ', sql.rowcount])
mylog('minimal', ['[Notification] Notifications changes: ', sql.rowcount])
# Commit changes
db.commitDB()
@@ -488,17 +488,17 @@ def check_and_run_event(db):
def handle_run(runType):
global last_network_scan
mylog('info', ['[', timeNow(), '] START Run: ', runType])
mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType])
if runType == 'ENABLE_ARPSCAN':
last_network_scan = conf.time_started - datetime.timedelta(hours = 24)
mylog('info', ['[', timeNow(), '] END Run: ', runType])
mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
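handle_run() forces an immediate network scan simply by backdating last_network_scan by 24 hours, so the main loop's "is a scan due?" comparison passes on its next iteration. A tiny illustration of that backdating trick (interval and clock values are made up):

import datetime

scan_interval = datetime.timedelta(minutes=5)                 # made-up interval
now = datetime.datetime.now()
last_network_scan = now                                       # a scan just finished

print(last_network_scan + scan_interval < now)                # False: nothing due yet
last_network_scan = now - datetime.timedelta(hours=24)        # the handle_run trick
print(last_network_scan + scan_interval < now)                # True: a scan runs on the next loop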
#-------------------------------------------------------------------------------
def handle_test(testType):
mylog('info', ['[', timeNow(), '] START Test: ', testType])
mylog('minimal', ['[', timeNowTZ(), '] START Test: ', testType])
# Open text sample
sample_txt = get_file_content(pialertPath + '/back/report_sample.txt')
@@ -522,4 +522,4 @@ def handle_test(testType):
if testType == 'REPORT_PUSHSAFER':
send_pushsafer (sample_msg)
mylog('info', ['[Test Publishers] END Test: ', testType])
mylog('minimal', ['[Test Publishers] END Test: ', testType])

View File

@@ -6,7 +6,7 @@ import re
# pialert modules
import conf
from helper import timeNow, updateState
from helper import timeNowTZ, updateState
from logger import append_line_to_file, mylog
from const import logPath
@@ -45,7 +45,7 @@ def check_internet_IP ( db ):
# Check IP Change
if internet_IP != previous_IP :
mylog('info', ['[Internet IP] New internet IP: ', internet_IP])
mylog('minimal', ['[Internet IP] New internet IP: ', internet_IP])
save_new_internet_IP (db, internet_IP)
else :
@@ -116,7 +116,7 @@ def get_previous_internet_IP (db):
def save_new_internet_IP (db, pNewIP):
# Log new IP into logfile
append_line_to_file (logPath + '/IP_changes.log',
'['+str(timeNow()) +']\t'+ pNewIP +'\n')
'['+str(timeNowTZ()) +']\t'+ pNewIP +'\n')
prevIp = get_previous_internet_IP(db)
# Save event
@@ -125,7 +125,7 @@ def save_new_internet_IP (db, pNewIP):
eve_PendingAlertEmail)
VALUES ('Internet', ?, ?, 'Internet IP Changed',
'Previous Internet IP: '|| ?, 1) """,
(pNewIP, timeNow(), prevIp) )
(pNewIP, timeNowTZ(), prevIp) )
# Save new IP
db.sql.execute ("""UPDATE Devices SET dev_LastIP = ?

View File

@@ -3,7 +3,7 @@ import subprocess
import conf
from const import logPath, sql_nmap_scan_all
from helper import json_struc, timeNow, updateState
from helper import json_struc, timeNowTZ, updateState
from logger import append_line_to_file, mylog
#-------------------------------------------------------------------------------
@@ -59,7 +59,7 @@ def performNmapScan(db, devicesToScan):
mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])
if output == "": # check if the subprocess failed
mylog('info', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
mylog('minimal', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
else:
mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress])
@@ -87,7 +87,7 @@ def performNmapScan(db, devicesToScan):
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNowTZ(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
elif 'Nmap done' in line:
duration = line.split('scanned in ')[1]
index += 1
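The parser above collects any three-column line between the PORT/STATE/SERVICE header and the "Nmap done" footer. A quick illustration of that split on a typical nmap output line:

# Illustrative nmap output line (PORT / STATE / SERVICE), as caught by the
# len(line.split()) == 3 branch above.
line = "22/tcp   open   ssh"
port, state, service = line.split()
print(port, state, service)   # 22/tcp open ssh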

View File

@@ -2,7 +2,7 @@ import subprocess
import re
from const import fullPholusPath, logPath
from helper import checkIPV4, timeNow, updateState
from helper import checkIPV4, timeNowTZ, updateState
from logger import mylog
#-------------------------------------------------------------------------------
@@ -64,7 +64,7 @@ def performPholusScan (db, timeoutSec, userSubnets):
for line in newLines:
columns = line.split("|")
if len(columns) == 4:
params.append(( interface + " " + mask, timeNow() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
params.append(( interface + " " + mask, timeNowTZ() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
if len(params) > 0:
sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)

View File

@@ -6,7 +6,7 @@ sys.path.append(str(pathlib.Path(__file__).parent.parent.resolve()) + "/pialert/
import datetime
from helper import timeNow, updateSubnets
from helper import timeNowTZ, updateSubnets
# -------------------------------------------------------------------------------