diff --git a/front/plugins/arp_scan/config.json b/front/plugins/arp_scan/config.json
index 9d3ab850..0a0d241a 100755
--- a/front/plugins/arp_scan/config.json
+++ b/front/plugins/arp_scan/config.json
@@ -3,7 +3,7 @@
     "unique_prefix": "ARPSCAN",
     "enabled": true,
     "data_source": "script",
-    "mapped_to_table": "CurrentScan",
+    "mapped_to_table": "CurrentScan",
     "data_filters": [
         {
             "compare_column" : "Object_PrimaryID",
diff --git a/pialert/__main__.py b/pialert/__main__.py
index c24bf19d..a662db64 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -26,7 +26,7 @@ from const import *
 from logger import mylog
 from helper import filePermissions, isNewVersion, timeNowTZ, updateState
 from api import update_api
-from networkscan import process_scan, scan_network
+# from networkscan import process_scan, scan_network
 from initialise import importConfigs
 from mac_vendor import update_devices_MAC_vendors
 from database import DB, get_all_devices
@@ -206,43 +206,43 @@ def main ():
 
         # todo replace the scans with plugins
        # Perform a network scan via arp-scan or pihole
-        if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < loop_start_time:
-            last_network_scan = loop_start_time
-            conf.cycle = 1 # network scan
-            mylog('verbose', ['[MAIN] cycle:',conf.cycle])
-            updateState(db,"Scan: Network")
+        # if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < loop_start_time:
+        #     last_network_scan = loop_start_time
+        #     conf.cycle = 1 # network scan
+        #     mylog('verbose', ['[MAIN] cycle:',conf.cycle])
+        #     updateState(db,"Scan: Network")
 
-            # scan_network()
+        #     # scan_network()
 
-            # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++
-            # Start scan_network as a process
+        #     # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++
+        #     # Start scan_network as a process
 
-            p = multiprocessing.Process(target=scan_network(db))
-            p.start()
+        #     p = multiprocessing.Process(target=scan_network(db))
+        #     p.start()
 
-            # Wait for a maximum of 3600 seconds (1h) or until process finishes
-            p.join(3600)
+        #     # Wait for a maximum of 3600 seconds (1h) or until process finishes
+        #     p.join(3600)
 
-            # If thread is still active
-            if p.is_alive():
-                mylog('none', "[MAIN] scan_network running too long - let\'s kill it")
+        #     # If thread is still active
+        #     if p.is_alive():
+        #         mylog('none', "[MAIN] scan_network running too long - let\'s kill it")
 
-                # Terminate - may not work if process is stuck for good
-                p.terminate()
-                # OR Kill - will work for sure, no chance for process to finish nicely however
-                # p.kill()
+        #         # Terminate - may not work if process is stuck for good
+        #         p.terminate()
+        #         # OR Kill - will work for sure, no chance for process to finish nicely however
+        #         # p.kill()
 
-            p.join()
+        #     p.join()
 
-            # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++
-            # Run splugin scripts which are set to run every timne after a scan finished
-            if conf.ENABLE_PLUGINS:
-                run_plugin_scripts(db,'always_after_scan')
+        #     # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++
+        #     # Run splugin scripts which are set to run every timne after a scan finished
+        if conf.ENABLE_PLUGINS:
+            run_plugin_scripts(db,'always_after_scan')
 
         # --------------------------------------------------
         # process all the scanned data into new devices
-        mylog('debug', "[MAIN] start processig scan results")
-        process_scan (db)
+        # mylog('debug', "[MAIN] start processig scan results")
+        # process_scan (db)
 
         # Reporting
         if conf.cycle in conf.check_report:
diff --git a/pialert/plugin.py b/pialert/plugin.py
index 3fcf1cc4..477ed675 100755
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -11,6 +11,7 @@ from const import pluginsPath, logPath
 from logger import mylog
 from helper import timeNowTZ, updateState, get_file_content, write_file
 from api import update_api
+from networkscan import process_scan
 
 #-------------------------------------------------------------------------------
 def run_plugin_scripts(db, runType):
@@ -586,6 +587,12 @@ def process_plugin_events(db, plugin):
 
         # This will insert multiple rows into the database in one go.
         sql.executemany(q, sqlParams)
 
+    db.commitDB()
+
+    # perform scan if mapped to CurrentScan table
+    if dbTable == 'CurrentScan':
+        process_scan(db)
+
     db.commitDB()
 
@@ -593,6 +600,8 @@
 
 
 
+
+
 #-------------------------------------------------------------------------------
 class plugin_object_class:
     def __init__(self, plugin, objDbRow):
diff --git a/pialert/reporting.py b/pialert/reporting.py
index f702ccb5..286d47a7 100755
--- a/pialert/reporting.py
+++ b/pialert/reporting.py
@@ -14,7 +14,7 @@ import conf
 from const import pialertPath, logPath, apiPath
 from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNowTZ, hide_email, updateState, get_file_content, write_file
 from logger import logResult, mylog, print_log
-from plugin import execute_plugin
+# from plugin import execute_plugin
 
 from publishers.email import (check_config as email_check_config,
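
Taken together, these hunks move scan processing out of the timer-driven block in __main__.py and into plugin.py: whenever a plugin's results are mapped to the CurrentScan table (as the ARPSCAN plugin is), process_scan(db) now runs right after the rows are inserted. The following is a minimal, runnable sketch of that flow only; FakeDB and the simplified process_scan / process_plugin_events below are hypothetical stand-ins, not the real PiAlert code.

    import sqlite3

    class FakeDB:
        """Hypothetical stand-in for PiAlert's DB wrapper."""
        def __init__(self):
            self.sql = sqlite3.connect(":memory:")
            # Simplified CurrentScan schema for illustration only.
            self.sql.execute("CREATE TABLE CurrentScan (cur_MAC TEXT, cur_IP TEXT)")

        def commitDB(self):
            self.sql.commit()

    def process_scan(db):
        # Stand-in for networkscan.process_scan(): fold CurrentScan rows into devices.
        rows = db.sql.execute("SELECT cur_MAC, cur_IP FROM CurrentScan").fetchall()
        print("processing", len(rows), "scanned entries into devices")

    def process_plugin_events(db, dbTable, sqlParams):
        # Bulk-insert the plugin's result rows, as the patched plugin.py does.
        db.sql.executemany("INSERT INTO CurrentScan VALUES (?, ?)", sqlParams)
        db.commitDB()
        # New behaviour: plugins mapped to CurrentScan trigger scan processing
        # immediately, instead of waiting for the timer loop in __main__.py.
        if dbTable == 'CurrentScan':
            process_scan(db)
        db.commitDB()

    if __name__ == "__main__":
        db = FakeDB()
        process_plugin_events(db, 'CurrentScan', [('aa:bb:cc:dd:ee:ff', '192.168.1.10')])

Run on its own, the sketch prints a single confirmation line; in the actual patch the equivalent call sits at the end of process_plugin_events in pialert/plugin.py, after sql.executemany and db.commitDB().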