PIHOLE to plugin rewrite
This commit is contained in:
27
front/plugins/pihole_scan/README.md
Executable file
27
front/plugins/pihole_scan/README.md
Executable file
@@ -0,0 +1,27 @@
|
|||||||
|
## Overview
|
||||||
|
|
||||||
|
A plugin allowing for importing Un-Discoverable devices from the settings page.
|
||||||
|
The main use case is to add dumb network gear like unmanaged hubs and switches to the network view.
|
||||||
|
There might be other use cases — please let me know.
|
||||||
|
|
||||||
|
### Usage
|
||||||
|
|
||||||
|
- Go to settings and find Un-Discoverable Devices in the list of plugins.
|
||||||
|
- Enable the plugin by changing the RUN parameter from disabled to `once` or `always_after_scan`.
|
||||||
|
- Add the name of your device to the list. (remove the sample entry first)
|
||||||
|
- SAVE
|
||||||
|
- wait for the next scan to finish
|
||||||
|
|
||||||
|
#### Examples:
|
||||||
|
Settings:
|
||||||
|

|
||||||
|
|
||||||
|
resulting in these devices:
|
||||||
|

|
||||||
|
|
||||||
|
Allowing Un-Discoverable devices like hubs, switches or APs to be added to the network view.
|
||||||
|

|
||||||
|
|
||||||
|
### Known Limitations
|
||||||
|
- Un-Discoverable Devices always show as offline. That is expected as they can not be discovered by Pi.Alert.
|
||||||
|
- All IPs are set to 0.0.0.0 therefore the "Random MAC" icon might show up.
|
||||||
235
front/plugins/pihole_scan/config.json
Executable file
235
front/plugins/pihole_scan/config.json
Executable file
@@ -0,0 +1,235 @@
|
|||||||
|
{
|
||||||
|
"code_name": "pihole_scan",
|
||||||
|
"unique_prefix": "PIHOLE",
|
||||||
|
"enabled": true,
|
||||||
|
"data_source": "sqlite-db-query",
|
||||||
|
"data_source_settings":
|
||||||
|
{
|
||||||
|
"db_path":"/etc/pihole/pihole-FTL.db"
|
||||||
|
},
|
||||||
|
"mapped_to_table": "CurrentScan",
|
||||||
|
"data_filters": [
|
||||||
|
{
|
||||||
|
"compare_column" : "Object_PrimaryID",
|
||||||
|
"compare_operator" : "==",
|
||||||
|
"compare_field_id": "txtMacFilter",
|
||||||
|
"compare_js_template": "'{value}'.toString()",
|
||||||
|
"compare_use_quotes": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"localized": ["display_name", "description", "icon"],
|
||||||
|
|
||||||
|
"display_name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Network scan (PiHole)"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"icon": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "<i class=\"fa-solid fa-search\"></i>"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "This plugin is to import devices from PiHole"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"params" : [
|
||||||
|
{
|
||||||
|
"name" : "subnets",
|
||||||
|
"type" : "setting",
|
||||||
|
"value" : "SCAN_SUBNETS"
|
||||||
|
}],
|
||||||
|
|
||||||
|
"settings": [
|
||||||
|
{
|
||||||
|
"function": "RUN",
|
||||||
|
"type": "text.select",
|
||||||
|
"default_value":"schedule",
|
||||||
|
"options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name" :[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "When to run"
|
||||||
|
}],
|
||||||
|
"description": [{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "When enabled, ONCE is the preferred option. It runs at startup and after every save of the config here.<br> Changes will only show in the devices <b> after the next scan!</b>"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"function": "CMD",
|
||||||
|
"type": "text",
|
||||||
|
"default_value":"SELECT hwaddr as Object_PrimaryID, cast('http://' || (SELECT ip FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as VARCHAR(100)) || ':' || cast( SUBSTR((SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), 0, INSTR((SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), '/')) as VARCHAR(100)) as Object_SecondaryID, datetime() as DateTime, macVendor as Watched_Value1, lastQuery as Watched_Value2, (SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as Watched_Value3, 'null' as Watched_Value4, '' as Extra, hwaddr as ForeignKey FROM PH.network WHERE hwaddr NOT LIKE 'ip-%' AND hwaddr <> '00:00:00:00:00:00'; ",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name" : [{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "SQL to run"
|
||||||
|
}],
|
||||||
|
"description": [{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "This SQL query is used to populate the corresponding UI tables under the Plugins section."
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"function": "RUN_TIMEOUT",
|
||||||
|
"type": "integer",
|
||||||
|
"default_value": 300,
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Run timeout"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Maximum time in seconds to wait for the script to finish. If this time is exceeded the script is aborted."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "RUN_SCHD",
|
||||||
|
"type": "text",
|
||||||
|
"default_value":"*/3 * * * *",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name" : [{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Schedule"
|
||||||
|
}],
|
||||||
|
"description": [{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Only enabled if you select <code>schedule</code> in the <a href=\"#PIHOLE_RUN\"><code>PIHOLE_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>*/3 * * * *</code> will run the scan every 3 minutes. Will be run NEXT time the time passes."
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "WATCH",
|
||||||
|
"type": "text.multiselect",
|
||||||
|
"default_value":["Watched_Value1", "Watched_Value2"],
|
||||||
|
"options": ["Watched_Value1","Watched_Value2","Watched_Value3","Watched_Value4"],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name" :[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Watched"
|
||||||
|
}] ,
|
||||||
|
"description":[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Send a notification if selected values change. Use <code>CTRL + Click</code> to select/deselect. <ul> <li><code>Watched_Value1</code> is IP</li><li><code>Watched_Value2</code> is Vendor</li><li><code>Watched_Value3</code> is Interface </li><li><code>Watched_Value4</code> is N/A </li></ul>"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "REPORT_ON",
|
||||||
|
"type": "text.multiselect",
|
||||||
|
"default_value": ["new"],
|
||||||
|
"options": ["new", "watched-changed", "watched-not-changed"],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Report on"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "When should notification be sent out."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"database_column_definitions":
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"column": "Object_PrimaryID",
|
||||||
|
"mapped_to_column": "cur_MAC",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "devicemac",
|
||||||
|
"default_value":"",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name":[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "MAC"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Watched_Value1",
|
||||||
|
"mapped_to_column": "cur_IP",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "deviceip",
|
||||||
|
"default_value":"",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name":[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "IP"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Watched_Value2",
|
||||||
|
"mapped_to_column": "cur_Vendor",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value":"",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name":[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Vendor"
|
||||||
|
}]
|
||||||
|
} ,
|
||||||
|
{
|
||||||
|
"column": "Extra",
|
||||||
|
"mapped_to_column": "cur_ScanMethod",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value":"",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name":[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Scan method"
|
||||||
|
}]
|
||||||
|
} ,
|
||||||
|
{
|
||||||
|
"column": "DateTimeCreated",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value":"",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name":[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Created"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "DateTimeChanged",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value":"",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name":[{
|
||||||
|
"language_code":"en_us",
|
||||||
|
"string" : "Changed"
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
224
front/plugins/pihole_scan/script.py
Normal file
224
front/plugins/pihole_scan/script.py
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import base64
|
||||||
|
import subprocess
|
||||||
|
from time import strftime
|
||||||
|
|
||||||
|
sys.path.append("/home/pi/pialert/front/plugins")
|
||||||
|
|
||||||
|
from plugin_helper import Plugin_Object, Plugin_Objects
|
||||||
|
|
||||||
|
""" module to import db and leases from PiHole """
|
||||||
|
|
||||||
|
# Path of the Pi-hole FTL database this plugin imports devices from.
piholeDB = '/etc/pihole/pihole-FTL.db'

# Pi.Alert installation root and the location of its own SQLite database.
pialertPath = '/home/pi/pialert'
dbPath = '/db/pialert.db'
fullPiAlertDbPath = pialertPath + dbPath

# Plugin working files live next to this script.
CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
LOG_FILE = os.path.join(CUR_PATH, 'script.log')
RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point for the PiHole import plugin script.

    NOTE(review): the original body was a bare no-op reference to
    ``fullPiAlertDbPath`` plus commented-out boilerplate copied from the
    arp_scan plugin. The actual device import is performed by the SQL
    query configured in config.json (data_source: sqlite-db-query), so
    this script currently has nothing to do; it is an explicit,
    documented no-op.

    Returns:
        int: 0, so the script can be used as a conventional exit status.
    """
    # TODO: if this plugin ever switches from sqlite-db-query to a script
    # data source, re-introduce the Plugin_Objects(RESULT_FILE) handling
    # (see front/plugins/arp_scan/script.py for the reference pattern).
    return 0
|
||||||
|
|
||||||
|
def execute_arpscan(userSubnets):
    """Run arp-scan on each configured interface/subnet and return the
    discovered devices with duplicate MAC addresses removed.

    Args:
        userSubnets: iterable of subnet/option strings, each passed
            verbatim to ``execute_arpscan_on_interface``.

    Returns:
        list[dict]: one dict per unique MAC, with keys ``ip``, ``mac``,
        ``hw`` (vendor text) and ``interface``.
    """
    # Parse "IP  MAC  Vendor" lines from arp-scan output. Raw strings
    # avoid invalid-escape warnings; compiled once, outside the loop.
    re_ip = r'(?P<ip>((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))'
    re_mac = r'(?P<mac>([0-9a-fA-F]{2}[:-]){5}([0-9a-fA-F]{2}))'
    re_hw = r'(?P<hw>.*)'
    re_pattern = re.compile(re_ip + r'\s+' + re_mac + r'\s' + re_hw)

    devices_list = []

    # Scan each interface and collect every match (duplicates included).
    for interface in userSubnets:
        arpscan_output = execute_arpscan_on_interface(interface)
        print(arpscan_output)

        devices_list += [
            {**match.groupdict(), "interface": interface}
            for match in re.finditer(re_pattern, arpscan_output)
        ]

    # Drop duplicate MACs, keeping the first occurrence of each.
    seen_macs = set()
    unique_devices = []
    for device in devices_list:
        if device['mac'] not in seen_macs:
            seen_macs.add(device['mac'])
            unique_devices.append(device)

    print("Devices List len:", len(devices_list))
    print("Devices List:", devices_list)

    # BUGFIX: previously returned devices_list, silently discarding the
    # de-duplication work above.
    return unique_devices
|
||||||
|
|
||||||
|
|
||||||
|
def execute_arpscan_on_interface(interface):
    """Run ``sudo arp-scan`` for a single interface/subnet specification.

    Args:
        interface: whitespace-separated arp-scan arguments, e.g.
            ``"192.168.1.0/24 --interface=eth1"``.

    Returns:
        str: raw arp-scan stdout, or ``""`` when the command failed.
    """
    # interface.split() lets the setting carry extra arp-scan options;
    # --ignoredups suppresses duplicate replies.
    arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + interface.split()

    try:
        # list-form invocation (shell=False) avoids shell injection
        return subprocess.check_output(arpscan_args, universal_newlines=True)
    except subprocess.CalledProcessError:
        # Best-effort: a failed scan simply yields no devices rather than
        # aborting the whole run. (Removed unused error_type local.)
        return ""
|
||||||
|
|
||||||
|
|
||||||
|
#-------------------------------------------------------------------------------
def copy_pihole_network (db, pihole_db_path='/etc/pihole/pihole-FTL.db'):
    """Attach the Pi-hole database and copy its network table across into
    the Pi.Alert DB's PiHole_Network table.

    Args:
        db: Pi.Alert database wrapper; must expose a ``sql`` cursor and a
            ``commitDB()`` method.
        pihole_db_path: path of the Pi-hole FTL sqlite database; defaults
            to the standard location (previously the hard-coded module
            constant ``piholeDB``), so existing callers are unaffected.

    Returns:
        bool: True when the cursor rowcount after the copy is non-zero.
    """
    sql = db.sql # TO-DO

    # Open Pi-hole DB
    print('[PiHole Network] - attach PiHole DB')
    try:
        sql.execute ("ATTACH DATABASE '"+ pihole_db_path +"' AS PH")
    except sqlite3.Error as e:
        print(f'[PiHole Network] - SQL ERROR: {e}')

    # BUGFIX: bind new_devices BEFORE the try block. Previously a failing
    # DELETE left it unbound and the final print raised NameError.
    new_devices = []

    # Copy Pi-hole Network table
    try:
        sql.execute ("DELETE FROM PiHole_Network")

        # Fetch the rows first, just for reporting the device count below.
        sql.execute ( """SELECT hwaddr, macVendor, lastQuery,
                            (SELECT name FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip),
                            (SELECT ip FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip)
                         FROM PH.network
                         WHERE hwaddr NOT LIKE 'ip-%'
                           AND hwaddr <> '00:00:00:00:00:00' """)
        new_devices = sql.fetchall()

        # insert into PiAlert DB
        sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                            PH_Name, PH_IP)
                        SELECT hwaddr, macVendor, lastQuery,
                            (SELECT name FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip),
                            (SELECT ip FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip)
                        FROM PH.network
                        WHERE hwaddr NOT LIKE 'ip-%'
                          AND hwaddr <> '00:00:00:00:00:00' """)
        sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)'
                        WHERE PH_Name IS NULL OR PH_Name = '' """)
        # Close Pi-hole DB
        sql.execute ("DETACH PH")

    except sqlite3.Error as e:
        print(f'[PiHole Network] - SQL ERROR: {e}')

    db.commitDB()

    print('[PiHole Network] - completed - found ', len(new_devices), ' devices')
    # NOTE(review): rowcount reflects the LAST execute (DETACH), for which
    # sqlite3 cursors report -1 — so this is effectively always True.
    return str(sql.rowcount) != "0"
|
||||||
|
|
||||||
|
|
||||||
|
#-------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
#===============================================================================
# BEGIN
#===============================================================================
# Script entry point: run the import only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()
|
||||||
@@ -63,9 +63,6 @@ main structure of Pi Alert
|
|||||||
run PHOLUS
|
run PHOLUS
|
||||||
run NMAP
|
run NMAP
|
||||||
run "scan_network()"
|
run "scan_network()"
|
||||||
ARP Scan
|
|
||||||
PiHole copy db
|
|
||||||
PiHole DHCP leases
|
|
||||||
processing scan results
|
processing scan results
|
||||||
run plugins (after Scan)
|
run plugins (after Scan)
|
||||||
reporting
|
reporting
|
||||||
|
|||||||
@@ -19,8 +19,8 @@ fullPholusPath = pialertPath+'/pholus/pholus3.py'
|
|||||||
|
|
||||||
|
|
||||||
vendorsDB = '/usr/share/arp-scan/ieee-oui.txt'
|
vendorsDB = '/usr/share/arp-scan/ieee-oui.txt'
|
||||||
piholeDB = '/etc/pihole/pihole-FTL.db'
|
|
||||||
piholeDhcpleases = '/etc/pihole/dhcp.leases'
|
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
#===============================================================================
|
||||||
|
|||||||
@@ -1,12 +1,8 @@
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
import conf
|
import conf
|
||||||
from helper import timeNowTZ
|
from helper import timeNowTZ, get_setting, get_setting_value
|
||||||
from plugin import get_setting_value
|
|
||||||
from scanners.internet import check_IP_format, get_internet_IP
|
from scanners.internet import check_IP_format, get_internet_IP
|
||||||
from logger import mylog, print_log
|
from logger import mylog, print_log
|
||||||
from mac_vendor import query_MAC_vendor
|
from mac_vendor import query_MAC_vendor
|
||||||
|
|||||||
@@ -339,4 +339,35 @@ class noti_struc:
|
|||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
def isJsonObject(value):
    """Return True when *value* is a JSON-object-like mapping (a dict)."""
    return isinstance(value, dict)
|
||||||
|
|
||||||
|
#-------------------------------------------------------------------------------
# Return whole setting tuple
def get_setting(key):
    """Look up the full setting tuple for *key* in conf.mySettings.

    Tuple index order: key, name, desc, inputtype, options, regex,
    result, group, events.

    Returns:
        The matching tuple, or None when the key is unknown (after
        logging the miss and dumping all settings to
        logPath/setting_missing.json for debugging).
    """
    result = None
    # Renamed loop variable: 'set' shadowed the builtin.
    # NOTE(review): if a key appears twice the LAST entry wins; kept as-is
    # to preserve existing behavior.
    for entry in conf.mySettings:
        if entry[0] == key:
            result = entry

    if result is None:
        mylog('minimal', [' Error - setting_missing - Setting not found for key: ', key])
        mylog('minimal', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
        write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))

    return result
|
||||||
|
|
||||||
|
#-------------------------------------------------------------------------------
# Return setting value
def get_setting_value(key):
    """Return just the stored value (index 6) of the setting *key*.

    Falls back to an empty string when the setting does not exist
    (get_setting already logs the miss).
    """
    # Single lookup (was called twice) and no builtin-shadowing 'set';
    # the unused 'setTyp' local was removed.
    setting = get_setting(key)

    if setting is not None:
        # index order: key, name, desc, inputtype, options, regex, result, group, events
        return setting[6]

    return ''
|
||||||
@@ -156,8 +156,8 @@ def importConfigs (db):
|
|||||||
conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
|
conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
|
||||||
|
|
||||||
# PiHole
|
# PiHole
|
||||||
conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole')
|
conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PIHOLE')
|
||||||
conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole')
|
conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PIHOLE')
|
||||||
|
|
||||||
# PHOLUS
|
# PHOLUS
|
||||||
conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
|
conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ from collections import namedtuple
|
|||||||
import conf
|
import conf
|
||||||
from const import pluginsPath, logPath
|
from const import pluginsPath, logPath
|
||||||
from logger import mylog
|
from logger import mylog
|
||||||
from helper import timeNowTZ, updateState, get_file_content, write_file
|
from helper import timeNowTZ, updateState, get_file_content, write_file, get_setting, get_setting_value
|
||||||
from api import update_api
|
from api import update_api
|
||||||
from networkscan import process_scan
|
from networkscan import process_scan
|
||||||
|
|
||||||
@@ -94,21 +94,7 @@ def get_plugin_setting(plugin, function_key):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
# Return whole setting touple
|
|
||||||
def get_setting(key):
|
|
||||||
result = None
|
|
||||||
# index order: key, name, desc, inputtype, options, regex, result, group, events
|
|
||||||
for set in conf.mySettings:
|
|
||||||
if set[0] == key:
|
|
||||||
result = set
|
|
||||||
|
|
||||||
if result is None:
|
|
||||||
mylog('minimal', [' Error - setting_missing - Setting not found for key: ', key])
|
|
||||||
mylog('minimal', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
|
|
||||||
write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
@@ -252,6 +238,32 @@ def execute_plugin(db, plugin):
|
|||||||
sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
|
sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
|
||||||
else:
|
else:
|
||||||
mylog('none', ['[Plugins] Skipped invalid sql result'])
|
mylog('none', ['[Plugins] Skipped invalid sql result'])
|
||||||
|
|
||||||
|
# pialert-db-query
|
||||||
|
if plugin['data_source'] == 'sqlite-db-query':
|
||||||
|
# replace single quotes wildcards
|
||||||
|
# set_CMD should contain a SQL query
|
||||||
|
q = set_CMD.replace("{s-quote}", '\'')
|
||||||
|
|
||||||
|
# Execute command
|
||||||
|
mylog('verbose', ['[Plugins] Executing: ', q])
|
||||||
|
|
||||||
|
fullSqlitePath = plugin['data_source_settings']['db_path']
|
||||||
|
|
||||||
|
# try attaching the sqlite DB
|
||||||
|
try:
|
||||||
|
sql.execute ("ATTACH DATABASE '"+ fullSqlitePath +"' AS PH")
|
||||||
|
except sqlite3.Error as e:
|
||||||
|
mylog('none',[ '[Plugin] - ATTACH DATABASE failed with SQL ERROR: ', e])
|
||||||
|
|
||||||
|
arr = db.get_sql_array (q)
|
||||||
|
|
||||||
|
for row in arr:
|
||||||
|
# There has to be always 9 columns
|
||||||
|
if len(row) == 9 and (row[0] in ['','null']) == False :
|
||||||
|
sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
|
||||||
|
else:
|
||||||
|
mylog('none', ['[Plugins] Skipped invalid sql result'])
|
||||||
|
|
||||||
|
|
||||||
# check if the subprocess / SQL query failed / there was no valid output
|
# check if the subprocess / SQL query failed / there was no valid output
|
||||||
@@ -329,20 +341,7 @@ def get_plugin_setting_value(plugin, function_key):
|
|||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
# Return setting value
|
|
||||||
def get_setting_value(key):
|
|
||||||
|
|
||||||
set = get_setting(key)
|
|
||||||
|
|
||||||
if get_setting(key) is not None:
|
|
||||||
|
|
||||||
setVal = set[6] # setting value
|
|
||||||
setTyp = set[3] # setting type
|
|
||||||
|
|
||||||
return setVal
|
|
||||||
|
|
||||||
return ''
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
def flatten_array(arr, encodeBase64=False):
|
def flatten_array(arr, encodeBase64=False):
|
||||||
|
|||||||
@@ -1,11 +1,14 @@
|
|||||||
""" module to import db and leases from PiHole """
|
""" module to import db and leases from PiHole """
|
||||||
|
# TODO remove this file in the future
|
||||||
|
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
import conf
|
import conf
|
||||||
from const import piholeDB, piholeDhcpleases
|
|
||||||
from logger import mylog
|
from logger import mylog
|
||||||
|
|
||||||
|
piholeDhcpleases = '/etc/pihole/dhcp.leases'
|
||||||
|
piholeDB = '/etc/pihole/pihole-FTL.db'
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
def copy_pihole_network (db):
|
def copy_pihole_network (db):
|
||||||
"""
|
"""
|
||||||
|
|||||||
Reference in New Issue
Block a user