diff --git a/front/plugins/pihole_scan/README.md b/front/plugins/pihole_scan/README.md
new file mode 100755
index 00000000..622c5306
--- /dev/null
+++ b/front/plugins/pihole_scan/README.md
@@ -0,0 +1,27 @@
+## Overview
+
+A plugin that imports devices from PiHole into Pi.Alert.
+It reads the Pi-hole FTL database (`/etc/pihole/pihole-FTL.db`) and adds the devices recorded in Pi-hole's network table to the device list, so clients known to Pi-hole show up even when other scanners miss them.
+
+### Usage
+
+- Go to settings and find Network scan (PiHole) in the list of plugins.
+- Make sure the Pi-hole FTL database is readable at `/etc/pihole/pihole-FTL.db` (the path can be changed in `config.json`).
+- Set the RUN parameter to `once`, `schedule`, or `always_after_scan` to control when the import runs (`disabled` turns it off).
+- SAVE
+- Wait for the next scan to finish.
+
+### Known Limitations
+ - Entries whose hardware address starts with `ip-` or equals `00:00:00:00:00:00` are skipped by the import query.
+ - Imported devices and their changes only show up in the device list after the next scan.
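+
+### Example: previewing the import
+
+A minimal sketch to check which devices the plugin would pick up, assuming Python 3 with the built-in `sqlite3` module and read access to the default database path; the query is a simplified form of the one shipped in `config.json`:
+
+```python
+import sqlite3
+
+PIHOLE_DB = '/etc/pihole/pihole-FTL.db'  # default Pi-hole FTL database path
+
+# open the database read-only so Pi-hole itself is not disturbed
+con = sqlite3.connect(f'file:{PIHOLE_DB}?mode=ro', uri=True)
+
+rows = con.execute("""
+    SELECT hwaddr, macVendor, lastQuery
+    FROM network
+    WHERE hwaddr NOT LIKE 'ip-%'
+      AND hwaddr <> '00:00:00:00:00:00'
+""").fetchall()
+
+for hwaddr, vendor, last_query in rows:
+    print(hwaddr, vendor, last_query)
+
+con.close()
+```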
diff --git a/front/plugins/pihole_scan/config.json b/front/plugins/pihole_scan/config.json
new file mode 100755
index 00000000..1898f013
--- /dev/null
+++ b/front/plugins/pihole_scan/config.json
@@ -0,0 +1,235 @@
+{
+ "code_name": "pihole_scan",
+ "unique_prefix": "PIHOLE",
+ "enabled": true,
+ "data_source": "sqlite-db-query",
+ "data_source_settings":
+ {
+ "db_path":"/etc/pihole/pihole-FTL.db"
+ },
+ "mapped_to_table": "CurrentScan",
+ "data_filters": [
+ {
+ "compare_column" : "Object_PrimaryID",
+ "compare_operator" : "==",
+ "compare_field_id": "txtMacFilter",
+ "compare_js_template": "'{value}'.toString()",
+ "compare_use_quotes": true
+ }
+ ],
+ "localized": ["display_name", "description", "icon"],
+
+ "display_name": [
+ {
+ "language_code": "en_us",
+ "string": "Network scan (PiHole)"
+ }
+ ],
+ "icon": [
+ {
+ "language_code": "en_us",
+ "string": ""
+ }
+ ],
+ "description": [
+ {
+ "language_code": "en_us",
+ "string": "This plugin is to import devices from PiHole"
+ }
+ ],
+ "params" : [
+ {
+ "name" : "subnets",
+ "type" : "setting",
+ "value" : "SCAN_SUBNETS"
+ }],
+
+ "settings": [
+ {
+ "function": "RUN",
+ "type": "text.select",
+ "default_value":"schedule",
+ "options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"],
+ "localized": ["name", "description"],
+ "name" :[{
+ "language_code":"en_us",
+ "string" : "When to run"
+ }],
+ "description": [{
+ "language_code":"en_us",
+ "string" : "When enabled, ONCE is the preferred option. It runs at startup and after every save of the config here.
Changes will only show in the devices after the next scan!"
+ }]
+ },
+
+ {
+ "function": "CMD",
+ "type": "text",
+ "default_value":"SELECT hwaddr as Object_PrimaryID, cast('http://' || (SELECT ip FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as VARCHAR(100)) || ':' || cast( SUBSTR((SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), 0, INSTR((SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), '/')) as VARCHAR(100)) as Object_SecondaryID, datetime() as DateTime, macVendor as Watched_Value1, lastQuery as Watched_Value2, (SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as Watched_Value3, 'null' as Watched_Value4, '' as Extra, hwaddr as ForeignKey FROM PH.network WHERE hwaddr NOT LIKE 'ip-%' AND hwaddr <> '00:00:00:00:00:00'; ",
+ "options": [],
+ "localized": ["name", "description"],
+ "name" : [{
+ "language_code":"en_us",
+ "string" : "SQL to run"
+ }],
+ "description": [{
+ "language_code":"en_us",
+ "string" : "This SQL query is used to populate the coresponding UI tables under the Plugins section."
+ }]
+ },
+
+ {
+ "function": "RUN_TIMEOUT",
+ "type": "integer",
+ "default_value": 300,
+ "options": [],
+ "localized": ["name", "description"],
+ "name": [
+ {
+ "language_code": "en_us",
+ "string": "Run timeout"
+ }
+ ],
+ "description": [
+ {
+ "language_code": "en_us",
+ "string": "Maximum time in seconds to wait for the script to finish. If this time is exceeded the script is aborted."
+ }
+ ]
+ },
+ {
+ "function": "RUN_SCHD",
+ "type": "text",
+ "default_value":"*/3 * * * *",
+ "options": [],
+ "localized": ["name", "description"],
+ "name" : [{
+ "language_code":"en_us",
+ "string" : "Schedule"
+ }],
+ "description": [{
+ "language_code":"en_us",
+ "string" : "Only enabled if you select schedule in the ARPSCAN_RUN setting. Make sure you enter the schedule in the correct cron-like format (e.g. validate at crontab.guru). For example entering */3 * * * * will run the scan every 3 minutes. Will be run NEXT time the time passes."
+ }]
+ },
+ {
+ "function": "WATCH",
+ "type": "text.multiselect",
+ "default_value":["Watched_Value1", "Watched_Value2"],
+ "options": ["Watched_Value1","Watched_Value2","Watched_Value3","Watched_Value4"],
+ "localized": ["name", "description"],
+ "name" :[{
+ "language_code":"en_us",
+ "string" : "Watched"
+ }] ,
+ "description":[{
+ "language_code":"en_us",
+ "string" : "Send a notification if selected values change. Use CTRL + Click to select/deselect.
Watched_Value1 is IPWatched_Value2 is VendorWatched_Value3 is Interface Watched_Value4 is N/A
"
+ }]
+ },
+ {
+ "function": "REPORT_ON",
+ "type": "text.multiselect",
+ "default_value": ["new"],
+ "options": ["new", "watched-changed", "watched-not-changed"],
+ "localized": ["name", "description"],
+ "name": [
+ {
+ "language_code": "en_us",
+ "string": "Report on"
+ }
+ ],
+ "description": [
+ {
+ "language_code": "en_us",
+ "string": "When should notification be sent out."
+ }
+ ]
+ }
+ ],
+
+ "database_column_definitions":
+ [
+ {
+ "column": "Object_PrimaryID",
+ "mapped_to_column": "cur_MAC",
+ "css_classes": "col-sm-2",
+ "show": true,
+ "type": "devicemac",
+ "default_value":"",
+ "options": [],
+ "localized": ["name"],
+ "name":[{
+ "language_code":"en_us",
+ "string" : "MAC"
+ }]
+ },
+ {
+ "column": "Watched_Value1",
+ "mapped_to_column": "cur_IP",
+ "css_classes": "col-sm-2",
+ "show": true,
+ "type": "deviceip",
+ "default_value":"",
+ "options": [],
+ "localized": ["name"],
+ "name":[{
+ "language_code":"en_us",
+ "string" : "IP"
+ }]
+ },
+ {
+ "column": "Watched_Value2",
+ "mapped_to_column": "cur_Vendor",
+ "css_classes": "col-sm-2",
+ "show": true,
+ "type": "label",
+ "default_value":"",
+ "options": [],
+ "localized": ["name"],
+ "name":[{
+ "language_code":"en_us",
+ "string" : "Vendor"
+ }]
+ } ,
+ {
+ "column": "Extra",
+ "mapped_to_column": "cur_ScanMethod",
+ "css_classes": "col-sm-2",
+ "show": true,
+ "type": "label",
+ "default_value":"",
+ "options": [],
+ "localized": ["name"],
+ "name":[{
+ "language_code":"en_us",
+ "string" : "Scan method"
+ }]
+ } ,
+ {
+ "column": "DateTimeCreated",
+ "css_classes": "col-sm-2",
+ "show": true,
+ "type": "label",
+ "default_value":"",
+ "options": [],
+ "localized": ["name"],
+ "name":[{
+ "language_code":"en_us",
+ "string" : "Created"
+ }]
+ },
+ {
+ "column": "DateTimeChanged",
+ "css_classes": "col-sm-2",
+ "show": true,
+ "type": "label",
+ "default_value":"",
+ "options": [],
+ "localized": ["name"],
+ "name":[{
+ "language_code":"en_us",
+ "string" : "Changed"
+ }]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/front/plugins/pihole_scan/script.py b/front/plugins/pihole_scan/script.py
new file mode 100644
index 00000000..62890d9c
--- /dev/null
+++ b/front/plugins/pihole_scan/script.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+
+import sqlite3
+import os
+import pathlib
+import argparse
+import sys
+import re
+import base64
+import subprocess
+from time import strftime
+
+sys.path.append("/home/pi/pialert/front/plugins")
+
+from plugin_helper import Plugin_Object, Plugin_Objects
+
+""" module to import db and leases from PiHole """
+
+piholeDB = '/etc/pihole/pihole-FTL.db'
+
+pialertPath = '/home/pi/pialert'
+dbPath = '/db/pialert.db'
+fullPiAlertDbPath = pialertPath + dbPath
+
+CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
+LOG_FILE = os.path.join(CUR_PATH, 'script.log')
+RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
+
+
+def main():
+
+
+    fullPiAlertDbPath
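+    # NOTE: main() is currently a placeholder. This plugin declares the
+    # "sqlite-db-query" data source in config.json, so the core (pialert/plugin.py)
+    # runs the configured SQL against the Pi-hole DB directly and no Python-side
+    # import logic is needed here.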
+
+ # # sample
+ # # /home/pi/pialert/front/plugins/arp_scan/script.py userSubnets=b'MTkyLjE2OC4xLjAvMjQgLS1pbnRlcmZhY2U9ZXRoMQ=='
+ # # the script expects a parameter in the format of userSubnets=subnet1,subnet2,...
+ # parser = argparse.ArgumentParser(description='Import devices from settings')
+ # parser.add_argument('userSubnets', nargs='+', help="list of subnets with options")
+ # values = parser.parse_args()
+
+ # # Assuming Plugin_Objects is a class or function that reads data from the RESULT_FILE
+ # # and returns a list of objects called 'devices'.
+ # devices = Plugin_Objects(RESULT_FILE)
+
+ # # Print a message to indicate that the script is starting.
+ # print('In script:')
+
+ # # Assuming 'values' is a dictionary or object that contains a key 'userSubnets'
+ # # which holds a list of user-submitted subnets.
+ # # Printing the userSubnets list to check its content.
+ # print(values.userSubnets)
+
+ # # Extract the base64-encoded subnet information from the first element of the userSubnets list.
+ # # The format of the element is assumed to be like 'userSubnets=b'.
+ # userSubnetsParamBase64 = values.userSubnets[0].split('userSubnets=b')[1]
+
+ # # Printing the extracted base64-encoded subnet information.
+ # print(userSubnetsParamBase64)
+
+ # # Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
+ # userSubnetsParam = base64.b64decode(userSubnetsParamBase64).decode('ascii')
+
+ # # Print the decoded subnet information.
+ # print('userSubnetsParam:')
+ # print(userSubnetsParam)
+
+ # # Check if the decoded subnet information contains multiple subnets separated by commas.
+ # # If it does, split the string into a list of individual subnets.
+ # # Otherwise, create a list with a single element containing the subnet information.
+ # if ',' in userSubnetsParam:
+ # subnets_list = userSubnetsParam.split(',')
+ # else:
+ # subnets_list = [userSubnetsParam]
+
+ # # Execute the ARP scanning process on the list of subnets (whether it's one or multiple subnets).
+ # # The function 'execute_arpscan' is assumed to be defined elsewhere in the code.
+ # unique_devices = execute_arpscan(subnets_list)
+
+
+ # for device in unique_devices:
+ # devices.add_object(
+ # primaryId=device['mac'], # MAC (Device Name)
+ # secondaryId=device['ip'], # IP Address
+ # watched1=device['ip'], # Device Name
+ # watched2=device.get('hw', ''), # Vendor (assuming it's in the 'hw' field)
+ # watched3=device.get('interface', ''), # Add the interface
+ # watched4='',
+ # extra='arp-scan',
+ # foreignKey="")
+
+ # devices.write_result_file()
+
+ # return 0
+
+
+def execute_arpscan(userSubnets):
+    # output of possibly multiple interfaces
+    arpscan_output = ""
+    devices_list = []
+
+    # scan each interface
+    for interface in userSubnets:
+
+        arpscan_output = execute_arpscan_on_interface(interface)
+
+        print(arpscan_output)
+
+        # Search IP + MAC + Vendor as a regular expression (named groups ip/mac/hw)
+        re_ip = r'(?P<ip>((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))'
+        re_mac = r'(?P<mac>([0-9a-fA-F]{2}[:-]){5}([0-9a-fA-F]{2}))'
+        re_hw = r'(?P<hw>.*)'
+        re_pattern = re.compile(re_ip + r'\s+' + re_mac + r'\s' + re_hw)
+
+        devices_list_tmp = [
+            {**device.groupdict(), "interface": interface}
+            for device in re.finditer(re_pattern, arpscan_output)
+        ]
+
+        devices_list += devices_list_tmp
+
+    # mylog('debug', ['[ARP Scan] Found: Devices including duplicates ', len(devices_list) ])
+
+    # Delete duplicate MACs
+    unique_mac = []
+    unique_devices = []
+
+    for device in devices_list:
+        if device['mac'] not in unique_mac:
+            unique_mac.append(device['mac'])
+            unique_devices.append(device)
+
+    # return list
+    # mylog('debug', ['[ARP Scan] Found: Devices without duplicates ', len(unique_devices) ])
+
+    print("Devices List len:", len(devices_list))
+    print("Devices List:", devices_list)
+
+    return devices_list
+
+
+def execute_arpscan_on_interface(interface):
+    # Prepare command arguments
+    arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + interface.split()
+
+    # Execute command
+    try:
+        # try running the subprocess safely
+        result = subprocess.check_output(arpscan_args, universal_newlines=True)
+    except subprocess.CalledProcessError as e:
+        # An error occurred, return an empty result
+        error_type = type(e).__name__  # Capture the error type
+        result = ""
+
+    return result
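+
+# NOTE: execute_arpscan() and execute_arpscan_on_interface() above are carried
+# over from the arp_scan plugin sample and are not called in this script.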
+
+
+#-------------------------------------------------------------------------------
+def copy_pihole_network (db):
+    """
+    Attach the PiHole database and copy the PiHole_Network table across into the PiAlert DB.
+    """
+
+    sql = db.sql # TO-DO
+    # Open Pi-hole DB
+    print('[PiHole Network] - attach PiHole DB')
+
+    try:
+        sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH")
+    except sqlite3.Error as e:
+        print(f'[PiHole Network] - SQL ERROR: {e}')
+
+    # Copy Pi-hole Network table
+    try:
+        sql.execute ("DELETE FROM PiHole_Network")
+
+        # just for reporting
+        new_devices = []
+        sql.execute ("""SELECT hwaddr, macVendor, lastQuery,
+                            (SELECT name FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip),
+                            (SELECT ip FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip)
+                        FROM PH.network
+                        WHERE hwaddr NOT LIKE 'ip-%'
+                          AND hwaddr <> '00:00:00:00:00:00' """)
+        new_devices = sql.fetchall()
+
+        # insert into PiAlert DB
+        sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
+                                                    PH_Name, PH_IP)
+                        SELECT hwaddr, macVendor, lastQuery,
+                            (SELECT name FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip),
+                            (SELECT ip FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip)
+                        FROM PH.network
+                        WHERE hwaddr NOT LIKE 'ip-%'
+                          AND hwaddr <> '00:00:00:00:00:00' """)
+        sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)'
+                        WHERE PH_Name IS NULL OR PH_Name = '' """)
+        # Close Pi-hole DB
+        sql.execute ("DETACH PH")
+
+    except sqlite3.Error as e:
+        print(f'[PiHole Network] - SQL ERROR: {e}')
+
+    db.commitDB()
+
+    print('[PiHole Network] - completed - found ', len(new_devices), ' devices')
+    return str(sql.rowcount) != "0"
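+
+# NOTE: copy_pihole_network() above mirrors the legacy implementation in
+# pialert/scanners/pihole.py and is not called from main(); the actual import
+# is performed by the "sqlite-db-query" data source declared in config.json.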
+
+
+#-------------------------------------------------------------------------------
+
+
+#===============================================================================
+# BEGIN
+#===============================================================================
+if __name__ == '__main__':
+ main()
diff --git a/pialert/__main__.py b/pialert/__main__.py
index a662db64..8a1551f5 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -63,9 +63,6 @@ main structure of Pi Alert
run PHOLUS
run NMAP
run "scan_network()"
- ARP Scan
- PiHole copy db
- PiHole DHCP leases
processing scan results
run plugins (after Scan)
reporting
diff --git a/pialert/const.py b/pialert/const.py
index edc172e3..2223fd5a 100755
--- a/pialert/const.py
+++ b/pialert/const.py
@@ -19,8 +19,8 @@ fullPholusPath = pialertPath+'/pholus/pholus3.py'
vendorsDB = '/usr/share/arp-scan/ieee-oui.txt'
-piholeDB = '/etc/pihole/pihole-FTL.db'
-piholeDhcpleases = '/etc/pihole/dhcp.leases'
+
+
#===============================================================================
diff --git a/pialert/device.py b/pialert/device.py
index c747bc9c..06ccfe23 100755
--- a/pialert/device.py
+++ b/pialert/device.py
@@ -1,12 +1,8 @@
-
-
-
import subprocess
import conf
-from helper import timeNowTZ
-from plugin import get_setting_value
+from helper import timeNowTZ, get_setting, get_setting_value
from scanners.internet import check_IP_format, get_internet_IP
from logger import mylog, print_log
from mac_vendor import query_MAC_vendor
diff --git a/pialert/helper.py b/pialert/helper.py
index cb6faa0e..1dd061ee 100755
--- a/pialert/helper.py
+++ b/pialert/helper.py
@@ -339,4 +339,35 @@ class noti_struc:
#-------------------------------------------------------------------------------
def isJsonObject(value):
- return isinstance(value, dict)
\ No newline at end of file
+ return isinstance(value, dict)
+
+#-------------------------------------------------------------------------------
+# Return the whole setting tuple
+def get_setting(key):
+    result = None
+    # index order: key, name, desc, inputtype, options, regex, result, group, events
+    for set in conf.mySettings:
+        if set[0] == key:
+            result = set
+
+    if result is None:
+        mylog('minimal', [' Error - setting_missing - Setting not found for key: ', key])
+        mylog('minimal', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
+        write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))
+
+    return result
+
+#-------------------------------------------------------------------------------
+# Return the setting value
+def get_setting_value(key):
+
+    set = get_setting(key)
+
+    if set is not None:
+
+        setVal = set[6] # setting value
+        setTyp = set[3] # setting type
+
+        return setVal
+
+    return ''
\ No newline at end of file
diff --git a/pialert/initialise.py b/pialert/initialise.py
index 2eb57716..b61338a6 100755
--- a/pialert/initialise.py
+++ b/pialert/initialise.py
@@ -156,8 +156,8 @@ def importConfigs (db):
conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
# PiHole
- conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole')
- conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole')
+ conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PIHOLE')
+ conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PIHOLE')
# PHOLUS
conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
diff --git a/pialert/plugin.py b/pialert/plugin.py
index 477ed675..5d996dae 100755
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -9,7 +9,7 @@ from collections import namedtuple
import conf
from const import pluginsPath, logPath
from logger import mylog
-from helper import timeNowTZ, updateState, get_file_content, write_file
+from helper import timeNowTZ, updateState, get_file_content, write_file, get_setting, get_setting_value
from api import update_api
from networkscan import process_scan
@@ -94,21 +94,7 @@ def get_plugin_setting(plugin, function_key):
return result
-#-------------------------------------------------------------------------------
-# Return whole setting touple
-def get_setting(key):
-    result = None
-    # index order: key, name, desc, inputtype, options, regex, result, group, events
-    for set in conf.mySettings:
-        if set[0] == key:
-            result = set
-
-    if result is None:
-        mylog('minimal', [' Error - setting_missing - Setting not found for key: ', key])
-        mylog('minimal', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
-        write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))
-    return result
#-------------------------------------------------------------------------------
@@ -252,6 +238,32 @@ def execute_plugin(db, plugin):
sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
else:
mylog('none', ['[Plugins] Skipped invalid sql result'])
+
+    # sqlite-db-query
+    if plugin['data_source'] == 'sqlite-db-query':
+        # replace the {s-quote} wildcards with single quotes
+        # set_CMD should contain a SQL query
+        q = set_CMD.replace("{s-quote}", '\'')
+
+        # Execute the query
+        mylog('verbose', ['[Plugins] Executing: ', q])
+
+        fullSqlitePath = plugin['data_source_settings']['db_path']
+
+        # try attaching the external sqlite DB
+        try:
+            sql.execute ("ATTACH DATABASE '"+ fullSqlitePath +"' AS PH")
+        except sqlite3.Error as e:
+            mylog('none',[ '[Plugin] - ATTACH DATABASE failed with SQL ERROR: ', e])
+
+        arr = db.get_sql_array (q)
+
+        for row in arr:
+            # There always have to be 9 columns
+            if len(row) == 9 and (row[0] in ['','null']) == False :
+                sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
+            else:
+                mylog('none', ['[Plugins] Skipped invalid sql result'])
# check if the subprocess / SQL query failed / there was no valid output
@@ -329,20 +341,7 @@ def get_plugin_setting_value(plugin, function_key):
return None
-#-------------------------------------------------------------------------------
-# Return setting value
-def get_setting_value(key):
-
-    set = get_setting(key)
-    if get_setting(key) is not None:
-
-        setVal = set[6] # setting value
-        setTyp = set[3] # setting type
-
-        return setVal
-
-    return ''
#-------------------------------------------------------------------------------
def flatten_array(arr, encodeBase64=False):
diff --git a/pialert/scanners/pihole.py b/pialert/scanners/pihole.py
index c28fa164..4f58bf6b 100755
--- a/pialert/scanners/pihole.py
+++ b/pialert/scanners/pihole.py
@@ -1,11 +1,14 @@
""" module to import db and leases from PiHole """
+# TODO remove this file in the future
import sqlite3
import conf
-from const import piholeDB, piholeDhcpleases
from logger import mylog
+piholeDhcpleases = '/etc/pihole/dhcp.leases'
+piholeDB = '/etc/pihole/pihole-FTL.db'
+
#-------------------------------------------------------------------------------
def copy_pihole_network (db):
"""