Plugins code cleanup + refactoring 0.2

This commit is contained in:
Jokob-sk
2023-09-01 22:03:32 +10:00
parent b5e933ba12
commit a379054f5b
11 changed files with 191 additions and 460 deletions

View File

@@ -32,7 +32,7 @@ def main():
# Assuming Plugin_Objects is a class or function that reads data from the RESULT_FILE
# and returns a list of objects called 'devices'.
devices = Plugin_Objects(RESULT_FILE)
plugin_objects = Plugin_Objects(RESULT_FILE)
# Print a message to indicate that the script is starting.
mylog('verbose', ['[ARP Scan] In script '])
@@ -69,7 +69,7 @@ def main():
for device in unique_devices:
devices.add_object(
plugin_objects.add_object(
primaryId=device['mac'], # MAC (Device Name)
secondaryId=device['ip'], # IP Address
watched1=device['ip'], # Device Name
@@ -79,7 +79,7 @@ def main():
extra='arp-scan',
foreignKey="")
devices.write_result_file()
plugin_objects.write_result_file()
return 0

View File

@@ -1,79 +1,50 @@
#!/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
from __future__ import unicode_literals
from time import sleep, time, strftime
import requests
import pathlib
import threading
import subprocess
import socket
import argparse
import io
import sys
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import pwd
import os
from dhcp_leases import DhcpLeases
sys.path.append("/home/pi/pialert/front/plugins")
sys.path.append('/home/pi/pialert/pialert')
from plugin_helper import Plugin_Object, Plugin_Objects
from logger import mylog, append_line_to_file
from helper import timeNowTZ
from const import logPath, pialertPath
from logger import mylog
from dhcp_leases import DhcpLeases
CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
LOG_FILE = os.path.join(CUR_PATH, 'script.log')
RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
def main():
    """Entry point: import devices from one or more dhcp.leases files.

    Expects a single CLI argument of the form
    ``paths=/path/a/dhcp.leases,/path/b/dhcp.leases`` and appends one
    result row per lease to RESULT_FILE via Plugin_Objects.
    """
    mylog('verbose', ['[DHCPLSS] In script'])

    # Ensure the result file exists without clobbering previous content
    # (append mode + empty write only touches/creates the file).
    # Using a context manager so the handle is not leaked.
    with open(RESULT_FILE, 'a') as last_run_logfile:
        last_run_logfile.write("")

    parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
    parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','")
    values = parser.parse_args()

    # Collector for all parsed lease entries.
    plugin_objects = Plugin_Objects(RESULT_FILE)

    if values.paths:
        for path in values.paths.split('=')[1].split(','):
            # get_entries() mutates and returns the same collector, so do NOT
            # add its return value back onto plugin_objects — that would
            # double every entry.
            plugin_objects = get_entries(path, plugin_objects)
            mylog('verbose', [f'[DHCPLSS] {len(plugin_objects)} entries found in "{path}"'])

    plugin_objects.write_result_file()
# -----------------------------------------------------------------------------
def get_entries(path, plug_objects):
# PiHole dhcp.leases format
def get_entries(path, plugin_objects):
if 'pihole' in path:
data = []
reporting = False
with open(piholeDhcpleases, 'r') as f:
for line in f:
row = line.rstrip().split()
# rows: DHCP_DateTime, DHCP_MAC, DHCP_IP, DHCP_Name, DHCP_MAC2
if len(row) == 5:
plug_objects.add_object(
plugin_objects.add_object(
primaryId=row[1],
secondaryId=row[2],
watched1='True',
@@ -83,15 +54,11 @@ def get_entries(path, plug_objects):
extra=path,
foreignKey=row[1]
)
# Generic dhcp.leases format
else:
leases = DhcpLeases(path)
leasesList = leases.get()
for lease in leasesList:
plug_objects.add_object(
plugin_objects.add_object(
primaryId=lease.ethernet,
secondaryId=lease.ip,
watched1=lease.active,
@@ -101,13 +68,7 @@ def get_entries(path, plug_objects):
extra=path,
foreignKey=lease.ethernet
)
return plugin_objects
return plug_objects
#===============================================================================
# BEGIN
#===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,133 +1,71 @@
#!/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
from __future__ import unicode_literals
from time import sleep, time, strftime
import requests
import pathlib
import threading
import subprocess
import socket
import argparse
import io
import sys
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import pwd
import os
curPath = str(pathlib.Path(__file__).parent.resolve())
log_file = curPath + '/script.log'
last_run = curPath + '/last_result.log'
print(last_run)
# Workflow
from datetime import datetime
from plugin_helper import Plugin_Objects, Plugin_Object
from logger import mylog
def main():
    """Discover DHCP servers on the local network.

    Broadcasts a DHCP discover via ``sudo nmap --script
    broadcast-dhcp-discover``, parses the responses and writes one row per
    responding server to the plugin result file.
    """
    mylog('verbose', ['[DHCPSRVS] In script'])

    RESULT_FILE = 'last_result.log'
    plugin_objects = Plugin_Objects(RESULT_FILE)

    timeoutSec = 10
    nmapArgs = ['sudo', 'nmap', '--script', 'broadcast-dhcp-discover']

    try:
        dhcp_probes = 1  # number of discovery probes to send
        newLines = [datetime.now().strftime("%Y-%m-%d %H:%M:%S")]
        for _ in range(dhcp_probes):
            output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)
            newLines += output.split("\n")

        # Parse the nmap output. A "Response N of M" line starts a new
        # server entry; subsequent "key: value" lines fill in its fields.
        newEntries = []
        for line in newLines:
            if 'Response ' in line and ' of ' in line:
                newEntries.append(Plugin_Object())
            elif not newEntries:
                # Preamble (timestamp, nmap banner) before the first
                # "Response" marker — nothing to attach values to yet.
                continue
            # NOTE: 'Domain Name Server' must be tested BEFORE 'Domain Name';
            # the plain substring match on 'Domain Name' would otherwise
            # shadow it and the DNS field would never be populated.
            elif 'Server Identifier' in line:
                newEntries[-1].primaryId = line.split(':')[1].strip()
            elif 'Domain Name Server' in line:
                newEntries[-1].watched1 = line.split(':')[1].strip()
            elif 'Domain Name' in line:
                newEntries[-1].secondaryId = line.split(':')[1].strip()
            elif 'IP Offered' in line:
                newEntries[-1].watched2 = line.split(':')[1].strip()
            elif 'Interface' in line:
                newEntries[-1].watched3 = line.split(':')[1].strip()
            elif 'Router' in line:
                value = line.split(':')[1].strip()
                newEntries[-1].watched4 = value
                newEntries[-1].foreignKey = value

            # Accumulate the remaining lease details into 'extra',
            # comma-separated.
            if 'IP Address Lease Time' in line or 'Subnet Mask' in line or 'Broadcast Address' in line:
                newVal = line.split(':')[1].strip()
                if newEntries[-1].extra == '':
                    newEntries[-1].extra = newVal
                else:
                    newEntries[-1].extra += ',' + newVal

        for e in newEntries:
            plugin_objects.add_object(
                primaryId=e.primaryId,
                secondaryId=e.secondaryId,
                watched1=e.watched1,
                watched2=e.watched2,
                watched3=e.watched3,
                watched4=e.watched4,
                extra=e.extra,
                foreignKey=e.foreignKey
            )

        plugin_objects.write_result_file()

    except Exception as e:
        # Best-effort plugin: log and swallow so the scheduler keeps running.
        mylog('none', ['Error in main:', str(e)])

#===============================================================================
# BEGIN
#===============================================================================
if __name__ == '__main__':
    main()

View File

@@ -227,8 +227,8 @@
} ,
{
"column": "Extra",
"css_classes": "col-sm-3",
"show": true,
"css_classes": "col-sm-1",
"show": false,
"type": "label",
"default_value":"",
"options": [],
@@ -262,7 +262,7 @@
{
"column": "ForeignKey",
"css_classes": "col-sm-2",
"show": true,
"show": false,
"type": "device_mac",
"default_value":"",
"options": [],

View File

@@ -34,7 +34,7 @@ def main():
# Plugin_Objects is a class that reads data from the RESULT_FILE
# and returns a list of results.
results = Plugin_Objects(RESULT_FILE)
plugin_objects = Plugin_Objects(RESULT_FILE)
# Print a message to indicate that the script is starting.
mylog('debug', ['[NMAP Scan] In script '])
@@ -55,7 +55,7 @@ def main():
for entry in entries:
results.add_object(
plugin_objects.add_object(
primaryId = entry.mac, # MAC (Device Name)
secondaryId = entry.port, # IP Address (always 0.0.0.0)
watched1 = entry.state, # Device Name
@@ -67,7 +67,7 @@ def main():
)
# generate last_result.log file
results.write_result_file()
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------

View File

@@ -18,9 +18,7 @@ from scapy.utils import PcapWriter
sys.setrecursionlimit(30000)
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)#supress Scapy warnings`
runPath = os.path.dirname(os.path.abspath(__file__))
runPathTmp = runPath + "/.."
logPath = runPathTmp + '/front/log'
logPath = '/home/pi/pialert/front/log'
# DEBUG
isDebug = False

View File

@@ -37,7 +37,7 @@ def main():
# Assuming Plugin_Objects is a class or function that reads data from the RESULT_FILE
# and returns a list of objects called 'devices'.
plug_objects = Plugin_Objects(RESULT_FILE)
plugin_objects = Plugin_Objects(RESULT_FILE)
# Print a message to indicate that the script is starting.
mylog('verbose',['[PHOLUS] In script'])
@@ -78,7 +78,7 @@ def main():
for entry in all_entries:
plug_objects.add_object(
plugin_objects.add_object(
# "Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value"
primaryId = entry[2],
secondaryId = entry[3],
@@ -89,7 +89,7 @@ def main():
extra = entry[0],
foreignKey = entry[2])
plug_objects.write_result_file()
plugin_objects.write_result_file()
return 0
@@ -124,8 +124,8 @@ def execute_pholus_scan(userSubnets, timeoutSec):
result_list += pholus_output_list
mylog('verbose', ["List len:", len(result_list)])
mylog('verbose',["List:", result_list])
mylog('verbose', ["[PHOLUS] Pholus output number of entries:", len(result_list)])
mylog('verbose', ["[PHOLUS] List:", result_list])
return result_list

View File

@@ -4,178 +4,94 @@
# python3 /home/pi/pialert/front/plugins/snmp_discovery/script.py routers='snmpwalk -v 2c -c public -OXsq 192.168.1.1 .1.3.6.1.2.1.3.1.1.2'
from __future__ import unicode_literals
from time import sleep, time, strftime
import requests
from requests import Request, Session, packages
import pathlib
import threading
import subprocess
import socket
import json
import argparse
import io
import sys
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import pwd
import os
import sys
sys.path.append("/home/pi/pialert/front/plugins")
sys.path.append('/home/pi/pialert/pialert')
from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64
from logger import mylog, append_line_to_file
from logger import mylog
from helper import timeNowTZ
from const import logPath, pialertPath
CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
LOG_FILE = os.path.join(CUR_PATH, 'script.log')
RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
# Workflow
def main():
    """Discover devices from the ARP table(s) of RFC1213-compliant
    routers/switches, queried via snmpwalk.

    Expects a CLI argument of the form
    ``routers='snmpwalk -v 2c -c public -OXsq <ip> .1.3.6.1.2.1.3.1.1.2'``
    (multiple commands separated by commas). Writes one result row per
    discovered MAC/IP pair.
    """
    mylog('verbose', ['[SNMPDSC] In script '])

    parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
    parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple")
    values = parser.parse_args()

    plugin_objects = Plugin_Objects(RESULT_FILE)

    if values.routers:
        routersStr = values.routers.split('=')[1].replace('\'', '')
        # One snmpwalk command per router, comma-separated.
        routers = routersStr.split(',') if ',' in routersStr else [routersStr]

        for router in routers:
            # e.g. snmpwalk -v 2c -c public -OXsq 192.168.1.1 .1.3.6.1.2.1.3.1.1.2
            mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', router])

            timeoutSec = 10
            snmpwalkArgs = router.split(' ')
            output = subprocess.check_output(snmpwalkArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)

            # Sample output line:
            # iso.3.6.1.2.1.3.1.1.2.3.1.192.168.1.2 "6C 6C 6C 6C 6C 6C "
            mylog('verbose', ['[SNMPDSC] output: ', output])

            for line in output.split('\n'):
                tmpSplt = line.split('"')
                if len(tmpSplt) == 3:
                    ipStr = tmpSplt[0].split('.')[-4:]      # last 4 dotted fields = IP
                    macStr = tmpSplt[1].strip().split(' ')  # hex bytes of the MAC
                    # Test the current line (not the whole output) so only
                    # genuine OID lines produce entries.
                    if 'iso.' in line and len(ipStr) == 4:
                        macAddress = ':'.join(macStr)
                        ipAddress = '.'.join(ipStr)
                        mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
                        plugin_objects.add_object(
                            primaryId=macAddress,
                            secondaryId=ipAddress,
                            watched1='(unknown)',
                            watched2=snmpwalkArgs[6],  # router IP from the snmpwalk args
                            extra=line,
                            foreignKey=macAddress  # Use the primary ID as the foreign key
                        )

    mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
    plugin_objects.write_result_file()

#===============================================================================
# BEGIN
#===============================================================================
if __name__ == '__main__':
    main()

View File

@@ -27,11 +27,11 @@ def main():
mylog('verbose', ['[UNDIS] In script'])
UNDIS_devices = Plugin_Objects( RESULT_FILE )
plugin_objects = Plugin_Objects( RESULT_FILE )
if values.devices:
for fake_dev in values.devices.split('=')[1].split(','):
UNDIS_devices.add_object(
plugin_objects.add_object(
primaryId=fake_dev, # MAC (Device Name)
secondaryId="0.0.0.0", # IP Address (always 0.0.0.0)
watched1=fake_dev, # Device Name
@@ -41,7 +41,7 @@ def main():
extra="",
foreignKey="")
UNDIS_devices.write_result_file()
plugin_objects.write_result_file()
return 0

View File

@@ -1,148 +1,66 @@
#!/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
# Example call
# python3 /home/pi/pialert/front/plugins/website_monitor/script.py urls=http://google.com,http://bing.com
from __future__ import unicode_literals
from time import sleep, time, strftime
import argparse
import requests
import pathlib
import argparse
import io
#import smtplib
import sys
#from smtp_config import sender, password, receivers, host, port
import os
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import pwd
import os
sys.path.extend(["/home/pi/pialert/front/plugins", "/home/pi/pialert/pialert"])
curPath = str(pathlib.Path(__file__).parent.resolve())
log_file = curPath + '/script.log'
last_run = curPath + '/last_result.log'
from plugin_helper import Plugin_Objects
from datetime import datetime
from const import logPath
print(last_run)
# Workflow
CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
def main():
    """CLI entry point: check every URL passed as ``urls=a,b,...`` and
    record status code and latency for each into RESULT_FILE."""
    parser = argparse.ArgumentParser(description='Simple URL monitoring tool')
    parser.add_argument('urls', action="store", help="URLs to check separated by ','")
    values = parser.parse_args()

    if values.urls:
        plugin_objects = Plugin_Objects(RESULT_FILE)
        # service_monitoring() fills the collector and returns it.
        plugin_objects = service_monitoring(values.urls.split('=')[1].split(','), plugin_objects)
        plugin_objects.write_result_file()
# -----------------------------------------------------------------------------
def service_monitoring_log(site, status, latency):
# global monitor_logfile
# Log status message to log file
with open(log_file, 'a') as monitor_logfile:
monitor_logfile.write("{} | {} | {} | {}\n".format(strftime("%Y-%m-%d %H:%M:%S"),
site,
status,
latency,
)
)
with open(last_run, 'a') as last_run_logfile:
# https://www.duckduckgo.com|192.168.0.1|2023-01-02 15:56:30|200|0.9898|null|null|Best search engine|null
last_run_logfile.write("{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
site,
'null',
strftime("%Y-%m-%d %H:%M:%S"),
status,
latency,
'null',
'null',
'null',
'null',
)
)
# -----------------------------------------------------------------------------
def check_services_health(site):
    """Send a GET request to *site* and return ``(status_code, latency)``.

    Latency is the response time in seconds. Self-signed certificates are
    accepted (``verify=False``). On any failure the sentinel pair
    ``(503, 99999)`` is returned instead of raising.
    """
    # Suppress the warning triggered by verify=False below.
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    try:
        resp = requests.get(site, verify=False, timeout=10)
        latency = resp.elapsed.total_seconds()
        status = resp.status_code
    except Exception:
        # Covers SSL errors, timeouts, DNS failures, connection refusals, …
        # (was a bare `except:`, which would also swallow KeyboardInterrupt)
        status = 503
        latency = 99999
    return status, latency
# -----------------------------------------------------------------------------
def get_username():
    """Return the login name of the user running this process."""
    current_uid = os.getuid()
    return pwd.getpwuid(current_uid).pw_name
# -----------------------------------------------------------------------------
def service_monitoring(urls, plugin_objects):
    """Check every URL in *urls* once and append one result row per site
    to *plugin_objects*.

    Returns the (mutated) collector so the caller can chain
    ``write_result_file()``. Fields with no value use the literal string
    'null', matching the plugin result-file convention.
    """
    for site in urls:
        status, latency = check_services_health(site)
        plugin_objects.add_object(
            primaryId=site,        # the monitored URL
            secondaryId='null',
            watched1=status,       # HTTP status code (503 on failure)
            watched2=latency,      # response time in seconds
            watched3='null',
            watched4='null',
            extra='null',
            foreignKey='null'
        )
    return plugin_objects
#===============================================================================
# BEGIN
#===============================================================================
if __name__ == '__main__':
sys.exit(main())

View File

@@ -100,7 +100,7 @@ def construct_notifications(db, sqlQuery, tableTitle, skipText = False, supplied
notiStruc = noti_struc(jsn, text, html)
mylog('debug', ['[Notification] Ports: notiStruc:', json.dumps(notiStruc.__dict__, indent=4) ])
mylog('debug', ['[Notification] notiStruc:', json.dumps(notiStruc.__dict__, indent=4) ])
return notiStruc