Frontend user events rewrite v0.1
@@ -548,6 +548,15 @@ function isEmpty(value)
    return emptyArr.includes(value)
}


// -----------------------------------------------------------------------------
// Generate a GUID
function getGuid() {
    return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, c =>
        (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
    );
}

// -----------------------------------------------------------------------------
// Loading Spinner overlay
// -----------------------------------------------------------------------------

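For reference, the Web Crypto one-liner above yields an RFC 4122 version-4 style UUID, which the front end later uses to tag each queued action. A rough Python equivalent (illustration only, not part of this commit) would be:

import uuid

# Illustration only: same 8-4-4-4-12 hex format as the getGuid() helper above.
action_id = str(uuid.uuid4())
print(action_id)   # e.g. 'b3f1c2d4-5e6f-4a7b-8c9d-0e1f2a3b4c5d'
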
front/js/settings_utils.js (Normal file → Executable file, 0 lines changed)
@@ -760,7 +760,7 @@ function performLogManage() {
                showModalOk ('Result', data );
            }
        })
    }
}

// --------------------------------------------------------
function scrollDown()

@@ -13,6 +13,7 @@ require dirname(__FILE__).'/../templates/skinUI.php';

$FUNCTION = [];
$SETTINGS = [];
$ACTION = "";

// init request params
if(array_key_exists('function', $_REQUEST) != FALSE)
@@ -20,21 +21,39 @@ if(array_key_exists('function', $_REQUEST) != FALSE)
    $FUNCTION = $_REQUEST['function'];
}

if(array_key_exists('settings', $_REQUEST) != FALSE)
{
    $SETTINGS = $_REQUEST['settings'];
}

// call functions based on requested params
if ($FUNCTION == 'savesettings')
{
    saveSettings();
}
elseif ($FUNCTION == 'cleanLog')
{
    cleanLog($SETTINGS);
switch ($FUNCTION) {
    case 'savesettings':
        saveSettings();
        break;

    case 'cleanLog':

        if(array_key_exists('settings', $_REQUEST) != FALSE)
        {
            $SETTINGS = $_REQUEST['settings'];
        }

        cleanLog($SETTINGS);
        break;

    case 'addToExecutionQueue':

        if(array_key_exists('action', $_REQUEST) != FALSE)
        {
            $ACTION = $_REQUEST['action'];
        }

        addToExecutionQueue($ACTION);
        break;

    default:
        // Handle any other cases or errors if needed
        break;
}


//------------------------------------------------------------------------------
// Formatting data functions
//------------------------------------------------------------------------------
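For context, the switch above is reached when the front end POSTs function=addToExecutionQueue together with an action string to php/server/util.php (see the JavaScript further down in this commit). A minimal sketch of the same request made outside the UI, using Python's requests library — the base URL is an assumption, everything else mirrors the payload used by this commit:

import uuid
import requests

BASE_URL = "http://localhost:20211"   # assumed Pi.Alert web UI address

# Same shape the front end builds: guid|event|plugin (e.g. run|ARPSCAN)
action = f"{uuid.uuid4()}|run|ARPSCAN"

resp = requests.post(
    f"{BASE_URL}/php/server/util.php",
    data={"function": "addToExecutionQueue", "action": action},
)
print(resp.status_code, resp.text)
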
@@ -195,6 +214,25 @@ function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFil

}

// Adds an action to perform into the execution_queue.log file
function addToExecutionQueue($action)
{
    global $logFolderPath, $timestamp;

    $logFile = 'execution_queue.log';
    $fullPath = $logFolderPath . $logFile;

    // Open the file or skip if it can't be opened
    if ($file = fopen($fullPath, 'a')) {
        fwrite($file, "[" . $timestamp . "]|" . $action . PHP_EOL);
        fclose($file);
        displayMessage('Action "'.$action.'" added to the execution queue.', false, true, true, true);
    } else {
        displayMessage('Log file not found or couldn\'t be created.', false, true, true, true);
    }
}


// ----------------------------------------------------------------------------------------
function cleanLog($logFile)
{

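Each appended entry therefore ends up as "[timestamp]|guid|event|param": the PHP prepends the timestamp to the guid|event|plugin string sent by the front end. A small sketch of slicing such a line the way the backend later does (the sample values are made up):

# Hypothetical entry matching what addToExecutionQueue() appends:
line = "[2023-08-01 10:15:00]|b3f1c2d4-5e6f-4a7b-8c9d-0e1f2a3b4c5d|run|ARPSCAN"

# Indices 2 and 3 after splitting on '|' are the event and its parameter
columns = line.strip().split("|")[2:4]
if len(columns) == 2:
    event, param = columns
    print(event, param)   # -> run ARPSCAN
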
@@ -489,7 +489,7 @@ while ($row = $result -> fetchArray (SQLITE3_ASSOC)) {
    data-myparam="${codeName}"
    data-myparam-plugin="${group}"
    data-myevent="${event}"
    onclick="handleEvent(this)"
    onclick="addToExecutionQueue(this)"
    >
    <i title="${getString(event + "_event_tooltip")}" class="fa ${getString(event + "_event_icon")}">
    </i>
@@ -856,22 +856,73 @@ while ($row = $result -> fetchArray (SQLITE3_ASSOC)) {
}


function updateModalState(){
// function updateModalState(){

    setTimeout(function(){
        displayedEvent = $('#'+modalEventStatusId).html()
//     setTimeout(function(){
//         displayedEvent = $('#'+modalEventStatusId).html()

        // loop until finished
        if(displayedEvent.indexOf('finished') == -1) // if the message is different from finished, check again in 2s
        {
//         // loop until finished
//         if(displayedEvent.indexOf('finished') == -1) // if the message is different from finished, check again in 2s
//         {

            getParam(modalEventStatusId,"Front_Event", true)
//             getParam(modalEventStatusId,"Front_Event", true)

            updateModalState()
//             updateModalState()

        }
//         }
//     }, 2000);
// }


// --------------------------------------------------------
// Calls a backend function to add a front-end event (specified by the attributes 'data-myevent' and 'data-myparam-plugin' on the passed element) to an execution queue
function addToExecutionQueue(element)
{

    // value has to be in format event|param. e.g. run|ARPSCAN
    action = `${getGuid()}|${$(element).attr('data-myevent')}|${$(element).attr('data-myparam-plugin')}`

    // addToExecutionQueue(action)

    $.ajax({
        method: "POST",
        url: "php/server/util.php",
        data: { function: "addToExecutionQueue", action: action },
        success: function(data, textStatus) {
            showModalOk ('Result', data );
        }
    })
}

// TODO
function updateModalState() {
    setTimeout(function() {
        // Fetch the content from the log file using an AJAX request
        $.ajax({
            url: '~/log/execution_queue.log',
            type: 'GET',
            success: function(data) {
                // Update the content of the HTML element (e.g., a div with id 'logContent')
                $('#logContent').html(data);

                // Check if the displayed content contains 'finished'
                if (data.indexOf('finished') === -1) {
                    // If not finished, continue to update
                    updateModalState();
                }
            },
            error: function() {
                // Handle error, such as the file not being found
                $('#logContent').html('Error: Log file not found.');
            }
        });
    }, 2000);
}
}

// Call the function to start the periodic updates
updateModalState();



// -----------------------------------------------------------------------------

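The rewritten updateModalState() above polls the queue log over HTTP every two seconds and stops as soon as the text 'finished' appears in the response. The same idea, sketched in Python purely for illustration (the URL, the timeout, and the error handling beyond what the JavaScript shows are assumptions):

import time
import requests

LOG_URL = "http://localhost:20211/log/execution_queue.log"   # assumed location

def wait_until_finished(poll_seconds=2, timeout=120):
    # Poll the execution queue log until 'finished' shows up or we give up.
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            text = requests.get(LOG_URL).text
        except requests.RequestException:
            text = ""
        if "finished" in text:
            return True
        time.sleep(poll_seconds)
    return False
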
@@ -729,35 +729,66 @@ class plugin_object_class:
#===============================================================================

#-------------------------------------------------------------------------------
# def check_and_run_user_event(db, pluginsState):

#     sql = db.sql # TO-DO
#     sql.execute(""" select * from Parameters where par_ID = "Front_Event" """)
#     rows = sql.fetchall()

#     event, param = ['','']
#     if len(rows) > 0 and rows[0]['par_Value'] != 'finished':
#         keyValue = rows[0]['par_Value'].split('|')

#         if len(keyValue) == 2:
#             event = keyValue[0]
#             param = keyValue[1]
#         else:
#             return pluginsState

#         if event == 'test':
#             pluginsState = handle_test(param, db, pluginsState)
#         if event == 'run':
#             pluginsState = handle_run(param, db, pluginsState)

#         # clear event execution flag
#         sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'")

#         # commit to DB
#         db.commitDB()

#     return pluginsState


def check_and_run_user_event(db, pluginsState):
    # Check if the log file exists
    logFile = os.path.join(logPath, "execution_queue.log")

    sql = db.sql # TO-DO
    sql.execute(""" select * from Parameters where par_ID = "Front_Event" """)
    rows = sql.fetchall()

    event, param = ['','']
    if len(rows) > 0 and rows[0]['par_Value'] != 'finished':
        keyValue = rows[0]['par_Value'].split('|')

        if len(keyValue) == 2:
            event = keyValue[0]
            param = keyValue[1]
        else:
    if not os.path.exists(logFile):
        return pluginsState

        if event == 'test':
            pluginsState = handle_test(param, db, pluginsState)
        if event == 'run':
            pluginsState = handle_run(param, db, pluginsState)
    with open(logFile, "r") as file:
        lines = file.readlines()

        # clear event execution flag
        sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'")
    for line in lines:
        # Split the line by '|', and take the third and fourth columns (indices 2 and 3)
        columns = line.strip().split('|')[2:4]

        # commit to DB
        db.commitDB()
        if len(columns) != 2:
            continue

        event, param = columns

        if event == 'test':
            pluginsState = handle_test(param, db, pluginsState)
        if event == 'run':
            pluginsState = handle_run(param, db, pluginsState)

    # Clear the log file
    open(logFile, "w").close()

    return pluginsState


#-------------------------------------------------------------------------------
def handle_run(runType, db, pluginsState):


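To see the new consumer end to end, here is a self-contained sketch that writes two sample queue entries, processes them the same way check_and_run_user_event() now does, and truncates the file afterwards. The handle_test/handle_run stubs, the temp path, and the sample values are placeholders, not the real implementations:

import os
import tempfile
import uuid

def handle_test(param):
    print("test ->", param)     # stub for the real handle_test()

def handle_run(param):
    print("run  ->", param)     # stub for the real handle_run()

logFile = os.path.join(tempfile.gettempdir(), "execution_queue.log")

# Producer side: what the PHP endpoint appends for two queued actions
with open(logFile, "w") as f:
    f.write(f"[2023-08-01 10:15:00]|{uuid.uuid4()}|run|ARPSCAN\n")
    f.write(f"[2023-08-01 10:16:00]|{uuid.uuid4()}|test|EMAIL\n")   # made-up param

# Consumer side: mirror of the new check_and_run_user_event() loop
with open(logFile, "r") as f:
    lines = f.readlines()

for line in lines:
    columns = line.strip().split("|")[2:4]
    if len(columns) != 2:
        continue
    event, param = columns
    if event == "test":
        handle_test(param)
    if event == "run":
        handle_run(param)

# Clear the processed queue, as the backend does
open(logFile, "w").close()
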
@@ -1,8 +0,0 @@
""" Publishers for Pi.Alert """

"""
each publisher exposes:

 - check_config () returning True / False
 - send (message) returning True / False
"""
@@ -1,111 +0,0 @@
import json
import subprocess
import hashlib
import hmac

import conf
from const import logPath
from helper import noti_obj, write_file
from logger import logResult, mylog

#-------------------------------------------------------------------------------
def check_config():
    if conf.WEBHOOK_URL == '':
        mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
        return False
    else:
        return True

#-------------------------------------------------------------------------------

def send (msg: noti_obj):

    # limit = 1024 * 1024 # 1MB limit (1024 bytes * 1024 bytes = 1MB)
    limit = conf.WEBHOOK_SIZE

    # use data type based on specified payload type
    if conf.WEBHOOK_PAYLOAD == 'json':
        # In this code, the truncate_json function is used to recursively traverse the JSON object
        # and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
        # using json.dumps and includes only the nodes that are within the limit.
        json_data = msg.json
        json_str = json.dumps(json_data)

        if len(json_str) <= limit:
            payloadData = json_data
        else:
            def truncate_json(obj):
                if isinstance(obj, dict):
                    return {
                        key: truncate_json(value)
                        for key, value in obj.items()
                        if len(json.dumps(value)) <= limit
                    }
                elif isinstance(obj, list):
                    return [
                        truncate_json(item)
                        for item in obj
                        if len(json.dumps(item)) <= limit
                    ]
                else:
                    return obj

            payloadData = truncate_json(json_data)
    if conf.WEBHOOK_PAYLOAD == 'html':
        if len(msg.html) > limit:
            payloadData = msg.html[:limit] + " <h1>(text was truncated)</h1>"
        else:
            payloadData = msg.html
    if conf.WEBHOOK_PAYLOAD == 'text':
        if len(msg.text) > limit:
            payloadData = msg.text[:limit] + " (text was truncated)"
        else:
            payloadData = msg.text

    # Define slack-compatible payload
    _json_payload = { "text": payloadData } if conf.WEBHOOK_PAYLOAD == 'text' else {
        "username": "Pi.Alert",
        "text": "There are new notifications",
        "attachments": [{
            "title": "Pi.Alert Notifications",
            "title_link": conf.REPORT_DASHBOARD_URL,
            "text": payloadData
        }]
    }


    # DEBUG - Write the json payload into a log file for debugging
    write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))

    # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
    # Consider: curl has the ability to load in data to POST from a file + piping
    if(conf.WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not conf.WEBHOOK_URL.endswith("/slack")):
        _WEBHOOK_URL = f"{conf.WEBHOOK_URL}/slack"
        curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
    else:
        _WEBHOOK_URL = conf.WEBHOOK_URL
        curlParams = ["curl","-i","-X", conf.WEBHOOK_REQUEST_METHOD , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]

    # Add HMAC signature if configured
    if(conf.WEBHOOK_SECRET != ''):
        h = hmac.new(conf.WEBHOOK_SECRET.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
        curlParams.insert(4,"-H")
        curlParams.insert(5,f"X-Webhook-Signature: sha256={h}")

    try:
        # Execute CURL call
        mylog('debug', ['[send_webhook] curlParams: ', curlParams])
        result = subprocess.run(curlParams, capture_output=True, text=True)

        stdout = result.stdout
        stderr = result.stderr

        # Write stdout and stderr into .log files for debugging if needed
        mylog('debug', ['[send_webhook] stdout: ', stdout])
        mylog('debug', ['[send_webhook] stderr: ', stderr])
        # logResult(stdout, stderr) # TO-DO should be changed to mylog

    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
        mylog('none', ['[send_webhook] Error: ', e.output])

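The (now removed) publisher signed its payload with HMAC-SHA256 and sent the digest as X-Webhook-Signature: sha256=<hex>. Note that the digest is computed over the compact json.dumps(..., separators=(',', ':')) form, so a verifier would need to re-serialize the parsed body the same way rather than hash the raw request bytes. A rough verification sketch for a receiving endpoint, under that assumption (Python 3.9+ for removeprefix):

import hashlib
import hmac
import json

WEBHOOK_SECRET = "change-me"   # must match conf.WEBHOOK_SECRET on the sender

def verify_signature(raw_body: bytes, signature_header: str) -> bool:
    # Re-serialize the parsed payload with the sender's compact separators
    # before comparing digests.
    compact = json.dumps(json.loads(raw_body), separators=(",", ":")).encode()
    expected = hmac.new(WEBHOOK_SECRET.encode("UTF-8"), compact, hashlib.sha256).hexdigest()
    received = signature_header.removeprefix("sha256=")
    return hmac.compare_digest(expected, received)
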
@@ -24,10 +24,6 @@ from const import pialertPath, logPath, apiPath
from helper import noti_obj, generate_mac_links, removeDuplicateNewLines, timeNowTZ, hide_email, updateState, get_file_content, write_file
from logger import logResult, mylog, print_log

from publishers.webhook import (check_config as webhook_check_config,
                                send as send_webhook)



#===============================================================================
# REPORTING
@@ -178,7 +174,7 @@ def get_notifications (db):

    notiStruc = construct_notifications(db, sqlQuery, "New devices")

    # collect "new_devices" for the webhook json
    # collect "new_devices" for the json
    json_new_devices = notiStruc.json["data"]

    mail_text = mail_text.replace ('<NEW_DEVICES_TABLE>', notiStruc.text + '\n')
@@ -194,7 +190,7 @@ def get_notifications (db):

    notiStruc = construct_notifications(db, sqlQuery, "Down devices")

    # collect "down_devices" for the webhook json
    # collect "down_devices" for the json
    json_down_devices = notiStruc.json["data"]

    mail_text = mail_text.replace ('<DOWN_DEVICES_TABLE>', notiStruc.text + '\n')
@@ -211,7 +207,7 @@ def get_notifications (db):

    notiStruc = construct_notifications(db, sqlQuery, "Events")

    # collect "events" for the webhook json
    # collect "events" for the json
    json_events = notiStruc.json["data"]

    mail_text = mail_text.replace ('<EVENTS_TABLE>', notiStruc.text + '\n')
@@ -224,7 +220,7 @@ def get_notifications (db):

    notiStruc = construct_notifications(db, sqlQuery, "Plugins")

    # collect "plugins" for the webhook json
    # collect "plugins" for the json
    json_plugins = notiStruc.json["data"]

    mail_text = mail_text.replace ('<PLUGINS_TABLE>', notiStruc.text + '\n')
