unmerge external header server. Sorry!
@@ -4,8 +4,7 @@ import os
import re
import urllib3
import difflib
import requests
import json

from changedetectionio import content_fetcher, html_tools

@@ -59,19 +58,6 @@ class perform_site_check():
        # Tweak the base config with the per-watch ones
        request_headers = self.datastore.data['settings']['headers'].copy()

-        if self.datastore.data['watching'][uuid].get('external_header_server') is not None and self.datastore.data['watching'][uuid].get('external_header_server') != "" and self.datastore.data['watching'][uuid].get('external_header_server') != "None":
-            try:
-                resp = requests.get(self.datastore.data['watching'][uuid].get('external_header_server'))
-                if resp.status_code != 200:
-                    raise Exception("External header server returned non-200 response. Please check the URL for the server")
-
-                data = json.loads(resp.text.strip())
-                request_headers.update(resp.json())
-
-            except json.decoder.JSONDecodeError:
-                raise Exception("Failed to decode JSON response from external header server")
-
        request_headers.update(extra_headers)

        # https://github.com/psf/requests/issues/4525

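For context, a minimal standalone sketch of what the removed block did, assuming the watch's external_header_server URL answers a GET request with a flat JSON object of header names to values. The helper name and its simplified arguments are illustrative only, not part of the codebase:

import requests

def fetch_external_headers(url, base_headers):
    # Hypothetical helper mirroring the removed logic above: GET the
    # configured URL and merge the returned JSON object into the headers.
    if url is None or url in ("", "None"):
        return dict(base_headers)

    resp = requests.get(url)
    if resp.status_code != 200:
        raise Exception("External header server returned non-200 response. Please check the URL for the server")

    try:
        extra = resp.json()  # e.g. {"Cookie": "foobar", "User-Agent": "my-agent/1.0"}
    except ValueError:
        raise Exception("Failed to decode JSON response from external header server")

    merged = dict(base_headers)
    merged.update(extra)
    return merged

In the removed code this merge happened into request_headers right after the global settings were copied, before the per-watch extra_headers were applied.
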
@@ -370,7 +370,6 @@ class watchForm(commonSettingsForm):
    title = StringField('Title', default='')

    ignore_text = StringListField('Ignore text', [ValidateListRegex()])
-    external_header_server = fields.URLField('External Header Server', validators=[validators.Optional(), validateURL()])
    headers = StringDictKeyValue('Request headers')
    body = TextAreaField('Request body', [validators.Optional()])
    method = SelectField('Request method', choices=valid_method, default=default_method)

@@ -26,7 +26,6 @@ class model(dict):
        'previous_md5': False,
        'uuid': str(uuid.uuid4()),
        'headers': {}, # Extra headers to send
-        'external_header_server': None, # URL to a server that will return headers
        'body': None,
        'method': 'GET',
        #'history': {}, # Dict of timestamp and output stripped filename

@@ -120,12 +120,6 @@
        <div class="pure-control-group" id="request-method">
            {{ render_field(form.method) }}
        </div>
-        <div class="pure-control-group" id="external-header-server">
-            {{ render_field(form.external_header_server, placeholder="http://example.com/watch1") }}
-            <div class="pure-form-message-inline">
-                The watch will perform a GET request before each check to this URL and will use the headers in addition to the ones listed below and in global settings. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Run-JavaScript-before-change-detection">More help and examples here</a>
-            </div>
-        </div>
        <div class="pure-control-group" id="request-headers">
            {{ render_field(form.headers, rows=5, placeholder="Example
Cookie: foobar

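The removed help text above describes the contract: before each check, the configured URL receives a plain GET and its JSON body is treated as extra request headers. A hypothetical probe against the placeholder URL from the form (the URL and the header names/values are made up for illustration):

import requests

# http://example.com/watch1 is just the placeholder shown in the form above;
# a real external header server would answer with something like
# {"Cookie": "foobar", "Authorization": "Bearer abc123"}
resp = requests.get("http://example.com/watch1")
print(resp.status_code)  # expected to be 200
print(resp.json())       # flat JSON object of header names to values
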
@@ -23,7 +23,7 @@ def test_basic_auth(client, live_server):
    # Check form validation
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": "", "url": test_url, "tag": "", "headers": "", "external_header_server": "", 'fetch_backend': "html_requests"},
+        data={"css_filter": "", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

@@ -98,7 +98,7 @@ def test_check_markup_css_filter_restriction(client, live_server):
    # Add our URL to the import page
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": css_filter, "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"css_filter": css_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

@@ -114,7 +114,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
    # Add our URL to the import page
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"ignore_status_codes": "y", "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"ignore_status_codes": "y", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

@@ -29,7 +29,7 @@ def test_share_watch(client, live_server):
    # Add our URL to the import page
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": css_filter, "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"css_filter": css_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

@@ -89,7 +89,7 @@ def test_check_xpath_filter_utf8(client, live_server):
    time.sleep(1)
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

@@ -143,7 +143,7 @@ def test_check_xpath_text_function_utf8(client, live_server):
    time.sleep(1)
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

@@ -192,7 +192,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
    # Add our URL to the import page
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": xpath_filter, "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"css_filter": xpath_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

@@ -233,7 +233,7 @@ def test_xpath_validation(client, live_server):
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": "/something horrible", "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"css_filter": "/something horrible", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data

@@ -263,7 +263,7 @@ def test_check_with_prefix_css_filter(client, live_server):
    res = client.post(
        url_for("edit_page", uuid="first"),
-        data={"css_filter": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tag": "", "headers": "", "external_header_server": "",'fetch_backend': "html_requests"},
+        data={"css_filter": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )