Compare commits
18 Commits
with-error...1917-glibc
| Author | SHA1 | Date |
|---|---|---|
|  | c4009d4f21 |  |
|  | a86f7457d1 |  |
|  | 5a18c96209 |  |
|  | a0665e1f18 |  |
|  | 9ffe7e0eaf |  |
|  | 3e5671a3a2 |  |
|  | cd1aca9ee3 |  |
|  | 6a589e14f3 |  |
|  | dbb76f3618 |  |
|  | 4ae27af511 |  |
|  | e1860549dc |  |
|  | 9765d56a23 |  |
|  | 349111eb35 |  |
|  | 71e50569a0 |  |
|  | c372942295 |  |
|  | 0aef5483d9 |  |
|  | c266c64b94 |  |
|  | 32e5498a9d |  |
.github/workflows/test-only.yml (vendored, 4 changed lines)
@@ -29,8 +29,8 @@ jobs:
       docker network create changedet-network

       # Selenium+browserless
-      docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
-      docker run --network changedet-network -d --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
+      docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome:4
+      docker run --network changedet-network -d --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.60-chrome-stable

     - name: Build changedetection.io container for testing
       run: |
Dockerfile (10 changed lines)
@@ -1,5 +1,5 @@
 # pip dependencies install stage
-FROM python:3.11-slim-bullseye as builder
+FROM python:3.11-slim-bookworm as builder

 # See `cryptography` pin comment in requirements.txt
 ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
@@ -20,6 +20,11 @@ WORKDIR /install

 COPY requirements.txt /requirements.txt

+# Instructing pip to fetch wheels from piwheels.org on ARMv6 and ARMv7 machines
+RUN if [ "$(dpkg --print-architecture)" = "armhf" ] || [ "$(dpkg --print-architecture)" = "armel" ]; then \
+    printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf; \
+    fi;
+
 RUN pip install --target=/dependencies -r /requirements.txt

 # Playwright is an alternative to Selenium
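piwheels serves prebuilt wheels for 32-bit ARM, so this stage writes an `extra-index-url` into `/etc/pip.conf` rather than letting pip compile heavy packages from source on those machines. A rough Python equivalent of the same architecture check (illustrative only; the Dockerfile itself relies on `dpkg --print-architecture`, and `extra_pip_args` is an invented helper):

```python
import platform

PIWHEELS_INDEX = "https://www.piwheels.org/simple"

def extra_pip_args() -> list:
    # armv6l / armv7l roughly correspond to Debian's armel / armhf architectures
    if platform.machine() in ("armv6l", "armv7l"):
        return ["--extra-index-url", PIWHEELS_INDEX]
    return []

print(extra_pip_args())  # [] on x86_64, the piwheels index on 32-bit ARM
```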
@@ -29,10 +34,9 @@ RUN pip install --target=/dependencies playwright~=1.27.1 \
     || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

 # Final image stage
-FROM python:3.11-slim-bullseye
+FROM python:3.11-slim-bookworm

 RUN apt-get update && apt-get install -y --no-install-recommends \
-    libssl1.1 \
     libxslt1.1 \
     # For pdftohtml
     poppler-utils \
@@ -38,7 +38,7 @@ from flask_paginate import Pagination, get_page_parameter
 from changedetectionio import html_tools
 from changedetectionio.api import api_v1

-__version__ = '0.45.3'
+__version__ = '0.45.5'

 from changedetectionio.store import BASE_URL_NOT_SET_TEXT
@@ -416,11 +416,18 @@ def changedetection_app(config=None, datastore_o=None):

         # Sort by last_changed and add the uuid which is usually the key..
         sorted_watches = []
+        with_errors = request.args.get('with_errors') == "1"
+        errored_count = 0
         search_q = request.args.get('q').strip().lower() if request.args.get('q') else False
         for uuid, watch in datastore.data['watching'].items():
+            if with_errors and not watch.get('last_error'):
+                continue
+
             if limit_tag and not limit_tag in watch['tags']:
                 continue

+            if watch.get('last_error'):
+                errored_count += 1
+
             if search_q:
                 if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
                     sorted_watches.append(watch)
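The `with_errors=1` query parameter restricts the overview to watches whose last check recorded an error, while `errored_count` is tallied so the "With errors (N)" button added in the template further down can show a total. A minimal, self-contained sketch of the same filtering idea (the sample data and the `filter_watches` helper are invented for illustration):

```python
def filter_watches(watching: dict, with_errors: bool = False, search_q: str = ""):
    """Return (matching_watches, errored_count) using the same rules as the hunk above."""
    matches, errored_count = [], 0
    for uuid, watch in watching.items():
        if with_errors and not watch.get('last_error'):
            continue  # only keep watches whose last check failed
        if watch.get('last_error'):
            errored_count += 1
        if not search_q or search_q in watch.get('url', '').lower():
            matches.append({**watch, 'uuid': uuid})
    return matches, errored_count

# Example: two watches, one of which failed its last check
watching = {
    'a1': {'url': 'https://example.com/ok', 'last_error': False},
    'b2': {'url': 'https://example.com/broken', 'last_error': 'Connection refused'},
}
print(filter_watches(watching, with_errors=True))  # only 'b2' survives, errored_count == 1
```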
@@ -442,6 +449,7 @@ def changedetection_app(config=None, datastore_o=None):
                                  active_tag=limit_tag,
                                  app_rss_token=datastore.data['settings']['application']['rss_access_token'],
                                  datastore=datastore,
+                                 errored_count=errored_count,
                                  form=form,
                                  guid=datastore.data['app_guid'],
                                  has_proxies=datastore.proxy_list,
@@ -855,7 +863,10 @@ def changedetection_app(config=None, datastore_o=None):
     def mark_all_viewed():

         # Save the current newest history as the most recently viewed
+        with_errors = request.args.get('with_errors') == "1"
         for watch_uuid, watch in datastore.data['watching'].items():
+            if with_errors and not watch.get('last_error'):
+                continue
             datastore.set_last_viewed(watch_uuid, int(time.time()))

         return redirect(url_for('index'))
@@ -1264,6 +1275,8 @@ def changedetection_app(config=None, datastore_o=None):
         # Forced recheck will skip the 'skip if content is the same' rule (, 'reprocess_existing_data': True})))
         tag = request.args.get('tag')
         uuid = request.args.get('uuid')
+        with_errors = request.args.get('with_errors') == "1"
+
         i = 0

         running_uuids = []
@@ -1279,6 +1292,8 @@ def changedetection_app(config=None, datastore_o=None):
             # Items that have this current tag
             for watch_uuid, watch in datastore.data['watching'].items():
                 if tag in watch.get('tags', {}):
+                    if with_errors and not watch.get('last_error'):
+                        continue
                     if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
                         update_q.put(
                             queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})
@@ -1289,8 +1304,11 @@ def changedetection_app(config=None, datastore_o=None):
             # No tag, no uuid, add everything.
             for watch_uuid, watch in datastore.data['watching'].items():
                 if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
+                    if with_errors and not watch.get('last_error'):
+                        continue
+
                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
                     i += 1

         flash("{} watches queued for rechecking.".format(i))
         return redirect(url_for('index', tag=tag))
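Together with the template changes further down, this lets the "Recheck all" button requeue only the failed watches when the overview is already filtered by `?with_errors=1`. A hedged usage sketch in the style of the project's tests (the endpoint name `form_watch_checknow` is taken from the template hunk below; the rest is illustrative):

```python
from flask import url_for

def recheck_only_errored(client):
    # Queue a recheck for only the watches whose last fetch failed,
    # mirroring the "Recheck all" button when the error filter is active.
    res = client.get(
        url_for("form_watch_checknow", with_errors=1),
        follow_redirects=True
    )
    # The handler flashes "<n> watches queued for rechecking."
    assert b"queued for rechecking" in res.data
```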
@@ -77,13 +77,13 @@ class steppable_browser_interface():
     def action_goto_url(self, selector=None, value=None):
         # self.page.set_viewport_size({"width": 1280, "height": 5000})
         now = time.time()
-        response = self.page.goto(value, timeout=0, wait_until='commit')
-
-        # Wait_until = commit
-        # - `'commit'` - consider operation to be finished when network response is received and the document started loading.
-        # Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
-        # This seemed to solve nearly all 'TimeoutErrors'
+        response = self.page.goto(value, timeout=0, wait_until='load')
+        # Should be the same as the puppeteer_fetch.js methods, means, load with no timeout set (skip timeout)
+        #and also wait for seconds ?
+        #await page.waitForTimeout(1000);
+        #await page.waitForTimeout(extra_wait_ms);
         print("Time to goto URL ", time.time() - now)
         return response

     def action_click_element_containing_text(self, selector=None, value=''):
         if not len(value.strip()):
@@ -99,7 +99,8 @@ class steppable_browser_interface():
         self.page.fill(selector, value, timeout=10 * 1000)

     def action_execute_js(self, selector, value):
-        self.page.evaluate(value)
+        response = self.page.evaluate(value)
+        return response

     def action_click_element(self, selector, value):
         print("Clicking element")
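Returning the result of `page.evaluate()` matters because Playwright resolves the evaluated JavaScript expression back into a Python value, so a browser step (or the fetcher's `webdriver_js_execute_code` path below) can act on what the injected script produced. A small sketch against the Playwright sync API (the URL is just an example):

```python
from playwright.sync_api import sync_playwright

with sync_playwright() as p:
    browser = p.chromium.launch()
    page = browser.new_page()
    page.goto("https://example.com", wait_until='load')

    # evaluate() returns the JS expression's value back to Python
    title = page.evaluate("() => document.title")
    link_count = page.evaluate("() => document.querySelectorAll('a').length")
    print(title, link_count)

    browser.close()
```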
@@ -159,6 +159,16 @@ class Fetcher():
         """
         return {k.lower(): v for k, v in self.headers.items()}

+    def browser_steps_get_valid_steps(self):
+        if self.browser_steps is not None and len(self.browser_steps):
+            valid_steps = filter(
+                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
+                self.browser_steps)
+
+            return valid_steps
+
+        return None
+
     def iterate_browser_steps(self):
         from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
         from playwright._impl._api_types import TimeoutError
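`browser_steps_get_valid_steps()` centralises the filtering that `iterate_browser_steps()` previously did inline (next hunk), so the fetcher can first ask whether there are any real steps to run before executing them. A standalone sketch of the same filter over an invented step list:

```python
# Hypothetical step list in the shape the UI submits: 'Choose one' placeholders
# and the implicit 'Goto site' step should not count as runnable steps.
browser_steps = [
    {'operation': 'Goto site', 'selector': '', 'optional_value': ''},
    {'operation': 'Click element', 'selector': 'button[name=test-button]', 'optional_value': ''},
    {'operation': 'Choose one', 'selector': '', 'optional_value': ''},
    {'operation': '', 'selector': '', 'optional_value': ''},
]

valid_steps = [
    s for s in browser_steps
    if s['operation'] and s['operation'] not in ('Choose one', 'Goto site')
]
print(valid_steps)  # only the 'Click element' step survives
```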
@@ -170,10 +180,7 @@ class Fetcher():
         if self.browser_steps is not None and len(self.browser_steps):
             interface = steppable_browser_interface()
             interface.page = self.page
-            valid_steps = filter(
-                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
-                self.browser_steps)
+            valid_steps = self.browser_steps_get_valid_steps()

             for step in valid_steps:
                 step_n += 1
@@ -326,9 +333,8 @@ class base_html_playwright(Fetcher):
             # Remove username/password if it exists in the URL or you will receive "ERR_NO_SUPPORTED_PROXIES" error
             # Actual authentication handled by Puppeteer/node
             o = urlparse(self.proxy.get('server'))
-            # Remove scheme, socks5:// doesnt always work and it will autodetect anyway
-            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl().replace(f"{o.scheme}://", '', 1))
-            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}&dumpio=true"
+            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl())
+            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}"

             try:
                 amp = '&' if '?' in browserless_function_url else '?'
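Both variants strip credentials from the proxy URL by rebuilding `netloc` as plain `host:port` before quoting the result into the query string; only the scheme-stripping and the `dumpio=true` debug flag differ between them. A small sketch of the `urlparse`/`_replace` credential-stripping idea (the proxy address is invented):

```python
import urllib.parse
from urllib.parse import urlparse

proxy_server = "socks5://user:secret@proxy.example.com:1080"

o = urlparse(proxy_server)
# Rebuild netloc without the user:password@ part, keeping only host and port
stripped = o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl()
print(stripped)                      # socks5://proxy.example.com:1080
print(urllib.parse.quote(stripped))  # percent-encoded so it can ride in a query string
```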
@@ -464,39 +470,26 @@ class base_html_playwright(Fetcher):
             if len(request_headers):
                 context.set_extra_http_headers(request_headers)

-            self.page.set_default_navigation_timeout(90000)
-            self.page.set_default_timeout(90000)
-            # Listen for all console events and handle errors
-            self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))

+            # Listen for all console events and handle errors
+            self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
+            # Re-use as much code from browser steps as possible so its the same
+            from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
+            browsersteps_interface = steppable_browser_interface()
+            browsersteps_interface.page = self.page

-            # Goto page
-            try:
-                # Wait_until = commit
-                # - `'commit'` - consider operation to be finished when network response is received and the document started loading.
-                # Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
-                # This seemed to solve nearly all 'TimeoutErrors'
-                response = self.page.goto(url, wait_until='commit')
-            except playwright._impl._api_types.Error as e:
-                # Retry once - https://github.com/browserless/chrome/issues/2485
-                # Sometimes errors related to invalid cert's and other can be random
-                print("Content Fetcher > retrying request got error - ", str(e))
-                time.sleep(1)
-                response = self.page.goto(url, wait_until='commit')
-            except Exception as e:
-                print("Content Fetcher > Other exception when page.goto", str(e))
+            response = browsersteps_interface.action_goto_url(value=url)
+            self.headers = response.all_headers()

+            if response is None:
                 context.close()
                 browser.close()
-                raise PageUnloadable(url=url, status_code=None, message=str(e))
+                print("Content Fetcher > Response object was none")
+                raise EmptyReply(url=url, status_code=None)

             # Execute any browser steps
             try:
                 extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
                 self.page.wait_for_timeout(extra_wait * 1000)

                 if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code):
-                    self.page.evaluate(self.webdriver_js_execute_code)
+                    browsersteps_interface.action_execute_js(value=self.webdriver_js_execute_code, selector=None)
             except playwright._impl._api_types.TimeoutError as e:
                 context.close()
                 browser.close()
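The fetch path now delegates navigation to the same `steppable_browser_interface.action_goto_url()` that Browser Steps uses, so both code paths share one navigation policy (`timeout=0`, `wait_until='load'`). A rough sketch of that shared-helper pattern against a remote Chrome (the CDP endpoint is an assumption; the browserless container in the workflow above publishes port 3000):

```python
from playwright.sync_api import sync_playwright

class NavigationHelper:
    """Single place that decides how page navigation is performed."""
    def __init__(self, page):
        self.page = page

    def goto(self, url):
        # timeout=0 disables Playwright's navigation timeout; 'load' waits for the load event
        return self.page.goto(url, timeout=0, wait_until='load')

with sync_playwright() as p:
    # Assumed browserless-style endpoint; adjust to your environment
    browser = p.chromium.connect_over_cdp("ws://localhost:3000")
    page = browser.new_page()

    nav = NavigationHelper(page)
    response = nav.goto("https://example.com")
    print(response.status, len(page.content()))
    browser.close()
```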
@@ -508,28 +501,26 @@ class base_html_playwright(Fetcher):
                 browser.close()
                 raise PageUnloadable(url=url, status_code=None, message=str(e))

-            if response is None:
-                context.close()
-                browser.close()
-                print("Content Fetcher > Response object was none")
-                raise EmptyReply(url=url, status_code=None)
-
-            # Run Browser Steps here
-            self.iterate_browser_steps()
-
             extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
-            time.sleep(extra_wait)
+            self.page.wait_for_timeout(extra_wait * 1000)

-            self.content = self.page.content()
             self.status_code = response.status

             if self.status_code != 200 and not ignore_status_codes:
                 raise Non200ErrorCodeReceived(url=url, status_code=self.status_code)

             if len(self.page.content().strip()) == 0:
                 context.close()
                 browser.close()
                 print("Content Fetcher > Content was empty")
                 raise EmptyReply(url=url, status_code=response.status)

-            self.status_code = response.status
-            self.headers = response.all_headers()
+            # Run Browser Steps here
+            if self.browser_steps_get_valid_steps():
+                self.iterate_browser_steps()
+
+                self.page.wait_for_timeout(extra_wait * 1000)

             # So we can find an element on the page where its selector was entered manually (maybe not xPath etc)
             if current_include_filters is not None:
@@ -541,6 +532,7 @@ class base_html_playwright(Fetcher):
                 "async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
             self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}")

+            self.content = self.page.content()
             # Bug 3 in Playwright screenshot handling
             # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
             # JPEG is better here because the screenshots can be very very large
@@ -555,7 +547,7 @@ class base_html_playwright(Fetcher):
             except Exception as e:
                 context.close()
                 browser.close()
-                raise ScreenshotUnavailable(url=url, status_code=None)
+                raise ScreenshotUnavailable(url=url, status_code=response.status_code)

             context.close()
             browser.close()
@@ -614,14 +606,17 @@ class base_html_webdriver(Fetcher):
             is_binary=False):

         from selenium import webdriver
-        from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
+        from selenium.webdriver.chrome.options import Options as ChromeOptions
         from selenium.common.exceptions import WebDriverException
         # request_body, request_method unused for now, until some magic in the future happens.

+        options = ChromeOptions()
+        if self.proxy:
+            options.proxy = self.proxy
+
         self.driver = webdriver.Remote(
             command_executor=self.command_executor,
-            desired_capabilities=DesiredCapabilities.CHROME,
-            proxy=self.proxy)
+            options=options)

         try:
             self.driver.get(url)
@@ -653,11 +648,11 @@ class base_html_webdriver(Fetcher):
     # Does the connection to the webdriver work? run a test connection.
     def is_ready(self):
         from selenium import webdriver
-        from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
+        from selenium.webdriver.chrome.options import Options as ChromeOptions

         self.driver = webdriver.Remote(
             command_executor=self.command_executor,
-            desired_capabilities=DesiredCapabilities.CHROME)
+            options=ChromeOptions())

         # driver.quit() seems to cause better exceptions
         self.quit()
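Newer Selenium 4 releases (the requirements.txt hunk at the bottom of this compare bumps to `selenium~=4.14.0`) dropped the `desired_capabilities` and `proxy` keyword arguments from `webdriver.Remote`; capabilities and proxy settings now travel inside an options object. A hedged sketch of the modern call (the Grid URL and proxy address are placeholders):

```python
from selenium import webdriver
from selenium.webdriver.chrome.options import Options as ChromeOptions
from selenium.webdriver.common.proxy import Proxy, ProxyType

options = ChromeOptions()

# Optional proxy: attach it to the options object instead of passing proxy=...
options.proxy = Proxy({'proxyType': ProxyType.MANUAL, 'httpProxy': 'proxy.example.com:3128'})

driver = webdriver.Remote(
    command_executor='http://localhost:4444',   # placeholder Selenium Grid / standalone-chrome URL
    options=options,
)
try:
    driver.get('https://example.com')
    print(driver.title)
finally:
    driver.quit()
```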
@@ -22,7 +22,8 @@ from wtforms.validators import ValidationError
 # each select <option data-enabled="enabled-0-0"
 from changedetectionio.blueprint.browser_steps.browser_steps import browser_step_ui_config

-from changedetectionio import content_fetcher
+from changedetectionio import content_fetcher, html_tools
+
 from changedetectionio.notification import (
     valid_notification_formats,
 )
@@ -284,11 +285,10 @@ class ValidateListRegex(object):
     def __call__(self, form, field):

         for line in field.data:
-            if line[0] == '/' and line[-1] == '/':
-                # Because internally we dont wrap in /
-                line = line.strip('/')
+            if re.search(html_tools.PERL_STYLE_REGEX, line, re.IGNORECASE):
                 try:
-                    re.compile(line)
+                    regex = html_tools.perl_style_slash_enclosed_regex_to_options(line)
+                    re.compile(regex)
                 except re.error:
                     message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
                     raise ValidationError(message % (line))
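The validator now recognises the full `/pattern/flags` form instead of merely stripping the slashes, so a malformed expression such as the one in the new test below is rejected with a proper form error. A rough sketch of what a slash-enclosed-regex conversion can look like (this is an illustration, not the body of `html_tools.perl_style_slash_enclosed_regex_to_options`):

```python
import re

def perl_style_to_python(line: str) -> str:
    """Illustrative only: turn '/pattern/flags' into an inline-flag Python regex such as '(?i)pattern'."""
    m = re.match(r'^/(?P<pattern>.*)/(?P<flags>[a-zA-Z]*)$', line, re.DOTALL)
    if not m:
        return line  # not slash-enclosed, leave untouched
    # Unknown flag letters are deliberately kept so that re.compile() raises re.error,
    # which is what lets a form validator surface "is not a valid regular expression."
    flags = m.group('flags').lower()
    return (f"(?{flags})" if flags else "") + m.group('pattern')

print(re.compile(perl_style_to_python(r'/price: \d+/i')))  # compiles with IGNORECASE

try:
    re.compile(perl_style_to_python(r'/buggy/q'))  # 'q' is not a valid re flag
except re.error as exc:
    print("rejected:", exc)
```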
@@ -85,6 +85,7 @@
                 <a href="{{url_for('logout')}}" class="pure-menu-link">LOG OUT</a>
             </li>
             {% endif %}
+            {% if current_user.is_authenticated or not has_password %}
             <li class="pure-menu-item pure-form" id="search-menu-item">
                 <!-- We use GET here so it offers people a chance to set bookmarks etc -->
                 <form name="searchForm" action="" method="GET">
@@ -95,6 +96,7 @@
                     </button>
                 </form>
             </li>
+            {% endif %}
             <li class="pure-menu-item">
                 <button class="toggle-button" id ="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
                     <span class="visually-hidden">Toggle light/dark mode</span>
@@ -455,15 +455,15 @@ Unavailable") }}
                 <tbody>
                     <tr>
                         <td>Check count</td>
-                        <td>{{ watch.check_count }}</td>
+                        <td>{{ "{:,}".format( watch.check_count) }}</td>
                     </tr>
                     <tr>
                         <td>Consecutive filter failures</td>
-                        <td>{{ watch.consecutive_filter_failures }}</td>
+                        <td>{{ "{:,}".format( watch.consecutive_filter_failures) }}</td>
                     </tr>
                     <tr>
                         <td>History length</td>
-                        <td>{{ watch.history|length }}</td>
+                        <td>{{ "{:,}".format(watch.history|length) }}</td>
                     </tr>
                     <tr>
                         <td>Last fetch time</td>
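The `{:,}` format spec simply adds thousands separators, which keeps large counters readable in the stats table:

```python
check_count = 1234567
print("{:,}".format(check_count))  # 1,234,567
```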
@@ -178,13 +178,18 @@
             </tbody>
         </table>
         <ul id="post-list-buttons">
+            {% if errored_count %}
+            <li>
+                <a href="{{url_for('index', with_errors=1, tag=request.args.get('tag')) }}" class="pure-button button-tag button-error ">With errors ({{ errored_count }})</a>
+            </li>
+            {% endif %}
             {% if has_unviewed %}
             <li>
-                <a href="{{url_for('mark_all_viewed', tag=request.args.get('tag')) }}" class="pure-button button-tag ">Mark all viewed</a>
+                <a href="{{url_for('mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Mark all viewed</a>
             </li>
             {% endif %}
             <li>
-                <a href="{{ url_for('form_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
+                <a href="{{ url_for('form_watch_checknow', tag=active_tag, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck
                 all {% if active_tag%} in "{{tags[active_tag].title}}"{%endif%}</a>
             </li>
             <li>
@@ -202,3 +202,35 @@ def test_check_filter_and_regex_extract(client, live_server):

     # Should not be here
     assert b'Some text that did change' not in res.data
+
+
+def test_regex_error_handling(client, live_server):
+
+    #live_server_setup(live_server)
+
+    # Add our URL to the import page
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url},
+        follow_redirects=True
+    )
+    assert b"1 Imported" in res.data
+
+    ### test regex error handling
+    res = client.post(
+        url_for("edit_page", uuid="first"),
+        data={"extract_text": '/something bad\d{3/XYZ',
+              "url": test_url,
+              "fetch_backend": "html_requests"},
+        follow_redirects=True
+    )
+
+    with open('/tmp/fuck.html', 'wb') as f:
+        f.write(res.data)
+
+    assert b'is not a valid regular expression.' in res.data
+
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
@@ -1,18 +1,19 @@
 #!/usr/bin/python3

 import time
+import os
 from flask import url_for
 from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client

+def test_setup(client, live_server):
+    live_server_setup(live_server)
+
 # Add a site in paused mode, add an invalid filter, we should still have visual selector data ready
 def test_visual_selector_content_ready(client, live_server):
     import os
     import json

     assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
     time.sleep(1)
-    live_server_setup(live_server)

     # Add our URL to the import page, because the docker container (playwright/selenium) wont be able to connect to our usual test url
     test_url = "https://changedetection.io/ci-test/test-runjs.html"
@@ -60,4 +61,75 @@ def test_visual_selector_content_ready(client, live_server):
         follow_redirects=True
     )
     assert b'notification_screenshot' in res.data
+    client.get(
+        url_for("form_delete", uuid="all"),
+        follow_redirects=True
+    )
+
+
+def test_basic_browserstep(client, live_server):
+
+    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
+    #live_server_setup(live_server)
+
+    # Add our URL to the import page, because the docker container (playwright/selenium) wont be able to connect to our usual test url
+    test_url = "https://changedetection.io/ci-test/test-runjs.html"
+
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
+        follow_redirects=True
+    )
+    assert b"Watch added in Paused state, saving will unpause" in res.data
+
+    res = client.post(
+        url_for("edit_page", uuid="first", unpause_on_save=1),
+        data={
+              "url": test_url,
+              "tags": "",
+              "headers": "",
+              'fetch_backend': "html_webdriver",
+              'browser_steps-0-operation': 'Goto site',
+              'browser_steps-1-operation': 'Click element',
+              'browser_steps-1-selector': 'button[name=test-button]',
+              'browser_steps-1-optional_value': ''
+        },
+        follow_redirects=True
+    )
+    assert b"unpaused" in res.data
+    wait_for_all_checks(client)
+
+    uuid = extract_UUID_from_client(client)
+
+    # Check HTML conversion detected and workd
+    res = client.get(
+        url_for("preview_page", uuid=uuid),
+        follow_redirects=True
+    )
+    assert b"This text should be removed" not in res.data
+    assert b"I smell JavaScript because the button was pressed" in res.data
+
+    # now test for 404 errors
+    res = client.post(
+        url_for("edit_page", uuid=uuid, unpause_on_save=1),
+        data={
+              "url": "https://changedetection.io/404",
+              "tags": "",
+              "headers": "",
+              'fetch_backend': "html_webdriver",
+              'browser_steps-0-operation': 'Goto site',
+              'browser_steps-1-operation': 'Click element',
+              'browser_steps-1-selector': 'button[name=test-button]',
+              'browser_steps-1-optional_value': ''
+        },
+        follow_redirects=True
+    )
+    assert b"unpaused" in res.data
+    wait_for_all_checks(client)
+
+    res = client.get(url_for("index"))
+    assert b'Error - 404' in res.data
+
+    client.get(
+        url_for("form_delete", uuid="all"),
+        follow_redirects=True
+    )
@@ -68,7 +68,7 @@ services:

 #    browser-chrome:
 #        hostname: browser-chrome
-#        image: selenium/standalone-chrome-debug:3.141.59
+#        image: selenium/standalone-chrome:4
 #        environment:
 #            - VNC_NO_PASSWORD=1
 #            - SCREEN_WIDTH=1920
@@ -49,8 +49,7 @@ beautifulsoup4
 # XPath filtering, lxml is required by bs4 anyway, but put it here to be safe.
 lxml

-# 3.141 was missing socksVersion, 3.150 was not in pypi, so we try 4.1.0
-selenium~=4.1.0
+selenium~=4.14.0

 # https://stackoverflow.com/questions/71652965/importerror-cannot-import-name-safe-str-cmp-from-werkzeug-security/71653849#71653849
 # ImportError: cannot import name 'safe_str_cmp' from 'werkzeug.security'