Backups - Backups now operate in the background, provide a nice UI to access/download previous backups (#2755)
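In practice this means the backup zip is now built by a background thread and the results are managed from a small /backups page. A minimal sketch of driving the new endpoints over HTTP follows (the base URL, the sleep, and the assumption that no password/login is configured are illustrative only, not part of this change):

    # Sketch only: assumes an instance at http://localhost:5000 with no login required,
    # and the blueprint mounted under /backups as registered in this commit.
    import time
    import requests

    BASE = "http://localhost:5000"

    requests.get(f"{BASE}/backups/request-backup")              # starts the zip thread in the background
    time.sleep(5)                                               # give the thread a moment to finish

    print(requests.get(f"{BASE}/backups/").text)                # overview page lists finished archives

    latest = requests.get(f"{BASE}/backups/download/latest")    # 'latest' resolves to the newest zip
    with open("changedetection-backup-latest.zip", "wb") as f:
        f.write(latest.content)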
changedetectionio/blueprint/backups/__init__.py (new file, 161 lines)
@@ -0,0 +1,161 @@
import datetime
import glob
import threading

from flask import Blueprint, render_template, send_from_directory, flash, url_for, redirect, abort
import os

from changedetectionio.store import ChangeDetectionStore
from changedetectionio.flask_app import login_optionally_required
from loguru import logger

BACKUP_FILENAME_FORMAT = "changedetection-backup-{}.zip"


def create_backup(datastore_path, watches: dict):
    logger.debug("Creating backup...")
    import zipfile
    from pathlib import Path

    # create a ZipFile object
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backupname = BACKUP_FILENAME_FORMAT.format(timestamp)
    backup_filepath = os.path.join(datastore_path, backupname)

    with zipfile.ZipFile(backup_filepath, "w",
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=8) as zipObj:

        # Add the index
        zipObj.write(os.path.join(datastore_path, "url-watches.json"), arcname="url-watches.json")

        # Add the flask app secret
        zipObj.write(os.path.join(datastore_path, "secret.txt"), arcname="secret.txt")

        # Add any data in the watch data directory.
        for uuid, w in watches.items():
            for f in Path(w.watch_data_dir).glob('*'):
                zipObj.write(f,
                             # Use the full path to access the file, but make the file 'relative' in the Zip.
                             arcname=os.path.join(f.parts[-2], f.parts[-1]),
                             compress_type=zipfile.ZIP_DEFLATED,
                             compresslevel=8)

        # Create a list file with just the URLs, so it's easier to port somewhere else in the future
        list_file = "url-list.txt"
        with open(os.path.join(datastore_path, list_file), "w") as f:
            for uuid in watches:
                url = watches[uuid]["url"]
                f.write("{}\r\n".format(url))
        list_with_tags_file = "url-list-with-tags.txt"
        with open(
            os.path.join(datastore_path, list_with_tags_file), "w"
        ) as f:
            for uuid in watches:
                url = watches[uuid].get('url')
                tag = watches[uuid].get('tags', {})
                f.write("{} {}\r\n".format(url, tag))

        # Add it to the Zip
        zipObj.write(
            os.path.join(datastore_path, list_file),
            arcname=list_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )
        zipObj.write(
            os.path.join(datastore_path, list_with_tags_file),
            arcname=list_with_tags_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )


def construct_blueprint(datastore: ChangeDetectionStore):
    backups_blueprint = Blueprint('backups', __name__, template_folder="templates")
    backup_threads = []

    @login_optionally_required
    @backups_blueprint.route("/request-backup", methods=['GET'])
    def request_backup():
        if any(thread.is_alive() for thread in backup_threads):
            flash("A backup is already running, check back in a few minutes", "error")
            return redirect(url_for('backups.index'))

        if len(find_backups()) > int(os.getenv("MAX_NUMBER_BACKUPS", 100)):
            flash("Maximum number of backups reached, please remove some", "error")
            return redirect(url_for('backups.index'))

        # Be sure we're written fresh
        datastore.sync_to_json()
        zip_thread = threading.Thread(target=create_backup, args=(datastore.datastore_path, datastore.data.get("watching")))
        zip_thread.start()
        backup_threads.append(zip_thread)
        flash("Backup building in background, check back in a few minutes.")

        return redirect(url_for('backups.index'))

    def find_backups():
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        backup_info = []

        for backup in backups:
            size = os.path.getsize(backup) / (1024 * 1024)
            creation_time = os.path.getctime(backup)
            backup_info.append({
                'filename': os.path.basename(backup),
                'filesize': f"{size:.2f}",
                'creation_time': creation_time
            })

        backup_info.sort(key=lambda x: x['creation_time'], reverse=True)

        return backup_info

    @login_optionally_required
    @backups_blueprint.route("/download/<string:filename>", methods=['GET'])
    def download_backup(filename):
        import re
        filename = filename.strip()
        backup_filename_regex = BACKUP_FILENAME_FORMAT.format(r"\d+")

        full_path = os.path.join(os.path.abspath(datastore.datastore_path), filename)
        if not full_path.startswith(os.path.abspath(datastore.datastore_path)):
            abort(404)

        if filename == 'latest':
            backups = find_backups()
            filename = backups[0]['filename']

        if not re.match(r"^" + backup_filename_regex + "$", filename):
            abort(400)  # Bad Request if the filename doesn't match the pattern

        logger.debug(f"Backup download request for '{full_path}'")
        return send_from_directory(os.path.abspath(datastore.datastore_path), filename, as_attachment=True)

    @login_optionally_required
    @backups_blueprint.route("/", methods=['GET'])
    def index():
        backups = find_backups()
        output = render_template("overview.html",
                                 available_backups=backups,
                                 backup_running=any(thread.is_alive() for thread in backup_threads)
                                 )

        return output

    @login_optionally_required
    @backups_blueprint.route("/remove-backups", methods=['GET'])
    def remove_backups():

        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        for backup in backups:
            os.unlink(backup)

        flash("Backups were deleted.")

        return redirect(url_for('backups.index'))

    return backups_blueprint

changedetectionio/blueprint/backups/templates/overview.html (new file, 36 lines)
@@ -0,0 +1,36 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_simple_field, render_field %}
<div class="edit-form">
    <div class="box-wrap inner">
        <h4>Backups</h4>
        {% if backup_running %}
        <p>
            <strong>A backup is running!</strong>
        </p>
        {% endif %}
        <p>
            Here you can request a new backup and download previous ones; when a backup has completed it will be listed below.
        </p>
        <br>
        {% if available_backups %}
        <ul>
            {% for backup in available_backups %}
            <li><a href="{{ url_for('backups.download_backup', filename=backup["filename"]) }}">{{ backup["filename"] }}</a> {{ backup["filesize"] }} MB</li>
            {% endfor %}
        </ul>
        {% else %}
        <p>
            <strong>No backups found.</strong>
        </p>
        {% endif %}

        <a class="pure-button pure-button-primary" href="{{ url_for('backups.request_backup') }}">Create backup</a>
        {% if available_backups %}
        <a class="pure-button button-small button-error " href="{{ url_for('backups.remove_backups') }}">Remove backups</a>
        {% endif %}
    </div>
</div>


{% endblock %}

changedetectionio/flask_app.py
@@ -1236,78 +1236,6 @@ def changedetection_app(config=None, datastore_o=None):
         return output
 
-    # We're good but backups are even better!
-    @app.route("/backup", methods=['GET'])
-    @login_optionally_required
-    def get_backup():
-
-        import zipfile
-        from pathlib import Path
-
-        # Remove any existing backup file, for now we just keep one file
-
-        for previous_backup_filename in Path(datastore_o.datastore_path).rglob('changedetection-backup-*.zip'):
-            os.unlink(previous_backup_filename)
-
-        # create a ZipFile object
-        timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
-        backupname = "changedetection-backup-{}.zip".format(timestamp)
-        backup_filepath = os.path.join(datastore_o.datastore_path, backupname)
-
-        with zipfile.ZipFile(backup_filepath, "w",
-                             compression=zipfile.ZIP_DEFLATED,
-                             compresslevel=8) as zipObj:
-
-            # Be sure we're written fresh
-            datastore.sync_to_json()
-
-            # Add the index
-            zipObj.write(os.path.join(datastore_o.datastore_path, "url-watches.json"), arcname="url-watches.json")
-
-            # Add the flask app secret
-            zipObj.write(os.path.join(datastore_o.datastore_path, "secret.txt"), arcname="secret.txt")
-
-            # Add any data in the watch data directory.
-            for uuid, w in datastore.data['watching'].items():
-                for f in Path(w.watch_data_dir).glob('*'):
-                    zipObj.write(f,
-                                 # Use the full path to access the file, but make the file 'relative' in the Zip.
-                                 arcname=os.path.join(f.parts[-2], f.parts[-1]),
-                                 compress_type=zipfile.ZIP_DEFLATED,
-                                 compresslevel=8)
-
-            # Create a list file with just the URLs, so it's easier to port somewhere else in the future
-            list_file = "url-list.txt"
-            with open(os.path.join(datastore_o.datastore_path, list_file), "w") as f:
-                for uuid in datastore.data["watching"]:
-                    url = datastore.data["watching"][uuid]["url"]
-                    f.write("{}\r\n".format(url))
-            list_with_tags_file = "url-list-with-tags.txt"
-            with open(
-                os.path.join(datastore_o.datastore_path, list_with_tags_file), "w"
-            ) as f:
-                for uuid in datastore.data["watching"]:
-                    url = datastore.data["watching"][uuid].get('url')
-                    tag = datastore.data["watching"][uuid].get('tags', {})
-                    f.write("{} {}\r\n".format(url, tag))
-
-            # Add it to the Zip
-            zipObj.write(
-                os.path.join(datastore_o.datastore_path, list_file),
-                arcname=list_file,
-                compress_type=zipfile.ZIP_DEFLATED,
-                compresslevel=8,
-            )
-            zipObj.write(
-                os.path.join(datastore_o.datastore_path, list_with_tags_file),
-                arcname=list_with_tags_file,
-                compress_type=zipfile.ZIP_DEFLATED,
-                compresslevel=8,
-            )
-
-        # Send_from_directory needs to be the full absolute path
-        return send_from_directory(os.path.abspath(datastore_o.datastore_path), backupname, as_attachment=True)
-
     @app.route("/static/<string:group>/<string:filename>", methods=['GET'])
     def static_content(group, filename):
         from flask import make_response

@@ -1687,6 +1615,9 @@ def changedetection_app(config=None, datastore_o=None):
     import changedetectionio.blueprint.check_proxies as check_proxies
     app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy')
 
+    import changedetectionio.blueprint.backups as backups
+    app.register_blueprint(backups.construct_blueprint(datastore), url_prefix='/backups')
+
     # @todo handle ctrl break
     ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()

changedetectionio/templates/base.html
@@ -70,7 +70,7 @@
                     <a href="{{ url_for('import_page')}}" class="pure-menu-link">IMPORT</a>
                 </li>
                 <li class="pure-menu-item">
-                    <a href="{{ url_for('get_backup')}}" class="pure-menu-link">BACKUP</a>
+                    <a href="{{ url_for('backups.index')}}" class="pure-menu-link">BACKUPS</a>
                 </li>
                 {% else %}
                 <li class="pure-menu-item">

changedetectionio/tests/test_backup.py
@@ -26,8 +26,24 @@ def test_backup(client, live_server, measure_memory_usage):
     assert b"1 Imported" in res.data
     wait_for_all_checks(client)
 
+    # Launch the thread in the background to create the backup
     res = client.get(
-        url_for("get_backup"),
+        url_for("backups.request_backup"),
+        follow_redirects=True
+    )
+    time.sleep(2)
+
+    res = client.get(
+        url_for("backups.index"),
+        follow_redirects=True
+    )
+    # Can see the download link to the backup
+    assert b'<a href="/backups/download/changedetection-backup-20' in res.data
+    assert b'Remove backups' in res.data
+
+    # Get the latest one
+    res = client.get(
+        url_for("backups.download_backup", filename="latest"),
         follow_redirects=True
     )
 
@@ -44,3 +60,11 @@ def test_backup(client, live_server, measure_memory_usage):
 
     # Should be two txt files in the archive (history and the snapshot)
     assert len(newlist) == 2
+
+    # Remove all backups and check the listing is empty again
+    res = client.get(
+        url_for("backups.remove_backups"),
+        follow_redirects=True
+    )
+
+    assert b'No backups found.' in res.data
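
A note on using the archives this change produces: the commit covers creating, listing, downloading and deleting backups, but not restoring them. A plausible restore (an assumption, not something specified by this commit) is to stop the application and unpack the zip back into the datastore directory:

    # Assumed restore procedure; the zip name and datastore path are placeholders.
    import zipfile

    with zipfile.ZipFile("changedetection-backup-20240101000000.zip") as z:
        # Puts url-watches.json, secret.txt, url-list*.txt and the per-watch
        # directories back under the datastore path.
        z.extractall("/path/to/datastore")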