diff --git a/CTFd/admin/__init__.py b/CTFd/admin/__init__.py
index 3149f3c..bf57052 100644
--- a/CTFd/admin/__init__.py
+++ b/CTFd/admin/__init__.py
@@ -1,13 +1,15 @@
 import hashlib
 import json
 import os
+import datetime
 
 from flask import current_app as app, render_template, request, redirect, jsonify, url_for, Blueprint, \
-    abort, render_template_string
+    abort, render_template_string, send_file
 from passlib.hash import bcrypt_sha256
 from sqlalchemy.sql import not_
+from sqlalchemy.exc import IntegrityError
 
-from CTFd.utils import admins_only, is_admin, cache
+from CTFd.utils import admins_only, is_admin, cache, export_ctf, import_ctf
 from CTFd.models import db, Teams, Solves, Awards, Containers, Challenges, WrongKeys, Keys, Tags, Files, Tracking, Pages, Config, DatabaseError
 from CTFd.scoreboard import get_standings
 from CTFd.plugins.keys import get_key_class, KEY_CLASSES
@@ -48,6 +50,44 @@ def admin_plugin_config(plugin):
         return '1'
 
 
+@admin.route('/admin/import', methods=['GET', 'POST'])
+@admins_only
+def admin_import_ctf():
+    backup = request.files['backup']
+    segments = request.form.get('segments')
+    errors = []
+    try:
+        if segments:
+            import_ctf(backup, segments=segments.split(','))
+        else:
+            import_ctf(backup)
+    except TypeError:
+        errors.append('The backup file is invalid')
+    except IntegrityError as e:
+        errors.append(e.message)
+    except Exception as e:
+        errors.append(type(e).__name__)
+
+    if errors:
+        return errors[0], 500
+    else:
+        return redirect(url_for('admin.admin_config'))
+
+
+@admin.route('/admin/export', methods=['GET', 'POST'])
+@admins_only
+def admin_export_ctf():
+    segments = request.args.get('segments')
+    if segments:
+        backup = export_ctf(segments.split(','))
+    else:
+        backup = export_ctf()
+    ctf_name = utils.ctf_name()
+    day = datetime.datetime.now().strftime("%Y-%m-%d")
+    full_name = "{}.{}.zip".format(ctf_name, day)
+    return send_file(backup, as_attachment=True, attachment_filename=full_name)
+
+
 @admin.route('/admin/config', methods=['GET', 'POST'])
 @admins_only
 def admin_config():
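Reviewer note: the quickest way to exercise admin_export_ctf() outside the UI is a plain HTTP request. The sketch below is illustrative and not part of the patch; the base URL, the session cookie value, and the output filename are assumptions, and the session must belong to an admin because of the @admins_only decorator.

    # Sketch only: base URL and cookie value are placeholders, not part of this patch.
    import requests

    BASE = 'http://localhost:4000'                    # assumed local CTFd instance
    COOKIES = {'session': '<admin session cookie>'}   # must be an admin session

    # Ask for a backup limited to the challenge tables; omitting 'segments'
    # exports every group, mirroring the default in admin_export_ctf().
    resp = requests.get(BASE + '/admin/export',
                        params={'segments': 'challenges'},
                        cookies=COOKIES)
    resp.raise_for_status()

    # The route names the attachment "<ctf name>.<YYYY-MM-DD>.zip"; save it locally.
    with open('backup.zip', 'wb') as f:
        f.write(resp.content)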
diff --git a/CTFd/templates/admin/config.html b/CTFd/templates/admin/config.html
index 9eaa4c0..0288472 100644
--- a/CTFd/templates/admin/config.html
+++ b/CTFd/templates/admin/config.html
[markup hunks @@ -19,6 +19,9 @@, @@ -32,7 +35,7 @@ and @@ -171,7 +174,6 @@ plus the new tab-pane markup: a "Backup" tab is added to the config page navigation next to "CTF Time", with Export and Import panels containing per-segment checkboxes (.export-config / .import-config), an Export link (#export-button), a file upload input (#import-file) and an Import button (#import-button).]
@@ -475,6 +552,53 @@
         load_date_values('freeze');
     });
 
+    $('#export-button').click(function(e){
+        e.preventDefault();
+        var segments = [];
+        $.each($('.export-config:checked'), function(key, value){
+            segments.push($(value).val());
+        });
+        segments = segments.join(',');
+        var href = script_root + '/admin/export';
+        $('#export-button').attr('href', href+'?segments='+segments);
+        window.location.href = $('#export-button').attr('href');
+    });
+
+    $('#import-button').click(function(e){
+        e.preventDefault();
+        var segments = [];
+        $.each($('.import-config:checked'), function(key, value){
+            segments.push($(value).val());
+        });
+        segments = segments.join(',');
+        console.log(segments);
+
+        var import_file = document.getElementById('import-file').files[0];
+        var nonce = $('#nonce').val();
+
+        var form_data = new FormData();
+        form_data.append('segments', segments);
+        form_data.append('backup', import_file);
+        form_data.append('nonce', nonce);
+
+        $.ajax({
+            url : script_root + '/admin/import',
+            type : 'POST',
+            data : form_data,
+            processData: false,
+            contentType: false,
+            statusCode: {
+                500: function(resp) {
+                    console.log(resp.responseText);
+                    alert(resp.responseText);
+                }
+            },
+            success : function(data) {
+                window.location.reload()
+            }
+        });
+    });
+
     $(function () {
         var hash = window.location.hash;
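The import handler on the JavaScript side simply posts a multipart form with the three fields admin_import_ctf() reads: 'backup', 'segments' and 'nonce'. For scripted restores, a rough Python equivalent of that AJAX call might look like the sketch below; it is not part of the patch, and the base URL, cookie, and nonce values are assumptions that have to come from a logged-in admin session.

    # Sketch only: mirrors the FormData built in config.html above.
    import requests

    BASE = 'http://localhost:4000'
    COOKIES = {'session': '<admin session cookie>'}

    with open('backup.zip', 'rb') as backup:
        resp = requests.post(
            BASE + '/admin/import',
            cookies=COOKIES,
            data={'segments': 'challenges,teams', 'nonce': '<csrf nonce>'},
            files={'backup': backup},   # field name matches request.files['backup']
        )

    # On failure admin_import_ctf() returns the first error message with HTTP 500;
    # on success it redirects to /admin/config.
    print(resp.status_code, resp.text)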
diff --git a/CTFd/utils.py b/CTFd/utils.py
index 2f0043a..4cf7906 100644
--- a/CTFd/utils.py
+++ b/CTFd/utils.py
@@ -16,6 +16,9 @@ import sys
 import tempfile
 import time
 import urllib
+import dataset
+import zipfile
+import io
 
 from flask import current_app as app, request, redirect, url_for, session, render_template, abort
 from flask_caching import Cache
@@ -633,3 +636,157 @@ def container_ports(name, verbose=False):
             return ports
     except subprocess.CalledProcessError:
         return []
+
+
+def export_ctf(segments=None):
+    db = dataset.connect(get_config('SQLALCHEMY_DATABASE_URI'))
+    if segments is None:
+        segments = ['challenges', 'teams', 'both', 'metadata']
+
+    groups = {
+        'challenges': [
+            'challenges',
+            'files',
+            'tags',
+            'keys',
+            'hints',
+        ],
+        'teams': [
+            'teams',
+            'tracking',
+            'awards',
+        ],
+        'both': [
+            'solves',
+            'wrong_keys',
+            'unlocks',
+        ],
+        'metadata': [
+            'alembic_version',
+            'config',
+            'pages',
+            'containers',
+        ]
+    }
+
+    ## Backup database
+    backup = io.BytesIO()
+    backup_zip = zipfile.ZipFile(backup, 'w')
+
+    for segment in segments:
+        group = groups[segment]
+        for item in group:
+            result = db[item].all()
+            result_file = io.BytesIO()
+            dataset.freeze(result, format='json', fileobj=result_file)
+            result_file.seek(0)
+            backup_zip.writestr('db/{}.json'.format(item), result_file.read())
+
+    ## Backup uploads
+    upload_folder = os.path.join(os.path.normpath(app.root_path), get_config('UPLOAD_FOLDER'))
+    for root, dirs, files in os.walk(upload_folder):
+        for file in files:
+            parent_dir = os.path.basename(root)
+            backup_zip.write(os.path.join(root, file), arcname=os.path.join('uploads', parent_dir, file))
+
+    backup_zip.close()
+    backup.seek(0)
+    return backup
+
+
+def import_ctf(backup, segments=None, erase=False):
+    side_db = dataset.connect(get_config('SQLALCHEMY_DATABASE_URI'))
+    if segments is None:
+        segments = ['challenges', 'teams', 'both', 'metadata']
+
+    if not zipfile.is_zipfile(backup):
+        raise TypeError
+
+    backup = zipfile.ZipFile(backup)
+
+    groups = {
+        'challenges': [
+            'challenges',
+            'files',
+            'tags',
+            'keys',
+            'hints',
+        ],
+        'teams': [
+            'teams',
+            'tracking',
+            'awards',
+        ],
+        'both': [
+            'solves',
+            'wrong_keys',
+            'unlocks',
+        ],
+        'metadata': [
+            'alembic_version',
+            'config',
+            'pages',
+            'containers',
+        ]
+    }
+
+    ## Need special handling of metadata
+    if 'metadata' in segments:
+        meta = groups['metadata']
+        segments.remove('metadata')
+        meta.remove('alembic_version')
+
+        for item in meta:
+            table = side_db[item]
+            path = "db/{}.json".format(item)
+            data = backup.open(path).read()
+
+            ## Some JSON files will be empty
+            if data:
+                if item == 'config':
+                    saved = json.loads(data)
+                    for entry in saved['results']:
+                        key = entry['key']
+                        value = entry['value']
+                        set_config(key, value)
+
+                elif item == 'pages':
+                    saved = json.loads(data)
+                    for entry in saved['results']:
+                        route = entry['route']
+                        html = entry['html']
+                        page = Pages.query.filter_by(route=route).first()
+                        if page:
+                            page.html = html
+                        else:
+                            page = Pages(route, html)
+                            db.session.add(page)
+                        db.session.commit()
+
+                elif item == 'containers':
+                    saved = json.loads(data)
+                    for entry in saved['results']:
+                        name = entry['name']
+                        buildfile = entry['buildfile']
+                        container = Containers.query.filter_by(name=name).first()
+                        if container:
+                            container.buildfile = buildfile
+                        else:
+                            container = Containers(name, buildfile)
+                            db.session.add(container)
+                        db.session.commit()
+
+
+    for segment in segments:
+        group = groups[segment]
+        for item in group:
+            table = side_db[item]
+            path = "db/{}.json".format(item)
+            data = backup.open(path).read()
+            if data:
+                saved = json.loads(data)
+                for entry in saved['results']:
+                    entry_id = entry.pop('id', None)
+                    table.insert(entry)
+            else:
+                continue
diff --git a/requirements.txt b/requirements.txt
index b021f69..0f7ff30 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,3 +12,4 @@ itsdangerous==0.24
 requests==2.13.0
 PyMySQL==0.7.10
 gunicorn==19.7.0
+dataset==0.8.0
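As a sanity check on the helpers themselves, export_ctf() and import_ctf() can be round-tripped directly. The sketch below is illustrative only; it assumes CTFd's create_app() factory, an application context, and a disposable database. The archive that export_ctf() returns holds one db/<table>.json file per exported table, with rows under a top-level 'results' key (the dataset.freeze JSON layout that import_ctf() reads back), plus the upload folder contents under uploads/.

    # Sketch only: assumes a throwaway CTFd instance, since import_ctf() inserts rows as-is.
    import zipfile

    from CTFd import create_app
    from CTFd.utils import export_ctf, import_ctf

    app = create_app()
    with app.app_context():
        backup = export_ctf(segments=['challenges'])   # in-memory zip (io.BytesIO)
        with zipfile.ZipFile(backup) as archive:
            print(archive.namelist())                  # db/challenges.json, db/keys.json, ...
        backup.seek(0)

        # Anything that is not a zip raises TypeError, which the admin route
        # reports as "The backup file is invalid".
        import_ctf(backup, segments=['challenges'])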