Initial commit
commit 8f7a3b6b5d

README.md (new file, +7 lines)
# Monarch Pass Update Wiki Script

This script updates a MediaWiki installation by backing up the source code and database, downloading the new version, overlaying it on the target wiki, and then running the database updater script.

This is not used for containerized installations, which are updated when the container is rebuilt.
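
## Usage

The script takes two arguments: the path to the wiki installation and the MediaWiki version to update to. A typical invocation might look like this (the path and version below are only placeholders, and the script can also be run via `python3` if it is not marked executable):

    ./update-wiki /var/www/wiki 1.41.1

Backups are written to a timestamped `backup-…` directory created in the current working directory.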
## License

GNU GPL v3 or later
update-wiki (new file, +85 lines)
#!/usr/bin/env python3
import sys
import json
import os
from datetime import datetime
from subprocess import check_call, check_output

# Download URL template: the first placeholder is the release branch
# (e.g. "1.41"), the second is the full version (e.g. "1.41.1").
RELEASE_URL = "https://releases.wikimedia.org/mediawiki/{}/mediawiki-{}.tar.gz"

# PHP snippet used to read configuration globals out of LocalSettings.php.
# The stub wfLoad* functions keep the include from pulling in extensions and
# skins; the selected globals are printed as a single JSON object.
PHP_GLOBALS_SCRIPT = """
error_reporting(0);
define("MEDIAWIKI", 1);
$IP = "%s";
function wfLoadExtensions() { }
function wfLoadExtension() { }
function wfLoadSkin() { }
include("$IP/LocalSettings.php");
print json_encode([%s]);
"""

# Require the wiki path and target version on the command line.
if len(sys.argv) < 3:
    print("Please supply the path to the wiki and the MediaWiki version to update to")
    sys.exit(1)


def download_version(version):
    # Derive the release branch (e.g. "1.41" from "1.41.1") and fetch the tarball,
    # skipping the download if it is already present in the working directory.
    release_version = version[:version.rfind(".")]
    url = RELEASE_URL.format(release_version, version)
    filename = url[url.rfind("/") + 1:]
    if not os.path.isfile(filename):
        check_call(["wget", url])

    return filename


def backup_source(wiki_path, backup_directory):
    # Archive the entire wiki tree into <backup_directory>/source.tar.
    check_call(["tar", "-cvf", os.path.join(backup_directory, "source.tar"), wiki_path])


def backup_database(wiki_database, backup_directory):
    # Dump the wiki database to <backup_directory>/database.sql using the
    # credentials read from LocalSettings.php.
    check_call([
        "mysqldump",
        "-u{wgDBuser}".format(**wiki_database),
        "-p{wgDBpassword}".format(**wiki_database),
        "-h{wgDBserver}".format(**wiki_database),
        "--result-file={}".format(os.path.join(backup_directory, "database.sql")),
        wiki_database["wgDBname"]
    ])


def get_wiki_globals(wiki_path, *args):
    # Render the PHP snippet for the requested $wg* globals, run it with the
    # system PHP interpreter, and parse the JSON object it prints.
    script = PHP_GLOBALS_SCRIPT % (wiki_path, ",".join(['"{}" => ${}'.format(var, var) for var in args]))
    script_out = check_output(["php", "-d", "error_reporting=0", "-r", script])
    for line in script_out.decode().split("\n"):
        if line.startswith("{"):
            return json.loads(line.strip())
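    # Example (placeholder values): get_wiki_globals("/var/www/wiki", "wgDBname", "wgDBuser")
    # might return {"wgDBname": "wikidb", "wgDBuser": "wiki"}; if no JSON line is found
    # in the PHP output, the function falls through and returns None.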


def unpack_tarfile(tarfile, wiki_path):
    # Extract the release tarball (if not already extracted) and copy its
    # contents over the existing wiki installation.
    tarsource = tarfile[:-len(".tar.gz")]
    if not os.path.isdir(tarsource):
        check_call(["tar", "-xvf", tarfile])

    check_call("cp -Rv {}/* {}".format(tarsource, wiki_path), shell=True)


def update_database(wiki_path):
    # Run MediaWiki's maintenance/update.php from inside the wiki directory.
    check_call(["php", "maintenance/update.php"], cwd=wiki_path)


WIKI_PATH = sys.argv[1]
TARGET_VERSION = sys.argv[2]
BACKUP_KEY = "backup-{}".format(datetime.now()).replace(" ", "_").replace(":", "-")
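# The backup directory name is derived from the current timestamp; illustrative layout:
#   backup-2024-05-01_12-34-56.789012/
#     database.sql   (written by backup_database)
#     source.tar     (written by backup_source)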

os.makedirs(BACKUP_KEY)
print(">> Backup directory is: {}".format(BACKUP_KEY))

print(">> Backing up database")
wiki_database = get_wiki_globals(WIKI_PATH, "wgDBtype", "wgDBserver", "wgDBname", "wgDBuser", "wgDBpassword")
backup_database(wiki_database, BACKUP_KEY)

print(">> Backing up wiki source")
backup_source(WIKI_PATH, BACKUP_KEY)

print(">> Downloading MediaWiki {}".format(TARGET_VERSION))
tarfile = download_version(TARGET_VERSION)

print(">> Unpacking {} over {}".format(tarfile, WIKI_PATH))
unpack_tarfile(tarfile, WIKI_PATH)

print(">> Running database update")
update_database(WIKI_PATH)