Extension:Backup.php
<php>
<?php
/**
 * Extension:Backup
 * - Licensed under LGPL (http://www.gnu.org/copyleft/lesser.html)
 * - Author: User:Nad
 * - Started: 2007-06-09
 * - This is a SpecialPage which acts as an in-wiki interface to backup.pl,
 *   the script which works through the backup/compress/distribute queue
 */
if (!defined('MEDIAWIKI')) die('Not an entry point.');
define('BACKUPS_VERSION','0.0.1, 2007-06-18');
$wgExtensionFunctions[] = 'wfSetupBackup';
$wgExtensionCredits['specialpage'][] = array(
	'name'        => 'Special:Backup',
	'author'      => 'User:Nad',
	'description' => 'A special page for backing up and restoring multiple wikis on a server',
	'url'         => 'http://www.mediawiki.org/wiki/Extension:Backup_&_Restore',
	'version'     => BACKUPS_VERSION
);
require_once "$IP/includes/SpecialPage.php";
# Define a new class based on the SpecialPage class
class SpecialBackup extends SpecialPage {
	# Constructor
	function SpecialBackup() {
		SpecialPage::SpecialPage('Backup', 'sysop');
	}
	# Override SpecialPage::execute()
	# - $param is anything after the slash in the URL, eg Special:Backup/param
	function execute($param) {
		global $wgOut, $wgRequest;
		$title = Title::makeTitle(NS_SPECIAL, 'Backup');
		# Create a sortable table for all wikis found and accessible in MySQL
		$cols = array('ID','Name','Domains','DB','Status','Version','Backup-cycle','Delete','Backup','Repair');
		$wgOut->addWikiText(wfMsg('backup_wikilist'));
		$wgOut->addHTML('<form><table class="sortable"><tr><th>'.join('</th><th>', $cols).'</th></tr>');
		foreach ($this->getWikiList() as $wiki) {
			# todo: render this wiki as a table row ($something is a placeholder)
			$wgOut->addHTML("$something");
		}
		$wgOut->addHTML('</table>');

		# Create a sortable table of all backup files which could be restored
		# - each row has the same cols as above, plus backup-filename, minus backup and delete
		$cols = array('File','ID','Name','Domains','DB','Status','Version','Backup-cycle','Delete','Backup');
		$wgOut->addWikiText(wfMsg('backup_backuplist'));
		$wgOut->addHTML('<table class="sortable"><tr><th>'.join('</th><th>', $cols).'</th></tr>');
		foreach ($backups as $backup) {
			# todo: populate $backups and render each backup as a table row
			$wgOut->addHTML("$something");
		}
		$wgOut->addHTML('</table>');

		# "Next >" or "Commit"
		$wgOut->addHTML(wfElement('input', array('type' => 'submit', 'name' => 'backup_next',   'value' => wfMsg('backup_next'))));
		$wgOut->addHTML(wfElement('input', array('type' => 'submit', 'name' => 'backup_commit', 'value' => wfMsg('backup_commit'))));
		$wgOut->addHTML(wfElement('input', array('type' => 'hidden', 'name' => 'backup_title',  'value' => $title->getPrefixedText())));
		$wgOut->addHTML('</form>');
		# Add the list of jobs that would result from the changes (if submitted with "next")
		if ($wgRequest->getText('backup_next')) {
			$jobs = $this->submit();
			if (count($jobs) > 0) {
				$wgOut->addWikiText(wfMsg('backup_commitlist'));
				foreach ($jobs as $job) $wgOut->addWikiText("*$job");
			}
			else $wgOut->addWikiText(wfMsg('backup_nojobs'));
		}
		# Queue the jobs if the form was posted with "commit"
		if ($wgRequest->getText('backup_commit')) $this->submit(true);
		# Add the list of the backup.pl job queue (and a link to the MW log of completed items)
		$wgOut->addWikiText(wfMsg('backup_activelist'));
		$jobs = file(dirname(__FILE__).'/jobs');
		if ($jobs && count($jobs) > 0) {
			foreach ($jobs as $job) $wgOut->addWikiText("*$job");
		}
		else $wgOut->addWikiText(wfMsg('backup_nojobs'));
	}
	# Process the posted form
	# - returns the list of jobs implied by the submitted changes
	# - if $queue is true, the jobs are also queued for backup.pl (see the sketch below)
	function submit($queue = false) {
		$jobs = array();
		foreach ($this->getWikiList() as $wiki) {
			# todo: work out jobs from fields changed
			if ($queue) {
				# todo: queue the job
			}
		}
		return $jobs;
	}
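	# A possible implementation of the "queue the job" step above (an assumption,
	# not part of the original code): since execute() lists the active queue by
	# reading one job per line from the "jobs" file in this directory, queueing
	# could be as simple as appending a line for backup.pl to pick up:
	#
	#   $fh = fopen(dirname(__FILE__).'/jobs', 'a');
	#   fwrite($fh, "$job\n");
	#   fclose($fh);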
	# Get the list of wikis from the databases and LocalSettings files
	# - it scans the DBs as well so that orphans with no LocalSettings can be handled too
	function getWikiList() {
		# Make a hash of all databases and their tables
		$db =& wfGetDB(DB_SLAVE);
		$databases = array();
		$result = $db->query('SHOW DATABASES');
		while ($row = $db->fetchRow($result)) {
			$database = $row[0];
			$databases[$database] = array();
			$tables = $db->query("SHOW TABLES FROM $database");
			while ($trow = $db->fetchRow($tables)) $databases[$database][] = $trow[0];
		}
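		# Illustrative shape of the hash built above (hypothetical names):
		#   $databases = array(
		#     'wikidb' => array('mw_archive', 'mw_categorylinks', /* ... */ 'mw_watchlist'),
		#     'mysql'  => array('user', 'db', /* ... */),
		#   );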
		# These tables have been in the schema and named the same since MediaWiki 1.4
		$testtables = array(
			'archive', 'categorylinks', 'hitcounter', 'image', 'interwiki',
			'ipblocks', 'math', 'querycache', 'recentchanges', 'searchindex',
			'sitestats', 'text', 'user', 'user_newtalk', 'watchlist'
		);
		# Scan for table prefixes
		$prefixes = array();
		foreach ($databases as $database => $tables)
			foreach ($tables as $table)
				if (ereg("^(.*)$testtables[0]$", $table, $prefix))
					$prefixes[] = array($database, $prefix[1]);
		# Scan the prefixes for all having a complete set of tables
		$mediawikis = array();
		foreach ($prefixes as $p) {
			list($database, $prefix) = $p;
			$match = true;
			foreach ($testtables as $test)
				if (!in_array($prefix.$test, $databases[$database])) $match = false;
			if ($match) $mediawikis[] = array('database' => $database, 'prefix' => $prefix);
		}
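		# Worked example (hypothetical names): with the "wikidb" tables above,
		# "mw_archive" matches "^(.*)archive$" giving prefix "mw_"; since every
		# test table then exists as "mw_<table>", the result includes
		#   array('database' => 'wikidb', 'prefix' => 'mw_')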
# todo: import instances found in settings dir
		return $mediawikis;
	}
}
# Called from the $wgExtensionFunctions array when initialising extensions
function wfSetupBackup() {
	global $wgLanguageCode, $wgMessageCache;
	# Add the messages used by the special page
	if ($wgLanguageCode == 'en') {
		$wgMessageCache->addMessages(array(
			'backup'            => "Backup & Restore",
			'backup_wikilist'   => "=== List of wikis ===\n*the ID is used for file/dir name and db-prefix\n",
			'backup_backuplist' => "=== List of backups and templates ===",
			'backup_commitlist' => "The changes you've submitted result in the following jobs:",
			'backup_activelist' => "The following jobs are currently executing:",
			'backup_next'       => "Next >",
			'backup_commit'     => "Commit",
			'backup_nojobs'     => "There are no jobs in the list."
		));
	}
	# Add the special page to the environment
	SpecialPage::addPage(new SpecialBackup());
}
?>
</php>
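
To use the extension, save this file and include it from LocalSettings.php in the usual way (the path below is an assumption; adjust it to wherever the file is saved):

<php>
require_once("$IP/extensions/Backup/Backup.php");
</php>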