Difference between revisions of "Transform-changes.php"
(both revisions are marked as minor edits; lines added in the newer revision are marked with +)

Line 151:
 if ($title == 'Special:Recentchanges') {
 #$article = preg_replace('/(?<=<h1 class="firstHeading">).+(?=<\\/h1>)/', 'Recent changes (see also <a href="/news">news</a>)', $article);
+ $article = preg_replace("/<p>Track the most recent changes.+(Below are the last)/s","$1",$article);
 # Add show/hide-external link

Line 157:
 $anchor = $_REQUEST['hideexternal'] ? 'show' : 'hide';
 if (preg_match('/my edits\\s*<br/',$article)) $anchor = ucwords($anchor);
+ $article = preg_replace('/( patrolled edits\\s*)|( my edits\\s*)(?=<br)/i',"$1$2 | <a href=\"$script?title=$title$link\">$anchor</a> external changes",$article);
 }
 else if ($title == 'Special:Contributions') {
 $user = str_replace('User:','',$_REQUEST['target']);
+ $article = preg_replace('/(?<=<h1 class="firstHeading">).+(?=<\\/h1>)/', "<a href=\"/user:$user\">$user</a>'s contributions", $article);
 }
 ?>
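The line added at 151 trims the stock introduction from Special:Recentchanges so the listing starts at the "Below are the last …" summary. A minimal sketch of the effect against a made-up page fragment (the real markup may differ):

<?php
// Hypothetical fragment of the rendered Special:Recentchanges page (not the real markup).
$article = '<p>Track the most recent changes to the wiki on this page.</p>'
         . '<p>Below are the last 50 changes in the last 7 days.</p>';
// The change added at line 151: drop everything from the intro paragraph
// up to (but not including) the "Below are the last" summary sentence.
$article = preg_replace("/<p>Track the most recent changes.+(Below are the last)/s", "$1", $article);
echo $article; // "Below are the last 50 changes in the last 7 days.</p>"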
Revision as of 22:42, 8 March 2007
<? $tmp = $article;
# Merge local changes with changes from other specified MediaWikis
# Extract changes from a page into an array ready for rendering in a table
function extractChanges(&$changes, &$page, $wd = array()) {
global $xwIsAdmin, $xwUserGroups;
list($wiki, $url, $tz) = $wd;
# Change extraction rules (for supporting TWiki and MoinMoin later)
$cr = "/^\\((.*?(diff|log|undo)(<\\/a>)?)\\)(.+?\\((.+?<\\/a>)\\))?( \\. \\.\\s+(.*?)\\s*((<strong>)?\\s*<a.+?<\\/a>(<\\/strong>)?))?.*?; ([0-9]+:[0-9]+) \\. \\. (<a.+?<\\/a>).+?(<a.+?<\\/a>)(.+?\\((.+)\\))?/";
$pr = "/<h4>([0-9]+ [a-z]+ [0-9]+)<\\/h4>\\s*<ul>(.+?)<\\/ul>/is"; # one match per <h4> day heading and its <ul> of changes
# Used to make urls from other wiki's absolute
if ($url) { $url = parse_url($url); $url = $url['scheme'].'://'.$url['host']; }
$tr = 999; # patch to stop same time keys overwriting
# First split the page by day sections
preg_match_all($pr, $page, $m1, PREG_SET_ORDER);
# Loop through changes in each day extracting relevent info into $changes array
$first = 0;
foreach ($m1 as $day) {
preg_match_all("/- (.+?)<\\/li>/", $day[2], $m2);
foreach ($m2[1] as $change) {
$change = str_replace('href="', "href=\"$url", $change);
if (preg_match($cr, $change, $m3)) {
list(,$diff,,,,$hist,,$flags,$title,,,$time,$user,$talk,,$comment) = $m3;
ereg('>.+</a>', $user, $uname);
$user = ereg_replace($uname[0], str_replace(' ','&nbsp;',$uname[0]), $user);
$comment = preg_replace("/\\[(http[^ \\]]+)\\]/","<a href=\"$1\">$1</a>",$comment);
$comment = preg_replace("/\\[(http[^ \\]]+)\\s+(.+?)\\]/","<a href=\"$1\">$2</a>",$comment);
# Get read-perms for article
if ( (!$readable = $xwIsAdmin) && ereg('>(.+)</a>', $title, $anchor) ) {
$readable = xwGetListByTagname( xwArticleProperties($anchor[1]), 'read' ) + array('anyone');
$readable = count( array_intersect( $readable, $xwUserGroups ) );
}
# Store the change if readable by current user
if ( $readable ) $changes[$first = (strtotime("$day[1] $time")+$tz*3600).'.00'.$tr--] = array($wd,$diff,$hist,$flags,$title,$user,$talk,$comment);
} else xwMessage( "Couldn't match change: $change", 'red' );
}
}
# Remove all changes before the first extracted time (otherwise a large list of externals trails)
foreach ($changes as $k => $v) if ($k < $first) unset($changes[$k]);
return $changes;
}
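The keys used for $changes above are the change's unix time (shifted by the source wiki's timezone offset) with '.00' and a decreasing counter appended, so several changes logged in the same minute keep distinct keys instead of overwriting one another. A small illustration with invented values:

<?php
// Illustration only: how the $changes sort keys are formed (times and offset are invented).
$tr = 999;                            // per-page counter, as in extractChanges()
$tz = 12;                             // example timezone offset in hours
$day = '8 mar 2007'; $time = '22:42'; // as scraped from a day heading and a change line
$key1 = (strtotime("$day $time") + $tz*3600).'.00'.$tr--;
$key2 = (strtotime("$day $time") + $tz*3600).'.00'.$tr--; // same minute, next change
var_dump($key1 !== $key2);            // bool(true): same minute, but no key collision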
# Render changes into said table
function renderChanges( &$changes ) {
global $wgArticlePath;
# (the HTML table markup inside the strings below was partly eaten when this code was rendered by the wiki and has been reconstructed)
$head = '';
foreach ( array( 'Date', 'Wiki', 'Article', 'Edit summary', 'Account', '', '' ) as $k => $v )
$head .= "<th><a href=\"/Talk:Special:Recentchanges\">$v</a>\n";
$html = "<table>\n";
$date = '';
ksort($changes);
foreach (array_reverse( $changes, SORT_STRING) as $t => $c) {
$time = strftime('%H:%M', $t);
if ($date != ($d = strftime('%d %b',$t))) {
$html .= $date ? '<tr><td colspan="7">&nbsp;</td></tr>' : ''; # spacer row between day sections (assumed)
$html .= str_replace(">Date", ">$d", $head);
$date = $d; $bg = 1;
};
$bgtag = ++$bg % 2 ? 'bg1' : 'bg2';
$td = "<td class=\"$bgtag\">";
if ($wt = $c[0][0]) $wiki = "<a href=\"".$c[0][1]."\" style=\"color:#009900;\">$wt</a>"; else $wiki = '';
$html .= "<tr>\n$td$time$td$wiki";
if (!$c[2]) {
list($diff,$hist) = explode(' ',$c[1]);
preg_match("/(<a.+?<\\/a>)/",$c[7],$m);
$c[4] = $m[1];
} else list(,$diff,$hist) = $c;
if ($diff == 'diff') $diff = '';
if (ereg('^href',$hist)) {
$diff = "<a $hist>log</a>";
$hist = '';
}
if (ereg('target=([^&]+)',$diff,$to))
$c[7] = "moved to <a href=\"".str_replace('$1',$to[1],$wgArticlePath)."\">".str_replace('_',' ',$to[1]);
if ($c[3]) $c[7] = "($c[3]) $c[7]";
if (preg_match('/(.+>)(.{15}).+?(<.+)/',$c[5],$m)) $c[5] = "$m[1]$m[2]…$m[3]";
$html .= "$td$c[4]$td$c[7]\n";
$html .= "$td$c[5]$td$diff$td$hist</tr>";
}
return "$html\n</table>";
}
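renderChanges re-emits the header row whenever the day changes, swapping the literal Date label for the current day via str_replace. A toy version with assumed header markup:

<?php
// Toy header with two columns (markup assumed, not the exact cells built above).
$head = "<th><a href=\"/Talk:Special:Recentchanges\">Date</a>\n"
      . "<th><a href=\"/Talk:Special:Recentchanges\">Wiki</a>\n";
$d = '09 Mar';
echo str_replace('>Date', ">$d", $head);
// The first cell now reads "09 Mar"; the link target and the other columns are untouched.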
if ($_REQUEST['hideexternal'] != 1) {
# Get items in global changes-cache
$changes = array();
$tr = 999;
foreach (array($GLOBALS['IP'].'/Global.changes', $GLOBALS['IP'].'/'.$GLOBALS['xwUserName'].'.changes') as $cache)
if (file_exists($cache)) foreach (file($cache) as $change) {
$change = split("\\|",trim($change));
$change[2] = split(',',$change[2]);
$change[2][0] = str_replace('Energy Alternatives','EA',$change[2][0]);
$change[2][0] = str_replace('Azzuro Workgroup','AWG',$change[2][0]);
$change[2][0] = str_replace('JewelWiki','JW',$change[2][0]);
$change[2][0] = str_replace('GeeXboX wiki','GeeXboX',$change[2][0]);
$change[2][0] = str_replace('Organic Design','OD',$change[2][0]);
$change[2][0] = str_replace('OrganicDesign','Closet',$change[2][0]);
$change[2][0] = str_replace('SourceryForge','✿',$change[2][0]);
$change[6] = preg_replace("/\\w+:\\w+@/","",$change[6]);
$changes[(strtotime(array_shift($change))+3600*intval(array_shift($change))).'.00'.$tr--] = $change;
}
}
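The cache files appear to hold one change per line as pipe-separated fields: date, timezone offset, a comma-separated wiki descriptor, then the same columns extractChanges stores. A hypothetical line (every value is invented) parsed the same way:

<?php
// Entirely invented cache line, shaped the way the parsing above expects:
// date | tz offset | wiki,url,tz | diff | hist | flags | title | user | talk | comment
$line = "8 March 2007 22:42|12|Example Wiki,http://wiki.example.com,12|diff|hist|m|Title|User|Talk|summary";
$change = explode('|', trim($line)); // split() above does the same job via a regex
$change[2] = explode(',', $change[2]);
$key = (strtotime(array_shift($change)) + 3600*intval(array_shift($change))).'.00'.'999';
echo "$key\n";
print_r($change); // $change[0] is now array('Example Wiki','http://wiki.example.com','12')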
# Get the list of wikis for which changes are merged
$wikis = array();
if (!$wl = xwArticleContent($GLOBALS['xwUserName'].'/wiki.list',false)) $wl = xwArticleContent("$tTitle/wiki.list",false);
if (preg_match_all("/^\\*.*?\"(.*?)\".*?\"(.*?)\".*?\"(.*?)\".*?\"(.*?)\"/m",$wl,$wikis,PREG_SET_ORDER))
for ($i = 0; $i < count($wikis); $i++) array_shift($wikis[$i]);
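Each wiki.list entry is a bullet line carrying at least four double-quoted fields, of which the first three are used as the wiki's name, URL and timezone offset. A made-up line run against the same pattern:

<?php
// Invented wiki.list line; only name, URL and timezone are meaningful to the code above.
$wl = '* "Example Wiki" "http://wiki.example.com/wiki/index.php" "12" "unused"';
if (preg_match_all("/^\\*.*?\"(.*?)\".*?\"(.*?)\".*?\"(.*?)\".*?\"(.*?)\"/m", $wl, $wikis, PREG_SET_ORDER))
for ($i = 0; $i < count($wikis); $i++) array_shift($wikis[$i]);
print_r($wikis[0]); // array('Example Wiki', 'http://wiki.example.com/wiki/index.php', '12', 'unused')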
# Merge each wiki's changes into the local table
# (connect with a socket so we can bail after a small timeout)
if (!$qs = $_SERVER['QUERY_STRING']) $qs = "title=Special:Recentchanges"; # same QS for remote requests
$changes = array();
foreach ($wikis as $wd) {
$url = parse_url($wd[1]);
$domain = $url['host'];
$scheme = $url['scheme'];
$port = $scheme == 'http'?80:443;
$request = $url['path']."?$qs&xpath://view:=&printable=yes";
if (0 && $handle = @fsockopen($domain,$port,$errno,$err,1)) {
fclose($handle);
extractChanges($changes,file_get_contents("$scheme://$domain$request"),$wd);
} else xwMessage("Failed to merge changes from $scheme://$domain",'red');
}
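Each remote request is just that wiki's own index.php with the same query string plus the xpath/printable flags, assembled from the parse_url pieces. A sketch with an invented wiki descriptor:

<?php
// Invented descriptor: name, base URL, timezone offset (hours).
$wd = array('Example Wiki', 'http://wiki.example.com/wiki/index.php', 12);
$qs = 'title=Special:Recentchanges';
$url = parse_url($wd[1]);
$request = $url['path']."?$qs&xpath://view:=&printable=yes";
echo $url['scheme'].'://'.$url['host'].$request;
// http://wiki.example.com/wiki/index.php?title=Special:Recentchanges&xpath://view:=&printable=yes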
# Merge the local changes
extractChanges($changes,$article);
# Render all changes
if ($swf = $_REQUEST['SWF']) {
$i = -1;
$html = '';
ksort($changes);
foreach (array_reverse($changes,SORT_STRING) as $t => $c) {
$time = strftime('%H:%M', $t);
if ($wt = $c[0][0]) { $key = "<a href=\"".$c[0][1]."\">$wt:</a>"; $col = '#009900'; }
else { $key = $c[5]; $col = '#000080'; }
if (!ereg('m',$c[3])) if ($i++<9) $html .= ($i?'&':'').str_replace('&','%26',"item$i=$t,$time,$key,$c[4],$col,$c[7]");
}
ob_end_clean();
header('Content-type: application/x-www-form-urlencoded');
print $html;
if ($_REQUEST['INFO']) print '&user='.$GLOBALS['xwUserName'];
die;
}
# Otherwise splice the merged table into the article in place of the original day listings (the tail of this line was lost in the wiki rendering; the ending below is a best guess)
else $article = preg_replace( "/<h4>[0-9]+ [a-z]+ [0-9]+<\\/h4>\\s*<ul>.+<\\/ul>/is", renderChanges($changes), $article );
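The SWF branch above emits at most ten items as one form-encoded string, escaping any & inside an item so only the separators survive. For example (values invented):

<?php
// One invented item; the real fields come from the merged $changes rows.
$i = 0; $t = 1173393720; $time = '22:42';
$key = 'Example:'; $title = 'Main Page'; $col = '#009900'; $comment = 'tweak & tidy';
echo ($i ? '&' : '').str_replace('&', '%26', "item$i=$t,$time,$key,$title,$col,$comment");
// item0=1173393720,22:42,Example:,Main Page,#009900,tweak %26 tidy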
\n"; $date = ; $td = '' : '';
$html .= str_replace(">Date", ">$d", $head); $date = $d; $bg = 1; }; $bgtag = ++$bg % 2 ? 'bg1' : 'bg2'; if ($wt = $c[0][0]) $wiki = "<a href=\"".$c[0][1]."\" style=\"color:#009900;\">$wt</a>"; else $wiki = ;
$html .= "\n$td$time$td$wiki"; if (!$c[2]) { list($diff,$hist) = explode(' ',$c[1]); preg_match("/(<a.+?<\\/a>)/",$c[7],$m); $c[4] = $m[1]; } else list(,$diff,$hist) = $c; if ($diff == 'diff') $diff = ; if (ereg('^href',$hist)) { $diff = "<a $hist>log</a>"; $hist = ; } if (ereg('target=([^&]+)',$diff,$to)) $c[7] = "moved to <a href=\"".str_replace('$1',$to[1],$wgArticlePath)."\">".str_replace('_',' ',$to[1]); if ($c[3]) $c[7] = "($c[3]) $c[7]"; if (preg_match('/(.+>)(.{15}).+?(<.+)/',$c[5],$m)) $c[5] = "$m[1]$m[2]…$m[3]"; $html .= "$td$c[4]';
ksort($changes); foreach (array_reverse( $changes, SORT_STRING) as $t => $c) { $time = strftime('%H:%M', $t); if ($date != ($d = strftime('%d %b',$t))) { $html .= $date ? ' |
$c[7]\n";
$html .= "$td$c[5]$td$diff$td$hist"; } return "$html\n |
}
if ($_REQUEST['hideexternal'] != 1) {
- Get items in global changes-cache
$changes = array(); $tr = 999; foreach (array($GLOBALS['IP'].'/Global.changes', $GLOBALS['IP'].'/'.$GLOBALS['xwUserName'].'.changes') as $cache) if (file_exists($cache)) foreach (file($cache) as $change) { $change = split("\\|",trim($change)); $change[2] = split(',',$change[2]); $change[2][0] = str_replace('Energy Alternatives','EA',$change[2][0]); $change[2][0] = str_replace('Azzuro Workgroup','AWG',$change[2][0]); $change[2][0] = str_replace('JewelWiki','JW',$change[2][0]); $change[2][0] = str_replace('GeeXboX wiki','GeeXboX',$change[2][0]); $change[2][0] = str_replace('Organic Design','OD',$change[2][0]); $change[2][0] = str_replace('OrganicDesign','Closet',$change[2][0]); $change[2][0] = str_replace('SourceryForge','✿',$change[2][0]); $change[6] = preg_replace("/\\w+:\\w+@/","",$change[6]); $changes[(strtotime(array_shift($change))+3600*intval(array_shift($change))).'.00'.$tr--] = $change; } }
- Get the list of wikis for which changes are merged
$wikis = array();
- if (!$wl = xwArticleContent($GLOBALS['xwUserName'].'/wiki.list',false)) $wl = xwArticleContent("$tTitle/wiki.list",false);
- if (preg_match_all("/^\\*.*?\"(.*?)\".*?\"(.*?)\".*?\"(.*?)\".*?\"(.*?)\"/m",$wl,$wikis,PREG_SET_ORDER))
- for ($i = 0; $i < count($wikis); $i++) array_shift($wikis[$i]);
- Merge each wikis changes into local table
- - connect with socket so we can bail after a small timeout
if (!$qs = $_SERVER['QUERY_STRING']) $qs = "title=Special:Recentchanges"; # same QS for remote requests $changes = array(); foreach ($wikis as $wd) {
$url = parse_url($wd[1]); $domain = $url['host']; $scheme = $url['scheme']; $port = $scheme == 'http'?80:443; $request = $url['path']."?$qs&xpath://view:=&printable=yes"; if (0 && $handle = @fsockopen($domain,$port,$errno,$err,1)) { fclose($handle); extractChanges($changes,file_get_contents("$scheme://$domain$request"),$wd); } else xwMessage("Failed to merge changes from $scheme://$domain",'red'); }
- Merge the local changes
extractChanges($changes,$article);
- Render all changes
if ($swf = $_REQUEST['SWF']) { $i = -1; $html = ; ksort($changes); foreach (array_reverse($changes,SORT_STRING) as $t => $c) { $time = strftime('%H:%M', $t); if ($wt = $c[0][0]) { $key = "<a href=\"".$c[0][1]."\">$wt:</a>"; $col = '#009900'; } else { $key = $c[5]; $col = '#000080'; } if (!ereg('m',$c[3])) if ($i++<9) $html .= ($i?'&':).str_replace('&','%26',"item$i=$t,$time,$key,$c[4],$col,$c[7]"); } ob_end_clean(); header('Content-type: application/x-www-urlform-encoded'); print $html; if ($_REQUEST['INFO']) print '&user='.$GLOBALS['xwUserName']; die; }
else $article = preg_replace( "/