Talk:Peer Installation

From Organic Design wiki

Ran wikid on OS X using:

 /Users/svenbot/peer/wikid svenbot --start --dir=/Users/svenbot/peer

svenbox.log indicates some Perl modules are missing: HTTP/Request.pm and Date/Parse.pm.
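
Those two modules come from the libwww-perl and TimeDate distributions; a minimal install sketch from CPAN (assuming the stock OS X Perl and sudo access):

 sudo perl -MCPAN -e 'install Bundle::LWP'   # provides HTTP::Request
 sudo perl -MCPAN -e 'install Date::Parse'   # part of the TimeDate distribution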

Ok, installed the modules, log file looks ok, and peer is logging into localhost wiki --Sven 00:43, 2 Apr 2007 (NZST)

Wikid - that's 99% of the difficulty out of the way then - gonna hit da sack now I reckon :-O

Following the instructions, I installed it to http://www.div0.com/peer/peer.pl

and got a 500 Internal Server Error.

I did a chmod 744 and have the same problem.

Trying a different interpretation of the instructions, I also installed it to http://www.div0.com/cgi-bin/peer.pl
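
A couple of things worth checking, assuming shell access to the host: mode 744 gives the web server's user no execute permission, and a Perl compile failure also surfaces as a 500.

 chmod 755 peer.pl   # the web server needs execute as well as read
 perl -c peer.pl     # syntax-check the script before blaming the server

Note also that the script below runs as a standalone server listening on its own port rather than as a CGI, so a cgi-bin install may not be the intended setup at all.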

Here is the peer.pl that I downloaded:

#!/usr/bin/perl -w
use strict;
sub getConfig;
sub logAdd;
sub readFile;
sub writeFile;

# HELP
if ( grep /^[\/-]+(\?|h|help)$/, @ARGV ) { print readFile 'articles/peer-help.txt'; exit; }

# *UX/WIN ?
use Cwd;
our $cwd = cwd;
our $ux = ($cwd =~ m/^\//);
$ux ? $cwd = cwd : $cwd =~ s/\//\\/g;
$cwd =~ s/\//\\/g unless $cwd =~ m/^\//;
$cwd =~ s/[\\\/]$//g;

# PATH
$0 =~ /^(.*[\/\\])?(.+?)(\.pl)?$/;
our $path = $1;

# Get config from cf and args
our $cf = '';
if ( my @p = grep /^[\/-]+(cf|config)[= ]+.+$/, @ARGV ) { $cf = $1 if $p[$#p] =~ m/[= ]+(.+)$/ }
our $peer = getConfig 'id|name';
our $port = getConfig 'p|port' or 1729; # NOTE: 'or' binds after the assignment, so the 1729 default never applies
our $todo = getConfig 'qf|queue';
our $dialup = getConfig 'd|dialup';

# PEERS
our %localPeers = ();
our %activePeers = ();
# todo get peers list using port as key

# START | STOP | RELOAD | STATUS
if ( grep /^[\/-]+start$/, @ARGV ) { &start; exit; }
if ( grep /^[\/-]+stop$/, @ARGV ) { &stop; exit; }
if ( grep /^[\/-]+re(start|load)$/, @ARGV ) { &stop; &start; exit; }
if ( grep /^[\/-]+status$/, @ARGV ) { &status; exit; }

# General environment vars
our $os = $^O;
$^V =~ m/(.)(.)(.)/;
our $ver = ord($1).'.'.ord($2).'.'.ord($3);

# Get rid of cmd console if Win32
#if ( !$ux and grep /^[\/-]+bg$/, @ARGV ) {
unless ($ux) {
	require Win32::Console;
	my $CONSOLE = new Win32::Console();
	$CONSOLE->Free();
	}

# Declare service functions ( START | STOP | STATUS )
sub start {
	# - chk if any in localPeers on $port/name, error and return if so
	logAdd "Starting \"$peer\" on port $port".($dialup ? " (using -d)" : "").'...';
	}

sub stop {
	# - chk if any in localPeers on $port, error if not
	logAdd "Stopping \"$peer\"...";
	# - send a stop to peer using IO::Socket
	# - poll/timeout peers file to see if it dies
	# - kill it by pid/sig if it won't die
	}

sub status {
	# - Get running peers/ports list
	# - print info for each
	}

# Declare network subroutines
sub recvMessage;
sub sendMessage;

# Set up a global user-agent for making web and ftp requests
use HTTP::Request;
use LWP::UserAgent;
our $client = LWP::UserAgent->new( cookie_jar => {} );

# Initialise server listening on a port
use IO::Socket;
use IO::Select;
our $server = new IO::Socket::INET Listen => 1, LocalPort => $port, Proto => 'tcp';
our $select = new IO::Select $server;
our %activePeers = ();

# HTML interface environment
our $template = readFile "articles/peer-template.html";
$template =~ s//$peer/g;

# Views
my $views = '';
for ( 'Update','Filter','Events' ) { $views .= "$_\n" }
$template =~ s//$views/;

# Navigation
my $nav = '';
for ( $peer,'Local Wiki','Events','Schedule','Peers','Statistics','sync.list' ) { $nav .= "$_\n" }
$template =~ s//$nav/;

# Main server loop
sub serverIterate {
	for my $handle ($select->can_read) {
		my $stream = fileno $handle;
		if ($handle == $server) {
			# Handle is the server, set up a new peer
			# - but it can't be identified until its msg processed
			my $newhandle = $server->accept;
			$stream = fileno $newhandle;
			$select->add($newhandle);
			$activePeers{$stream}{'buffer'} = '';
			$activePeers{$stream}{'handle'} = $newhandle;
			logAdd "New connection: Stream$stream", 'main/new';
			}
		elsif (sysread $handle, my $input, 10000) {
			# Handle is an existing stream with data to read
			# NOTE: we should disconnect after certain size limit
			# - Process (and remove) all complete messages from this peer's buffer
			$activePeers{$stream}{'buffer'} .= $input;
			if ( $activePeers{$stream}{'buffer'} =~ s/^(.*\r?\n\r?\n)//s ) {
				recvMessage $handle, $_ for split /\r?\n\r?\n/, $1
				}
			}
		else {
			# Handle is an existing stream with no more data to read
			$select->remove($handle);
			delete $activePeers{$stream};
			$handle->close;
			logAdd "Stream$stream disconnected.";
			}
		}
	}

# Send an HTTP message to a handle
sub sendMessage {
	my ($handle, $msg) = @_;
	print $handle $msg;
	}

# Process an incoming HTTP message
sub recvMessage {
	my ($handle, $msg) = @_;

	# Extract request info
	$msg =~ m/GET\s+(.+?)\s+HTTP/;
	my @path = split /\/+/, $1;
	push @path, '', '', '';

	# Construct response
	my $ct = 'text/html';
	if ($path[1] eq 'images') {
		# Image request
		$msg = readFile "images/$path[2]" if my $title = $path[2];
		$ct = 'text/plain';
		$ct = 'image/jpeg' if $path[2] =~ m/\.jpe?g$/;
		$ct = 'image/png' if $path[2] =~ m/\.png$/;
		$ct = 'image/gif' if $path[2] =~ m/\.gif$/;
		}
	elsif ($path[1] eq 'articles') {
		# Article content request
		$msg = readFile "articles/$path[2]" if my $title = $path[2];
		$ct = 'text/plain';
		$ct = 'text/css' if $path[2] =~ m/\.css$/;
		$ct = 'text/xml' if $path[2] =~ m/\.(xml|xslt?)$/;
		$ct = 'text/html' if $path[2] =~ m/\.html?$/;
		}
	elsif ($path[1] eq 'peer') {
		# Article view request
		my $article = wikiParse( readFile "articles/$path[2]" ) if my $title = $path[2];
		$msg = $template;
		$msg =~ s//$path[2]/g;
		$msg =~ s//$article/;
		}
	else {
		# Default / Home
		my $title = ($path[2] or 'Yi');
		my $article = wikiParse( readFile "articles/$title" );
		$msg = $template;
		$msg =~ s//$title/g;
		$msg =~ s//$article/;
		}

	# Hide passwords
	$msg =~ s/pass=.+?(\s|$)/pass=••••••••/g;

	# Send response back to requestor
	my $cl = length $msg;
	$msg = "HTTP/1.1 200 OK\r\nContent-Type: $ct\r\nContent-Length: $cl\r\n\r\n$msg";
	sendMessage $handle, $msg;
	}

# Login to a MediaWiki
sub wikiLogin {
	my ($wiki, $user, $pass) = @_;
	my $url = "$wiki?title=Special:Userlogin&xpath://view:";
	my $success;
	my $retries = 3;
	while ($retries--) {
		if ( $client->request( HTTP::Request->new( GET => $url ) )->is_success ) {
			my %form = ( wpName => $user, wpPassword => $pass, wpLoginattempt => 'Log in' );
			my $response = $client->post( "$url&action=submitlogin", \%form );
			$success = $response->is_success and $response->content =~ m/You are now logged in/; # NOTE: 'and' binds after the assignment, so only is_success is recorded
			}
		logAdd $success ? "$peer has logged $user in to $wiki."
			: "ERROR: $peer couldn't log $user in to $wiki!";
		$retries = 0 if $success;
		}
	return $success;
	}

# Edit a MediaWiki page
sub wikiPageEdit {
	my ($wiki, $page, $content, $comment) = @_;
	my $success;
	my $retries = 3;
	while ($retries--) {
		# Request the page for editing and extract the edit-token
		my $response = $client->request( HTTP::Request->new( GET => "$wiki?title=$page&action=edit" ) );
		if ( $response->is_success and $response->content =~ m/value="(.+?)" name="wpEditToken"/ ) {
			# Got token, now submit an edit-form including the token
			my %form = (
				wpEditToken => $1,
				wpTextbox1 => $content,
				wpSummary => $comment,
				wpSave => 'Save page',
				wpEdittime => '',
				wpSection => ''
				);
			$success = !$client->post( "$wiki?title=$page&action=submit", \%form )->is_error;
			}
		if ($success) { $retries = 0 }
		else { logAdd "ERROR: $peer couldn't edit \"$page\" on $wiki!" }
		}
	return $success;
	}

# Get the date of last edit of an article
sub wikiLastEdit {
	my ($wiki, $page) = @_;
	# Request the last history entry and extract date
	my $response = $client->request( HTTP::Request->new( GET => "$wiki?title=$page&action=history&limit=1&xpath://view:" ) );
	return $1 if $response->is_success and $response->content =~ m/(\d+:\d+.+?\d)<\/a>/;
	}

# Retrieve the raw content of a page
sub wikiRawPage {
	my ($wiki, $page) = @_;
	my $response = $client->request( HTTP::Request->new( GET => "$wiki?title=$page&action=raw" ) );
	return $response->content if $response->is_success;
	}

# Logout of a MediaWiki
sub wikiLogout {
	my $wiki = shift;
	my $success = $client->request( HTTP::Request->new( GET => "$wiki?title=Special:Userlogout&xpath://view:" ) )->is_success;
	logAdd $success ? "$peer has logged out of $wiki." : "WARNING: $peer couldn't logout of $wiki!";
	return $success;
	}

# Wikitext parsing
# - only basic, the real effort should be focussed on the nodal version
sub wikiParse {
	$_ = shift;
	#s/<br>/\n/g;
	#s/\n\n\n/\n\n/g;
	my $parse = 0;
	my $text = '';
	$text .= ( $parse ^= 1 ) ? wikiSubParse($_) : "<pre>$_</pre>" for split /<\/?pre>/i;
	return $text;
	}
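
# The XOR toggle above alternates between segments outside <pre> (which get
# wiki-parsed) and segments inside <pre> (passed through raw), e.g.
#   wikiParse("''hi''<pre>''raw''</pre>")  =>  "<i>hi</i><pre>''raw''</pre>"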

sub wikiSubParse {
	$_ = shift;
	my $tmp;

	# Paragraphs (lame tmp)
	s/\n\n/\n\n/g;
	
	# Bullet lists
	$tmp = '';
	my $list = 0;
	for ( split /^/, $_ ) {
		if ( /^(\*+)\s*(.+?)$/ ) {
			my $l = length $1;
			$tmp .= "<ul>\n" if $l > $list;
			$tmp .= "</ul>\n" if $l < $list;
			$list = $l;
			$tmp .= "<li>$2</li>\n";
			}
		else {
			$tmp .= "</ul>\n" while $list--;
			$list = 0;
			$tmp .= $_;
			}
		}
	$_ = $tmp;

	# Sections (heading tags reconstructed; the named anchor per heading is an assumption)
	s/(^\={2,})\s*(.+?)\s*\={2,}/"<a name=\"$2\"><\/a><h".(length $1).">$2<\/h".(length $1).">\n"/gem;

	# External Links
	s/\[\s*((http:\/)?\/.+?)\s+(.*?)\s*\]/<a href="$1">$3<\/a>/g;
	s/\[\s*((http:\/)?\/.+?)\s*\]/<a href="$1">$1<\/a>/g;
	# (the bare-URL rule's pattern was lost in rendering; this one is an assumed reconstruction)
	s/((?<!["'>\[])http:\/\/[^\s"<\]]+)/<a href="$1">$1<\/a>/g;

	# Internal links (hrefs reconstructed; the \/peer\/ article-view route is an assumption)
	s/\[{2}\s*(.+?)\s*\|(.+?)\]{2}(?!\])/<a href="\/peer\/$1">$2<\/a>/g;
	s/\[{2}\s*(.+?)\s*\]{2}(?!\])/<a href="\/peer\/$1">$1<\/a>/g;

	# Bold and italic
	s/'''(.+?)'''/<b>$1<\/b>/g;
	s/^;(.+?)$/<b>$1<\/b>/gm;
	s/''(.+?)''/<i>$1<\/i>/g;

	return $_;
	}
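
# For example, wikiSubParse("== News ==\n* one\n* two\nmore\n") emits an
# anchored <h2> heading, then a two-item <ul> list that is closed when the
# plain line follows, then the remaining text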

# Return a page requested over HTTP
# - uses global $client
sub readHTTP {
	my $response = $client->request( HTTP::Request->new( GET => shift ) );
	$response->is_success ? return $response->content : logAdd( $response->error_as_HTML );
	}

# Return content of passed file
sub readFile {
	my $file = shift;
	my $out = '';
	if ( open FH, '<', $file ) {
		binmode FH;
		sysread FH, $out, 1000000;
		close FH;
		}
	return $out;
	}
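
# NOTE: the single sysread above reads at most 1000000 bytes, so anything
# larger is silently truncated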

# Write passed content to passed file
sub writeFile {
	if ( open FH, '>', shift ) {
		binmode FH;
		print FH shift;
		close FH;
		}
	}
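
# NOTE: writeFile truncates and rewrites in place with no locking, so a
# concurrent reader can catch a partially written file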

# Determine if online from available IP interfaces
# - always true unless dialup specified in args
sub online {
	return 1 unless $dialup;
	if ($ux) { return $1 if `ifconfig` =~ m/^\s*inet.+?((\d+)\.\d+\.\d+\.\d+)/m and $2 != 192 and $2 != 127 }
	return $1 if `ipconfig /all` =~ m/IP Address.+?((\d+)\.\d+\.\d+\.\d+)/m and $2 != 192 and $2 != 127;
	}
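
# (an address with first octet 127 or 192 is treated as offline here, i.e.
# loopback or a typical private LAN address rather than a live dialup link)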

# Add an item to the output log (and print if debugging)
sub logAdd {
	my $entry = shift;
	print localtime()." : $entry\n";
	}
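
# NOTE: despite the "if debugging" comment, logAdd currently prints
# unconditionally (the wikid wrapper presumably redirects this to the log file)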

# Generate a guid
sub guid {
	return 'p'.(rand()+time);
	}
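
# NOTE: this yields ids like p1175468000.0468 rather than a true guid; two
# peers starting within the same second could collide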

# For some reason there's no strtotime() equivalent in Perl's default install
# - this is a temp solution working for hh:mm.+d|dd.+mmm|mmmm.+yy|yyyy format
sub cmpTimes {
	my @times = @_;
	my $i = 0;
	for (@times) {
		for my $m ('jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec') {
			s/$m\w*/$i%12+101/ie;
			$i++;
			}
		s/^\D*(\d\d):(\d\d)\D+(\d+)\D+(\d\d\d)\D+(\d\d)\D*$/20$5$4$3$1$2/g;
		s/^\D*(\d\d):(\d\d)\D+(\d+)\D+(\d\d\d)\D+(\d\d\d\d)\D*$/$5$4$3$1$2/g;
		}
	return $times[1] > $times[0] ? -1 : $times[0] > $times[1];
	}
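
# Worked example: cmpTimes('03:15, 2 apr 2007', '11:30, 1 apr 2007') rewrites
# both strings to numbers (200710420315 vs 200710411130) and returns 1, i.e.
# the first time is the more recent
# NOTE: single-digit days aren't zero-padded, so e.g. '10 apr' produces a
# longer (numerically larger) value than '2 may' and can misorder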

# Main loop
&serverIterate while (1);
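
# NOTE: serverIterate never returns, so the synchronisation code below is
# unreachable as written; presumably it ran standalone before the server
# loop was added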

# The wiki-synchronisation needs to be changed to a queue which reduces one transfer at a time
my @wikis = (
	'Mint Media', 'http://www.mintmedia.co.nz/nad/wiki/index.php', 'sync.list', 'Nad', '******',
	'Div0', 'http://www.div0.com/wiki/index.php', 'sync.list', 'Nad', '******',
	'PollenWiki', 'http://localhost/wiki/index.php', 'sync.list', 'Nad', '******'
	);
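# Each record above is five flat fields: name, wiki url, sync-list page,
# user and password; the shifts below consume one record per iteration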
my $url = '';
my $od = 'http://www.organicdesign.co.nz/wiki/index.php';
while ( $#wikis > 0 ) {
	my $lasturl = $url;
	my $wiki = shift @wikis;
	$url = shift @wikis;
	my $list = shift @wikis;
	my $user = shift @wikis;
	my $pass = shift @wikis;
	# Log out of current wiki and log in to next one in list
	wikiLogout $lasturl if $lasturl and $lasturl ne $url;
	if ( wikiLogin $url, $user, $pass ) {
		# If logged in successfully, get sync-list and loop through valid entries
		for ( grep /^\*/, split /\n/, wikiRawPage $url, $list ) {
			if ( m/^\*\s*\[+\s*(.+?)\s*\]+/ ) {
				my $page = $1;
				# Get and compare timestamps of src and dst articles
				my $tdst = wikiLastEdit $url, $page;
				my $tsrc = wikiLastEdit $od, $page;
				my $tdiff = cmpTimes $tsrc, $tdst;
				# If od is more recent, update
				if ($tdiff > 0) {
					logAdd "\t$page out of date ($tsrc  >  $tdst)";
					my $content = wikiRawPage $od, $page;
					my $action = $tdst ? 'updated' : 'created';
					wikiPageEdit $url, $page, $content, "Article $action from OrganicDesign by $peer for $user";
					}
				}
			}
		}
	# Logout
	wikiLogout $url if $url;
	}