Commands.pl

From Organic Design wiki

Revision as of 20:11, 16 February 2007

use Net::SCP::Expect;
use POSIX qw( strftime );   # strftime is used by the backup subs below (may already be loaded by the main daemon script)

# Get initial IP from local wiki log

$::IP = '';
if ( wikiRawPage( $::wiki, $::wikilog ) =~ /^.+to ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)/sm ) {
    $::IP = $1;
    logAdd "Last IP obtained from Network Log: $::IP";
}
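# Editor's note (hypothetical log excerpt): because the match above is greedy and
# uses /s, it picks up the IP from the *last* "changed to <IP>" entry in the log, e.g.
#   my $log = "*Mon : External IP has changed to 203.0.113.5\n"
#           . "*Tue : External IP has changed to 203.0.113.9\n";
#   $log =~ /^.+to ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)/sm;   # $1 is "203.0.113.9"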

sub command {

    my $command = shift;
    $command =~ /(.+?)(\((.+)\))?$/;
    my ( $title, $args ) = ( $1, $2 );
    my $article;
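    # Editor's note: with a command such as "wikiBackup(od,wiki,bender)" (a
    # hypothetical example), $1 is the bare title "wikiBackup", $2 the
    # parenthesised part "(od,wiki,bender)" and $3 the inner argument list
    # "od,wiki,bender", so $args as assigned above still carries the
    # surrounding parentheses.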

    # pon/poff
    if    ($title eq 'ppp')  { $article = `ifconfig` }
    elsif ($title eq 'pon')  { `pon`; $article = `tail -n 1 /var/log/syslog|grep pppd`; }
    elsif ($title eq 'poff') { `poff`; $article = `tail -n 3 /var/log/syslog|grep pppd`; }

    # syslog
    elsif ($title eq 'syslog') { $article = `tail -n 50 /var/log/syslog` }

    # peer log
    elsif ($title eq 'peerlog') { $article = qx( tail -n 50 ../$peer.log ) }

    # reboot
    elsif ($title eq 'reboot') { $article = `shutdown -r now` }

    # shutdown
    elsif ($title eq 'shutdown') { $article = `halt` }

    # ps
    elsif ($title eq 'env') {
        $^V =~ m/(.)(.)(.)/;
        my $ver = ord($1).'.'.ord($2).'.'.ord($3);
        $cmd =~ /^(.+?)\s+(.+)/;
        $article  = "Environment:\nOS:\t$^O\nPERL:\t$1 ($ver)\nDaemon:\t$2\n\n";
        $article .= "Current instances of $::peer:\n";
        $article .= qx( ps aux|grep "$::daemon\[:] $::peer" );
    }
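    # Editor's note: on the Perl 5.8-era builds this was written for, $^V is a
    # v-string whose characters are the version components, so ord() on each
    # recovers e.g. 5, 8 and 8; sprintf( '%vd', $^V ) is an equivalent one-liner.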

    # fileSync
    elsif ($title eq 'fileSync') { $article = 'Manually executing fileSync()...'; spawn 'fileSync'; }

    # wikiSync
    elsif ($title eq 'wikiSync') { $article = 'Manually executing wikiSync()...'; spawn 'wikiSync'; }

    # wikiBackup
    elsif ($title eq 'wikiBackup') {
        $article = 'Manually executing wikiBackup()...';
        spawn 'wikiBackup', 'od', 'wiki', lc $::peer     if $::peer eq 'Bender';
        spawn 'wikiBackup', 'closet', 'wiki', lc $::peer if $::peer eq 'Gir';
        spawn 'wikiBackup', 'ea', 'meridian', lc $::peer if $::peer eq 'helios';
    }

    # peerBackup
    elsif ($title eq 'peerBackup') { $article = 'Manually executing peerBackup()...'; spawn 'peerBackup'; }

    # serverBackup
    elsif ($title eq 'serverBackup') { $article = 'Manually executing serverBackup()...'; spawn 'serverBackup'; }

    # scpBackups
    elsif ($title eq 'scpBackups') {
        $article = 'Manually executing scpBackups()...';
        spawn 'scpBackups', 'od', 'gir.peerix.org'       if $::peer eq 'Bender';
        spawn 'scpBackups', 'gir', 'organicdesign.co.nz' if $::peer eq 'Gir';
        spawn 'scpBackups', 'ma', 'organicdesign.co.nz'  if $::peer eq 'helios';
    }

    # swfCompile
    elsif ($title eq 'swfCompile') { $article = swfCompile() }

    # Restart
    # - For some reason it fucks up if it doesn't wait for a minute
    # - 2005-12-09 tried closing handles and server->shutdown(SHUT_RDWR) but made no difference
    elsif ($title eq 'restart') { restart() }

    elsif ($title eq 'stop') { logAdd "$daemon is stopping"; killChildren(); exit; }

    else { logAdd $article = "Unknown command: $command" }

    return $article;
}

# Restart the peer

sub restart {
    logAdd "$daemon is restarting using: $::cmd";
    killChildren();
    exec "sleep 1; $::cmd";
}

# Kill all processes related to this peer

sub killChildren {
    for ( split /\n/, qx( ps x|grep "$::daemon\[:] $::peer" ) ) {
        if ( /^\s*([0-9]+).+?\d+:\d\d\s*(.+)/ and $1 != $$ ) {
            qx( kill $1 );
            logAdd "Kill $2 ($1)";
        }
    }
    logAdd "Kill $0 ($$)";
}
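# Editor's note: the "[:]" in the grep pattern above is the usual trick for keeping
# grep from matching its own process - grep's command line contains the literal
# brackets rather than the "<daemon>: <peer>" text it is searching for - and the
# $1 != $$ test then skips the currently running instance as well.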

# Backup wiki LocalFS cache

sub peerBackup {
    my $file = '/var/www/wiki/peer.tgz';
    qx( tar -czf $file ./ );
    my $size = int( [stat $file]->[7]/104857.6 + 0.5 )/10;
    my $comment = "Peer Backup: $file ($size\MB)";
    logAdd $comment;
    wikiPageAppend( $::wiki, $::wikilog, "\n*".localtime()." : $comment", $comment );
}
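# Editor's note: the size expression above divides the stat() byte count by
# 104857.6 (one tenth of a mebibyte) and rounds, giving megabytes to one decimal
# place, e.g. for a hypothetical 36700160-byte archive:
#   int( 36700160/104857.6 + 0.5 )/10;   # 35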

# Backup the server image
# - this backup can be unpacked over a minimal Debian install, or even over our VPS's default RedHat install
# - use the [[Debian Conversion]] notes for the unpacking process

sub serverBackup {
    my $name = '/server-image-'.strftime( '%Y-%m-%d', localtime ).'.t7z';
    my $incl = '/bin /var /etc /srv /sbin /root /lost+found /lib /usr';
    my $excl = '';
    $excl .= " --exclude='$_'" for (
        '/var/backups/*.gz', '/var/tmp/*', '/var/www/*', '/var/peerd',
        '/var/cache/apt/archives/*.deb', '/var/log/*.gz', '/var/run/mail/*',
        '/var/log/apache2/*', '/var/log/wtmp*', '/var/log/auth.*',
        '/var/log/mysql*', '/var/lib/mysql/wiki', '/var/lib/mysql/ib*'
    );
    qx( rm -R $name );
    qx( tar pcf - $incl $excl | 7za a -si -t7z -m0=lzma -mx=9 $name );
    my $hash = $1 if qx( md5sum $name ) =~ /^(\S+)/;
    my $size = int( [stat "$name"]->[7]/1048576 + 0.5 );
    my $comment = "Server image created: $name (size:$size\MB hash:$hash)";
    logAdd $comment;
    wikiPageAppend( $::wiki, $::wikilog, "\n*".localtime()." : $comment", $comment );
}

# Backup wiki database

sub wikiBackup {
    my $lpf  = shift;
    my $db   = shift;
    my $user = shift;
    my $name = "$lpf-wiki-db-".strftime( '%Y-%m-%d', localtime );
    my $ext  = '.t7z';
    chdir '..';
    mkdir $name;
    if ($lpf eq 'od') {
        qx( cp -R /var/www/od $name/od );
        qx( rm -fr $name/od/wiki/tmp );
        qx( rm -fr $name/od/wiki/bin/* );
        qx( rm -fr $name/od/wiki/images/temp );
        qx( rm -fr $name/od/wiki/images/math );
        qx( rm -fr $name/od/wiki/images/thumb );
    }
    else { qx( cp -R /var/www/wiki/images $name/images ) }
    qx( cp -R /var/www/azzuro/wiki $name/azzuro ) if $::peer eq 'helios';
    qx( mysqldump $db -u $user --password='$::pwd1' > $name/$db.sql );
    qx( rm $name$ext );
    qx( tar pcf - $name | 7za a -md=32m -si -t7z -m0=lzma -mx=9 $name$ext );
    qx( rm -R $name );
    chmod( $::peer eq 'helios' ? 0666 : 0600, "$name$ext" );
    my $hash = $1 if qx( md5sum $name$ext ) =~ /^(\S+)/;
    my $size = int( [stat "$name$ext"]->[7]/104857.6 + 0.5 )/10;
    my $comment = "Wiki Backup: $name$ext (size:$size\MB hash:$hash)";
    chdir 'peer';
    logAdd $comment;
    wikiPageAppend( $::wiki, $::wikilog, "\n*".localtime()." : $comment", $comment );
}
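# Editor's note: the chdir '..' / chdir 'peer' pair above, together with the
# "../" paths used by the peerlog and scpBackups code, implies the daemon runs
# from a "peer" sub-directory and writes its backup archives into the parent
# directory.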

# Transfer the day's backup over SSH using SCP

sub scpBackups {
    my $lpf = shift;
    if ($#_ > 0) { spawn 'scpBackups', $lpf, $_ for @_ }
    else {
        my $domain = shift;
        my $name = $lpf.'-wiki-db-'.strftime( "%Y-%m-%d", localtime ).'.t7z';
        my $lp = lc $::peer;
        logAdd "scp ../$name $lp\@$domain:/home/$lp";
        my $scp = new Net::SCP::Expect(
            host     => $domain,
            user     => $lp,
            password => $::pwd1,
            auto_yes => 1,
            preserve => 1
        );
        $scp->scp( "../$name", "/home/$lp" );
        my $comment = "$name transferred to $domain";
        wikiPageAppend( $::wiki, $::wikilog, "\n*".localtime()." : $comment", $comment );
    }
    logAdd "Exit.";
}
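# Editor's note: when more than one domain is passed, the $#_ > 0 branch above
# re-spawns one scpBackups child per domain rather than transferring serially.
# Hypothetical calls (names taken from the dispatcher above):
#   scpBackups( 'od', 'gir.peerix.org' );                          # one domain: scp directly
#   scpBackups( 'od', 'gir.peerix.org', 'organicdesign.co.nz' );   # two domains: one child each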

# Clear sandbox

sub clearSandbox {
    wikiPageEdit( $::wiki, $_, '
Welcome to the Organic Design wiki Sandbox! This page allows you to carry out experiments. To edit, click here or edit this page above (or the views section for obscure browsers), make your changes and click the Save page button when finished. Content will not stay permanently; this page is automatically cleaned every 24 hours by User:Bender. If too full please use: 18.226.251.72/Sandbox

Content added here will not stay permanently; this page is cleared regularly. Click here to reset the sandbox.
', "Clearing sandbox: $_" ) for @_;
}
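# Editor's note: clearSandbox takes a list of page titles and resets each one;
# a hypothetical call for a single sandbox page:
#   clearSandbox( 'Sandbox' );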

# Update IP with zoneedit.com/namecheap.com if changed

sub dynamicDNS {
    my $routerIP    = shift;
    my $routerModel = shift;
    my $domain      = shift;
    my @subDomains  = @_;

    while (1) {

        # Get external IP from local router depending on make and model
        my $lasti = $::IP;
        my $i = '0.0.0.0';
        if ($routerModel eq 'Dynalink:RTA230') {
            my $url = "http://admin:$::pwd2\@$routerIP/wancfg.cmd?action=view";
            $i = $1 if $::client->get($url)->content =~ /(\d+\.\d+\.\d+\.\d+)/;
        }
        elsif ($routerModel eq 'LinkSys:WAG54G') {
            my $url = "http://admin:$::pwd2\@$routerIP/Status_Router.asp";
            $i = $1 if $::client->get($url)->content =~ /IP Address.+?(\d+\.\d+\.\d+\.\d+)/sm;
        }
        elsif ($routerModel eq 'Dlink:DSL-G604T') {
            my $url = "http://admin:$::pwd2\@$routerIP/cgi-bin/webcm";
            my %form = (
                getpage     => '../html/status/deviceinfofile.htm',
                'var:mycon' => 'connection0',
                'var:conid' => 'encaps0'
            );
            $i = $1 if $::client->post( $url, \%form )->content =~ /IP Address.+?IP Address.+?(\d+\.\d+\.\d+\.\d+)/sm;
        }

        # Log if the IP has just become 0.0.0.0
        if ( ($lasti ne '0.0.0.0') && ($i eq '0.0.0.0') ) { logAdd 'External IP address has changed to 0.0.0.0' }

        # If IP changed, update DNS servers
        if ( ($i ne '0.0.0.0') && ($i ne $::IP) ) {
            my $comment;
            my $tries = 5;
            while ($tries > 0) {
                sleep 300 if $tries-- < 5;
                $comment = "External IP has changed to $i";
                my @succeeded = ();
                my @failed = ();
                wikiLogin( 'http://www.organicdesign.co.nz/wiki/index.php', $::peer, $::pwd1 );
                for my $subDomain (@subDomains) {
                    my $url = $domain =~ /\.co\.nz$/
                        ? "http://ADunkley:$::pwd3\@dynamic.zoneedit.com/auth/dynamic.html?host=$subDomain.$domain"
                        : "http://dynamicdns.park-your-domain.com/update?host=$subDomain&domain=$domain&password=$::pwd3";
                    $::client->get($url)->is_success
                        ? push( @succeeded, "$subDomain.$domain" )
                        : push( @failed, "$subDomain.$domain" );
                }
                if ( ($#failed >= 0) && ($tries == 0) ) {
                    $comment .= ', DNS updates failed!';
                    wikiPageAppend(
                        'http://www.organicdesign.co.nz/wiki/index.php',
                        'user_talk:nad',
                        "\n----\n$comment.\n\nThese domains failed:\n*".join( "\n*", @failed )."\n:--~~"."~~\n",
                        "Sorry to bother you, but I need help updating some domains :-/"
                    );
                }
                $tries = 0 if $#failed < 0;
            }

            # update the IP log on the wiki
            logAdd $comment;
            wikiPageAppend( $::wiki, $::wikilog, "\n*".localtime()." : $comment", $comment );

            $::IP = $i;
        }

        sleep 10;
    }
}
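# Editor's note: dynamicDNS loops forever, polling the router every 10 seconds,
# so it is presumably run in its own child process. A hypothetical invocation
# (the router address and sub-domain list are examples only; the model string
# and domain appear in the code above):
#   dynamicDNS( '192.168.1.1', 'LinkSys:WAG54G', 'organicdesign.co.nz', 'www', 'wiki' );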