From 94a997cd5df17621d23822bfe56079b10e763fba Mon Sep 17 00:00:00 2001
From: Marc Hoppe
Date: Fri, 18 Oct 2019 21:23:53 +0200
Subject: [PATCH] updated

---
 SyncPOD                              |  862 --------
 backup.sh => alt/backup.sh           |   10 +-
 backupbooks.sh => alt/backupbooks.sh |    0
 backupmp3.sh => alt/backupmp3.sh     |    0
 backuproot.sh => alt/backuproot.sh   |    0
 bakupnasdata.sh => backupdata.sh     |   12 +-
 bakupnasgps.sh => backupgps.sh       |    2 +-
 bakupme.sh => backupme.sh            |    8 +-
 bakupsrc.sh => backupsrc.sh          |   10 +-
 bakdb                                |    8 +-
 cleanowrtrepo.sh                     |   13 +
 dropboxstart.sh                      |    1 +
 fv                                   |    3 +-
 fvl                                  |   15 +
 git-dir2repo                         |   25 +
 git-imerge                           | 2797 ++++++++++++++++++++++++++
 git-mv-test                          |  234 +++
 git-remove-history                   |   25 +
 git-submodule-rewrite                |  226 +++
 mirrors.update                       |   10 +-
 nfs-tools                            |    2 +
 podcastupdate.sh                     |   12 +-
 podcatcher                           | 2528 +++++++++++++++++++++++
 reinstall                            |    1 +
 repo                                 |  874 ++++++++
 swapon.sh                            |    1 +
 syncauto.sh                          |    3 +-
 taz-download.sh                      |    4 +-
 tazsync.sh                           |   10 +-
 vdr-reload.sh                        |   16 +
 vdr2mp3.rb                           |   54 +
 vdr2mp3.sh                           |   18 +
 zeitsync.sh                          |   28 +
 33 files changed, 6915 insertions(+), 897 deletions(-)
 delete mode 100755 SyncPOD
 rename backup.sh => alt/backup.sh (68%)
 rename backupbooks.sh => alt/backupbooks.sh (100%)
 rename backupmp3.sh => alt/backupmp3.sh (100%)
 rename backuproot.sh => alt/backuproot.sh (100%)
 rename bakupnasdata.sh => backupdata.sh (71%)
 rename bakupnasgps.sh => backupgps.sh (93%)
 rename bakupme.sh => backupme.sh (81%)
 rename bakupsrc.sh => backupsrc.sh (75%)
 create mode 100644 cleanowrtrepo.sh
 create mode 100644 dropboxstart.sh
 create mode 100755 fvl
 create mode 100644 git-dir2repo
 create mode 100755 git-imerge
 create mode 100644 git-mv-test
 create mode 100755 git-remove-history
 create mode 100755 git-submodule-rewrite
 create mode 100644 nfs-tools
 create mode 100755 podcatcher
 create mode 100644 reinstall
 create mode 100755 repo
 create mode 100644 swapon.sh
 create mode 100644 vdr-reload.sh
 create mode 100644 vdr2mp3.rb
 create mode 100644 vdr2mp3.sh
 create mode 100644 zeitsync.sh

diff --git a/SyncPOD b/SyncPOD
deleted file mode 100755
index 49352b5..0000000
--- a/SyncPOD
+++ /dev/null
@@ -1,862 +0,0 @@
-#!/usr/bin/perl -w
-
-# (c) 2002 Armin Obersteiner
-# License: GPL v2
-
-use MP3::Info;
-use Unicode::String qw( latin1 utf16 );
-use Shell qw ( find gzip );
-use Getopt::Std;
-use File::Copy;
-use Filesys::DiskFree;
-
-use Data::Dumper qw (Dumper);
-
-use strict;
-
-my $version="0.68";
-
-#
-# options & config
-#
-
-my %opt;
-getopts("fcnh",\%opt);
-
-if($opt{h}) {
-  print <<"EOF";
-$0 [-c] [-f] [Search Pattern 1] [Search Pattern 2] ...
- -c create: create directory structure on plain ipod before syncing
-    (default: you get a warning if there is no ipod structure)
-
- -f force: rename ipod and use it with $0 before syncing
-    (default: an unknown ipod stays untouched)
-
- -n name check: checks mp3 names for possible illegal characters
-
- Search Patterns: for each search pattern a playlist is created
-    (case insensitive)
-EOF
-  exit;
-}
-
-my $buffer = 5*1024*1024; # leave some MB free for iTunesDB
-
-my @required = qw ( SYNCMODE PLAYLISTDIR IPODDIR BACKUPDIR );
-
-my $rc=readrc("$ENV{HOME}/.ipod/config",\@required);
-
-#print Dumper($rc);
-
-
-#
-# check ipod name
-#
-
-my ($ipod_name, $real_name, $computer_name)=get_ipodname($rc->{IPODDIR});
-unless($ipod_name) {
-  die "IPOD dir not found: $rc->{IPODDIR}" unless $opt{c};
-}
-
-#
-# check ipod dirs (recreate them if necessary)
-#
-
-mkdir "$rc->{IPODDIR}/iPod_Control",0755 unless(-d "$rc->{IPODDIR}/iPod_Control");
-mkdir "$rc->{IPODDIR}/iPod_Control/Music",0755 unless(-d "$rc->{IPODDIR}/iPod_Control/Music");
-mkdir "$rc->{IPODDIR}/iPod_Control/iTunes",0755 unless(-d "$rc->{IPODDIR}/iPod_Control/iTunes");
-mkdir "$rc->{IPODDIR}/iPod_Control/Device",0755 unless(-d "$rc->{IPODDIR}/iPod_Control/Device");
-for(0..19) {
-  my $d=sprintf "%.2d",$_;
-  mkdir "$rc->{IPODDIR}/iPod_Control/Music/F$d",0755 unless(-d "$rc->{IPODDIR}/iPod_Control/Music/F$d");
-}
-
-unless($opt{c}) {
-  print STDERR "IPOD name: $ipod_name\n";
-  print STDERR "Synced by: $real_name\n";
-  print STDERR "Synced on: $computer_name\n";
-
-  if($rc->{WRITEDEVICEINFO} && !$opt{f}) {
-    my $exit=0;
-    unless($rc->{IPODNAME} eq $ipod_name) {
-      $exit=1;
-      print STDERR "Your IPOD name: $rc->{IPODNAME}\n";
-    }
-    unless($rc->{REALNAME} eq $real_name) {
-      $exit=1;
-      print STDERR "Your real name: $rc->{REALNAME}\n";
-    }
-    unless($rc->{COMPUTERNAME} eq $computer_name) {
-      $exit=1;
-      print STDERR "Your computer: $rc->{COMPUTERNAME}\n";
-    }
-    die "names mismatch, use -f to override" if $exit;
-  }
-  print STDERR "\n";
-}
-
-#
-# write ipod name
-#
-
-if($rc->{WRITEDEVICEINFO}) {
-  set_ipodname(
-    $rc->{IPODDIR},$rc->{BACKUPDIR},
-    $rc->{IPODNAME},$rc->{REALNAME},$rc->{COMPUTERNAME}
-  );
-  $ipod_name=$rc->{IPODNAME};
-}
-
-#
-# check for songs
-#
-
-my %songs;
-my %check;
-
-my $dir;
-$dir=$rc->{IPODDIR}."/iPod_Control/Music";
-$dir=$rc->{SYNCDIR} if($rc->{SYNCMODE} >= 2);
-
-my %tosync;
-if(($rc->{SYNCLIST}) && ($rc->{SYNCMODE} == 2)) {
-  open IN,$rc->{SYNCLIST} or die "all-playlist: $rc->{SYNCLIST} not found";
-  while(<IN>) {
-    chomp;
-    $tosync{$_}=1;
-  }
-  close IN;
-}
-
-my @mp3s;
-if(($rc->{SYNCMODE} == 3)) {
-  my @pl=find("$rc->{PLAYLISTDIR}/* 2>/dev/null");
-  my %test;
-
-  for my $p (@pl) {
-    chomp $p;
-    my ($n) = $p =~ /.*\/(.*?)$/;
-    open IN,$p or die "playlist: $p could not be opened";
-    while(<IN>) {
-      unless($test{$_}) {
-        push @mp3s,$_;
-        $test{$_}=1;
-      }
-    }
-  }
-} else {
-  @mp3s=find($dir);
-}
-
-for(@mp3s) {
-  chomp $_;
-  next unless(/\.(m|M)(p|P)3$/);
-  my $name=$_;
-
-  if(keys %tosync) {
-    next unless($tosync{$name});
-  }
-
-  if($opt{n}) {
-    die "illegal character in filename [$name]\n" unless ($name =~ /^[A-Za-z0-9\.\-_\/\,]+$/);
-  }
-
-  s/\://g;
-  s/.*\///g;
-  $songs{$name}{name}=$_;
-  if($rc->{SYNCMODE} >= 2) {
-    $songs{$name}{dir}="F".hash($_);
-  } else {
-    ($songs{$name}{dir}) = $name =~ /\/(F\d\d)\//;
-  }
-
-  {
-    my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
-        $atime,$mtime,$ctime,$blksize,$blocks) = stat($name);
-    $songs{$name}{size}=$size;
-    $songs{$name}{date}=$mtime;
-  }
-
-  my $tag;
-  $tag = 
get_mp3tag($name) unless($rc->{ALWAYSTEMPLATES}); - - my ($artist,$album,$title,$order,$_dummy_); - - if($tag) { - # print Dumper($tag); - # YEAR ARTIST COMMENT TRACKNUM TITLE ALBUM GENRE - $artist=$tag->{ARTIST}; - $album=$tag->{ALBUM}; - $title=$tag->{TITLE}; - $order=$tag->{TRACKNUM}; - $order=$1 if($order =~ /(\d+)\s*\//); - - } else { - for(sort {length($b) <=> length($a)} keys %{$rc->{FILETEMPLATES}}) { - if(my @x = $name =~ /$_/) { - my $c=0; - for my $x (@x) { - #print "\$$rc->{FILETEMPLATES}->{$_}->[$c]=\"$x\";\n"; - eval "\$$rc->{FILETEMPLATES}->{$_}->[$c]=\"$x\";"; - die "eval error: $@" if($@); - $c++; - } - last; - } - } - } - - unless($title) { - die "no title found in: $name"; - } - - $title =~ s/_/ /g; - $artist =~ s/_/ /g; - $album =~ s/_/ /g; - - $songs{$name}{title}=$title; - $songs{$name}{artist}=""; - $songs{$name}{album}=""; - $songs{$name}{order}=0; - $songs{$name}{artist}=$artist if $artist; - $songs{$name}{album}=$album if $album; - $songs{$name}{order}=$order if $order; - - my $info = get_mp3info ($name); - - $songs{$name}{size}=$info->{SIZE}; - $songs{$name}{bitrate}=$info->{BITRATE}; - $songs{$name}{duration}=int($info->{SECS}*1000); - $songs{$name}{vbr}=$info->{VBR}; - - #print Dumper($info); - - my $n=$songs{$name}{dir}."/".$songs{$name}{name}; - unless($check{$n}) { - $check{$n}=1; - } else { - die "songname: $songs{$name}{name} not unique"; - } -} - -# -# deleting unwanted songs -# - -my %known; -for(keys %songs) { - $known{$songs{$_}{name}}=1; -} - -#print Dumper(\%known); - -my @ipod = find ("$rc->{IPODDIR}/iPod_Control/Music"); -my @todel; -for(@ipod) { - next unless (/\.mp3$/i); - chomp; - - my ($name) = $_ =~ /\/([^\/]+\.mp3)$/i; - unless($known{$name}) { - push @todel,$_; - } -} - -my $del; -if($rc->{DELETEASK} && @todel) { - for(@todel) { - print "del: $_\n"; - } - print "Do you really want to delete this songs? 
(y/N) ";
-  my $in=<STDIN>;
-  chomp $in;
-  $del=1 if($in =~ /^y$/i);
-} else {
-  $del=1;
-}
-
-if($del) {
-  for(@todel) {
-    print STDERR "deleting: $_\n";
-    unlink($_);
-  }
-}
-
-#
-# copy songs
-#
-
-my $main_sl="";
-my $main_pl="";
-my $index=500;
-
-#print Dumper(\%songs);
-
-my $df = new Filesys::DiskFree;
-
-SONGS: for my $song (keys %songs) {
-  my $attr;
-  my $out="";
-  my $attr_c=3;
-
-  if($rc->{SYNCMODE} >= 2) {
-    my $to = "$rc->{IPODDIR}/iPod_Control/Music/$songs{$song}{dir}/$songs{$song}{name}";
-    #my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
-    #    $atime,$mtime,$ctime,$blksize,$blocks) = stat($to);
-    #$size=0 unless $size;
-    #print "checking: $song [$songs{$song}{size}] -> $to [$size]\n";
-    #if($size != $songs{$song}{size}) {
-
-    unless(-e $to) {
-      print STDERR "syncing: $songs{$song}{name}\n";
-      # cp "\"$song\" \"$to\"";
-
-      $df->df();
-      my $free=$df->avail($rc->{IPODDIR});
-
-      if($free-$songs{$song}{size}-$buffer>0) {
-        copy($song,$to);
-      } else {
-        print STDERR "no space availiable for: $songs{$song}{name} [$songs{$song}{size}]\n";
-        delete $songs{$song};
-        next SONGS;
-      }
-    }
-  }
-
-  $songs{$song}{index}=$index;
-
-  $out.=create_mhod($songs{$song}{title},1);
-
-  if($songs{$song}{artist}) {
-    $attr_c++;
-    $out.=create_mhod($songs{$song}{artist},4);
-  }
-  if($songs{$song}{album}) {
-    $attr_c++;
-    $out.=create_mhod($songs{$song}{album},3);
-  }
-
-  $out.=create_mhod("MPEG audio file",6);
-  $out.=create_mhod(":iPod_Control:Music:".$songs{$song}{dir}.":".$songs{$song}{name},2);
-
-  $out=create_mhit(
-    $attr_c,length($out),$index,$songs{$song}{vbr},
-    $songs{$song}{date},$songs{$song}{size},
-    $songs{$song}{duration},$songs{$song}{order},
-    $songs{$song}{bitrate}
-  ).$out;
-
-  $main_sl.=$out;
-
-  $main_pl.=create_mhod_mhip($songs{$song}{index});
-
-  $index++;
-}
-
-#print Dumper(\%songs);
-
-my %playlists;
-my @pl=find("$rc->{PLAYLISTDIR}/* 2>/dev/null");
-
-for my $p (@pl) {
-  chomp $p;
-  my ($n) = $p =~ /.*\/(.*?)$/;
-  open IN,$p or die "playlist: $p could not be opened";
-  while(<IN>) {
-    my $song=$_;
-    chomp $song;
-
-    unless($songs{$song}) {
-      print STDERR "ignoring song in playlist [$p], [$song] does not exist in syncdir or ipod full\n";
-    } else {
-      $playlists{$n}{raw}.=create_mhod_mhip($songs{$song}{index});
-      $playlists{$n}{count}++;
-    }
-  }
-  close IN;
-}
-
-#
-# creating search pattern playlists
-#
-
-for my $pattern (@ARGV) {
-  my @list;
-  for(keys %songs) {
-    push @list,$songs{$_}{index} if($_ =~ /$pattern/i);
-  }
-  unless(@list) {
-    print STDERR "nothing for searchpattern: $pattern found\n";
-  } else {
-    my ($name)=$pattern=~/(\S\S\S+)/;
-    unless(length($name)>=3) {
-      $name=$pattern;
-      $name =~ s/[^A-Za-z0-9]//g;
-    }
-    for(@list) {
-      $playlists{$name}{raw}.=create_mhod_mhip($_);
-      $playlists{$name}{count}++;
-    }
-    print STDERR @list." 
songs for searchpattern: $pattern found\n"; - } -} - -#print Dumper(\%playlists); - -# -# build the pieces together -# - -my $output; - -my $song_c=keys %songs; - -print STDERR "\nFound songs: $song_c\n"; - -my $tmp=create_mhlt($song_c).$main_sl; -$main_sl=create_mhsd(96+length($tmp),1).$tmp; - -print STDERR "Songlist created\n"; - -my $pl_c=keys %playlists; - -print STDERR "\nFound additional playlists: $pl_c\n"; - -$tmp=create_mhlp($pl_c+1).create_playlist_main($ipod_name,$song_c).$main_pl; -print STDERR "\nMain playlist created: $song_c songs\n\n"; - -for(keys %playlists) { - $tmp.=create_playlist($_,$playlists{$_}{count}).$playlists{$_}{raw}; - print STDERR "Playlist \"$_\" created: $playlists{$_}{count} songs\n"; -} - -$main_pl=create_mhsd(96+length($tmp),2).$tmp; - - -$output=create_mhbd(104+length($main_sl.$main_pl)).$main_sl.$main_pl; - -# backup old iTunesDB -if(-e "$rc->{IPODDIR}/iPod_Control/iTunes/iTunesDB") { - my $t=time(); - copy("$rc->{IPODDIR}/iPod_Control/iTunes/iTunesDB","$rc->{BACKUPDIR}/iTunesDB_$t"); - gzip("$rc->{BACKUPDIR}/iTunesDB_$t"); -} - -open OUT,">".$rc->{IPODDIR}."/iPod_Control/iTunes/iTunesDB" or die "cannot write iTunesDB"; -print OUT $output; -close OUT; - -print STDERR "\niTunesDB created.\n"; -exit; -# END - - -# -# internal subroutines -# - -sub create_mhbd { - my ($size) = @_; - - my $r= "mhbd"; - $r.= pack "V",104; - $r.= pack "V",$size; - $r.= pack "V",1; - $r.= pack "V",1; - $r.= pack "V",2; - for(1..20) { - $r.= pack "V",0; - } - - return $r; -} - -sub create_mhlp { - my ($count) = @_; - - my $r= "mhlp"; - $r.= pack "V",92; - $r.= pack "V",$count; - for(1..20) { - $r.= pack "V",0; - } - - return $r; -} - -sub create_playlist { - my ($name,$anz) = @_; - - my $ipod_name=create_mhod($name,1); - - my $r= "mhyp"; - $r.= pack "V",108; - $r.= pack "V",108+648+length($ipod_name)+$anz*(76+44); - $r.= pack "V",2; - $r.= pack "V",$anz; - $r.= pack "V",0; - $r.= pack "V",3088620292; - $r.= pack "V",2317718671; - $r.= pack "V",3655876446; - for(1..18) { - $r.= pack "V",0; - } - - $r.= "mhod"; - $r.= pack "V",24; - $r.= pack "V",648; - $r.= pack "V",100; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",12714187; # ?? 12714187 - $r.= pack "V",26215000; - $r.= pack "V",0; - $r.= pack "V",65736; - $r.= pack "V",1; # ?? 1 - $r.= pack "V",6; # ?? 6 - $r.= pack "V",0; # ?? 0 - $r.= pack "V",2555905; # ?? 2555905 - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",13107202; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",3276813; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",8192004; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",8192003; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",5242888; - for(1..107) { - $r.= pack "V",0; - } - $r.= pack "V",140; - for(1..19) { - $r.= pack "V",0; - } - - return $r.$ipod_name; -} - -sub create_playlist_main { - my ($name,$anz) = @_; - - my $ipod_name=create_mhod($name,1); - - my $r= "mhyp"; - $r.= pack "V",108; - $r.= pack "V",108+648+length($ipod_name)+$anz*(76+44); - $r.= pack "V",2; - $r.= pack "V",$anz; - $r.= pack "V",1; - $r.= pack "V",3087491191; - $r.= pack "V",837788566; - $r.= pack "V",62365; - for(1..18) { - $r.= pack "V",0; - } - - $r.= "mhod"; - $r.= pack "V",24; - $r.= pack "V",648; - $r.= pack "V",100; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",13172927; # ?? 12714187 - $r.= pack "V",26215000; - $r.= pack "V",0; - $r.= pack "V",65736; - $r.= pack "V",5; # ?? 1 - $r.= pack "V",6; # ?? 6 - $r.= pack "V",3; # ?? 0 - $r.= pack "V",1179649; # ?? 
2555905 - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",13107202; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",3276813; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",8192004; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",8192003; - for(1..3) { - $r.= pack "V",0; - } - $r.= pack "V",5242888; - for(1..107) { - $r.= pack "V",0; - } - $r.= pack "V",140; - for(1..19) { - $r.= pack "V",0; - } - - return $r.$ipod_name; -} - -sub create_mhod_mhip { - my ($ref) = @_; - - my $r= "mhip"; - $r.= pack "V",76; - $r.= pack "V",76; - $r.= pack "V",1; - $r.= pack "V",0; - $r.= pack "V",$ref-1; - $r.= pack "V",$ref; - $r.= pack "V",3088619525; - for(1..11) { - $r.= pack "V",0; - } - - $r.="mhod"; - $r.= pack "V",24; - $r.= pack "V",44; - $r.= pack "V",100; - $r.= pack "V",0; - $r.= pack "V",0; - $r.= pack "V",$ref-1; - for(1..4) { - $r.= pack "V",0; - } - - return $r; -} - -sub create_mhsd { - my ($size,$type) = @_; - - my $r="mhsd"; - $r.= pack "V",96; - $r.= pack "V",$size; - $r.= pack "V",$type; - for(1..20) { - $r.= pack "V",0; - } - - return $r; -} - -sub create_mhlt { - my ($count) = @_; - - my $r="mhlt"; - $r.= pack "V",92; - $r.= pack "V",$count; - for(1..20) { - $r.= pack "V",0; - } - - return $r; -} - -sub create_mhit { - my ($arttr_c,$attr_s,$index,$vbr,$date,$size,$dur,$order,$bitrate) = @_; - - my $r="mhit"; - $r.= pack "V",156; - $r.= pack "V",156+$attr_s; - $r.= pack "V",$arttr_c; - $r.= pack "V",$index; - $r.= pack "V",1; - $r.= pack "V",0; - my $type=256; - $type+=1 if($vbr); - $r.= pack "V",$type; - $r.= pack "V",$date+2082844800; - $r.= pack "V",$size; - $r.= pack "V",$dur; - $r.= pack "V",$order; - $r.= pack "V",0; - $r.= pack "V",0; - $r.= pack "V",$bitrate; - $r.= pack "V",2890137600; - for(1..23) { - $r.= pack "V",0; - } - - return $r; -} - -sub create_mhod { - my ($string,$type) = @_; - my $len=length($string); - - my $r="mhod"; - $r.= pack "V",24; - $r.= pack "V",(40+2*$len); - $r.= pack "V",$type; - $r.= pack "V2",0; - $r.= pack "V",1; - $r.= pack "V",(2*$len); - $r.= pack "V2",0; - - my $u=latin1($string); - $u->byteswap; - $r.= $u->utf16; - - return $r; -} - -sub set_ipodname { - my ($dev,$backup,$name,$real,$cpu)=@_; - $dev.="/iPod_Control/iTunes/DeviceInfo"; - - my $file; - - for(1..384) { - $file.=pack "V",0; - } - - my $l=length($name); - substr($file,0,2)=pack "v",$l; - my $u=latin1($name); - $u->byteswap; - substr($file,2,$l*2)=$u->utf16; - - $l=length($real); - substr($file,512,2)=pack "v",$l; - $u=latin1($real); - $u->byteswap; - substr($file,514,$l*2)=$u->utf16; - - $l=length($cpu); - substr($file,1024,2)=pack "v",$l; - $u=latin1($cpu); - $u->byteswap; - substr($file,1026,$l*2)=$u->utf16; - - if(-e $dev) { - my $t=time(); - copy($dev,"$backup/DeviceInfo_$t"); - gzip("$backup/DeviceInfo_$t"); - } - open IPOD,">$dev" or die "cannot write DeviceInfo"; - print IPOD $file; - close IPOD; -} - -sub get_ipodname { - my $dev=shift; - $dev.="/iPod_Control/iTunes/DeviceInfo"; - my $file; - my $buff; - - open IPOD,$dev or return undef; - while (read(IPOD, $buff, 8 * 2**10)) { - $file.=$buff; - } - close IPOD; - - my $l=unpack "v",substr($file,0,2); - my $s=substr($file,2,$l*2); - my $u=utf16($s); - $u->byteswap; - my $name=$u->latin1; - - $l=unpack "v",substr($file,512,2); - $s=substr($file,514,$l*2); - $u=utf16($s); - $u->byteswap; - my $realname=$u->latin1; - - $l=unpack "v",substr($file,1024,2); - $s=substr($file,1026,$l*2); - $u=utf16($s); - $u->byteswap; - my $computername=$u->latin1; - - return ($name,$realname,$computername); -} - -sub hash { - my 
$string=shift;
-  my $key;
-
-  my $len=length($string);
-
-  for(my $j=$len-1 ; $j>1 ; $j--) {
-    $key+=ord(substr($string,$j,1));
-  }
-
-  return sprintf "%.2d",(substr($key,length($key)-2,2) % 20);
-}
-
-sub readrc {
-  my $file = shift;
-  my $req = shift;
-  my $rc;
-
-  my $sub;
-
-  open IN,$file or die "cannot open rc file: $file";
-  while(<IN>) {
-    next if /^\s*$/;
-    next if /^\s*#/;
-
-    if(/^\s*(\S+)\s*=\s*(.*?)\s*$/) {
-      my $k=$1;
-      my $n=$2;
-      ($n) = $n =~ /^\"(.*?)\"$/ if($n =~ /\"/);
-      unless($sub) {
-        $rc->{$k}=$n;
-      } else {
-        ($k) = $k =~ /^\"(.*?)\"$/ if($k =~ /\"/);
-        my @n=split /,/,$n;
-        for(@n) {
-          s/^\s+//g;
-          s/\s+$//g;
-          s/^\"//;
-          s/\"$//;
-        }
-        $rc->{$sub}->{$k}=\@n;
-      }
-    } elsif (/^\s*(\S+)\s*\{/) {
-      $sub=$1;
-    } elsif (/^\s*}/) {
-      $sub=undef;
-    }
-  }
-
-  if($rc->{SYNCMODE} == 2) {
-    push @$req,"SYNCDIR";
-  }
-  if($rc->{WRITEDEVICEINFO} == 1) {
-    push @$req,("IPODNAME","REALNAME","COMPUTERNAME");
-  }
-  if($rc->{ALWAYSTEMPLATES} == 1) {
-    push @$req,"FILETEMPLATES";
-  }
-
-  for my $d (keys %$rc) {
-    if($d =~ /DIR$/) {
-      $rc->{$d} =~ s/\~/$ENV{HOME}/;
-    }
-  }
-  $rc->{SYNCLIST} =~ s/\~/$ENV{HOME}/ if $rc->{SYNCLIST};
-
-  for(@$req) {
-    die "RC PARAMETER: $_ not found" unless($rc->{$_});
-  }
-  return $rc;
-}
diff --git a/backup.sh b/alt/backup.sh
similarity index 68%
rename from backup.sh
rename to alt/backup.sh
index d23ac67..be2ae44 100644
--- a/backup.sh
+++ b/alt/backup.sh
@@ -11,15 +11,17 @@ sudo dirvish-expire
 sudo dirvish-runall
 
 # GPS
-sudo rsync -vxae --progress --delete --exclude 'garmin/' /dat/gps/ $bakdir/gps/
+gpsdir=/dat/docu/gps/
+sudo rsync -vxae --progress --delete --exclude 'garmin/' $gpsdir $bakdir/gps/
 
 # vdr: structure of the recordings
-sudo -u vdr rsync -vxae --progress --delete --exclude '[0-9][0-9][0-9].vdr' --exclude '[0-9][0-9][0-9][0-9][0-9].ts' /media/nas/video/vdr/ $bakdir/video/
-
+videodir=/dat/video/vdr/
+sudo -u vdr rsync -vxae --progress --delete --exclude '[0-9][0-9][0-9].vdr' --exclude '[0-9][0-9][0-9][0-9][0-9].ts' $videodir $bakdir/video/
 
+srcdir=/dat/src
 echo "#!/bin/bash" > /tmp/backuphelp
 echo "dstdir=\$(dirname \$1)" >> /tmp/backuphelp
 echo "mkdir -p \$bakdir/git\$dstdir" >> /tmp/backuphelp
 echo "rsync -vxa --progress --delete \$1 \$bakdir/git\$dstdir" >> /tmp/backuphelp
-find /home/marc/ -type d -name .git -exec bash -x /tmp/backuphelp {} \;
+find $srcdir -type d -name .git -exec bash -x /tmp/backuphelp {} \;
diff --git a/backupbooks.sh b/alt/backupbooks.sh
similarity index 100%
rename from backupbooks.sh
rename to alt/backupbooks.sh
diff --git a/backupmp3.sh b/alt/backupmp3.sh
similarity index 100%
rename from backupmp3.sh
rename to alt/backupmp3.sh
diff --git a/backuproot.sh b/alt/backuproot.sh
similarity index 100%
rename from backuproot.sh
rename to alt/backuproot.sh
diff --git a/bakupnasdata.sh b/backupdata.sh
similarity index 71%
rename from bakupnasdata.sh
rename to backupdata.sh
index 3441771..6e84c88 100644
--- a/bakupnasdata.sh
+++ b/backupdata.sh
@@ -1,17 +1,19 @@
 #!/bin/sh
-REPOSITORY=pi@vdrpi:/media/hdext/borg
+REPOSITORY=pi@kodi:/media/hdext/borg
 PREFIX=nasdat
 
 borg create -v --stats --progress --compression zlib --one-file-system \
     $REPOSITORY::'nasdat-{now:%Y-%m-%d}' \
     /dat/books \
     /dat/audio \
-    /dat/docu/Foto \
+    /dat/docu/ \
     /dat/bak/db \
-    --exclude 'tmp' \
+    --exclude '*/tmp' \
     --exclude '*.iso' \
     --exclude '/podcast/cache' \
-    --exclude '/run'
+    --exclude '/dat/docu/dropbox' \
+    --exclude '/dat/docu/A' \
+    --exclude '/dat/docu/E'
 
 
@@ -20,5 +22,5 @@ borg create -v --stats --progress --compression zlib --one-file-system 
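+# A way to sanity-check the exclude patterns above before relying on them:
+# borg can report what it would back up without writing an archive (a sketch,
+# assuming borg 1.1+; $REPOSITORY and the source paths are the ones set above):
+#   borg create --list --dry-run $REPOSITORY::'nasdat-test' /dat/books /dat/audio /dat/docu/ /dat/bak/db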
# archives of THIS machine. The '{hostname}-' prefix is very important to # limit prune's operation to this machine's archives and not apply to # other machine's archives also. -borg prune -v $REPOSITORY --prefix 'nasdat-' \ +borg prune -v --stats $REPOSITORY --prefix 'nasdat-' \ --keep-daily=7 --keep-weekly=4 --keep-monthly=20 diff --git a/bakupnasgps.sh b/backupgps.sh similarity index 93% rename from bakupnasgps.sh rename to backupgps.sh index abf8f98..31511c3 100644 --- a/bakupnasgps.sh +++ b/backupgps.sh @@ -21,5 +21,5 @@ borg create -v --stats --progress --compression zlib --one-file-system \ # archives of THIS machine. The '{hostname}-' prefix is very important to # limit prune's operation to this machine's archives and not apply to # other machine's archives also. -borg prune -v $REPOSITORY --prefix $PREFIX- \ +borg prune -v --stats $REPOSITORY --prefix $PREFIX- \ --keep-daily=7 --keep-weekly=4 --keep-monthly=20 diff --git a/bakupme.sh b/backupme.sh similarity index 81% rename from bakupme.sh rename to backupme.sh index dcef6bd..df6c9c8 100644 --- a/bakupme.sh +++ b/backupme.sh @@ -1,10 +1,12 @@ #!/bin/sh -REPOSITORY=pi@vdrpi:/media/hdext/borg +REPOSITORY=pi@kodi:/media/hdext/borg +if [ "$1" != "--prune" ]; then borg create -v --stats --progress --compression zlib --one-file-system \ $REPOSITORY::'{hostname}-{now:%Y-%m-%d}' \ / \ --exclude '*/tmp/*' \ + --exclude '*/tmpfile/*' \ --exclude '/var/tmp/*' \ --exclude '/var/crash/*' \ --exclude '*/.cache/*' \ @@ -12,7 +14,7 @@ borg create -v --stats --progress --compression zlib --one-file-system \ --exclude '*/.ccache/*' \ --exclude '*/mlocate.db*' \ --exclude '/run' - +fi @@ -20,5 +22,5 @@ borg create -v --stats --progress --compression zlib --one-file-system \ # archives of THIS machine. The '{hostname}-' prefix is very important to # limit prune's operation to this machine's archives and not apply to # other machine's archives also. -borg prune -v $REPOSITORY --prefix '{hostname}-' \ +borg prune -v --stats $REPOSITORY --prefix '{hostname}-' \ --keep-daily=7 --keep-weekly=4 --keep-monthly=20 diff --git a/bakupsrc.sh b/backupsrc.sh similarity index 75% rename from bakupsrc.sh rename to backupsrc.sh index 037aa69..5f75d41 100644 --- a/bakupsrc.sh +++ b/backupsrc.sh @@ -1,5 +1,5 @@ #!/bin/sh -REPOSITORY=pi@vdrpi:/media/hdext/borg +REPOSITORY=pi@kodi:/media/hdext/borg PREFIX=nassrc borg create -v --stats --progress --compression zlib --one-file-system \ @@ -8,11 +8,17 @@ borg create -v --stats --progress --compression zlib --one-file-system \ --exclude '*/tmp/*' \ --exclude '*/.build*/' \ --exclude '*/build*/' \ + --exclude '*/rpi-build*/' \ + --exclude '*/dest*/' \ --exclude '*/cache*' \ --exclude '*/.cache/*' \ --exclude '*/.ccache/*' \ + --exclude '*/subtree-cache/*' \ --exclude '*/sstate*' \ --exclude '*/dl/*' \ + --exclude '*/downloads/*' \ + --exclude '*/sourcemirror/*' \ + --exclude '*/yocto-dl-sources.git/*' \ --exclude '*.o' \ --exclude '*.ko' \ --exclude '*.so' \ @@ -30,5 +36,5 @@ borg create -v --stats --progress --compression zlib --one-file-system \ # archives of THIS machine. The '{hostname}-' prefix is very important to # limit prune's operation to this machine's archives and not apply to # other machine's archives also. 
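+# The prune below can be previewed first: borg prune accepts --dry-run and
+# --list, which show which archives would be kept or deleted without changing
+# the repository (a sketch, assuming borg 1.x):
+#   borg prune -v --list --dry-run $REPOSITORY --prefix 'nassrc-' \
+#       --keep-daily=7 --keep-weekly=4 --keep-monthly=20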
-borg prune -v $REPOSITORY --prefix 'nassrc-' \
+borg prune -v --stats $REPOSITORY --prefix 'nassrc-' \
     --keep-daily=7 --keep-weekly=4 --keep-monthly=20
diff --git a/bakdb b/bakdb
index 36a0dd7..91b77d0 100755
--- a/bakdb
+++ b/bakdb
@@ -1,5 +1,11 @@
 # !/bin/sh
 db=$1
 pw=$2
-file=/dat/bak/db/${db}_`date +"%Y%m%d"`.sqlbak
+bakdir=/dat/bak/db
+file=$bakdir/${db}_`date +"%Y%m%d"`.sqlbak
 mysqldump --lock-tables --user root -p"$pw" $db > $file
+
+find $bakdir -size 0 -exec rm {} \;
+ls -1tr $bakdir/${db}_* | head -n -5 | xargs -d '\n' rm -f --
+#find /dat/bak/db -name "${db}_*" -mtime +5 -exec rm {} \;
+
diff --git a/cleanowrtrepo.sh b/cleanowrtrepo.sh
new file mode 100644
index 0000000..3bd2b8c
--- /dev/null
+++ b/cleanowrtrepo.sh
@@ -0,0 +1,13 @@
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders dl --delete-files dl --no-blob-protection owrttest
+cd owrttest
+git reflog expire --expire=now --all && git gc --prune=now --aggressive
+du -sh
+cd ..
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders scripts --no-blob-protection owrttest
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders target --no-blob-protection owrttest
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders toolchain --no-blob-protection owrttest
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders tools --no-blob-protection owrttest
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders docs --no-blob-protection owrttest
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders include --no-blob-protection owrttest
+java -jar ~/bin/bfg-1.12.14.jar --delete-folders timing_log_received_sent_correlator --no-blob-protection owrttest
+
diff --git a/dropboxstart.sh b/dropboxstart.sh
new file mode 100644
index 0000000..9784b32
--- /dev/null
+++ b/dropboxstart.sh
@@ -0,0 +1 @@
+HOME=/home/nas/.dropbox-mhde dropbox start -i
diff --git a/fv b/fv
index 10d13f8..8eea584 100755
--- a/fv
+++ b/fv
@@ -11,7 +11,8 @@ lstr2dir () {
 local lstrtime=$(echo $lstr | awk '{print $3}' | awk -F@ '{print $1}')
 }
 
-svdrpsend -p2001 LSTR | grep -i "$name" | sed "s/^[0-9-]* //g" > ${tempfile}_1.tmp
+#svdrpsend -p2001 LSTR | grep -i "$name" | sed "s/^[0-9-]* //g" > ${tempfile}_1.tmp
+svdrpsend LSTR | grep -i "$name" | sed "s/^[0-9-]* //g" > ${tempfile}_1.tmp
 
 (
diff --git a/fvl b/fvl
new file mode 100755
index 0000000..4c4d757
--- /dev/null
+++ b/fvl
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+videodir=/dat/video/vdr
+vdrdb=/tmp/vdr.db
+
+tofind=$1
+
+files=$(locate -d $vdrdb -i "$tofind" | grep -v '\.rec')
+
+for f in $files; do
+  if [ -d $f ]; then
+    #echo $f
+    du -sh $f
+  fi
+done
diff --git a/git-dir2repo b/git-dir2repo
new file mode 100644
index 0000000..0bd5350
--- /dev/null
+++ b/git-dir2repo
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+if (( $# < 3 ))
+then
+  echo "Usage: $0 <repository> <subdirectory> <new-repo-name>"
+  echo
+  echo "Example: $0 /Projects/42.git first/answer/ firstAnswer"
+  exit 1
+fi
+
+
+clone=$PWD/${3}Clone
+newN=$PWD/${3}
+git clone --no-hardlinks file://$1 ${clone}
+cd ${clone}
+
+git filter-branch --subdirectory-filter $2 --prune-empty --tag-name-filter cat -- --all
+cd -
+git clone file://${clone} ${newN}
+cd ${newN}
+
+git reflog expire --expire=now --all
+git repack -ad
+git gc --prune=now
+
diff --git a/git-imerge b/git-imerge
new file mode 100755
index 0000000..82f4e45
--- /dev/null
+++ b/git-imerge
@@ -0,0 +1,2797 @@
+#! /usr/bin/env python2
+
+# Copyright 2012-2013 Michael Haggerty
+#
+# This file is part of git-imerge.
+#
+# git-imerge is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 2 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see
+# <http://www.gnu.org/licenses/>.
+
+r"""Git incremental merge
+
+Perform the merge between two branches incrementally.  If conflicts
+are encountered, figure out exactly which pairs of commits conflict,
+and present the user with one pairwise conflict at a time for
+resolution.
+
+git-imerge has two primary design goals:
+
+* Reduce the pain of resolving merge conflicts to its unavoidable
+  minimum, by finding and presenting the smallest possible conflicts:
+  those between the changes introduced by one commit from each branch.
+
+* Allow a merge to be saved, tested, interrupted, published, and
+  collaborated on while it is in progress.
+
+The best thing to read to get started is "git-imerge: A Practical
+Introduction" [1].  The theory and benefits of incremental merging are
+described in minute detail in a series of blog posts [2], as are the
+benefits of retaining history when doing a rebase [3].
+
+Multiple incremental merges can be in progress at the same time.  Each
+incremental merge has a name, and its progress is recorded in the Git
+repository as references under 'refs/imerge/NAME'.  The current state
+of an incremental merge can (crudely) be visualized using the
+"diagram" command.
+
+An incremental merge can be interrupted and resumed arbitrarily, or
+even pushed to a server to allow somebody else to work on it.
+
+When an incremental merge is finished, you can discard the
+intermediate merge commits and create a simpler history to record
+permanently in your project repository using either the "finish" or
+"simplify" command.  The incremental merge can be simplified in one of
+three ways:
+
+  * merge
+    keep only a simple merge of the second branch into the first
+    branch, discarding all intermediate merges.  The result is
+    similar to what you would get from
+
+        git checkout BRANCH1
+        git merge BRANCH2
+
+  * rebase
+    keep the versions of the commits from the second branch rebased
+    onto the first branch.  The result is similar to what you would
+    get from
+
+        git checkout BRANCH2
+        git rebase BRANCH1
+
+  * rebase-with-history
+    like rebase, except that each of the rebased commits is recorded
+    as a merge from its original version to its rebased predecessor.
+    This is a style of rebasing that does not discard the old
+    version of the branch, and allows an already-published branch to
+    be rebased.  See [3] for more information.
+
+Simple operation:
+
+For basic operation, you only need to know three git-imerge commands.
+To merge BRANCH into MASTER or rebase BRANCH onto MASTER,
+
+    git checkout MASTER
+    git-imerge start --name=NAME --goal=GOAL --first-parent BRANCH
+    while not done:
+        <resolve conflict>
+        git commit
+        git-imerge continue
+    git-imerge finish
+
+where
+
+    NAME is the name for this merge (and also the default name of the
+    branch to which the results will be saved)
+
+    GOAL describes how you want to simplify the results:
+
+        "merge" for a simple merge
+
+        "rebase" for a simple rebase
+
+        "rebase-with-history" for a rebase that retains history.  This
+        is equivalent to merging the commits from BRANCH into MASTER,
+        one commit at a time.  In other words, it transforms this::
+
+            o---o---o---o MASTER
+                 \
+                  A---B---C---D BRANCH
+
+        into this::
+
+            o---o---o---o---A'--B'--C'--D' MASTER
+                 \         /   /   /   /
+                  --------A---B---C---D BRANCH
+
+        This is like a rebase, except that it retains the history of
+        individual merges.  See [3] for more information.
+
+
+[1] http://softwareswirl.blogspot.com/2013/05/git-imerge-practical-introduction.html
+
+[2] http://softwareswirl.blogspot.com/2012/12/the-conflict-frontier-of-nightmare-merge.html
+    http://softwareswirl.blogspot.com/2012/12/mapping-merge-conflict-frontier.html
+    http://softwareswirl.blogspot.com/2012/12/real-world-conflict-diagrams.html
+    http://softwareswirl.blogspot.com/2013/05/git-incremental-merge.html
+    http://softwareswirl.blogspot.com/2013/05/one-merge-to-rule-them-all.html
+    http://softwareswirl.blogspot.com/2013/05/incremental-merge-vs-direct-merge-vs.html
+
+[3] http://softwareswirl.blogspot.com/2009/04/truce-in-merge-vs-rebase-war.html
+    http://softwareswirl.blogspot.com/2009/08/upstream-rebase-just-works-if-history.html
+    http://softwareswirl.blogspot.com/2009/08/rebase-with-history-implementation.html
+
+"""
+
+import sys
+import re
+import subprocess
+import itertools
+import functools
+import argparse
+from cStringIO import StringIO
+import json
+import os
+
+# CalledProcessError, check_call, and check_output were not in the
+# original Python 2.4 subprocess library, so implement it here if
+# necessary (implementations are taken from the Python 2.7 library):
+try:
+    from subprocess import CalledProcessError
+except ImportError:
+    class CalledProcessError(Exception):
+        def __init__(self, returncode, cmd, output=None):
+            self.returncode = returncode
+            self.cmd = cmd
+            self.output = output
+        def __str__(self):
+            return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+
+try:
+    from subprocess import check_call
+except ImportError:
+    def check_call(*popenargs, **kwargs):
+        retcode = subprocess.call(*popenargs, **kwargs)
+        if retcode:
+            cmd = kwargs.get("args")
+            if cmd is None:
+                cmd = popenargs[0]
+            raise CalledProcessError(retcode, cmd)
+        return 0
+
+try:
+    from subprocess import check_output
+except ImportError:
+    def check_output(*popenargs, **kwargs):
+        if 'stdout' in kwargs:
+            raise ValueError('stdout argument not allowed, it will be overridden.')
+        process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
+        output, unused_err = process.communicate()
+        retcode = process.poll()
+        if retcode:
+            cmd = kwargs.get("args")
+            if cmd is None:
+                cmd = popenargs[0]
+            # We don't store output in the CalledProcessError because
+            # the "output" keyword parameter was not supported in
+            # Python 2.6:
+            raise CalledProcessError(retcode, cmd)
+        return output
+
+
+STATE_VERSION = (1, 0, 0)
+
+ZEROS = '0' * 40
+
+ALLOWED_GOALS = [
+    #'full',
+    'rebase-with-history',
+    'rebase',
+    'merge',
+    ]
+DEFAULT_GOAL = 'merge'
+
+
+class 
Failure(Exception): + """An exception that indicates a normal failure of the script. + + Failures are reported at top level via sys.exit(str(e)) rather + than via a Python stack dump.""" + + @classmethod + def wrap(klass, f): + """Wrap a function inside a try...except that catches this error. + + If the exception is thrown, call sys.exit(). This function + can be used as a decorator.""" + + @functools.wraps(f) + def wrapper(*args, **kw): + try: + return f(*args, **kw) + except klass, e: + sys.exit(str(e)) + + return wrapper + + pass + + +class AnsiColor: + BLACK = '\033[0;30m' + RED = '\033[0;31m' + GREEN = '\033[0;32m' + YELLOW = '\033[0;33m' + BLUE = '\033[0;34m' + MAGENTA = '\033[0;35m' + CYAN = '\033[0;36m' + B_GRAY = '\033[0;37m' + D_GRAY = '\033[1;30m' + B_RED = '\033[1;31m' + B_GREEN = '\033[1;32m' + B_YELLOW = '\033[1;33m' + B_BLUE = '\033[1;34m' + B_MAGENTA = '\033[1;35m' + B_CYAN = '\033[1;36m' + WHITE = '\033[1;37m' + END = '\033[0m' + + @classmethod + def disable(cls): + cls.BLACK = '' + cls.RED = '' + cls.GREEN = '' + cls.YELLOW = '' + cls.BLUE = '' + cls.MAGENTA = '' + cls.CYAN = '' + cls.B_GRAY = '' + cls.D_GRAY = '' + cls.B_RED = '' + cls.B_GREEN = '' + cls.B_YELLOW = '' + cls.B_BLUE = '' + cls.B_MAGENTA = '' + cls.B_CYAN = '' + cls.WHITE = '' + cls.END = '' + + +def iter_neighbors(iterable): + """For an iterable (x0, x1, x2, ...) generate [(x0,x1), (x1,x2), ...].""" + + i = iter(iterable) + + try: + last = i.next() + except StopIteration: + return + + for x in i: + yield (last, x) + last = x + + +def find_first_false(f, lo, hi): + """Return the smallest i in lo <= i < hi for which f(i) returns False using bisection. + + If there is no such i, return hi. + + """ + + # Loop invariant: f(i) returns True for i < lo; f(i) returns False + # for i >= hi. + + while lo < hi: + mid = (lo + hi) // 2 + if f(mid): + lo = mid + 1 + else: + hi = mid + + return lo + + +def call_silently(cmd): + try: + NULL = open('/dev/null', 'w') + except IOError: + NULL = subprocess.PIPE + + p = subprocess.Popen( + cmd, stdout=NULL, stderr=subprocess.PIPE, + ) + (out,err) = p.communicate() + retcode = p.wait() + if retcode: + raise CalledProcessError(retcode, cmd) + + +class UncleanWorkTreeError(Failure): + pass + + +def require_clean_work_tree(action): + """Verify that the current tree is clean. + + The code is a Python translation of the git-sh-setup(1) function + of the same name.""" + + process = subprocess.Popen( + ['git', 'rev-parse', '--verify', 'HEAD'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + ) + _unused, err = process.communicate() + retcode = process.poll() + if retcode: + raise UncleanWorkTreeError(err.rstrip()) + + process = subprocess.Popen( + ['git', 'update-index', '-q', '--ignore-submodules', '--refresh'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + ) + out, err = process.communicate() + retcode = process.poll() + if retcode: + raise UncleanWorkTreeError(err.rstrip() or out.rstrip()) + + error = [] + try: + check_call(['git', 'diff-files', '--quiet', '--ignore-submodules']) + except CalledProcessError: + error.append('Cannot %s: You have unstaged changes.' % (action,)) + + try: + check_call([ + 'git', 'diff-index', '--cached', '--quiet', + '--ignore-submodules', 'HEAD', '--', + ]) + except CalledProcessError: + if not error: + error.append('Cannot %s: Your index contains uncommitted changes.' 
% (action,)) + else: + error.append('Additionally, your index contains uncommitted changes.') + + if error: + raise UncleanWorkTreeError('\n'.join(error)) + + +def rev_parse(arg): + return check_output(['git', 'rev-parse', '--verify', '--quiet', arg]).strip() + + +def rev_list(*args): + return [ + l.strip() + for l in check_output(['git', 'rev-list'] + list(args),).splitlines() + ] + + +def get_type(arg): + """Return the type of a git object ('commit', 'tree', 'blob', or 'tag').""" + + return check_output(['git', 'cat-file', '-t', arg]).strip() + + +def get_tree(arg): + return rev_parse('%s^{tree}' % (arg,)) + + +BRANCH_PREFIX = 'refs/heads/' + +def checkout(refname): + if refname.startswith(BRANCH_PREFIX): + target = refname[len(BRANCH_PREFIX):] + else: + target = '%s^0' % (refname,) + check_call(['git', 'checkout', target]) + + +def get_commit_sha1(arg): + """Convert arg into a SHA1 and verify that it refers to a commit. + + If not, raise ValueError.""" + + try: + return rev_parse('%s^{commit}' % (arg,)) + except CalledProcessError: + raise ValueError('%r does not refer to a valid git commit' % (arg,)) + + +def get_commit_parents(commit): + """Return a list containing the parents of commit.""" + + return check_output( + ['git', 'log', '--no-walk', '--pretty=format:%P', commit] + ).strip().split() + + +def get_log_message(commit): + contents = check_output([ + 'git', 'cat-file', 'commit', commit + ]).splitlines(True) + contents = contents[contents.index('\n') + 1:] + if contents and contents[-1][-1:] != '\n': + contents.append('\n') + return ''.join(contents) + + +def get_author_info(commit): + a = check_output([ + 'git', 'log', '-n1', + '--format=%an%x00%ae%x00%ad', commit + ]).strip().split('\x00') + + return { + 'GIT_AUTHOR_NAME': a[0], + 'GIT_AUTHOR_EMAIL': a[1], + 'GIT_AUTHOR_DATE': a[2], + } + + +def commit_tree(tree, parents, msg, metadata=None): + """Create a commit containing the specified tree. + + metadata can be author or committer information to be added to the + environment; e.g., {'GIT_AUTHOR_NAME' : 'me'}. + + Return the SHA-1 of the new commit object.""" + + cmd = ['git', 'commit-tree', tree] + for parent in parents: + cmd += ['-p', parent] + + if metadata is not None: + env = os.environ.copy() + env.update(metadata) + else: + env = os.environ + + process = subprocess.Popen( + cmd, env=env, stdin=subprocess.PIPE, stdout=subprocess.PIPE, + ) + out, err = process.communicate(msg) + retcode = process.poll() + + if retcode: + # We don't store the output in the CalledProcessError because + # the "output" keyword parameter was not supported in Python + # 2.6: + raise CalledProcessError(retcode, cmd) + + return out.strip() + + +class TemporaryHead(object): + """A context manager that records the current HEAD state then restores it. 
+ + The message is used for the reflog.""" + + def __enter__(self, message='imerge: restoring'): + self.message = message + try: + self.head_name = check_output(['git', 'symbolic-ref', '--quiet', 'HEAD']).strip() + except CalledProcessError: + self.head_name = None + self.orig_head = get_commit_sha1('HEAD') + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.head_name: + try: + check_call([ + 'git', 'symbolic-ref', + '-m', self.message, 'HEAD', + self.head_name, + ]) + except Exception, e: + raise Failure( + 'Could not restore HEAD to %r!: %s\n' + % (self.head_name, e.message,) + ) + else: + try: + check_call(['git', 'reset', '--hard', self.orig_head]) + except Exception, e: + raise Failure( + 'Could not restore HEAD to %r!: %s\n' + % (self.orig_head, e.message,) + ) + return False + + +def reparent(commit, parent_sha1s): + """Create a new commit object like commit, but with the specified parents. + + commit is the SHA1 of an existing commit and parent_sha1s is a + list of SHA1s. Create a new commit exactly like that one, except + that it has the specified parent commits. Return the SHA1 of the + resulting commit object, which is already stored in the object + database but is not yet referenced by anything.""" + + old_commit = check_output(['git', 'cat-file', 'commit', commit]) + separator = old_commit.index('\n\n') + headers = old_commit[:separator + 1].splitlines(True) + rest = old_commit[separator + 1:] + + new_commit = StringIO() + for i in range(len(headers)): + line = headers[i] + if line.startswith('tree '): + new_commit.write(line) + for parent_sha1 in parent_sha1s: + new_commit.write('parent %s\n' % (parent_sha1,)) + elif line.startswith('parent '): + # Discard old parents: + pass + else: + new_commit.write(line) + + new_commit.write(rest) + + process = subprocess.Popen( + ['git', 'hash-object', '-t', 'commit', '-w', '--stdin'], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + ) + out, err = process.communicate(new_commit.getvalue()) + retcode = process.poll() + if retcode: + raise Failure('Could not reparent commit %s' % (commit,)) + return out.strip() + + +class AutomaticMergeFailed(Exception): + def __init__(self, commit1, commit2): + Exception.__init__( + self, 'Automatic merge of %s and %s failed' % (commit1, commit2,) + ) + self.commit1, self.commit2 = commit1, commit2 + + +def automerge(commit1, commit2): + """Attempt an automatic merge of commit1 and commit2. + + Return the SHA1 of the resulting commit, or raise + AutomaticMergeFailed on error. This must be called with a clean + worktree.""" + + call_silently(['git', 'checkout', '-f', commit1]) + try: + call_silently(['git', '-c', 'rerere.enabled=false', 'merge', commit2]) + except CalledProcessError: + # We don't use "git merge --abort" here because it was only + # added in git version 1.7.4. 
+ call_silently(['git', 'reset', '--merge']) + raise AutomaticMergeFailed(commit1, commit2) + else: + return get_commit_sha1('HEAD') + + +class MergeRecord(object): + # Bits for the flags field: + + # There is a saved successful auto merge: + SAVED_AUTO = 0x01 + + # An auto merge (which may have been unsuccessful) has been done: + NEW_AUTO = 0x02 + + # There is a saved successful manual merge: + SAVED_MANUAL = 0x04 + + # A manual merge (which may have been unsuccessful) has been done: + NEW_MANUAL = 0x08 + + # A merge that is currently blocking the merge frontier: + BLOCKED = 0x10 + + # Some useful bit combinations: + SAVED = SAVED_AUTO | SAVED_MANUAL + NEW = NEW_AUTO | NEW_MANUAL + + AUTO = SAVED_AUTO | NEW_AUTO + MANUAL = SAVED_MANUAL | NEW_MANUAL + + ALLOWED_INITIAL_FLAGS = [ + SAVED_AUTO, + SAVED_MANUAL, + NEW_AUTO, + NEW_MANUAL, + ] + + def __init__(self, sha1=None, flags=0): + # The currently believed correct merge, or None if it is + # unknown or the best attempt was unsuccessful. + self.sha1 = sha1 + + if self.sha1 is None: + if flags != 0: + raise ValueError('Initial flags (%s) for sha1=None should be 0' % (flags,)) + elif flags not in self.ALLOWED_INITIAL_FLAGS: + raise ValueError('Initial flags (%s) is invalid' % (flags,)) + + # See bits above. + self.flags = flags + + def record_merge(self, sha1, source): + """Record a merge at this position. + + source must be SAVED_AUTO, SAVED_MANUAL, NEW_AUTO, or NEW_MANUAL.""" + + if source == self.SAVED_AUTO: + # SAVED_AUTO is recorded in any case, but only used if it + # is the only info available. + if self.flags & (self.MANUAL | self.NEW) == 0: + self.sha1 = sha1 + self.flags |= source + elif source == self.NEW_AUTO: + # NEW_AUTO is silently ignored if any MANUAL value is + # already recorded. + if self.flags & self.MANUAL == 0: + self.sha1 = sha1 + self.flags |= source + elif source == self.SAVED_MANUAL: + # SAVED_MANUAL is recorded in any case, but only used if + # no NEW_MANUAL is available. + if self.flags & self.NEW_MANUAL == 0: + self.sha1 = sha1 + self.flags |= source + elif source == self.NEW_MANUAL: + # NEW_MANUAL is always used, and also causes NEW_AUTO to + # be forgotten if present. + self.sha1 = sha1 + self.flags = (self.flags | source) & ~self.NEW_AUTO + else: + raise ValueError('Undefined source: %s' % (source,)) + + def record_blocked(self, blocked): + if blocked: + self.flags |= self.BLOCKED + else: + self.flags &= ~self.BLOCKED + + def is_known(self): + return self.sha1 is not None + + def is_blocked(self): + return self.flags & self.BLOCKED != 0 + + def is_manual(self): + return self.flags & self.MANUAL != 0 + + def save(self, name, i1, i2): + """If this record has changed, save it.""" + + def set_ref(source): + check_call([ + 'git', 'update-ref', + '-m', 'imerge %r: Record %s merge' % (name, source,), + 'refs/imerge/%s/%s/%d-%d' % (name, source, i1, i2), + self.sha1, + ]) + + def clear_ref(source): + check_call([ + 'git', 'update-ref', + '-d', 'imerge %r: Remove obsolete %s merge' % (name, source,), + 'refs/imerge/%s/%s/%d-%d' % (name, source, i1, i2), + ]) + + if self.flags & self.MANUAL: + if self.flags & self.AUTO: + # Any MANUAL obsoletes any AUTO: + if self.flags & self.SAVED_AUTO: + clear_ref('auto') + + self.flags &= ~self.AUTO + + if self.flags & self.NEW_MANUAL: + # Convert NEW_MANUAL to SAVED_MANUAL. 
+ if self.sha1: + set_ref('manual') + self.flags |= self.SAVED_MANUAL + elif self.flags & self.SAVED_MANUAL: + # Delete any existing SAVED_MANUAL: + clear_ref('manual') + self.flags &= ~self.SAVED_MANUAL + self.flags &= ~self.NEW_MANUAL + + elif self.flags & self.NEW_AUTO: + # Convert NEW_AUTO to SAVED_AUTO. + if self.sha1: + set_ref('auto') + self.flags |= self.SAVED_AUTO + elif self.flags & self.SAVED_AUTO: + # Delete any existing SAVED_AUTO: + clear_ref('auto') + self.flags &= ~self.SAVED_AUTO + self.flags &= ~self.NEW_AUTO + + +class UnexpectedMergeFailure(Exception): + def __init__(self, msg, i1, i2): + Exception.__init__(self, msg) + self.i1, self.i2 = i1, i2 + + +class BlockCompleteError(Exception): + pass + + +class FrontierBlockedError(Exception): + def __init__(self, msg, i1, i2): + Exception.__init__(self, msg) + self.i1 = i1 + self.i2 = i2 + + +class NotABlockingCommitError(Exception): + pass + + +class MergeFrontier(object): + """Represents the merge frontier within a Block. + + A MergeFrontier is represented by a list of SubBlocks, each of + which is thought to be completely mergeable. The list is kept in + normalized form: + + * Only non-empty blocks are retained + + * Blocks are sorted from bottom left to upper right + + * No redundant blocks + + """ + + @staticmethod + def map_known_frontier(block): + """Return the MergeFrontier describing existing successful merges in block. + + The return value only includes the part that is fully outlined + and whose outline consists of rectangles reaching back to + (0,0). + + A blocked commit is *not* considered to be within the + frontier, even if a merge is registered for it. Such merges + must be explicitly unblocked.""" + + # FIXME: This algorithm can take combinatorial time, for + # example if there is a big block of merges that is a dead + # end: + # + # +++++++ + # +?+++++ + # +?+++++ + # +?+++++ + # +?*++++ + # + # The problem is that the algorithm will explore all of the + # ways of getting to commit *, and the number of paths grows + # like a binomial coefficient. The solution would be to + # remember dead-ends and reject any curves that visit a point + # to the right of a dead-end. + # + # For now we don't intend to allow a situation like this to be + # created, so we ignore the problem. + + # A list (i1, i2, down) of points in the path so far. down is + # True iff the attempted step following this one was + # downwards. + path = [] + + def create_frontier(path): + blocks = [] + for ((i1old, i2old, downold), (i1new, i2new, downnew)) in iter_neighbors(path): + if downold == True and downnew == False: + blocks.append(block[:i1new + 1, :i2new + 1]) + return MergeFrontier(block, blocks) + + # Loop invariants: + # + # * path is a valid path + # + # * (i1, i2) is in block but it not yet added to path + # + # * down is True if a step downwards from (i1, i2) has not yet + # been attempted + (i1, i2) = (block.len1 - 1, 0) + down = True + while True: + if down: + if i2 == block.len2 - 1: + # Hit edge of block; can't move down: + down = False + elif (i1, i2 + 1) in block and not block.is_blocked(i1, i2 + 1): + # Can move down + path.append((i1, i2, True)) + i2 += 1 + else: + # Can't move down. + down = False + else: + if i1 == 0: + # Success! 
+ path.append((i1, i2, False)) + return create_frontier(path) + elif (i1 - 1, i2) in block and not block.is_blocked(i1 - 1, i2): + # Can move left + path.append((i1, i2, False)) + down = True + i1 -= 1 + else: + # There's no way to go forward; backtrack until we + # find a place where we can still try going left: + while True: + try: + (i1, i2, down) = path.pop() + except IndexError: + # This shouldn't happen because, in the + # worst case, there is a valid path across + # the top edge of the merge diagram. + raise RuntimeError('Block is improperly formed!') + if down: + down = False + break + + @staticmethod + def compute_by_bisection(block): + """Return a MergeFrontier instance for block. + + We make the following assumptions (using Python subscript + notation): + + 0. All of the merges in block[1:,0] and block[0,1:] are + already known. (This is an invariant of the Block class.) + + 1. If a direct merge can be done between block[i1-1,0] and + block[0,i2-1], then all of the pairwise merges in + block[1:i1, 1:i2] can also be done. + + 2. If a direct merge fails between block[i1-1,0] and + block[0,i2-1], then all of the pairwise merges in + block[i1-1:,i2-1:] would also fail. + + Under these assumptions, the merge frontier is a stepstair + pattern going from the bottom-left to the top-right, and + bisection can be used to find the transition between mergeable + and conflicting in any row or column. + + Of course these assumptions are not rigorously true, so the + MergeFrontier returned by this function is only an + approximation of the real merge diagram. We check for and + correct such inconsistencies later.""" + + # Given that these diagrams typically have few blocks, check + # the end of a range first to see if the whole range can be + # determined, and fall back to bisection otherwise. We + # determine the frontier block by block, starting in the lower + # left. + + if block.len1 <= 1 or block.len2 <= 1 or block.is_blocked(1, 1): + return MergeFrontier(block, []) + + if not block.is_mergeable(1, 1): + # There are no mergeable blocks in block; therefore, + # block[1,1] must itself be unmergeable. Record that + # fact: + block[1,1].record_blocked(True) + return MergeFrontier(block, []) + + blocks = [] + + # At this point, we know that there is at least one mergeable + # commit in the first column. Find the height of the success + # block in column 1: + i1 = 1 + i2 = find_first_false( + lambda i: block.is_mergeable(i1, i), + 1, block.len2, + ) + + # Now we know that (1,i2-1) is mergeable but (1,i2) is not; + # e.g., (i1, i2) is like 'A' (or maybe 'B') in the following + # diagram (where '*' means mergeable, 'x' means not mergeable, + # and '?' means indeterminate) and that the merge under 'A' is + # not mergeable: + # + # i1 + # + # 0123456 + # 0 ******* + # 1 **????? + # i2 2 **????? + # 3 **????? + # 4 *Axxxxx + # 5 *xxxxxx + # B + + while True: + if i2 == 1: + break + + # At this point in the loop, we know that any blocks to + # the left of 'A' have already been recorded, (i1, i2-1) + # is mergeable but (i1, i2) is not; e.g., we are at a + # place like 'A' in the following diagram: + # + # i1 + # + # 0123456 + # 0 **|**** + # 1 **|*??? + # i2 2 **|*??? + # 3 **|Axxx + # 4 --+xxxx + # 5 *xxxxxx + # + # This implies that (i1, i2) is the first unmergeable + # commit in a blocker block (though blocker blocks are not + # recorded explicitly). It also implies that a mergeable + # block has its last mergeable commit somewhere in row + # i2-1; find its width. 
+ if block.is_mergeable(block.len1 - 1, i2 - 1): + i1 = block.len1 + blocks.append(block[:i1,:i2]) + break + else: + i1 = find_first_false( + lambda i: block.is_mergeable(i, i2 - 1), + i1 + 1, block.len1 - 1, + ) + blocks.append(block[:i1,:i2]) + + # At this point in the loop, (i1-1, i2-1) is mergeable but + # (i1, i2-1) is not; e.g., 'A' in the following diagram: + # + # i1 + # + # 0123456 + # 0 **|*|** + # 1 **|*|?? + # i2 2 --+-+xx + # 3 **|xxAx + # 4 --+xxxx + # 5 *xxxxxx + # + # The block ending at (i1-1,i2-1) has just been recorded. + # Now find the height of the conflict rectangle at column + # i1 and fill it in: + if block.is_mergeable(i1, 1): + i2 = find_first_false( + lambda i: block.is_mergeable(i1, i), + 2, i2 - 1, + ) + else: + break + + return MergeFrontier(block, blocks) + + def __init__(self, block, blocks=None): + self.block = block + blocks = list(self._iter_non_empty_blocks(blocks or [])) + blocks.sort(key=lambda block: block.len1) + self.blocks = list(self._iter_non_redundant_blocks(blocks)) + + def __iter__(self): + """Iterate over blocks from bottom left to upper right.""" + + return iter(self.blocks) + + def __nonzero__(self): + """Return True iff this frontier has no completed parts.""" + + return bool(self.blocks) + + def is_complete(self): + """Return True iff the frontier covers the whole block.""" + + return ( + len(self.blocks) == 1 + and self.blocks[0].len1 == self.block.len1 + and self.blocks[0].len2 == self.block.len2 + ) + + # Additional codes used in the 2D array returned from create_diagram() + FRONTIER_WITHIN = 0x10 + FRONTIER_RIGHT_EDGE = 0x20 + FRONTIER_BOTTOM_EDGE = 0x40 + FRONTIER_MASK = 0x70 + + @classmethod + def default_formatter(cls, node, legend=None): + def color(node, within): + if within: + return AnsiColor.B_GREEN + node + AnsiColor.END + else: + return AnsiColor.B_RED + node + AnsiColor.END + + if legend is None: + legend = ["?", "*", ".", "#", "@", "-", "|", "+"] + merge = node & ~cls.FRONTIER_MASK + within = merge == Block.MERGE_MANUAL or (node & cls.FRONTIER_WITHIN) + skip = [Block.MERGE_MANUAL, Block.MERGE_BLOCKED, Block.MERGE_UNBLOCKED] + if merge not in skip: + vertex = (cls.FRONTIER_BOTTOM_EDGE | cls.FRONTIER_RIGHT_EDGE) + edge_status = node & vertex + if edge_status == vertex: + return color(legend[-1], within) + elif edge_status == cls.FRONTIER_RIGHT_EDGE: + return color(legend[-2], within) + elif edge_status == cls.FRONTIER_BOTTOM_EDGE: + return color(legend[-3], within) + return color(legend[merge], within) + + def create_diagram(self): + """Generate a diagram of this frontier. + + The returned diagram is a nested list of integers forming a 2D array, + representing the merge frontier embedded in the diagram of commits + returned from Block.create_diagram(). 
+
+        At each node in the returned diagram is an integer whose value
+        is a bitwise-or of an existing MERGE_* constant from
+        Block.create_diagram() and zero or more of the FRONTIER_*
+        constants defined in this class."""
+
+        diagram = self.block.create_diagram()
+        for block in self:
+            for i1 in range(block.len1):
+                for i2 in range(block.len2):
+                    v = self.FRONTIER_WITHIN
+                    if i1 == block.len1 - 1:
+                        v |= self.FRONTIER_RIGHT_EDGE
+                    if i2 == block.len2 - 1:
+                        v |= self.FRONTIER_BOTTOM_EDGE
+                    diagram[i1][i2] |= v
+        return diagram
+
+    def format_diagram(self, formatter=None, diagram=None):
+        if formatter is None:
+            formatter = self.default_formatter
+        if diagram is None:
+            diagram = self.create_diagram()
+        return [
+            [formatter(diagram[i1][i2]) for i2 in range(self.block.len2)]
+            for i1 in range(self.block.len1)]
+
+    def write(self, f):
+        """Write this frontier to file-like object f."""
+        diagram = self.format_diagram()
+        for i2 in range(self.block.len2):
+            for i1 in range(self.block.len1):
+                f.write(diagram[i1][i2])
+            f.write('\n')
+
+    def write_html(self, f, name, cssfile="imerge.css", abbrev_sha1=7):
+        class_map = {
+            Block.MERGE_UNKNOWN: "merge_unknown",
+            Block.MERGE_MANUAL: "merge_manual",
+            Block.MERGE_AUTOMATIC: "merge_automatic",
+            Block.MERGE_BLOCKED: "merge_blocked",
+            Block.MERGE_UNBLOCKED: "merge_unblocked",
+            self.FRONTIER_WITHIN: "frontier_within",
+            self.FRONTIER_RIGHT_EDGE: "frontier_right_edge",
+            self.FRONTIER_BOTTOM_EDGE: "frontier_bottom_edge",
+        }
+
+        def map_to_classes(node):
+            merge = node & ~self.FRONTIER_MASK
+            ret = [class_map[merge]]
+            for bit in [self.FRONTIER_WITHIN, self.FRONTIER_RIGHT_EDGE,
+                        self.FRONTIER_BOTTOM_EDGE]:
+                if node & bit:
+                    ret.append(class_map[bit])
+            return ret
+
+        f.write("""\
+<html>
+<head>
+<title>git-imerge: %s</title>
+<link rel="stylesheet" href="%s" type="text/css" />
+</head>
+<body>
+<table>
+""" % (name, cssfile))
+
+        diagram = self.create_diagram()
+        for i2 in range(self.block.len2):
+            f.write("  <tr>\n")
+            for i1 in range(self.block.len1):
+                classes = map_to_classes(diagram[i1][i2])
+                record = self.block.get_value(i1, i2)
+                sha1 = record.sha1 or ""
+                td_id = record.sha1 and ' id="%s"' % (record.sha1) or ''
+                td_class = classes and ' class="' + " ".join(classes) + '"' or ''
+                f.write("    <td%s%s>%.*s</td>\n" % (
+                    td_id, td_class, abbrev_sha1, sha1))
+            f.write("  </tr>\n")
+        f.write("</table>\n</body>\n</html>\n")
+
+    @staticmethod
+    def _iter_non_empty_blocks(blocks):
+        for block in blocks:
+            if block.len1 > 1 and block.len2 > 1:
+                yield block
+
+    @staticmethod
+    def _iter_non_redundant_blocks(blocks):
+        def contains(block1, block2):
+            """Return true if block1 contains block2."""
+
+            return block1.len1 >= block2.len1 and block1.len2 >= block2.len2
+
+        i = iter(blocks)
+        try:
+            last = i.next()
+        except StopIteration:
+            return
+
+        for block in i:
+            if contains(last, block):
+                pass
+            elif contains(block, last):
+                last = block
+            else:
+                yield last
+                last = block
+
+        yield last
+
+    def remove_failure(self, i1, i2):
+        """Refine the merge frontier given that the specified merge fails."""
+
+        newblocks = []
+        shrunk_block = False
+
+        for block in self.blocks:
+            if i1 < block.len1 and i2 < block.len2:
+                if i1 > 1:
+                    newblocks.append(block[:i1, :])
+                if i2 > 1:
+                    newblocks.append(block[:, :i2])
+                shrunk_block = True
+            else:
+                newblocks.append(block)
+
+        if shrunk_block:
+            self.blocks = list(self._iter_non_redundant_blocks(newblocks))
+
+    def partition(self, block):
+        """Return two MergeFrontier instances partitioned by block.
+
+        Return (frontier1, frontier2), where each frontier is limited
+        to each side of the argument.
+
+        block must be contained in this MergeFrontier and already be
+        outlined."""
+
+        # Remember that the new blocks have to include the outlined
+        # edge of the partitioning block to satisfy the invariant that
+        # the left and upper edge of a block has to be known.
+
+        left = []
+        right = []
+        for b in self.blocks:
+            if b.len1 == block.len1 and b.len2 == block.len2:
+                # That's the block we're partitioning on; just skip it.
+                pass
+            elif b.len1 < block.len1 and b.len2 > block.len2:
+                left.append(b[:, block.len2 - 1:])
+            elif b.len1 > block.len1 and b.len2 < block.len2:
+                right.append(b[block.len1 - 1:, :])
+            else:
+                raise ValueError(
+                    'MergeFrontier partitioned with inappropriate block'
+                )
+        return (
+            MergeFrontier(self.block[:block.len1, block.len2 - 1:], left),
+            MergeFrontier(self.block[block.len1 - 1:, :block.len2], right),
+        )
+
+    def iter_blocker_blocks(self):
+        """Iterate over the blocks on the far side of this frontier.
+
+        This must only be called for an outlined frontier."""
+
+        if not self:
+            yield self.block
+            return
+
+        blockruns = []
+        if self.blocks[0].len2 < self.block.len2:
+            blockruns.append([self.block[0,:]])
+        blockruns.append(self)
+        if self.blocks[-1].len1 < self.block.len1:
+            blockruns.append([self.block[:,0]])
+
+        for block1, block2 in iter_neighbors(itertools.chain(*blockruns)):
+            yield self.block[block1.len1 - 1:block2.len1, block2.len2 - 1: block1.len2]
+
+    def get_affected_blocker_block(self, i1, i2):
+        """Return the blocker block that a successful merge (i1,i2) would unblock.
+
+        If there is no such block, raise NotABlockingCommitError."""
+
+        for block in self.iter_blocker_blocks():
+            try:
+                (block_i1, block_i2) = block.convert_original_indexes(i1, i2)
+            except IndexError:
+                pass
+            else:
+                if (block_i1, block_i2) == (1,1):
+                    # That's the one we need to improve this block:
+                    return block
+                else:
+                    # An index pair can only be in a single blocker
+                    # block, which we've already found:
+                    raise NotABlockingCommitError(
+                        'Commit %d-%d was not blocking the frontier.'
+                        % self.block.get_original_indexes(i1, i2)
+                    )
+        else:
+            raise NotABlockingCommitError(
+                'Commit %d-%d was not on the frontier.'
+                % self.block.get_original_indexes(i1, i2)
+            )
+
+    def auto_expand(self):
+        """Try pushing out one of the blocks on this frontier.
+ + Raise BlockCompleteError if the whole block has already been + solved. Raise FrontierBlockedError if the frontier is blocked + everywhere. This method does *not* update self; if it returns + successfully you should recompute the frontier from + scratch.""" + + blocks = list(self.iter_blocker_blocks()) + if not blocks: + raise BlockCompleteError('The block is already complete') + # Try blocks from biggest to smallest: + blocks.sort(key=lambda block: block.get_area(), reverse=True) + for block in blocks: + if block.auto_outline_frontier(): + return + else: + # None of the blocks could be expanded. Suggest that the + # caller do a manual merge of the commit that is blocking + # the *smallest* blocker block. + i1, i2 = blocks[-1].get_original_indexes(1, 1) + raise FrontierBlockedError( + 'Frontier blocked; suggest manual merge of %d-%d' % (i1, i2), + i1, i2 + ) + + +class NoManualMergeError(Exception): + pass + + +class ManualMergeUnusableError(Exception): + def __init__(self, msg, commit): + Exception.__init__(self, 'Commit %s is not usable; %s' % (commit, msg)) + self.commit = commit + + +class CommitNotFoundError(Exception): + def __init__(self, commit): + Exception.__init__( + self, + 'Commit %s was not found among the known merge commits' % (commit,), + ) + self.commit = commit + + +class Block(object): + """A rectangular range of commits, indexed by (i1,i2). + + The commits block[0,1:] and block[1:,0] are always all known. + block[0,0] may or may not be known; it is usually unneeded (except + maybe implicitly). + + Members: + + len1, len2 -- the dimensions of the block. + + """ + + def __init__(self, len1, len2): + self.len1 = len1 + self.len2 = len2 + + def get_area(self): + """Return the area of this block, ignoring the known edges.""" + + return (self.len1 - 1) * (self.len2 - 1) + + def _check_indexes(self, i1, i2): + if not (0 <= i1 < self.len1): + raise IndexError('first index (%s) is out of range 0:%d' % (i1, self.len1,)) + if not (0 <= i2 < self.len2): + raise IndexError('second index (%s) is out of range 0:%d' % (i2, self.len2,)) + + def _normalize_indexes(self, index): + """Return a pair of non-negative integers (i1, i2).""" + + try: + (i1, i2) = index + except TypeError: + raise IndexError('Block indexing requires exactly two indexes') + + if i1 < 0: + i1 += self.len1 + if i2 < 0: + i2 += self.len2 + + self._check_indexes(i1, i2) + return (i1, i2) + + def get_original_indexes(self, i1, i2): + """Return the original indexes corresponding to (i1,i2) in this block. + + This function supports negative indexes.""" + + return self._normalize_indexes((i1, i2)) + + def convert_original_indexes(self, i1, i2): + """Return the indexes in this block corresponding to original indexes (i1,i2). + + raise IndexError if they are not within this block. This + method does not support negative indices.""" + + return (i1, i2) + + def _set_value(self, i1, i2, value): + """Set the MergeRecord for integer indexes (i1, i2). + + i1 and i2 must be non-negative.""" + + raise NotImplementedError() + + def get_value(self, i1, i2): + """Return the MergeRecord for integer indexes (i1, i2). + + i1 and i2 must be non-negative.""" + + raise NotImplementedError() + + def __getitem__(self, index): + """Return the MergeRecord at (i1, i2) (requires two indexes). + + If i1 and i2 are integers but the merge is unknown, return + None. 
If either index is a slice, return a SubBlock.""" + + try: + (i1, i2) = index + except TypeError: + raise IndexError('Block indexing requires exactly two indexes') + if isinstance(i1, slice) or isinstance(i2, slice): + return SubBlock(self, i1, i2) + else: + return self.get_value(*self._normalize_indexes((i1, i2))) + + def __contains__(self, index): + return self[index].is_known() + + def is_blocked(self, i1, i2): + """Return True iff the specified commit is blocked.""" + + (i1, i2) = self._normalize_indexes((i1, i2)) + return self[i1, i2].is_blocked() + + def is_mergeable(self, i1, i2): + """Determine whether (i1,i2) can be merged automatically. + + If we already have a merge record for (i1,i2), return True. + Otherwise, attempt a merge (discarding the result).""" + + (i1, i2) = self._normalize_indexes((i1, i2)) + if (i1, i2) in self: + return True + else: + sys.stderr.write( + 'Attempting automerge of %d-%d...' % self.get_original_indexes(i1, i2) + ) + try: + automerge(self[i1, 0].sha1, self[0, i2].sha1) + sys.stderr.write('success.\n') + return True + except AutomaticMergeFailed: + sys.stderr.write('failure.\n') + return False + + def auto_outline(self): + """Complete the outline of this Block. + + raise UnexpectedMergeFailure if automerging fails.""" + + # Check that all of the merges go through before recording any + # of them permanently. + merges = [] + + def do_merge(i1, commit1, i2, commit2, msg='Autofilling %d-%d...', record=True): + if (i1, i2) in self: + return self[i1,i2].sha1 + try: + sys.stderr.write(msg % self.get_original_indexes(i1, i2)) + merge = automerge(commit1, commit2) + sys.stderr.write('success.\n') + except AutomaticMergeFailed, e: + sys.stderr.write('unexpected conflict. Backtracking...\n') + raise UnexpectedMergeFailure(str(e), i1, i2) + if record: + merges.append((i1, i2, merge)) + return merge + + i2 = self.len2 - 1 + left = self[0, i2].sha1 + for i1 in range(1, self.len1 - 1): + left = do_merge(i1, self[i1,0].sha1, i2, left) + + i1 = self.len1 - 1 + above = self[i1, 0].sha1 + for i2 in range(1, self.len2 - 1): + above = do_merge(i1, above, i2, self[0,i2].sha1) + + # We will compare two ways of doing the final "vertex" merge: + # as a continuation of the bottom edge, or as a continuation + # of the right edge. We only accept it if both approaches + # succeed and give identical trees. + i1, i2 = self.len1 - 1, self.len2 - 1 + vertex_v1 = do_merge( + i1, self[i1,0].sha1, i2, left, + msg='Autofilling %d-%d (first way)...', + record=False, + ) + vertex_v2 = do_merge( + i1, above, i2, self[0,i2].sha1, + msg='Autofilling %d-%d (second way)...', + record=False, + ) + if get_tree(vertex_v1) == get_tree(vertex_v2): + sys.stderr.write( + 'The two ways of autofilling %d-%d agree.\n' + % self.get_original_indexes(i1, i2) + ) + else: + sys.stderr.write( + 'The two ways of autofilling %d-%d do not agree. Backtracking...\n' + % self.get_original_indexes(i1, i2) + ) + raise UnexpectedMergeFailure('Inconsistent vertex merges', i1, i2) + + # Everything is OK. Now reparent the actual vertex merge to + # have above and left as its parents: + merges.append((i1, i2, reparent(vertex_v1, [above, left]))) + + # Done! Now we can record the results: + sys.stderr.write('Recording autofilled block %s.\n' % (self,)) + for (i1, i2, merge) in merges: + self[i1, i2].record_merge(merge, MergeRecord.NEW_AUTO) + + def auto_outline_frontier(self, merge_frontier=None): + """Try to outline the merge frontier of this block. 
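+
+        The frontier is computed by bisection, its largest block is
+        outlined first, and the method then recurses into the two
+        partitions on either side of that block.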
+ + Return True iff some progress was made.""" + + if merge_frontier is None: + merge_frontier = MergeFrontier.compute_by_bisection(self) + + if not merge_frontier: + # Nothing to do. + return False + + best_block = max(merge_frontier, key=lambda block: block.get_area()) + + try: + best_block.auto_outline() + except UnexpectedMergeFailure, e: + # One of the merges that we expected to succeed in + # fact failed. + merge_frontier.remove_failure(e.i1, e.i2) + return self.auto_outline_frontier(merge_frontier) + else: + f1, f2 = merge_frontier.partition(best_block) + if f1: + f1.block.auto_outline_frontier(f1) + if f2: + f2.block.auto_outline_frontier(f2) + return True + + # The codes in the 2D array returned from create_diagram() + MERGE_UNKNOWN = 0 + MERGE_MANUAL = 1 + MERGE_AUTOMATIC = 2 + MERGE_BLOCKED = 3 + MERGE_UNBLOCKED = 4 + + # A map {(is_known(), manual, is_blocked()) : integer constant} + MergeState = { + (False, False, False) : MERGE_UNKNOWN, + (False, False, True) : MERGE_BLOCKED, + (True, False, True) : MERGE_UNBLOCKED, + (True, True, True) : MERGE_UNBLOCKED, + (True, False, False) : MERGE_AUTOMATIC, + (True, True, False) : MERGE_MANUAL, + } + + def create_diagram(self): + """Generate a diagram of this Block. + + The returned diagram, is a nested list of integers forming a 2D array, + where the integer at diagram[i1][i2] is one of MERGE_UNKNOWN, + MERGE_MANUAL, MERGE_AUTOMATIC, MERGE_BLOCKED, or MERGE_UNBLOCKED, + representing the state of the commit at (i1, i2).""" + + diagram = [[None for i2 in range(self.len2)] for i1 in range(self.len1)] + + for i2 in range(self.len2): + for i1 in range(self.len1): + rec = self.get_value(i1, i2) + c = self.MergeState[ + rec.is_known(), rec.is_manual(), rec.is_blocked()] + diagram[i1][i2] = c + + return diagram + + def format_diagram(self, legend=None, diagram=None): + if legend is None: + legend = [ + AnsiColor.D_GRAY + "?" + AnsiColor.END, + AnsiColor.B_GREEN + "*" + AnsiColor.END, + AnsiColor.B_GREEN + "." + AnsiColor.END, + AnsiColor.B_RED + "#" + AnsiColor.END, + AnsiColor.B_YELLOW + "@" + AnsiColor.END, + ] + if diagram is None: + diagram = self.create_diagram() + return [ + [legend[diagram[i1][i2]] for i2 in range(self.len2)] + for i1 in range(self.len1)] + + def write(self, f, legend=None, sep='', linesep='\n'): + diagram = self.format_diagram(legend) + for i2 in range(self.len2): + f.write(sep.join(diagram[i1][i2] for i1 in range(self.len1)) + linesep) + + def writeppm(self, f): + f.write('P3\n') + f.write('%d %d 255\n' % (self.len1, self.len2,)) + legend = ['127 127 0', '0 255 0', '0 127 0', '255 0 0', '127 0 0'] + self.write(f, legend, sep=' ') + + +class SubBlock(Block): + @staticmethod + def _convert_to_slice(i, len): + """Return (start, len) for the specified index. 
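+
+        For example, with len=5 the integer index -1 is normalized
+        to 4 and converted to the pair (start=4, len=1).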
+ + i may be an integer or a slice with step equal to 1.""" + + if isinstance(i, int): + if i < 0: + i += len + i = slice(i, i + 1) + elif isinstance(i, slice): + if i.step is not None and i.step != 1: + raise ValueError('Index has a non-zero step size') + else: + raise ValueError('Index cannot be converted to a slice') + + (start, stop, step) = i.indices(len) + return (start, stop - start) + + def __init__(self, block, slice1, slice2): + (start1, len1) = self._convert_to_slice(slice1, block.len1) + (start2, len2) = self._convert_to_slice(slice2, block.len2) + Block.__init__(self, len1, len2) + if isinstance(block, SubBlock): + # Peel away one level of indirection: + self._block = block._block + self._start1 = start1 + block._start1 + self._start2 = start2 + block._start2 + else: + self._block = block + self._start1 = start1 + self._start2 = start2 + + def get_original_indexes(self, i1, i2): + i1, i2 = self._normalize_indexes((i1, i2)) + return self._block.get_original_indexes(i1 + self._start1, i2 + self._start2) + + def convert_original_indexes(self, i1, i2): + (i1, i2) = self._block.convert_original_indexes(i1, i2) + if not ( + self._start1 <= i1 < self._start1 + self.len1 + and self._start2 <= i2 < self._start2 + self.len2 + ): + raise IndexError('Indexes are not within block') + return (i1 - self._start1, i2 - self._start2) + + def _set_value(self, i1, i2, sha1, flags): + self._check_indexes(i1, i2) + self._block._set_value(i1 + self._start1, i2 + self._start2, sha1, flags) + + def get_value(self, i1, i2): + self._check_indexes(i1, i2) + return self._block.get_value(i1 + self._start1, i2 + self._start2) + + def __str__(self): + return '%s[%d:%d,%d:%d]' % ( + self._block, + self._start1, self._start1 + self.len1, + self._start2, self._start2 + self.len2, + ) + + +class MergeState(Block): + SOURCE_TABLE = { + 'auto' : MergeRecord.SAVED_AUTO, + 'manual' : MergeRecord.SAVED_MANUAL, + } + + MERGE_STATE_RE = re.compile( + r""" + ^ + refs\/imerge\/ + (?P[^\/]+) + \/state + $ + """, + re.VERBOSE, + ) + + @staticmethod + def iter_existing_names(): + """Iterate over the names of existing MergeStates in this repo.""" + + for line in check_output(['git', 'for-each-ref', 'refs/imerge',]).splitlines(): + (sha1, type, refname) = line.split() + if type == 'blob': + m = MergeState.MERGE_STATE_RE.match(refname) + if m: + yield m.group('name') + + @staticmethod + def get_scratch_refname(name): + return 'refs/heads/imerge/%s' % (name,) + + @staticmethod + def _read_state(name, sha1): + state_string = check_output(['git', 'cat-file', 'blob', sha1]) + state = json.loads(state_string) + + version = tuple(map(int, state['version'].split('.'))) + if version[0] != STATE_VERSION[0] or version > STATE_VERSION: + raise Failure( + 'The format of imerge %s (%s) is not compatible with this script version.' + % (name, state['version'],) + ) + state['version'] = version + return state + + @staticmethod + def check_exists(name): + """Verify that a MergeState with the given name exists. + + Just check for the existence, readability, and compatible + version of the "state" reference. If the reference doesn't + exist, just return False. If it exists but is unusable for + some other reason, raise an exception.""" + + try: + call_silently( + ['git', 'check-ref-format', 'refs/imerge/%s' % (name,)] + ) + except CalledProcessError: + raise Failure('Name %r is not a valid refname component!' 
% (name,)) + + state_refname = 'refs/imerge/%s/state' % (name,) + for line in check_output(['git', 'for-each-ref', state_refname]).splitlines(): + (sha1, type, refname) = line.split() + if refname == state_refname and type == 'blob': + MergeState._read_state(name, sha1) + # If that didn't throw an exception: + return True + else: + return False + + @staticmethod + def set_default_name(name): + """Set the default merge to the specified one. + + name can be None to cause the default to be cleared.""" + + if name is None: + try: + check_call(['git', 'config', '--unset', 'imerge.default']) + except CalledProcessError, e: + if e.returncode == 5: + # Value was not set + pass + else: + raise + else: + check_call(['git', 'config', 'imerge.default', name]) + + @staticmethod + def get_default_name(): + """Get the name of the default merge, or None if none is currently set.""" + + try: + return check_output(['git', 'config', 'imerge.default']).rstrip() + except CalledProcessError: + return None + + @staticmethod + def _check_no_merges(commits): + multiparent_commits = [ + commit + for commit in commits + if len(get_commit_parents(commit)) > 1 + ] + if multiparent_commits: + raise Failure( + 'The following commits on the to-be-merged branch are merge commits:\n' + ' %s\n' + '--goal=\'rebase\' is not yet supported for branches that include merges.\n' + % ('\n '.join(multiparent_commits),) + ) + + @staticmethod + def initialize(name, goal, tip1, tip2): + """Create and return a new MergeState object.""" + + if '/' in name: + raise Failure('Name %r must not include a slash character!' % (name,)) + + try: + call_silently( + ['git', 'check-ref-format', 'refs/imerge/%s' % (name,)] + ) + except CalledProcessError: + raise Failure('Name %r is not a valid refname component!' % (name,)) + + if check_output(['git', 'for-each-ref', 'refs/imerge/%s' % (name,)]): + raise Failure('Name %r is already in use!' 
% (name,)) + + merge_base = check_output(['git', 'merge-base', '--all', tip1, tip2]).splitlines() + if not merge_base: + raise Failure('%r and %r do not have a common merge base' % (tip1, tip2)) + if len(merge_base) > 1: + raise Failure('%r and %r do not have a unique merge base' % (tip1, tip2)) + + [merge_base] = merge_base + + ancestry_path1 = set(rev_list('--ancestry-path', '%s..%s' % (merge_base, tip1))) + commits1 = [ + sha1 + for sha1 in rev_list('--first-parent', '%s..%s' % (merge_base, tip1)) + if sha1 in ancestry_path1 + ] + commits1.reverse() + if not commits1: + raise Failure( + 'There are no commits on %r that are not already in %r' % (tip1, tip2) + ) + + ancestry_path2 = set(rev_list('--ancestry-path', '%s..%s' % (merge_base, tip2))) + commits2 = [ + sha1 + for sha1 in rev_list('--first-parent', '%s..%s' % (merge_base, tip2)) + if sha1 in ancestry_path2 + ] + commits2.reverse() + if not commits2: + raise Failure( + 'There are no commits on %r that are not already in %r' % (tip2, tip1) + ) + + if goal == 'rebase': + MergeState._check_no_merges(commits2) + + return MergeState(name, goal, merge_base, commits1, commits2, MergeRecord.NEW_MANUAL) + + @staticmethod + def read(name): + merge_ref_re = re.compile( + r""" + ^ + refs\/imerge\/ + """ + re.escape(name) + r""" + \/(?Pauto|manual)\/ + (?P0|[1-9][0-9]*) + \- + (?P0|[1-9][0-9]*) + $ + """, + re.VERBOSE, + ) + + state_ref_re = re.compile( + r""" + ^ + refs\/imerge\/ + """ + re.escape(name) + r""" + \/state + $ + """, + re.VERBOSE, + ) + + state = None + + # A map {(i1, i2) : (sha1, source)}: + merges = {} + + # refnames that were found but not understood: + unexpected = [] + + for line in check_output([ + 'git', 'for-each-ref', 'refs/imerge/%s' % (name,) + ]).splitlines(): + (sha1, type, refname) = line.split() + m = merge_ref_re.match(refname) + if m: + if type != 'commit': + raise Failure('Reference %r is not a commit!' % (refname,)) + i1, i2 = int(m.group('i1')), int(m.group('i2')) + source = MergeState.SOURCE_TABLE[m.group('source')] + merges[i1, i2] = (sha1, source) + continue + + m = state_ref_re.match(refname) + if m: + if type != 'blob': + raise Failure('Reference %r is not a blob!' % (refname,)) + state = MergeState._read_state(name, sha1) + continue + + unexpected.append(refname) + + if state is None: + raise Failure( + 'No state found; it should have been a blob reference at ' + '"refs/imerge/%s/state' % (name,) + ) + + goal = state['goal'] + if goal not in ALLOWED_GOALS: + raise Failure('Goal %r, read from state, is not recognized.' % (goal,)) + + blockers = state.get('blockers', []) + + if unexpected: + raise Failure( + 'Unexpected reference(s) found in "refs/imerge/%s" namespace:\n %s\n' + % (name, '\n '.join(unexpected),) + ) + + # Find merge_base, commits1, and commits2: + (merge_base, source) = merges.pop((0, 0)) + if source != MergeRecord.SAVED_MANUAL: + raise Failure('Merge base should be manual!') + commits1 = [] + for i1 in itertools.count(1): + try: + (sha1, source) = merges.pop((i1, 0)) + if source != MergeRecord.SAVED_MANUAL: + raise Failure('Merge %d-0 should be manual!' % (i1,)) + commits1.append(sha1) + except KeyError: + break + + commits2 = [] + for i2 in itertools.count(1): + try: + (sha1, source) = merges.pop((0, i2)) + if source != MergeRecord.SAVED_MANUAL: + raise Failure('Merge (0,%d) should be manual!' 
% (i2,)) + commits2.append(sha1) + except KeyError: + break + + state = MergeState(name, goal, merge_base, commits1, commits2, MergeRecord.SAVED_MANUAL) + + # Now write the rest of the merges to state: + for ((i1, i2), (sha1, source)) in merges.iteritems(): + if i1 == 0 and i2 >= state.len2: + raise Failure('Merge 0-%d is missing!' % (state.len2,)) + if i1 >= state.len1 and i2 == 0: + raise Failure('Merge %d-0 is missing!' % (state.len1,)) + if i1 >= state.len1 or i2 >= state.len2: + raise Failure( + 'Merge %d-%d is out of range [0:%d,0:%d]' + % (i1, i2, state.len1, state.len2) + ) + state[i1, i2].record_merge(sha1, source) + + # Record any blockers: + for (i1,i2) in blockers: + state[i1, i2].record_blocked(True) + + return state + + @staticmethod + def remove(name): + # If HEAD is the scratch refname, abort any in-progress + # commits and detach HEAD: + scratch_refname = MergeState.get_scratch_refname(name) + try: + head_refname = check_output(['git', 'symbolic-ref', '--quiet', 'HEAD']).strip() + except CalledProcessError: + head_refname = None + if head_refname == scratch_refname: + try: + # We don't use "git merge --abort" here because it was + # only added in git version 1.7.4. + check_call(['git', 'reset', '--merge']) + except CalledProcessError: + pass + # Detach head so that we can delete scratch_refname: + check_call([ + 'git', 'update-ref', '--no-deref', + '-m', 'Detach HEAD from %s' % (scratch_refname,), + 'HEAD', get_commit_sha1('HEAD'), + ]) + + # Delete the scratch refname: + check_call([ + 'git', 'update-ref', + '-m', 'imerge %s: remove scratch reference' % (name,), + '-d', scratch_refname, + ]) + + # Remove any references referring to intermediate merges: + for line in check_output([ + 'git', 'for-each-ref', 'refs/imerge/%s' % (name,) + ]).splitlines(): + (sha1, type, refname) = line.split() + try: + check_call([ + 'git', 'update-ref', + '-m', 'imerge: remove merge %r' % (name,), + '-d', refname, + ]) + except CalledProcessError, e: + sys.stderr.write( + 'Warning: error removing reference %r: %s' % (refname, e) + ) + + # If this merge was the default, unset the default: + if MergeState.get_default_name() == name: + MergeState.set_default_name(None) + + def __init__(self, name, goal, merge_base, commits1, commits2, source): + Block.__init__(self, len(commits1) + 1, len(commits2) + 1) + self.name = name + self.goal = goal + + # A simulated 2D array. Values are None or MergeRecord instances. + self._data = [[None] * self.len2 for i1 in range(self.len1)] + + self.get_value(0, 0).record_merge(merge_base, source) + for (i1, commit) in enumerate(commits1, 1): + self.get_value(i1, 0).record_merge(commit, source) + for (i2, commit) in enumerate(commits2, 1): + self.get_value(0, i2).record_merge(commit, source) + + def set_goal(self, goal): + if goal not in ALLOWED_GOALS: + raise ValueError('%r is not an allowed goal' % (goal,)) + + if goal == 'rebase': + self._check_no_merges( + [self[0,i2].sha1 for i2 in range(1,self.len2)] + ) + + self.goal = goal + + def _set_value(self, i1, i2, value): + self._data[i1][i2] = value + + def get_value(self, i1, i2): + value = self._data[i1][i2] + # Missing values spring to life on first access: + if value is None: + value = MergeRecord() + self._data[i1][i2] = value + return value + + def __contains__(self, index): + # Avoid creating new MergeRecord objects here. 
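+        # (get_value() would create a MergeRecord on first access, so read
+        # self._data directly and treat a missing entry as unknown.)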
+        (i1, i2) = self._normalize_indexes(index)
+        value = self._data[i1][i2]
+        return (value is not None) and value.is_known()
+
+    def auto_complete_frontier(self):
+        """Complete the frontier using automerges.
+
+        If progress is blocked before the frontier is complete, raise
+        a FrontierBlockedError.  Save the state as progress is
+        made."""
+
+        progress_made = False
+        try:
+            while True:
+                frontier = MergeFrontier.map_known_frontier(self)
+                frontier.auto_expand()
+                self.save()
+                progress_made = True
+        except BlockCompleteError:
+            return
+        except FrontierBlockedError, e:
+            self.save()
+            if not progress_made:
+                # Adjust the error message:
+                raise FrontierBlockedError(
+                    'No progress was possible; suggest manual merge of %d-%d'
+                    % (e.i1, e.i2),
+                    e.i1, e.i2,
+                )
+            else:
+                raise
+
+    def find_index(self, commit):
+        """Return (i1,i2) for the specified commit.
+
+        Raise CommitNotFoundError if it is not known."""
+
+        for i2 in range(0, self.len2):
+            for i1 in range(0, self.len1):
+                if (i1, i2) in self:
+                    record = self[i1,i2]
+                    if record.sha1 == commit:
+                        return (i1, i2)
+        raise CommitNotFoundError(commit)
+
+    def incorporate_manual_merge(self, commit):
+        """Record commit as a manual merge of its parents.
+
+        Return the indexes (i1,i2) where it was recorded.  If the
+        commit is not usable for some reason, raise
+        ManualMergeUnusableError."""
+
+        parents = get_commit_parents(commit)
+        if len(parents) < 2:
+            raise ManualMergeUnusableError('it is not a merge', commit)
+        if len(parents) > 2:
+            raise ManualMergeUnusableError('it is an octopus merge', commit)
+        # Find the parents among our contents...
+        try:
+            (i1first, i2first) = self.find_index(parents[0])
+            (i1second, i2second) = self.find_index(parents[1])
+        except CommitNotFoundError:
+            raise ManualMergeUnusableError(
+                'its parents are not known merge commits', commit,
+            )
+        swapped = False
+        if i1first < i1second:
+            # Swap parents to make the parent from above the first parent:
+            (i1first, i2first, i1second, i2second) = (i1second, i2second, i1first, i2first)
+            swapped = True
+        if i1first != i1second + 1 or i2first != i2second - 1:
+            raise ManualMergeUnusableError(
+                'it is not a pairwise merge of adjacent parents', commit,
+            )
+        if swapped:
+            # Create a new merge with the parents in the conventional order:
+            commit = reparent(commit, [parents[1], parents[0]])
+
+        i1, i2 = i1first, i2second
+        self[i1, i2].record_merge(commit, MergeRecord.NEW_MANUAL)
+        return (i1, i2)
+
+    @staticmethod
+    def _is_ancestor(commit1, commit2):
+        """Return True iff commit1 is an ancestor of (or equal to) commit2."""
+
+        if commit1 == commit2:
+            return True
+        else:
+            return int(
+                check_output([
+                    'git', 'rev-list', '--count', '--ancestry-path',
+                    '%s..%s' % (commit1, commit2,),
+                ]).strip()
+            ) != 0
+
+    @staticmethod
+    def _set_refname(refname, commit, force=False):
+        try:
+            ref_oldval = get_commit_sha1(refname)
+        except ValueError:
+            # refname doesn't already exist; simply point it at commit:
+            check_call(['git', 'update-ref', refname, commit])
+            checkout(refname)
+        else:
+            # refname already exists.  This has two ramifications:
+            # 1. HEAD might point at it
+            # 2. We may only fast-forward it (unless force is set)
+            try:
+                head_refname = check_output(['git', 'symbolic-ref', '--quiet', 'HEAD']).strip()
+            except CalledProcessError:
+                head_refname = None
+
+            if not force:
+                if not MergeState._is_ancestor(ref_oldval, commit):
+                    raise Failure(
+                        '%s cannot be fast-forwarded to %s!'
% (refname, commit) + ) + + if head_refname == refname: + check_call(['git', 'reset', '--hard', commit]) + else: + check_call([ + 'git', 'update-ref', + '-m', 'imerge: recording final merge', + refname, commit, + ]) + checkout(refname) + + def simplify_to_rebase_with_history(self, refname, force=False): + i1 = self.len1 - 1 + for i2 in range(1, self.len2): + if not (i1, i2) in self: + raise Failure( + 'Cannot simplify to rebase-with-history because ' + 'merge %d-%d is not yet done' + % (i1, i2) + ) + + commit = self[i1, 0].sha1 + for i2 in range(1, self.len2): + orig = self[0, i2].sha1 + tree = get_tree(self[i1, i2].sha1) + + # Create a commit, copying the old log message: + commit = commit_tree(tree, [commit, orig], msg=get_log_message(orig)) + + self._set_refname(refname, commit, force=force) + + def simplify_to_rebase(self, refname, force=False): + i1 = self.len1 - 1 + for i2 in range(1, self.len2): + if not (i1, i2) in self: + raise Failure( + 'Cannot simplify to rebase because merge %d-%d is not yet done' + % (i1, i2) + ) + + commit = self[i1, 0].sha1 + for i2 in range(1, self.len2): + orig = self[0, i2].sha1 + tree = get_tree(self[i1, i2].sha1) + authordata = get_author_info(orig) + + # Create a commit, copying the old log message and author info: + commit = commit_tree( + tree, [commit], msg=get_log_message(orig), metadata=authordata, + ) + + self._set_refname(refname, commit, force=force) + + def simplify_to_merge(self, refname, force=False): + if not (-1, -1) in self: + raise Failure( + 'Cannot simplify to merge because merge %d-%d is not yet done' + % (self.len1 - 1, self.len2 - 1) + ) + tree = get_tree(self[-1, -1].sha1) + parents = [self[-1,0].sha1, self[0,-1].sha1] + + # Create a preliminary commit with a generic commit message: + sha1 = commit_tree( + tree, parents, + msg='Merge commit %s into commit %s' % (parents[1], parents[0]), + ) + + self._set_refname(refname, sha1, force=force) + + # Now let the user edit the commit log message: + check_call(['git', 'commit', '--amend']) + + def simplify(self, refname, force=False): + """Simplify this MergeState and save the result to refname. 
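+
+        Which simplification is performed depends on self.goal:
+        'merge', 'rebase', or 'rebase-with-history'.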
+ + The merge must be complete before calling this method.""" + + if self.goal == 'rebase-with-history': + self.simplify_to_rebase_with_history(refname, force=force) + elif self.goal == 'rebase': + self.simplify_to_rebase(refname, force=force) + elif self.goal == 'merge': + self.simplify_to_merge(refname, force=force) + else: + raise ValueError('Invalid value for goal (%r)' % (self.goal,)) + + def save(self): + """Write the current MergeState to the repository.""" + + blockers = [] + for i2 in range(0, self.len2): + for i1 in range(0, self.len1): + record = self[i1,i2] + if record.is_known(): + record.save(self.name, i1, i2) + if record.is_blocked(): + blockers.append((i1, i2)) + + state = dict( + version='.'.join(map(str, STATE_VERSION)), + goal=self.goal, + blockers=blockers, + ) + state_string = json.dumps(state, sort_keys=True) + '\n' + + cmd = ['git', 'hash-object', '-t', 'blob', '-w', '--stdin'] + p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + (out, err) = p.communicate(state_string) + retcode = p.poll() + if retcode: + raise CalledProcessError(retcode, cmd) + sha1 = out.strip() + check_call([ + 'git', 'update-ref', + '-m', 'imerge %r: Record state' % (self.name,), + 'refs/imerge/%s/state' % (self.name,), + sha1, + ]) + + def __str__(self): + return 'MergeState(%r, goal=%r)' % (self.name, self.goal,) + + +def request_user_merge(merge_state, i1, i2): + """Prepare the working tree for the user to do a manual merge. + + It is assumed that the merge above and to the left of (i1, i2) are + already done.""" + + above = merge_state[i1, i2 - 1] + left = merge_state[i1 - 1, i2] + if not above.is_known() or not left.is_known(): + raise RuntimeError('The parents of merge %d-%d are not ready' % (i1, i2)) + refname = MergeState.get_scratch_refname(merge_state.name) + check_call([ + 'git', 'update-ref', + '-m', 'imerge %r: Prepare merge %d-%d' % (merge_state.name, i1, i2,), + refname, above.sha1, + ]) + checkout(refname) + try: + check_call([ + 'git', 'merge', '--no-commit', + '-m', 'Merge %d-%d of incremental merge \'%s\'' % (i1, i2, merge_state.name,), + left.sha1, + ]) + except CalledProcessError: + # We expect an error (otherwise we would have automerged!) + pass + sys.stderr.write( + '\n' + 'Original first commit:\n' + ) + check_call(['git', 'log', '--no-walk', merge_state[i1,0].sha1]) + sys.stderr.write( + '\n' + 'Original second commit:\n' + ) + check_call(['git', 'log', '--no-walk', merge_state[0,i2].sha1]) + sys.stderr.write( + '\n' + 'There was a conflict merging commit %d-%d, shown above.\n' + 'Please resolve the conflict, commit the result, then type\n' + '\n' + ' git-imerge continue\n' + % (i1, i2) + ) + +def incorporate_user_merge(merge_state): + """If the user has done a merge for us, incorporate the results. + + If reference refs/heads/imerge/NAME exists, try to incorporate it + into merge_state, delete the reference, and return (i1,i2) + corresponding to the merge. If the reference cannot be used, + raise NoManualMergeError(). If the reference exists but cannot be + used, raise a ManualMergeUnusableError. This function must be + called with a clean work tree.""" + + refname = MergeState.get_scratch_refname(merge_state.name) + try: + commit = get_commit_sha1(refname) + except ValueError: + raise NoManualMergeError('There was no merge at %s!' 
% (refname,)) + + merge_frontier = MergeFrontier.map_known_frontier(merge_state) + + # This might throw ManualMergeUnusableError: + (i1, i2) = merge_state.incorporate_manual_merge(commit) + + try: + headref = check_output(['git', 'symbolic-ref', '-q', 'HEAD']).strip() + except CalledProcessError: + pass + else: + if headref == refname: + # Detach head so that we can delete refname. + check_call([ + 'git', 'update-ref', '--no-deref', + '-m', 'Detach HEAD from %s' % (refname,), + 'HEAD', commit, + ]) + + check_call([ + 'git', 'update-ref', + '-m', 'imerge %s: remove scratch reference' % (merge_state.name,), + '-d', refname, + ]) + + try: + # This might throw NotABlockingCommitError: + unblocked_block = merge_frontier.get_affected_blocker_block(i1, i2) + unblocked_block[1,1].record_blocked(False) + sys.stderr.write( + 'Merge has been recorded for merge %d-%d.\n' + % unblocked_block.get_original_indexes(1, 1) + ) + except NotABlockingCommitError: + raise + finally: + merge_state.save() + + +def choose_merge_name(name, default_to_unique=True): + # If a name was specified, try to use it and fail if not possible: + if name is not None: + if not MergeState.check_exists(name): + raise Failure('There is no incremental merge called \'%s\'!' % (name,)) + MergeState.set_default_name(name) + return name + + # A name was not specified. Try to use the default name: + default_name = MergeState.get_default_name() + if default_name: + if MergeState.check_exists(default_name): + return default_name + else: + # There's no reason to keep the invalid default around: + MergeState.set_default_name(None) + raise Failure( + 'Warning: The default incremental merge \'%s\' has disappeared.\n' + '(The setting imerge.default has been cleared.)\n' + 'Please select an incremental merge using --name' + % (default_name,) + ) + + if default_to_unique: + # If there is exactly one imerge, set it to be the default and use it. + names = list(MergeState.iter_existing_names()) + if len(names) == 1 and MergeState.check_exists(names[0]): + MergeState.set_default_name(names[0]) + return names[0] + + raise Failure('Please select an incremental merge using --name') + + +def read_merge_state(name=None): + return MergeState.read(choose_merge_name(name)) + + +@Failure.wrap +def main(args): + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + subparsers = parser.add_subparsers(dest='subcommand', help='sub-command') + + parser_start = subparsers.add_parser( + 'start', + help=( + 'start a new incremental merge ' + '(equivalent to "init" followed by "continue")' + ), + ) + parser_start.add_argument( + '--name', action='store', default=None, + help='name to use for this incremental merge', + ) + parser_start.add_argument( + '--goal', + action='store', default=DEFAULT_GOAL, + choices=ALLOWED_GOALS, + help='the goal of the incremental merge', + ) + #parser_start.add_argument( + # '--conflicts', ... 
+ # action='store', default='pairwise', + # choices=['pairwise', 'fewest'], + # help='what sort of conflicts will be presented to the user', + # ) + parser_start.add_argument( + '--first-parent', action='store_true', default=None, + help=( + 'handle only the first parent commits ' + '(this option is currently required)' + ), + ) + parser_start.add_argument( + 'branch', action='store', + help='the tip of the branch to be merged into HEAD', + ) + + parser_continue = subparsers.add_parser( + 'continue', + help=( + 'record the merge at branch imerge/NAME ' + 'and start the next step of the merge ' + '(equivalent to "record" followed by "autofill" ' + 'and then sets up the working copy with the next ' + 'conflict that has to be resolved manually)' + ), + ) + parser_continue.add_argument( + '--name', action='store', default=None, + help='name of merge to continue', + ) + + parser_finish = subparsers.add_parser( + 'finish', + help=( + 'simplify then remove a completed incremental merge ' + '(equivalent to "simplify" followed by "remove")' + ), + ) + parser_finish.add_argument( + '--name', action='store', default=None, + help='name of merge to finish', + ) + parser_finish.add_argument( + '--goal', + action='store', default=None, + choices=ALLOWED_GOALS, + help=( + 'the type of simplification to be made ' + '(default is the value provided to "init" or "start")' + ), + ) + parser_finish.add_argument( + '--branch', + action='store', default=None, + help=( + 'the name of the branch to which to store the result ' + '(default is the name of the merge). If ' + 'BRANCH already exists then it must be able to be ' + 'fast-forwarded to the result unless the --force option is ' + 'specified.' + ), + ) + parser_finish.add_argument( + '--force', + action='store_true', default=False, + help='allow the target branch to be updated in a non-fast-forward manner', + ) + + parser_diagram = subparsers.add_parser( + 'diagram', + help='display a diagram of the current state of a merge', + ) + parser_diagram.add_argument( + '--name', action='store', default=None, + help='name of merge to diagram', + ) + parser_diagram.add_argument( + '--commits', action='store_true', default=False, + help='show the merges that have been made so far', + ) + parser_diagram.add_argument( + '--frontier', action='store_true', default=False, + help='show the current merge frontier', + ) + parser_diagram.add_argument( + '--html', action='store', default=None, + help='generate HTML diagram showing the current merge frontier', + ) + parser_diagram.add_argument( + '--color', dest='color', action='store_true', default=None, + help='draw diagram with colors', + ) + parser_diagram.add_argument( + '--no-color', dest='color', action='store_false', + help='draw diagram without colors', + ) + + parser_list = subparsers.add_parser( + 'list', + help=( + 'list the names of incremental merges that are currently in progress. ' + 'The active merge is shown with an asterisk next to it.' + ), + ) + + parser_init = subparsers.add_parser( + 'init', + help='initialize a new incremental merge', + ) + parser_init.add_argument( + '--name', action='store', default=None, + help='name to use for this incremental merge', + ) + parser_init.add_argument( + '--goal', + action='store', default=DEFAULT_GOAL, + choices=ALLOWED_GOALS, + help='the goal of the incremental merge', + ) + #parser_init.add_argument( + # '--conflicts', ... 
+ # action='store', default='pairwise', + # choices=['pairwise', 'fewest'], + # help='what sort of conflicts will be presented to the user', + # ) + parser_init.add_argument( + '--first-parent', action='store_true', default=None, + help=( + 'handle only the first parent commits ' + '(this option is currently required)' + ), + ) + parser_init.add_argument( + 'branch', action='store', + help='the tip of the branch to be merged into HEAD', + ) + + parser_record = subparsers.add_parser( + 'record', + help='record the merge at branch imerge/NAME', + ) + parser_record.add_argument( + '--name', action='store', default=None, + help='name of merge to which the merge should be added', + ) + + parser_autofill = subparsers.add_parser( + 'autofill', + help='autofill non-conflicting merges', + ) + parser_autofill.add_argument( + '--name', action='store', default=None, + help='name of merge to autofill', + ) + + parser_simplify = subparsers.add_parser( + 'simplify', + help=( + 'simplify a completed incremental merge by discarding unneeded ' + 'intermediate merges and cleaning up the ancestry of the commits ' + 'that are retained' + ), + ) + parser_simplify.add_argument( + '--name', action='store', default=None, + help='name of merge to simplify', + ) + parser_simplify.add_argument( + '--goal', + action='store', default=None, + choices=ALLOWED_GOALS, + help=( + 'the type of simplification to be made ' + '(default is the value provided to "init" or "start")' + ), + ) + parser_simplify.add_argument( + '--branch', + action='store', default=None, + help=( + 'the name of the branch to which to store the result ' + '(default is the name of the merge). If ' + 'BRANCH already exists then it must be able to be ' + 'fast-forwarded to the result unless the --force option is ' + 'specified.' 
+        ),
+    )
+    parser_simplify.add_argument(
+        '--force',
+        action='store_true', default=False,
+        help='allow the target branch to be updated in a non-fast-forward manner',
+    )
+
+    parser_remove = subparsers.add_parser(
+        'remove',
+        help='irrevocably remove an incremental merge',
+    )
+    parser_remove.add_argument(
+        '--name', action='store', default=None,
+        help='name of incremental merge to remove',
+    )
+
+    parser_reparent = subparsers.add_parser(
+        'reparent',
+        help='change the parents of the HEAD commit',
+    )
+    parser_reparent.add_argument(
+        'parents', nargs='*', help='[PARENT...]',
+    )
+
+    options = parser.parse_args(args)
+
+    if options.subcommand == 'list':
+        default_merge = MergeState.get_default_name()
+        for name in MergeState.iter_existing_names():
+            if name == default_merge:
+                sys.stdout.write('* %s\n' % (name,))
+            else:
+                sys.stdout.write('  %s\n' % (name,))
+    elif options.subcommand == 'init':
+        require_clean_work_tree('proceed')
+
+        if not options.first_parent:
+            parser.error(
+                'The --first-parent option is currently required for the "init" command'
+            )
+        if not options.name:
+            parser.error(
+                'Please specify the --name to be used for this incremental merge'
+            )
+        merge_state = MergeState.initialize(
+            options.name, options.goal, 'HEAD', options.branch,
+        )
+        merge_state.save()
+        MergeState.set_default_name(options.name)
+    elif options.subcommand == 'start':
+        require_clean_work_tree('proceed')
+
+        if not options.first_parent:
+            parser.error(
+                'The --first-parent option is currently required for the "start" command'
+            )
+        if not options.name:
+            parser.error(
+                'Please specify the --name to be used for this incremental merge'
+            )
+        merge_state = MergeState.initialize(
+            options.name, options.goal, 'HEAD', options.branch,
+        )
+        merge_state.save()
+        MergeState.set_default_name(options.name)
+
+        try:
+            merge_state.auto_complete_frontier()
+        except FrontierBlockedError, e:
+            request_user_merge(merge_state, e.i1, e.i2)
+        else:
+            sys.stderr.write('Merge is complete!\n')
+    elif options.subcommand == 'remove':
+        MergeState.remove(choose_merge_name(options.name, default_to_unique=False))
+    elif options.subcommand == 'continue':
+        require_clean_work_tree('proceed')
+        merge_state = read_merge_state(options.name)
+        try:
+            incorporate_user_merge(merge_state)
+        except NoManualMergeError:
+            pass
+        except NotABlockingCommitError, e:
+            raise Failure(str(e))
+        except ManualMergeUnusableError, e:
+            raise Failure(str(e))
+
+        try:
+            merge_state.auto_complete_frontier()
+        except FrontierBlockedError, e:
+            request_user_merge(merge_state, e.i1, e.i2)
+        else:
+            sys.stderr.write('Merge is complete!\n')
+    elif options.subcommand == 'record':
+        require_clean_work_tree('proceed')
+        merge_state = read_merge_state(options.name)
+        try:
+            incorporate_user_merge(merge_state)
+        except NoManualMergeError, e:
+            raise Failure(str(e))
+        except NotABlockingCommitError, e:
+            raise Failure(str(e))
+        except ManualMergeUnusableError, e:
+            raise Failure(str(e))
+
+        try:
+            merge_state.auto_complete_frontier()
+        except FrontierBlockedError, e:
+            pass
+        else:
+            sys.stderr.write('Merge is complete!\n')
+    elif options.subcommand == 'autofill':
+        require_clean_work_tree('proceed')
+        merge_state = read_merge_state(options.name)
+        with TemporaryHead():
+            try:
+                merge_state.auto_complete_frontier()
+            except FrontierBlockedError, e:
+                raise Failure(str(e))
+    elif options.subcommand == 'simplify':
+        require_clean_work_tree('proceed')
+        merge_state = read_merge_state(options.name)
+        merge_frontier = MergeFrontier.map_known_frontier(merge_state)
+        if not merge_frontier.is_complete():
+            raise Failure('Merge %s is not yet complete!' % (merge_state.name,))
+        refname = 'refs/heads/%s' % ((options.branch or merge_state.name),)
+        if options.goal is not None:
+            merge_state.set_goal(options.goal)
+            merge_state.save()
+        merge_state.simplify(refname, force=options.force)
+    elif options.subcommand == 'finish':
+        require_clean_work_tree('proceed')
+        options.name = choose_merge_name(options.name, default_to_unique=False)
+        merge_state = read_merge_state(options.name)
+        merge_frontier = MergeFrontier.map_known_frontier(merge_state)
+        if not merge_frontier.is_complete():
+            raise Failure('Merge %s is not yet complete!' % (merge_state.name,))
+        refname = 'refs/heads/%s' % ((options.branch or merge_state.name),)
+        if options.goal is not None:
+            merge_state.set_goal(options.goal)
+            merge_state.save()
+        merge_state.simplify(refname, force=options.force)
+        MergeState.remove(options.name)
+    elif options.subcommand == 'diagram':
+        if not (options.commits or options.frontier):
+            options.frontier = True
+        if not (options.color or (options.color is None and sys.stdout.isatty())):
+            AnsiColor.disable()
+
+        merge_state = read_merge_state(options.name)
+        if options.commits:
+            merge_state.write(sys.stdout)
+            sys.stdout.write('\n')
+        if options.frontier:
+            merge_frontier = MergeFrontier.map_known_frontier(merge_state)
+            merge_frontier.write(sys.stdout)
+            sys.stdout.write('\n')
+        if options.html:
+            merge_frontier = MergeFrontier.map_known_frontier(merge_state)
+            html = open(options.html, "w")
+            merge_frontier.write_html(html, merge_state.name)
+            html.close()
+        sys.stdout.write(
+            'Key:\n'
+        )
+        if options.frontier:
+            sys.stdout.write(
+                '  |,-,+ = rectangles forming current merge frontier\n'
+            )
+        sys.stdout.write(
+            '  * = merge done manually\n'
+            '  . = merge done automatically\n'
+            '  # = conflict that is currently blocking progress\n'
+            '  @ = merge was blocked but has been resolved\n'
+            '  ? = no merge recorded\n'
+            '\n'
+        )
+    elif options.subcommand == 'reparent':
+        try:
+            commit_sha1 = get_commit_sha1('HEAD')
+        except ValueError:
+            sys.exit('HEAD is not a valid commit')
+
+        try:
+            parent_sha1s = map(get_commit_sha1, options.parents)
+        except ValueError, e:
+            sys.exit(e.message)
+
+        sys.stdout.write('%s\n' % (reparent(commit_sha1, parent_sha1s),))
+    else:
+        parser.error('Unrecognized subcommand')
+
+
+main(sys.argv[1:])
+
diff --git a/git-mv-test b/git-mv-test
new file mode 100644
index 0000000..bdc0a93
--- /dev/null
+++ b/git-mv-test
@@ -0,0 +1,234 @@
+#!/usr/bin/env bash
+
+# This script builds on the excellent work by Lucas Jenß, described in his blog
+# post "Integrating a submodule into the parent repository", but automates the
+# entire process and cleans up a few other corner cases.
+# https://x3ro.de/2013/09/01/Integrating-a-submodule-into-the-parent-repository.html
+
+function usage(){
+    echo "Usage: $0 <submodule-name> <url> [<branch>]"
+    echo "Merge a single branch of <submodule-name> into a repo, retaining file history."
+    echo "If <branch> provided then <branch> will be merged, otherwise master."
+    echo ""
+    echo "options:"
+    echo "  -h, --help      Print this message"
+    echo "  -v, --verbose   Display verbose output"
+}
+
+function abort {
+    echo "$(tput setaf 1)$1$(tput sgr0)"
+    exit 1
+}
+
+function request_confirmation {
+    read -p "$(tput setaf 4)$1 (y/n) $(tput sgr0)"
+    [ "$REPLY" == "y" ] || abort "Aborted!"
+} + +function warn() { + cat << EOF + This script will convert your "${sub}" git submodule into + a simple subdirectory in the parent repository while retaining all + contents, file history and its own submodules. + + The script will: + * delete the ${sub} submodule configuration from .gitmodules and + .git/config and commit it. + * rewrite the entire history of the ${sub} submodule so that all + paths are prefixed by ${path}. + This ensures that git log will correctly follow the original file + history. + * merge the submodule into its parent repository and commit it. + * reinstate any of the submodule's own submodules as part of the parent + repository + + NOTE: This script might completely garble your repository, so PLEASE apply + this only to a fresh clone of the repository where it does not matter if + the repo is destroyed. It would be wise to keep a backup clone of your + repository, so that you can reconstitute it if need be. You have been + warned. Use at your own risk. + +EOF + + request_confirmation "Do you want to proceed?" +} + +function git_version_lte() { + OP_VERSION=$(printf "%03d%03d%03d%03d" $(echo "$1" | tr '.' '\n' | head -n 4)) + GIT_VERSION=$(git version) + GIT_VERSION=$(printf "%03d%03d%03d%03d" $(echo "${GIT_VERSION#git version }" | sed -E "s/([0-9.]*).*/\1/" | tr '.' '\n' | head -n 4)) + echo -e "${GIT_VERSION}\n${OP_VERSION}" | sort | head -n1 + [ ${GIT_VERSION} -le ${OP_VERSION} ] +} + +# Convert a url to an absolute url +# +# Parameters: +# $1: The url to check +# $2: The base url to use if $1 is a relative path +# +# Returns an absolute url +function absolute_url { + local url=$1 + local base=$2 + + if [[ $url =~ \.\. ]]; then + echo "$base/$(basename $url)" + else + echo $url + fi +} + +function main() { + + warn + + if [ "${verbose}" == "true" ]; then + set -x + fi + + # Remove submodule and commit + #git config -f .gitmodules --remove-section "submodule.${sub}" + #if git config -f .git/config --get "submodule.${sub}.url"; then + # git config -f .git/config --remove-section "submodule.${sub}" + #fi + #rm -rf "${path}" + #git add -A . 
+    #git commit -m "Remove submodule ${sub}"
+    #rm -rf ".git/modules/${sub}"
+
+    # Rewrite submodule history
+    local tmpdir="$(mktemp -d -t submodule-rewrite-XXXXXX)"
+    git clone -b "${branch}" "${url}" "${tmpdir}"
+    pushd "${tmpdir}"
+    local tab="$(printf '\t')"
+    local filter="git ls-files -s | sed \"s:${tab}:${tab}${path}/:\" | GIT_INDEX_FILE=\${GIT_INDEX_FILE}.new git update-index --index-info && mv \${GIT_INDEX_FILE}.new \${GIT_INDEX_FILE} || true"
+    git filter-branch --index-filter "${filter}" HEAD
+    popd
+
+    # Merge in rewritten submodule history
+    git remote add "${sub}" "${tmpdir}"
+    git fetch "${sub}"
+
+    if git_version_lte 2.8.4
+    then
+        # Previous to git 2.9.0 the parameter would yield an error
+        ALLOW_UNRELATED_HISTORIES=""
+    else
+        # From git 2.9.0 this parameter is required
+        ALLOW_UNRELATED_HISTORIES="--allow-unrelated-histories"
+    fi
+
+    git merge -s ours --no-commit ${ALLOW_UNRELATED_HISTORIES} "${sub}/${branch}"
+    rm -rf "${tmpdir}"
+
+    # Add submodule content
+    git clone -b "${branch}" "${url}" "${path}"
+
+    # Transfer its own submodules to the parent
+    #add_submod_cmds=""
+    #if [ -f ${path}/.gitmodules ]; then
+    #    sub_names=$(git config -f ${path}/.gitmodules --get-regex path | sed 's/.* \(.*\)$/\1/g')
+
+    #    for sub_name in ${sub_names}; do
+    #        sub_branch=$(git config -f ${path}/.gitmodules --get "submodule.${sub_name}.branch") || true
+    #        [ -n "${sub_branch}" ] && sub_branch="-b ${sub_branch}"
+    #        sub_path=$(git config -f ${path}/.gitmodules --get "submodule.${sub_name}.path")
+    #        sub_url=$(git config -f ${path}/.gitmodules --get "submodule.${sub_name}.url")
+
+    #        # remove the sub-submodule (which should be empty) and cache the command to reinstate it
+    #        rmdir ${path}/${sub_path}
+    #        add_submod_cmds="$add_submod_cmds git submodule add ${sub_branch} --name ${sub_name} -- ${sub_url} ${path}/${sub_path} ; "
+    #    done
+    #fi
+
+    rm -rf "${path}/.git"
+    #"${path}/.gitmodules"
+    git add "${path}"
+    #if [ -n "${add_submod_cmds}" ]; then
+    #    bash -c "${add_submod_cmds}"
+    #fi
+
+    git commit -m "Merge submodule contents for ${sub}/${branch}"
+    #git config -f .git/config --remove-section "remote.${sub}"
+
+    set +x
+    echo "$(tput setaf 2)Submodule merge complete. Push changes after review.$(tput sgr0)"
+}
+
+set -euo pipefail
+
+declare verbose=false
+while [ $# -gt 0 ]; do
+    case "$1" in
+        (-h|--help)
+            usage
+            exit 0
+            ;;
+        (-v|--verbose)
+            verbose=true
+            ;;
+        (*)
+            break
+            ;;
+    esac
+    shift
+done
+
+declare sub="${1:-}"
+declare url="${2:-}"
+declare branch="${3:-master}"
+
+if [ -z "${sub}" ]; then
+    >&2 echo "Error: No submodule specified"
+    usage
+    exit 1
+fi
+
+shift
+
+if [ -n "${1:-}" ]; then
+    shift
+fi
+
+if [ -n "${1:-}" ]; then
+    shift
+fi
+
+if [ -n "${1:-}" ]; then
+    >&2 echo "Error: Unknown option: ${1:-}"
+    usage
+    exit 1
+fi
+
+if ! [ -d ".git" ]; then
+    >&2 echo "Error: No git repository found. Must be run from the root of a git repository"
+    usage
+    exit 1
+fi
+
+#declare path="$(git config -f .gitmodules --get "submodule.${sub}.path")"
+declare path=$sub
+#declare superproject_dir="$(dirname $(git config --get remote.origin.url))"
+#declare url=$(absolute_url $(git config -f .gitmodules --get "submodule.${sub}.url") $superproject_dir)
+
+
+if [ -z "${path}" ]; then
+    >&2 echo "Error: Submodule not found: ${sub}"
+    usage
+    exit 1
+fi
+
+#if [ -z "${superproject_dir}" ]; then
+#    >&2 echo "Error: Could not determine the remote origin for this superproject: ${superproject_dir}"
+#    usage
+#    exit 1
+#fi
+
+#if ! [ -d "${path}" ]; then
+#    >&2 echo "Error: Submodule path not found: ${path}"
+#    usage
+#    exit 1
+#fi
+
+main
diff --git a/git-remove-history b/git-remove-history
new file mode 100755
index 0000000..1f329eb
--- /dev/null
+++ b/git-remove-history
@@ -0,0 +1,25 @@
+#!/bin/bash
+set -o errexit
+
+# Author: David Underhill
+# Script to permanently delete files/folders from your git repository.  To use
+# it, cd to your repository's root and then run the script with a list of paths
+# you want to delete, e.g., git-delete-history path1 path2
+
+if [ $# -eq 0 ]; then
+    exit 0
+fi
+
+# make sure we're at the root of git repo
+if [ ! -d .git ]; then
+    echo "Error: must run this script from the root of a git repository"
+    exit 1
+fi
+
+# remove all paths passed as arguments from the history of the repo
+files=$@
+git filter-branch --index-filter "git rm -rf --cached --ignore-unmatch $files" HEAD
+
+# remove the temporary history git-filter-branch otherwise leaves behind for a long time
+rm -rf .git/refs/original/ && git reflog expire --all && git gc --aggressive --prune
+
diff --git a/git-submodule-rewrite b/git-submodule-rewrite
new file mode 100755
index 0000000..404fbd9
--- /dev/null
+++ b/git-submodule-rewrite
@@ -0,0 +1,226 @@
+#!/usr/bin/env bash
+
+# This script builds on the excellent work by Lucas Jenß, described in his blog
+# post "Integrating a submodule into the parent repository", but automates the
+# entire process and cleans up a few other corner cases.
+# https://x3ro.de/2013/09/01/Integrating-a-submodule-into-the-parent-repository.html
+
+function usage(){
+  echo "Usage: $0 <submodule-name> [<branch>]"
+  echo "Merge a single branch of <submodule-name> into a repo, retaining file history."
+  echo "If <branch> provided then <branch> will be merged, otherwise master."
+  echo ""
+  echo "options:"
+  echo "  -h, --help      Print this message"
+  echo "  -v, --verbose   Display verbose output"
+}
+
+function abort {
+  echo "$(tput setaf 1)$1$(tput sgr0)"
+  exit 1
+}
+
+function request_confirmation {
+  read -p "$(tput setaf 4)$1 (y/n) $(tput sgr0)"
+  [ "$REPLY" == "y" ] || abort "Aborted!"
+}
+
+function warn() {
+  cat << EOF
+    This script will convert your "${sub}" git submodule into
+    a simple subdirectory in the parent repository while retaining all
+    contents, file history and its own submodules.
+
+    The script will:
+      * delete the ${sub} submodule configuration from .gitmodules and
+        .git/config and commit it.
+      * rewrite the entire history of the ${sub} submodule so that all
+        paths are prefixed by ${path}.
+        This ensures that git log will correctly follow the original file
+        history.
+      * merge the submodule into its parent repository and commit it.
+      * reinstate any of the submodule's own submodules as part of the parent
+        repository
+
+    NOTE: This script might completely garble your repository, so PLEASE apply
+    this only to a fresh clone of the repository where it does not matter if
+    the repo is destroyed.  It would be wise to keep a backup clone of your
+    repository, so that you can reconstitute it if need be.  You have been
+    warned.  Use at your own risk.
+
+EOF
+
+  request_confirmation "Do you want to proceed?"
+}
+
+function git_version_lte() {
+  OP_VERSION=$(printf "%03d%03d%03d%03d" $(echo "$1" | tr '.' '\n' | head -n 4))
+  GIT_VERSION=$(git version)
+  GIT_VERSION=$(printf "%03d%03d%03d%03d" $(echo "${GIT_VERSION#git version }" | sed -E "s/([0-9.]*).*/\1/" | tr '.' '\n' | head -n 4))
'\n' | head -n 4)) + echo -e "${GIT_VERSION}\n${OP_VERSION}" | sort | head -n1 + [ ${GIT_VERSION} -le ${OP_VERSION} ] +} + +# Convert a url to an absolute url +# +# Parameters: +# $1: The url to check +# $2: The base url to use if $1 is a relative path +# +# Returns an absolute url +function absolute_url { + local url=$1 + local base=$2 + + if [[ $url =~ \.\. ]]; then + echo "$base/$(basename $url)" + else + echo $url + fi +} + +function main() { + + warn + + if [ "${verbose}" == "true" ]; then + set -x + fi + + # Remove submodule and commit + git config -f .gitmodules --remove-section "submodule.${sub}" + if git config -f .git/config --get "submodule.${sub}.url"; then + git config -f .git/config --remove-section "submodule.${sub}" + fi + rm -rf "${path}" + git add -A . + git commit -m "Remove submodule ${sub}" + rm -rf ".git/modules/${sub}" + + # Rewrite submodule history + local tmpdir="$(mktemp -d -t submodule-rewrite-XXXXXX)" + git clone -b "${branch}" "${url}" "${tmpdir}" + pushd "${tmpdir}" + local tab="$(printf '\t')" + local filter="git ls-files -s | sed \"s:${tab}:${tab}${path}/:\" | GIT_INDEX_FILE=\${GIT_INDEX_FILE}.new git update-index --index-info && mv \${GIT_INDEX_FILE}.new \${GIT_INDEX_FILE} || true" + git filter-branch --index-filter "${filter}" HEAD + popd + + # Merge in rewritten submodule history + git remote add "${sub}" "${tmpdir}" + git fetch "${sub}" + + if git_version_lte 2.8.4 + then + # Previous to git 2.9.0 the parameter would yield an error + ALLOW_UNRELATED_HISTORIES="" + else + # From git 2.9.0 this parameter is required + ALLOW_UNRELATED_HISTORIES="--allow-unrelated-histories" + fi + + git merge -s ours --no-commit ${ALLOW_UNRELATED_HISTORIES} "${sub}/${branch}" + rm -rf tmpdir + + # Add submodule content + git clone -b "${branch}" "${url}" "${path}" + + # Transfer its own submodules to the parent + add_submod_cmds="" + if [ -f ${path}/.gitmodules ]; then + sub_names=$(git config -f ${path}/.gitmodules --get-regex path | sed 's/.* \(.*\)$/\1/g') + + for sub_name in ${sub_names}; do + sub_branch=$(git config -f ${path}/.gitmodules --get "submodule.${sub_name}.branch") || true + [ -n "${sub_branch}" ] && sub_branch="-b ${sub_branch}" + sub_path=$(git config -f ${path}/.gitmodules --get "submodule.${sub_name}.path") + sub_url=$(git config -f ${path}/.gitmodules --get "submodule.${sub_name}.url") + + # remove the sub-submodule (which should be empty) and cache the command to reinstate it + rmdir ${path}/${sub_path} + add_submod_cmds="$add_submod_cmds git submodule add ${sub_branch} --name ${sub_name} -- ${sub_url} ${path}/${sub_path} ; " + done + fi + + rm -rf "${path}/.git" "${path}/.gitmodules" + git add "${path}" + if [ -n "${add_submod_cmds}" ]; then + bash -c "${add_submod_cmds}" + fi + + git commit -m "Merge submodule contents for ${sub}/${branch}" + git config -f .git/config --remove-section "remote.${sub}" + + set +x + echo "$(tput setaf 2)Submodule merge complete. Push changes after review.$(tput sgr0)" +} + +set -euo pipefail + +declare verbose=false +while [ $# -gt 0 ]; do + case "$1" in + (-h|--help) + usage + exit 0 + ;; + (-v|--verbose) + verbose=true + ;; + (*) + break + ;; + esac + shift +done + +declare sub="${1:-}" +declare branch="${2:-master}" + +if [ -z "${sub}" ]; then + >&2 echo "Error: No submodule specified" + usage + exit 1 +fi + +shift + +if [ -n "${1:-}" ]; then + shift +fi + +if [ -n "${1:-}" ]; then + >&2 echo "Error: Unknown option: ${1:-}" + usage + exit 1 +fi + +if ! [ -d ".git" ]; then + >&2 echo "Error: No git repository found. 
Must be run from the root of a git repository" + usage + exit 1 +fi + +declare path="$(git config -f .gitmodules --get "submodule.${sub}.path")" +declare superproject_dir="$(dirname $(git config --get remote.origin.url))" +declare url=$(absolute_url $(git config -f .gitmodules --get "submodule.${sub}.url") $superproject_dir) + +if [ -z "${path}" ]; then + >&2 echo "Error: Submodule not found: ${sub}" + usage + exit 1 +fi + +if [ -z "${superproject_dir}" ]; then + >&2 echo "Error: Could not determine the remote origin for this superproject: ${superproject_dir}" + usage + exit 1 +fi + +if ! [ -d "${path}" ]; then + >&2 echo "Error: Submodule path not found: ${path}" + usage + exit 1 +fi + +main diff --git a/mirrors.update b/mirrors.update index 904c335..01d6764 100644 --- a/mirrors.update +++ b/mirrors.update @@ -1,15 +1,11 @@ #!/bin/bash -fhemdir=/home/marc/src/fhem -fhem_mirror=$fhemdir/fhem-git -myfhem=$fhemdir/my +fhemdir=/dat/src/haus +#fhem_mirror=$fhemdir/fhem-sf/code +fhem_mirror=$fhemdir//fhem-svn cd $fhem_mirror git svn rebase -#git merge remotes/git-svn git push github master -#cd $myfhem -#git fetch origin -#git push github master diff --git a/nfs-tools b/nfs-tools new file mode 100644 index 0000000..7887098 --- /dev/null +++ b/nfs-tools @@ -0,0 +1,2 @@ + + showmount -e nas diff --git a/podcastupdate.sh b/podcastupdate.sh index dee30fd..85abcb7 100755 --- a/podcastupdate.sh +++ b/podcastupdate.sh @@ -1,13 +1,15 @@ #!/bin/sh LOGFILE=/tmp/podcatcher.log +podcastdir=/dat/audio/podcast + set -x -sleep +sleep 1 #swapon /dat/tmp/swap.img -svdrpsend MESG "Podcasts aktualisieren-gestarted" -cd /mp3/podcast +#svdrpsend MESG "Podcasts aktualisieren-gestarted" +cd $podcastdir echo "start ----------------------------" >> $LOGFILE date >> $LOGFILE -./catch.sh >> $LOGFILE 2>&1 -svdrpsend MESG "Podcasts aktualisiert" +./catch2.sh >> $LOGFILE 2>&1 +#svdrpsend MESG "Podcasts aktualisiert" #swapoff /dat/tmp/swap.img diff --git a/podcatcher b/podcatcher new file mode 100755 index 0000000..338f54a --- /dev/null +++ b/podcatcher @@ -0,0 +1,2528 @@ +#!/usr/bin/env ruby +#:mode=ruby: + +# This program is released under the GNU General Public Licence. Please see +# http://opensource.org/licenses/gpl-license.php for more information. +# Author: Doga Armangil, armangild@yahoo.com + +PODCATCHER_WEBSITE = 'http://podcatcher.rubyforge.org/' +PODCATCHER_VERSION = '3.1.6' + +# todo: allow files to be selected not only by its MIME type, but also other attributes. Example: --content '^video/ width:680-1024 height:400' +# todo: --proxy option +# todo: download at most one enclosure or media:content per rss item +# todo: support for --content and --language options in search mode +# todo: code refactoring: do not duplicate option handling for 'options' option, factor out conversion between MIME type and file extension, avoid code duplication between implementations of download and search functions +# todo: "item search" - search function that generates a feed containing relevant items of feeds (":item" or ":show" ?) 
+# todo: option to specify share ratio for torrents +# todo: symlink support in directory (for history, cache etc) +# todo: improve playlist generation when using --strategy cache (only include audio and video content) +# todo: improve --feeds implementation +# todo: resuming of failed media downloads +# todo: --subscriptions option (subscription d/l limit) +# todo: informative exception messages +# todo: only fetch bittorrent metainfo for d/l candidates +# todo: option to download shows concurrently +# todo: "lock" directory to prevent concurrency issues +# todo: option to throttle non-BitTorrent downloads +# 3.1.6: fix a bug whereby a failed content download caused all other content from the same feed to be ignored, fix ruby 1.9 compatibility bug (String#each becomes String#each_line) +# 3.1.5: updated --arguments file format (# now comments out line), updated sponsor message +# 3.1.4: added publication date to content titles in generated playlists, added better handling of invalid URLs in feeds and subscription lists (such URLs are now simply ignored instead of causing the whole document to be skipped) +# 3.1.3: --restrictednames option is now enabled by default, fixed directory name generation bug that allowed '!' character when --perfeed and --restrictednames options were used simultaneously, updated sponsor message +# 3.1.2: modified the help text that appears when --help option is used, updated sponsor message +# 3.1.1: fixed a bug in verbose mode that caused content to be listed twice if it is declared as both RSS enclosure and Media RSS content, changed the sponsor message +# 3.1.0: added support for yyyy and yyyy.mm formats for --horizon parameter +# 3.0.0: added the --cachedir option for explicitely specifying cache directory, added --language option for selecting feeds by language, added the --horizon option that prevents the downloading of content older than a given date, added --restrictednames option for using content subdirectory and file names that are acceptable for restrictive filesystems such as VFAT, http://search.yahoo.com/mrss is now accepted as namespace for RSS Media module, fixed a bug in update checking (flash now only appears if podcatcherstats version is newer than current one), fixed a bug that caused votes to be sent for feeds that have file URLs or filenames. 
+# 2.0.1: fixed Yahoo Media RSS module handling bug +# 2.0.0: fixed a bug that caused the generation of invalid playlists for feeds containing control characters (such as Ctrl-M) in their title or in the title of one of its entries, added --order option that determines feed order, changed default feed order from 'sequential' to 'random', all content is downloaded by default (not only MP3), changed default cache size to 512MB, added support for the Yahoo Media RSS module (http://search.yahoo.com/mrss), added strategies for downloading content in chronological order (chron_one, chron, chron_all), added -C option that specifies the types of content that are to be received (overrides the default types), added -o option for reading options from a file, added -A option for reading arguments from a file, changed the default download strategy to 'one', added -V alias for --version option, fixed a bug that caused the order of feeds to be ignored in OPML files, fixed a bug that caused downloads of some video files to fail in vodcatcher mode, added --checkforupdate option for informing the user when a new version is available, added --vote option for voting in favour of downloaded podcasts at podcatcherstats.com +# 1.3.7: added status code and content type check when downloading a media file using HTTP, removed some debugging comments +# 1.3.5: fixed a bug that caused wrong cache filenames to be generated when an HTTP redirection was received from a server, added Operating System and processor information to the User-Agent HTTP header sent to web servers +# 1.3.4: fixed the help message +# 1.3.3: added the -p option that assigns a separate cache subfolder to each feed +# 1.3.2: bug fix +# 1.3.1: added robust handling of subscription lists that directly link to media files (such links are now ignored), fixed an OPML generation bug for interrupted searches +# 1.3.0: added search function for online podcast directories such as the iPodder podcast directory, added xspf support +# 1.2.0: added support for decentralized subscription lists (i.e. 
subscription lists that point to other subscription lists), fixed a bug that sometimes caused an invalid Referer header to be sent in HTTP requests, added the -f option, added support for Atom feeds that do not list items in reverse chronological order, added support for RSS/Atom feeds as command line arguments, added support for Extended M3U and Extended PLS playlist formats, M3U playlists can now also be generated in vodcatcher mode, m3u is now the default type in vodcatcher mode, added "cache" strategy which deprecates -c option +# 1.1.1: added support for iTunes .pcast subscription files +# 1.1.0: names of media files downloaded via BitTorrent are now preserved, done some refactoring so that the script can function as a vodcatcher +# 1.0.4: added support for RSS feeds that do not list items in reverse chronological order +# 1.0.3: fixed an RSS parsing bug that caused enclosures of some feeds to be ignored +# 1.0.2: fixed some minor MP3 file naming bugs +# 1.0.1: names of downloaded MP3 files are now preserved +# 1.0.0: added ATOM support +# 0.4.0: added duplicate removal for MP3, RSS/Atom and OPML URLs and pathnames; added the -i option that attempts to increase the listen-time given to podcasts which frequently release short shows +# 0.3.2: fixed BitTorrent handling bug +# 0.3.1: added robust handling of network exceptions, removed support for Ctrl-C to terminate execution +# 0.3.0: added support for opml format used by podcastalley, added podcast title information in playlists, reduced RAM usage by not loading the history file in memory, history file and playlist are now updated after each download +# 0.2.1: added support for Ctrl-C to terminate execution; added robust handling of some bad command line arguments; (James Carter patch) fixed the "OPML truncation" issue where a bad RSS feed was considered the last of the list +# 0.2.0: added a new download strategy ("one"); added support for more than one OPML argument, fixed some issues +# 0.1.7: bug fix +# 0.1.6: added internal Bittorrent support, fixed flawed handling of some exceptions +# 0.1.5: changed -d option description, added external handling of Bittorrent files +# 0.1.4: bug-fix, robust handling of bad //enclosure/@length attributes, handling of relative enclosure URLs +# 0.1.3: podcast download strategies (and changed default), download retries +# 0.1.2: added TOX playlist support, added HTTP and FTP support for the OPML parameter, done some code clean-up +# 0.1.1: fixed RSS parsing issue +# 0.1.0: initial version + +require 'uri' +require 'open-uri' +require 'ostruct' +require 'optparse' +require 'pathname' +require 'date' +require 'cgi' +require 'yaml' +require 'net/http' +require 'rexml/document' + +include REXML + +#PODCATCHER_ENV = :development +PODCATCHER_ENV = :production + +USER_AGENT = "podcatcher/#{PODCATCHER_VERSION} Ruby/#{RUBY_VERSION} #{RUBY_PLATFORM}" +UPDATE_CHECK_INTERVAL = 6 #months + +opt = OpenStruct.new +opt.PLAYLIST_TYPES = [:m3u, :smil, :pls, :asx, :tox, :xspf] +opt.playlist_type = opt.PLAYLIST_TYPES[0] +opt.size = 512 +opt.content_type = Regexp.new '' +opt.DESCRIPTION = <=1 + when 'memsize' + if value.instance_of?(Fixnum) + opt.memsize = value + opt.memsize = nil if opt.memsize<1 + end + when 'content' + begin + opt.content_type = Regexp.new(value.downcase) + rescue Exception + $stderr.puts "Error: '#{value.downcase}' is not a valid regular expression and will be ignored" + end + when 'language' + opt.language = value.split ',' + for i in 0...opt.language.size + opt.language[i].downcase! 
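+          # note (added): "en-US" was downcased above; the split below turns it
+          # into ["en", "us"] so language and region subtags can be matched
+          # against a feed's <language> element independently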
+ opt.language[i] = opt.language[i].split '-' + end + when 'order' + opt.order = value.to_sym if opt.ORDERS.detect{|s| value.to_sym == s} + when 'function' + opt.function = value.to_sym if opt.FUNCTIONS.detect{|s| value.to_sym == s} + when 'feeds' + if value.instance_of?(Fixnum) + opt.feeds = value + opt.feeds = nil if opt.feeds<1 + end + when 'horizon' + begin + date = value.split '.' + if (1..3).include? date.size + while date.size < 3 + date << '01' + end + opt.horizon = Date.parse date.join('-') + end + rescue ArgumentError + end + when 'torrentdir' + dir = Pathname.new value + if dir.exist? and dir.directory? + opt.torrent_dir = dir + end + when 'uploadrate' + opt.upload_rate = value if value.instance_of?(Fixnum) and value>=1 + when 'itemsize' + opt.itemsize = value if value.instance_of?(Fixnum) and value>=0 + when 'perfeed' + opt.per_feed = value if value.instance_of?(FalseClass) or value.instance_of?(TrueClass) + when 'cache' + opt.strategy = :cache if value.instance_of?(TrueClass) + when 'empty' + opt.empty = value if value.instance_of?(FalseClass) or value.instance_of?(TrueClass) + when 'asif' + opt.simulate = value if value.instance_of?(FalseClass) or value.instance_of?(TrueClass) + when 'checkforupdate' + opt.check_for_update = value if value.instance_of?(FalseClass) or value.instance_of?(TrueClass) + when 'vote' + opt.vote = value if value.instance_of?(FalseClass) or value.instance_of?(TrueClass) + when 'verbose' + opt.verbose = value if value.instance_of?(FalseClass) or value.instance_of?(TrueClass) + when 'restrictednames' + opt.restricted_names = value if value.instance_of?(FalseClass) or value.instance_of?(TrueClass) + end + end + break + end + end + c.separator "" + c.separator "Usage examples:" + c.separator " #{$0} --dir ~/podcasts http://podcastalley.com/PodcastAlleyTop50.opml > latest.m3u" + c.separator "" + c.separator " #{$0} --dir ~/podcasts rss.xml atom.xml *.pcast feeds.opml http://host/second.opml > latest.m3u" + c.separator "" + c.separator " #{$0} --dir ~/podcasts --strategy cache > cache.m3u" + c.separator "" + c.separator " cat feeds.opml | #{$0} --dir ~/podcasts > latest.m3u" + c.separator "" + c.separator " #{$0} -vd ~/podcasts -s 500 -m 10_000 -t tox feeds.opml > latest.tox" + c.separator "" + c.separator " #{$0} -vF search 'book health daily source code' 'http://www.ipodder.org/discuss/reader$4.opml' > results.opml" + c.separator "" + c.separator " #{$0} -F search -f 12 mac http://www.podfeed.net/opml/directory.opml > results.opml" +end +option_parser.parse! 
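+
+# Illustrative sketch (not part of the original script): the Playlist class
+# below is a builder driven as start -> add(content)* -> finish, where each
+# `content` is an OpenStruct carrying the fields used throughout this file
+# (mime, title, pub_date, file, feed_title). Assuming an already downloaded
+# file, a minimal M3U run would look like:
+#
+#   playlist = Playlist.new :m3u
+#   playlist.start                          # prints "#EXTM3U"
+#   c = OpenStruct.new
+#   c.mime  = "audio/mpeg"                  # non audio/video entries are skipped
+#   c.title = "Episode 1"
+#   c.file  = Pathname.new("cache/ep1.mp3") # hypothetical cached file
+#   playlist.add c                          # prints the #EXTINF line and path
+#   playlist.finish                         # closing boilerplate (empty for m3u)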
+ +class Playlist + def initialize(playlisttype) + @playlisttype = playlisttype + @audio_or_video = Regexp.new '^audio/|^video/' + @size = 0 + end + def start() + @str = "" + case @playlisttype + when :tox + @str = "# toxine playlist \n" + when :m3u + @str = "#EXTM3U\n" + when :pls + @str = "[playlist]\n" + when :asx + @str = < +END + when :smil + @str = < + + + + +END + when :xspf + @doc = Document.new + @doc.xml_decl.dowrite + @doc.add_element Element.new("playlist") + @doc.root.add_attribute "version", "1" + @doc.root.add_attribute "xmlns", "http://xspf.org/ns/0/" + @tracklist = Element.new("trackList") + @doc.root.add_element @tracklist + end + print @str + @str + end + def add(content) + return unless content + if content.mime + return unless @audio_or_video =~ content.mime + end + @size+=1 + feed_title = content.feed_title + feed_title = '' unless feed_title + feed_title = sanitize feed_title + title = content.title + title = '' unless title + title = sanitize title + title = "#{content.pub_date.strftime('%Y.%m.%d')} - "+title if content.pub_date + entry = "" + case @playlisttype + when :m3u + feed_title = feed_title.gsub(/,/," ") + title = title.gsub(/,/," ") + entry = "#EXTINF:-1,[#{feed_title}] #{title}\n#{content.file.to_s}\n" + when :pls + entry = "File#{@size}:#{content.file}\nTitle#{@size}:[#{feed_title}] #{title}\nLength#{@size}:-1\n" + when :asx + entry = " \n" + when :smil + entry = " \n" + when :tox + entry = "entry { \n\tidentifier = [#{feed_title}] #{title};\n\tmrl = #{content.file};\n};\n" + when :xspf + track = Element.new("track") + @tracklist.add_element track + title = Element.new("title") + title.add_text "[#{feed_title}] #{title}" + track.add_element title + location = Element.new("location") + location.add_text fileurl(content.file) + track.add_element location + end + @str += entry + print entry + entry + end + def finish() + res = "" + case @playlisttype + when :tox + res = "# end " + when :asx + res = < +END + when :smil + res = < + +END + when :pls + res = "NumberOfEntries=#{@size}\nVersion=2\n" + when :xspf + @doc.write $stdout, 0 + end + @str += res + print res + res + end + def to_s() + if @doc + @doc.to_s + else + @str + end + end +private + def fileurl(path) + res = "" + loop do + path, base = path.split + if base.root? + if base.to_s != "/" + res = "/"+CGI.escape(base.to_s)+res + end + break + end + res = "/"+CGI.escape(base.to_s)+res + end + "file://"+res + end + def sanitize(text) #removes invisible characters from text + return nil unless text + res = '' + text.each_byte() do |c| + case c + when 0..31, 127 #control chars + res << ' ' + else + res << c + end + end + res + end +end + +class Update + def initialize(dir) + @now = Time.now + @data = {'last-check' => @now, 'latest-version' => PODCATCHER_VERSION, 'latest-version-description' => ''} + @server = URI.parse('http://www.podcatcherstats.com/podcatcher/latest_release') + @server = URI.parse('http://0.0.0.0:3000/podcatcher/latest_release') if PODCATCHER_ENV == :development + return unless dir + return unless dir.directory? + @file = dir + 'updates' + if @file.exist? and @file.file? + begin + data = nil + @file.open() do |f| + data = YAML.load f + end + if data.instance_of? Hash + if newer_or_equal? data['latest-version'] + data.each() do |key, value| + case key + when 'last-check' + @data[key] = value if value.instance_of? Time and value < @now + when 'latest-version' + @data[key] = value if value.instance_of? String + when 'latest-version-description' + @data[key] = value if value.instance_of? 
String + end + end + end + end + rescue Interrupt + @file.delete + rescue SystemExit + exit 1 + rescue Exception + @file.delete + end + end + save + exit 1 unless @file.file? + end + def check() + if @now - @data['last-check'] > 60.0 * 60.0 * 24 * 30 * UPDATE_CHECK_INTERVAL + @data['last-check'] = @now + begin + Net::HTTP.start(@server.host, @server.port) do |http| + resp = http.get(@server.path, {'User-Agent' => USER_AGENT, 'Connection' => 'close'}) + loop do + break unless resp.code =~ Regexp.new('^2') + doc = Document.new resp.body + break unless doc and doc.root and doc.root.name == 'release' + version = XPath.first doc.root, 'version' + break unless version + break unless newer? version.text + description = XPath.first doc.root, 'description' + if description + description = description.text.strip + else + description = '' + end + @data['latest-version'] = version.join '.' + @data['latest-version-description'] = description + save + break + end + # read resp.body + end + rescue Interrupt + rescue SystemExit + exit 1 + rescue Exception + end + end + flash + end + def to_s() + res = '' + if @data + @data.each() do |key, value| + res+= "#{key}: #{value}\n" + end + end + res + end +private + def flash() + return unless newer? @data['latest-version'] #if equal? @data['latest-version'] + #constants + line_length = 70 + p = '**** ' + # + $stderr.puts "" + $stderr.puts p+"New release:" + $stderr.puts p+"Version #{@data['latest-version']} is available at #{PODCATCHER_WEBSITE}." + if @data['latest-version-description'].size>0 + descr = [] + @data['latest-version-description'].each() do |line| + descr = descr + line.chomp.split(' ') + end + line = nil + descr.each() do |word| + if line and (line + ' ' + word).size>line_length + $stderr.puts p+line + line = nil + end + if line + line += ' '+word + else + line = word + end + + end + $stderr.puts p+line if line + end + $stderr.puts "" + end + def save() + @file.open('w') do |f| + YAML.dump @data, f + end + end + def compare_with(version) # Return values: -1: versioninstalled_version + return -1 unless version + version = version.strip.split '.' + for i in 0...version.size + version[i] = version[i].to_i + end + current_version = PODCATCHER_VERSION.strip.split '.' + for i in 0...current_version.size + current_version[i] = current_version[i].to_i + end + res = 0 + for i in 0...version.size + break if i>=current_version.size + if current_version[i]>version[i] + res = -1 + break + end + if current_version[i] 1.0} + @server = URI.parse('http://www.podcatcherstats.com/podcatcher/ping') + @server = URI.parse('http://0.0.0.0:3000/podcatcher/ping') if PODCATCHER_ENV == :development + return unless dir + return unless dir.directory? + @file = dir + 'votes' + if @file.exist? and @file.file? + data = nil + begin + @file.open() do |f| + data = YAML.load f + end + rescue Interrupt + @file.delete + rescue SystemExit + exit 1 + rescue Exception + @file.delete + end + if data.instance_of? 
Hash + # $stderr.puts "votes file read" + data.each() do |key, value| + case key + when 'ping-probability' + @data[key] = value unless value<0.0 or 1.0 0 and feed[0].feedurl and feed[0].feedurl.size<255 and (not URI.parse(feed[0].feedurl).instance_of?(URI::Generic)) and sent_feeds < max_sent_feeds + stats.root.add_element 'feed', {'url' => feed[0].feedurl} + sent_feeds += 1 + end + end + break unless sent_feeds>0 + #send + stats_str = '' + stats.write stats_str + if PODCATCHER_ENV != :production + $stderr.puts "Sent:" + $stderr.puts stats_str + end + change_state = nil + Net::HTTP.start(@server.host, @server.port) do |http| + resp = http.request_post @server.path, stats_str, 'User-Agent' => USER_AGENT, 'Content-Type' => 'application/xml', 'Connection' => 'close' + if PODCATCHER_ENV != :production + $stderr.puts "Received:" + $stderr.puts "#{resp.body}" + end + change resp.body + end + @data['last-ping'] = now+0 + break + end + rescue Interrupt + # $stderr.puts "int1 #{$!}" + rescue SystemExit + exit 1 + rescue Exception + # $stderr.puts "exc #{$!}" + end + @data['last-session'] = now+0 + save + # $stderr.puts "#{to_s}" + end + def ping_search(opt, query) + return unless opt + return unless query + return if opt.simulate + now = Time.now + begin + loop do + break unless opt.vote + break unless ping? + # $stderr.puts "ping.." + stats = Document.new + stats.add_element 'searching', {'query' => query} + #state + stats.root.add_element state_element + #send + stats_str = '' + stats.write stats_str + # $stderr.puts stats_str + change_state = nil + Net::HTTP.start(@server.host, @server.port) do |http| + resp = http.request_post @server.path, stats_str, 'User-Agent' => USER_AGENT, 'Content-Type' => 'application/xml', 'Connection' => 'close' + # $stderr.puts "#{resp.body}" + change resp.body + end + @data['last-ping'] = now+0 + break + end + rescue Interrupt + # $stderr.puts "int1 #{$!}" + rescue SystemExit + exit 1 + rescue Exception + # $stderr.puts "exc #{$!}" + end + @data['last-session'] = now+0 + save + # $stderr.puts "#{to_s}" + end + def to_s() + res = '' + if @data + @data.each() do |key, value| + res+= "#{key}: #{value}\n" + end + end + res + end +private + def save() + @file.open('w') do |f| + YAML.dump @data, f + end + end + def ping?() + r = rand + # $stderr.puts "random: #{r}, ping-probability: #{@data['ping-probability']}" + return r < @data['ping-probability'] + end + def change(doc_str) + return unless doc_str + begin + change_state = Document.new doc_str + loop do + break unless change_state + break unless change_state.root + break unless change_state.root.name == 'state' + #ping-probability + ping = change_state.root.attributes['ping'] + if ping and ping.size>0 + ping = ping.to_f + unless ping<0.0 or 1.0 limit #shrink + @history_old.delete if @history_old.exist? + @history.rename @history_old + @history.open("w") do |f| + @history_old.each_line() do |url| + f.print(url) if history_size <= limit + history_size -= 1 + end + end + @history_old.unlink + end + rescue Interrupt, SystemExit + exit 1 + rescue Exception + $stderr.puts "Error: failure during history file clean-up." 
+ end if limit + end +end + +class Cache + def initialize(opt) + super() + @opt = opt + @@TORRENT = "application/x-bittorrent" + @@MEDIA_RSS_NS = ['http://search.yahoo.com/mrss/'] + @@MEDIA_RSS_NS << 'http://search.yahoo.com/mrss' + @@ATOM_NS = Regexp.new "^http://purl.org/atom/ns#" + #history + @history = History.new opt.dir + #stats + @stats = Stats.new opt.dir + #cache + @cache_dir = opt.cachedir #opt.dir+"cache" + @cache_dir.mkdir() unless @cache_dir.exist? + exit 1 unless @cache_dir.directory? + @cache_dir.each_entry() do |e| + e = @cache_dir+e + e = e.cleanpath + next if e == @cache_dir or e == @cache_dir.parent + if e.directory? #feed subfolder + e.each_entry() do |e2| + e2 = e+e2 + next if e2.directory? + if opt.empty + unless opt.simulate or opt.strategy == :cache + $stderr.puts "Deleting: #{e2}" if opt.verbose + e2.delete + end + end + end + e.delete if e.entries.size == 2 + elsif opt.empty + unless opt.simulate or opt.strategy == :cache + $stderr.puts "Deleting: #{e}" if opt.verbose + e.delete + end + end + end + @cache = @cache_dir.entries.collect() do |e| + e = @cache_dir+e + e = e.cleanpath + next if e == @cache_dir or e == @cache_dir.parent + if e.file? + content = OpenStruct.new + content.file = e + content.size = e.size + content.title = e.to_s + content + elsif e.directory? + e.entries.collect() do |e2| + e2 = e+e2 + if e2.file? + content = OpenStruct.new + content.file = e2 + content.size = e2.size + content.title = e2.to_s + content + else + nil + end + end + else + nil + end + end + @cache.flatten! + @cache.compact! + @cache.sort!() do |e,e2| + e.file.mtime() <=> e2.file.mtime() + end + end + def createplaylist(urls) + playlist = Playlist.new @opt.playlist_type + if @opt.strategy == :cache + playlist.start + @cache.reverse! + @cache.each() do |content| + playlist.add content + end + playlist.finish + return playlist.to_s + end + playlist.start + doc = nil + if urls.size == 0 + $stderr.puts "Reading document from standard input" if @opt.verbose + begin + xml = "" + $stdin.each() do |e| + xml += e + end + doc = OpenStruct.new + doc.dom = Document.new(xml) + doc = nil unless doc.dom + rescue Interrupt, SystemExit + exit 1 + rescue Exception + $stderr.puts "Error: unreadable document" + doc = nil + end + end + dochistory = [] + feeds = [] + urls.uniq! 
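+    # note (added): each remaining URL is wrapped in an OpenStruct "link"
+    # (url plus optional referrer) so later fetches can send a proper
+    # Referer header when following subscription lists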
+ links = urls.collect() do |e| + l = OpenStruct.new + l.url = e + l + end + loop do + break if @opt.feeds and feeds.size >= @opt.feeds + while not doc + link = links.shift + break unless link + if dochistory.detect{|e| e == link.url} + $stderr.puts "Skipping duplicate: #{link.url}" if @opt.verbose + next + end + $stderr.puts "Fetching: #{link.url}" if @opt.verbose + dochistory << link.url + begin + doc = fetchdoc(link) + rescue Interrupt, SystemExit + exit 1 + rescue Exception + $stderr.puts "Error: skipping unreadable document" + end + end + break unless doc + begin + if doc.dom.root.name == "opml" + newlinks = [] + outlines = [] + doc.dom.elements.each("/opml/body") do |body| + body.elements.each() do |e| + next unless e.name == 'outline' + outlines << e + end + end + while outlines.size>0 + outline = outlines.shift + url = outline.attributes["xmlUrl"] + url = outline.attributes["url"] unless url + if url + begin + url = URI.parse(doc.url).merge(url).to_s if doc.url + link = OpenStruct.new + link.url = url + link.referrer = doc.url + newlinks << link + rescue URI::InvalidURIError + end + next + end + new_outlines = [] + outline.elements.each() do |e| + next unless e.name == 'outline' + new_outlines << e + end + outlines = new_outlines + outlines + end + links = newlinks + links + elsif doc.dom.root.name == "pcast" + newlinks = [] + XPath.each(doc.dom,"//link[@rel='feed']") do |outline| + url = outline.attributes["href"] + next unless url + begin + url = URI.parse(doc.url).merge(url).to_s if doc.url + link = OpenStruct.new + link.url = url + link.referrer = doc.url + newlinks << link + rescue URI::InvalidURIError + end + end + links = newlinks + links + elsif doc.dom.root.namespace =~ @@ATOM_NS + feed = [] + XPath.each(doc.dom.root,"//*[@rel='enclosure']") do |e2| + next unless e2.namespace =~ @@ATOM_NS + content = OpenStruct.new + XPath.each(e2,"parent::/title/text()") do |node| + content.title = "" + node.value.each_line() do |e3| #remove line breaks + content.title+= e3.chomp+" " + end + content.title.strip! + end + XPath.each(e2,"parent::/created/text()") do |node| + pub_date = "" + node.value.each_line() do |e3| #remove line breaks + pub_date+= e3.chomp+" " + end + begin + content.pub_date = DateTime.parse(pub_date.strip, true) + rescue Exception + end + end + content.mime = e2.attributes["type"].downcase + next if @opt.content_type !~ content.mime and content.mime != @@TORRENT + next if content.mime == @@TORRENT and not (@opt.torrent_dir or @opt.rubytorrent) + content.feedurl = doc.url + begin + content.url = URI.parse(content.feedurl).merge(e2.attributes["href"]).to_s if content.feedurl + content.size = e2.attributes["length"].to_i + content.size = 2 unless content.size and content.size>0 + content.size = 0 if content.mime == @@TORRENT #not strictly necessary + feed << content + rescue URI::InvalidURIError + end + end + #sort by date + feed.sort!() do |a,b| + if a.pub_date + if b.pub_date + b.pub_date <=> a.pub_date + else + -1 + end + else + if b.pub_date + 1 + else + 0 + end + end + end + feed.each() do |content| + $stderr.puts "Enclosure: #{content.url}" + end if @opt.verbose + #title + node = XPath.first(doc.dom,"/feed/title/text()") + feed_title = "" + node.value.each_line() do |e3| #remove line breaks + feed_title += e3.chomp+" " + end + feed_title.strip! 
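+          # note (added): stamp the cleaned-up feed title onto each enclosure;
+          # Playlist#add uses it for the "[feed] episode" labels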
+ feed.each() do |content| + content.feed_title = feed_title + end + # + feeds << feed + elsif doc.dom.root.name = "rss" + feed = [] + doc.dom.root.elements.each() do |e| #channel + e.elements.each() do |e1| #item + title = '' + XPath.each(e1,"title/text()") do |node| + title = '' + node.value.each_line() do |e3| #remove line breaks + title+= e3.chomp+" " + end + title.strip! + end + pub_date = nil + XPath.each(e1,"pubDate/text()") do |node| + pub_date = "" + node.value.each_line() do |e3| #remove line breaks + pub_date+= e3.chomp+" " + end + begin + pub_date = DateTime.parse(pub_date.strip, true) + rescue Exception + pub_date = nil + end + end + e1.elements.each() do |e2| + if e2.name == "enclosure" + content = OpenStruct.new + content.title = title + content.pub_date = pub_date + content.mime = e2.attributes["type"].downcase + next if @opt.content_type !~ content.mime and content.mime != @@TORRENT + next if content.mime == @@TORRENT and not (@opt.torrent_dir or @opt.rubytorrent) + content.feedurl = doc.url + begin + content.url = URI.parse(content.feedurl).merge(e2.attributes["url"]).to_s if content.feedurl + content.size = e2.attributes["length"].to_i + content.size = 2 unless content.size and content.size>0 + content.size = 0 if content.mime == @@TORRENT #not strictly necessary + feed << content + rescue URI::InvalidURIError + end + elsif @@MEDIA_RSS_NS.include? e2.namespace + case e2.name + when 'content' + content = OpenStruct.new + content.title = title + content.pub_date = pub_date + content.mime = e2.attributes["type"].downcase + next if @opt.content_type !~ content.mime and content.mime != @@TORRENT + next if content.mime == @@TORRENT and not (@opt.torrent_dir or @opt.rubytorrent) + content.feedurl = doc.url + begin + content.url = URI.parse(content.feedurl).merge(e2.attributes["url"]).to_s if content.feedurl + content.size = e2.attributes["fileSize"].to_i + content.size = 2 unless content.size and content.size>0 + content.size = 0 if content.mime == @@TORRENT #not strictly necessary + feed << content + rescue URI::InvalidURIError + end + when 'group' + e2.elements.each() do |e4| + if e4.name == 'content' and @@MEDIA_RSS_NS.include?(e4.namespace) + content = OpenStruct.new + content.title = title + content.pub_date = pub_date + content.mime = e4.attributes["type"].downcase + next if @opt.content_type !~ content.mime and content.mime != @@TORRENT + next if content.mime == @@TORRENT and not (@opt.torrent_dir or @opt.rubytorrent) + content.feedurl = doc.url + begin + content.url = URI.parse(content.feedurl).merge(e4.attributes["url"]).to_s if content.feedurl + content.size = e4.attributes["fileSize"].to_i + content.size = 2 unless content.size and content.size>0 + content.size = 0 if content.mime == @@TORRENT #not strictly necessary + feed << content + rescue URI::InvalidURIError + end + break + end + end + end + + end + end if e1.name == "item" + end if e.name == "channel" + end + #remove duplicates (duplication occurs in particular for content declared as both enclosure and Media RSS content) + for i in 0...feed.size + content = feed[i] + next unless content + for j in i+1...feed.size + next unless feed[j] + feed[j] = nil if feed[j].url == content.url + end + end + feed.compact! 
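+          # note (added): compact! drops the nil slots left behind by the
+          # duplicate scan above, which is keyed on content.url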
+ #sort by date + feed.sort!() do |a,b| + if a.pub_date + if b.pub_date + b.pub_date <=> a.pub_date + else + -1 + end + else + if b.pub_date + 1 + else + 0 + end + end + end + feed.each() do |content| + $stderr.puts "Enclosure: #{content.url}" + end if @opt.verbose + #title + node = XPath.first(doc.dom,"//channel/title/text()") + feed_title = "" + node.value.each_line() do |e3| #remove line breaks + feed_title += e3.chomp+" " + end + feed_title.strip! + feed.each() do |content| + content.feed_title = feed_title + end + #language + if @opt.language.size > 0 + loop do + node = XPath.first doc.dom, '//channel/language/text()' + break unless node + break unless node.value + feed_lang = node.value.strip.downcase.split '-' + break if feed_lang.size == 0 + langmatch = @opt.language.collect() do |lang| + next false if feed_lang.size < lang.size + matches = true + for i in 0...lang.size + next if lang[i] == feed_lang[i] + matches = false + end + matches + end + feeds << feed if langmatch.include? true + break + end + else + feeds << feed + end + end + rescue Interrupt, SystemExit + exit 1 + rescue Exception + $stderr.puts "Error: skipping document because of an internal error" + end + doc = nil + end + #remove content older than the horizon date + if @opt.horizon + feeds.each() do |feed| + for i in 0...feed.size + if feed[i].pub_date + feed[i] = nil if feed[i].pub_date < @opt.horizon + else + feed[i] = nil + end + end + feed.compact! + end + end + #apply download strategy + @history.mark_old_content feeds + if @opt.strategy == :chron or @opt.strategy == :chron_one or @opt.strategy == :chron_all + feeds.each() do |feed| + feed.reverse! + end + @opt.strategy = :back_catalog if @opt.strategy == :chron + @opt.strategy = :one if @opt.strategy == :chron_one + @opt.strategy = :all if @opt.strategy == :chron_all + end + case @opt.strategy #remove ignored content + when :new + feeds.each() do |feed| + in_hist = nil + for i in 0...feed.size + if feed[i].in_history + in_hist = i + break + end + end + feed.slice! in_hist...feed.size if in_hist + end + when :all + else + feeds.each() do |feed| + for i in 0...feed.size + feed[i] = nil if feed[i].in_history + end + feed.compact! + end + end + if @opt.strategy == :new or @opt.strategy == :one + feeds.each() do |feed| + itemsize = 0 + index = nil + for i in 0...feed.size + itemsize += feed[i].size + if itemsize >= @opt.itemsize + index = i+1 + break + end + end + feed.slice! index...feed.size if index + end + end + #feed order + case @opt.order + when :random + srand + feeds.sort!() do |a,b| + if a.size>0 + if b.size>0 + rand(3)-1 + else + -1 + end + else + if b.size>0 + 1 + else + 0 + end + end + end + when :alphabetical + feeds.sort!() do |a,b| + if a.size>0 + if b.size>0 + a[0].feed_title <=> b[0].feed_title + else + -1 + end + else + if b.size>0 + 1 + else + 0 + end + end + end + when :reverse + feeds.reverse! + end + #remove duplicate content + feeds.each() do |feed| + feed.each() do |content| + next unless content + dup = false + feeds.each() do |f| + for i in 0...f.size + next unless f[i] + if f[i].url == content.url + f[i] = nil if dup + dup = true + end + $stderr.puts "Removed duplicate: #{content.url}" unless f[i] or (not @opt.verbose) + end + end + end + feed.compact! 
+ end + #send usage statistics + @stats.ping @opt, feeds + #fetch torrent metainfo files + feeds.each() do |feed| + feed.each() do |content| + next if content.mime != @@TORRENT + content.mime = nil + begin + $stderr.puts "Fetching torrent metainfo: #{content.url}" if @opt.verbose + content.metainfo = RubyTorrent::MetaInfo.from_location content.url + content.size = content.metainfo.info.length + content.mime = case content.metainfo.info.name.downcase + when /\.mp3$/ + "audio/mpeg" + when /\.wma$/ + "audio/x-ms-wma" + when /\.mpg$|\.mpeg$|\.mpe$|\.mpa$|\.mp2$|\.mpv2$/ + "video/mpeg" + when /\.mov$|\.qt$/ + "video/quicktime" + when /\.avi$/ + "video/x-msvideo" + when /\.wmv$/ + "video/x-ms-wmv" + when /\.asf$/ + "video/x-ms-asf" + when /\.m4v$|\.mp4$|\.mpg4$/ + "video/mp4" + else + nil + end + content.url = nil unless content.mime + content.url = nil unless (@opt.content_type =~ content.mime) + content.url = nil unless content.metainfo.info.single? + rescue Interrupt + content.url = nil + $stderr.puts "Error: unreadable torrent metainfo" if @opt.verbose + rescue SystemExit + exit 1 + rescue Exception + content.url = nil + $stderr.puts "Error: unreadable torrent metainfo" if @opt.verbose + end + end + for i in 0...feed.size + feed[i] = nil unless feed[i].url + end + feed.compact! + end + #fetch enclosures + item = total = 0 + @cache.each() do |e| + total+= e.size + end + torrents = [] + torrentfiles = [] + inc = 1 + while inc>0 + inc = 0 + itemsize = 0 + feeds.each do |e| + #find next enclosure in feed + content = e.shift + unless content + itemsize = 0 + next + end + #make place in cache + while @opt.size and content.size+inc+total > @opt.size + break if @opt.simulate + f = @cache.shift + break unless f + total-= f.size + parent = f.file.parent + $stderr.puts "Deleting: #{f.file}" if @opt.verbose + f.file.delete + if parent.parent != @opt.dir and parent.entries.size == 2 + #delete empty feed subfolder + $stderr.puts "Deleting: #{parent}" if @opt.verbose + parent.delete + end + end + unless @opt.simulate + break if @opt.size and content.size+inc+total > @opt.size + end + #download + 1.upto(@opt.retries) do |i| + begin + if content.metainfo + if @opt.torrent_dir + loop do + content.file = @opt.torrent_dir+(Time.now.to_f.to_s+".torrent") + break unless content.file.exist? 
+ sleep 1 + end + $stderr.puts "Copying: #{content.url} to #{content.file}" if @opt.verbose and i == 1 + if not @opt.simulate + if content.feedurl and (content.feedurl =~ %r{^http:} or content.feedurl =~ %r{^ftp:}) + open(content.url, "User-Agent" => USER_AGENT, "Referer" => content.feedurl) do |fin| + content.file.open("wb") do |fout| + fin.each_byte() do |b| + fout.putc b + end + end + end + else + open(content.url, "User-Agent" => USER_AGENT) do |fin| + content.file.open("wb") do |fout| + fin.each_byte() do |b| + fout.putc b + end + end + end + end + end + else + $stderr.puts "Fetching in background: #{content.url}" if @opt.verbose and i == 1 + unless @opt.simulate + content.file = filename(content, @cache_dir) + package = RubyTorrent::Package.new content.metainfo, content.file.to_s + bt = RubyTorrent::BitTorrent.new content.metainfo, package, :dlratelim => nil, :ulratelim => @opt.upload_rate, :http_proxy => ENV["http_proxy"] + torrents << bt + torrentfiles << content + end + inc+= content.size + itemsize+= content.size + end + else + $stderr.puts "Fetching: #{content.url} (#{content.size.to_s} bytes)" if @opt.verbose and i == 1 + if not @opt.simulate + headers = {"User-Agent" => USER_AGENT} + headers["Referer"] = content.feedurl if content.feedurl and (content.feedurl =~ %r{^http:} or content.feedurl =~ %r{^ftp:}) + content.download_url = content.url unless content.download_url + open(content.download_url, headers) do |fin| + if fin.base_uri.instance_of?(URI::HTTP) + if fin.status[0] =~ Regexp.new('^3') + content.download_url = fin.meta['location'] + raise "redirecting" + elsif fin.status[0] !~ Regexp.new('^2') + raise 'failed' + end + end + # write content to cache + content.redirection_url = fin.base_uri.to_s # content.redirection_url is used for finding the correct filename in case of redirection + content.redirection_url = nil if content.redirection_url.eql?(content.url) + content.file = filename(content, @cache_dir) + content.file.open("wb") do |fout| + fin.each_byte() do |b| + fout.putc b + end + end + end + content.size = content.file.size + @history.add content + end + playlist.add(content) + inc+= content.size + itemsize+= content.size + end + break + rescue Interrupt + rescue SystemExit + exit 1 + rescue Exception + end + $stderr.puts "Attempt #{i} aborted" if @opt.verbose + if content.file and i == @opt.retries + if content.file.exist? + parent = content.file.parent + content.file.delete + if parent.parent != @opt.dir and parent.entries.size == 2 + #delete empty feed subfolder + parent.delete + end + end + content.file = nil + end + sleep 5 + end + redo unless content.file # skip unavailable enclosures + redo if @opt.itemsize > itemsize + itemsize = 0 + end + total+=inc + end + #shut down torrents + if torrents.length > 0 + $stderr.puts "Fetching torrents (duration: 30min to a couple of hours) " if @opt.verbose + bt = torrents[0] + completion = torrents.collect() do |e| + e.percent_completed + end + while torrents.length > 0 + sleep 30*60 + for i in 0...torrents.length + c = torrents[i].percent_completed + complete = torrents[i].complete? + $stderr.puts "Fetched: #{c}% of #{torrentfiles[i].url} " if @opt.verbose + if complete or c == completion[i] + begin + torrents[i].shutdown + rescue SystemExit + exit 1 + rescue Interrupt, Exception + end + if complete + playlist.add(torrentfiles[i]) + @history.add torrentfiles[i] + else + $stderr.puts "Aborted: #{torrentfiles[i].url}" if @opt.verbose + begin + torrentfiles[i].file.delete if torrentfiles[i].file.exist? 
+ torrentfiles[i] = nil + rescue Interrupt, SystemExit + exit 1 + rescue Exception + end + end + torrents[i] = nil + torrentfiles[i] = nil + completion[i] = nil + next + end + completion[i] = c + end + torrents.compact! + torrentfiles.compact! + completion.compact! + end + begin + bt.shutdown_all + rescue Interrupt, SystemExit + exit 1 + rescue Exception + end + $stderr.puts "BitTorrent stopped" if @opt.verbose + end + playlist.finish + @history.trim(@opt.memsize) unless @opt.simulate or @opt.strategy == :cache + playlist.to_s + end +private + def fetchdoc(link) + doc = "" + 1.upto(@opt.retries) do |i| + begin + if link.url =~ %r{^http:} or link.url =~ %r{^ftp:} + if link.referrer and (link.referrer =~ %r{^http:} or link.referrer =~ %r{^ftp:}) + open(link.url, "User-Agent" => USER_AGENT, "Referer" => link.referrer) do |f| + break if f.content_type.index "audio/" + break if f.content_type.index "video/" + f.each_line() do |e| + doc += e + end + end + else + open(link.url, "User-Agent" => USER_AGENT) do |f| + break if f.content_type.index "audio/" + break if f.content_type.index "video/" + f.each_line() do |e| + doc += e + end + end + end + else + open(link.url) do |f| + f.each_line() do |e| + doc += e + end + end + end + break + rescue Interrupt + rescue SystemExit + exit 1 + rescue Exception + end + $stderr.puts "Attempt #{i} aborted" if @opt.verbose + doc = "" + sleep 5 + end + res = OpenStruct.new + begin + res.dom = Document.new doc + rescue Exception + end + if res.dom + res.url = link.url + else + res = nil + end + res + end + def filename(content, dir) #produce filename for content to be downloaded + begin #per-feed subfolder + if @opt.per_feed and content.feed_title and content.feed_title.size > 0 + newdir = dir+content.feed_title + newdir = dir+content.feed_title.gsub(/[\\\/:*?\"<>|!]/, ' ').gsub(/-+/,'-').gsub(/\s+/,' ').strip if @opt.restricted_names + if newdir.exist? + if newdir.directory? + dir = newdir + end + else + newdir.mkdir + dir = newdir + end + end + rescue Exception + # $stderr.puts "error: #{$!}" + end + ext = [""] + if content.metainfo + begin + ext = ["."+content.metainfo.info.name.split(".").reverse[0]] + rescue Exception + end + else + ext = case content.mime.downcase + when "audio/mpeg" + [".mp3"] + when "audio/x-mpeg" + [".mp3"] + when "audio/x-ms-wma" + [".wma"] + when "audio/x-m4a" + [".m4a"] + when "video/mpeg" + [".mpg",".mpeg",".mpe",".mpa",".mp2",".mpv2"] + when "video/quicktime" + [".mov",".qt"] + when "video/x-msvideo" + [".avi"] + when "video/x-ms-wmv" + [".wmv"] + when "video/x-ms-asf" + [".asf"] + when "video/mp4" + [".m4v", ".mp4",".mpg4"] + when "video/x-m4v" + [".m4v", ".mp4",".mpg4"] + else + [""] + end + end + #name from url? + name = nil + begin + if content.metainfo + name = content.metainfo.info.name + name = nil if (dir+name).exist? + else + urlname = nil + urlname = URI.split(content.redirection_url)[5].split("/")[-1] if content.redirection_url + urlname = URI.split(content.url)[5].split("/")[-1] unless urlname + ext.each() do |e| + if e.length == 0 or urlname[-e.length..-1].downcase == e + name = urlname + name = URI.unescape(name) + name = nil if (dir+name).exist? + break if name + end + end + end + rescue Exception + end + #unique name? + loop do + name = Time.now.to_f.to_s+ext[0] + break unless (dir+name).exist? 
+ sleep 1 + end unless name + dir+name + end +end +class OPML + def initialize(title = nil) + @doc = Document.new + @doc.xml_decl.dowrite + @doc.add_element Element.new("opml") + @doc.root.add_attribute "version", "1.1" + head = Element.new("head") + @doc.root.add_element head + if title + titlee = Element.new("title") + titlee.text = title + head.add_element titlee + end + @body = Element.new("body") + @doc.root.add_element @body + @size = 0 + end + def add(feedurl, text=nil) + e = Element.new("outline") + e.add_attribute("text", text) if text + e.add_attribute "type", "link" + e.add_attribute "url", feedurl + @body.add_element e + @size += 1 + end + def write() + @doc.write $stdout, 0 + end + def size() + @size + end +end + +class Query + def initialize(opt, query) + @@ATOM_NS = Regexp.new '^http://purl.org/atom/ns#' + @@ITUNES_NS = 'http://www.itunes.com/dtds/podcast-1.0.dtd' + @opt = opt + if query + @query = query.downcase.split + @query = nil if @query.size == 0 + end + @stats = Stats.new opt.dir + end + def search(urls) + res = [] + begin + newpaths = [] + dochistory = [] + paths = [] + if urls.size == 0 + $stderr.puts "Reading subscriptions from standard input" if @opt.verbose + begin + xml = "" + $stdin.each() do |e| + xml += e + end + path = OpenStruct.new + path.doc = Document.new(xml) + if path.doc and path.doc.root + path.relevance = 0 + newpaths << path + end + rescue Interrupt, SystemExit + raise + rescue Exception + $stderr.puts "Error: unreadable subscriptions" + end + else + newpaths = urls.uniq.collect() do |e| + path = OpenStruct.new + path.url = e + path + end + newpaths = newpaths.collect() do |path| + $stderr.puts "Fetching: #{path.url}" if @opt.verbose + dochistory << path.url + path.doc = fetchdoc(path) + if path.doc + path.relevance = 0 + path + else + $stderr.puts "Skipping unreadable document" if @opt.verbose + nil + end + end + newpaths.compact! 
+ end + #send usage statistics + @stats.ping_search @opt, @query.join(' ') + # + loop do + break if @opt.feeds and res.size >= @opt.feeds + begin + newpaths.sort!() do |path1, path2| + path2.relevance <=> path1.relevance + end + paths = newpaths + paths + newpaths = [] + path = nil + loop do + path = paths.shift + break unless path + if path.doc + break + else + if dochistory.detect{|e| e == path.url} + $stderr.puts "Skipping duplicate: #{path.url}" if @opt.verbose + next + end + $stderr.puts "Fetching: #{path.url}" if @opt.verbose + dochistory << path.url + path.doc = fetchdoc(path) + if path.doc + break + end + $stderr.puts "Error: skipping unreadable document" + end + end + break unless path + if path.doc.root.name == "opml" + #doc relevance + path.relevance += relevance_of(XPath.first(path.doc, "/opml/head/title/text()")) + #outgoing links + XPath.each(path.doc,"//outline") do |outline| + url = outline.attributes["xmlUrl"] + url = outline.attributes["url"] unless url + next unless url + begin + url = URI.parse(path.url).merge(url).to_s if path.url + rescue Interrupt, SystemExit + raise + rescue Exception + end + newpath = OpenStruct.new + newpath.url = url + newpath.referrer = path.url + #link relevance + newpath.relevance = path.relevance + XPath.each(outline, "ancestor-or-self::outline") do |e| + newpath.relevance += relevance_of(e.attributes["text"]) + end + # + newpaths << newpath + end + elsif path.doc.root.name == "pcast" + #outgoing links + XPath.each(path.doc,"/pcast/channel") do |channel| + link = XPath.first(channel, "link[@rel='feed']") + next unless link + url = link.attributes["href"] + next unless url + begin + url = URI.parse(path.url).merge(url).to_s if path.url + rescue Interrupt, SystemExit + raise + rescue Exception + end + newpath = OpenStruct.new + newpath.url = url + newpath.referrer = path.url + #link relevance + newpath.relevance = path.relevance + newpath.relevance += relevance_of(XPath.first(channel, "title/text()")) + newpath.relevance += relevance_of(XPath.first(channel, "subtitle/text()")) + # + newpaths << newpath + end + elsif path.doc.root.namespace =~ @@ATOM_NS and path.url + #doc relevance + title = nil + begin + XPath.each(path.doc.root,"/*/*") do |e| + next unless e.namespace =~ @@ATOM_NS + next unless e.name == "title" or e.name == "subtitle" + title = e.text if e.name == "title" + path.relevance += relevance_of(e.text) + end + rescue Interrupt, SystemExit + raise + rescue Exception + #$stderr.puts "error: #{$!}" + end + if path.relevance > 0 + $stderr.puts "Found: #{title} (relevance: #{path.relevance})" if @opt.verbose + if title + path.title = "" + title.value.each() do |e3| #remove line breaks + path.title+= e3.chomp+" " + end + path.title.strip! 
+ end + res << path + end + elsif path.doc.root.name = "rss" and path.url + #doc relevance + title = XPath.first(path.doc, "//channel/title/text()") + path.relevance += relevance_of(title) + path.relevance += relevance_of(XPath.first(path.doc, "//channel/description/text()")) + begin + XPath.each(path.doc.root,"//channel/*") do |e| + next unless e.name == "category" + if e.namespace == @@ITUNES_NS + XPath.each(e, "descendant-or-self::*") do |e2| + next unless e2.name == "category" + path.relevance += relevance_of(e2.attributes["text"]) + end + else + path.relevance += relevance_of(e.text) + end + end + rescue Interrupt, SystemExit + raise + rescue Exception + #$stderr.puts "error: #{$!}" + end + if path.relevance > 0 + $stderr.puts "Found: #{title} (relevance: #{path.relevance})" if @opt.verbose + if title + path.title = "" + title.value.each() do |e3| #remove line breaks + path.title+= e3.chomp+" " + end + path.title.strip! + end + res << path + end + end + rescue Interrupt, SystemExit + raise + rescue Exception + $stderr.puts "Error: skipping unreadable document" + end + end + rescue Interrupt, SystemExit + $stderr.puts "Execution interrupted" + rescue Exception + end + result = nil + while not result + begin + res.sort!() do |path1, path2| + path2.relevance <=> path1.relevance + end + opml = OPML.new "Search results for \"#{@query.collect(){|e| "#{e} "}}\"" + res.each() do |path| + opml.add path.url, path.title + end + result = opml + rescue Exception + end + end + result.write + result + end +private + def relevance_of(meta) + return 0 unless meta + unless meta.kind_of? String #Text todo: resolve entities + meta = meta.value + end + meta = meta.downcase + meta = meta.split + res = 0 + @query.each() do |e| + meta.each() do |e2| + res += 1 if e2.index(e) + end + end + res + end + def fetchdoc(link) + doc = "" + 1.upto(@opt.retries) do |i| + begin + if link.url =~ %r{^http:} or link.url =~ %r{^ftp:} + if link.referrer and (link.referrer =~ %r{^http:} or link.referrer =~ %r{^ftp:}) + open(link.url, "User-Agent" => USER_AGENT, "Referer" => link.referrer) do |f| + break if f.content_type.index "audio/" + break if f.content_type.index "video/" + f.each_line() do |e| + doc += e + end + end + else + open(link.url, "User-Agent" => USER_AGENT) do |f| + break if f.content_type.index "audio/" + break if f.content_type.index "video/" + f.each_line() do |e| + doc += e + end + end + end + else + open(link.url) do |f| + f.each_line() do |e| + doc += e + end + end + end + break + rescue Interrupt + rescue SystemExit + break + rescue Exception + end + $stderr.puts "Attempt #{i} aborted" if @opt.verbose + doc = "" + sleep 5 + end + res = nil + begin + res = Document.new doc + rescue Exception + end + res = nil unless res and res.root + res + end +end + +opt.size *= 1_000_000 if opt.size +opt.upload_rate *= 1024 if opt.upload_rate +opt.itemsize *= 1_000_000 +arguments = arguments + ARGV + +unless opt.check_for_update + $stderr.puts "Disabling update check." if opt.verbose +end + +unless opt.vote + $stderr.puts "Disabling the sending of anonymous usage statistics." if opt.verbose +end + +begin + require "rubytorrent" + opt.rubytorrent = true + $stderr.puts "RubyTorrent detected." 
if opt.verbose +rescue Interrupt, SystemExit + exit 1 +rescue Exception +end + +if opt.function == :download + cache = Cache.new opt + cache.createplaylist arguments +elsif opt.function == :search + dir = Query.new opt, arguments.shift + dir.search arguments +end + +if opt.check_for_update + update = Update.new opt.dir + update.check +end + +if opt.verbose + $stderr.puts "" + $stderr.puts " *********************************************************************" + $stderr.puts " **** Qworum - A platform for web-based services (sponsor) ****" + $stderr.puts " *********************************************************************" + $stderr.puts " **** Sell and buy services: ****" + $stderr.puts " **** Host services on your own domain; sell them to websites ****" + $stderr.puts " **** or businesses on the service marketplace. ****" + $stderr.puts " **** ****" + $stderr.puts " **** Build enterprise information systems: ****" + $stderr.puts " **** Use Qworum in your information system, and enjoy the ****" + $stderr.puts " **** benefits of a powerful SOA technology. ****" + $stderr.puts " **** ****" + $stderr.puts " **** Learn more at http://www.qworum.com/ ****" + $stderr.puts " *********************************************************************" + $stderr.puts "" +end + +$stderr.puts "End of podcatching session." if opt.verbose + + diff --git a/reinstall b/reinstall new file mode 100644 index 0000000..a491ff2 --- /dev/null +++ b/reinstall @@ -0,0 +1 @@ +sudo apt-get --download-only --reinstall install diff --git a/repo b/repo new file mode 100755 index 0000000..4721174 --- /dev/null +++ b/repo @@ -0,0 +1,874 @@ +#!/usr/bin/env python + +# repo default configuration +# +import os +REPO_URL = os.environ.get('REPO_URL', None) +if not REPO_URL: + REPO_URL = 'https://gerrit.googlesource.com/git-repo' +REPO_REV = 'stable' + +# Copyright (C) 2008 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
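+
+# Usage sketch (added; not in the upstream bootstrap): REPO_URL above is read
+# from the environment, so a mirror can be selected at init time, e.g.
+#
+#   REPO_URL=https://gerrit-mirror.example.com/git-repo repo init -u <manifest-url>
+#
+# (the mirror URL is hypothetical), while REPO_REV pins which branch of the
+# repo tool itself is checked out.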
+ +# increment this whenever we make important changes to this script +VERSION = (1, 22) + +# increment this if the MAINTAINER_KEYS block is modified +KEYRING_VERSION = (1, 2) +MAINTAINER_KEYS = """ + + Repo Maintainer +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1.4.2.2 (GNU/Linux) + +mQGiBEj3ugERBACrLJh/ZPyVSKeClMuznFIrsQ+hpNnmJGw1a9GXKYKk8qHPhAZf +WKtrBqAVMNRLhL85oSlekRz98u41H5si5zcuv+IXJDF5MJYcB8f22wAy15lUqPWi +VCkk1l8qqLiuW0fo+ZkPY5qOgrvc0HW1SmdH649uNwqCbcKb6CxaTxzhOwCgj3AP +xI1WfzLqdJjsm1Nq98L0cLcD/iNsILCuw44PRds3J75YP0pze7YF/6WFMB6QSFGu +aUX1FsTTztKNXGms8i5b2l1B8JaLRWq/jOnZzyl1zrUJhkc0JgyZW5oNLGyWGhKD +Fxp5YpHuIuMImopWEMFIRQNrvlg+YVK8t3FpdI1RY0LYqha8pPzANhEYgSfoVzOb +fbfbA/4ioOrxy8ifSoga7ITyZMA+XbW8bx33WXutO9N7SPKS/AK2JpasSEVLZcON +ae5hvAEGVXKxVPDjJBmIc2cOe7kOKSi3OxLzBqrjS2rnjiP4o0ekhZIe4+ocwVOg +e0PLlH5avCqihGRhpoqDRsmpzSHzJIxtoeb+GgGEX8KkUsVAhbQpUmVwbyBNYWlu +dGFpbmVyIDxyZXBvQGFuZHJvaWQua2VybmVsLm9yZz6IYAQTEQIAIAUCSPe6AQIb +AwYLCQgHAwIEFQIIAwQWAgMBAh4BAheAAAoJEBZTDV6SD1xl1GEAn0x/OKQpy7qI +6G73NJviU0IUMtftAKCFMUhGb/0bZvQ8Rm3QCUpWHyEIu7kEDQRI97ogEBAA2wI6 +5fs9y/rMwD6dkD/vK9v4C9mOn1IL5JCPYMJBVSci+9ED4ChzYvfq7wOcj9qIvaE0 +GwCt2ar7Q56me5J+byhSb32Rqsw/r3Vo5cZMH80N4cjesGuSXOGyEWTe4HYoxnHv +gF4EKI2LK7xfTUcxMtlyn52sUpkfKsCpUhFvdmbAiJE+jCkQZr1Z8u2KphV79Ou+ +P1N5IXY/XWOlq48Qf4MWCYlJFrB07xjUjLKMPDNDnm58L5byDrP/eHysKexpbakL +xCmYyfT6DV1SWLblpd2hie0sL3YejdtuBMYMS2rI7Yxb8kGuqkz+9l1qhwJtei94 +5MaretDy/d/JH/pRYkRf7L+ke7dpzrP+aJmcz9P1e6gq4NJsWejaALVASBiioqNf +QmtqSVzF1wkR5avZkFHuYvj6V/t1RrOZTXxkSk18KFMJRBZrdHFCWbc5qrVxUB6e +N5pja0NFIUCigLBV1c6I2DwiuboMNh18VtJJh+nwWeez/RueN4ig59gRTtkcc0PR +35tX2DR8+xCCFVW/NcJ4PSePYzCuuLvp1vEDHnj41R52Fz51hgddT4rBsp0nL+5I +socSOIIezw8T9vVzMY4ArCKFAVu2IVyBcahTfBS8q5EM63mONU6UVJEozfGljiMw +xuQ7JwKcw0AUEKTKG7aBgBaTAgT8TOevpvlw91cAAwUP/jRkyVi/0WAb0qlEaq/S +ouWxX1faR+vU3b+Y2/DGjtXQMzG0qpetaTHC/AxxHpgt/dCkWI6ljYDnxgPLwG0a +Oasm94BjZc6vZwf1opFZUKsjOAAxRxNZyjUJKe4UZVuMTk6zo27Nt3LMnc0FO47v +FcOjRyquvgNOS818irVHUf12waDx8gszKxQTTtFxU5/ePB2jZmhP6oXSe4K/LG5T ++WBRPDrHiGPhCzJRzm9BP0lTnGCAj3o9W90STZa65RK7IaYpC8TB35JTBEbrrNCp +w6lzd74LnNEp5eMlKDnXzUAgAH0yzCQeMl7t33QCdYx2hRs2wtTQSjGfAiNmj/WW +Vl5Jn+2jCDnRLenKHwVRFsBX2e0BiRWt/i9Y8fjorLCXVj4z+7yW6DawdLkJorEo +p3v5ILwfC7hVx4jHSnOgZ65L9s8EQdVr1ckN9243yta7rNgwfcqb60ILMFF1BRk/ +0V7wCL+68UwwiQDvyMOQuqkysKLSDCLb7BFcyA7j6KG+5hpsREstFX2wK1yKeraz +5xGrFy8tfAaeBMIQ17gvFSp/suc9DYO0ICK2BISzq+F+ZiAKsjMYOBNdH/h0zobQ +HTHs37+/QLMomGEGKZMWi0dShU2J5mNRQu3Hhxl3hHDVbt5CeJBb26aQcQrFz69W +zE3GNvmJosh6leayjtI9P2A6iEkEGBECAAkFAkj3uiACGwwACgkQFlMNXpIPXGWp +TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2 +=CMiZ +-----END PGP PUBLIC KEY BLOCK----- + + Conley Owens +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1.4.11 (GNU/Linux) + +mQENBFHRvc8BCADFg45Xx/y6QDC+T7Y/gGc7vx0ww7qfOwIKlAZ9xG3qKunMxo+S +hPCnzEl3cq+6I1Ww/ndop/HB3N3toPXRCoN8Vs4/Hc7by+SnaLFnacrm+tV5/OgT +V37Lzt8lhay1Kl+YfpFwHYYpIEBLFV9knyfRXS/428W2qhdzYfvB15/AasRmwmor +py4NIzSs8UD/SPr1ihqNCdZM76+MQyN5HMYXW/ALZXUFG0pwluHFA7hrfPG74i8C +zMiP7qvMWIl/r/jtzHioH1dRKgbod+LZsrDJ8mBaqsZaDmNJMhss9g76XvfMyLra +9DI9/iFuBpGzeqBv0hwOGQspLRrEoyTeR6n1ABEBAAG0H0NvbmxleSBPd2VucyA8 +Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlHRvc8CGwMGCwkIBwMCBhUIAgkK +CwQWAgMBAh4BAheAAAoJEGe35EhpKzgsP6AIAJKJmNtn4l7hkYHKHFSo3egb6RjQ +zEIP3MFTcu8HFX1kF1ZFbrp7xqurLaE53kEkKuAAvjJDAgI8mcZHP1JyplubqjQA +xvv84gK+OGP3Xk+QK1ZjUQSbjOpjEiSZpRhWcHci3dgOUH4blJfByHw25hlgHowd +a/2PrNKZVcJ92YienaxxGjcXEUcd0uYEG2+rwllQigFcnMFDhr9B71MfalRHjFKE +fmdoypqLrri61YBc59P88Rw2/WUpTQjgNubSqa3A2+CKdaRyaRw+2fdF4TdR0h8W 
+zbg+lbaPtJHsV+3mJC7fq26MiJDRJa5ZztpMn8su20gbLgi2ShBOaHAYDDi5AQ0E +UdG9zwEIAMoOBq+QLNozAhxOOl5GL3StTStGRgPRXINfmViTsihrqGCWBBUfXlUE +OytC0mYcrDUQev/8ToVoyqw+iGSwDkcSXkrEUCKFtHV/GECWtk1keyHgR10YKI1R +mquSXoubWGqPeG1PAI74XWaRx8UrL8uCXUtmD8Q5J7mDjKR5NpxaXrwlA0bKsf2E +Gp9tu1kKauuToZhWHMRMqYSOGikQJwWSFYKT1KdNcOXLQF6+bfoJ6sjVYdwfmNQL +Ixn8QVhoTDedcqClSWB17VDEFDFa7MmqXZz2qtM3X1R/MUMHqPtegQzBGNhRdnI2 +V45+1Nnx/uuCxDbeI4RbHzujnxDiq70AEQEAAYkBHwQYAQIACQUCUdG9zwIbDAAK +CRBnt+RIaSs4LNVeB/0Y2pZ8I7gAAcEM0Xw8drr4omg2fUoK1J33ozlA/RxeA/lJ +I3KnyCDTpXuIeBKPGkdL8uMATC9Z8DnBBajRlftNDVZS3Hz4G09G9QpMojvJkFJV +By+01Flw/X+eeN8NpqSuLV4W+AjEO8at/VvgKr1AFvBRdZ7GkpI1o6DgPe7ZqX+1 +dzQZt3e13W0rVBb/bUgx9iSLoeWP3aq/k+/GRGOR+S6F6BBSl0SQ2EF2+dIywb1x +JuinEP+AwLAUZ1Bsx9ISC0Agpk2VeHXPL3FGhroEmoMvBzO0kTFGyoeT7PR/BfKv ++H/g3HsL2LOB9uoIm8/5p2TTU5ttYCXMHhQZ81AY +=AUp4 +-----END PGP PUBLIC KEY BLOCK----- +""" + +GIT = 'git' # our git command +MIN_GIT_VERSION = (1, 7, 2) # minimum supported git version +repodir = '.repo' # name of repo's private directory +S_repo = 'repo' # special repo repository +S_manifests = 'manifests' # special manifest repository +REPO_MAIN = S_repo + '/main.py' # main script +MIN_PYTHON_VERSION = (2, 6) # minimum supported python version +GITC_CONFIG_FILE = '/gitc/.config' +GITC_FS_ROOT_DIR = '/gitc/manifest-rw/' + + +import errno +import optparse +import re +import shutil +import stat +import subprocess +import sys + +if sys.version_info[0] == 3: + import urllib.request + import urllib.error +else: + import imp + import urllib2 + urllib = imp.new_module('urllib') + urllib.request = urllib2 + urllib.error = urllib2 + + +def _print(*objects, **kwargs): + sep = kwargs.get('sep', ' ') + end = kwargs.get('end', '\n') + out = kwargs.get('file', sys.stdout) + out.write(sep.join(objects) + end) + + +# Python version check +ver = sys.version_info +if (ver[0], ver[1]) < MIN_PYTHON_VERSION: + _print('error: Python version %s unsupported.\n' + 'Please use Python 2.6 - 2.7 instead.' + % sys.version.split(' ')[0], file=sys.stderr) + sys.exit(1) + +home_dot_repo = os.path.expanduser('~/.repoconfig') +gpg_dir = os.path.join(home_dot_repo, 'gnupg') + +extra_args = [] +init_optparse = optparse.OptionParser(usage="repo init -u url [options]") + +# Logging +group = init_optparse.add_option_group('Logging options') +group.add_option('-q', '--quiet', + dest="quiet", action="store_true", default=False, + help="be quiet") + +# Manifest +group = init_optparse.add_option_group('Manifest options') +group.add_option('-u', '--manifest-url', + dest='manifest_url', + help='manifest repository location', metavar='URL') +group.add_option('-b', '--manifest-branch', + dest='manifest_branch', + help='manifest branch or revision', metavar='REVISION') +group.add_option('-m', '--manifest-name', + dest='manifest_name', + help='initial manifest file', metavar='NAME.xml') +group.add_option('--mirror', + dest='mirror', action='store_true', + help='create a replica of the remote repositories ' + 'rather than a client working directory') +group.add_option('--reference', + dest='reference', + help='location of mirror directory', metavar='DIR') +group.add_option('--depth', type='int', default=None, + dest='depth', + help='create a shallow clone with given depth; see git clone') +group.add_option('--archive', + dest='archive', action='store_true', + help='checkout an archive instead of a git repository for ' + 'each project. 
See git archive.') +group.add_option('-g', '--groups', + dest='groups', default='default', + help='restrict manifest projects to ones with specified ' + 'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]', + metavar='GROUP') +group.add_option('-p', '--platform', + dest='platform', default="auto", + help='restrict manifest projects to ones with a specified ' + 'platform group [auto|all|none|linux|darwin|...]', + metavar='PLATFORM') + + +# Tool +group = init_optparse.add_option_group('repo Version options') +group.add_option('--repo-url', + dest='repo_url', + help='repo repository location', metavar='URL') +group.add_option('--repo-branch', + dest='repo_branch', + help='repo branch or revision', metavar='REVISION') +group.add_option('--no-repo-verify', + dest='no_repo_verify', action='store_true', + help='do not verify repo source code') + +# Other +group = init_optparse.add_option_group('Other options') +group.add_option('--config-name', + dest='config_name', action="store_true", default=False, + help='Always prompt for name/e-mail') + + +def _GitcInitOptions(init_optparse_arg): + init_optparse_arg.set_usage("repo gitc-init -u url -c client [options]") + g = init_optparse_arg.add_option_group('GITC options') + g.add_option('-f', '--manifest-file', + dest='manifest_file', + help='Optional manifest file to use for this GITC client.') + g.add_option('-c', '--gitc-client', + dest='gitc_client', + help='The name of the gitc_client instance to create or modify.') + +_gitc_manifest_dir = None + + +def get_gitc_manifest_dir(): + global _gitc_manifest_dir + if _gitc_manifest_dir is None: + _gitc_manifest_dir = '' + try: + with open(GITC_CONFIG_FILE, 'r') as gitc_config: + for line in gitc_config: + match = re.match('gitc_dir=(?P.*)', line) + if match: + _gitc_manifest_dir = match.group('gitc_manifest_dir') + except IOError: + pass + return _gitc_manifest_dir + + +def gitc_parse_clientdir(gitc_fs_path): + """Parse a path in the GITC FS and return its client name. + + @param gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR. + + @returns: The GITC client name + """ + if gitc_fs_path == GITC_FS_ROOT_DIR: + return None + if not gitc_fs_path.startswith(GITC_FS_ROOT_DIR): + manifest_dir = get_gitc_manifest_dir() + if manifest_dir == '': + return None + if manifest_dir[-1] != '/': + manifest_dir += '/' + if gitc_fs_path == manifest_dir: + return None + if not gitc_fs_path.startswith(manifest_dir): + return None + return gitc_fs_path.split(manifest_dir)[1].split('/')[0] + return gitc_fs_path.split(GITC_FS_ROOT_DIR)[1].split('/')[0] + + +class CloneFailure(Exception): + + """Indicate the remote clone of repo itself failed. + """ + + +def _Init(args, gitc_init=False): + """Installs repo by cloning it over the network. + """ + if gitc_init: + _GitcInitOptions(init_optparse) + opt, args = init_optparse.parse_args(args) + if args: + init_optparse.print_usage() + sys.exit(1) + + url = opt.repo_url + if not url: + url = REPO_URL + extra_args.append('--repo-url=%s' % url) + + branch = opt.repo_branch + if not branch: + branch = REPO_REV + extra_args.append('--repo-branch=%s' % branch) + + if branch.startswith('refs/heads/'): + branch = branch[len('refs/heads/'):] + if branch.startswith('refs/'): + _print("fatal: invalid branch name '%s'" % branch, file=sys.stderr) + raise CloneFailure() + + try: + if gitc_init: + gitc_manifest_dir = get_gitc_manifest_dir() + if not gitc_manifest_dir: + _print('fatal: GITC filesystem is not available. 
Exiting...', + file=sys.stderr) + sys.exit(1) + gitc_client = opt.gitc_client + if not gitc_client: + gitc_client = gitc_parse_clientdir(os.getcwd()) + if not gitc_client: + _print('fatal: GITC client (-c) is required.', file=sys.stderr) + sys.exit(1) + client_dir = os.path.join(gitc_manifest_dir, gitc_client) + if not os.path.exists(client_dir): + os.makedirs(client_dir) + os.chdir(client_dir) + if os.path.exists(repodir): + # This GITC Client has already initialized repo so continue. + return + + os.mkdir(repodir) + except OSError as e: + if e.errno != errno.EEXIST: + _print('fatal: cannot make %s directory: %s' + % (repodir, e.strerror), file=sys.stderr) + # Don't raise CloneFailure; that would delete the + # name. Instead exit immediately. + # + sys.exit(1) + + _CheckGitVersion() + try: + if NeedSetupGnuPG(): + can_verify = SetupGnuPG(opt.quiet) + else: + can_verify = True + + dst = os.path.abspath(os.path.join(repodir, S_repo)) + _Clone(url, dst, opt.quiet) + + if can_verify and not opt.no_repo_verify: + rev = _Verify(dst, branch, opt.quiet) + else: + rev = 'refs/remotes/origin/%s^0' % branch + + _Checkout(dst, branch, rev, opt.quiet) + except CloneFailure: + if opt.quiet: + _print('fatal: repo init failed; run without --quiet to see why', + file=sys.stderr) + raise + + +def ParseGitVersion(ver_str): + if not ver_str.startswith('git version '): + return None + + num_ver_str = ver_str[len('git version '):].strip().split('-')[0] + to_tuple = [] + for num_str in num_ver_str.split('.')[:3]: + if num_str.isdigit(): + to_tuple.append(int(num_str)) + else: + to_tuple.append(0) + return tuple(to_tuple) + + +def _CheckGitVersion(): + cmd = [GIT, '--version'] + try: + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) + except OSError as e: + _print(file=sys.stderr) + _print("fatal: '%s' is not available" % GIT, file=sys.stderr) + _print('fatal: %s' % e, file=sys.stderr) + _print(file=sys.stderr) + _print('Please make sure %s is installed and in your path.' 
% GIT, + file=sys.stderr) + raise CloneFailure() + + ver_str = proc.stdout.read().strip() + proc.stdout.close() + proc.wait() + + ver_act = ParseGitVersion(ver_str) + if ver_act is None: + _print('error: "%s" unsupported' % ver_str, file=sys.stderr) + raise CloneFailure() + + if ver_act < MIN_GIT_VERSION: + need = '.'.join(map(str, MIN_GIT_VERSION)) + _print('fatal: git %s or later required' % need, file=sys.stderr) + raise CloneFailure() + + +def NeedSetupGnuPG(): + if not os.path.isdir(home_dot_repo): + return True + + kv = os.path.join(home_dot_repo, 'keyring-version') + if not os.path.exists(kv): + return True + + kv = open(kv).read() + if not kv: + return True + + kv = tuple(map(int, kv.split('.'))) + if kv < KEYRING_VERSION: + return True + return False + + +def SetupGnuPG(quiet): + try: + os.mkdir(home_dot_repo) + except OSError as e: + if e.errno != errno.EEXIST: + _print('fatal: cannot make %s directory: %s' + % (home_dot_repo, e.strerror), file=sys.stderr) + sys.exit(1) + + try: + os.mkdir(gpg_dir, stat.S_IRWXU) + except OSError as e: + if e.errno != errno.EEXIST: + _print('fatal: cannot make %s directory: %s' % (gpg_dir, e.strerror), + file=sys.stderr) + sys.exit(1) + + env = os.environ.copy() + env['GNUPGHOME'] = gpg_dir.encode() + + cmd = ['gpg', '--import'] + try: + proc = subprocess.Popen(cmd, + env=env, + stdin=subprocess.PIPE) + except OSError as e: + if not quiet: + _print('warning: gpg (GnuPG) is not available.', file=sys.stderr) + _print('warning: Installing it is strongly encouraged.', file=sys.stderr) + _print(file=sys.stderr) + return False + + proc.stdin.write(MAINTAINER_KEYS) + proc.stdin.close() + + if proc.wait() != 0: + _print('fatal: registering repo maintainer keys failed', file=sys.stderr) + sys.exit(1) + _print() + + fd = open(os.path.join(home_dot_repo, 'keyring-version'), 'w') + fd.write('.'.join(map(str, KEYRING_VERSION)) + '\n') + fd.close() + return True + + +def _SetConfig(local, name, value): + """Set a git configuration option to the specified value. 
+ """ + cmd = [GIT, 'config', name, value] + if subprocess.Popen(cmd, cwd=local).wait() != 0: + raise CloneFailure() + + +def _InitHttp(): + handlers = [] + + mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() + try: + import netrc + n = netrc.netrc() + for host in n.hosts: + p = n.hosts[host] + mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2]) + mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2]) + except: # pylint: disable=bare-except + pass + handlers.append(urllib.request.HTTPBasicAuthHandler(mgr)) + handlers.append(urllib.request.HTTPDigestAuthHandler(mgr)) + + if 'http_proxy' in os.environ: + url = os.environ['http_proxy'] + handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url})) + if 'REPO_CURL_VERBOSE' in os.environ: + handlers.append(urllib.request.HTTPHandler(debuglevel=1)) + handlers.append(urllib.request.HTTPSHandler(debuglevel=1)) + urllib.request.install_opener(urllib.request.build_opener(*handlers)) + + +def _Fetch(url, local, src, quiet): + if not quiet: + _print('Get %s' % url, file=sys.stderr) + + cmd = [GIT, 'fetch'] + if quiet: + cmd.append('--quiet') + err = subprocess.PIPE + else: + err = None + cmd.append(src) + cmd.append('+refs/heads/*:refs/remotes/origin/*') + cmd.append('refs/tags/*:refs/tags/*') + + proc = subprocess.Popen(cmd, cwd=local, stderr=err) + if err: + proc.stderr.read() + proc.stderr.close() + if proc.wait() != 0: + raise CloneFailure() + + +def _DownloadBundle(url, local, quiet): + if not url.endswith('/'): + url += '/' + url += 'clone.bundle' + + proc = subprocess.Popen( + [GIT, 'config', '--get-regexp', 'url.*.insteadof'], + cwd=local, + stdout=subprocess.PIPE) + for line in proc.stdout: + m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line) + if m: + new_url = m.group(1) + old_url = m.group(2) + if url.startswith(old_url): + url = new_url + url[len(old_url):] + break + proc.stdout.close() + proc.wait() + + if not url.startswith('http:') and not url.startswith('https:'): + return False + + dest = open(os.path.join(local, '.git', 'clone.bundle'), 'w+b') + try: + try: + r = urllib.request.urlopen(url) + except urllib.error.HTTPError as e: + if e.code in [401, 403, 404]: + return False + _print('fatal: Cannot get %s' % url, file=sys.stderr) + _print('fatal: HTTP error %s' % e.code, file=sys.stderr) + raise CloneFailure() + except urllib.error.URLError as e: + _print('fatal: Cannot get %s' % url, file=sys.stderr) + _print('fatal: error %s' % e.reason, file=sys.stderr) + raise CloneFailure() + try: + if not quiet: + _print('Get %s' % url, file=sys.stderr) + while True: + buf = r.read(8192) + if buf == '': + return True + dest.write(buf) + finally: + r.close() + finally: + dest.close() + + +def _ImportBundle(local): + path = os.path.join(local, '.git', 'clone.bundle') + try: + _Fetch(local, local, path, True) + finally: + os.remove(path) + + +def _Clone(url, local, quiet): + """Clones a git repository to a new subdirectory of repodir + """ + try: + os.mkdir(local) + except OSError as e: + _print('fatal: cannot make %s directory: %s' % (local, e.strerror), + file=sys.stderr) + raise CloneFailure() + + cmd = [GIT, 'init', '--quiet'] + try: + proc = subprocess.Popen(cmd, cwd=local) + except OSError as e: + _print(file=sys.stderr) + _print("fatal: '%s' is not available" % GIT, file=sys.stderr) + _print('fatal: %s' % e, file=sys.stderr) + _print(file=sys.stderr) + _print('Please make sure %s is installed and in your path.' 
% GIT, + file=sys.stderr) + raise CloneFailure() + if proc.wait() != 0: + _print('fatal: could not create %s' % local, file=sys.stderr) + raise CloneFailure() + + _InitHttp() + _SetConfig(local, 'remote.origin.url', url) + _SetConfig(local, + 'remote.origin.fetch', + '+refs/heads/*:refs/remotes/origin/*') + if _DownloadBundle(url, local, quiet): + _ImportBundle(local) + _Fetch(url, local, 'origin', quiet) + + +def _Verify(cwd, branch, quiet): + """Verify the branch has been signed by a tag. + """ + cmd = [GIT, 'describe', 'origin/%s' % branch] + proc = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd) + cur = proc.stdout.read().strip() + proc.stdout.close() + + proc.stderr.read() + proc.stderr.close() + + if proc.wait() != 0 or not cur: + _print(file=sys.stderr) + _print("fatal: branch '%s' has not been signed" % branch, file=sys.stderr) + raise CloneFailure() + + m = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur) + if m: + cur = m.group(1) + if not quiet: + _print(file=sys.stderr) + _print("info: Ignoring branch '%s'; using tagged release '%s'" + % (branch, cur), file=sys.stderr) + _print(file=sys.stderr) + + env = os.environ.copy() + env['GNUPGHOME'] = gpg_dir.encode() + + cmd = [GIT, 'tag', '-v', cur] + proc = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd, + env=env) + out = proc.stdout.read() + proc.stdout.close() + + err = proc.stderr.read() + proc.stderr.close() + + if proc.wait() != 0: + _print(file=sys.stderr) + _print(out, file=sys.stderr) + _print(err, file=sys.stderr) + _print(file=sys.stderr) + raise CloneFailure() + return '%s^0' % cur + + +def _Checkout(cwd, branch, rev, quiet): + """Checkout an upstream branch into the repository and track it. + """ + cmd = [GIT, 'update-ref', 'refs/heads/default', rev] + if subprocess.Popen(cmd, cwd=cwd).wait() != 0: + raise CloneFailure() + + _SetConfig(cwd, 'branch.default.remote', 'origin') + _SetConfig(cwd, 'branch.default.merge', 'refs/heads/%s' % branch) + + cmd = [GIT, 'symbolic-ref', 'HEAD', 'refs/heads/default'] + if subprocess.Popen(cmd, cwd=cwd).wait() != 0: + raise CloneFailure() + + cmd = [GIT, 'read-tree', '--reset', '-u'] + if not quiet: + cmd.append('-v') + cmd.append('HEAD') + if subprocess.Popen(cmd, cwd=cwd).wait() != 0: + raise CloneFailure() + + +def _FindRepo(): + """Look for a repo installation, starting at the current directory. + """ + curdir = os.getcwd() + repo = None + + olddir = None + while curdir != '/' \ + and curdir != olddir \ + and not repo: + repo = os.path.join(curdir, repodir, REPO_MAIN) + if not os.path.isfile(repo): + repo = None + olddir = curdir + curdir = os.path.dirname(curdir) + return (repo, os.path.join(curdir, repodir)) + + +class _Options(object): + help = False + + +def _ParseArguments(args): + cmd = None + opt = _Options() + arg = [] + + for i in range(len(args)): + a = args[i] + if a == '-h' or a == '--help': + opt.help = True + + elif not a.startswith('-'): + cmd = a + arg = args[i + 1:] + break + return cmd, opt, arg + + +def _Usage(): + gitc_usage = "" + if get_gitc_manifest_dir(): + gitc_usage = " gitc-init Initialize a GITC Client.\n" + + _print( + """usage: repo COMMAND [ARGS] + +repo is not yet installed. Use "repo init" to install it here. + +The most commonly used repo commands are: + + init Install repo in the current working directory +""" + gitc_usage + + """ help Display detailed help on a command + +For access to the full online help, install repo ("repo init"). 
+""", file=sys.stderr) + sys.exit(1) + + +def _Help(args): + if args: + if args[0] == 'init': + init_optparse.print_help() + sys.exit(0) + elif args[0] == 'gitc-init': + _GitcInitOptions(init_optparse) + init_optparse.print_help() + sys.exit(0) + else: + _print("error: '%s' is not a bootstrap command.\n" + ' For access to online help, install repo ("repo init").' + % args[0], file=sys.stderr) + else: + _Usage() + sys.exit(1) + + +def _NotInstalled(): + _print('error: repo is not installed. Use "repo init" to install it here.', + file=sys.stderr) + sys.exit(1) + + +def _NoCommands(cmd): + _print("""error: command '%s' requires repo to be installed first. + Use "repo init" to install it here.""" % cmd, file=sys.stderr) + sys.exit(1) + + +def _RunSelf(wrapper_path): + my_dir = os.path.dirname(wrapper_path) + my_main = os.path.join(my_dir, 'main.py') + my_git = os.path.join(my_dir, '.git') + + if os.path.isfile(my_main) and os.path.isdir(my_git): + for name in ['git_config.py', + 'project.py', + 'subcmds']: + if not os.path.exists(os.path.join(my_dir, name)): + return None, None + return my_main, my_git + return None, None + + +def _SetDefaultsTo(gitdir): + global REPO_URL + global REPO_REV + + REPO_URL = gitdir + proc = subprocess.Popen([GIT, + '--git-dir=%s' % gitdir, + 'symbolic-ref', + 'HEAD'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + REPO_REV = proc.stdout.read().strip() + proc.stdout.close() + + proc.stderr.read() + proc.stderr.close() + + if proc.wait() != 0: + _print('fatal: %s has no current branch' % gitdir, file=sys.stderr) + sys.exit(1) + + +def main(orig_args): + cmd, opt, args = _ParseArguments(orig_args) + + repo_main, rel_repo_dir = None, None + # Don't use the local repo copy, make sure to switch to the gitc client first. + if cmd != 'gitc-init': + repo_main, rel_repo_dir = _FindRepo() + + wrapper_path = os.path.abspath(__file__) + my_main, my_git = _RunSelf(wrapper_path) + + cwd = os.getcwd() + if get_gitc_manifest_dir() and cwd.startswith(get_gitc_manifest_dir()): + _print('error: repo cannot be used in the GITC local manifest directory.' + '\nIf you want to work on this GITC client please rerun this ' + 'command from the corresponding client under /gitc/', + file=sys.stderr) + sys.exit(1) + if not repo_main: + if opt.help: + _Usage() + if cmd == 'help': + _Help(args) + if not cmd: + _NotInstalled() + if cmd == 'init' or cmd == 'gitc-init': + if my_git: + _SetDefaultsTo(my_git) + try: + _Init(args, gitc_init=(cmd == 'gitc-init')) + except CloneFailure: + shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True) + sys.exit(1) + repo_main, rel_repo_dir = _FindRepo() + else: + _NoCommands(cmd) + + if my_main: + repo_main = my_main + + ver_str = '.'.join(map(str, VERSION)) + me = [sys.executable, repo_main, + '--repo-dir=%s' % rel_repo_dir, + '--wrapper-version=%s' % ver_str, + '--wrapper-path=%s' % wrapper_path, + '--'] + me.extend(orig_args) + me.extend(extra_args) + try: + os.execv(sys.executable, me) + except OSError as e: + _print("fatal: unable to start %s" % repo_main, file=sys.stderr) + _print("fatal: %s" % e, file=sys.stderr) + sys.exit(148) + + +if __name__ == '__main__': + if ver[0] == 3: + _print('warning: Python 3 support is currently experimental. 
YMMV.\n' + 'Please use Python 2.6 - 2.7 instead.', + file=sys.stderr) + main(sys.argv[1:]) diff --git a/swapon.sh b/swapon.sh new file mode 100644 index 0000000..da34e82 --- /dev/null +++ b/swapon.sh @@ -0,0 +1 @@ + sudo swapon /dev/zvol/dat/swap diff --git a/syncauto.sh b/syncauto.sh index d84e5ae..e2cd183 100644 --- a/syncauto.sh +++ b/syncauto.sh @@ -1,3 +1,4 @@ +srcdir=/dat/audio/auto # sudo mount -o uid=marc /dev/sdt1 /media/hdext -rsync -a --no-o --no-p --no-g -L --modify-window 1 --stats --delete --progress /mp3/auto/ /media/hdext/ +rsync -a --no-o --no-p --no-g -L --modify-window 1 --stats --delete --progress $srcdir /media/hdext/ diff --git a/taz-download.sh b/taz-download.sh index 6c549f0..1204191 100755 --- a/taz-download.sh +++ b/taz-download.sh @@ -8,7 +8,7 @@ # So sehen die Dateien aus: # http://www.taz.de/cgi-bin/digiabo/2007_04_17_HTM.zip -TAZDIR=/media/nas/books/taz +TAZDIR=/dat/books/taz TAZUSER=103450 TAZPASSWD=oxculo TAZTYPE=.pdf @@ -74,4 +74,4 @@ do done # Sync to Dropbox -bash /home/marc/bin/tazsync.sh +bash $HOME/bin/tazsync.sh diff --git a/tazsync.sh b/tazsync.sh index b02073a..f3247dc 100644 --- a/tazsync.sh +++ b/tazsync.sh @@ -1,8 +1,8 @@ -daysback=10 +daysback=22 srcdir=/dat/books/taz -dstdir=/home/marc/Dropbox-ipad/taz +dstdir=/dat/docu/sync/taz tmpdir=/tmp [ -d $dstdir ] || exit @@ -15,10 +15,14 @@ find . -type f | sort> $tmpdir/alt.lst cd $srcdir find . -mtime -$daysback -name "*.epub" | sort > $tmpdir/new.lst -rsync --files-from=$tmpdir/new.lst . $dstdir/ +#exit + +diff $tmpdir/new.lst $tmpdir/all.lst | grep "^<" | sed "s/^< //" > $tmpdir/added.lst +rsync --files-from=$tmpdir/added.lst --progress . $dstdir/ cd $dstdir + cat $tmpdir/alt.lst $tmpdir/new.lst | sort -u > $tmpdir/all.lst cat $tmpdir/all.lst cat $tmpdir/new.lst diff --git a/vdr-reload.sh b/vdr-reload.sh new file mode 100644 index 0000000..f9bd733 --- /dev/null +++ b/vdr-reload.sh @@ -0,0 +1,16 @@ + + +sudo service vdr stop +cd ~/src/vdr/dvbsky-current +sudo make rmmod +sudo make rmmod +sudo rmmod cx2341x +sudo rmmod v4l2_common +sudo rmmod videobuf2_core +sudo rmmod videobuf2_v4l2 +sudo rmmod videobuf2_core +sudo make rmmod + +sudo modprobe cx2341x cx23885 videobuf2_v4l2 cx25840 videobuf2_v4l2 videobuf2_core +sudo service vdr start + diff --git a/vdr2mp3.rb b/vdr2mp3.rb new file mode 100644 index 0000000..40fba17 --- /dev/null +++ b/vdr2mp3.rb @@ -0,0 +1,54 @@ +require 'fileutils' + +def split_all(path) + head, tail = File.split(path) + return [tail] if head == '.' 
|| tail == '/' + return [head, tail] if head == '/' + return split_all(head) + [tail] +end + + +vidroot="/dat/video/vdr" +outdir="/dat/audio/vdr" +recdir=vidroot+"/"+ARGV.at(0) +#puts "p:#{p}" + +# aus videopfad extraieren +dirsplitted=split_all(recdir) +if dirsplitted.length >= 2 + namedir=dirsplitted.at(-2).gsub("%", "") + datedir=dirsplitted.at(-1) +end +#puts "dn:#{namedir}" + +# Titel und Datum aus Infofile holen +infofile=recdir+"/info" +infofile=recdir+"/info.vdr" unless File.exists?(infofile) +puts "infofile:#{infofile}" +title=open(infofile).grep(/^\s*T/).to_s.gsub("T","") +tim=open(infofile).grep(/^\s*E/).to_s.split(" ").at(2) +t=Time.at(tim.to_i) +#puts "time:#{t.to_s}" +#puts "tim:#{tim}" + +#Zielverzeichnisbaum anlegen +outdir=outdir+"/"+namedir +Dir::mkdir(outdir) unless File.exists?(outdir) +outdir=outdir+"/"+datedir +Dir::mkdir(outdir) unless File.exists?(outdir) +# Infofile kopieren +FileUtils.cp(infofile, outdir) + +# Kommando zum konvertieren +recfile=recdir+"/00001.ts" +recfile=recdir+"/001.vdr" unless File.exists?(recfile) +title=title.to_s.strip +bitrate="128k" +samplerate="44100" +outfile=outdir+"/"+namedir+".mp3" +author="vdr" +yr=t.year +cmd="avconv -i #{recfile} -acodec libmp3lame -f mp3 -metadata title='#{title}' -metadata author=#{author} -metadata year=#{yr} -ss 00:00:00 -ab #{bitrate} -ar #{samplerate} -threads auto #{outfile}" +puts "cmd:#{cmd}" +exec(cmd) +puts "Fertig." diff --git a/vdr2mp3.sh b/vdr2mp3.sh new file mode 100644 index 0000000..7f34530 --- /dev/null +++ b/vdr2mp3.sh @@ -0,0 +1,18 @@ +outfile=$1.mp3 + + +[ -z $vdrts ] && vdrts=00001.ts +[ -z $outfile ] && outfile="out.mp3" + +title="" +author="" +year=$(date "+%Y") +album="vdr" + + +avconv -i $vdrts -acodec libmp3lame -f mp3 -metadata title=$title -metadata author=$author -metadata year=$year -metadata album=$album -ss 00:\ +00:00 -ab 128k -ar 44100 -threads auto $outfile + + + + diff --git a/zeitsync.sh b/zeitsync.sh new file mode 100644 index 0000000..f8922e0 --- /dev/null +++ b/zeitsync.sh @@ -0,0 +1,28 @@ + +daysback=40 + +srcdir=/dat/books/zeit +dstdir=/dat/docu/sync/zeit +tmpdir=/tmp + +[ -d $dstdir ] || exit +[ -d $srcdir ] || exit +[ -d $tmpdir ] || exit + +cd $dstdir +find . -type f | sort> $tmpdir/alt.lst + +cd $srcdir +find . -mtime -$daysback -name "*.epub" | sort > $tmpdir/new.lst + + +rsync --files-from=$tmpdir/new.lst --progress . $dstdir/ + +cd $dstdir + + +cat $tmpdir/alt.lst $tmpdir/new.lst | sort -u > $tmpdir/all.lst +cat $tmpdir/all.lst +cat $tmpdir/new.lst +diff $tmpdir/new.lst $tmpdir/all.lst | grep "^>" | sed "s/^> //" | xargs rm +# $rmfiles