Diffstat (limited to 'contrib')
-rw-r--r--  contrib/README | 43
-rw-r--r--  contrib/buildsystems/Generators.pm | 42
-rw-r--r--  contrib/buildsystems/Generators/QMake.pm | 189
-rw-r--r--  contrib/buildsystems/Generators/Vcproj.pm | 626
-rw-r--r--  contrib/buildsystems/engine.pl | 359
-rw-r--r--  contrib/buildsystems/generate | 29
-rw-r--r--  contrib/buildsystems/parse.pl | 228
-rw-r--r--  contrib/ciabot/INSTALL | 54
-rw-r--r--  contrib/ciabot/README | 11
-rwxr-xr-x  contrib/ciabot/ciabot.py | 255
-rwxr-xr-x  contrib/ciabot/ciabot.sh | 233
-rw-r--r--  contrib/completion/git-completion.bash | 2663
-rw-r--r--  contrib/completion/git-completion.tcsh | 128
-rw-r--r--  contrib/completion/git-completion.zsh | 216
-rw-r--r--  contrib/completion/git-prompt.sh | 461
-rw-r--r--  contrib/convert-objects/convert-objects.c | 329
-rw-r--r--  contrib/convert-objects/git-convert-objects.txt | 29
-rw-r--r--  contrib/credential/gnome-keyring/.gitignore | 1
-rw-r--r--  contrib/credential/gnome-keyring/Makefile | 24
-rw-r--r--  contrib/credential/gnome-keyring/git-credential-gnome-keyring.c | 445
-rw-r--r--  contrib/credential/netrc/Makefile | 5
-rwxr-xr-x  contrib/credential/netrc/git-credential-netrc | 421
-rw-r--r--  contrib/credential/netrc/test.netrc | 13
-rwxr-xr-x  contrib/credential/netrc/test.pl | 106
-rw-r--r--  contrib/credential/osxkeychain/.gitignore | 1
-rw-r--r--  contrib/credential/osxkeychain/Makefile | 17
-rw-r--r--  contrib/credential/osxkeychain/git-credential-osxkeychain.c | 183
-rw-r--r--  contrib/credential/wincred/Makefile | 14
-rw-r--r--  contrib/credential/wincred/git-credential-wincred.c | 301
-rw-r--r--  contrib/diff-highlight/README | 152
-rwxr-xr-x  contrib/diff-highlight/diff-highlight | 173
-rw-r--r--  contrib/diffall/README | 31
-rwxr-xr-x  contrib/diffall/git-diffall | 257
-rw-r--r--  contrib/emacs/.gitignore | 1
-rw-r--r--  contrib/emacs/Makefile | 21
-rw-r--r--  contrib/emacs/README | 39
-rw-r--r--  contrib/emacs/git-blame.el | 484
-rw-r--r--  contrib/emacs/git.el | 1705
-rw-r--r--  contrib/examples/README | 3
-rw-r--r--  contrib/examples/builtin-fetch--tool.c | 574
-rwxr-xr-x  contrib/examples/git-checkout.sh | 302
-rwxr-xr-x  contrib/examples/git-clean.sh | 118
-rwxr-xr-x  contrib/examples/git-clone.sh | 525
-rwxr-xr-x  contrib/examples/git-commit.sh | 639
-rwxr-xr-x  contrib/examples/git-fetch.sh | 379
-rwxr-xr-x  contrib/examples/git-gc.sh | 37
-rwxr-xr-x  contrib/examples/git-ls-remote.sh | 142
-rwxr-xr-x  contrib/examples/git-merge-ours.sh | 14
-rwxr-xr-x  contrib/examples/git-merge.sh | 620
-rwxr-xr-x  contrib/examples/git-notes.sh | 121
-rwxr-xr-x  contrib/examples/git-remote.perl | 474
-rwxr-xr-x  contrib/examples/git-rerere.perl | 284
-rwxr-xr-x  contrib/examples/git-reset.sh | 106
-rwxr-xr-x  contrib/examples/git-resolve.sh | 112
-rwxr-xr-x  contrib/examples/git-revert.sh | 207
-rwxr-xr-x  contrib/examples/git-svnimport.perl | 976
-rw-r--r--  contrib/examples/git-svnimport.txt | 179
-rwxr-xr-x  contrib/examples/git-tag.sh | 205
-rwxr-xr-x  contrib/examples/git-verify-tag.sh | 45
-rwxr-xr-x  contrib/fast-import/git-import.perl | 64
-rwxr-xr-x  contrib/fast-import/git-import.sh | 38
-rw-r--r--  contrib/fast-import/git-p4.README | 12
-rwxr-xr-x  contrib/fast-import/import-directories.perl | 417
-rwxr-xr-x  contrib/fast-import/import-tars.perl | 189
-rwxr-xr-x  contrib/fast-import/import-zips.py | 78
-rw-r--r--  contrib/git-jump/README | 92
-rwxr-xr-x  contrib/git-jump/git-jump | 69
-rwxr-xr-x  contrib/git-resurrect.sh | 181
-rw-r--r--  contrib/git-shell-commands/README | 18
-rwxr-xr-x  contrib/git-shell-commands/help | 18
-rwxr-xr-x  contrib/git-shell-commands/list | 10
-rwxr-xr-x  contrib/gitview/gitview | 1305
-rw-r--r--  contrib/gitview/gitview.txt | 57
-rwxr-xr-x  contrib/hg-to-git/hg-to-git.py | 255
-rw-r--r--  contrib/hg-to-git/hg-to-git.txt | 21
-rwxr-xr-x  contrib/hooks/post-receive-email | 748
-rw-r--r--  contrib/hooks/pre-auto-gc-battery | 43
-rw-r--r--  contrib/hooks/setgitperms.perl | 214
-rw-r--r--  contrib/hooks/update-paranoid | 421
-rw-r--r--  contrib/mw-to-git/.gitignore | 1
-rw-r--r--  contrib/mw-to-git/Makefile | 17
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki.perl | 1322
-rw-r--r--  contrib/mw-to-git/git-remote-mediawiki.txt | 7
-rw-r--r--  contrib/mw-to-git/t/.gitignore | 4
-rw-r--r--  contrib/mw-to-git/t/Makefile | 31
-rw-r--r--  contrib/mw-to-git/t/README | 124
-rwxr-xr-x  contrib/mw-to-git/t/install-wiki.sh | 45
-rw-r--r--  contrib/mw-to-git/t/install-wiki/.gitignore | 1
-rw-r--r--  contrib/mw-to-git/t/install-wiki/LocalSettings.php | 129
-rw-r--r--  contrib/mw-to-git/t/install-wiki/db_install.php | 120
-rw-r--r--  contrib/mw-to-git/t/push-pull-tests.sh | 144
-rwxr-xr-x  contrib/mw-to-git/t/t9360-mw-to-git-clone.sh | 257
-rwxr-xr-x  contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh | 24
-rwxr-xr-x  contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh | 347
-rwxr-xr-x  contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh | 198
-rwxr-xr-x  contrib/mw-to-git/t/t9364-pull-by-rev.sh | 17
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh | 435
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw.pl | 225
-rw-r--r--  contrib/mw-to-git/t/test.config | 35
-rw-r--r--  contrib/p4import/README | 1
-rw-r--r--  contrib/p4import/git-p4import.py | 365
-rw-r--r--  contrib/p4import/git-p4import.txt | 167
-rw-r--r--  contrib/persistent-https/LICENSE | 202
-rw-r--r--  contrib/persistent-https/Makefile | 38
-rw-r--r--  contrib/persistent-https/README | 62
-rw-r--r--  contrib/persistent-https/client.go | 189
-rw-r--r--  contrib/persistent-https/main.go | 82
-rw-r--r--  contrib/persistent-https/proxy.go | 190
-rw-r--r--  contrib/persistent-https/socket.go | 97
-rw-r--r--  contrib/remote-helpers/Makefile | 14
-rwxr-xr-x  contrib/remote-helpers/git-remote-bzr | 960
-rwxr-xr-x  contrib/remote-helpers/git-remote-hg | 1220
-rwxr-xr-x  contrib/remote-helpers/test-bzr.sh | 361
-rwxr-xr-x  contrib/remote-helpers/test-hg-bidi.sh | 240
-rwxr-xr-x  contrib/remote-helpers/test-hg-hg-git.sh | 527
-rwxr-xr-x  contrib/remote-helpers/test-hg.sh | 692
-rwxr-xr-x  contrib/remotes2config.sh | 33
-rwxr-xr-x  contrib/rerere-train.sh | 52
-rwxr-xr-x  contrib/stats/git-common-hash | 26
-rwxr-xr-x  contrib/stats/mailmap.pl | 70
-rwxr-xr-x  contrib/stats/packinfo.pl | 212
-rw-r--r--  contrib/subtree/.gitignore | 6
-rw-r--r--  contrib/subtree/COPYING | 339
-rw-r--r--  contrib/subtree/INSTALL | 28
-rw-r--r--  contrib/subtree/Makefile | 53
-rw-r--r--  contrib/subtree/README | 8
-rwxr-xr-x  contrib/subtree/git-subtree.sh | 725
-rw-r--r--  contrib/subtree/git-subtree.txt | 367
-rw-r--r--  contrib/subtree/t/Makefile | 69
-rwxr-xr-x  contrib/subtree/t/t7900-subtree.sh | 468
-rw-r--r--  contrib/subtree/todo | 50
-rw-r--r--  contrib/svn-fe/.gitignore | 4
-rw-r--r--  contrib/svn-fe/Makefile | 63
-rw-r--r--  contrib/svn-fe/svn-fe.c | 18
-rw-r--r--  contrib/svn-fe/svn-fe.txt | 71
-rwxr-xr-x  contrib/svn-fe/svnrdump_sim.py | 57
-rw-r--r--  contrib/thunderbird-patch-inline/README | 20
-rwxr-xr-x  contrib/thunderbird-patch-inline/appp.sh | 55
-rw-r--r--  contrib/vim/README | 22
-rwxr-xr-x  contrib/workdir/git-new-workdir | 82
140 files changed, 32389 insertions, 0 deletions
diff --git a/contrib/README b/contrib/README
new file mode 100644
index 0000000..05f291c
--- /dev/null
+++ b/contrib/README
@@ -0,0 +1,43 @@
+Contributed Software
+
+Although these pieces are available as part of the official git
+source tree, they are in a somewhat different status. The
+intention is to keep interesting tools around git here, maybe
+even experimental ones, to give users easier access to them,
+and to give tools wider exposure, so that they can be improved
+faster.
+
+I am not expecting to touch these myself that much. As far as
+my day-to-day operation is concerned, these subdirectories are
+owned by their respective primary authors. I am willing to help
+if users of these components and the contrib/ subtree "owners"
+have technical/design issues to resolve, but the initiative to
+fix and/or enhance things _must_ be on the side of the subtree
+owners. IOW, I won't be actively looking for bugs and rooms for
+enhancements in them as the git maintainer -- I may only do so
+just as one of the users when I want to scratch my own itch. If
+you have patches to things in contrib/ area, the patch should be
+first sent to the primary author, and then the primary author
+should ack and forward it to me (git pull request is nicer).
+This is the same way I have been treating gitk, and to a
+lesser degree various foreign SCM interfaces, so you know the
+drill.
+
+I expect things that start their life in the contrib/ area
+to graduate out of contrib/ once they mature, either by becoming
+projects on their own, or moving to the toplevel directory. On
+the other hand, I expect I'll be proposing removal of disused
+and inactive ones from time to time.
+
+If you have new things to add to this area, please first propose
+it on the git mailing list, and after a list discussion proves
+there is some general interest (it does not have to be a
+list-wide consensus for a tool targeted to a relatively narrow
+audience -- for example I do not work with projects whose
+upstream is svn, so I have no use for git-svn myself, but it is
+of general interest for people who need to interoperate with SVN
+repositories in a way that git-svn handles better than git-svnimport),
+submit a patch to create a subdirectory of contrib/ and put your
+stuff there.
+
+-jc
diff --git a/contrib/buildsystems/Generators.pm b/contrib/buildsystems/Generators.pm
new file mode 100644
index 0000000..408ef71
--- /dev/null
+++ b/contrib/buildsystems/Generators.pm
@@ -0,0 +1,42 @@
+package Generators;
+require Exporter;
+
+use strict;
+use File::Basename;
+no strict 'refs';
+use vars qw($VERSION @AVAILABLE);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+ local(*D);
+ my $me = $INC{"Generators.pm"};
+ die "Couldn't find myself in \@INC, which is required to load the generators!" if ("$me" eq "");
+ $me = dirname($me);
+ if (opendir(D,"$me/Generators")) {
+ foreach my $gen (readdir(D)) {
+ next if ($gen =~ /^\.\.?$/);
+ require "${me}/Generators/$gen";
+ $gen =~ s,\.pm,,;
+ push(@AVAILABLE, $gen);
+ }
+ closedir(D);
+ my $gens = join(', ', @AVAILABLE);
+ }
+
+ push @EXPORT_OK, qw(available);
+}
+
+sub available {
+ return @AVAILABLE;
+}
+
+sub generate {
+ my ($gen, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ return eval("Generators::${gen}::generate(\$git_dir, \$out_dir, \$rel_dir, \%build_structure)") if grep(/^$gen$/, @AVAILABLE);
+ die "Generator \"${gen}\" is not available!\nAvailable generators are: @AVAILABLE\n";
+}
+
+1;
diff --git a/contrib/buildsystems/Generators/QMake.pm b/contrib/buildsystems/Generators/QMake.pm
new file mode 100644
index 0000000..ff3b657
--- /dev/null
+++ b/contrib/buildsystems/Generators/QMake.pm
@@ -0,0 +1,189 @@
+package Generators::QMake;
+require Exporter;
+
+use strict;
+use vars qw($VERSION);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+ push @EXPORT_OK, qw(generate);
+}
+
+sub generate {
+ my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+
+ my @libs = @{$build_structure{"LIBS"}};
+ foreach (@libs) {
+ createLibProject($_, $git_dir, $out_dir, $rel_dir, %build_structure);
+ }
+
+ my @apps = @{$build_structure{"APPS"}};
+ foreach (@apps) {
+ createAppProject($_, $git_dir, $out_dir, $rel_dir, %build_structure);
+ }
+
+ createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
+ return 0;
+}
+
+sub createLibProject {
+ my ($libname, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ print "Generate $libname lib project\n";
+ $rel_dir = "../$rel_dir";
+
+ my $sources = join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"LIBS_${libname}_SOURCES"}})));
+ my $defines = join(" \\\n\t", sort(@{$build_structure{"LIBS_${libname}_DEFINES"}}));
+ my $includes= join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"LIBS_${libname}_INCLUDES"}})));
+ my $cflags = join(" ", sort(@{$build_structure{"LIBS_${libname}_CFLAGS"}}));
+
+ my $cflags_debug = $cflags;
+ $cflags_debug =~ s/-MT/-MTd/;
+ $cflags_debug =~ s/-O.//;
+
+ my $cflags_release = $cflags;
+ $cflags_release =~ s/-MTd/-MT/;
+
+ my @tmp = @{$build_structure{"LIBS_${libname}_LFLAGS"}};
+ my @tmp2 = ();
+ foreach (@tmp) {
+ if (/^-LTCG/) {
+ } elsif (/^-L/) {
+ $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+ }
+ push(@tmp2, $_);
+ }
+ my $lflags = join(" ", sort(@tmp));
+
+ my $target = $libname;
+ $target =~ s/\//_/g;
+ $defines =~ s/-D//g;
+ $defines =~ s/"/\\\\"/g;
+ $includes =~ s/-I//g;
+ mkdir "$target" || die "Could not create the directory $target for lib project!\n";
+ open F, ">$target/$target.pro" || die "Could not open $target/$target.pro for writing!\n";
+ print F << "EOM";
+TEMPLATE = lib
+TARGET = $target
+DESTDIR = $rel_dir
+
+CONFIG -= qt
+CONFIG += static
+
+QMAKE_CFLAGS =
+QMAKE_CFLAGS_RELEASE = $cflags_release
+QMAKE_CFLAGS_DEBUG = $cflags_debug
+QMAKE_LIBFLAGS = $lflags
+
+DEFINES += \\
+ $defines
+
+INCLUDEPATH += \\
+ $includes
+
+SOURCES += \\
+ $sources
+EOM
+ close F;
+}
+
+sub createAppProject {
+ my ($appname, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ print "Generate $appname app project\n";
+ $rel_dir = "../$rel_dir";
+
+ my $sources = join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"APPS_${appname}_SOURCES"}})));
+ my $defines = join(" \\\n\t", sort(@{$build_structure{"APPS_${appname}_DEFINES"}}));
+ my $includes= join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"APPS_${appname}_INCLUDES"}})));
+ my $cflags = join(" ", sort(@{$build_structure{"APPS_${appname}_CFLAGS"}}));
+
+ my $cflags_debug = $cflags;
+ $cflags_debug =~ s/-MT/-MTd/;
+ $cflags_debug =~ s/-O.//;
+
+ my $cflags_release = $cflags;
+ $cflags_release =~ s/-MTd/-MT/;
+
+ my $libs;
+ foreach (sort(@{$build_structure{"APPS_${appname}_LIBS"}})) {
+ $_ =~ s/\//_/g;
+ $libs .= " $_";
+ }
+ my @tmp = @{$build_structure{"APPS_${appname}_LFLAGS"}};
+ my @tmp2 = ();
+ foreach (@tmp) {
+ # next if ($_ eq "-NODEFAULTLIB:MSVCRT.lib");
+ if (/^-LTCG/) {
+ } elsif (/^-L/) {
+ $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+ }
+ push(@tmp2, $_);
+ }
+ my $lflags = join(" ", sort(@tmp));
+
+ my $target = $appname;
+ $target =~ s/\.exe//;
+ $target =~ s/\//_/g;
+ $defines =~ s/-D//g;
+ $defines =~ s/"/\\\\"/g;
+ $includes =~ s/-I//g;
+ mkdir "$target" || die "Could not create the directory $target for app project!\n";
+ open F, ">$target/$target.pro" || die "Could not open $target/$target.pro for writing!\n";
+ print F << "EOM";
+TEMPLATE = app
+TARGET = $target
+DESTDIR = $rel_dir
+
+CONFIG -= qt embed_manifest_exe
+CONFIG += console
+
+QMAKE_CFLAGS =
+QMAKE_CFLAGS_RELEASE = $cflags_release
+QMAKE_CFLAGS_DEBUG = $cflags_debug
+QMAKE_LFLAGS = $lflags
+LIBS = $libs
+
+DEFINES += \\
+ $defines
+
+INCLUDEPATH += \\
+ $includes
+
+win32:QMAKE_LFLAGS += -LIBPATH:$rel_dir
+else: QMAKE_LFLAGS += -L$rel_dir
+
+SOURCES += \\
+ $sources
+EOM
+ close F;
+}
+
+sub createGlueProject {
+ my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ my $libs = join(" \\ \n", map("\t$_|$_.pro", @{$build_structure{"LIBS"}}));
+ my $apps = join(" \\ \n", map("\t$_|$_.pro", @{$build_structure{"APPS"}}));
+ $libs =~ s/\.a//g;
+ $libs =~ s/\//_/g;
+ $libs =~ s/\|/\//g;
+ $apps =~ s/\.exe//g;
+ $apps =~ s/\//_/g;
+ $apps =~ s/\|/\//g;
+
+ my $filename = $out_dir;
+ $filename =~ s/.*\/([^\/]+)$/$1/;
+ $filename =~ s/\/$//;
+ print "Generate glue project $filename.pro\n";
+ open F, ">$filename.pro" || die "Could not open $filename.pro for writing!\n";
+ print F << "EOM";
+TEMPLATE = subdirs
+CONFIG += ordered
+SUBDIRS += \\
+$libs \\
+$apps
+EOM
+ close F;
+}
+
+1;
diff --git a/contrib/buildsystems/Generators/Vcproj.pm b/contrib/buildsystems/Generators/Vcproj.pm
new file mode 100644
index 0000000..cfa74ad
--- /dev/null
+++ b/contrib/buildsystems/Generators/Vcproj.pm
@@ -0,0 +1,626 @@
+package Generators::Vcproj;
+require Exporter;
+
+use strict;
+use vars qw($VERSION);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+ push @EXPORT_OK, qw(generate);
+}
+
+my $guid_index = 0;
+my @GUIDS = (
+ "{E07B9989-2BF7-4F21-8918-BE22BA467AC3}",
+ "{278FFB51-0296-4A44-A81A-22B87B7C3592}",
+ "{7346A2C4-F0FD-444F-9EBE-1AF23B2B5650}",
+ "{67F421AC-EB34-4D49-820B-3196807B423F}",
+ "{385DCFE1-CC8C-4211-A451-80FCFC31CA51}",
+ "{97CC46C5-D2CC-4D26-B634-E75792B79916}",
+ "{C7CE21FE-6EF8-4012-A5C7-A22BCEDFBA11}",
+ "{51575134-3FDF-42D1-BABD-3FB12669C6C9}",
+ "{0AE195E4-9823-4B87-8E6F-20C5614AF2FF}",
+ "{4B918255-67CA-43BB-A46C-26704B666E6B}",
+ "{18CCFEEF-C8EE-4CC1-A265-26F95C9F4649}",
+ "{5D5D90FA-01B7-4973-AFE5-CA88C53AC197}",
+ "{1F054320-036D-49E1-B384-FB5DF0BC8AC0}",
+ "{7CED65EE-F2D9-4171-825B-C7D561FE5786}",
+ "{8D341679-0F07-4664-9A56-3BA0DE88B9BC}",
+ "{C189FEDC-2957-4BD7-9FA4-7622241EA145}",
+ "{66844203-1B9F-4C53-9274-164FFF95B847}",
+ "{E4FEA145-DECC-440D-AEEA-598CF381FD43}",
+ "{73300A8E-C8AC-41B0-B555-4F596B681BA7}",
+ "{873FDEB1-D01D-40BF-A1BF-8BBC58EC0F51}",
+ "{7922C8BE-76C5-4AC6-8BF7-885C0F93B782}",
+ "{E245D370-308B-4A49-BFC1-1E527827975F}",
+ "{F6FA957B-66FC-4ED7-B260-E59BBE4FE813}",
+ "{E6055070-0198-431A-BC49-8DB6CEE770AE}",
+ "{54159234-C3EB-43DA-906B-CE5DA5C74654}",
+ "{594CFC35-0B60-46F6-B8EF-9983ACC1187D}",
+ "{D93FCAB7-1F01-48D2-B832-F761B83231A5}",
+ "{DBA5E6AC-E7BE-42D3-8703-4E787141526E}",
+ "{6171953F-DD26-44C7-A3BE-CC45F86FC11F}",
+ "{9E19DDBE-F5E4-4A26-A2FE-0616E04879B8}",
+ "{AE81A615-99E3-4885-9CE0-D9CAA193E867}",
+ "{FBF4067E-1855-4F6C-8BCD-4D62E801A04D}",
+ "{17007948-6593-4AEB-8106-F7884B4F2C19}",
+ "{199D4C8D-8639-4DA6-82EF-08668C35DEE0}",
+ "{E085E50E-C140-4CF3-BE4B-094B14F0DDD6}",
+ "{00785268-A9CC-4E40-AC29-BAC0019159CE}",
+ "{4C06F56A-DCDB-46A6-B67C-02339935CF12}",
+ "{3A62D3FD-519E-4EC9-8171-D2C1BFEA022F}",
+ "{3A62D3FD-519E-4EC9-8171-D2C1BFEA022F}",
+ "{9392EB58-D7BA-410B-B1F0-B2FAA6BC89A7}",
+ "{2ACAB2D5-E0CE-4027-BCA0-D78B2D7A6C66}",
+ "{86E216C3-43CE-481A-BCB2-BE5E62850635}",
+ "{FB631291-7923-4B91-9A57-7B18FDBB7A42}",
+ "{0A176EC9-E934-45B8-B87F-16C7F4C80039}",
+ "{DF55CA80-46E8-4C53-B65B-4990A23DD444}",
+ "{3A0F9895-55D2-4710-BE5E-AD7498B5BF44}",
+ "{294BDC5A-F448-48B6-8110-DD0A81820F8C}",
+ "{4B9F66E9-FAC9-47AB-B1EF-C16756FBFD06}",
+ "{72EA49C6-2806-48BD-B81B-D4905102E19C}",
+ "{5728EB7E-8929-486C-8CD5-3238D060E768}"
+);
+
+sub generate {
+ my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ my @libs = @{$build_structure{"LIBS"}};
+ foreach (@libs) {
+ createLibProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure);
+ }
+
+ my @apps = @{$build_structure{"APPS"}};
+ foreach (@apps) {
+ createAppProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure);
+ }
+
+ createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
+ return 0;
+}
+
+sub createLibProject {
+ my ($libname, $git_dir, $out_dir, $rel_dir, $build_structure) = @_;
+ print "Generate $libname vcproj lib project\n";
+ $rel_dir = "..\\$rel_dir";
+ $rel_dir =~ s/\//\\/g;
+
+ my $target = $libname;
+ $target =~ s/\//_/g;
+ $target =~ s/\.a//;
+
+ my $uuid = $GUIDS[$guid_index];
+ $$build_structure{"LIBS_${target}_GUID"} = $uuid;
+ $guid_index += 1;
+
+ my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"LIBS_${libname}_SOURCES"}}));
+ my @sources;
+ foreach (@srcs) {
+ $_ =~ s/\//\\/g;
+ push(@sources, $_);
+ }
+ my $defines = join(",", sort(@{$$build_structure{"LIBS_${libname}_DEFINES"}}));
+ my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"LIBS_${libname}_INCLUDES"}})));
+ my $cflags = join(" ", sort(@{$$build_structure{"LIBS_${libname}_CFLAGS"}}));
+ $cflags =~ s/\"/&quot;/g;
+
+ my $cflags_debug = $cflags;
+ $cflags_debug =~ s/-MT/-MTd/;
+ $cflags_debug =~ s/-O.//;
+
+ my $cflags_release = $cflags;
+ $cflags_release =~ s/-MTd/-MT/;
+
+ my @tmp = @{$$build_structure{"LIBS_${libname}_LFLAGS"}};
+ my @tmp2 = ();
+ foreach (@tmp) {
+ if (/^-LTCG/) {
+ } elsif (/^-L/) {
+ $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+ }
+ push(@tmp2, $_);
+ }
+ my $lflags = join(" ", sort(@tmp));
+
+ $defines =~ s/-D//g;
+ $defines =~ s/\"/\\&quot;/g;
+ $defines =~ s/\'//g;
+ $includes =~ s/-I//g;
+ mkdir "$target" || die "Could not create the directory $target for lib project!\n";
+ open F, ">$target/$target.vcproj" || die "Could not open $target/$target.pro for writing!\n";
+ binmode F, ":crlf";
+ print F << "EOM";
+<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9,00"
+ Name="$target"
+ ProjectGUID="$uuid">
+ <Platforms>
+ <Platform
+ Name="Win32"/>
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$rel_dir"
+ ConfigurationType="4"
+ CharacterSet="0"
+ IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ AdditionalOptions="$cflags_debug"
+ Optimization="0"
+ InlineFunctionExpansion="1"
+ AdditionalIncludeDirectories="$includes"
+ PreprocessorDefinitions="WIN32,_DEBUG,$defines"
+ MinimalRebuild="true"
+ RuntimeLibrary="1"
+ UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+ WarningLevel="3"
+ DebugInformationFormat="3"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLibrarianTool"
+ SuppressStartupBanner="true"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$rel_dir"
+ ConfigurationType="4"
+ CharacterSet="0"
+ WholeProgramOptimization="1"
+ IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ AdditionalOptions="$cflags_release"
+ Optimization="2"
+ InlineFunctionExpansion="1"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories="$includes"
+ PreprocessorDefinitions="WIN32,NDEBUG,$defines"
+ RuntimeLibrary="0"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+ WarningLevel="3"
+ DebugInformationFormat="3"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLibrarianTool"
+ SuppressStartupBanner="true"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}">
+EOM
+ foreach(@sources) {
+ print F << "EOM";
+ <File
+ RelativePath="$_"/>
+EOM
+ }
+ print F << "EOM";
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
+EOM
+ close F;
+}
+
+sub createAppProject {
+ my ($appname, $git_dir, $out_dir, $rel_dir, $build_structure) = @_;
+ print "Generate $appname vcproj app project\n";
+ $rel_dir = "..\\$rel_dir";
+ $rel_dir =~ s/\//\\/g;
+
+ my $target = $appname;
+ $target =~ s/\//_/g;
+ $target =~ s/\.exe//;
+
+ my $uuid = $GUIDS[$guid_index];
+ $$build_structure{"APPS_${target}_GUID"} = $uuid;
+ $guid_index += 1;
+
+ my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"APPS_${appname}_SOURCES"}}));
+ my @sources;
+ foreach (@srcs) {
+ $_ =~ s/\//\\/g;
+ push(@sources, $_);
+ }
+ my $defines = join(",", sort(@{$$build_structure{"APPS_${appname}_DEFINES"}}));
+ my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"APPS_${appname}_INCLUDES"}})));
+ my $cflags = join(" ", sort(@{$$build_structure{"APPS_${appname}_CFLAGS"}}));
+ $cflags =~ s/\"/&quot;/g;
+
+ my $cflags_debug = $cflags;
+ $cflags_debug =~ s/-MT/-MTd/;
+ $cflags_debug =~ s/-O.//;
+
+ my $cflags_release = $cflags;
+ $cflags_release =~ s/-MTd/-MT/;
+
+ my $libs;
+ foreach (sort(@{$$build_structure{"APPS_${appname}_LIBS"}})) {
+ $_ =~ s/\//_/g;
+ $libs .= " $_";
+ }
+ my @tmp = @{$$build_structure{"APPS_${appname}_LFLAGS"}};
+ my @tmp2 = ();
+ foreach (@tmp) {
+ if (/^-LTCG/) {
+ } elsif (/^-L/) {
+ $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+ }
+ push(@tmp2, $_);
+ }
+ my $lflags = join(" ", sort(@tmp)) . " -LIBPATH:$rel_dir";
+
+ $defines =~ s/-D//g;
+ $defines =~ s/\"/\\&quot;/g;
+ $defines =~ s/\'//g;
+ $defines =~ s/\\\\/\\/g;
+ $includes =~ s/-I//g;
+ mkdir "$target" || die "Could not create the directory $target for lib project!\n";
+ open F, ">$target/$target.vcproj" || die "Could not open $target/$target.pro for writing!\n";
+ binmode F, ":crlf";
+ print F << "EOM";
+<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9,00"
+ Name="$target"
+ ProjectGUID="$uuid">
+ <Platforms>
+ <Platform
+ Name="Win32"/>
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$rel_dir"
+ ConfigurationType="1"
+ CharacterSet="0"
+ IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ AdditionalOptions="$cflags_debug"
+ Optimization="0"
+ InlineFunctionExpansion="1"
+ AdditionalIncludeDirectories="$includes"
+ PreprocessorDefinitions="WIN32,_DEBUG,$defines"
+ MinimalRebuild="true"
+ RuntimeLibrary="1"
+ UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+ WarningLevel="3"
+ DebugInformationFormat="3"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$libs"
+ AdditionalOptions="$lflags"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="1"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$rel_dir"
+ ConfigurationType="1"
+ CharacterSet="0"
+ WholeProgramOptimization="1"
+ IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ AdditionalOptions="$cflags_release"
+ Optimization="2"
+ InlineFunctionExpansion="1"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories="$includes"
+ PreprocessorDefinitions="WIN32,NDEBUG,$defines"
+ RuntimeLibrary="0"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+ WarningLevel="3"
+ DebugInformationFormat="3"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$libs"
+ AdditionalOptions="$lflags"
+ LinkIncremental="1"
+ GenerateDebugInformation="true"
+ SubSystem="1"
+ TargetMachine="1"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}">
+EOM
+ foreach(@sources) {
+ print F << "EOM";
+ <File
+ RelativePath="$_"/>
+EOM
+ }
+ print F << "EOM";
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
+EOM
+ close F;
+}
+
+sub createGlueProject {
+ my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ print "Generate solutions file\n";
+ $rel_dir = "..\\$rel_dir";
+ $rel_dir =~ s/\//\\/g;
+ my $SLN_HEAD = "Microsoft Visual Studio Solution File, Format Version 10.00\n# Visual Studio 2008\n";
+ my $SLN_PRE = "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = ";
+ my $SLN_POST = "\nEndProject\n";
+
+ my @libs = @{$build_structure{"LIBS"}};
+ my @tmp;
+ foreach (@libs) {
+ $_ =~ s/\//_/g;
+ $_ =~ s/\.a//;
+ push(@tmp, $_);
+ }
+ @libs = @tmp;
+
+ my @apps = @{$build_structure{"APPS"}};
+ @tmp = ();
+ foreach (@apps) {
+ $_ =~ s/\//_/g;
+ $_ =~ s/\.exe//;
+ push(@tmp, $_);
+ }
+ @apps = @tmp;
+
+ open F, ">git.sln" || die "Could not open git.sln for writing!\n";
+ binmode F, ":crlf";
+ print F "$SLN_HEAD";
+ foreach (@libs) {
+ my $libname = $_;
+ my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+ print F "$SLN_PRE";
+ print F "\"${libname}\", \"${libname}\\${libname}.vcproj\", \"${uuid}\"";
+ print F "$SLN_POST";
+ }
+ my $uuid_libgit = $build_structure{"LIBS_libgit_GUID"};
+ my $uuid_xdiff_lib = $build_structure{"LIBS_xdiff_lib_GUID"};
+ foreach (@apps) {
+ my $appname = $_;
+ my $uuid = $build_structure{"APPS_${appname}_GUID"};
+ print F "$SLN_PRE";
+ print F "\"${appname}\", \"${appname}\\${appname}.vcproj\", \"${uuid}\"\n";
+ print F " ProjectSection(ProjectDependencies) = postProject\n";
+ print F " ${uuid_libgit} = ${uuid_libgit}\n";
+ print F " ${uuid_xdiff_lib} = ${uuid_xdiff_lib}\n";
+ print F " EndProjectSection";
+ print F "$SLN_POST";
+ }
+
+ print F << "EOM";
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Win32 = Debug|Win32
+ Release|Win32 = Release|Win32
+ EndGlobalSection
+EOM
+ print F << "EOM";
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+EOM
+ foreach (@libs) {
+ my $libname = $_;
+ my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+ print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
+ print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
+ print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
+ print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n";
+ }
+ foreach (@apps) {
+ my $appname = $_;
+ my $uuid = $build_structure{"APPS_${appname}_GUID"};
+ print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
+ print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
+ print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
+ print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n";
+ }
+
+ print F << "EOM";
+ EndGlobalSection
+EndGlobal
+EOM
+ close F;
+}
+
+1;
diff --git a/contrib/buildsystems/engine.pl b/contrib/buildsystems/engine.pl
new file mode 100644
index 0000000..23da787
--- /dev/null
+++ b/contrib/buildsystems/engine.pl
@@ -0,0 +1,359 @@
+#!/usr/bin/perl -w
+######################################################################
+# Do not call this script directly!
+#
+# The generate script ensures that @INC is correct before the engine
+# is executed.
+#
+# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
+######################################################################
+use strict;
+use File::Basename;
+use File::Spec;
+use Cwd;
+use Generators;
+
+my (%build_structure, %compile_options, @makedry);
+my $out_dir = getcwd();
+my $git_dir = $out_dir;
+$git_dir =~ s=\\=/=g;
+$git_dir = dirname($git_dir) while (!-e "$git_dir/git.c" && "$git_dir" ne "");
+die "Couldn't find Git repo" if ("$git_dir" eq "");
+
+my @gens = Generators::available();
+my $gen = "Vcproj";
+
+sub showUsage
+{
+ my $genlist = join(', ', @gens);
+ print << "EOM";
+generate usage:
+ -g <GENERATOR> --gen <GENERATOR> Specify the buildsystem generator (default: $gen)
+ Available: $genlist
+ -o <PATH> --out <PATH> Specify the output directory for generation (default: .)
+ -i <FILE> --in <FILE> Specify input file, instead of running GNU Make
+ -h,-? --help This help
+EOM
+ exit 0;
+}
+
+# Parse command-line options
+while (@ARGV) {
+ my $arg = shift @ARGV;
+ if ("$arg" eq "-h" || "$arg" eq "--help" || "$arg" eq "-?") {
+ showUsage();
+ exit(0);
+ } elsif("$arg" eq "--out" || "$arg" eq "-o") {
+ $out_dir = shift @ARGV;
+ } elsif("$arg" eq "--gen" || "$arg" eq "-g") {
+ $gen = shift @ARGV;
+ } elsif("$arg" eq "--in" || "$arg" eq "-i") {
+ my $infile = shift @ARGV;
+ open(F, "<$infile") || die "Couldn't open file $infile";
+ @makedry = <F>;
+ close(F);
+ }
+}
+
+# NOT using File::Spec->rel2abs($path, $base) here, as
+# it fails badly for me in the msysgit environment
+$git_dir = File::Spec->rel2abs($git_dir);
+$out_dir = File::Spec->rel2abs($out_dir);
+my $rel_dir = makeOutRel2Git($git_dir, $out_dir);
+
+# Print some information so the user feels informed
+print << "EOM";
+-----
+Generator: $gen
+Git dir: $git_dir
+Out dir: $out_dir
+-----
+Running GNU Make to figure out build structure...
+EOM
+
+# Pipe a make --dry-run into a variable, if not already loaded from file
+@makedry = `cd $git_dir && make -n MSVC=1 V=1 2>/dev/null` if !@makedry;
+
+# Parse the make output into usable info
+parseMakeOutput();
+
+# Finally, ask the generator to start generating..
+Generators::generate($gen, $git_dir, $out_dir, $rel_dir, %build_structure);
+
+# main flow ends here
+# -------------------------------------------------------------------------------------------------
+
+
+# 1) path: /foo/bar/baz 2) path: /foo/bar/baz 3) path: /foo/bar/baz
+# base: /foo/bar/baz/temp base: /foo/bar base: /tmp
+# rel: .. rel: baz rel: ../foo/bar/baz
+sub makeOutRel2Git
+{
+ my ($path, $base) = @_;
+ my $rel;
+ if ("$path" eq "$base") {
+ return ".";
+ } elsif ($base =~ /^$path/) {
+ # case 1
+ my $tmp = $base;
+ $tmp =~ s/^$path//;
+ foreach (split('/', $tmp)) {
+ $rel .= "../" if ("$_" ne "");
+ }
+ } elsif ($path =~ /^$base/) {
+ # case 2
+ $rel = $path;
+ $rel =~ s/^$base//;
+ $rel = "./$rel";
+ } else {
+ my $tmp = $base;
+ foreach (split('/', $tmp)) {
+ $rel .= "../" if ("$_" ne "");
+ }
+ $rel .= $path;
+ }
+ $rel =~ s/\/\//\//g; # simplify
+ $rel =~ s/\/$//; # don't end with /
+ return $rel;
+}
+
+sub parseMakeOutput
+{
+ print "Parsing GNU Make output to figure out build structure...\n";
+ my $line = 0;
+ while (my $text = shift @makedry) {
+ my $ate_next;
+ do {
+ $ate_next = 0;
+ $line++;
+ chomp $text;
+ chop $text if ($text =~ /\r$/);
+ if ($text =~ /\\$/) {
+ $text =~ s/\\$//;
+ $text .= shift @makedry;
+ $ate_next = 1;
+ }
+ } while($ate_next);
+
+ if ($text =~ /^test /) {
+ # options to test (eg -o) may be mistaken for linker options
+ next;
+ }
+
+ if($text =~ / -c /) {
+ # compilation
+ handleCompileLine($text, $line);
+
+ } elsif ($text =~ / -o /) {
+ # linking executable
+ handleLinkLine($text, $line);
+
+ } elsif ($text =~ /\.o / && $text =~ /\.a /) {
+ # libifying
+ handleLibLine($text, $line);
+#
+# } elsif ($text =~ /^cp /) {
+# # copy file around
+#
+# } elsif ($text =~ /^rm -f /) {
+# # shell command
+#
+# } elsif ($text =~ /^make[ \[]/) {
+# # make output
+#
+# } elsif ($text =~ /^echo /) {
+# # echo to file
+#
+# } elsif ($text =~ /^if /) {
+# # shell conditional
+#
+# } elsif ($text =~ /^tclsh /) {
+# # translation stuff
+#
+# } elsif ($text =~ /^umask /) {
+# # handling boilerplates
+#
+# } elsif ($text =~ /\$\(\:\)/) {
+# # ignore
+#
+# } elsif ($text =~ /^FLAGS=/) {
+# # flags check for dependencies
+#
+# } elsif ($text =~ /^'\/usr\/bin\/perl' -MError -e/) {
+# # perl commands for copying files
+#
+# } elsif ($text =~ /generate-cmdlist\.sh/) {
+# # command for generating list of commands
+#
+# } elsif ($text =~ /new locations or Tcl/) {
+# # command for detecting Tcl/Tk changes
+#
+# } elsif ($text =~ /mkdir -p/) {
+# # command creating path
+#
+# } elsif ($text =~ /: no custom templates yet/) {
+# # whatever
+#
+# } else {
+# print "Unhandled (line: $line): $text\n";
+ }
+ }
+
+# use Data::Dumper;
+# print "Parsed build structure:\n";
+# print Dumper(%build_structure);
+}
+
+# variables for the compilation part of each step
+my (@defines, @incpaths, @cflags, @sources);
+
+sub clearCompileStep
+{
+ @defines = ();
+ @incpaths = ();
+ @cflags = ();
+ @sources = ();
+}
+
+sub removeDuplicates
+{
+ my (%dupHash, $entry);
+ %dupHash = map { $_, 1 } @defines;
+ @defines = keys %dupHash;
+
+ %dupHash = map { $_, 1 } @incpaths;
+ @incpaths = keys %dupHash;
+
+ %dupHash = map { $_, 1 } @cflags;
+ @cflags = keys %dupHash;
+}
+
+sub handleCompileLine
+{
+ my ($line, $lineno) = @_;
+ my @parts = split(' ', $line);
+ my $sourcefile;
+ shift(@parts); # ignore cmd
+ while (my $part = shift @parts) {
+ if ("$part" eq "-o") {
+ # ignore object file
+ shift @parts;
+ } elsif ("$part" eq "-c") {
+ # ignore compile flag
+ } elsif ("$part" eq "-c") {
+ } elsif ($part =~ /^.?-I/) {
+ push(@incpaths, $part);
+ } elsif ($part =~ /^.?-D/) {
+ push(@defines, $part);
+ } elsif ($part =~ /^-/) {
+ push(@cflags, $part);
+ } elsif ($part =~ /\.(c|cc|cpp)$/) {
+ $sourcefile = $part;
+ } else {
+ die "Unhandled compiler option @ line $lineno: $part";
+ }
+ }
+ @{$compile_options{"${sourcefile}_CFLAGS"}} = @cflags;
+ @{$compile_options{"${sourcefile}_DEFINES"}} = @defines;
+ @{$compile_options{"${sourcefile}_INCPATHS"}} = @incpaths;
+ clearCompileStep();
+}
+
+sub handleLibLine
+{
+ my ($line, $lineno) = @_;
+ my (@objfiles, @lflags, $libout, $part);
+ # kill cmd and rm 'prefix'
+ $line =~ s/^rm -f .* && .* rcs //;
+ my @parts = split(' ', $line);
+ while ($part = shift @parts) {
+ if ($part =~ /^-/) {
+ push(@lflags, $part);
+ } elsif ($part =~ /\.(o|obj)$/) {
+ push(@objfiles, $part);
+ } elsif ($part =~ /\.(a|lib)$/) {
+ $libout = $part;
+ $libout =~ s/\.a$//;
+ } else {
+ die "Unhandled lib option @ line $lineno: $part";
+ }
+ }
+# print "LibOut: '$libout'\nLFlags: @lflags\nOfiles: @objfiles\n";
+# exit(1);
+ foreach (@objfiles) {
+ my $sourcefile = $_;
+ $sourcefile =~ s/\.o/.c/;
+ push(@sources, $sourcefile);
+ push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
+ push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
+ push(@incpaths, @{$compile_options{"${sourcefile}_INCPATHS"}});
+ }
+ removeDuplicates();
+
+ push(@{$build_structure{"LIBS"}}, $libout);
+ @{$build_structure{"LIBS_${libout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_SOURCES",
+ "_OBJECTS");
+ @{$build_structure{"LIBS_${libout}_DEFINES"}} = @defines;
+ @{$build_structure{"LIBS_${libout}_INCLUDES"}} = @incpaths;
+ @{$build_structure{"LIBS_${libout}_CFLAGS"}} = @cflags;
+ @{$build_structure{"LIBS_${libout}_LFLAGS"}} = @lflags;
+ @{$build_structure{"LIBS_${libout}_SOURCES"}} = @sources;
+ @{$build_structure{"LIBS_${libout}_OBJECTS"}} = @objfiles;
+ clearCompileStep();
+}
+
+sub handleLinkLine
+{
+ my ($line, $lineno) = @_;
+ my (@objfiles, @lflags, @libs, $appout, $part);
+ my @parts = split(' ', $line);
+ shift(@parts); # ignore cmd
+ while ($part = shift @parts) {
+ if ($part =~ /^-IGNORE/) {
+ push(@lflags, $part);
+ } elsif ($part =~ /^-[GRIMDO]/) {
+ # eat compiler flags
+ } elsif ("$part" eq "-o") {
+ $appout = shift @parts;
+ } elsif ("$part" eq "-lz") {
+ push(@libs, "zlib.lib");
+ } elsif ("$part" eq "-lcrypto") {
+ push(@libs, "libeay32.lib");
+ } elsif ("$part" eq "-lssl") {
+ push(@libs, "ssleay32.lib");
+ } elsif ($part =~ /^-/) {
+ push(@lflags, $part);
+ } elsif ($part =~ /\.(a|lib)$/) {
+ $part =~ s/\.a$/.lib/;
+ push(@libs, $part);
+ } elsif ($part =~ /\.(o|obj)$/) {
+ push(@objfiles, $part);
+ } else {
+ die "Unhandled lib option @ line $lineno: $part";
+ }
+ }
+# print "AppOut: '$appout'\nLFlags: @lflags\nLibs : @libs\nOfiles: @objfiles\n";
+# exit(1);
+ foreach (@objfiles) {
+ my $sourcefile = $_;
+ $sourcefile =~ s/\.o/.c/;
+ push(@sources, $sourcefile);
+ push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
+ push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
+ push(@incpaths, @{$compile_options{"${sourcefile}_INCPATHS"}});
+ }
+ removeDuplicates();
+
+ removeDuplicates();
+ push(@{$build_structure{"APPS"}}, $appout);
+ @{$build_structure{"APPS_${appout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_LFLAGS",
+ "_SOURCES", "_OBJECTS", "_LIBS");
+ @{$build_structure{"APPS_${appout}_DEFINES"}} = @defines;
+ @{$build_structure{"APPS_${appout}_INCLUDES"}} = @incpaths;
+ @{$build_structure{"APPS_${appout}_CFLAGS"}} = @cflags;
+ @{$build_structure{"APPS_${appout}_LFLAGS"}} = @lflags;
+ @{$build_structure{"APPS_${appout}_SOURCES"}} = @sources;
+ @{$build_structure{"APPS_${appout}_OBJECTS"}} = @objfiles;
+ @{$build_structure{"APPS_${appout}_LIBS"}} = @libs;
+ clearCompileStep();
+}
diff --git a/contrib/buildsystems/generate b/contrib/buildsystems/generate
new file mode 100644
index 0000000..bc10f25
--- /dev/null
+++ b/contrib/buildsystems/generate
@@ -0,0 +1,29 @@
+#!/usr/bin/perl -w
+######################################################################
+# Generate buildsystem files
+#
+# This script generates buildsystem files based on the output of a
+# GNU Make --dry-run, enabling Windows users to develop Git in their
+# trusted IDE with native projects.
+#
+# Note:
+# It is not meant as *the* way of building Git with MSVC, but merely a
+# convenience. The correct way of building Git with MSVC is to use the
+# GNU Make tool to build with the maintained Makefile in the root of
+# the project. If you have the msysgit environment installed and
+# available in your current console, together with the Visual Studio
+# environment you wish to build for, all you have to do is run the
+# command:
+# make MSVC=1
+#
+# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
+######################################################################
+use strict;
+use File::Basename;
+use Cwd;
+
+my $git_dir = getcwd();
+$git_dir =~ s=\\=/=g;
+$git_dir = dirname($git_dir) while (!-e "$git_dir/git.c" && "$git_dir" ne "");
+die "Couldn't find Git repo" if ("$git_dir" eq "");
+exec join(" ", ("PERL5LIB=${git_dir}/contrib/buildsystems ${git_dir}/contrib/buildsystems/engine.pl", @ARGV));
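A minimal sketch of how the wrapper above might be used from an msysgit shell, based on the header note and the options documented in engine.pl; the "projects" directory and the /tmp path are placeholders:

    # From the top of the Git work tree.  The supported build is still
    # plain GNU Make:
    make MSVC=1

    # To generate Visual Studio projects instead, run the wrapper from a
    # directory inside the work tree (it locates git.c by walking upward):
    mkdir -p projects
    cd projects
    ../contrib/buildsystems/generate --gen Vcproj

    # Alternatively, capture the dry-run listing once and replay it with
    # --in, so the engine does not have to invoke GNU Make itself:
    (cd .. && make -n MSVC=1 V=1 2>/dev/null) > /tmp/msvc-dryrun.txt
    ../contrib/buildsystems/generate --gen Vcproj --in /tmp/msvc-dryrun.txt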
diff --git a/contrib/buildsystems/parse.pl b/contrib/buildsystems/parse.pl
new file mode 100644
index 0000000..c9656ec
--- /dev/null
+++ b/contrib/buildsystems/parse.pl
@@ -0,0 +1,228 @@
+#!/usr/bin/perl -w
+######################################################################
+# Do not call this script directly!
+#
+# The generate script ensures that @INC is correct before the engine
+# is executed.
+#
+# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
+######################################################################
+use strict;
+use File::Basename;
+use Cwd;
+
+my $file = $ARGV[0];
+die "No file provided!" if !defined $file;
+
+my ($cflags, $target, $type, $line);
+
+open(F, "<$file") || die "Couldn't open file $file";
+my @data = <F>;
+close(F);
+
+while (my $text = shift @data) {
+ my $ate_next;
+ do {
+ $ate_next = 0;
+ $line++;
+ chomp $text;
+ chop $text if ($text =~ /\r$/);
+ if ($text =~ /\\$/) {
+ $text =~ s/\\$//;
+ $text .= shift @data;
+ $ate_next = 1;
+ }
+ } while($ate_next);
+
+ if($text =~ / -c /) {
+ # compilation
+ handleCompileLine($text, $line);
+
+ } elsif ($text =~ / -o /) {
+ # linking executable
+ handleLinkLine($text, $line);
+
+ } elsif ($text =~ /\.o / && $text =~ /\.a /) {
+ # libifying
+ handleLibLine($text, $line);
+
+# } elsif ($text =~ /^cp /) {
+# # copy file around
+#
+# } elsif ($text =~ /^rm -f /) {
+# # shell command
+#
+# } elsif ($text =~ /^make[ \[]/) {
+# # make output
+#
+# } elsif ($text =~ /^echo /) {
+# # echo to file
+#
+# } elsif ($text =~ /^if /) {
+# # shell conditional
+#
+# } elsif ($text =~ /^tclsh /) {
+# # translation stuff
+#
+# } elsif ($text =~ /^umask /) {
+# # handling boilerplates
+#
+# } elsif ($text =~ /\$\(\:\)/) {
+# # ignore
+#
+# } elsif ($text =~ /^FLAGS=/) {
+# # flags check for dependencies
+#
+# } elsif ($text =~ /^'\/usr\/bin\/perl' -MError -e/) {
+# # perl commands for copying files
+#
+# } elsif ($text =~ /generate-cmdlist\.sh/) {
+# # command for generating list of commands
+#
+# } elsif ($text =~ /^test / && $text =~ /|| rm -f /) {
+# # commands removing executables, if they exist
+#
+# } elsif ($text =~ /new locations or Tcl/) {
+# # command for detecting Tcl/Tk changes
+#
+# } elsif ($text =~ /mkdir -p/) {
+# # command creating path
+#
+# } elsif ($text =~ /: no custom templates yet/) {
+# # whatever
+
+ } else {
+# print "Unhandled (line: $line): $text\n";
+ }
+}
+close(F);
+
+# use Data::Dumper;
+# print "Parsed build structure:\n";
+# print Dumper(%build_structure);
+
+# -------------------------------------------------------------------
+# Functions under here
+# -------------------------------------------------------------------
+my (%build_structure, @defines, @incpaths, @cflags, @sources);
+
+sub clearCompileStep
+{
+ @defines = ();
+ @incpaths = ();
+ @cflags = ();
+ @sources = ();
+}
+
+sub removeDuplicates
+{
+ my (%dupHash, $entry);
+ %dupHash = map { $_, 1 } @defines;
+ @defines = keys %dupHash;
+
+ %dupHash = map { $_, 1 } @incpaths;
+ @incpaths = keys %dupHash;
+
+ %dupHash = map { $_, 1 } @cflags;
+ @cflags = keys %dupHash;
+
+ %dupHash = map { $_, 1 } @sources;
+ @sources = keys %dupHash;
+}
+
+sub handleCompileLine
+{
+ my ($line, $lineno) = @_;
+ my @parts = split(' ', $line);
+ shift(@parts); # ignore cmd
+ while (my $part = shift @parts) {
+ if ("$part" eq "-o") {
+ # ignore object file
+ shift @parts;
+ } elsif ("$part" eq "-c") {
+ # ignore compile flag
+ } elsif ("$part" eq "-c") {
+ } elsif ($part =~ /^.?-I/) {
+ push(@incpaths, $part);
+ } elsif ($part =~ /^.?-D/) {
+ push(@defines, $part);
+ } elsif ($part =~ /^-/) {
+ push(@cflags, $part);
+ } elsif ($part =~ /\.(c|cc|cpp)$/) {
+ push(@sources, $part);
+ } else {
+ die "Unhandled compiler option @ line $lineno: $part";
+ }
+ }
+ #print "Sources: @sources\nCFlags: @cflags\nDefine: @defines\nIncpat: @incpaths\n";
+ #exit(1);
+}
+
+sub handleLibLine
+{
+ my ($line, $lineno) = @_;
+ my (@objfiles, @lflags, $libout, $part);
+ # kill cmd and rm 'prefix'
+ $line =~ s/^rm -f .* && .* rcs //;
+ my @parts = split(' ', $line);
+ while ($part = shift @parts) {
+ if ($part =~ /^-/) {
+ push(@lflags, $part);
+ } elsif ($part =~ /\.(o|obj)$/) {
+ push(@objfiles, $part);
+ } elsif ($part =~ /\.(a|lib)$/) {
+ $libout = $part;
+ } else {
+ die "Unhandled lib option @ line $lineno: $part";
+ }
+ }
+ #print "LibOut: '$libout'\nLFlags: @lflags\nOfiles: @objfiles\n";
+ #exit(1);
+ removeDuplicates();
+ push(@{$build_structure{"LIBS"}}, $libout);
+ @{$build_structure{"LIBS_${libout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_SOURCES",
+ "_OBJECTS");
+ @{$build_structure{"LIBS_${libout}_DEFINES"}} = @defines;
+ @{$build_structure{"LIBS_${libout}_INCLUDES"}} = @incpaths;
+ @{$build_structure{"LIBS_${libout}_CFLAGS"}} = @cflags;
+ @{$build_structure{"LIBS_${libout}_SOURCES"}} = @sources;
+ @{$build_structure{"LIBS_${libout}_OBJECTS"}} = @objfiles;
+ clearCompileStep();
+}
+
+sub handleLinkLine
+{
+ my ($line, $lineno) = @_;
+ my (@objfiles, @lflags, @libs, $appout, $part);
+ my @parts = split(' ', $line);
+ shift(@parts); # ignore cmd
+ while ($part = shift @parts) {
+ if ($part =~ /^-[GRIDO]/) {
+ # eat compiler flags
+ } elsif ("$part" eq "-o") {
+ $appout = shift @parts;
+ } elsif ($part =~ /^-/) {
+ push(@lflags, $part);
+ } elsif ($part =~ /\.(a|lib)$/) {
+ push(@libs, $part);
+ } elsif ($part =~ /\.(o|obj)$/) {
+ push(@objfiles, $part);
+ } else {
+ die "Unhandled lib option @ line $lineno: $part";
+ }
+ }
+ #print "AppOut: '$appout'\nLFlags: @lflags\nLibs : @libs\nOfiles: @objfiles\n";
+ #exit(1);
+ removeDuplicates();
+ push(@{$build_structure{"APPS"}}, $appout);
+ @{$build_structure{"APPS_${appout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_LFLAGS",
+ "_SOURCES", "_OBJECTS", "_LIBS");
+ @{$build_structure{"APPS_${appout}_DEFINES"}} = @defines;
+ @{$build_structure{"APPS_${appout}_INCLUDES"}} = @incpaths;
+ @{$build_structure{"APPS_${appout}_CFLAGS"}} = @cflags;
+ @{$build_structure{"APPS_${appout}_LFLAGS"}} = @lflags;
+ @{$build_structure{"APPS_${appout}_SOURCES"}} = @sources;
+ @{$build_structure{"APPS_${appout}_OBJECTS"}} = @objfiles;
+ @{$build_structure{"APPS_${appout}_LIBS"}} = @libs;
+ clearCompileStep();
+}
diff --git a/contrib/ciabot/INSTALL b/contrib/ciabot/INSTALL
new file mode 100644
index 0000000..7222961
--- /dev/null
+++ b/contrib/ciabot/INSTALL
@@ -0,0 +1,54 @@
+= Installation instructions =
+
+Two scripts are included. The Python one (ciabot.py) is faster and
+more capable; the shell one (ciabot.sh) is a fallback in case Python
+gives your git hosting site indigestion. (I know of no such sites.)
+
+It is no longer necessary to modify the script in order to put it
+in place; in fact, this is now discouraged. It is entirely
+configurable with the following git config variables:
+
+ciabot.project = name of the project
+ciabot.repo = name of the project repo for gitweb/cgit purposes
+ciabot.xmlrpc = if true, ship notifications via XML-RPC
+ciabot.revformat = format in which the revision is shown
+
+The revformat variable may have the following values
+raw -> full hex ID of commit
+short -> first 12 chars of hex ID
+describe -> describe relative to last tag, falling back to short
+
+ciabot.project defaults to the directory name of the repository toplevel.
+ciabot.repo defaults to ciabot.project lowercased.
+ciabot.xmlrpc defaults to True
+ciabot.revformat defaults to 'describe'.
+
+This means that in the normal case you need not do any configuration at all;
+however, setting ciabot.project will allow the hook to run slightly faster.
+
+Once you've set these variables, try your script with -n to see the
+notification message dumped to stdout and verify that it looks sane.
+
+To live-test these scripts, your project needs to have been registered with
+the CIA site. Here are the steps:
+
+1. Open an IRC window on irc://freenode/commits or your registered
+ project IRC channel.
+
+2. Run ciabot.py and/or ciabot.sh from any directory under git
+ control.
+
+You should see a notification on the channel for your most recent commit.
+
+After verifying correct function, install one of these scripts either
+in a post-commit hook or in an update hook.
+
+In post-commit, run it without arguments. It will query for
+current HEAD and the latest commit ID to get the information it
+needs.
+
+In update, call it with a refname followed by a list of commits:
+You want to reverse the order git rev-list emits because it lists
+from most recent to oldest.
+
+/path/to/ciabot.py ${refname} $(git rev-list ${oldhead}..${newhead} | tac)
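The configuration and post-commit installation described above can be summarized in a short sketch; the project name and the script path are placeholders, and the variables and their defaults are the ones listed earlier:

    # Optional configuration; every variable has a sensible default
    git config ciabot.project myproject
    git config ciabot.revformat describe

    # Preview the notification without sending anything
    /path/to/ciabot.py -n

    # post-commit installation: run the script with no arguments
    cat > .git/hooks/post-commit <<'EOF'
    #!/bin/sh
    /path/to/ciabot.py
    EOF
    chmod +x .git/hooks/post-commit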
diff --git a/contrib/ciabot/README b/contrib/ciabot/README
new file mode 100644
index 0000000..2dfe1f9
--- /dev/null
+++ b/contrib/ciabot/README
@@ -0,0 +1,11 @@
+These are hook scripts for the CIA notification service at <http://cia.vc/>
+
+They are maintained by Eric S. Raymond <esr@thyrsus.com>. There is an
+upstream resource page for them at <http://www.catb.org/esr/ciabot/>,
+but they are unlikely to change rapidly.
+
+You probably want the Python version; it's faster, more capable, and
+better documented. The shell version is maintained only as a fallback
+for use on hosting sites that don't permit Python hook scripts.
+
+See the file INSTALL for installation instructions.
diff --git a/contrib/ciabot/ciabot.py b/contrib/ciabot/ciabot.py
new file mode 100755
index 0000000..36b5665
--- /dev/null
+++ b/contrib/ciabot/ciabot.py
@@ -0,0 +1,255 @@
+#!/usr/bin/env python
+# Copyright (c) 2010 Eric S. Raymond <esr@thyrsus.com>
+# Distributed under BSD terms.
+#
+# This script contains porcelain and porcelain byproducts.
+# It's Python because the Python standard libraries avoid portability/security
+# issues raised by callouts in the ancestral Perl and sh scripts. It should
+# be compatible back to Python 2.1.5
+#
+# usage: ciabot.py [-V] [-n] [-p projectname] [refname [commits...]]
+#
+# This script is meant to be run either in a post-commit hook or in an
+# update hook. Try it with -n to see the notification mail dumped to
+# stdout and verify that it looks sane. With -V it dumps its version
+# and exits.
+#
+# In post-commit, run it without arguments. It will query for
+# current HEAD and the latest commit ID to get the information it
+# needs.
+#
+# In update, call it with a refname followed by a list of commits:
+# You want to reverse the order git rev-list emits because it lists
+# from most recent to oldest.
+#
+# /path/to/ciabot.py ${refname} $(git rev-list ${oldhead}..${newhead} | tac)
+#
+# Configuration variables affecting this script:
+#
+# ciabot.project = name of the project
+# ciabot.repo = name of the project repo for gitweb/cgit purposes
+# ciabot.xmlrpc = if true (default), ship notifications via XML-RPC
+# ciabot.revformat = format in which the revision is shown
+#
+# ciabot.project defaults to the directory name of the repository toplevel.
+# ciabot.repo defaults to ciabot.project lowercased.
+#
+# This means that in the normal case you need not do any configuration at all,
+# but setting the project name will speed it up slightly.
+#
+# The revformat variable may have the following values
+# raw -> full hex ID of commit
+# short -> first 12 chars of hex ID
+# describe -> describe relative to last tag, falling back to short
+# The default is 'describe'.
+#
+# Note: the CIA project now says only XML-RPC is reliable, so
+# we default to that.
+#
+
+import sys
+if sys.hexversion < 0x02000000:
+ # The limiter is the xml.sax module
+ sys.stderr.write("ciabot.py: requires Python 2.0.0 or later.\n")
+ sys.exit(1)
+
+import os, commands, socket, urllib
+from xml.sax.saxutils import escape
+
+# Changeset URL prefix for your repo: when the commit ID is appended
+# to this, it should point at a CGI that will display the commit
+# through gitweb or something similar. The defaults will probably
+# work if you have a typical gitweb/cgit setup.
+#
+#urlprefix="http://%(host)s/cgi-bin/gitweb.cgi?p=%(repo)s;a=commit;h="
+urlprefix="http://%(host)s/cgi-bin/cgit.cgi/%(repo)s/commit/?id="
+
+# The service used to turn your gitwebbish URL into a tinyurl so it
+# will take up less space on the IRC notification line.
+tinyifier = "http://tinyurl.com/api-create.php?url="
+
+# The template used to generate the XML messages to CIA. You can make
+# visible changes to the IRC-bot notification lines by hacking this.
+# The default will produce a notification line that looks like this:
+#
+# ${project}: ${author} ${repo}:${branch} * ${rev} ${files}: ${logmsg} ${url}
+#
+# By omitting $files you can collapse the files part to a single slash.
+xml = '''\
+<message>
+ <generator>
+ <name>CIA Python client for Git</name>
+ <version>%(version)s</version>
+ <url>%(generator)s</url>
+ </generator>
+ <source>
+ <project>%(project)s</project>
+ <branch>%(repo)s:%(branch)s</branch>
+ </source>
+ <timestamp>%(ts)s</timestamp>
+ <body>
+ <commit>
+ <author>%(author)s</author>
+ <revision>%(rev)s</revision>
+ <files>
+ %(files)s
+ </files>
+ <log>%(logmsg)s %(url)s</log>
+ <url>%(url)s</url>
+ </commit>
+ </body>
+</message>
+'''
+
+#
+# No user-serviceable parts below this line:
+#
+
+# Where to ship e-mail notifications.
+toaddr = "cia@cia.vc"
+
+# Identify the generator script.
+# Should only change when the script itself gets a new home and maintainer.
+generator = "http://www.catb.org/~esr/ciabot.py"
+version = "3.6"
+
+def do(command):
+ return commands.getstatusoutput(command)[1]
+
+def report(refname, merged, xmlrpc=True):
+ "Generate a commit notification to be reported to CIA"
+
+ # Try to tinyfy a reference to a web view for this commit.
+ try:
+ url = open(urllib.urlretrieve(tinyifier + urlprefix + merged)[0]).read()
+ except:
+ url = urlprefix + merged
+
+ branch = os.path.basename(refname)
+
+ # Compute a description for the revision
+ if revformat == 'raw':
+ rev = merged
+ elif revformat == 'short':
+ rev = ''
+ else: # revformat == 'describe'
+ rev = do("git describe %s 2>/dev/null" % merged)
+ if not rev:
+ rev = merged[:12]
+
+ # Extract the meta-information for the commit
+ files=do("git diff-tree -r --name-only '"+ merged +"' | sed -e '1d' -e 's-.*-<file>&</file>-'")
+ metainfo = do("git log -1 '--pretty=format:%an <%ae>%n%at%n%s' " + merged)
+ (author, ts, logmsg) = metainfo.split("\n")
+ logmsg = escape(logmsg)
+
+ # This discards the part of the author's address after @.
+ # Might be nice to ship the full email address, if not
+ # for spammers' address harvesters - getting this wrong
+ # would make the freenode #commits channel into harvester heaven.
+ author = escape(author.replace("<", "").split("@")[0].split()[-1])
+
+ # This ignores the timezone. Not clear what to do with it...
+ ts = ts.strip().split()[0]
+
+ context = locals()
+ context.update(globals())
+
+ out = xml % context
+ mail = '''\
+Message-ID: <%(merged)s.%(author)s@%(project)s>
+From: %(fromaddr)s
+To: %(toaddr)s
+Content-type: text/xml
+Subject: DeliverXML
+
+%(out)s''' % locals()
+
+ if xmlrpc:
+ return out
+ else:
+ return mail
+
+if __name__ == "__main__":
+ import getopt
+
+ # Get all config variables
+ revformat = do("git config --get ciabot.revformat")
+ project = do("git config --get ciabot.project")
+ repo = do("git config --get ciabot.repo")
+ xmlrpc = do("git config --get ciabot.xmlrpc")
+ xmlrpc = not (xmlrpc and xmlrpc == "false")
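+ # (These are plain git-config keys; for illustration, a repository
+ # owner might set them with commands such as
+ #   git config ciabot.project HelloWorld
+ #   git config ciabot.xmlrpc false
+ # -- the values above are placeholders, not defaults.)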
+
+ host = socket.getfqdn()
+ fromaddr = "CIABOT-NOREPLY@" + host
+
+ try:
+ (options, arguments) = getopt.getopt(sys.argv[1:], "np:xV")
+ except getopt.GetoptError, msg:
+ print "ciabot.py: " + str(msg)
+ raise SystemExit, 1
+
+ notify = True
+ for (switch, val) in options:
+ if switch == '-p':
+ project = val
+ elif switch == '-n':
+ notify = False
+ elif switch == '-x':
+ xmlrpc = True
+ elif switch == '-V':
+ print "ciabot.py: version", version
+ sys.exit(0)
+
+ # The project variable defaults to the name of the repository toplevel.
+ if not project:
+ here = os.getcwd()
+ while True:
+ if os.path.exists(os.path.join(here, ".git")):
+ project = os.path.basename(here)
+ break
+ elif here == '/':
+ sys.stderr.write("ciabot.py: no .git below root!\n")
+ sys.exit(1)
+ here = os.path.dirname(here)
+
+ if not repo:
+ repo = project.lower()
+
+ urlprefix = urlprefix % globals()
+
+ # The script wants a reference to head followed by the list of
+ # commit IDs to report about.
+ if len(arguments) == 0:
+ refname = do("git symbolic-ref HEAD 2>/dev/null")
+ merges = [do("git rev-parse HEAD")]
+ else:
+ refname = arguments[0]
+ merges = arguments[1:]
+
+ if notify:
+ if xmlrpc:
+ import xmlrpclib
+ server = xmlrpclib.Server('http://cia.vc/RPC2')
+ else:
+ import smtplib
+ server = smtplib.SMTP('localhost')
+
+ for merged in merges:
+ message = report(refname, merged, xmlrpc)
+ if not notify:
+ print message
+ elif xmlrpc:
+ try:
+ # RPC server is flaky, this can fail due to timeout.
+ server.hub.deliver(message)
+ except socket.error, e:
+ sys.stderr.write("%s\n" % e)
+ else:
+ server.sendmail(fromaddr, [toaddr], message)
+
+ if notify:
+ if not xmlrpc:
+ server.quit()
+
+#End
diff --git a/contrib/ciabot/ciabot.sh b/contrib/ciabot/ciabot.sh
new file mode 100755
index 0000000..3fbbc53
--- /dev/null
+++ b/contrib/ciabot/ciabot.sh
@@ -0,0 +1,233 @@
+#!/bin/sh
+# Distributed under the terms of the GNU General Public License v2
+# Copyright (c) 2006 Fernando J. Pereda <ferdy@gentoo.org>
+# Copyright (c) 2008 Natanael Copa <natanael.copa@gmail.com>
+# Copyright (c) 2010 Eric S. Raymond <esr@thyrsus.com>
+# Assistance and review by Petr Baudis, author of ciabot.pl,
+# is gratefully acknowledged.
+#
+# This is a version 3.x of ciabot.sh; use -V to find the exact
+# version. Versions 1 and 2 were shipped in 2006 and 2008 and are not
+# version-stamped. The version 2 maintainer has passed the baton.
+#
+# Note: This script should be considered obsolete.
+# There is a faster, better-documented rewrite in Python: find it as ciabot.py
+# Use this only if your hosting site forbids Python hooks.
+# It requires: git(1), hostname(1), cut(1), sendmail(1), and wget(1).
+#
+# Originally based on Git ciabot.pl by Petr Baudis.
+# This script contains porcelain and porcelain byproducts.
+#
+# usage: ciabot.sh [-V] [-n] [-p projectname] [refname commit]
+#
+# This script is meant to be run either in a post-commit hook or in an
+# update hook. Try it with -n to see the notification mail dumped to
+# stdout and verify that it looks sane. With -V it dumps its version
+# and exits.
+#
+# In post-commit, run it without arguments. It will query for
+# current HEAD and the latest commit ID to get the information it
+# needs.
+#
+# In update, you have to call it once per merged commit:
+#
+# refname=$1
+# oldhead=$2
+# newhead=$3
+# for merged in $(git rev-list ${oldhead}..${newhead} | tac) ; do
+# /path/to/ciabot.sh ${refname} ${merged}
+# done
+#
+# The reason for the tac call is that git rev-list emits commits from
+# most recent to least - better to ship notifications from oldest to newest.
+#
+# Configuration variables affecting this script:
+#
+# ciabot.project = name of the project
+# ciabot.repo = name of the project repo for gitweb/cgit purposes
+# ciabot.revformat = format in which the revision is shown
+#
+# ciabot.project defaults to the directory name of the repository toplevel.
+# ciabot.repo defaults to ciabot.project lowercased.
+#
+# This means that in the normal case you need not do any configuration at all,
+# but setting the project name will speed it up slightly.
+#
+# The revformat variable may have the following values
+# raw -> full hex ID of commit
+# short -> first 12 chars of hex ID
+# describe -> describe relative to last tag, falling back to short
+# The default is 'describe'.
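+#
+# For illustration, a typical configuration might look like this (the
+# values below are placeholders, not defaults):
+#
+#   git config ciabot.project HelloWorld
+#   git config ciabot.repo helloworld
+#   git config ciabot.revformat describe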
+#
+# Note: the shell ancestors of this script used mail, not XML-RPC, in
+# order to avoid stalling until timeout when the CIA XML-RPC server is
+# down. It is unknown whether this is still an issue in 2010, but
+# XML-RPC would be annoying to do from sh in any case. (XML-RPC does
+# have the advantage that it guarantees notification of multiple commits
+# shipped from an update in their actual order.)
+#
+
+# The project as known to CIA. You can set this with a -p option,
+# or let it default to the directory name of the repo toplevel.
+project=$(git config --get ciabot.project)
+
+if [ -z "$project" ]
+then
+ here=`pwd`
+ while :; do
+ if [ -d "$here/.git" ]
+ then
+ project=`basename "$here"`
+ break
+ elif [ "$here" = '/' ]
+ then
+ echo "ciabot.sh: no .git below root!" >&2
+ exit 1
+ fi
+ here=`dirname "$here"`
+ done
+fi
+
+# Name of the repo for gitweb/cgit purposes
+repo=$(git config --get ciabot.repo)
+[ -z "$repo" ] && repo=$(echo "${project}" | tr '[A-Z]' '[a-z]')
+
+# What revision format do we want in the summary?
+revformat=$(git config --get ciabot.revformat)
+
+# Fully qualified domain name of the repo host. You can hardwire this
+# to make the script faster. The -f option works under Linux and FreeBSD,
+# but not OpenBSD and NetBSD. But under OpenBSD and NetBSD,
+# hostname without options gives the FQDN.
+if hostname -f >/dev/null 2>&1
+then
+ hostname=`hostname -f`
+else
+ hostname=`hostname`
+fi
+
+# Changeset URL prefix for your repo: when the commit ID is appended
+# to this, it should point at a CGI that will display the commit
+# through gitweb or something similar. The defaults will probably
+# work if you have a typical gitweb/cgit setup.
+#urlprefix="http://${hostname}/cgi-bin/gitweb.cgi?p=${repo};a=commit;h="
+urlprefix="http://${hostname}/cgi-bin/cgit.cgi/${repo}/commit/?id="
+
+#
+# You probably will not need to change the following:
+#
+
+# Identify the script. The 'generator' variable should change only
+# when the script itself gets a new home and maintainer.
+generator="http://www.catb.org/~esr/ciabot/ciabot.sh"
+version=3.5
+
+# Addresses for the e-mail
+from="CIABOT-NOREPLY@${hostname}"
+to="cia@cia.vc"
+
+# SMTP client to use - may need to edit the absolute pathname for your system
+sendmail="sendmail -t -f ${from}"
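+# (For example, on many systems this would be
+# sendmail="/usr/sbin/sendmail -t -f ${from}" -- adjust the path if yours
+# differs; /usr/sbin is also appended to PATH below as a fallback.)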
+
+#
+# No user-serviceable parts below this line:
+#
+
+# Should include all places sendmail is likely to lurk.
+PATH="$PATH:/usr/sbin/"
+
+mode=mailit
+while getopts "p:nV" opt
+do
+ case $opt in
+ p) project="$OPTARG" ;;
+ n) mode=dumpit ;;
+ V) echo "ciabot.sh: version $version"; exit 0 ;;
+ esac
+done
+shift $(($OPTIND - 1))
+
+# Cough and die if user has not specified a project
+if [ -z "$project" ]
+then
+ echo "ciabot.sh: no project specified, bailing out." >&2
+ exit 1
+fi
+
+if [ $# -eq 0 ] ; then
+ refname=$(git symbolic-ref HEAD 2>/dev/null)
+ merged=$(git rev-parse HEAD)
+else
+ refname=$1
+ merged=$2
+fi
+
+# This tries to turn your gitwebbish URL into a tinyurl so it will take up
+# less space on the IRC notification line. Some repo sites (I'm looking at
+# you, berlios.de!) forbid wget calls for security reasons. On these,
+# the code will fall back to the full un-tinyfied URL.
+longurl=${urlprefix}${merged}
+url=$(wget -O - -q http://tinyurl.com/api-create.php?url=${longurl} 2>/dev/null)
+if [ -z "$url" ]; then
+ url="${longurl}"
+fi
+
+refname=${refname##refs/heads/}
+
+case $revformat in
+raw) rev=$merged ;;
+short) rev='' ;;
+*) rev=$(git describe ${merged} 2>/dev/null) ;;
+esac
+[ -z ${rev} ] && rev=$(echo "$merged" | cut -c 1-12)
+
+# We discard the part of the author's address after @.
+# Might be nice to ship the full email address, if not
+# for spammers' address harvesters - getting this wrong
+# would make the freenode #commits channel into harvester heaven.
+author=$(git log -1 '--pretty=format:%an <%ae>' $merged)
+author=$(echo "$author" | sed -n -e '/^.*<\([^@]*\).*$/s--\1-p')
+
+logmessage=$(git log -1 '--pretty=format:%s' $merged)
+ts=$(git log -1 '--pretty=format:%at' $merged)
+files=$(git diff-tree -r --name-only ${merged} | sed -e '1d' -e 's-.*-<file>&</file>-')
+
+out="
+<message>
+ <generator>
+ <name>CIA Shell client for Git</name>
+ <version>${version}</version>
+ <url>${generator}</url>
+ </generator>
+ <source>
+ <project>${project}</project>
+ <branch>$repo:${refname}</branch>
+ </source>
+ <timestamp>${ts}</timestamp>
+ <body>
+ <commit>
+ <author>${author}</author>
+ <revision>${rev}</revision>
+ <files>
+ ${files}
+ </files>
+ <log>${logmessage} ${url}</log>
+ <url>${url}</url>
+ </commit>
+ </body>
+</message>"
+
+if [ "$mode" = "dumpit" ]
+then
+ sendmail=cat
+fi
+
+${sendmail} << EOM
+Message-ID: <${merged}.${author}@${project}>
+From: ${from}
+To: ${to}
+Content-type: text/xml
+Subject: DeliverXML
+${out}
+EOM
+
+# vim: set tw=70 :
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash
new file mode 100644
index 0000000..fd9a1d5
--- /dev/null
+++ b/contrib/completion/git-completion.bash
@@ -0,0 +1,2663 @@
+#!bash
+#
+# bash/zsh completion support for core Git.
+#
+# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
+# Conceptually based on gitcompletion (http://gitweb.hawaga.org.uk/).
+# Distributed under the GNU General Public License, version 2.0.
+#
+# The contained completion routines provide support for completing:
+#
+# *) local and remote branch names
+# *) local and remote tag names
+# *) .git/remotes file names
+# *) git 'subcommands'
+# *) tree paths within 'ref:path/to/file' expressions
+# *) file paths within current working directory and index
+# *) common --long-options
+#
+# To use these routines:
+#
+# 1) Copy this file to somewhere (e.g. ~/.git-completion.sh).
+# 2) Add the following line to your .bashrc/.zshrc:
+# source ~/.git-completion.sh
+# 3) Consider changing your PS1 to also show the current branch,
+# see git-prompt.sh for details.
+
+case "$COMP_WORDBREAKS" in
+*:*) : great ;;
+*) COMP_WORDBREAKS="$COMP_WORDBREAKS:"
+esac
+
+# __gitdir accepts 0 or 1 arguments (i.e., location)
+# returns location of .git repo
+__gitdir ()
+{
+ # Note: this function is duplicated in git-prompt.sh
+ # When updating it, make sure you update the other one to match.
+ if [ -z "${1-}" ]; then
+ if [ -n "${__git_dir-}" ]; then
+ echo "$__git_dir"
+ elif [ -n "${GIT_DIR-}" ]; then
+ test -d "${GIT_DIR-}" || return 1
+ echo "$GIT_DIR"
+ elif [ -d .git ]; then
+ echo .git
+ else
+ git rev-parse --git-dir 2>/dev/null
+ fi
+ elif [ -d "$1/.git" ]; then
+ echo "$1/.git"
+ else
+ echo "$1"
+ fi
+}
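+# Example use of __gitdir (illustrative):
+#   __gitdir            # the .git directory of the current repository
+#   __gitdir ~/src/foo  # ~/src/foo/.git if it exists, otherwise ~/src/foo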
+
+# The following function is based on code from:
+#
+# bash_completion - programmable completion functions for bash 3.2+
+#
+# Copyright © 2006-2008, Ian Macdonald <ian@caliban.org>
+# © 2009-2010, Bash Completion Maintainers
+# <bash-completion-devel@lists.alioth.debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# The latest version of this software can be obtained here:
+#
+# http://bash-completion.alioth.debian.org/
+#
+# RELEASE: 2.x
+
+# This function can be used to access a tokenized list of words
+# on the command line:
+#
+# __git_reassemble_comp_words_by_ref '=:'
+# if test "${words_[cword_-1]}" = -w
+# then
+# ...
+# fi
+#
+# The argument should be a collection of characters from the list of
+# word completion separators (COMP_WORDBREAKS) to treat as ordinary
+# characters.
+#
+# This is roughly equivalent to going back in time and setting
+# COMP_WORDBREAKS to exclude those characters. The intent is to
+# make option types like --date=<type> and <rev>:<path> easy to
+# recognize by treating each shell word as a single token.
+#
+# It is best not to set COMP_WORDBREAKS directly because the value is
+# shared with other completion scripts. By the time the completion
+# function gets called, COMP_WORDS has already been populated so local
+# changes to COMP_WORDBREAKS have no effect.
+#
+# Output: words_, cword_, cur_.
+
+__git_reassemble_comp_words_by_ref()
+{
+ local exclude i j first
+ # Which word separators to exclude?
+ exclude="${1//[^$COMP_WORDBREAKS]}"
+ cword_=$COMP_CWORD
+ if [ -z "$exclude" ]; then
+ words_=("${COMP_WORDS[@]}")
+ return
+ fi
+ # List of word completion separators has shrunk;
+ # re-assemble words to complete.
+ for ((i=0, j=0; i < ${#COMP_WORDS[@]}; i++, j++)); do
+ # Append each nonempty word consisting of just
+ # word separator characters to the current word.
+ first=t
+ while
+ [ $i -gt 0 ] &&
+ [ -n "${COMP_WORDS[$i]}" ] &&
+ # word consists of excluded word separators
+ [ "${COMP_WORDS[$i]//[^$exclude]}" = "${COMP_WORDS[$i]}" ]
+ do
+ # Attach to the previous token,
+ # unless the previous token is the command name.
+ if [ $j -ge 2 ] && [ -n "$first" ]; then
+ ((j--))
+ fi
+ first=
+ words_[$j]=${words_[j]}${COMP_WORDS[i]}
+ if [ $i = $COMP_CWORD ]; then
+ cword_=$j
+ fi
+ if (($i < ${#COMP_WORDS[@]} - 1)); then
+ ((i++))
+ else
+ # Done.
+ return
+ fi
+ done
+ words_[$j]=${words_[j]}${COMP_WORDS[i]}
+ if [ $i = $COMP_CWORD ]; then
+ cword_=$j
+ fi
+ done
+}
+
+if ! type _get_comp_words_by_ref >/dev/null 2>&1; then
+_get_comp_words_by_ref ()
+{
+ local exclude cur_ words_ cword_
+ if [ "$1" = "-n" ]; then
+ exclude=$2
+ shift 2
+ fi
+ __git_reassemble_comp_words_by_ref "$exclude"
+ cur_=${words_[cword_]}
+ while [ $# -gt 0 ]; do
+ case "$1" in
+ cur)
+ cur=$cur_
+ ;;
+ prev)
+ prev=${words_[$cword_-1]}
+ ;;
+ words)
+ words=("${words_[@]}")
+ ;;
+ cword)
+ cword=$cword_
+ ;;
+ esac
+ shift
+ done
+}
+fi
+
+__gitcompadd ()
+{
+ local i=0
+ for x in $1; do
+ if [[ "$x" == "$3"* ]]; then
+ COMPREPLY[i++]="$2$x$4"
+ fi
+ done
+}
+
+# Generates completion reply, appending a space to possible completion words,
+# if necessary.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word (optional).
+__gitcomp ()
+{
+ local cur_="${3-$cur}"
+
+ case "$cur_" in
+ --*=)
+ ;;
+ *)
+ local c i=0 IFS=$' \t\n'
+ for c in $1; do
+ c="$c${4-}"
+ if [[ $c == "$cur_"* ]]; then
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
+ esac
+ COMPREPLY[i++]="${2-}$c"
+ fi
+ done
+ ;;
+ esac
+}
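+# Example use of __gitcomp (illustrative), as in many completions below:
+# offer a fixed set of long options filtered against the current word:
+#   __gitcomp "--dry-run --quiet"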
+
+# Generates completion reply from newline-separated possible completion words
+# by appending a space to all of them.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words, separated by a single newline.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word instead of
+# the default space (optional). If specified but empty, nothing is
+# appended.
+__gitcomp_nl ()
+{
+ local IFS=$'\n'
+ __gitcompadd "$1" "${2-}" "${3-$cur}" "${4- }"
+}
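+# Example use of __gitcomp_nl (illustrative), as used throughout this file
+# to offer ref names, which are produced one per line:
+#   __gitcomp_nl "$(__git_refs)"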
+
+# Generates completion reply with compgen from newline-separated possible
+# completion filenames.
+# It accepts 1 to 3 arguments:
+# 1: List of possible completion filenames, separated by a single newline.
+# 2: A directory prefix to be added to each possible completion filename
+# (optional).
+# 3: Generate possible completion matches for this word (optional).
+__gitcomp_file ()
+{
+ local IFS=$'\n'
+
+ # XXX does not work when the directory prefix contains a tilde,
+ # since tilde expansion is not applied.
+ # This means that COMPREPLY will be empty and Bash default
+ # completion will be used.
+ __gitcompadd "$1" "${2-}" "${3-$cur}" ""
+
+ # use a hack to enable file mode in bash < 4
+ compopt -o filenames +o nospace 2>/dev/null ||
+ compgen -f /non-existing-dir/ > /dev/null
+}
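+# Example use of __gitcomp_file (illustrative; see
+# __git_complete_index_file below for the real call site):
+#   __gitcomp_file "$(__git_index_files "--cached" "$pfx")" "$pfx" "$cur_"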
+
+# Execute 'git ls-files', unless the --committable option is specified, in
+# which case it runs 'git diff-index' to find out the files that can be
+# committed. It returns paths relative to the directory specified in the first
+# argument, using the options specified in the second argument.
+__git_ls_files_helper ()
+{
+ (
+ test -n "${CDPATH+set}" && unset CDPATH
+ cd "$1"
+ if [ "$2" == "--committable" ]; then
+ git diff-index --name-only --relative HEAD
+ else
+ # NOTE: $2 is not quoted in order to support multiple options
+ git ls-files --exclude-standard $2
+ fi
+ ) 2>/dev/null
+}
+
+
+# __git_index_files accepts 1 or 2 arguments:
+# 1: Options to pass to ls-files (required).
+# 2: A directory path (optional).
+# If provided, only files within the specified directory are listed.
+# Sub directories are never recursed. Path must have a trailing
+# slash.
+__git_index_files ()
+{
+ local dir="$(__gitdir)" root="${2-.}" file
+
+ if [ -d "$dir" ]; then
+ __git_ls_files_helper "$root" "$1" |
+ while read -r file; do
+ case "$file" in
+ ?*/*) echo "${file%%/*}" ;;
+ *) echo "$file" ;;
+ esac
+ done | sort | uniq
+ fi
+}
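+# Example use of __git_index_files (illustrative): untracked files and
+# directories visible from the current directory, with deeper paths
+# collapsed to their first component:
+#   __git_index_files "--others --directory"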
+
+__git_heads ()
+{
+ local dir="$(__gitdir)"
+ if [ -d "$dir" ]; then
+ git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
+ refs/heads
+ return
+ fi
+}
+
+__git_tags ()
+{
+ local dir="$(__gitdir)"
+ if [ -d "$dir" ]; then
+ git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
+ refs/tags
+ return
+ fi
+}
+
+# __git_refs accepts 0, 1 (to pass to __gitdir), or 2 arguments
+# presence of 2nd argument means use the guess heuristic employed
+# by checkout for tracking branches
+__git_refs ()
+{
+ local i hash dir="$(__gitdir "${1-}")" track="${2-}"
+ local format refs
+ if [ -d "$dir" ]; then
+ case "$cur" in
+ refs|refs/*)
+ format="refname"
+ refs="${cur%/*}"
+ track=""
+ ;;
+ *)
+ for i in HEAD FETCH_HEAD ORIG_HEAD MERGE_HEAD; do
+ if [ -e "$dir/$i" ]; then echo $i; fi
+ done
+ format="refname:short"
+ refs="refs/tags refs/heads refs/remotes"
+ ;;
+ esac
+ git --git-dir="$dir" for-each-ref --format="%($format)" \
+ $refs
+ if [ -n "$track" ]; then
+ # employ the heuristic used by git checkout
+ # Try to find a remote branch that matches the completion word
+ # but only output if the branch name is unique
+ local ref entry
+ git --git-dir="$dir" for-each-ref --shell --format="ref=%(refname:short)" \
+ "refs/remotes/" | \
+ while read -r entry; do
+ eval "$entry"
+ ref="${ref#*/}"
+ if [[ "$ref" == "$cur"* ]]; then
+ echo "$ref"
+ fi
+ done | sort | uniq -u
+ fi
+ return
+ fi
+ case "$cur" in
+ refs|refs/*)
+ git ls-remote "$dir" "$cur*" 2>/dev/null | \
+ while read -r hash i; do
+ case "$i" in
+ *^{}) ;;
+ *) echo "$i" ;;
+ esac
+ done
+ ;;
+ *)
+ echo "HEAD"
+ git for-each-ref --format="%(refname:short)" -- "refs/remotes/$dir/" | sed -e "s#^$dir/##"
+ ;;
+ esac
+}
+
+# __git_refs2 requires 1 argument (to pass to __git_refs)
+__git_refs2 ()
+{
+ local i
+ for i in $(__git_refs "$1"); do
+ echo "$i:$i"
+ done
+}
+
+# __git_refs_remotes requires 1 argument (to pass to ls-remote)
+__git_refs_remotes ()
+{
+ local i hash
+ git ls-remote "$1" 'refs/heads/*' 2>/dev/null | \
+ while read -r hash i; do
+ echo "$i:refs/remotes/$1/${i#refs/heads/}"
+ done
+}
+
+__git_remotes ()
+{
+ local i IFS=$'\n' d="$(__gitdir)"
+ test -d "$d/remotes" && ls -1 "$d/remotes"
+ for i in $(git --git-dir="$d" config --get-regexp 'remote\..*\.url' 2>/dev/null); do
+ i="${i#remote.}"
+ echo "${i/.url*/}"
+ done
+}
+
+__git_list_merge_strategies ()
+{
+ git merge -s help 2>&1 |
+ sed -n -e '/[Aa]vailable strategies are: /,/^$/{
+ s/\.$//
+ s/.*://
+ s/^[ ]*//
+ s/[ ]*$//
+ p
+ }'
+}
+
+__git_merge_strategies=
+# 'git merge -s help' (and thus detection of the merge strategy
+# list) fails, unfortunately, if run outside of any git working
+# tree. __git_merge_strategies is set to the empty string in
+# that case, and the detection will be repeated the next time it
+# is needed.
+__git_compute_merge_strategies ()
+{
+ test -n "$__git_merge_strategies" ||
+ __git_merge_strategies=$(__git_list_merge_strategies)
+}
+
+__git_complete_revlist_file ()
+{
+ local pfx ls ref cur_="$cur"
+ case "$cur_" in
+ *..?*:*)
+ return
+ ;;
+ ?*:*)
+ ref="${cur_%%:*}"
+ cur_="${cur_#*:}"
+ case "$cur_" in
+ ?*/*)
+ pfx="${cur_%/*}"
+ cur_="${cur_##*/}"
+ ls="$ref:$pfx"
+ pfx="$pfx/"
+ ;;
+ *)
+ ls="$ref"
+ ;;
+ esac
+
+ case "$COMP_WORDBREAKS" in
+ *:*) : great ;;
+ *) pfx="$ref:$pfx" ;;
+ esac
+
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" 2>/dev/null \
+ | sed '/^100... blob /{
+ s,^.* ,,
+ s,$, ,
+ }
+ /^120000 blob /{
+ s,^.* ,,
+ s,$, ,
+ }
+ /^040000 tree /{
+ s,^.* ,,
+ s,$,/,
+ }
+ s/^.* //')" \
+ "$pfx" "$cur_" ""
+ ;;
+ *...*)
+ pfx="${cur_%...*}..."
+ cur_="${cur_#*...}"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
+ ;;
+ *..*)
+ pfx="${cur_%..*}.."
+ cur_="${cur_#*..}"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
+ ;;
+ *)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ esac
+}
+
+
+# __git_complete_index_file requires 1 argument:
+# 1: the options to pass to ls-files
+#
+# The exception is --committable, which finds the files appropriate to commit.
+__git_complete_index_file ()
+{
+ local pfx="" cur_="$cur"
+
+ case "$cur_" in
+ ?*/*)
+ pfx="${cur_%/*}"
+ cur_="${cur_##*/}"
+ pfx="${pfx}/"
+ ;;
+ esac
+
+ __gitcomp_file "$(__git_index_files "$1" "$pfx")" "$pfx" "$cur_"
+}
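+# Example use of __git_complete_index_file (illustrative), as in _git_rm
+# below: complete only paths already in the index:
+#   __git_complete_index_file "--cached"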
+
+__git_complete_file ()
+{
+ __git_complete_revlist_file
+}
+
+__git_complete_revlist ()
+{
+ __git_complete_revlist_file
+}
+
+__git_complete_remote_or_refspec ()
+{
+ local cur_="$cur" cmd="${words[1]}"
+ local i c=2 remote="" pfx="" lhs=1 no_complete_refspec=0
+ if [ "$cmd" = "remote" ]; then
+ ((c++))
+ fi
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
+ case "$i" in
+ --mirror) [ "$cmd" = "push" ] && no_complete_refspec=1 ;;
+ --all)
+ case "$cmd" in
+ push) no_complete_refspec=1 ;;
+ fetch)
+ return
+ ;;
+ *) ;;
+ esac
+ ;;
+ -*) ;;
+ *) remote="$i"; break ;;
+ esac
+ ((c++))
+ done
+ if [ -z "$remote" ]; then
+ __gitcomp_nl "$(__git_remotes)"
+ return
+ fi
+ if [ $no_complete_refspec = 1 ]; then
+ return
+ fi
+ [ "$remote" = "." ] && remote=
+ case "$cur_" in
+ *:*)
+ case "$COMP_WORDBREAKS" in
+ *:*) : great ;;
+ *) pfx="${cur_%%:*}:" ;;
+ esac
+ cur_="${cur_#*:}"
+ lhs=0
+ ;;
+ +*)
+ pfx="+"
+ cur_="${cur_#+}"
+ ;;
+ esac
+ case "$cmd" in
+ fetch)
+ if [ $lhs = 1 ]; then
+ __gitcomp_nl "$(__git_refs2 "$remote")" "$pfx" "$cur_"
+ else
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
+ fi
+ ;;
+ pull|remote)
+ if [ $lhs = 1 ]; then
+ __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
+ else
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
+ fi
+ ;;
+ push)
+ if [ $lhs = 1 ]; then
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
+ else
+ __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
+ fi
+ ;;
+ esac
+}
+
+__git_complete_strategy ()
+{
+ __git_compute_merge_strategies
+ case "$prev" in
+ -s|--strategy)
+ __gitcomp "$__git_merge_strategies"
+ return 0
+ esac
+ case "$cur" in
+ --strategy=*)
+ __gitcomp "$__git_merge_strategies" "" "${cur##--strategy=}"
+ return 0
+ ;;
+ esac
+ return 1
+}
+
+__git_commands () {
+ if test -n "${GIT_TESTING_COMMAND_COMPLETION:-}"
+ then
+ printf "%s" "${GIT_TESTING_COMMAND_COMPLETION}"
+ else
+ git help -a|egrep '^ [a-zA-Z0-9]'
+ fi
+}
+
+__git_list_all_commands ()
+{
+ local i IFS=" "$'\n'
+ for i in $(__git_commands)
+ do
+ case $i in
+ *--*) : helper pattern;;
+ *) echo $i;;
+ esac
+ done
+}
+
+__git_all_commands=
+__git_compute_all_commands ()
+{
+ test -n "$__git_all_commands" ||
+ __git_all_commands=$(__git_list_all_commands)
+}
+
+__git_list_porcelain_commands ()
+{
+ local i IFS=" "$'\n'
+ __git_compute_all_commands
+ for i in $__git_all_commands
+ do
+ case $i in
+ *--*) : helper pattern;;
+ applymbox) : ask gittus;;
+ applypatch) : ask gittus;;
+ archimport) : import;;
+ cat-file) : plumbing;;
+ check-attr) : plumbing;;
+ check-ignore) : plumbing;;
+ check-ref-format) : plumbing;;
+ checkout-index) : plumbing;;
+ commit-tree) : plumbing;;
+ count-objects) : infrequent;;
+ credential-cache) : credentials helper;;
+ credential-store) : credentials helper;;
+ cvsexportcommit) : export;;
+ cvsimport) : import;;
+ cvsserver) : daemon;;
+ daemon) : daemon;;
+ diff-files) : plumbing;;
+ diff-index) : plumbing;;
+ diff-tree) : plumbing;;
+ fast-import) : import;;
+ fast-export) : export;;
+ fsck-objects) : plumbing;;
+ fetch-pack) : plumbing;;
+ fmt-merge-msg) : plumbing;;
+ for-each-ref) : plumbing;;
+ hash-object) : plumbing;;
+ http-*) : transport;;
+ index-pack) : plumbing;;
+ init-db) : deprecated;;
+ local-fetch) : plumbing;;
+ lost-found) : infrequent;;
+ ls-files) : plumbing;;
+ ls-remote) : plumbing;;
+ ls-tree) : plumbing;;
+ mailinfo) : plumbing;;
+ mailsplit) : plumbing;;
+ merge-*) : plumbing;;
+ mktree) : plumbing;;
+ mktag) : plumbing;;
+ pack-objects) : plumbing;;
+ pack-redundant) : plumbing;;
+ pack-refs) : plumbing;;
+ parse-remote) : plumbing;;
+ patch-id) : plumbing;;
+ peek-remote) : plumbing;;
+ prune) : plumbing;;
+ prune-packed) : plumbing;;
+ quiltimport) : import;;
+ read-tree) : plumbing;;
+ receive-pack) : plumbing;;
+ remote-*) : transport;;
+ repo-config) : deprecated;;
+ rerere) : plumbing;;
+ rev-list) : plumbing;;
+ rev-parse) : plumbing;;
+ runstatus) : plumbing;;
+ sh-setup) : internal;;
+ shell) : daemon;;
+ show-ref) : plumbing;;
+ send-pack) : plumbing;;
+ show-index) : plumbing;;
+ ssh-*) : transport;;
+ stripspace) : plumbing;;
+ symbolic-ref) : plumbing;;
+ tar-tree) : deprecated;;
+ unpack-file) : plumbing;;
+ unpack-objects) : plumbing;;
+ update-index) : plumbing;;
+ update-ref) : plumbing;;
+ update-server-info) : daemon;;
+ upload-archive) : plumbing;;
+ upload-pack) : plumbing;;
+ write-tree) : plumbing;;
+ var) : infrequent;;
+ verify-pack) : infrequent;;
+ verify-tag) : plumbing;;
+ *) echo $i;;
+ esac
+ done
+}
+
+__git_porcelain_commands=
+__git_compute_porcelain_commands ()
+{
+ __git_compute_all_commands
+ test -n "$__git_porcelain_commands" ||
+ __git_porcelain_commands=$(__git_list_porcelain_commands)
+}
+
+__git_pretty_aliases ()
+{
+ local i IFS=$'\n'
+ for i in $(git --git-dir="$(__gitdir)" config --get-regexp "pretty\..*" 2>/dev/null); do
+ case "$i" in
+ pretty.*)
+ i="${i#pretty.}"
+ echo "${i/ */}"
+ ;;
+ esac
+ done
+}
+
+__git_aliases ()
+{
+ local i IFS=$'\n'
+ for i in $(git --git-dir="$(__gitdir)" config --get-regexp "alias\..*" 2>/dev/null); do
+ case "$i" in
+ alias.*)
+ i="${i#alias.}"
+ echo "${i/ */}"
+ ;;
+ esac
+ done
+}
+
+# __git_aliased_command requires 1 argument
+__git_aliased_command ()
+{
+ local word cmdline=$(git --git-dir="$(__gitdir)" \
+ config --get "alias.$1")
+ for word in $cmdline; do
+ case "$word" in
+ \!gitk|gitk)
+ echo "gitk"
+ return
+ ;;
+ \!*) : shell command alias ;;
+ -*) : option ;;
+ *=*) : setting env ;;
+ git) : git itself ;;
+ *)
+ echo "$word"
+ return
+ esac
+ done
+}
+
+# __git_find_on_cmdline requires 1 argument
+__git_find_on_cmdline ()
+{
+ local word subcommand c=1
+ while [ $c -lt $cword ]; do
+ word="${words[c]}"
+ for subcommand in $1; do
+ if [ "$subcommand" = "$word" ]; then
+ echo "$subcommand"
+ return
+ fi
+ done
+ ((c++))
+ done
+}
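+# Example use of __git_find_on_cmdline (illustrative), as in _git_stash
+# below: find which subcommand, if any, is already on the command line:
+#   subcommand="$(__git_find_on_cmdline "save list show apply clear drop pop create branch")"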
+
+__git_has_doubledash ()
+{
+ local c=1
+ while [ $c -lt $cword ]; do
+ if [ "--" = "${words[c]}" ]; then
+ return 0
+ fi
+ ((c++))
+ done
+ return 1
+}
+
+# Try to count the non-option arguments passed on the command line for the
+# specified git command.
+# When options are used, it is necessary to use the special -- option to
+# tell the implementation where the non-option arguments begin.
+# XXX this cannot be improved, since options can appear everywhere, as
+# an example:
+# git mv x -n y
+#
+# __git_count_arguments requires 1 argument: the git command executed.
+__git_count_arguments ()
+{
+ local word i c=0
+
+ # Skip "git" (first argument)
+ for ((i=1; i < ${#words[@]}; i++)); do
+ word="${words[i]}"
+
+ case "$word" in
+ --)
+ # Good; we can assume that the following are only non
+ # option arguments.
+ ((c = 0))
+ ;;
+ "$1")
+ # Skip the specified git command and discard git
+ # main options
+ ((c = 0))
+ ;;
+ ?*)
+ ((c++))
+ ;;
+ esac
+ done
+
+ printf "%d" $c
+}
+
+__git_whitespacelist="nowarn warn error error-all fix"
+
+_git_am ()
+{
+ local dir="$(__gitdir)"
+ if [ -d "$dir"/rebase-apply ]; then
+ __gitcomp "--skip --continue --resolved --abort"
+ return
+ fi
+ case "$cur" in
+ --whitespace=*)
+ __gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
+ return
+ ;;
+ --*)
+ __gitcomp "
+ --3way --committer-date-is-author-date --ignore-date
+ --ignore-whitespace --ignore-space-change
+ --interactive --keep --no-utf8 --signoff --utf8
+ --whitespace= --scissors
+ "
+ return
+ esac
+}
+
+_git_apply ()
+{
+ case "$cur" in
+ --whitespace=*)
+ __gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
+ return
+ ;;
+ --*)
+ __gitcomp "
+ --stat --numstat --summary --check --index
+ --cached --index-info --reverse --reject --unidiff-zero
+ --apply --no-add --exclude=
+ --ignore-whitespace --ignore-space-change
+ --whitespace= --inaccurate-eof --verbose
+ "
+ return
+ esac
+}
+
+_git_add ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --interactive --refresh --patch --update --dry-run
+ --ignore-errors --intent-to-add
+ "
+ return
+ esac
+
+ # XXX should we check for --update and --all options ?
+ __git_complete_index_file "--others --modified"
+}
+
+_git_archive ()
+{
+ case "$cur" in
+ --format=*)
+ __gitcomp "$(git archive --list)" "" "${cur##--format=}"
+ return
+ ;;
+ --remote=*)
+ __gitcomp_nl "$(__git_remotes)" "" "${cur##--remote=}"
+ return
+ ;;
+ --*)
+ __gitcomp "
+ --format= --list --verbose
+ --prefix= --remote= --exec=
+ "
+ return
+ ;;
+ esac
+ __git_complete_file
+}
+
+_git_bisect ()
+{
+ __git_has_doubledash && return
+
+ local subcommands="start bad good skip reset visualize replay log run"
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+ if [ -z "$subcommand" ]; then
+ if [ -f "$(__gitdir)"/BISECT_START ]; then
+ __gitcomp "$subcommands"
+ else
+ __gitcomp "replay start"
+ fi
+ return
+ fi
+
+ case "$subcommand" in
+ bad|good|reset|skip|start)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ *)
+ ;;
+ esac
+}
+
+_git_branch ()
+{
+ local i c=1 only_local_ref="n" has_r="n"
+
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
+ case "$i" in
+ -d|-m) only_local_ref="y" ;;
+ -r) has_r="y" ;;
+ esac
+ ((c++))
+ done
+
+ case "$cur" in
+ --set-upstream-to=*)
+ __gitcomp "$(__git_refs)" "" "${cur##--set-upstream-to=}"
+ ;;
+ --*)
+ __gitcomp "
+ --color --no-color --verbose --abbrev= --no-abbrev
+ --track --no-track --contains --merged --no-merged
+ --set-upstream-to= --edit-description --list
+ --unset-upstream
+ "
+ ;;
+ *)
+ if [ $only_local_ref = "y" -a $has_r = "n" ]; then
+ __gitcomp_nl "$(__git_heads)"
+ else
+ __gitcomp_nl "$(__git_refs)"
+ fi
+ ;;
+ esac
+}
+
+_git_bundle ()
+{
+ local cmd="${words[2]}"
+ case "$cword" in
+ 2)
+ __gitcomp "create list-heads verify unbundle"
+ ;;
+ 3)
+ # looking for a file
+ ;;
+ *)
+ case "$cmd" in
+ create)
+ __git_complete_revlist
+ ;;
+ esac
+ ;;
+ esac
+}
+
+_git_checkout ()
+{
+ __git_has_doubledash && return
+
+ case "$cur" in
+ --conflict=*)
+ __gitcomp "diff3 merge" "" "${cur##--conflict=}"
+ ;;
+ --*)
+ __gitcomp "
+ --quiet --ours --theirs --track --no-track --merge
+ --conflict= --orphan --patch
+ "
+ ;;
+ *)
+ # check if --track, --no-track, or --no-guess was specified
+ # if so, disable DWIM mode
+ local flags="--track --no-track --no-guess" track=1
+ if [ -n "$(__git_find_on_cmdline "$flags")" ]; then
+ track=''
+ fi
+ __gitcomp_nl "$(__git_refs '' $track)"
+ ;;
+ esac
+}
+
+_git_cherry ()
+{
+ __gitcomp "$(__git_refs)"
+}
+
+_git_cherry_pick ()
+{
+ local dir="$(__gitdir)"
+ if [ -f "$dir"/CHERRY_PICK_HEAD ]; then
+ __gitcomp "--continue --quit --abort"
+ return
+ fi
+ case "$cur" in
+ --*)
+ __gitcomp "--edit --no-commit --signoff --strategy= --mainline"
+ ;;
+ *)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ esac
+}
+
+_git_clean ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "--dry-run --quiet"
+ return
+ ;;
+ esac
+
+ # XXX should we check for -x option ?
+ __git_complete_index_file "--others"
+}
+
+_git_clone ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --local
+ --no-hardlinks
+ --shared
+ --reference
+ --quiet
+ --no-checkout
+ --bare
+ --mirror
+ --origin
+ --upload-pack
+ --template=
+ --depth
+ --single-branch
+ --branch
+ "
+ return
+ ;;
+ esac
+}
+
+_git_commit ()
+{
+ case "$prev" in
+ -c|-C)
+ __gitcomp_nl "$(__git_refs)" "" "${cur}"
+ return
+ ;;
+ esac
+
+ case "$cur" in
+ --cleanup=*)
+ __gitcomp "default strip verbatim whitespace
+ " "" "${cur##--cleanup=}"
+ return
+ ;;
+ --reuse-message=*|--reedit-message=*|\
+ --fixup=*|--squash=*)
+ __gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
+ return
+ ;;
+ --untracked-files=*)
+ __gitcomp "all no normal" "" "${cur##--untracked-files=}"
+ return
+ ;;
+ --*)
+ __gitcomp "
+ --all --author= --signoff --verify --no-verify
+ --edit --no-edit
+ --amend --include --only --interactive
+ --dry-run --reuse-message= --reedit-message=
+ --reset-author --file= --message= --template=
+ --cleanup= --untracked-files --untracked-files=
+ --verbose --quiet --fixup= --squash=
+ "
+ return
+ esac
+
+ if git rev-parse --verify --quiet HEAD >/dev/null; then
+ __git_complete_index_file "--committable"
+ else
+ # This is the first commit
+ __git_complete_index_file "--cached"
+ fi
+}
+
+_git_describe ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --all --tags --contains --abbrev= --candidates=
+ --exact-match --debug --long --match --always
+ "
+ return
+ esac
+ __gitcomp_nl "$(__git_refs)"
+}
+
+__git_diff_algorithms="myers minimal patience histogram"
+
+__git_diff_common_options="--stat --numstat --shortstat --summary
+ --patch-with-stat --name-only --name-status --color
+ --no-color --color-words --no-renames --check
+ --full-index --binary --abbrev --diff-filter=
+ --find-copies-harder
+ --text --ignore-space-at-eol --ignore-space-change
+ --ignore-all-space --exit-code --quiet --ext-diff
+ --no-ext-diff
+ --no-prefix --src-prefix= --dst-prefix=
+ --inter-hunk-context=
+ --patience --histogram --minimal
+ --raw
+ --dirstat --dirstat= --dirstat-by-file
+ --dirstat-by-file= --cumulative
+ --diff-algorithm=
+"
+
+_git_diff ()
+{
+ __git_has_doubledash && return
+
+ case "$cur" in
+ --diff-algorithm=*)
+ __gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
+ return
+ ;;
+ --*)
+ __gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
+ --base --ours --theirs --no-index
+ $__git_diff_common_options
+ "
+ return
+ ;;
+ esac
+ __git_complete_revlist_file
+}
+
+__git_mergetools_common="diffuse ecmerge emerge kdiff3 meld opendiff
+ tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc3 codecompare
+"
+
+_git_difftool ()
+{
+ __git_has_doubledash && return
+
+ case "$cur" in
+ --tool=*)
+ __gitcomp "$__git_mergetools_common kompare" "" "${cur##--tool=}"
+ return
+ ;;
+ --*)
+ __gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
+ --base --ours --theirs
+ --no-renames --diff-filter= --find-copies-harder
+ --relative --ignore-submodules
+ --tool="
+ return
+ ;;
+ esac
+ __git_complete_revlist_file
+}
+
+__git_fetch_options="
+ --quiet --verbose --append --upload-pack --force --keep --depth=
+ --tags --no-tags --all --prune --dry-run
+"
+
+_git_fetch ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "$__git_fetch_options"
+ return
+ ;;
+ esac
+ __git_complete_remote_or_refspec
+}
+
+__git_format_patch_options="
+ --stdout --attach --no-attach --thread --thread= --no-thread
+ --numbered --start-number --numbered-files --keep-subject --signoff
+ --signature --no-signature --in-reply-to= --cc= --full-index --binary
+ --not --all --cover-letter --no-prefix --src-prefix= --dst-prefix=
+ --inline --suffix= --ignore-if-in-upstream --subject-prefix=
+ --output-directory --reroll-count --to= --quiet --notes
+"
+
+_git_format_patch ()
+{
+ case "$cur" in
+ --thread=*)
+ __gitcomp "
+ deep shallow
+ " "" "${cur##--thread=}"
+ return
+ ;;
+ --*)
+ __gitcomp "$__git_format_patch_options"
+ return
+ ;;
+ esac
+ __git_complete_revlist
+}
+
+_git_fsck ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --tags --root --unreachable --cache --no-reflogs --full
+ --strict --verbose --lost-found
+ "
+ return
+ ;;
+ esac
+}
+
+_git_gc ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "--prune --aggressive"
+ return
+ ;;
+ esac
+}
+
+_git_gitk ()
+{
+ _gitk
+}
+
+__git_match_ctag() {
+ awk "/^${1////\\/}/ { print \$1 }" "$2"
+}
+
+_git_grep ()
+{
+ __git_has_doubledash && return
+
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --cached
+ --text --ignore-case --word-regexp --invert-match
+ --full-name --line-number
+ --extended-regexp --basic-regexp --fixed-strings
+ --perl-regexp
+ --files-with-matches --name-only
+ --files-without-match
+ --max-depth
+ --count
+ --and --or --not --all-match
+ "
+ return
+ ;;
+ esac
+
+ case "$cword,$prev" in
+ 2,*|*,-*)
+ if test -r tags; then
+ __gitcomp_nl "$(__git_match_ctag "$cur" tags)"
+ return
+ fi
+ ;;
+ esac
+
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_help ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "--all --info --man --web"
+ return
+ ;;
+ esac
+ __git_compute_all_commands
+ __gitcomp "$__git_all_commands $(__git_aliases)
+ attributes cli core-tutorial cvs-migration
+ diffcore gitk glossary hooks ignore modules
+ namespaces repository-layout tutorial tutorial-2
+ workflows
+ "
+}
+
+_git_init ()
+{
+ case "$cur" in
+ --shared=*)
+ __gitcomp "
+ false true umask group all world everybody
+ " "" "${cur##--shared=}"
+ return
+ ;;
+ --*)
+ __gitcomp "--quiet --bare --template= --shared --shared="
+ return
+ ;;
+ esac
+}
+
+_git_ls_files ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "--cached --deleted --modified --others --ignored
+ --stage --directory --no-empty-directory --unmerged
+ --killed --exclude= --exclude-from=
+ --exclude-per-directory= --exclude-standard
+ --error-unmatch --with-tree= --full-name
+ --abbrev --ignored --exclude-per-directory
+ "
+ return
+ ;;
+ esac
+
+ # XXX ignore options like --modified and always suggest all cached
+ # files.
+ __git_complete_index_file "--cached"
+}
+
+_git_ls_remote ()
+{
+ __gitcomp_nl "$(__git_remotes)"
+}
+
+_git_ls_tree ()
+{
+ __git_complete_file
+}
+
+# Options that go well for log, shortlog and gitk
+__git_log_common_options="
+ --not --all
+ --branches --tags --remotes
+ --first-parent --merges --no-merges
+ --max-count=
+ --max-age= --since= --after=
+ --min-age= --until= --before=
+ --min-parents= --max-parents=
+ --no-min-parents --no-max-parents
+"
+# Options that go well for log and gitk (not shortlog)
+__git_log_gitk_options="
+ --dense --sparse --full-history
+ --simplify-merges --simplify-by-decoration
+ --left-right --notes --no-notes
+"
+# Options that go well for log and shortlog (not gitk)
+__git_log_shortlog_options="
+ --author= --committer= --grep=
+ --all-match
+"
+
+__git_log_pretty_formats="oneline short medium full fuller email raw format:"
+__git_log_date_formats="relative iso8601 rfc2822 short local default raw"
+
+_git_log ()
+{
+ __git_has_doubledash && return
+
+ local g="$(git rev-parse --git-dir 2>/dev/null)"
+ local merge=""
+ if [ -f "$g/MERGE_HEAD" ]; then
+ merge="--merge"
+ fi
+ case "$cur" in
+ --pretty=*|--format=*)
+ __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
+ " "" "${cur#*=}"
+ return
+ ;;
+ --date=*)
+ __gitcomp "$__git_log_date_formats" "" "${cur##--date=}"
+ return
+ ;;
+ --decorate=*)
+ __gitcomp "long short" "" "${cur##--decorate=}"
+ return
+ ;;
+ --*)
+ __gitcomp "
+ $__git_log_common_options
+ $__git_log_shortlog_options
+ $__git_log_gitk_options
+ --root --topo-order --date-order --reverse
+ --follow --full-diff
+ --abbrev-commit --abbrev=
+ --relative-date --date=
+ --pretty= --format= --oneline
+ --cherry-pick
+ --graph
+ --decorate --decorate=
+ --walk-reflogs
+ --parents --children
+ $merge
+ $__git_diff_common_options
+ --pickaxe-all --pickaxe-regex
+ "
+ return
+ ;;
+ esac
+ __git_complete_revlist
+}
+
+__git_merge_options="
+ --no-commit --no-stat --log --no-log --squash --strategy
+ --commit --stat --no-squash --ff --no-ff --ff-only --edit --no-edit
+"
+
+_git_merge ()
+{
+ __git_complete_strategy && return
+
+ case "$cur" in
+ --*)
+ __gitcomp "$__git_merge_options"
+ return
+ esac
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_mergetool ()
+{
+ case "$cur" in
+ --tool=*)
+ __gitcomp "$__git_mergetools_common tortoisemerge" "" "${cur##--tool=}"
+ return
+ ;;
+ --*)
+ __gitcomp "--tool="
+ return
+ ;;
+ esac
+}
+
+_git_merge_base ()
+{
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_mv ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "--dry-run"
+ return
+ ;;
+ esac
+
+ if [ $(__git_count_arguments "mv") -gt 0 ]; then
+ # We need to show both cached and untracked files (including
+ # empty directories) since this may not be the last argument.
+ __git_complete_index_file "--cached --others --directory"
+ else
+ __git_complete_index_file "--cached"
+ fi
+}
+
+_git_name_rev ()
+{
+ __gitcomp "--tags --all --stdin"
+}
+
+_git_notes ()
+{
+ local subcommands='add append copy edit list prune remove show'
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+
+ case "$subcommand,$cur" in
+ ,--*)
+ __gitcomp '--ref'
+ ;;
+ ,*)
+ case "$prev" in
+ --ref)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ *)
+ __gitcomp "$subcommands --ref"
+ ;;
+ esac
+ ;;
+ add,--reuse-message=*|append,--reuse-message=*|\
+ add,--reedit-message=*|append,--reedit-message=*)
+ __gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
+ ;;
+ add,--*|append,--*)
+ __gitcomp '--file= --message= --reedit-message=
+ --reuse-message='
+ ;;
+ copy,--*)
+ __gitcomp '--stdin'
+ ;;
+ prune,--*)
+ __gitcomp '--dry-run --verbose'
+ ;;
+ prune,*)
+ ;;
+ *)
+ case "$prev" in
+ -m|-F)
+ ;;
+ *)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ esac
+ ;;
+ esac
+}
+
+_git_pull ()
+{
+ __git_complete_strategy && return
+
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --rebase --no-rebase
+ $__git_merge_options
+ $__git_fetch_options
+ "
+ return
+ ;;
+ esac
+ __git_complete_remote_or_refspec
+}
+
+_git_push ()
+{
+ case "$prev" in
+ --repo)
+ __gitcomp_nl "$(__git_remotes)"
+ return
+ esac
+ case "$cur" in
+ --repo=*)
+ __gitcomp_nl "$(__git_remotes)" "" "${cur##--repo=}"
+ return
+ ;;
+ --*)
+ __gitcomp "
+ --all --mirror --tags --dry-run --force --verbose
+ --receive-pack= --repo= --set-upstream
+ "
+ return
+ ;;
+ esac
+ __git_complete_remote_or_refspec
+}
+
+_git_rebase ()
+{
+ local dir="$(__gitdir)"
+ if [ -d "$dir"/rebase-apply ] || [ -d "$dir"/rebase-merge ]; then
+ __gitcomp "--continue --skip --abort"
+ return
+ fi
+ __git_complete_strategy && return
+ case "$cur" in
+ --whitespace=*)
+ __gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
+ return
+ ;;
+ --*)
+ __gitcomp "
+ --onto --merge --strategy --interactive
+ --preserve-merges --stat --no-stat
+ --committer-date-is-author-date --ignore-date
+ --ignore-whitespace --whitespace=
+ --autosquash
+ "
+
+ return
+ esac
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_reflog ()
+{
+ local subcommands="show delete expire"
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+
+ if [ -z "$subcommand" ]; then
+ __gitcomp "$subcommands"
+ else
+ __gitcomp_nl "$(__git_refs)"
+ fi
+}
+
+__git_send_email_confirm_options="always never auto cc compose"
+__git_send_email_suppresscc_options="author self cc bodycc sob cccmd body all"
+
+_git_send_email ()
+{
+ case "$cur" in
+ --confirm=*)
+ __gitcomp "
+ $__git_send_email_confirm_options
+ " "" "${cur##--confirm=}"
+ return
+ ;;
+ --suppress-cc=*)
+ __gitcomp "
+ $__git_send_email_suppresscc_options
+ " "" "${cur##--suppress-cc=}"
+
+ return
+ ;;
+ --smtp-encryption=*)
+ __gitcomp "ssl tls" "" "${cur##--smtp-encryption=}"
+ return
+ ;;
+ --thread=*)
+ __gitcomp "
+ deep shallow
+ " "" "${cur##--thread=}"
+ return
+ ;;
+ --*)
+ __gitcomp "--annotate --bcc --cc --cc-cmd --chain-reply-to
+ --compose --confirm= --dry-run --envelope-sender
+ --from --identity
+ --in-reply-to --no-chain-reply-to --no-signed-off-by-cc
+ --no-suppress-from --no-thread --quiet
+ --signed-off-by-cc --smtp-pass --smtp-server
+ --smtp-server-port --smtp-encryption= --smtp-user
+ --subject --suppress-cc= --suppress-from --thread --to
+ --validate --no-validate
+ $__git_format_patch_options"
+ return
+ ;;
+ esac
+ __git_complete_revlist
+}
+
+_git_stage ()
+{
+ _git_add
+}
+
+__git_config_get_set_variables ()
+{
+ local prevword word config_file= c=$cword
+ while [ $c -gt 1 ]; do
+ word="${words[c]}"
+ case "$word" in
+ --system|--global|--local|--file=*)
+ config_file="$word"
+ break
+ ;;
+ -f|--file)
+ config_file="$word $prevword"
+ break
+ ;;
+ esac
+ prevword=$word
+ c=$((--c))
+ done
+
+ git --git-dir="$(__gitdir)" config $config_file --list 2>/dev/null |
+ while read -r line
+ do
+ case "$line" in
+ *.*=*)
+ echo "${line/=*/}"
+ ;;
+ esac
+ done
+}
+
+_git_config ()
+{
+ case "$prev" in
+ branch.*.remote|branch.*.pushremote)
+ __gitcomp_nl "$(__git_remotes)"
+ return
+ ;;
+ branch.*.merge)
+ __gitcomp_nl "$(__git_refs)"
+ return
+ ;;
+ branch.*.rebase)
+ __gitcomp "false true"
+ return
+ ;;
+ remote.pushdefault)
+ __gitcomp_nl "$(__git_remotes)"
+ return
+ ;;
+ remote.*.fetch)
+ local remote="${prev#remote.}"
+ remote="${remote%.fetch}"
+ if [ -z "$cur" ]; then
+ __gitcomp_nl "refs/heads/" "" "" ""
+ return
+ fi
+ __gitcomp_nl "$(__git_refs_remotes "$remote")"
+ return
+ ;;
+ remote.*.push)
+ local remote="${prev#remote.}"
+ remote="${remote%.push}"
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" \
+ for-each-ref --format='%(refname):%(refname)' \
+ refs/heads)"
+ return
+ ;;
+ pull.twohead|pull.octopus)
+ __git_compute_merge_strategies
+ __gitcomp "$__git_merge_strategies"
+ return
+ ;;
+ color.branch|color.diff|color.interactive|\
+ color.showbranch|color.status|color.ui)
+ __gitcomp "always never auto"
+ return
+ ;;
+ color.pager)
+ __gitcomp "false true"
+ return
+ ;;
+ color.*.*)
+ __gitcomp "
+ normal black red green yellow blue magenta cyan white
+ bold dim ul blink reverse
+ "
+ return
+ ;;
+ diff.submodule)
+ __gitcomp "log short"
+ return
+ ;;
+ help.format)
+ __gitcomp "man info web html"
+ return
+ ;;
+ log.date)
+ __gitcomp "$__git_log_date_formats"
+ return
+ ;;
+ sendemail.aliasesfiletype)
+ __gitcomp "mutt mailrc pine elm gnus"
+ return
+ ;;
+ sendemail.confirm)
+ __gitcomp "$__git_send_email_confirm_options"
+ return
+ ;;
+ sendemail.suppresscc)
+ __gitcomp "$__git_send_email_suppresscc_options"
+ return
+ ;;
+ --get|--get-all|--unset|--unset-all)
+ __gitcomp_nl "$(__git_config_get_set_variables)"
+ return
+ ;;
+ *.*)
+ return
+ ;;
+ esac
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --system --global --local --file=
+ --list --replace-all
+ --get --get-all --get-regexp
+ --add --unset --unset-all
+ --remove-section --rename-section
+ "
+ return
+ ;;
+ branch.*.*)
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "remote pushremote merge mergeoptions rebase" "$pfx" "$cur_"
+ return
+ ;;
+ branch.*)
+ local pfx="${cur%.*}." cur_="${cur#*.}"
+ __gitcomp_nl "$(__git_heads)" "$pfx" "$cur_" "."
+ return
+ ;;
+ guitool.*.*)
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "
+ argprompt cmd confirm needsfile noconsole norescan
+ prompt revprompt revunmerged title
+ " "$pfx" "$cur_"
+ return
+ ;;
+ difftool.*.*)
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "cmd path" "$pfx" "$cur_"
+ return
+ ;;
+ man.*.*)
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "cmd path" "$pfx" "$cur_"
+ return
+ ;;
+ mergetool.*.*)
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "cmd path trustExitCode" "$pfx" "$cur_"
+ return
+ ;;
+ pager.*)
+ local pfx="${cur%.*}." cur_="${cur#*.}"
+ __git_compute_all_commands
+ __gitcomp_nl "$__git_all_commands" "$pfx" "$cur_"
+ return
+ ;;
+ remote.*.*)
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "
+ url proxy fetch push mirror skipDefaultUpdate
+ receivepack uploadpack tagopt pushurl
+ " "$pfx" "$cur_"
+ return
+ ;;
+ remote.*)
+ local pfx="${cur%.*}." cur_="${cur#*.}"
+ __gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "."
+ return
+ ;;
+ url.*.*)
+ local pfx="${cur%.*}." cur_="${cur##*.}"
+ __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_"
+ return
+ ;;
+ esac
+ __gitcomp "
+ add.ignoreErrors
+ advice.commitBeforeMerge
+ advice.detachedHead
+ advice.implicitIdentity
+ advice.pushNonFastForward
+ advice.resolveConflict
+ advice.statusHints
+ alias.
+ am.keepcr
+ apply.ignorewhitespace
+ apply.whitespace
+ branch.autosetupmerge
+ branch.autosetuprebase
+ browser.
+ clean.requireForce
+ color.branch
+ color.branch.current
+ color.branch.local
+ color.branch.plain
+ color.branch.remote
+ color.decorate.HEAD
+ color.decorate.branch
+ color.decorate.remoteBranch
+ color.decorate.stash
+ color.decorate.tag
+ color.diff
+ color.diff.commit
+ color.diff.frag
+ color.diff.func
+ color.diff.meta
+ color.diff.new
+ color.diff.old
+ color.diff.plain
+ color.diff.whitespace
+ color.grep
+ color.grep.context
+ color.grep.filename
+ color.grep.function
+ color.grep.linenumber
+ color.grep.match
+ color.grep.selected
+ color.grep.separator
+ color.interactive
+ color.interactive.error
+ color.interactive.header
+ color.interactive.help
+ color.interactive.prompt
+ color.pager
+ color.showbranch
+ color.status
+ color.status.added
+ color.status.changed
+ color.status.header
+ color.status.nobranch
+ color.status.untracked
+ color.status.updated
+ color.ui
+ commit.status
+ commit.template
+ core.abbrev
+ core.askpass
+ core.attributesfile
+ core.autocrlf
+ core.bare
+ core.bigFileThreshold
+ core.compression
+ core.createObject
+ core.deltaBaseCacheLimit
+ core.editor
+ core.eol
+ core.excludesfile
+ core.fileMode
+ core.fsyncobjectfiles
+ core.gitProxy
+ core.ignoreCygwinFSTricks
+ core.ignoreStat
+ core.ignorecase
+ core.logAllRefUpdates
+ core.loosecompression
+ core.notesRef
+ core.packedGitLimit
+ core.packedGitWindowSize
+ core.pager
+ core.preferSymlinkRefs
+ core.preloadindex
+ core.quotepath
+ core.repositoryFormatVersion
+ core.safecrlf
+ core.sharedRepository
+ core.sparseCheckout
+ core.symlinks
+ core.trustctime
+ core.warnAmbiguousRefs
+ core.whitespace
+ core.worktree
+ diff.autorefreshindex
+ diff.external
+ diff.ignoreSubmodules
+ diff.mnemonicprefix
+ diff.noprefix
+ diff.renameLimit
+ diff.renames
+ diff.statGraphWidth
+ diff.submodule
+ diff.suppressBlankEmpty
+ diff.tool
+ diff.wordRegex
+ diff.algorithm
+ difftool.
+ difftool.prompt
+ fetch.recurseSubmodules
+ fetch.unpackLimit
+ format.attach
+ format.cc
+ format.headers
+ format.numbered
+ format.pretty
+ format.signature
+ format.signoff
+ format.subjectprefix
+ format.suffix
+ format.thread
+ format.to
+ gc.
+ gc.aggressiveWindow
+ gc.auto
+ gc.autopacklimit
+ gc.packrefs
+ gc.pruneexpire
+ gc.reflogexpire
+ gc.reflogexpireunreachable
+ gc.rerereresolved
+ gc.rerereunresolved
+ gitcvs.allbinary
+ gitcvs.commitmsgannotation
+ gitcvs.dbTableNamePrefix
+ gitcvs.dbdriver
+ gitcvs.dbname
+ gitcvs.dbpass
+ gitcvs.dbuser
+ gitcvs.enabled
+ gitcvs.logfile
+ gitcvs.usecrlfattr
+ guitool.
+ gui.blamehistoryctx
+ gui.commitmsgwidth
+ gui.copyblamethreshold
+ gui.diffcontext
+ gui.encoding
+ gui.fastcopyblame
+ gui.matchtrackingbranch
+ gui.newbranchtemplate
+ gui.pruneduringfetch
+ gui.spellingdictionary
+ gui.trustmtime
+ help.autocorrect
+ help.browser
+ help.format
+ http.lowSpeedLimit
+ http.lowSpeedTime
+ http.maxRequests
+ http.minSessions
+ http.noEPSV
+ http.postBuffer
+ http.proxy
+ http.sslCAInfo
+ http.sslCAPath
+ http.sslCert
+ http.sslCertPasswordProtected
+ http.sslKey
+ http.sslVerify
+ http.useragent
+ i18n.commitEncoding
+ i18n.logOutputEncoding
+ imap.authMethod
+ imap.folder
+ imap.host
+ imap.pass
+ imap.port
+ imap.preformattedHTML
+ imap.sslverify
+ imap.tunnel
+ imap.user
+ init.templatedir
+ instaweb.browser
+ instaweb.httpd
+ instaweb.local
+ instaweb.modulepath
+ instaweb.port
+ interactive.singlekey
+ log.date
+ log.decorate
+ log.showroot
+ mailmap.file
+ man.
+ man.viewer
+ merge.
+ merge.conflictstyle
+ merge.log
+ merge.renameLimit
+ merge.renormalize
+ merge.stat
+ merge.tool
+ merge.verbosity
+ mergetool.
+ mergetool.keepBackup
+ mergetool.keepTemporaries
+ mergetool.prompt
+ notes.displayRef
+ notes.rewrite.
+ notes.rewrite.amend
+ notes.rewrite.rebase
+ notes.rewriteMode
+ notes.rewriteRef
+ pack.compression
+ pack.deltaCacheLimit
+ pack.deltaCacheSize
+ pack.depth
+ pack.indexVersion
+ pack.packSizeLimit
+ pack.threads
+ pack.window
+ pack.windowMemory
+ pager.
+ pretty.
+ pull.octopus
+ pull.twohead
+ push.default
+ rebase.autosquash
+ rebase.stat
+ receive.autogc
+ receive.denyCurrentBranch
+ receive.denyDeleteCurrent
+ receive.denyDeletes
+ receive.denyNonFastForwards
+ receive.fsckObjects
+ receive.unpackLimit
+ receive.updateserverinfo
+ remote.pushdefault
+ remotes.
+ repack.usedeltabaseoffset
+ rerere.autoupdate
+ rerere.enabled
+ sendemail.
+ sendemail.aliasesfile
+ sendemail.aliasfiletype
+ sendemail.bcc
+ sendemail.cc
+ sendemail.cccmd
+ sendemail.chainreplyto
+ sendemail.confirm
+ sendemail.envelopesender
+ sendemail.from
+ sendemail.identity
+ sendemail.multiedit
+ sendemail.signedoffbycc
+ sendemail.smtpdomain
+ sendemail.smtpencryption
+ sendemail.smtppass
+ sendemail.smtpserver
+ sendemail.smtpserveroption
+ sendemail.smtpserverport
+ sendemail.smtpuser
+ sendemail.suppresscc
+ sendemail.suppressfrom
+ sendemail.thread
+ sendemail.to
+ sendemail.validate
+ showbranch.default
+ status.relativePaths
+ status.showUntrackedFiles
+ status.submodulesummary
+ submodule.
+ tar.umask
+ transfer.unpackLimit
+ url.
+ user.email
+ user.name
+ user.signingkey
+ web.browser
+ branch. remote.
+ "
+}
+
+_git_remote ()
+{
+ local subcommands="add rename remove set-head set-branches set-url show prune update"
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+ if [ -z "$subcommand" ]; then
+ __gitcomp "$subcommands"
+ return
+ fi
+
+ case "$subcommand" in
+ rename|remove|set-url|show|prune)
+ __gitcomp_nl "$(__git_remotes)"
+ ;;
+ set-head|set-branches)
+ __git_complete_remote_or_refspec
+ ;;
+ update)
+ local i c='' IFS=$'\n'
+ for i in $(git --git-dir="$(__gitdir)" config --get-regexp "remotes\..*" 2>/dev/null); do
+ i="${i#remotes.}"
+ c="$c ${i/ */}"
+ done
+ __gitcomp "$c"
+ ;;
+ *)
+ ;;
+ esac
+}
+
+_git_replace ()
+{
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_reset ()
+{
+ __git_has_doubledash && return
+
+ case "$cur" in
+ --*)
+ __gitcomp "--merge --mixed --hard --soft --patch"
+ return
+ ;;
+ esac
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_revert ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "--edit --mainline --no-edit --no-commit --signoff"
+ return
+ ;;
+ esac
+ __gitcomp_nl "$(__git_refs)"
+}
+
+_git_rm ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "--cached --dry-run --ignore-unmatch --quiet"
+ return
+ ;;
+ esac
+
+ __git_complete_index_file "--cached"
+}
+
+_git_shortlog ()
+{
+ __git_has_doubledash && return
+
+ case "$cur" in
+ --*)
+ __gitcomp "
+ $__git_log_common_options
+ $__git_log_shortlog_options
+ --numbered --summary
+ "
+ return
+ ;;
+ esac
+ __git_complete_revlist
+}
+
+_git_show ()
+{
+ __git_has_doubledash && return
+
+ case "$cur" in
+ --pretty=*|--format=*)
+ __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
+ " "" "${cur#*=}"
+ return
+ ;;
+ --diff-algorithm=*)
+ __gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
+ return
+ ;;
+ --*)
+ __gitcomp "--pretty= --format= --abbrev-commit --oneline
+ $__git_diff_common_options
+ "
+ return
+ ;;
+ esac
+ __git_complete_revlist_file
+}
+
+_git_show_branch ()
+{
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --all --remotes --topo-order --current --more=
+ --list --independent --merge-base --no-name
+ --color --no-color
+ --sha1-name --sparse --topics --reflog
+ "
+ return
+ ;;
+ esac
+ __git_complete_revlist
+}
+
+_git_stash ()
+{
+ local save_opts='--keep-index --no-keep-index --quiet --patch'
+ local subcommands='save list show apply clear drop pop create branch'
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+ if [ -z "$subcommand" ]; then
+ case "$cur" in
+ --*)
+ __gitcomp "$save_opts"
+ ;;
+ *)
+ if [ -z "$(__git_find_on_cmdline "$save_opts")" ]; then
+ __gitcomp "$subcommands"
+ fi
+ ;;
+ esac
+ else
+ case "$subcommand,$cur" in
+ save,--*)
+ __gitcomp "$save_opts"
+ ;;
+ apply,--*|pop,--*)
+ __gitcomp "--index --quiet"
+ ;;
+ show,--*|drop,--*|branch,--*)
+ ;;
+ show,*|apply,*|drop,*|pop,*|branch,*)
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \
+ | sed -n -e 's/:.*//p')"
+ ;;
+ *)
+ ;;
+ esac
+ fi
+}
+
+_git_submodule ()
+{
+ __git_has_doubledash && return
+
+ local subcommands="add status init deinit update summary foreach sync"
+ if [ -z "$(__git_find_on_cmdline "$subcommands")" ]; then
+ case "$cur" in
+ --*)
+ __gitcomp "--quiet --cached"
+ ;;
+ *)
+ __gitcomp "$subcommands"
+ ;;
+ esac
+ return
+ fi
+}
+
+_git_svn ()
+{
+ local subcommands="
+ init fetch clone rebase dcommit log find-rev
+ set-tree commit-diff info create-ignore propget
+ proplist show-ignore show-externals branch tag blame
+ migrate mkdirs reset gc
+ "
+ local subcommand="$(__git_find_on_cmdline "$subcommands")"
+ if [ -z "$subcommand" ]; then
+ __gitcomp "$subcommands"
+ else
+ local remote_opts="--username= --config-dir= --no-auth-cache"
+ local fc_opts="
+ --follow-parent --authors-file= --repack=
+ --no-metadata --use-svm-props --use-svnsync-props
+ --log-window-size= --no-checkout --quiet
+ --repack-flags --use-log-author --localtime
+ --ignore-paths= --include-paths= $remote_opts
+ "
+ local init_opts="
+ --template= --shared= --trunk= --tags=
+ --branches= --stdlayout --minimize-url
+ --no-metadata --use-svm-props --use-svnsync-props
+ --rewrite-root= --prefix= --use-log-author
+ --add-author-from $remote_opts
+ "
+ local cmt_opts="
+ --edit --rmdir --find-copies-harder --copy-similarity=
+ "
+
+ case "$subcommand,$cur" in
+ fetch,--*)
+ __gitcomp "--revision= --fetch-all $fc_opts"
+ ;;
+ clone,--*)
+ __gitcomp "--revision= $fc_opts $init_opts"
+ ;;
+ init,--*)
+ __gitcomp "$init_opts"
+ ;;
+ dcommit,--*)
+ __gitcomp "
+ --merge --strategy= --verbose --dry-run
+ --fetch-all --no-rebase --commit-url
+ --revision --interactive $cmt_opts $fc_opts
+ "
+ ;;
+ set-tree,--*)
+ __gitcomp "--stdin $cmt_opts $fc_opts"
+ ;;
+ create-ignore,--*|propget,--*|proplist,--*|show-ignore,--*|\
+ show-externals,--*|mkdirs,--*)
+ __gitcomp "--revision="
+ ;;
+ log,--*)
+ __gitcomp "
+ --limit= --revision= --verbose --incremental
+ --oneline --show-commit --non-recursive
+ --authors-file= --color
+ "
+ ;;
+ rebase,--*)
+ __gitcomp "
+ --merge --verbose --strategy= --local
+ --fetch-all --dry-run $fc_opts
+ "
+ ;;
+ commit-diff,--*)
+ __gitcomp "--message= --file= --revision= $cmt_opts"
+ ;;
+ info,--*)
+ __gitcomp "--url"
+ ;;
+ branch,--*)
+ __gitcomp "--dry-run --message --tag"
+ ;;
+ tag,--*)
+ __gitcomp "--dry-run --message"
+ ;;
+ blame,--*)
+ __gitcomp "--git-format"
+ ;;
+ migrate,--*)
+ __gitcomp "
+ --config-dir= --ignore-paths= --minimize
+ --no-auth-cache --username=
+ "
+ ;;
+ reset,--*)
+ __gitcomp "--revision= --parent"
+ ;;
+ *)
+ ;;
+ esac
+ fi
+}
+
+_git_tag ()
+{
+ local i c=1 f=0
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
+ case "$i" in
+ -d|-v)
+ __gitcomp_nl "$(__git_tags)"
+ return
+ ;;
+ -f)
+ f=1
+ ;;
+ esac
+ ((c++))
+ done
+
+ case "$prev" in
+ -m|-F)
+ ;;
+ -*|tag)
+ if [ $f = 1 ]; then
+ __gitcomp_nl "$(__git_tags)"
+ fi
+ ;;
+ *)
+ __gitcomp_nl "$(__git_refs)"
+ ;;
+ esac
+}
+
+_git_whatchanged ()
+{
+ _git_log
+}
+
+__git_main ()
+{
+ local i c=1 command __git_dir
+
+ while [ $c -lt $cword ]; do
+ i="${words[c]}"
+ case "$i" in
+ --git-dir=*) __git_dir="${i#--git-dir=}" ;;
+ --bare) __git_dir="." ;;
+ --help) command="help"; break ;;
+ -c) c=$((++c)) ;;
+ -*) ;;
+ *) command="$i"; break ;;
+ esac
+ ((c++))
+ done
+
+ if [ -z "$command" ]; then
+ case "$cur" in
+ --*) __gitcomp "
+ --paginate
+ --no-pager
+ --git-dir=
+ --bare
+ --version
+ --exec-path
+ --exec-path=
+ --html-path
+ --info-path
+ --work-tree=
+ --namespace=
+ --no-replace-objects
+ --help
+ "
+ ;;
+ *) __git_compute_porcelain_commands
+ __gitcomp "$__git_porcelain_commands $(__git_aliases)" ;;
+ esac
+ return
+ fi
+
+ local completion_func="_git_${command//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func && return
+
+ local expansion=$(__git_aliased_command "$command")
+ if [ -n "$expansion" ]; then
+ completion_func="_git_${expansion//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func
+ fi
+}
+
+__gitk_main ()
+{
+ __git_has_doubledash && return
+
+ local g="$(__gitdir)"
+ local merge=""
+ if [ -f "$g/MERGE_HEAD" ]; then
+ merge="--merge"
+ fi
+ case "$cur" in
+ --*)
+ __gitcomp "
+ $__git_log_common_options
+ $__git_log_gitk_options
+ $merge
+ "
+ return
+ ;;
+ esac
+ __git_complete_revlist
+}
+
+if [[ -n ${ZSH_VERSION-} ]]; then
+ echo "WARNING: this script is deprecated, please see git-completion.zsh" 1>&2
+
+ autoload -U +X compinit && compinit
+
+ __gitcomp ()
+ {
+ emulate -L zsh
+
+ local cur_="${3-$cur}"
+
+ case "$cur_" in
+ --*=)
+ ;;
+ *)
+ local c IFS=$' \t\n'
+ local -a array
+ for c in ${=1}; do
+ c="$c${4-}"
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
+ esac
+ array+=("$c")
+ done
+ compset -P '*[=:]'
+ compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
+ ;;
+ esac
+ }
+
+ __gitcomp_nl ()
+ {
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+ }
+
+ __gitcomp_file ()
+ {
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
+ }
+
+ _git ()
+ {
+ local _ret=1 cur cword prev
+ cur=${words[CURRENT]}
+ prev=${words[CURRENT-1]}
+ let cword=CURRENT-1
+ emulate ksh -c __${service}_main
+ let _ret && _default && _ret=0
+ return _ret
+ }
+
+ compdef _git git gitk
+ return
+fi
+
+__git_func_wrap ()
+{
+ local cur words cword prev
+ _get_comp_words_by_ref -n =: cur words cword prev
+ $1
+}
+
+# Set up completion for certain functions defined above by setting common
+# variables and workarounds.
+# This is NOT a public function; use at your own risk.
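+#
+# As an illustrative (not upstream-supported) example, a shell alias "g"
+# for git could be given the same completion with:
+#   __git_complete g __git_main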
+__git_complete ()
+{
+ local wrapper="__git_wrap${2}"
+ eval "$wrapper () { __git_func_wrap $2 ; }"
+ complete -o bashdefault -o default -o nospace -F $wrapper $1 2>/dev/null \
+ || complete -o default -o nospace -F $wrapper $1
+}
+
+# wrapper for backwards compatibility
+_git ()
+{
+ __git_wrap__git_main
+}
+
+# wrapper for backwards compatibility
+_gitk ()
+{
+ __git_wrap__gitk_main
+}
+
+__git_complete git __git_main
+__git_complete gitk __gitk_main
+
+# The following is necessary only for Cygwin, and is needed only when
+# the user has tab-completed the executable name and consequently
+# included the '.exe' suffix.
+#
+if [ Cygwin = "$(uname -o 2>/dev/null)" ]; then
+__git_complete git.exe __git_main
+fi
diff --git a/contrib/completion/git-completion.tcsh b/contrib/completion/git-completion.tcsh
new file mode 100644
index 0000000..eaacaf0
--- /dev/null
+++ b/contrib/completion/git-completion.tcsh
@@ -0,0 +1,128 @@
+#!tcsh
+#
+# tcsh completion support for core Git.
+#
+# Copyright (C) 2012 Marc Khouzam <marc.khouzam@gmail.com>
+# Distributed under the GNU General Public License, version 2.0.
+#
+# When sourced, this script will generate a new script that uses
+# the git-completion.bash script provided by core Git. This new
+# script can be used by tcsh to perform git completion.
+# The current script also issues the necessary tcsh 'complete'
+# commands.
+#
+# To use this completion script:
+#
+# 0) You need tcsh 6.16.00 or newer.
+# 1) Copy both this file and the bash completion script to ${HOME}.
+# You _must_ use the name ${HOME}/.git-completion.bash for the
+# bash script.
+# (e.g. ~/.git-completion.tcsh and ~/.git-completion.bash).
+# 2) Add the following line to your .tcshrc/.cshrc:
+# source ~/.git-completion.tcsh
+# 3) For completion similar to bash, it is recommended to also
+# add the following line to your .tcshrc/.cshrc:
+# set autolist=ambiguous
+# It will tell tcsh to list the possible completion choices.
+
+set __git_tcsh_completion_version = `\echo ${tcsh} | \sed 's/\./ /g'`
+if ( ${__git_tcsh_completion_version[1]} < 6 || \
+ ( ${__git_tcsh_completion_version[1]} == 6 && \
+ ${__git_tcsh_completion_version[2]} < 16 ) ) then
+	echo "git-completion.tcsh: Your version of tcsh is too old; you need version 6.16.00 or newer. Git completion will not work."
+ exit
+endif
+unset __git_tcsh_completion_version
+
+set __git_tcsh_completion_original_script = ${HOME}/.git-completion.bash
+set __git_tcsh_completion_script = ${HOME}/.git-completion.tcsh.bash
+
+# Check that the user put the script in the right place
+if ( ! -e ${__git_tcsh_completion_original_script} ) then
+ echo "git-completion.tcsh: Cannot find: ${__git_tcsh_completion_original_script}. Git completion will not work."
+ exit
+endif
+
+cat << EOF > ${__git_tcsh_completion_script}
+#!bash
+#
+# This script is GENERATED and will be overwritten automatically.
+# Do not modify it directly. Instead, modify git-completion.tcsh
+# and source it again.
+
+source ${__git_tcsh_completion_original_script}
+
+# Remove the colon as a completion separator because tcsh cannot handle it
+COMP_WORDBREAKS=\${COMP_WORDBREAKS//:}
+
+# For file completion, tcsh needs the '/' to be appended to directories.
+# By default, the bash script does not do that.
+# We can achieve this by using the below compatibility
+# method of the git-completion.bash script.
+__git_index_file_list_filter ()
+{
+ __git_index_file_list_filter_compat
+}
+
+# Set COMP_WORDS in a way that can be handled by the bash script.
+COMP_WORDS=(\$2)
+
+# The cursor is at the end of parameter #2.
+# We must check for a space as the last character which will
+# tell us that the previous word is complete and the cursor
+# is on the next word.
+if [ "\${2: -1}" == " " ]; then
+ # The last character is a space, so our location is at the end
+ # of the command-line array
+ COMP_CWORD=\${#COMP_WORDS[@]}
+else
+ # The last character is not a space, so our location is on the
+ # last word of the command-line array, so we must decrement the
+ # count by 1
+ COMP_CWORD=\$((\${#COMP_WORDS[@]}-1))
+fi
+
+# Call _git() or _gitk() of the bash script, based on the first argument
+_\${1}
+
+IFS=\$'\n'
+if [ \${#COMPREPLY[*]} -eq 0 ]; then
+ # No completions suggested. In this case, we want tcsh to perform
+	# standard file completion. However, there does not seem to be a way
+ # to tell tcsh to do that. To help the user, we try to simulate
+ # file completion directly in this script.
+ #
+ # Known issues:
+ # - Possible completions are shown with their directory prefix.
+ # - Completions containing shell variables are not handled.
+ # - Completions with ~ as the first character are not handled.
+
+ # No file completion should be done unless we are completing beyond
+ # the git sub-command. An improvement on the bash completion :)
+ if [ \${COMP_CWORD} -gt 1 ]; then
+ TO_COMPLETE="\${COMP_WORDS[\${COMP_CWORD}]}"
+
+ # We don't support ~ expansion: too tricky.
+ if [ "\${TO_COMPLETE:0:1}" != "~" ]; then
+ # Use ls so as to add the '/' at the end of directories.
+ COMPREPLY=(\`ls -dp \${TO_COMPLETE}* 2> /dev/null\`)
+ fi
+ fi
+fi
+
+# tcsh does not automatically remove duplicates, so we do it ourselves
+echo "\${COMPREPLY[*]}" | sort | uniq
+
+# If there is a single completion and it is a directory, we output it
+# a second time to trick tcsh into not adding a space after it.
+if [ \${#COMPREPLY[*]} -eq 1 ] && [ "\${COMPREPLY[0]: -1}" == "/" ]; then
+ echo "\${COMPREPLY[*]}"
+fi
+
+EOF
+
+# Don't need this variable anymore, so don't pollute the user's environment
+unset __git_tcsh_completion_original_script
+
+complete git 'p,*,`bash ${__git_tcsh_completion_script} git "${COMMAND_LINE}"`,'
+complete gitk 'p,*,`bash ${__git_tcsh_completion_script} gitk "${COMMAND_LINE}"`,'
diff --git a/contrib/completion/git-completion.zsh b/contrib/completion/git-completion.zsh
new file mode 100644
index 0000000..fac5e71
--- /dev/null
+++ b/contrib/completion/git-completion.zsh
@@ -0,0 +1,216 @@
+#compdef git gitk
+
+# zsh completion wrapper for git
+#
+# Copyright (c) 2012-2013 Felipe Contreras <felipe.contreras@gmail.com>
+#
+# You need git's bash completion script installed somewhere, by default it
+# would be the location bash-completion uses.
+#
+# If your script is somewhere else, you can configure it in your ~/.zshrc:
+#
+# zstyle ':completion:*:*:git:*' script ~/.git-completion.sh
+#
+# The recommended way to install this script is to copy it to '~/.zsh/_git', and
+# then add the following to your ~/.zshrc file:
+#
+# fpath=(~/.zsh $fpath)
+
+complete ()
+{
+ # do nothing
+ return 0
+}
+
+zstyle -T ':completion:*:*:git:*' tag-order && \
+ zstyle ':completion:*:*:git:*' tag-order 'common-commands'
+
+zstyle -s ":completion:*:*:git:*" script script
+if [ -z "$script" ]; then
+ local -a locations
+ local e
+ locations=(
+ '/etc/bash_completion.d/git' # fedora, old debian
+ '/usr/share/bash-completion/completions/git' # arch, ubuntu, new debian
+ '/usr/share/bash-completion/git' # gentoo
+ $(dirname ${funcsourcetrace[1]%:*})/git-completion.bash
+ )
+ for e in $locations; do
+ test -f $e && script="$e" && break
+ done
+fi
+ZSH_VERSION='' . "$script"
+
+__gitcomp ()
+{
+ emulate -L zsh
+
+ local cur_="${3-$cur}"
+
+ case "$cur_" in
+ --*=)
+ ;;
+ *)
+ local c IFS=$' \t\n'
+ local -a array
+ for c in ${=1}; do
+ c="$c${4-}"
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
+ esac
+ array+=("$c")
+ done
+ compset -P '*[=:]'
+ compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
+ ;;
+ esac
+}
+
+__gitcomp_nl ()
+{
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+}
+
+__gitcomp_file ()
+{
+ emulate -L zsh
+
+ local IFS=$'\n'
+ compset -P '*[=:]'
+ compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
+}
+
+__git_zsh_bash_func ()
+{
+ emulate -L ksh
+
+ local command=$1
+
+ local completion_func="_git_${command//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func && return
+
+ local expansion=$(__git_aliased_command "$command")
+ if [ -n "$expansion" ]; then
+ completion_func="_git_${expansion//-/_}"
+ declare -f $completion_func >/dev/null && $completion_func
+ fi
+}
+
+__git_zsh_cmd_common ()
+{
+ local -a list
+ list=(
+ add:'add file contents to the index'
+ bisect:'find by binary search the change that introduced a bug'
+ branch:'list, create, or delete branches'
+ checkout:'checkout a branch or paths to the working tree'
+ clone:'clone a repository into a new directory'
+ commit:'record changes to the repository'
+ diff:'show changes between commits, commit and working tree, etc'
+ fetch:'download objects and refs from another repository'
+ grep:'print lines matching a pattern'
+ init:'create an empty Git repository or reinitialize an existing one'
+ log:'show commit logs'
+ merge:'join two or more development histories together'
+ mv:'move or rename a file, a directory, or a symlink'
+ pull:'fetch from and merge with another repository or a local branch'
+ push:'update remote refs along with associated objects'
+ rebase:'forward-port local commits to the updated upstream head'
+ reset:'reset current HEAD to the specified state'
+ rm:'remove files from the working tree and from the index'
+ show:'show various types of objects'
+ status:'show the working tree status'
+ tag:'create, list, delete or verify a tag object signed with GPG')
+ _describe -t common-commands 'common commands' list && _ret=0
+}
+
+__git_zsh_cmd_alias ()
+{
+ local -a list
+ list=(${${${(0)"$(git config -z --get-regexp '^alias\.')"}#alias.}%$'\n'*})
+ _describe -t alias-commands 'aliases' list $* && _ret=0
+}
+
+__git_zsh_cmd_all ()
+{
+ local -a list
+ emulate ksh -c __git_compute_all_commands
+ list=( ${=__git_all_commands} )
+ _describe -t all-commands 'all commands' list && _ret=0
+}
+
+__git_zsh_main ()
+{
+ local curcontext="$curcontext" state state_descr line
+ typeset -A opt_args
+ local -a orig_words
+
+ orig_words=( ${words[@]} )
+
+ _arguments -C \
+ '(-p --paginate --no-pager)'{-p,--paginate}'[pipe all output into ''less'']' \
+ '(-p --paginate)--no-pager[do not pipe git output into a pager]' \
+ '--git-dir=-[set the path to the repository]: :_directories' \
+ '--bare[treat the repository as a bare repository]' \
+ '(- :)--version[prints the git suite version]' \
+ '--exec-path=-[path to where your core git programs are installed]:: :_directories' \
+ '--html-path[print the path where git''s HTML documentation is installed]' \
+ '--info-path[print the path where the Info files are installed]' \
+ '--man-path[print the manpath (see `man(1)`) for the man pages]' \
+ '--work-tree=-[set the path to the working tree]: :_directories' \
+ '--namespace=-[set the git namespace]' \
+ '--no-replace-objects[do not use replacement refs to replace git objects]' \
+ '(- :)--help[prints the synopsis and a list of the most commonly used commands]: :->arg' \
+ '(-): :->command' \
+ '(-)*:: :->arg' && return
+
+ case $state in
+ (command)
+ _alternative \
+ 'alias-commands:alias:__git_zsh_cmd_alias' \
+ 'common-commands:common:__git_zsh_cmd_common' \
+ 'all-commands:all:__git_zsh_cmd_all' && _ret=0
+ ;;
+ (arg)
+ local command="${words[1]}" __git_dir
+
+ if (( $+opt_args[--bare] )); then
+ __git_dir='.'
+ else
+ __git_dir=${opt_args[--git-dir]}
+ fi
+
+ (( $+opt_args[--help] )) && command='help'
+
+ words=( ${orig_words[@]} )
+
+ __git_zsh_bash_func $command
+ ;;
+ esac
+}
+
+_git ()
+{
+ local _ret=1
+ local cur cword prev
+
+ cur=${words[CURRENT]}
+ prev=${words[CURRENT-1]}
+ let cword=CURRENT-1
+
+ if (( $+functions[__${service}_zsh_main] )); then
+ __${service}_zsh_main
+ else
+ emulate ksh -c __${service}_main
+ fi
+
+ let _ret && _default && _ret=0
+ return _ret
+}
+
+_git
diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh
new file mode 100644
index 0000000..86a4f3f
--- /dev/null
+++ b/contrib/completion/git-prompt.sh
@@ -0,0 +1,461 @@
+# bash/zsh git prompt support
+#
+# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
+# Distributed under the GNU General Public License, version 2.0.
+#
+# This script allows you to see the current branch in your prompt.
+#
+# To enable:
+#
+# 1) Copy this file to somewhere (e.g. ~/.git-prompt.sh).
+# 2) Add the following line to your .bashrc/.zshrc:
+# source ~/.git-prompt.sh
+# 3a) Change your PS1 to call __git_ps1 as
+# command-substitution:
+# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
+# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
+# the optional argument will be used as format string.
+# 3b) Alternatively, if you are using bash, __git_ps1 can be
+# used for PROMPT_COMMAND with two parameters, <pre> and
+# <post>, which are strings you would put in $PS1 before
+# and after the status string generated by the git-prompt
+# machinery. e.g.
+# Bash: PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
+# ZSH: precmd () { __git_ps1 "%n" ":%~$ " "|%s" }
+#      will show username, at-sign, host, colon, cwd, then the
+#      various status strings, followed by dollar and SP, as
+#      your prompt.
+# Optionally, you can supply a third argument with a printf
+#      format string to fine-tune the output of the branch status.
+#
+# The argument to __git_ps1 will be displayed only if you are currently
+# in a git repository. The %s token will be the name of the current
+# branch.
+#
+# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value,
+# unstaged (*) and staged (+) changes will be shown next to the branch
+# name. You can configure this per-repository with the
+# bash.showDirtyState variable, which defaults to true once
+# GIT_PS1_SHOWDIRTYSTATE is enabled.
+#
+# You can also see if currently something is stashed, by setting
+# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed,
+# then a '$' will be shown next to the branch name.
+#
+# If you would like to see if there are untracked files, then you can set
+# GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there are untracked
+# files, then a '%' will be shown next to the branch name. You can
+# configure this per-repository with the bash.showUntrackedFiles
+# variable, which defaults to true once GIT_PS1_SHOWUNTRACKEDFILES is
+# enabled.
+#
+# If you would like to see the difference between HEAD and its upstream,
+# set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates you are behind, ">"
+# indicates you are ahead, "<>" indicates you have diverged and "="
+# indicates that there is no difference. You can further control
+# behaviour by setting GIT_PS1_SHOWUPSTREAM to a space-separated list
+# of values:
+#
+# verbose show number of commits ahead/behind (+/-) upstream
+# legacy don't use the '--count' option available in recent
+# versions of git-rev-list
+# git always compare HEAD to @{upstream}
+# svn always compare HEAD to your SVN upstream
+#
+# By default, __git_ps1 will compare HEAD to your SVN upstream if it can
+# find one, or @{upstream} otherwise. Once you have set
+# GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by
+# setting the bash.showUpstream config variable.
+#
+# If you would like to see more information about the identity of
+# commits checked out as a detached HEAD, set GIT_PS1_DESCRIBE_STYLE
+# to one of these values:
+#
+# contains relative to newer annotated tag (v1.6.3.2~35)
+# branch relative to newer tag or branch (master~4)
+# describe relative to older annotated tag (v1.6.3.1-13-gdd42c2f)
+# default exactly matching tag
+#
+# If you would like a colored hint about the current dirty state, set
+# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on
+# the colored output of "git status -sb".
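+#
+# As a rough combined example (the values below are illustrative, not
+# defaults), a bash user could put the following in ~/.bashrc:
+#
+#      source ~/.git-prompt.sh
+#      GIT_PS1_SHOWDIRTYSTATE=1
+#      GIT_PS1_SHOWSTASHSTATE=1
+#      GIT_PS1_SHOWUNTRACKEDFILES=1
+#      GIT_PS1_SHOWUPSTREAM="verbose git"
+#      GIT_PS1_SHOWCOLORHINTS=1
+#      PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'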
+
+# __gitdir accepts 0 or 1 arguments (i.e., location)
+# returns location of .git repo
+__gitdir ()
+{
+ # Note: this function is duplicated in git-completion.bash
+ # When updating it, make sure you update the other one to match.
+ if [ -z "${1-}" ]; then
+ if [ -n "${__git_dir-}" ]; then
+ echo "$__git_dir"
+ elif [ -n "${GIT_DIR-}" ]; then
+ test -d "${GIT_DIR-}" || return 1
+ echo "$GIT_DIR"
+ elif [ -d .git ]; then
+ echo .git
+ else
+ git rev-parse --git-dir 2>/dev/null
+ fi
+ elif [ -d "$1/.git" ]; then
+ echo "$1/.git"
+ else
+ echo "$1"
+ fi
+}
+
+# stores the divergence from upstream in $p
+# used by GIT_PS1_SHOWUPSTREAM
+__git_ps1_show_upstream ()
+{
+ local key value
+ local svn_remote svn_url_pattern count n
+ local upstream=git legacy="" verbose=""
+
+ svn_remote=()
+ # get some config options from git-config
+ local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')"
+ while read -r key value; do
+ case "$key" in
+ bash.showupstream)
+ GIT_PS1_SHOWUPSTREAM="$value"
+ if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then
+ p=""
+ return
+ fi
+ ;;
+ svn-remote.*.url)
+ svn_remote[$((${#svn_remote[@]} + 1))]="$value"
+ svn_url_pattern+="\\|$value"
+ upstream=svn+git # default upstream is SVN if available, else git
+ ;;
+ esac
+ done <<< "$output"
+
+ # parse configuration values
+ for option in ${GIT_PS1_SHOWUPSTREAM}; do
+ case "$option" in
+ git|svn) upstream="$option" ;;
+ verbose) verbose=1 ;;
+ legacy) legacy=1 ;;
+ esac
+ done
+
+ # Find our upstream
+ case "$upstream" in
+ git) upstream="@{upstream}" ;;
+ svn*)
+ # get the upstream from the "git-svn-id: ..." in a commit message
+ # (git-svn uses essentially the same procedure internally)
+ local -a svn_upstream
+ svn_upstream=($(git log --first-parent -1 \
+ --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
+ if [[ 0 -ne ${#svn_upstream[@]} ]]; then
+ svn_upstream=${svn_upstream[${#svn_upstream[@]} - 2]}
+ svn_upstream=${svn_upstream%@*}
+ local n_stop="${#svn_remote[@]}"
+ for ((n=1; n <= n_stop; n++)); do
+ svn_upstream=${svn_upstream#${svn_remote[$n]}}
+ done
+
+ if [[ -z "$svn_upstream" ]]; then
+ # default branch name for checkouts with no layout:
+ upstream=${GIT_SVN_ID:-git-svn}
+ else
+ upstream=${svn_upstream#/}
+ fi
+ elif [[ "svn+git" = "$upstream" ]]; then
+ upstream="@{upstream}"
+ fi
+ ;;
+ esac
+
+ # Find how many commits we are ahead/behind our upstream
+ if [[ -z "$legacy" ]]; then
+ count="$(git rev-list --count --left-right \
+ "$upstream"...HEAD 2>/dev/null)"
+ else
+ # produce equivalent output to --count for older versions of git
+ local commits
+ if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)"
+ then
+ local commit behind=0 ahead=0
+ for commit in $commits
+ do
+ case "$commit" in
+ "<"*) ((behind++)) ;;
+ *) ((ahead++)) ;;
+ esac
+ done
+ count="$behind $ahead"
+ else
+ count=""
+ fi
+ fi
+
+ # calculate the result
+ if [[ -z "$verbose" ]]; then
+ case "$count" in
+ "") # no upstream
+ p="" ;;
+ "0 0") # equal to upstream
+ p="=" ;;
+ "0 "*) # ahead of upstream
+ p=">" ;;
+ *" 0") # behind upstream
+ p="<" ;;
+ *) # diverged from upstream
+ p="<>" ;;
+ esac
+ else
+ case "$count" in
+ "") # no upstream
+ p="" ;;
+ "0 0") # equal to upstream
+ p=" u=" ;;
+ "0 "*) # ahead of upstream
+ p=" u+${count#0 }" ;;
+ *" 0") # behind upstream
+ p=" u-${count% 0}" ;;
+ *) # diverged from upstream
+ p=" u+${count#* }-${count% *}" ;;
+ esac
+ fi
+
+}
+
+# Helper function that is meant to be called from __git_ps1. It
+# builds up a gitstring injecting color codes into the appropriate
+# places.
+__git_ps1_colorize_gitstring ()
+{
+ if [[ -n ${ZSH_VERSION-} ]]; then
+ local c_red='%F{red}'
+ local c_green='%F{green}'
+ local c_lblue='%F{blue}'
+ local c_clear='%f'
+ local bad_color=$c_red
+ local ok_color=$c_green
+ local branch_color="$c_clear"
+ local flags_color="$c_lblue"
+ local branchstring="$c${b##refs/heads/}"
+
+ if [ $detached = no ]; then
+ branch_color="$ok_color"
+ else
+ branch_color="$bad_color"
+ fi
+
+ gitstring="$branch_color$branchstring$c_clear"
+
+ if [ -n "$w$i$s$u$r$p" ]; then
+ gitstring="$gitstring$z"
+ fi
+ if [ "$w" = "*" ]; then
+ gitstring="$gitstring$bad_color$w"
+ fi
+ if [ -n "$i" ]; then
+ gitstring="$gitstring$ok_color$i"
+ fi
+ if [ -n "$s" ]; then
+ gitstring="$gitstring$flags_color$s"
+ fi
+ if [ -n "$u" ]; then
+ gitstring="$gitstring$bad_color$u"
+ fi
+ gitstring="$gitstring$c_clear$r$p"
+ return
+ fi
+ local c_red='\e[31m'
+ local c_green='\e[32m'
+ local c_lblue='\e[1;34m'
+ local c_clear='\e[0m'
+ local bad_color=$c_red
+ local ok_color=$c_green
+ local branch_color="$c_clear"
+ local flags_color="$c_lblue"
+ local branchstring="$c${b##refs/heads/}"
+
+ if [ $detached = no ]; then
+ branch_color="$ok_color"
+ else
+ branch_color="$bad_color"
+ fi
+
+ # Setting gitstring directly with \[ and \] around colors
+ # is necessary to prevent wrapping issues!
+ gitstring="\[$branch_color\]$branchstring\[$c_clear\]"
+
+ if [ -n "$w$i$s$u$r$p" ]; then
+ gitstring="$gitstring$z"
+ fi
+ if [ "$w" = "*" ]; then
+ gitstring="$gitstring\[$bad_color\]$w"
+ fi
+ if [ -n "$i" ]; then
+ gitstring="$gitstring\[$ok_color\]$i"
+ fi
+ if [ -n "$s" ]; then
+ gitstring="$gitstring\[$flags_color\]$s"
+ fi
+ if [ -n "$u" ]; then
+ gitstring="$gitstring\[$bad_color\]$u"
+ fi
+ gitstring="$gitstring\[$c_clear\]$r$p"
+}
+
+# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
+# when called from PS1 using command substitution
+# in this mode it prints text to add to bash PS1 prompt (includes branch name)
+#
+# __git_ps1 requires 2 or 3 arguments when called from PROMPT_COMMAND (pc)
+# in that case it _sets_ PS1. The arguments are parts of a PS1 string.
+# when two arguments are given, the first is prepended and the second appended
+# to the state string when assigned to PS1.
+# The optional third parameter will be used as printf format string to further
+# customize the output of the git-status string.
+# In this mode you can request colored hints using GIT_PS1_SHOWCOLORHINTS=true
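+#
+# e.g. (illustrative; see the header comment above for details):
+#   PS1='\w$(__git_ps1 " (%s)")\$ '                 # command substitution
+#   PROMPT_COMMAND='__git_ps1 "\w" "\\\$ " "|%s"'   # PROMPT_COMMAND mode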
+__git_ps1 ()
+{
+ local pcmode=no
+ local detached=no
+ local ps1pc_start='\u@\h:\w '
+ local ps1pc_end='\$ '
+ local printf_format=' (%s)'
+
+ case "$#" in
+ 2|3) pcmode=yes
+ ps1pc_start="$1"
+ ps1pc_end="$2"
+ printf_format="${3:-$printf_format}"
+ ;;
+ 0|1) printf_format="${1:-$printf_format}"
+ ;;
+ *) return
+ ;;
+ esac
+
+ local g="$(__gitdir)"
+ if [ -z "$g" ]; then
+ if [ $pcmode = yes ]; then
+			# In PC mode, PS1 always needs to be set
+ PS1="$ps1pc_start$ps1pc_end"
+ fi
+ else
+ local r=""
+ local b=""
+ local step=""
+ local total=""
+ if [ -d "$g/rebase-merge" ]; then
+ b="$(cat "$g/rebase-merge/head-name")"
+ step=$(cat "$g/rebase-merge/msgnum")
+ total=$(cat "$g/rebase-merge/end")
+ if [ -f "$g/rebase-merge/interactive" ]; then
+ r="|REBASE-i"
+ else
+ r="|REBASE-m"
+ fi
+ else
+ if [ -d "$g/rebase-apply" ]; then
+ step=$(cat "$g/rebase-apply/next")
+ total=$(cat "$g/rebase-apply/last")
+ if [ -f "$g/rebase-apply/rebasing" ]; then
+ b="$(cat "$g/rebase-apply/head-name")"
+ r="|REBASE"
+ elif [ -f "$g/rebase-apply/applying" ]; then
+ r="|AM"
+ else
+ r="|AM/REBASE"
+ fi
+ elif [ -f "$g/MERGE_HEAD" ]; then
+ r="|MERGING"
+ elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
+ r="|CHERRY-PICKING"
+ elif [ -f "$g/REVERT_HEAD" ]; then
+ r="|REVERTING"
+ elif [ -f "$g/BISECT_LOG" ]; then
+ r="|BISECTING"
+ fi
+
+ test -n "$b" ||
+ b="$(git symbolic-ref HEAD 2>/dev/null)" || {
+ detached=yes
+ b="$(
+ case "${GIT_PS1_DESCRIBE_STYLE-}" in
+ (contains)
+ git describe --contains HEAD ;;
+ (branch)
+ git describe --contains --all HEAD ;;
+ (describe)
+ git describe HEAD ;;
+ (* | default)
+ git describe --tags --exact-match HEAD ;;
+ esac 2>/dev/null)" ||
+
+ b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." ||
+ b="unknown"
+ b="($b)"
+ }
+ fi
+
+ if [ -n "$step" ] && [ -n "$total" ]; then
+ r="$r $step/$total"
+ fi
+
+ local w=""
+ local i=""
+ local s=""
+ local u=""
+ local c=""
+ local p=""
+
+ if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then
+ if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then
+ c="BARE:"
+ else
+ b="GIT_DIR!"
+ fi
+ elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
+ if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
+ [ "$(git config --bool bash.showDirtyState)" != "false" ]
+ then
+ git diff --no-ext-diff --quiet --exit-code || w="*"
+ if git rev-parse --quiet --verify HEAD >/dev/null; then
+ git diff-index --cached --quiet HEAD -- || i="+"
+ else
+ i="#"
+ fi
+ fi
+ if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
+ git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
+ fi
+
+ if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
+ [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
+ [ -n "$(git ls-files --others --exclude-standard)" ]
+ then
+ u="%${ZSH_VERSION+%}"
+ fi
+
+ if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
+ __git_ps1_show_upstream
+ fi
+ fi
+
+ local z="${GIT_PS1_STATESEPARATOR-" "}"
+ local f="$w$i$s$u"
+ if [ $pcmode = yes ]; then
+ local gitstring=
+ if [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
+ __git_ps1_colorize_gitstring
+ else
+ gitstring="$c${b##refs/heads/}${f:+$z$f}$r$p"
+ fi
+ gitstring=$(printf -- "$printf_format" "$gitstring")
+ PS1="$ps1pc_start$gitstring$ps1pc_end"
+ else
+ # NO color option unless in PROMPT_COMMAND mode
+ printf -- "$printf_format" "$c${b##refs/heads/}${f:+$z$f}$r$p"
+ fi
+ fi
+}
diff --git a/contrib/convert-objects/convert-objects.c b/contrib/convert-objects/convert-objects.c
new file mode 100644
index 0000000..f3b57bf
--- /dev/null
+++ b/contrib/convert-objects/convert-objects.c
@@ -0,0 +1,329 @@
+#include "cache.h"
+#include "blob.h"
+#include "commit.h"
+#include "tree.h"
+
+struct entry {
+ unsigned char old_sha1[20];
+ unsigned char new_sha1[20];
+ int converted;
+};
+
+#define MAXOBJECTS (1000000)
+
+static struct entry *convert[MAXOBJECTS];
+static int nr_convert;
+
+static struct entry * convert_entry(unsigned char *sha1);
+
+static struct entry *insert_new(unsigned char *sha1, int pos)
+{
+ struct entry *new = xcalloc(1, sizeof(struct entry));
+ hashcpy(new->old_sha1, sha1);
+ memmove(convert + pos + 1, convert + pos, (nr_convert - pos) * sizeof(struct entry *));
+ convert[pos] = new;
+ nr_convert++;
+ if (nr_convert == MAXOBJECTS)
+ die("you're kidding me - hit maximum object limit");
+ return new;
+}
+
+static struct entry *lookup_entry(unsigned char *sha1)
+{
+ int low = 0, high = nr_convert;
+
+ while (low < high) {
+ int next = (low + high) / 2;
+ struct entry *n = convert[next];
+ int cmp = hashcmp(sha1, n->old_sha1);
+ if (!cmp)
+ return n;
+ if (cmp < 0) {
+ high = next;
+ continue;
+ }
+ low = next+1;
+ }
+ return insert_new(sha1, low);
+}
+
+static void convert_binary_sha1(void *buffer)
+{
+ struct entry *entry = convert_entry(buffer);
+ hashcpy(buffer, entry->new_sha1);
+}
+
+static void convert_ascii_sha1(void *buffer)
+{
+ unsigned char sha1[20];
+ struct entry *entry;
+
+ if (get_sha1_hex(buffer, sha1))
+ die("expected sha1, got '%s'", (char *) buffer);
+ entry = convert_entry(sha1);
+ memcpy(buffer, sha1_to_hex(entry->new_sha1), 40);
+}
+
+static unsigned int convert_mode(unsigned int mode)
+{
+ unsigned int newmode;
+
+ newmode = mode & S_IFMT;
+ if (S_ISREG(mode))
+ newmode |= (mode & 0100) ? 0755 : 0644;
+ return newmode;
+}
+
+static int write_subdirectory(void *buffer, unsigned long size, const char *base, int baselen, unsigned char *result_sha1)
+{
+ char *new = xmalloc(size);
+ unsigned long newlen = 0;
+ unsigned long used;
+
+ used = 0;
+ while (size) {
+ int len = 21 + strlen(buffer);
+ char *path = strchr(buffer, ' ');
+ unsigned char *sha1;
+ unsigned int mode;
+ char *slash, *origpath;
+
+ if (!path || strtoul_ui(buffer, 8, &mode))
+ die("bad tree conversion");
+ mode = convert_mode(mode);
+ path++;
+ if (memcmp(path, base, baselen))
+ break;
+ origpath = path;
+ path += baselen;
+ slash = strchr(path, '/');
+ if (!slash) {
+ newlen += sprintf(new + newlen, "%o %s", mode, path);
+ new[newlen++] = '\0';
+ hashcpy((unsigned char *)new + newlen, (unsigned char *) buffer + len - 20);
+ newlen += 20;
+
+ used += len;
+ size -= len;
+ buffer = (char *) buffer + len;
+ continue;
+ }
+
+ newlen += sprintf(new + newlen, "%o %.*s", S_IFDIR, (int)(slash - path), path);
+ new[newlen++] = 0;
+ sha1 = (unsigned char *)(new + newlen);
+ newlen += 20;
+
+ len = write_subdirectory(buffer, size, origpath, slash-origpath+1, sha1);
+
+ used += len;
+ size -= len;
+ buffer = (char *) buffer + len;
+ }
+
+ write_sha1_file(new, newlen, tree_type, result_sha1);
+ free(new);
+ return used;
+}
+
+static void convert_tree(void *buffer, unsigned long size, unsigned char *result_sha1)
+{
+ void *orig_buffer = buffer;
+ unsigned long orig_size = size;
+
+ while (size) {
+ size_t len = 1+strlen(buffer);
+
+ convert_binary_sha1((char *) buffer + len);
+
+ len += 20;
+ if (len > size)
+ die("corrupt tree object");
+ size -= len;
+ buffer = (char *) buffer + len;
+ }
+
+ write_subdirectory(orig_buffer, orig_size, "", 0, result_sha1);
+}
+
+static unsigned long parse_oldstyle_date(const char *buf)
+{
+ char c, *p;
+ char buffer[100];
+ struct tm tm;
+ const char *formats[] = {
+ "%c",
+ "%a %b %d %T",
+ "%Z",
+ "%Y",
+ " %Y",
+ NULL
+ };
+	/* We only ever did a few timezones in the bad old format .. */
+ const char *timezones[] = {
+ "PDT", "PST", "CEST", NULL
+ };
+ const char **fmt = formats;
+
+ p = buffer;
+ while (isspace(c = *buf))
+ buf++;
+ while ((c = *buf++) != '\n')
+ *p++ = c;
+ *p++ = 0;
+ buf = buffer;
+ memset(&tm, 0, sizeof(tm));
+ do {
+ const char *next = strptime(buf, *fmt, &tm);
+ if (next) {
+ if (!*next)
+ return mktime(&tm);
+ buf = next;
+ } else {
+ const char **p = timezones;
+ while (isspace(*buf))
+ buf++;
+ while (*p) {
+ if (!memcmp(buf, *p, strlen(*p))) {
+ buf += strlen(*p);
+ break;
+ }
+ p++;
+ }
+ }
+ fmt++;
+ } while (*buf && *fmt);
+ printf("left: %s\n", buf);
+ return mktime(&tm);
+}
+
+static int convert_date_line(char *dst, void **buf, unsigned long *sp)
+{
+ unsigned long size = *sp;
+ char *line = *buf;
+ char *next = strchr(line, '\n');
+ char *date = strchr(line, '>');
+ int len;
+
+ if (!next || !date)
+ die("missing or bad author/committer line %s", line);
+ next++; date += 2;
+
+ *buf = next;
+ *sp = size - (next - line);
+
+ len = date - line;
+ memcpy(dst, line, len);
+ dst += len;
+
+ /* Is it already in new format? */
+ if (isdigit(*date)) {
+ int datelen = next - date;
+ memcpy(dst, date, datelen);
+ return len + datelen;
+ }
+
+ /*
+ * Hacky hacky: one of the sparse old-style commits does not have
+ * any date at all, but we can fake it by using the committer date.
+ */
+ if (*date == '\n' && strchr(next, '>'))
+ date = strchr(next, '>')+2;
+
+ return len + sprintf(dst, "%lu -0700\n", parse_oldstyle_date(date));
+}
+
+static void convert_date(void *buffer, unsigned long size, unsigned char *result_sha1)
+{
+ char *new = xmalloc(size + 100);
+ unsigned long newlen = 0;
+
+ /* "tree <sha1>\n" */
+ memcpy(new + newlen, buffer, 46);
+ newlen += 46;
+ buffer = (char *) buffer + 46;
+ size -= 46;
+
+ /* "parent <sha1>\n" */
+ while (!memcmp(buffer, "parent ", 7)) {
+ memcpy(new + newlen, buffer, 48);
+ newlen += 48;
+ buffer = (char *) buffer + 48;
+ size -= 48;
+ }
+
+ /* "author xyz <xyz> date" */
+ newlen += convert_date_line(new + newlen, &buffer, &size);
+ /* "committer xyz <xyz> date" */
+ newlen += convert_date_line(new + newlen, &buffer, &size);
+
+ /* Rest */
+ memcpy(new + newlen, buffer, size);
+ newlen += size;
+
+ write_sha1_file(new, newlen, commit_type, result_sha1);
+ free(new);
+}
+
+static void convert_commit(void *buffer, unsigned long size, unsigned char *result_sha1)
+{
+ void *orig_buffer = buffer;
+ unsigned long orig_size = size;
+
+ if (memcmp(buffer, "tree ", 5))
+ die("Bad commit '%s'", (char *) buffer);
+ convert_ascii_sha1((char *) buffer + 5);
+ buffer = (char *) buffer + 46; /* "tree " + "hex sha1" + "\n" */
+ while (!memcmp(buffer, "parent ", 7)) {
+ convert_ascii_sha1((char *) buffer + 7);
+ buffer = (char *) buffer + 48;
+ }
+ convert_date(orig_buffer, orig_size, result_sha1);
+}
+
+static struct entry * convert_entry(unsigned char *sha1)
+{
+ struct entry *entry = lookup_entry(sha1);
+ enum object_type type;
+ void *buffer, *data;
+ unsigned long size;
+
+ if (entry->converted)
+ return entry;
+ data = read_sha1_file(sha1, &type, &size);
+ if (!data)
+ die("unable to read object %s", sha1_to_hex(sha1));
+
+ buffer = xmalloc(size);
+ memcpy(buffer, data, size);
+
+ if (type == OBJ_BLOB) {
+ write_sha1_file(buffer, size, blob_type, entry->new_sha1);
+ } else if (type == OBJ_TREE)
+ convert_tree(buffer, size, entry->new_sha1);
+ else if (type == OBJ_COMMIT)
+ convert_commit(buffer, size, entry->new_sha1);
+ else
+ die("unknown object type %d in %s", type, sha1_to_hex(sha1));
+ entry->converted = 1;
+ free(buffer);
+ free(data);
+ return entry;
+}
+
+int main(int argc, char **argv)
+{
+ unsigned char sha1[20];
+ struct entry *entry;
+
+ setup_git_directory();
+
+ if (argc != 2)
+ usage("git-convert-objects <sha1>");
+ if (get_sha1(argv[1], sha1))
+ die("Not a valid object name %s", argv[1]);
+
+ entry = convert_entry(sha1);
+ printf("new sha1: %s\n", sha1_to_hex(entry->new_sha1));
+ return 0;
+}
diff --git a/contrib/convert-objects/git-convert-objects.txt b/contrib/convert-objects/git-convert-objects.txt
new file mode 100644
index 0000000..0565d83
--- /dev/null
+++ b/contrib/convert-objects/git-convert-objects.txt
@@ -0,0 +1,29 @@
+git-convert-objects(1)
+======================
+
+NAME
+----
+git-convert-objects - Converts an old-style git repository
+
+
+SYNOPSIS
+--------
+[verse]
+'git-convert-objects'
+
+DESCRIPTION
+-----------
+Converts an old-style git repository to the latest format.
+
+
+Author
+------
+Written by Linus Torvalds <torvalds@osdl.org>
+
+Documentation
+--------------
+Documentation by David Greaves, Junio C Hamano and the git-list <git@vger.kernel.org>.
+
+GIT
+---
+Part of the gitlink:git[7] suite
diff --git a/contrib/credential/gnome-keyring/.gitignore b/contrib/credential/gnome-keyring/.gitignore
new file mode 100644
index 0000000..88d8fcd
--- /dev/null
+++ b/contrib/credential/gnome-keyring/.gitignore
@@ -0,0 +1 @@
+git-credential-gnome-keyring
diff --git a/contrib/credential/gnome-keyring/Makefile b/contrib/credential/gnome-keyring/Makefile
new file mode 100644
index 0000000..e6561d8
--- /dev/null
+++ b/contrib/credential/gnome-keyring/Makefile
@@ -0,0 +1,24 @@
+MAIN:=git-credential-gnome-keyring
+all:: $(MAIN)
+
+CC = gcc
+RM = rm -f
+CFLAGS = -g -O2 -Wall
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+INCS:=$(shell pkg-config --cflags gnome-keyring-1)
+LIBS:=$(shell pkg-config --libs gnome-keyring-1)
+
+SRCS:=$(MAIN).c
+OBJS:=$(SRCS:.c=.o)
+
+%.o: %.c
+ $(CC) $(CFLAGS) $(CPPFLAGS) $(INCS) -o $@ -c $<
+
+$(MAIN): $(OBJS)
+ $(CC) -o $@ $(LDFLAGS) $^ $(LIBS)
+
+clean:
+ @$(RM) $(MAIN) $(OBJS)
diff --git a/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
new file mode 100644
index 0000000..f2cdefe
--- /dev/null
+++ b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
@@ -0,0 +1,445 @@
+/*
+ * Copyright (C) 2011 John Szakmeister <john@szakmeister.net>
+ * 2012 Philipp A. Hartmann <pah@qo.cx>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+/*
+ * Credits:
+ * - GNOME Keyring API handling originally written by John Szakmeister
+ * - ported to credential helper API by Philipp A. Hartmann
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <gnome-keyring.h>
+
+/*
+ * This credential struct and API is simplified from git's credential.{h,c}
+ */
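+
+/*
+ * The helper speaks git's credential protocol on stdin/stdout: one
+ * "key=value" attribute per line, terminated by a blank line.  For
+ * example (the values below are illustrative only), a "get" request
+ * could look like
+ *
+ *   protocol=https
+ *   host=example.com
+ *   username=alice
+ *
+ * and the reply carries at most "username=..." and "password=..." lines.
+ */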
+struct credential
+{
+ char *protocol;
+ char *host;
+ unsigned short port;
+ char *path;
+ char *username;
+ char *password;
+};
+
+#define CREDENTIAL_INIT \
+ { NULL,NULL,0,NULL,NULL,NULL }
+
+void credential_init(struct credential *c);
+void credential_clear(struct credential *c);
+int credential_read(struct credential *c);
+void credential_write(const struct credential *c);
+
+typedef int (*credential_op_cb)(struct credential*);
+
+struct credential_operation
+{
+ char *name;
+ credential_op_cb op;
+};
+
+#define CREDENTIAL_OP_END \
+ { NULL,NULL }
+
+/*
+ * Table with operation callbacks is defined in concrete
+ * credential helper implementation and contains entries
+ * like { "get", function_to_get_credential } terminated
+ * by CREDENTIAL_OP_END.
+ */
+struct credential_operation const credential_helper_ops[];
+
+/* ---------------- common helper functions ----------------- */
+
+static inline void free_password(char *password)
+{
+ char *c = password;
+ if (!password)
+ return;
+
+ while (*c) *c++ = '\0';
+ free(password);
+}
+
+static inline void warning(const char *fmt, ...)
+{
+ va_list ap;
+
+ va_start(ap, fmt);
+ fprintf(stderr, "warning: ");
+ vfprintf(stderr, fmt, ap);
+ fprintf(stderr, "\n" );
+ va_end(ap);
+}
+
+static inline void error(const char *fmt, ...)
+{
+ va_list ap;
+
+ va_start(ap, fmt);
+ fprintf(stderr, "error: ");
+ vfprintf(stderr, fmt, ap);
+ fprintf(stderr, "\n" );
+ va_end(ap);
+}
+
+static inline void die(const char *fmt, ...)
+{
+	va_list ap;
+
+	va_start(ap, fmt);
+	/*
+	 * Note: a va_list cannot be forwarded through the variadic error()
+	 * helper above, so print the message directly here.
+	 */
+	fprintf(stderr, "error: ");
+	vfprintf(stderr, fmt, ap);
+	fprintf(stderr, "\n");
+	va_end(ap);
+	exit(EXIT_FAILURE);
+}
+
+static inline void die_errno(int err)
+{
+ error("%s", strerror(err));
+ exit(EXIT_FAILURE);
+}
+
+static inline char *xstrdup(const char *str)
+{
+ char *ret = strdup(str);
+ if (!ret)
+ die_errno(errno);
+
+ return ret;
+}
+
+/* ----------------- GNOME Keyring functions ----------------- */
+
+/* create a special keyring option string, if path is given */
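+/* e.g. "example.com:8080/path/to/repo.git" (illustrative host/port/path) */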
+static char* keyring_object(struct credential *c)
+{
+ char* object = NULL;
+
+ if (!c->path)
+ return object;
+
+ object = (char*) malloc(strlen(c->host)+strlen(c->path)+8);
+ if(!object)
+ die_errno(errno);
+
+ if(c->port)
+		sprintf(object,"%s:%hu/%s",c->host,c->port,c->path);
+ else
+ sprintf(object,"%s/%s",c->host,c->path);
+
+ return object;
+}
+
+int keyring_get(struct credential *c)
+{
+ char* object = NULL;
+ GList *entries;
+ GnomeKeyringNetworkPasswordData *password_data;
+ GnomeKeyringResult result;
+
+ if (!c->protocol || !(c->host || c->path))
+ return EXIT_FAILURE;
+
+ object = keyring_object(c);
+
+ result = gnome_keyring_find_network_password_sync(
+ c->username,
+ NULL /* domain */,
+ c->host,
+ object,
+ c->protocol,
+ NULL /* authtype */,
+ c->port,
+ &entries);
+
+ free(object);
+
+ if (result == GNOME_KEYRING_RESULT_NO_MATCH)
+ return EXIT_SUCCESS;
+
+ if (result == GNOME_KEYRING_RESULT_CANCELLED)
+ return EXIT_SUCCESS;
+
+ if (result != GNOME_KEYRING_RESULT_OK) {
+ error("%s",gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
+ /* pick the first one from the list */
+ password_data = (GnomeKeyringNetworkPasswordData *) entries->data;
+
+ free_password(c->password);
+ c->password = xstrdup(password_data->password);
+
+ if (!c->username)
+ c->username = xstrdup(password_data->user);
+
+ gnome_keyring_network_password_list_free(entries);
+
+ return EXIT_SUCCESS;
+}
+
+
+int keyring_store(struct credential *c)
+{
+ guint32 item_id;
+ char *object = NULL;
+
+ /*
+ * Sanity check that what we are storing is actually sensible.
+ * In particular, we can't make a URL without a protocol field.
+ * Without either a host or pathname (depending on the scheme),
+ * we have no primary key. And without a username and password,
+ * we are not actually storing a credential.
+ */
+ if (!c->protocol || !(c->host || c->path) ||
+ !c->username || !c->password)
+ return EXIT_FAILURE;
+
+ object = keyring_object(c);
+
+ gnome_keyring_set_network_password_sync(
+ GNOME_KEYRING_DEFAULT,
+ c->username,
+ NULL /* domain */,
+ c->host,
+ object,
+ c->protocol,
+ NULL /* authtype */,
+ c->port,
+ c->password,
+ &item_id);
+
+ free(object);
+ return EXIT_SUCCESS;
+}
+
+int keyring_erase(struct credential *c)
+{
+ char *object = NULL;
+ GList *entries;
+ GnomeKeyringNetworkPasswordData *password_data;
+ GnomeKeyringResult result;
+
+ /*
+ * Sanity check that we actually have something to match
+ * against. The input we get is a restrictive pattern,
+ * so technically a blank credential means "erase everything".
+ * But it is too easy to accidentally send this, since it is equivalent
+ * to empty input. So explicitly disallow it, and require that the
+ * pattern have some actual content to match.
+ */
+ if (!c->protocol && !c->host && !c->path && !c->username)
+ return EXIT_FAILURE;
+
+ object = keyring_object(c);
+
+ result = gnome_keyring_find_network_password_sync(
+ c->username,
+ NULL /* domain */,
+ c->host,
+ object,
+ c->protocol,
+ NULL /* authtype */,
+ c->port,
+ &entries);
+
+ free(object);
+
+ if (result == GNOME_KEYRING_RESULT_NO_MATCH)
+ return EXIT_SUCCESS;
+
+ if (result == GNOME_KEYRING_RESULT_CANCELLED)
+ return EXIT_SUCCESS;
+
+ if (result != GNOME_KEYRING_RESULT_OK)
+ {
+ error("%s",gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
+ /* pick the first one from the list (delete all matches?) */
+ password_data = (GnomeKeyringNetworkPasswordData *) entries->data;
+
+ result = gnome_keyring_item_delete_sync(
+ password_data->keyring, password_data->item_id);
+
+ gnome_keyring_network_password_list_free(entries);
+
+ if (result != GNOME_KEYRING_RESULT_OK)
+ {
+ error("%s",gnome_keyring_result_to_message(result));
+ return EXIT_FAILURE;
+ }
+
+ return EXIT_SUCCESS;
+}
+
+/*
+ * Table with helper operation callbacks, used by generic
+ * credential helper main function.
+ */
+struct credential_operation const credential_helper_ops[] =
+{
+ { "get", keyring_get },
+ { "store", keyring_store },
+ { "erase", keyring_erase },
+ CREDENTIAL_OP_END
+};
+
+/* ------------------ credential functions ------------------ */
+
+void credential_init(struct credential *c)
+{
+ memset(c, 0, sizeof(*c));
+}
+
+void credential_clear(struct credential *c)
+{
+ free(c->protocol);
+ free(c->host);
+ free(c->path);
+ free(c->username);
+ free_password(c->password);
+
+ credential_init(c);
+}
+
+int credential_read(struct credential *c)
+{
+ char buf[1024];
+ ssize_t line_len = 0;
+ char *key = buf;
+ char *value;
+
+ while (fgets(buf, sizeof(buf), stdin))
+ {
+ line_len = strlen(buf);
+
+ if(buf[line_len-1]=='\n')
+ buf[--line_len]='\0';
+
+ if(!line_len)
+ break;
+
+ value = strchr(buf,'=');
+ if(!value) {
+ warning("invalid credential line: %s", key);
+ return -1;
+ }
+ *value++ = '\0';
+
+ if (!strcmp(key, "protocol")) {
+ free(c->protocol);
+ c->protocol = xstrdup(value);
+ } else if (!strcmp(key, "host")) {
+ free(c->host);
+ c->host = xstrdup(value);
+ value = strrchr(c->host,':');
+ if (value) {
+ *value++ = '\0';
+ c->port = atoi(value);
+ }
+ } else if (!strcmp(key, "path")) {
+ free(c->path);
+ c->path = xstrdup(value);
+ } else if (!strcmp(key, "username")) {
+ free(c->username);
+ c->username = xstrdup(value);
+ } else if (!strcmp(key, "password")) {
+ free_password(c->password);
+ c->password = xstrdup(value);
+ while (*value) *value++ = '\0';
+ }
+ /*
+ * Ignore other lines; we don't know what they mean, but
+ * this future-proofs us when later versions of git do
+ * learn new lines, and the helpers are updated to match.
+ */
+ }
+ return 0;
+}
+
+void credential_write_item(FILE *fp, const char *key, const char *value)
+{
+ if (!value)
+ return;
+ fprintf(fp, "%s=%s\n", key, value);
+}
+
+void credential_write(const struct credential *c)
+{
+ /* only write username/password, if set */
+ credential_write_item(stdout, "username", c->username);
+ credential_write_item(stdout, "password", c->password);
+}
+
+static void usage(const char *name)
+{
+ struct credential_operation const *try_op = credential_helper_ops;
+ const char *basename = strrchr(name,'/');
+
+ basename = (basename) ? basename + 1 : name;
+ fprintf(stderr, "usage: %s <", basename);
+ while(try_op->name) {
+ fprintf(stderr,"%s",(try_op++)->name);
+ if(try_op->name)
+ fprintf(stderr,"%s","|");
+ }
+ fprintf(stderr,"%s",">\n");
+}
+
+int main(int argc, char *argv[])
+{
+ int ret = EXIT_SUCCESS;
+
+ struct credential_operation const *try_op = credential_helper_ops;
+ struct credential cred = CREDENTIAL_INIT;
+
+ if (!argv[1]) {
+ usage(argv[0]);
+ goto out;
+ }
+
+ /* lookup operation callback */
+ while(try_op->name && strcmp(argv[1], try_op->name))
+ try_op++;
+
+ /* unsupported operation given -- ignore silently */
+ if(!try_op->name || !try_op->op)
+ goto out;
+
+ ret = credential_read(&cred);
+ if(ret)
+ goto out;
+
+ /* perform credential operation */
+ ret = (*try_op->op)(&cred);
+
+ credential_write(&cred);
+
+out:
+ credential_clear(&cred);
+ return ret;
+}
diff --git a/contrib/credential/netrc/Makefile b/contrib/credential/netrc/Makefile
new file mode 100644
index 0000000..51b7613
--- /dev/null
+++ b/contrib/credential/netrc/Makefile
@@ -0,0 +1,5 @@
+test:
+ ./test.pl
+
+testverbose:
+ ./test.pl -d -v
diff --git a/contrib/credential/netrc/git-credential-netrc b/contrib/credential/netrc/git-credential-netrc
new file mode 100755
index 0000000..6c51c43
--- /dev/null
+++ b/contrib/credential/netrc/git-credential-netrc
@@ -0,0 +1,421 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use File::Basename;
+
+my $VERSION = "0.1";
+
+my %options = (
+ help => 0,
+ debug => 0,
+ verbose => 0,
+ insecure => 0,
+ file => [],
+
+ # identical token maps, e.g. host -> host, will be inserted later
+ tmap => {
+ port => 'protocol',
+ machine => 'host',
+ path => 'path',
+ login => 'username',
+ user => 'username',
+ password => 'password',
+ }
+ );
+
+# Map each credential protocol token to itself on the netrc side.
+foreach (values %{$options{tmap}}) {
+ $options{tmap}->{$_} = $_;
+}
+
+# Now, $options{tmap} has a mapping from the netrc format to the Git credential
+# helper protocol.
+
+# Next, we build the reverse token map.
+
+# When $rmap{foo} contains 'bar', that means that what the Git credential helper
+# protocol calls 'bar' is found as 'foo' in the netrc/authinfo file. Keys in
+# %rmap are what we expect to read from the netrc/authinfo file.
+
+my %rmap;
+foreach my $k (keys %{$options{tmap}}) {
+ push @{$rmap{$options{tmap}->{$k}}}, $k;
+}
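+
+# For instance, with the default map above, $rmap{username} ends up holding
+# ('login', 'user', 'username') (in no particular order): any of those
+# netrc/authinfo tokens supplies the credential "username" attribute.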
+
+Getopt::Long::Configure("bundling");
+
+# TODO: maybe allow the token map $options{tmap} to be configurable.
+GetOptions(\%options,
+ "help|h",
+ "debug|d",
+ "insecure|k",
+ "verbose|v",
+ "file|f=s@",
+ );
+
+if ($options{help}) {
+ my $shortname = basename($0);
+ $shortname =~ s/git-credential-//;
+
+ print <<EOHIPPUS;
+
+$0 [-f AUTHFILE1] [-f AUTHFILEN] [-d] [-v] [-k] get
+
+Version $VERSION by tzz\@lifelogs.com. License: BSD.
+
+Options:
+
+ -f|--file AUTHFILE : specify netrc-style files. Files with the .gpg extension
+ will be decrypted by GPG before parsing. Multiple -f
+ arguments are OK. They are processed in order, and the
+ first matching entry found is returned via the credential
+ helper protocol (see below).
+
+ When no -f option is given, .authinfo.gpg, .netrc.gpg,
+ .authinfo, and .netrc files in your home directory are used
+ in this order.
+
+ -k|--insecure : ignore bad file ownership or permissions
+
+ -d|--debug : turn on debugging (developer info)
+
+ -v|--verbose : be more verbose (show files and information found)
+
+To enable this credential helper:
+
+ git config credential.helper '$shortname -f AUTHFILE1 -f AUTHFILE2'
+
+(Note that Git will prepend "git-credential-" to the helper name and look for it
+in the path.)
+
+...and if you want lots of debugging info:
+
+ git config credential.helper '$shortname -f AUTHFILE -d'
+
+...or to see the files opened and data found:
+
+ git config credential.helper '$shortname -f AUTHFILE -v'
+
+Only "get" mode is supported by this credential helper. It opens every AUTHFILE
+and looks for the first entry that matches the requested search criteria:
+
+ 'port|protocol':
+ The protocol that will be used (e.g., https). (protocol=X)
+
+ 'machine|host':
+ The remote hostname for a network credential. (host=X)
+
+ 'path':
+ The path with which the credential will be used. (path=X)
+
+ 'login|user|username':
+	The credential's username, if we already have one. (username=X)
+
+Thus, when we get this query on STDIN:
+
+host=github.com
+protocol=https
+username=tzz
+
+this credential helper will look for the first entry in every AUTHFILE that
+matches
+
+machine github.com port https login tzz
+
+OR
+
+machine github.com protocol https login tzz
+
+OR... and so on, for the other acceptable tokens listed above. Any unknown
+tokens are simply ignored.
+
+Then, the helper will print out whatever tokens it got from the entry, including
+"password" tokens, mapping back to Git's helper protocol; e.g. "port" is mapped
+back to "protocol". Any redundant entry tokens (part of the original query) are
+skipped.
+
+Again, note that only the first matching entry from all the AUTHFILEs, processed
+in the sequence given on the command line, is used.
+
+Netrc/authinfo tokens can be quoted as 'STRING' or "STRING".
+
+No caching is performed by this credential helper.
+
+EOHIPPUS
+
+ exit 0;
+}
+
+my $mode = shift @ARGV;
+
+# Credentials must get a parameter, so die if it's missing.
+die "Syntax: $0 [-f AUTHFILE1] [-f AUTHFILEN] [-d] get" unless defined $mode;
+
+# Only support 'get' mode; with any other unsupported ones we just exit.
+exit 0 unless $mode eq 'get';
+
+my $files = $options{file};
+
+# if no files were given, use a predefined list.
+# note that .gpg files come first
+unless (scalar @$files) {
+ my @candidates = qw[
+ ~/.authinfo.gpg
+ ~/.netrc.gpg
+ ~/.authinfo
+ ~/.netrc
+ ];
+
+ $files = $options{file} = [ map { glob $_ } @candidates ];
+}
+
+my $query = read_credential_data_from_stdin();
+
+FILE:
+foreach my $file (@$files) {
+ my $gpgmode = $file =~ m/\.gpg$/;
+ unless (-r $file) {
+ log_verbose("Unable to read $file; skipping it");
+ next FILE;
+ }
+
+ # the following check is copied from Net::Netrc, for non-GPG files
+ # OS/2 and Win32 do not handle stat in a way compatible with this check :-(
+ unless ($gpgmode || $options{insecure} ||
+ $^O eq 'os2'
+ || $^O eq 'MSWin32'
+ || $^O eq 'MacOS'
+ || $^O =~ /^cygwin/) {
+ my @stat = stat($file);
+
+ if (@stat) {
+ if ($stat[2] & 077) {
+ log_verbose("Insecure $file (mode=%04o); skipping it",
+ $stat[2] & 07777);
+ next FILE;
+ }
+
+ if ($stat[4] != $<) {
+ log_verbose("Not owner of $file; skipping it");
+ next FILE;
+ }
+ }
+ }
+
+ my @entries = load_netrc($file, $gpgmode);
+
+ unless (scalar @entries) {
+ if ($!) {
+ log_verbose("Unable to open $file: $!");
+ } else {
+ log_verbose("No netrc entries found in $file");
+ }
+
+ next FILE;
+ }
+
+ my $entry = find_netrc_entry($query, @entries);
+ if ($entry) {
+ print_credential_data($entry, $query);
+ # we're done!
+ last FILE;
+ }
+}
+
+exit 0;
+
+sub load_netrc {
+ my $file = shift @_;
+ my $gpgmode = shift @_;
+
+ my $io;
+ if ($gpgmode) {
+ my @cmd = (qw(gpg --decrypt), $file);
+ log_verbose("Using GPG to open $file: [@cmd]");
+ open $io, "-|", @cmd;
+ } else {
+ log_verbose("Opening $file...");
+ open $io, '<', $file;
+ }
+
+ # nothing to do if the open failed (we log the error later)
+ return unless $io;
+
+ # Net::Netrc does this, but the functionality is merged with the file
+ # detection logic, so we have to extract just the part we need
+ my @netrc_entries = net_netrc_loader($io);
+
+ # these entries will use the credential helper protocol token names
+ my @entries;
+
+ foreach my $nentry (@netrc_entries) {
+ my %entry;
+ my $num_port;
+
+ if (!defined $nentry->{machine}) {
+ next;
+ }
+ if (defined $nentry->{port} && $nentry->{port} =~ m/^\d+$/) {
+ $num_port = $nentry->{port};
+ delete $nentry->{port};
+ }
+
+ # create the new entry for the credential helper protocol
+ $entry{$options{tmap}->{$_}} = $nentry->{$_} foreach keys %$nentry;
+
+ # for "host X port Y" where Y is an integer (captured by
+ # $num_port above), set the host to "X:Y"
+ if (defined $entry{host} && defined $num_port) {
+ $entry{host} = join(':', $entry{host}, $num_port);
+ }
+
+ push @entries, \%entry;
+ }
+
+ return @entries;
+}
+
+sub net_netrc_loader {
+ my $fh = shift @_;
+ my @entries;
+ my ($mach, $macdef, $tok, @tok);
+
+ LINE:
+ while (<$fh>) {
+ undef $macdef if /\A\n\Z/;
+
+ if ($macdef) {
+ next LINE;
+ }
+
+ s/^\s*//;
+ chomp;
+
+ while (length && s/^("((?:[^"]+|\\.)*)"|((?:[^\\\s]+|\\.)*))\s*//) {
+ (my $tok = $+) =~ s/\\(.)/$1/g;
+ push(@tok, $tok);
+ }
+
+ TOKEN:
+ while (@tok) {
+ if ($tok[0] eq "default") {
+ shift(@tok);
+ $mach = { machine => undef };
+ next TOKEN;
+ }
+
+ $tok = shift(@tok);
+
+ if ($tok eq "machine") {
+ my $host = shift @tok;
+ $mach = { machine => $host };
+ push @entries, $mach;
+ } elsif (exists $options{tmap}->{$tok}) {
+ unless ($mach) {
+ log_debug("Skipping token $tok because no machine was given");
+ next TOKEN;
+ }
+
+ my $value = shift @tok;
+ unless (defined $value) {
+ log_debug("Token $tok had no value, skipping it.");
+ next TOKEN;
+ }
+
+ # Following line added by rmerrell to remove '/' escape char in .netrc
+ $value =~ s/\/\\/\\/g;
+ $mach->{$tok} = $value;
+ } elsif ($tok eq "macdef") { # we ignore macros
+ next TOKEN unless $mach;
+ my $value = shift @tok;
+ $macdef = 1;
+ }
+ }
+ }
+
+ return @entries;
+}
+
+sub read_credential_data_from_stdin {
+ # the query: start with every token with no value
+ my %q = map { $_ => undef } values(%{$options{tmap}});
+
+ while (<STDIN>) {
+ next unless m/^([^=]+)=(.+)/;
+
+ my ($token, $value) = ($1, $2);
+ die "Unknown search token $token" unless exists $q{$token};
+ $q{$token} = $value;
+ log_debug("We were given search token $token and value $value");
+ }
+
+ foreach (sort keys %q) {
+ log_debug("Searching for %s = %s", $_, $q{$_} || '(any value)');
+ }
+
+ return \%q;
+}
+
+# takes the search tokens and then a list of entries
+# each entry is a hash reference
+sub find_netrc_entry {
+ my $query = shift @_;
+
+ ENTRY:
+ foreach my $entry (@_)
+ {
+ my $entry_text = join ', ', map { "$_=$entry->{$_}" } keys %$entry;
+ foreach my $check (sort keys %$query) {
+ if (defined $query->{$check}) {
+ log_debug("compare %s [%s] to [%s] (entry: %s)",
+ $check,
+ $entry->{$check},
+ $query->{$check},
+ $entry_text);
+ unless ($query->{$check} eq $entry->{$check}) {
+ next ENTRY;
+ }
+ } else {
+ log_debug("OK: any value satisfies check $check");
+ }
+ }
+
+ return $entry;
+ }
+
+ # nothing was found
+ return;
+}
+
+sub print_credential_data {
+ my $entry = shift @_;
+ my $query = shift @_;
+
+ log_debug("entry has passed all the search checks");
+ TOKEN:
+ foreach my $git_token (sort keys %$entry) {
+ log_debug("looking for useful token $git_token");
+ # don't print unknown (to the credential helper protocol) tokens
+ next TOKEN unless exists $query->{$git_token};
+
+ # don't print things asked in the query (the entry matches them)
+ next TOKEN if defined $query->{$git_token};
+
+ log_debug("FOUND: $git_token=$entry->{$git_token}");
+ printf "%s=%s\n", $git_token, $entry->{$git_token};
+ }
+}
+
+sub log_verbose {
+ return unless $options{verbose};
+ printf STDERR @_;
+ printf STDERR "\n";
+}
+
+sub log_debug {
+ return unless $options{debug};
+ printf STDERR @_;
+ printf STDERR "\n";
+}
diff --git a/contrib/credential/netrc/test.netrc b/contrib/credential/netrc/test.netrc
new file mode 100644
index 0000000..ba119a9
--- /dev/null
+++ b/contrib/credential/netrc/test.netrc
@@ -0,0 +1,13 @@
+machine imap login tzz@lifelogs.com port imaps password letmeknow
+machine imap login bob port imaps password bobwillknow
+
+# comment test
+
+machine imap2 login tzz port 1099 password tzzknow
+machine imap2 login bob password bobwillknow
+
+# another comment
+
+machine github.com
+ multilinetoken anothervalue
+ login carol password carolknows
diff --git a/contrib/credential/netrc/test.pl b/contrib/credential/netrc/test.pl
new file mode 100755
index 0000000..169b646
--- /dev/null
+++ b/contrib/credential/netrc/test.pl
@@ -0,0 +1,106 @@
+#!/usr/bin/perl
+
+use warnings;
+use strict;
+use Test;
+use IPC::Open2;
+
+BEGIN { plan tests => 15 }
+
+my @global_credential_args = @ARGV;
+my $netrc = './test.netrc';
+print "# Testing insecure file, nothing should be found\n";
+chmod 0644, $netrc;
+my $cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 0, "Got 0 keys from insecure file");
+
+print "# Testing missing file, nothing should be found\n";
+chmod 0644, $netrc;
+$cred = run_credential(['-f', '///nosuchfile///', 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 0, "Got 0 keys from missing file");
+
+chmod 0600, $netrc;
+
+print "# Testing with invalid data\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ "bad data");
+ok(scalar keys %$cred, 4, "Got first found keys with bad data");
+
+print "# Testing netrc file for a missing corovamilkbar entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'corovamilkbar' });
+
+ok(scalar keys %$cred, 0, "Got no corovamilkbar keys");
+
+print "# Testing netrc file for a github.com entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'github.com' });
+
+ok(scalar keys %$cred, 2, "Got 2 Github keys");
+
+ok($cred->{password}, 'carolknows', "Got correct Github password");
+ok($cred->{username}, 'carol', "Got correct Github username");
+
+print "# Testing netrc file for a username-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap', username => 'bob' });
+
+ok(scalar keys %$cred, 2, "Got 2 username-specific keys");
+
+ok($cred->{password}, 'bobwillknow', "Got correct user-specific password");
+ok($cred->{protocol}, 'imaps', "Got correct user-specific protocol");
+
+print "# Testing netrc file for a host:port-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap2:1099' });
+
+ok(scalar keys %$cred, 2, "Got 2 host:port-specific keys");
+
+ok($cred->{password}, 'tzzknow', "Got correct host:port-specific password");
+ok($cred->{username}, 'tzz', "Got correct host:port-specific username");
+
+print "# Testing netrc file that 'host:port kills host' entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+ { host => 'imap2' });
+
+ok(scalar keys %$cred, 2, "Got 2 'host:port kills host' keys");
+
+ok($cred->{password}, 'bobwillknow', "Got correct 'host:port kills host' password");
+ok($cred->{username}, 'bob', "Got correct 'host:port kills host' username");
+
+sub run_credential
+{
+ my $args = shift @_;
+ my $data = shift @_;
+ my $pid = open2(my $chld_out, my $chld_in,
+ './git-credential-netrc', @global_credential_args,
+ @$args);
+
+ die "Couldn't open pipe to netrc credential helper: $!" unless $pid;
+
+ if (ref $data eq 'HASH')
+ {
+ print $chld_in "$_=$data->{$_}\n" foreach sort keys %$data;
+ }
+ else
+ {
+ print $chld_in "$data\n";
+ }
+
+ close $chld_in;
+ my %ret;
+
+ while (<$chld_out>)
+ {
+ chomp;
+ next unless m/^([^=]+)=(.+)/;
+
+ $ret{$1} = $2;
+ }
+
+ return \%ret;
+}
diff --git a/contrib/credential/osxkeychain/.gitignore b/contrib/credential/osxkeychain/.gitignore
new file mode 100644
index 0000000..6c5b702
--- /dev/null
+++ b/contrib/credential/osxkeychain/.gitignore
@@ -0,0 +1 @@
+git-credential-osxkeychain
diff --git a/contrib/credential/osxkeychain/Makefile b/contrib/credential/osxkeychain/Makefile
new file mode 100644
index 0000000..4b3a08a
--- /dev/null
+++ b/contrib/credential/osxkeychain/Makefile
@@ -0,0 +1,17 @@
+all:: git-credential-osxkeychain
+
+CC = gcc
+RM = rm -f
+CFLAGS = -g -O2 -Wall
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+git-credential-osxkeychain: git-credential-osxkeychain.o
+ $(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) -Wl,-framework -Wl,Security
+
+git-credential-osxkeychain.o: git-credential-osxkeychain.c
+ $(CC) -c $(CFLAGS) $<
+
+clean:
+ $(RM) git-credential-osxkeychain git-credential-osxkeychain.o
diff --git a/contrib/credential/osxkeychain/git-credential-osxkeychain.c b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
new file mode 100644
index 0000000..bcd3f57
--- /dev/null
+++ b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
@@ -0,0 +1,183 @@
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <Security/Security.h>
+
+static SecProtocolType protocol;
+static char *host;
+static char *path;
+static char *username;
+static char *password;
+static UInt16 port;
+
+static void die(const char *err, ...)
+{
+ char msg[4096];
+ va_list params;
+ va_start(params, err);
+ vsnprintf(msg, sizeof(msg), err, params);
+ fprintf(stderr, "%s\n", msg);
+ va_end(params);
+ exit(1);
+}
+
+static void *xstrdup(const char *s1)
+{
+ void *ret = strdup(s1);
+ if (!ret)
+ die("Out of memory");
+ return ret;
+}
+
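+/*
+ * Argument helpers for the SecKeychainFind/AddInternetPassword calls
+ * below: KEYCHAIN_ITEM expands to a length/pointer pair (0/NULL when
+ * the corresponding value was not supplied on stdin).
+ */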
+#define KEYCHAIN_ITEM(x) (x ? strlen(x) : 0), x
+#define KEYCHAIN_ARGS \
+ NULL, /* default keychain */ \
+ KEYCHAIN_ITEM(host), \
+ 0, NULL, /* account domain */ \
+ KEYCHAIN_ITEM(username), \
+ KEYCHAIN_ITEM(path), \
+ port, \
+ protocol, \
+ kSecAuthenticationTypeDefault
+
+static void write_item(const char *what, const char *buf, int len)
+{
+ printf("%s=", what);
+ fwrite(buf, 1, len, stdout);
+ putchar('\n');
+}
+
+static void find_username_in_item(SecKeychainItemRef item)
+{
+ SecKeychainAttributeList list;
+ SecKeychainAttribute attr;
+
+ list.count = 1;
+ list.attr = &attr;
+ attr.tag = kSecAccountItemAttr;
+
+ if (SecKeychainItemCopyContent(item, NULL, &list, NULL, NULL))
+ return;
+
+ write_item("username", attr.data, attr.length);
+ SecKeychainItemFreeContent(&list, NULL);
+}
+
+static void find_internet_password(void)
+{
+ void *buf;
+ UInt32 len;
+ SecKeychainItemRef item;
+
+ if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, &len, &buf, &item))
+ return;
+
+ write_item("password", buf, len);
+ if (!username)
+ find_username_in_item(item);
+
+ SecKeychainItemFreeContent(NULL, buf);
+}
+
+static void delete_internet_password(void)
+{
+ SecKeychainItemRef item;
+
+ /*
+ * Require at least a protocol and host for removal, which is what git
+ * will give us; if you want to do something more fancy, use the
+ * Keychain manager.
+ */
+ if (!protocol || !host)
+ return;
+
+ if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, 0, NULL, &item))
+ return;
+
+ SecKeychainItemDelete(item);
+}
+
+static void add_internet_password(void)
+{
+ /* Only store complete credentials */
+ if (!protocol || !host || !username || !password)
+ return;
+
+ if (SecKeychainAddInternetPassword(
+ KEYCHAIN_ARGS,
+ KEYCHAIN_ITEM(password),
+ NULL))
+ return;
+}
+
+static void read_credential(void)
+{
+ char buf[1024];
+
+ while (fgets(buf, sizeof(buf), stdin)) {
+ char *v;
+
+ if (!strcmp(buf, "\n"))
+ break;
+ buf[strlen(buf)-1] = '\0';
+
+ v = strchr(buf, '=');
+ if (!v)
+ die("bad input: %s", buf);
+ *v++ = '\0';
+
+ if (!strcmp(buf, "protocol")) {
+ if (!strcmp(v, "imap"))
+ protocol = kSecProtocolTypeIMAP;
+ else if (!strcmp(v, "imaps"))
+ protocol = kSecProtocolTypeIMAPS;
+ else if (!strcmp(v, "ftp"))
+ protocol = kSecProtocolTypeFTP;
+ else if (!strcmp(v, "ftps"))
+ protocol = kSecProtocolTypeFTPS;
+ else if (!strcmp(v, "https"))
+ protocol = kSecProtocolTypeHTTPS;
+ else if (!strcmp(v, "http"))
+ protocol = kSecProtocolTypeHTTP;
+ else if (!strcmp(v, "smtp"))
+ protocol = kSecProtocolTypeSMTP;
+ else /* we don't yet handle other protocols */
+ exit(0);
+ }
+ else if (!strcmp(buf, "host")) {
+ char *colon = strchr(v, ':');
+ if (colon) {
+ *colon++ = '\0';
+ port = atoi(colon);
+ }
+ host = xstrdup(v);
+ }
+ else if (!strcmp(buf, "path"))
+ path = xstrdup(v);
+ else if (!strcmp(buf, "username"))
+ username = xstrdup(v);
+ else if (!strcmp(buf, "password"))
+ password = xstrdup(v);
+ }
+}
+
+int main(int argc, const char **argv)
+{
+ const char *usage =
+ "usage: git credential-osxkeychain <get|store|erase>";
+
+ if (!argv[1])
+ die(usage);
+
+ read_credential();
+
+ if (!strcmp(argv[1], "get"))
+ find_internet_password();
+ else if (!strcmp(argv[1], "store"))
+ add_internet_password();
+ else if (!strcmp(argv[1], "erase"))
+ delete_internet_password();
+ /* otherwise, ignore unknown action */
+
+ return 0;
+}
diff --git a/contrib/credential/wincred/Makefile b/contrib/credential/wincred/Makefile
new file mode 100644
index 0000000..bad45ca
--- /dev/null
+++ b/contrib/credential/wincred/Makefile
@@ -0,0 +1,14 @@
+all: git-credential-wincred.exe
+
+CC = gcc
+RM = rm -f
+CFLAGS = -O2 -Wall
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+git-credential-wincred.exe : git-credential-wincred.c
+ $(LINK.c) $^ $(LOADLIBES) $(LDLIBS) -o $@
+
+clean:
+ $(RM) git-credential-wincred.exe
diff --git a/contrib/credential/wincred/git-credential-wincred.c b/contrib/credential/wincred/git-credential-wincred.c
new file mode 100644
index 0000000..a1d38f0
--- /dev/null
+++ b/contrib/credential/wincred/git-credential-wincred.c
@@ -0,0 +1,301 @@
+/*
+ * A git credential helper that interfaces with Windows' Credential Manager
+ *
+ */
+#include <windows.h>
+#include <stdio.h>
+#include <io.h>
+#include <fcntl.h>
+
+/* common helpers */
+
+#define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0]))
+
+static void die(const char *err, ...)
+{
+ char msg[4096];
+ va_list params;
+ va_start(params, err);
+ vsnprintf(msg, sizeof(msg), err, params);
+ fprintf(stderr, "%s\n", msg);
+ va_end(params);
+ exit(1);
+}
+
+static void *xmalloc(size_t size)
+{
+ void *ret = malloc(size);
+ if (!ret && !size)
+ ret = malloc(1);
+ if (!ret)
+ die("Out of memory");
+ return ret;
+}
+
+/* MinGW doesn't have wincred.h, so we need to define stuff */
+
+typedef struct _CREDENTIAL_ATTRIBUTEW {
+ LPWSTR Keyword;
+ DWORD Flags;
+ DWORD ValueSize;
+ LPBYTE Value;
+} CREDENTIAL_ATTRIBUTEW, *PCREDENTIAL_ATTRIBUTEW;
+
+typedef struct _CREDENTIALW {
+ DWORD Flags;
+ DWORD Type;
+ LPWSTR TargetName;
+ LPWSTR Comment;
+ FILETIME LastWritten;
+ DWORD CredentialBlobSize;
+ LPBYTE CredentialBlob;
+ DWORD Persist;
+ DWORD AttributeCount;
+ PCREDENTIAL_ATTRIBUTEW Attributes;
+ LPWSTR TargetAlias;
+ LPWSTR UserName;
+} CREDENTIALW, *PCREDENTIALW;
+
+#define CRED_TYPE_GENERIC 1
+#define CRED_PERSIST_LOCAL_MACHINE 2
+#define CRED_MAX_ATTRIBUTES 64
+
+typedef BOOL (WINAPI *CredWriteWT)(PCREDENTIALW, DWORD);
+typedef BOOL (WINAPI *CredEnumerateWT)(LPCWSTR, DWORD, DWORD *,
+ PCREDENTIALW **);
+typedef VOID (WINAPI *CredFreeT)(PVOID);
+typedef BOOL (WINAPI *CredDeleteWT)(LPCWSTR, DWORD, DWORD);
+
+static HMODULE advapi;
+static CredWriteWT CredWriteW;
+static CredEnumerateWT CredEnumerateW;
+static CredFreeT CredFree;
+static CredDeleteWT CredDeleteW;
+
+static void load_cred_funcs(void)
+{
+ /* load DLLs */
+ advapi = LoadLibrary("advapi32.dll");
+ if (!advapi)
+ die("failed to load advapi32.dll");
+
+ /* get function pointers */
+ CredWriteW = (CredWriteWT)GetProcAddress(advapi, "CredWriteW");
+ CredEnumerateW = (CredEnumerateWT)GetProcAddress(advapi,
+ "CredEnumerateW");
+ CredFree = (CredFreeT)GetProcAddress(advapi, "CredFree");
+ CredDeleteW = (CredDeleteWT)GetProcAddress(advapi, "CredDeleteW");
+ if (!CredWriteW || !CredEnumerateW || !CredFree || !CredDeleteW)
+ die("failed to load functions");
+}
+
+static WCHAR *wusername, *password, *protocol, *host, *path, target[1024];
+
+static void write_item(const char *what, LPCWSTR wbuf, int wlen)
+{
+ char *buf;
+ int len = WideCharToMultiByte(CP_UTF8, 0, wbuf, wlen, NULL, 0, NULL,
+ FALSE);
+ buf = xmalloc(len);
+
+ if (!WideCharToMultiByte(CP_UTF8, 0, wbuf, wlen, buf, len, NULL, FALSE))
+ die("WideCharToMultiByte failed!");
+
+ printf("%s=", what);
+ fwrite(buf, 1, len, stdout);
+ putchar('\n');
+ free(buf);
+}
+
+/*
+ * Match an (optional) expected string and a delimiter in the target string,
+ * consuming the matched text by updating the target pointer.
+ */
+static int match_part(LPCWSTR *ptarget, LPCWSTR want, LPCWSTR delim)
+{
+ LPCWSTR delim_pos, start = *ptarget;
+ int len;
+
+ /* find start of delimiter (or end-of-string if delim is empty) */
+ if (*delim)
+ delim_pos = wcsstr(start, delim);
+ else
+ delim_pos = start + wcslen(start);
+
+ /*
+ * match text up to delimiter, or end of string (e.g. the '/' after
+ * host is optional if not followed by a path)
+ */
+ if (delim_pos)
+ len = delim_pos - start;
+ else
+ len = wcslen(start);
+
+ /* update ptarget if we either found a delimiter or need a match */
+ if (delim_pos || want)
+ *ptarget = delim_pos ? delim_pos + wcslen(delim) : start + len;
+
+ return !want || (!wcsncmp(want, start, len) && !want[len]);
+}
+
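+/*
+ * Decide whether a stored credential matches the query. Target names
+ * have the form "git:<protocol>://[<username>@]<host>[/<path>]",
+ * which is exactly what main() builds below before storing.
+ */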
+static int match_cred(const CREDENTIALW *cred)
+{
+ LPCWSTR target = cred->TargetName;
+ if (wusername && wcscmp(wusername, cred->UserName))
+ return 0;
+
+ return match_part(&target, L"git", L":") &&
+ match_part(&target, protocol, L"://") &&
+ match_part(&target, wusername, L"@") &&
+ match_part(&target, host, L"/") &&
+ match_part(&target, path, L"");
+}
+
+static void get_credential(void)
+{
+ CREDENTIALW **creds;
+ DWORD num_creds;
+ int i;
+
+ if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds))
+ return;
+
+	/* search for the first credential that matches the query */
+ for (i = 0; i < num_creds; ++i)
+ if (match_cred(creds[i])) {
+ write_item("username", creds[i]->UserName,
+ wcslen(creds[i]->UserName));
+ write_item("password",
+ (LPCWSTR)creds[i]->CredentialBlob,
+ creds[i]->CredentialBlobSize / sizeof(WCHAR));
+ break;
+ }
+
+ CredFree(creds);
+}
+
+static void store_credential(void)
+{
+ CREDENTIALW cred;
+
+ if (!wusername || !password)
+ return;
+
+ cred.Flags = 0;
+ cred.Type = CRED_TYPE_GENERIC;
+ cred.TargetName = target;
+ cred.Comment = L"saved by git-credential-wincred";
+ cred.CredentialBlobSize = (wcslen(password)) * sizeof(WCHAR);
+ cred.CredentialBlob = (LPVOID)password;
+ cred.Persist = CRED_PERSIST_LOCAL_MACHINE;
+ cred.AttributeCount = 0;
+ cred.Attributes = NULL;
+ cred.TargetAlias = NULL;
+ cred.UserName = wusername;
+
+ if (!CredWriteW(&cred, 0))
+ die("CredWrite failed");
+}
+
+static void erase_credential(void)
+{
+ CREDENTIALW **creds;
+ DWORD num_creds;
+ int i;
+
+ if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds))
+ return;
+
+ for (i = 0; i < num_creds; ++i) {
+ if (match_cred(creds[i]))
+ CredDeleteW(creds[i]->TargetName, creds[i]->Type, 0);
+ }
+
+ CredFree(creds);
+}
+
+static WCHAR *utf8_to_utf16_dup(const char *str)
+{
+ int wlen = MultiByteToWideChar(CP_UTF8, 0, str, -1, NULL, 0);
+ WCHAR *wstr = xmalloc(sizeof(WCHAR) * wlen);
+ MultiByteToWideChar(CP_UTF8, 0, str, -1, wstr, wlen);
+ return wstr;
+}
+
+static void read_credential(void)
+{
+ char buf[1024];
+
+ while (fgets(buf, sizeof(buf), stdin)) {
+ char *v;
+ int len = strlen(buf);
+ /* strip trailing CR / LF */
+ while (len && strchr("\r\n", buf[len - 1]))
+ buf[--len] = 0;
+
+ if (!*buf)
+ break;
+
+ v = strchr(buf, '=');
+ if (!v)
+ die("bad input: %s", buf);
+ *v++ = '\0';
+
+ if (!strcmp(buf, "protocol"))
+ protocol = utf8_to_utf16_dup(v);
+ else if (!strcmp(buf, "host"))
+ host = utf8_to_utf16_dup(v);
+ else if (!strcmp(buf, "path"))
+ path = utf8_to_utf16_dup(v);
+ else if (!strcmp(buf, "username")) {
+ wusername = utf8_to_utf16_dup(v);
+ } else if (!strcmp(buf, "password"))
+ password = utf8_to_utf16_dup(v);
+ else
+ die("unrecognized input");
+ }
+}
+
+int main(int argc, char *argv[])
+{
+ const char *usage =
+ "usage: git credential-wincred <get|store|erase>\n";
+
+ if (!argv[1])
+ die(usage);
+
+	/* git uses binary pipes to avoid CRLF issues */
+ _setmode(_fileno(stdin), _O_BINARY);
+ _setmode(_fileno(stdout), _O_BINARY);
+
+ read_credential();
+
+ load_cred_funcs();
+
+ if (!protocol || !(host || path))
+ return 0;
+
+ /* prepare 'target', the unique key for the credential */
+ wcscpy(target, L"git:");
+ wcsncat(target, protocol, ARRAY_SIZE(target));
+ wcsncat(target, L"://", ARRAY_SIZE(target));
+ if (wusername) {
+ wcsncat(target, wusername, ARRAY_SIZE(target));
+ wcsncat(target, L"@", ARRAY_SIZE(target));
+ }
+ if (host)
+ wcsncat(target, host, ARRAY_SIZE(target));
+ if (path) {
+ wcsncat(target, L"/", ARRAY_SIZE(target));
+ wcsncat(target, path, ARRAY_SIZE(target));
+ }
+
+ if (!strcmp(argv[1], "get"))
+ get_credential();
+ else if (!strcmp(argv[1], "store"))
+ store_credential();
+ else if (!strcmp(argv[1], "erase"))
+ erase_credential();
+ /* otherwise, ignore unknown action */
+ return 0;
+}
diff --git a/contrib/diff-highlight/README b/contrib/diff-highlight/README
new file mode 100644
index 0000000..502e03b
--- /dev/null
+++ b/contrib/diff-highlight/README
@@ -0,0 +1,152 @@
+diff-highlight
+==============
+
+Line oriented diffs are great for reviewing code, because for most
+hunks, you want to see the old and the new segments of code next to each
+other. Sometimes, though, when an old line and a new line are very
+similar, it's hard to immediately see the difference.
+
+You can use "--color-words" to highlight only the changed portions of
+lines. However, this can often be hard to read for code, as it loses
+the line structure, and you end up with oddly formatted bits.
+
+Instead, this script post-processes the line-oriented diff, finds pairs
+of lines, and highlights the differing segments. It's currently very
+simple and stupid about doing these tasks. In particular:
+
+ 1. It will only highlight hunks in which the number of removed and
+ added lines is the same, and it will pair lines within the hunk by
+ position (so the first removed line is compared to the first added
+ line, and so forth). This is simple and tends to work well in
+ practice. More complex changes don't highlight well, so we tend to
+ exclude them due to the "same number of removed and added lines"
+ restriction. Or even if we do try to highlight them, they end up
+ not highlighting because of our "don't highlight if the whole line
+ would be highlighted" rule.
+
+ 2. It will find the common prefix and suffix of two lines, and
+ consider everything in the middle to be "different". It could
+ instead do a real diff of the characters between the two lines and
+ find common subsequences. However, the point of the highlight is to
+ call attention to a certain area. Even if some small subset of the
+ highlighted area actually didn't change, that's OK. In practice it
+ ends up being more readable to just have a single blob on the line
+ showing the interesting bit.
+
+The goal of the script is therefore not to be exact about highlighting
+changes, but to call attention to areas of interest without being
+visually distracting. Non-diff lines and existing diff coloration is
+preserved; the intent is that the output should look exactly the same as
+the input, except for the occasional highlight.
+
+Use
+---
+
+You can try out the diff-highlight program with:
+
+---------------------------------------------
+git log -p --color | /path/to/diff-highlight
+---------------------------------------------
+
+If you want to use it all the time, drop it in your $PATH and put the
+following in your git configuration:
+
+---------------------------------------------
+[pager]
+ log = diff-highlight | less
+ show = diff-highlight | less
+ diff = diff-highlight | less
+---------------------------------------------
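+
+If you only want to see highlighted output for a single diff rather
+than changing your pager, an equivalent one-off pipeline (a usage
+sketch; adjust the path as needed) is:
+
+---------------------------------------------
+git diff --color | /path/to/diff-highlight | less -R
+---------------------------------------------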
+
+Bugs
+----
+
+Because diff-highlight relies on heuristics to guess which parts of
+changes are important, there are some cases where the highlighting is
+more distracting than useful. Fortunately, these cases are rare in
+practice, and when they do occur, the worst case is simply a little
+extra highlighting. This section documents some cases known to be
+sub-optimal, in case somebody feels like working on improving the
+heuristics.
+
+1. Two changes on the same line get highlighted in a blob. For example,
+ highlighting:
+
+----------------------------------------------
+-foo(buf, size);
++foo(obj->buf, obj->size);
+----------------------------------------------
+
+ yields (where the inside of "+{}" would be highlighted):
+
+----------------------------------------------
+-foo(buf, size);
++foo(+{obj->buf, obj->}size);
+----------------------------------------------
+
+ whereas a more semantically meaningful output would be:
+
+----------------------------------------------
+-foo(buf, size);
++foo(+{obj->}buf, +{obj->}size);
+----------------------------------------------
+
+ Note that doing this right would probably involve a set of
+ content-specific boundary patterns, similar to word-diff. Otherwise
+ you get junk like:
+
+-----------------------------------------------------
+-this line has some -{i}nt-{ere}sti-{ng} text on it
++this line has some +{fa}nt+{a}sti+{c} text on it
+-----------------------------------------------------
+
+ which is less readable than the current output.
+
+2. The multi-line matching assumes that lines in the pre- and post-image
+ match by position. This is often the case, but can be fooled when a
+ line is removed from the top and a new one added at the bottom (or
+ vice versa). Unless the lines in the middle are also changed, diffs
+ will show this as two hunks, and it will not get highlighted at all
+ (which is good). But if the lines in the middle are changed, the
+ highlighting can be misleading. Here's a pathological case:
+
+-----------------------------------------------------
+-one
+-two
+-three
+-four
++two 2
++three 3
++four 4
++five 5
+-----------------------------------------------------
+
+ which gets highlighted as:
+
+-----------------------------------------------------
+-one
+-t-{wo}
+-three
+-f-{our}
++two 2
++t+{hree 3}
++four 4
++f+{ive 5}
+-----------------------------------------------------
+
+ because it matches "two" to "three 3", and so forth. It would be
+ nicer as:
+
+-----------------------------------------------------
+-one
+-two
+-three
+-four
++two +{2}
++three +{3}
++four +{4}
++five 5
+-----------------------------------------------------
+
+ which would probably involve pre-matching the lines into pairs
+ according to some heuristic.
diff --git a/contrib/diff-highlight/diff-highlight b/contrib/diff-highlight/diff-highlight
new file mode 100755
index 0000000..c4404d4
--- /dev/null
+++ b/contrib/diff-highlight/diff-highlight
@@ -0,0 +1,173 @@
+#!/usr/bin/perl
+
+use warnings FATAL => 'all';
+use strict;
+
+# Highlight by reversing foreground and background. You could do
+# other things like bold or underline if you prefer.
+my $HIGHLIGHT = "\x1b[7m";
+my $UNHIGHLIGHT = "\x1b[27m";
+my $COLOR = qr/\x1b\[[0-9;]*m/;
+my $BORING = qr/$COLOR|\s/;
+
+my @removed;
+my @added;
+my $in_hunk;
+
+while (<>) {
+ if (!$in_hunk) {
+ print;
+ $in_hunk = /^$COLOR*\@/;
+ }
+ elsif (/^$COLOR*-/) {
+ push @removed, $_;
+ }
+ elsif (/^$COLOR*\+/) {
+ push @added, $_;
+ }
+ else {
+ show_hunk(\@removed, \@added);
+ @removed = ();
+ @added = ();
+
+ print;
+ $in_hunk = /^$COLOR*[\@ ]/;
+ }
+
+ # Most of the time there is enough output to keep things streaming,
+ # but for something like "git log -Sfoo", you can get one early
+ # commit and then many seconds of nothing. We want to show
+ # that one commit as soon as possible.
+ #
+ # Since we can receive arbitrary input, there's no optimal
+ # place to flush. Flushing on a blank line is a heuristic that
+ # happens to match git-log output.
+ if (!length) {
+ local $| = 1;
+ }
+}
+
+# Flush any queued hunk (this can happen when there is no trailing context in
+# the final diff of the input).
+show_hunk(\@removed, \@added);
+
+exit 0;
+
+sub show_hunk {
+ my ($a, $b) = @_;
+
+ # If one side is empty, then there is nothing to compare or highlight.
+ if (!@$a || !@$b) {
+ print @$a, @$b;
+ return;
+ }
+
+ # If we have mismatched numbers of lines on each side, we could try to
+ # be clever and match up similar lines. But for now we are simple and
+ # stupid, and only handle multi-line hunks that remove and add the same
+ # number of lines.
+ if (@$a != @$b) {
+ print @$a, @$b;
+ return;
+ }
+
+ my @queue;
+ for (my $i = 0; $i < @$a; $i++) {
+ my ($rm, $add) = highlight_pair($a->[$i], $b->[$i]);
+ print $rm;
+ push @queue, $add;
+ }
+ print @queue;
+}
+
+sub highlight_pair {
+ my @a = split_line(shift);
+ my @b = split_line(shift);
+
+ # Find common prefix, taking care to skip any ansi
+ # color codes.
+ my $seen_plusminus;
+ my ($pa, $pb) = (0, 0);
+ while ($pa < @a && $pb < @b) {
+ if ($a[$pa] =~ /$COLOR/) {
+ $pa++;
+ }
+ elsif ($b[$pb] =~ /$COLOR/) {
+ $pb++;
+ }
+ elsif ($a[$pa] eq $b[$pb]) {
+ $pa++;
+ $pb++;
+ }
+ elsif (!$seen_plusminus && $a[$pa] eq '-' && $b[$pb] eq '+') {
+ $seen_plusminus = 1;
+ $pa++;
+ $pb++;
+ }
+ else {
+ last;
+ }
+ }
+
+ # Find common suffix, ignoring colors.
+ my ($sa, $sb) = ($#a, $#b);
+ while ($sa >= $pa && $sb >= $pb) {
+ if ($a[$sa] =~ /$COLOR/) {
+ $sa--;
+ }
+ elsif ($b[$sb] =~ /$COLOR/) {
+ $sb--;
+ }
+ elsif ($a[$sa] eq $b[$sb]) {
+ $sa--;
+ $sb--;
+ }
+ else {
+ last;
+ }
+ }
+
+ if (is_pair_interesting(\@a, $pa, $sa, \@b, $pb, $sb)) {
+ return highlight_line(\@a, $pa, $sa),
+ highlight_line(\@b, $pb, $sb);
+ }
+ else {
+ return join('', @a),
+ join('', @b);
+ }
+}
+
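+# Split a line into single characters, but keep each ANSI color
+# sequence together as one list element so color codes are never
+# compared or highlighted character-by-character.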
+sub split_line {
+ local $_ = shift;
+ return map { /$COLOR/ ? $_ : (split //) }
+ split /($COLOR*)/;
+}
+
+sub highlight_line {
+ my ($line, $prefix, $suffix) = @_;
+
+ return join('',
+ @{$line}[0..($prefix-1)],
+ $HIGHLIGHT,
+ @{$line}[$prefix..$suffix],
+ $UNHIGHLIGHT,
+ @{$line}[($suffix+1)..$#$line]
+ );
+}
+
+# Pairs are interesting to highlight only if we are going to end up
+# highlighting a subset (i.e., not the whole line). Otherwise, the highlighting
+# is just useless noise. We can detect this by finding either a matching prefix
+# or suffix (disregarding boring bits like whitespace and colorization).
+sub is_pair_interesting {
+ my ($a, $pa, $sa, $b, $pb, $sb) = @_;
+ my $prefix_a = join('', @$a[0..($pa-1)]);
+ my $prefix_b = join('', @$b[0..($pb-1)]);
+ my $suffix_a = join('', @$a[($sa+1)..$#$a]);
+ my $suffix_b = join('', @$b[($sb+1)..$#$b]);
+
+ return $prefix_a !~ /^$COLOR*-$BORING*$/ ||
+ $prefix_b !~ /^$COLOR*\+$BORING*$/ ||
+ $suffix_a !~ /^$BORING*$/ ||
+ $suffix_b !~ /^$BORING*$/;
+}
diff --git a/contrib/diffall/README b/contrib/diffall/README
new file mode 100644
index 0000000..507f17d
--- /dev/null
+++ b/contrib/diffall/README
@@ -0,0 +1,31 @@
+The git-diffall script provides a directory-based diff mechanism
+for git.
+
+To determine what diff viewer is used, the script requires either
+the 'diff.tool' or 'merge.tool' configuration option to be set.
+
+This script is compatible with most common forms used to specify a
+range of revisions to diff:
+
+ 1. git diffall: shows diff between working tree and staged changes
+ 2. git diffall --cached [<commit>]: shows diff between staged
+ changes and HEAD (or other named commit)
+ 3. git diffall <commit>: shows diff between working tree and named
+ commit
+ 4. git diffall <commit> <commit>: show diff between two named commits
+ 5. git diffall <commit>..<commit>: same as above
+ 6. git diffall <commit>...<commit>: show the changes on the branch
+ containing and up to the second <commit>, starting at a common
+ ancestor of both commits
+
+Note: all forms take an optional path limiter [-- <path>*]
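+
+For example (an illustrative invocation, using form 5 with a path
+limiter):
+
+    git diffall v1.0..v2.0 -- Documentation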
+
+The '--extcmd=<command>' option allows the user to specify a custom
+command for viewing diffs. When given, configured defaults are
+ignored and the script runs $command $LOCAL $REMOTE. Additionally,
+$BASE is set in the environment.
+
+This script is based on an example provided by Thomas Rast on the
+Git list [1]:
+
+[1] http://thread.gmane.org/gmane.comp.version-control.git/124807
diff --git a/contrib/diffall/git-diffall b/contrib/diffall/git-diffall
new file mode 100755
index 0000000..84f2b65
--- /dev/null
+++ b/contrib/diffall/git-diffall
@@ -0,0 +1,257 @@
+#!/bin/sh
+# Copyright 2010 - 2012, Tim Henigan <tim.henigan@gmail.com>
+#
+# Perform a directory diff between commits in the repository using
+# the external diff or merge tool specified in the user's config.
+
+USAGE='[--cached] [--copy-back] [-x|--extcmd=<command>] <commit>{0,2} [-- <path>*]
+
+ --cached Compare to the index rather than the working tree.
+
+ --copy-back Copy files back to the working tree when the diff
+ tool exits (in case they were modified by the
+ user). This option is only valid if the diff
+ compared with the working tree.
+
+ -x=<command>
+ --extcmd=<command> Specify a custom command for viewing diffs.
+ git-diffall ignores the configured defaults and
+ runs $command $LOCAL $REMOTE when this option is
+ specified. Additionally, $BASE is set in the
+ environment.
+'
+
+SUBDIRECTORY_OK=1
+. "$(git --exec-path)/git-sh-setup"
+
+TOOL_MODE=diff
+. "$(git --exec-path)/git-mergetool--lib"
+
+merge_tool="$(get_merge_tool)"
+if test -z "$merge_tool"
+then
+ echo "Error: Either the 'diff.tool' or 'merge.tool' option must be set."
+ usage
+fi
+
+start_dir=$(pwd)
+
+# All the file paths returned by the diff command are relative to the root
+# of the working copy. So if the script is called from a subdirectory, it
+# must switch to the root of working copy before trying to use those paths.
+cdup=$(git rev-parse --show-cdup) &&
+cd "$cdup" || {
+ echo >&2 "Cannot chdir to $cdup, the toplevel of the working tree"
+ exit 1
+}
+
+# set up temp dir
+tmp=$(perl -e 'use File::Temp qw(tempdir);
+ $t=tempdir("/tmp/git-diffall.XXXXX") or exit(1);
+ print $t') || exit 1
+trap 'rm -rf "$tmp"' EXIT
+
+left=
+right=
+paths=
+dashdash_seen=
+compare_staged=
+merge_base=
+left_dir=
+right_dir=
+diff_tool=
+copy_back=
+
+while test $# != 0
+do
+ case "$1" in
+ -h|--h|--he|--hel|--help)
+ usage
+ ;;
+ --cached)
+ compare_staged=1
+ ;;
+ --copy-back)
+ copy_back=1
+ ;;
+ -x|--e|--ex|--ext|--extc|--extcm|--extcmd)
+ if test $# = 1
+ then
+ echo You must specify the tool for use with --extcmd
+ usage
+ else
+ diff_tool=$2
+ shift
+ fi
+ ;;
+ --)
+ dashdash_seen=1
+ ;;
+ -*)
+ echo Invalid option: "$1"
+ usage
+ ;;
+ *)
+ # could be commit, commit range or path limiter
+ case "$1" in
+ *...*)
+ left=${1%...*}
+ right=${1#*...}
+ merge_base=1
+ ;;
+ *..*)
+ left=${1%..*}
+ right=${1#*..}
+ ;;
+ *)
+ if test -n "$dashdash_seen"
+ then
+ paths="$paths$1 "
+ elif test -z "$left"
+ then
+ left=$1
+ elif test -z "$right"
+ then
+ right=$1
+ else
+ paths="$paths$1 "
+ fi
+ ;;
+ esac
+ ;;
+ esac
+ shift
+done
+
+# Determine the set of files which changed
+if test -n "$left" && test -n "$right"
+then
+ left_dir="cmt-$(git rev-parse --short $left)"
+ right_dir="cmt-$(git rev-parse --short $right)"
+
+ if test -n "$compare_staged"
+ then
+ usage
+ elif test -n "$merge_base"
+ then
+ git diff --name-only "$left"..."$right" -- $paths >"$tmp/filelist"
+ else
+ git diff --name-only "$left" "$right" -- $paths >"$tmp/filelist"
+ fi
+elif test -n "$left"
+then
+ left_dir="cmt-$(git rev-parse --short $left)"
+
+ if test -n "$compare_staged"
+ then
+ right_dir="staged"
+ git diff --name-only --cached "$left" -- $paths >"$tmp/filelist"
+ else
+ right_dir="working_tree"
+ git diff --name-only "$left" -- $paths >"$tmp/filelist"
+ fi
+else
+ left_dir="HEAD"
+
+ if test -n "$compare_staged"
+ then
+ right_dir="staged"
+ git diff --name-only --cached -- $paths >"$tmp/filelist"
+ else
+ right_dir="working_tree"
+ git diff --name-only -- $paths >"$tmp/filelist"
+ fi
+fi
+
+# Exit immediately if there are no diffs
+if test ! -s "$tmp/filelist"
+then
+ exit 0
+fi
+
+if test -n "$copy_back" && test "$right_dir" != "working_tree"
+then
+ echo "--copy-back is only valid when diff includes the working tree."
+ exit 1
+fi
+
+# Create the named tmp directories that will hold the files to be compared
+mkdir -p "$tmp/$left_dir" "$tmp/$right_dir"
+
+# Populate the tmp/right_dir directory with the files to be compared
+while read name
+do
+ if test -n "$right"
+ then
+ ls_list=$(git ls-tree $right "$name")
+ if test -n "$ls_list"
+ then
+ mkdir -p "$tmp/$right_dir/$(dirname "$name")"
+ git show "$right":"$name" >"$tmp/$right_dir/$name" || true
+ fi
+ elif test -n "$compare_staged"
+ then
+ ls_list=$(git ls-files -- "$name")
+ if test -n "$ls_list"
+ then
+ mkdir -p "$tmp/$right_dir/$(dirname "$name")"
+ git show :"$name" >"$tmp/$right_dir/$name"
+ fi
+ else
+ if test -e "$name"
+ then
+ mkdir -p "$tmp/$right_dir/$(dirname "$name")"
+ cp "$name" "$tmp/$right_dir/$name"
+ fi
+ fi
+done < "$tmp/filelist"
+
+# Populate the tmp/left_dir directory with the files to be compared
+while read name
+do
+ if test -n "$left"
+ then
+ ls_list=$(git ls-tree $left "$name")
+ if test -n "$ls_list"
+ then
+ mkdir -p "$tmp/$left_dir/$(dirname "$name")"
+ git show "$left":"$name" >"$tmp/$left_dir/$name" || true
+ fi
+ else
+ if test -n "$compare_staged"
+ then
+ ls_list=$(git ls-tree HEAD "$name")
+ if test -n "$ls_list"
+ then
+ mkdir -p "$tmp/$left_dir/$(dirname "$name")"
+ git show HEAD:"$name" >"$tmp/$left_dir/$name"
+ fi
+ else
+ mkdir -p "$tmp/$left_dir/$(dirname "$name")"
+ git show :"$name" >"$tmp/$left_dir/$name"
+ fi
+ fi
+done < "$tmp/filelist"
+
+LOCAL="$tmp/$left_dir"
+REMOTE="$tmp/$right_dir"
+
+if test -n "$diff_tool"
+then
+ export BASE
+ eval $diff_tool '"$LOCAL"' '"$REMOTE"'
+else
+ run_merge_tool "$merge_tool" false
+fi
+
+# Copy files back to the working dir, if requested
+if test -n "$copy_back" && test "$right_dir" = "working_tree"
+then
+ cd "$start_dir"
+ git_top_dir=$(git rev-parse --show-toplevel)
+ find "$tmp/$right_dir" -type f |
+ while read file
+ do
+ cp "$file" "$git_top_dir/${file#$tmp/$right_dir/}"
+ done
+fi
diff --git a/contrib/emacs/.gitignore b/contrib/emacs/.gitignore
new file mode 100644
index 0000000..c531d98
--- /dev/null
+++ b/contrib/emacs/.gitignore
@@ -0,0 +1 @@
+*.elc
diff --git a/contrib/emacs/Makefile b/contrib/emacs/Makefile
new file mode 100644
index 0000000..24d9312
--- /dev/null
+++ b/contrib/emacs/Makefile
@@ -0,0 +1,21 @@
+## Build and install stuff
+
+EMACS = emacs
+
+ELC = git.elc git-blame.elc
+INSTALL ?= install
+INSTALL_ELC = $(INSTALL) -m 644
+prefix ?= $(HOME)
+emacsdir = $(prefix)/share/emacs/site-lisp
+RM ?= rm -f
+
+all: $(ELC)
+
+install: all
+ $(INSTALL) -d $(DESTDIR)$(emacsdir)
+ $(INSTALL_ELC) $(ELC:.elc=.el) $(ELC) $(DESTDIR)$(emacsdir)
+
+%.elc: %.el
+ $(EMACS) -batch -f batch-byte-compile $<
+
+clean:; $(RM) $(ELC)
diff --git a/contrib/emacs/README b/contrib/emacs/README
new file mode 100644
index 0000000..82368bd
--- /dev/null
+++ b/contrib/emacs/README
@@ -0,0 +1,39 @@
+This directory contains various modules for Emacs support.
+
+To make the modules available to Emacs, you should add this directory
+to your load-path, and then require the modules you want. This can be
+done by adding to your .emacs something like this:
+
+ (add-to-list 'load-path ".../git/contrib/emacs")
+ (require 'git)
+ (require 'git-blame)
+
+
+The following modules are available:
+
+* git.el:
+
+ Status manager that displays the state of all the files of the
+ project, and provides easy access to the most frequently used git
+ commands. The user interface is as far as possible compatible with
+ the pcl-cvs mode. It can be started with `M-x git-status'.
+
+* git-blame.el:
+
+ Emacs implementation of incremental git-blame. When you turn it on
+ while viewing a file, the editor buffer will be updated by setting
+ the background of individual lines to a color that reflects which
+ commit it comes from. And when you move around the buffer, a
+ one-line summary will be shown in the echo area.
+
+* vc-git.el:
+
+ This file used to contain the VC-mode backend for git, but it is no
+ longer distributed with git. It is now maintained as part of Emacs
+ and included in standard Emacs distributions starting from version
+ 22.2.
+
+ If you have an earlier Emacs version, upgrading to Emacs 22 is
+ recommended, since the VC mode in older Emacs is not generic enough
+ to be able to support git in a reasonable manner, and no attempt has
+ been made to backport vc-git.el.
diff --git a/contrib/emacs/git-blame.el b/contrib/emacs/git-blame.el
new file mode 100644
index 0000000..e671f6c
--- /dev/null
+++ b/contrib/emacs/git-blame.el
@@ -0,0 +1,484 @@
+;;; git-blame.el --- Minor mode for incremental blame for Git -*- coding: utf-8 -*-
+;;
+;; Copyright (C) 2007 David KÃ¥gedal
+;;
+;; Authors: David KÃ¥gedal <davidk@lysator.liu.se>
+;; Created: 31 Jan 2007
+;; Message-ID: <87iren2vqx.fsf@morpheus.local>
+;; License: GPL
+;; Keywords: git, version control, release management
+;;
+;; Compatibility: Emacs21, Emacs22 and EmacsCVS
+;; Git 1.5 and up
+
+;; This file is *NOT* part of GNU Emacs.
+;; This file is distributed under the same terms as GNU Emacs.
+
+;; This program is free software; you can redistribute it and/or
+;; modify it under the terms of the GNU General Public License as
+;; published by the Free Software Foundation; either version 2 of
+;; the License, or (at your option) any later version.
+
+;; This program is distributed in the hope that it will be
+;; useful, but WITHOUT ANY WARRANTY; without even the implied
+;; warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+;; PURPOSE. See the GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public
+;; License along with this program; if not, write to the Free
+;; Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
+;; MA 02111-1307 USA
+
+;; http://www.fsf.org/copyleft/gpl.html
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;
+;;; Commentary:
+;;
+;; Here is an Emacs implementation of incremental git-blame. When you
+;; turn it on while viewing a file, the editor buffer will be updated by
+;; setting the background of individual lines to a color that reflects
+;; which commit they come from. And when you move around the buffer, a
+;; one-line summary will be shown in the echo area.
+
+;;; Installation:
+;;
+;; To use this package, put it somewhere in `load-path' (or add
+;; directory with git-blame.el to `load-path'), and add the following
+;; line to your .emacs:
+;;
+;; (require 'git-blame)
+;;
+;; If you do not want to load this package before it is necessary, you
+;; can make use of the `autoload' feature, e.g. by adding to your .emacs
+;; the following lines
+;;
+;; (autoload 'git-blame-mode "git-blame"
+;; "Minor mode for incremental blame for Git." t)
+;;
+;; Then first use of `M-x git-blame-mode' would load the package.
+
+;;; Compatibility:
+;;
+;; It requires GNU Emacs 21 or later and Git 1.5.0 and up
+;;
+;; If you're using Emacs 20, try changing this:
+;;
+;; (overlay-put ovl 'face (list :background
+;; (cdr (assq 'color (cddddr info)))))
+;;
+;; to
+;;
+;; (overlay-put ovl 'face (cons 'background-color
+;; (cdr (assq 'color (cddddr info)))))
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;
+;;; Code:
+
+(eval-when-compile (require 'cl)) ; to use `push', `pop'
+(require 'format-spec)
+
+(defface git-blame-prefix-face
+ '((((background dark)) (:foreground "gray"
+ :background "black"))
+ (((background light)) (:foreground "gray"
+ :background "white"))
+ (t (:weight bold)))
+ "The face used for the hash prefix."
+ :group 'git-blame)
+
+(defgroup git-blame nil
+ "A minor mode showing Git blame information."
+ :group 'git
+ :link '(function-link git-blame-mode))
+
+
+(defcustom git-blame-use-colors t
+ "Use colors to indicate commits in `git-blame-mode'."
+ :type 'boolean
+ :group 'git-blame)
+
+(defcustom git-blame-prefix-format
+ "%h %20A:"
+ "The format of the prefix added to each line in `git-blame'
+mode. The format is passed to `format-spec' with the following format keys:
+
+ %h - the abbreviated hash
+ %H - the full hash
+ %a - the author name
+ %A - the author email
+ %c - the committer name
+ %C - the committer email
+ %s - the commit summary
+"
+ :group 'git-blame)
+
+(defcustom git-blame-mouseover-format
+ "%h %a %A: %s"
+ "The format of the description shown when pointing at a line in
+`git-blame' mode. The format string is passed to `format-spec'
+with the following format keys:
+
+ %h - the abbreviated hash
+ %H - the full hash
+ %a - the author name
+ %A - the author email
+ %c - the committer name
+ %C - the committer email
+ %s - the commit summary
+"
+ :group 'git-blame)
+
+
+(defun git-blame-color-scale (&rest elements)
+  "Given a list, return a list of triples formed from the
+elements of the list.
+
+a b => bbb bba bab baa abb aba aaa aab"
+ (let (result)
+ (dolist (a elements)
+ (dolist (b elements)
+ (dolist (c elements)
+ (setq result (cons (format "#%s%s%s" a b c) result)))))
+ result))
+
+;; (git-blame-color-scale "0c" "04" "24" "1c" "2c" "34" "14" "3c") =>
+;; ("#3c3c3c" "#3c3c14" "#3c3c34" "#3c3c2c" "#3c3c1c" "#3c3c24"
+;; "#3c3c04" "#3c3c0c" "#3c143c" "#3c1414" "#3c1434" "#3c142c" ...)
+
+(defmacro git-blame-random-pop (l)
+  "Select a random element from L and return it. Also remove the
+selected element from L."
+ ;; only works on lists with unique elements
+ `(let ((e (elt ,l (random (length ,l)))))
+ (setq ,l (remove e ,l))
+ e))
+
+(defvar git-blame-log-oneline-format
+ "format:[%cr] %cn: %s"
+  "*Formatting option used for describing the current line in the minibuffer.
+
+This option is passed to the git log --pretty= command-line option,
+and describes the commit in which the current line was made.")
+
+(defvar git-blame-dark-colors
+ (git-blame-color-scale "0c" "04" "24" "1c" "2c" "34" "14" "3c")
+ "*List of colors (format #RGB) to use in a dark environment.
+
+To check out the list, evaluate (list-colors-display git-blame-dark-colors).")
+
+(defvar git-blame-light-colors
+ (git-blame-color-scale "c4" "d4" "cc" "dc" "f4" "e4" "fc" "ec")
+ "*List of colors (format #RGB) to use in a light environment.
+
+To check out the list, evaluate (list-colors-display git-blame-light-colors).")
+
+(defvar git-blame-colors '()
+ "Colors used by git-blame. The list is built once when activating git-blame
+minor mode.")
+
+(defvar git-blame-ancient-color "dark green"
+ "*Color to be used for ancient commit.")
+
+(defvar git-blame-autoupdate t
+ "*Automatically update the blame display while editing")
+
+(defvar git-blame-proc nil
+ "The running git-blame process")
+(make-variable-buffer-local 'git-blame-proc)
+
+(defvar git-blame-overlays nil
+ "The git-blame overlays used in the current buffer.")
+(make-variable-buffer-local 'git-blame-overlays)
+
+(defvar git-blame-cache nil
+ "A cache of git-blame information for the current buffer")
+(make-variable-buffer-local 'git-blame-cache)
+
+(defvar git-blame-idle-timer nil
+  "An idle timer that updates the blame.")
+(make-variable-buffer-local 'git-blame-idle-timer)
+
+(defvar git-blame-update-queue nil
+ "A queue of update requests")
+(make-variable-buffer-local 'git-blame-update-queue)
+
+;; FIXME: docstrings
+(defvar git-blame-file nil)
+(defvar git-blame-current nil)
+
+(defvar git-blame-mode nil)
+(make-variable-buffer-local 'git-blame-mode)
+
+(defvar git-blame-mode-line-string " blame"
+ "String to display on the mode line when git-blame is active.")
+
+(or (assq 'git-blame-mode minor-mode-alist)
+ (setq minor-mode-alist
+ (cons '(git-blame-mode git-blame-mode-line-string) minor-mode-alist)))
+
+;;;###autoload
+(defun git-blame-mode (&optional arg)
+ "Toggle minor mode for displaying Git blame
+
+With prefix ARG, turn the mode on if ARG is positive."
+ (interactive "P")
+ (cond
+ ((null arg)
+ (if git-blame-mode (git-blame-mode-off) (git-blame-mode-on)))
+ ((> (prefix-numeric-value arg) 0) (git-blame-mode-on))
+ (t (git-blame-mode-off))))
+
+(defun git-blame-mode-on ()
+ "Turn on git-blame mode.
+
+See also function `git-blame-mode'."
+ (make-local-variable 'git-blame-colors)
+ (if git-blame-autoupdate
+ (add-hook 'after-change-functions 'git-blame-after-change nil t)
+ (remove-hook 'after-change-functions 'git-blame-after-change t))
+ (git-blame-cleanup)
+ (let ((bgmode (cdr (assoc 'background-mode (frame-parameters)))))
+ (if (eq bgmode 'dark)
+ (setq git-blame-colors git-blame-dark-colors)
+ (setq git-blame-colors git-blame-light-colors)))
+ (setq git-blame-cache (make-hash-table :test 'equal))
+ (setq git-blame-mode t)
+ (git-blame-run))
+
+(defun git-blame-mode-off ()
+ "Turn off git-blame mode.
+
+See also function `git-blame-mode'."
+ (git-blame-cleanup)
+ (if git-blame-idle-timer (cancel-timer git-blame-idle-timer))
+ (setq git-blame-mode nil))
+
+;;;###autoload
+(defun git-reblame ()
+ "Recalculate all blame information in the current buffer"
+ (interactive)
+ (unless git-blame-mode
+ (error "Git-blame is not active"))
+
+ (git-blame-cleanup)
+ (git-blame-run))
+
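+;; Start an asynchronous "git blame --incremental --contents -" on the
+;; buffer contents; its output is parsed piecemeal by `git-blame-filter'.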
+(defun git-blame-run (&optional startline endline)
+ (if git-blame-proc
+ ;; Should maybe queue up a new run here
+ (message "Already running git blame")
+ (let ((display-buf (current-buffer))
+ (blame-buf (get-buffer-create
+ (concat " git blame for " (buffer-name))))
+ (args '("--incremental" "--contents" "-")))
+ (if startline
+ (setq args (append args
+ (list "-L" (format "%d,%d" startline endline)))))
+ (setq args (append args
+ (list (file-name-nondirectory buffer-file-name))))
+ (setq git-blame-proc
+ (apply 'start-process
+ "git-blame" blame-buf
+ "git" "blame"
+ args))
+ (with-current-buffer blame-buf
+ (erase-buffer)
+ (make-local-variable 'git-blame-file)
+ (make-local-variable 'git-blame-current)
+ (setq git-blame-file display-buf)
+ (setq git-blame-current nil))
+ (set-process-filter git-blame-proc 'git-blame-filter)
+ (set-process-sentinel git-blame-proc 'git-blame-sentinel)
+ (process-send-region git-blame-proc (point-min) (point-max))
+ (process-send-eof git-blame-proc))))
+
+(defun remove-git-blame-text-properties (start end)
+ (let ((modified (buffer-modified-p))
+ (inhibit-read-only t))
+ (remove-text-properties start end '(point-entered nil))
+ (set-buffer-modified-p modified)))
+
+(defun git-blame-cleanup ()
+ "Remove all blame properties"
+ (mapc 'delete-overlay git-blame-overlays)
+ (setq git-blame-overlays nil)
+ (remove-git-blame-text-properties (point-min) (point-max)))
+
+(defun git-blame-update-region (start end)
+ "Rerun blame to get updates between START and END"
+ (let ((overlays (overlays-in start end)))
+ (while overlays
+ (let ((overlay (pop overlays)))
+ (if (< (overlay-start overlay) start)
+ (setq start (overlay-start overlay)))
+ (if (> (overlay-end overlay) end)
+ (setq end (overlay-end overlay)))
+ (setq git-blame-overlays (delete overlay git-blame-overlays))
+ (delete-overlay overlay))))
+ (remove-git-blame-text-properties start end)
+ ;; We can be sure that start and end are at line breaks
+ (git-blame-run (1+ (count-lines (point-min) start))
+ (count-lines (point-min) end)))
+
+(defun git-blame-sentinel (proc status)
+ (with-current-buffer (process-buffer proc)
+ (with-current-buffer git-blame-file
+ (setq git-blame-proc nil)
+ (if git-blame-update-queue
+ (git-blame-delayed-update))))
+ ;;(kill-buffer (process-buffer proc))
+ ;;(message "git blame finished")
+ )
+
+(defvar in-blame-filter nil)
+
+(defun git-blame-filter (proc str)
+ (with-current-buffer (process-buffer proc)
+ (save-excursion
+ (goto-char (process-mark proc))
+ (insert-before-markers str)
+ (goto-char (point-min))
+ (unless in-blame-filter
+ (let ((more t)
+ (in-blame-filter t))
+ (while more
+ (setq more (git-blame-parse))))))))
+
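+;; Parse one unit of "git blame --incremental" output: either a header
+;; line "<40-hex-sha> <src-line> <res-line> <num-lines>" or a
+;; "<key> <value>" attribute line; the "filename" attribute ends an
+;; entry and triggers creation of its overlay.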
+(defun git-blame-parse ()
+ (cond ((looking-at "\\([0-9a-f]\\{40\\}\\) \\([0-9]+\\) \\([0-9]+\\) \\([0-9]+\\)\n")
+ (let ((hash (match-string 1))
+ (src-line (string-to-number (match-string 2)))
+ (res-line (string-to-number (match-string 3)))
+ (num-lines (string-to-number (match-string 4))))
+ (delete-region (point) (match-end 0))
+ (setq git-blame-current (list (git-blame-new-commit hash)
+ src-line res-line num-lines)))
+ t)
+ ((looking-at "\\([a-z-]+\\) \\(.+\\)\n")
+ (let ((key (match-string 1))
+ (value (match-string 2)))
+ (delete-region (point) (match-end 0))
+ (git-blame-add-info (car git-blame-current) key value)
+ (when (string= key "filename")
+ (git-blame-create-overlay (car git-blame-current)
+ (caddr git-blame-current)
+ (cadddr git-blame-current))
+ (setq git-blame-current nil)))
+ t)
+ (t
+ nil)))
+
+(defun git-blame-new-commit (hash)
+ (with-current-buffer git-blame-file
+ (or (gethash hash git-blame-cache)
+ ;; Assign a random color to each new commit info
+ ;; Take care not to select the same color multiple times
+ (let* ((color (if git-blame-colors
+ (git-blame-random-pop git-blame-colors)
+ git-blame-ancient-color))
+ (info `(,hash (color . ,color))))
+ (puthash hash info git-blame-cache)
+ info))))
+
+(defun git-blame-create-overlay (info start-line num-lines)
+ (with-current-buffer git-blame-file
+ (save-excursion
+ (let ((inhibit-point-motion-hooks t)
+ (inhibit-modification-hooks t))
+ (goto-char (point-min))
+ (forward-line (1- start-line))
+ (let* ((start (point))
+ (end (progn (forward-line num-lines) (point)))
+ (ovl (make-overlay start end))
+ (hash (car info))
+ (spec `((?h . ,(substring hash 0 6))
+ (?H . ,hash)
+ (?a . ,(git-blame-get-info info 'author))
+ (?A . ,(git-blame-get-info info 'author-mail))
+ (?c . ,(git-blame-get-info info 'committer))
+ (?C . ,(git-blame-get-info info 'committer-mail))
+ (?s . ,(git-blame-get-info info 'summary)))))
+ (push ovl git-blame-overlays)
+ (overlay-put ovl 'git-blame info)
+ (overlay-put ovl 'help-echo
+ (format-spec git-blame-mouseover-format spec))
+ (if git-blame-use-colors
+ (overlay-put ovl 'face (list :background
+ (cdr (assq 'color (cdr info))))))
+ (overlay-put ovl 'line-prefix
+ (propertize (format-spec git-blame-prefix-format spec)
+ 'face 'git-blame-prefix-face)))))))
+
+(defun git-blame-add-info (info key value)
+ (nconc info (list (cons (intern key) value))))
+
+(defun git-blame-get-info (info key)
+ (cdr (assq key (cdr info))))
+
+(defun git-blame-current-commit ()
+ (let ((info (get-char-property (point) 'git-blame)))
+ (if info
+ (car info)
+ (error "No commit info"))))
+
+(defun git-describe-commit (hash)
+ (with-temp-buffer
+ (call-process "git" nil t nil
+ "log" "-1"
+ (concat "--pretty=" git-blame-log-oneline-format)
+ hash)
+ (buffer-substring (point-min) (point-max))))
+
+(defvar git-blame-last-identification nil)
+(make-variable-buffer-local 'git-blame-last-identification)
+(defun git-blame-identify (&optional hash)
+ (interactive)
+ (let ((info (gethash (or hash (git-blame-current-commit)) git-blame-cache)))
+ (when (and info (not (eq info git-blame-last-identification)))
+ (message "%s" (nth 4 info))
+ (setq git-blame-last-identification info))))
+
+;; (defun git-blame-after-save ()
+;; (when git-blame-mode
+;; (git-blame-cleanup)
+;; (git-blame-run)))
+;; (add-hook 'after-save-hook 'git-blame-after-save)
+
+(defun git-blame-after-change (start end length)
+ (when git-blame-mode
+ (git-blame-enq-update start end)))
+
+(defvar git-blame-last-update nil)
+(make-variable-buffer-local 'git-blame-last-update)
+(defun git-blame-enq-update (start end)
+ "Mark the region between START and END as needing blame update"
+ ;; Try to be smart and avoid multiple callouts for sequential
+ ;; editing
+ (cond ((and git-blame-last-update
+ (= start (cdr git-blame-last-update)))
+ (setcdr git-blame-last-update end))
+ ((and git-blame-last-update
+ (= end (car git-blame-last-update)))
+ (setcar git-blame-last-update start))
+ (t
+ (setq git-blame-last-update (cons start end))
+ (setq git-blame-update-queue (nconc git-blame-update-queue
+ (list git-blame-last-update)))))
+ (unless (or git-blame-proc git-blame-idle-timer)
+ (setq git-blame-idle-timer
+ (run-with-idle-timer 0.5 nil 'git-blame-delayed-update))))
+
+(defun git-blame-delayed-update ()
+ (setq git-blame-idle-timer nil)
+ (if git-blame-update-queue
+ (let ((first (pop git-blame-update-queue))
+ (inhibit-point-motion-hooks t))
+ (git-blame-update-region (car first) (cdr first)))))
+
+(provide 'git-blame)
+
+;;; git-blame.el ends here
diff --git a/contrib/emacs/git.el b/contrib/emacs/git.el
new file mode 100644
index 0000000..5ffc506
--- /dev/null
+++ b/contrib/emacs/git.el
@@ -0,0 +1,1705 @@
+;;; git.el --- A user interface for git
+
+;; Copyright (C) 2005, 2006, 2007, 2008, 2009 Alexandre Julliard <julliard@winehq.org>
+
+;; Version: 1.0
+
+;; This program is free software; you can redistribute it and/or
+;; modify it under the terms of the GNU General Public License as
+;; published by the Free Software Foundation; either version 2 of
+;; the License, or (at your option) any later version.
+;;
+;; This program is distributed in the hope that it will be
+;; useful, but WITHOUT ANY WARRANTY; without even the implied
+;; warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+;; PURPOSE. See the GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public
+;; License along with this program; if not, write to the Free
+;; Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
+;; MA 02111-1307 USA
+
+;;; Commentary:
+
+;; This file contains an interface for the git version control
+;; system. It provides easy access to the most frequently used git
+;; commands. The user interface is as far as possible identical to
+;; that of the PCL-CVS mode.
+;;
+;; To install: put this file on the load-path and place the following
+;; in your .emacs file:
+;;
+;; (require 'git)
+;;
+;; To start: `M-x git-status'
+;;
+;; TODO
+;; - diff against other branch
+;; - renaming files from the status buffer
+;; - creating tags
+;; - fetch/pull
+;; - revlist browser
+;; - git-show-branch browser
+;;
+
+;;; Compatibility:
+;;
+;; This file works on GNU Emacs 21 or later. It may work on older
+;; versions but this is not guaranteed.
+;;
+;; It may work on XEmacs 21, provided that you first install the ewoc
+;; and log-edit packages.
+;;
+
+(eval-when-compile (require 'cl))
+(require 'ewoc)
+(require 'log-edit)
+(require 'easymenu)
+
+
+;;;; Customizations
+;;;; ------------------------------------------------------------
+
+(defgroup git nil
+ "A user interface for the git versioning system."
+ :group 'tools)
+
+(defcustom git-committer-name nil
+ "User name to use for commits.
+The default is to fall back to the repository config,
+then to `add-log-full-name' and then to `user-full-name'."
+ :group 'git
+ :type '(choice (const :tag "Default" nil)
+ (string :tag "Name")))
+
+(defcustom git-committer-email nil
+ "Email address to use for commits.
+The default is to fall back to the git repository config,
+then to `add-log-mailing-address' and then to `user-mail-address'."
+ :group 'git
+ :type '(choice (const :tag "Default" nil)
+ (string :tag "Email")))
+
+(defcustom git-commits-coding-system nil
+ "Default coding system for the log message of git commits."
+ :group 'git
+ :type '(choice (const :tag "From repository config" nil)
+ (coding-system)))
+
+(defcustom git-append-signed-off-by nil
+ "Whether to append a Signed-off-by line to the commit message before editing."
+ :group 'git
+ :type 'boolean)
+
+(defcustom git-reuse-status-buffer t
+ "Whether `git-status' should try to reuse an existing buffer
+if there is already one that displays the same directory."
+ :group 'git
+ :type 'boolean)
+
+(defcustom git-per-dir-ignore-file ".gitignore"
+ "Name of the per-directory ignore file."
+ :group 'git
+ :type 'string)
+
+(defcustom git-show-uptodate nil
+ "Whether to display up-to-date files."
+ :group 'git
+ :type 'boolean)
+
+(defcustom git-show-ignored nil
+ "Whether to display ignored files."
+ :group 'git
+ :type 'boolean)
+
+(defcustom git-show-unknown t
+ "Whether to display unknown files."
+ :group 'git
+ :type 'boolean)
+
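+;; A minimal sketch (illustrative values only, not defaults) of setting
+;; the options above from an init file instead of through `customize':
+;;
+;; (setq git-committer-name "A. U. Thor"
+;;       git-committer-email "author@example.com"
+;;       git-append-signed-off-by t
+;;       git-show-uptodate t)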
+
+(defface git-status-face
+ '((((class color) (background light)) (:foreground "purple"))
+ (((class color) (background dark)) (:foreground "salmon")))
+ "Git mode face used to highlight added and modified files."
+ :group 'git)
+
+(defface git-unmerged-face
+ '((((class color) (background light)) (:foreground "red" :bold t))
+ (((class color) (background dark)) (:foreground "red" :bold t)))
+ "Git mode face used to highlight unmerged files."
+ :group 'git)
+
+(defface git-unknown-face
+ '((((class color) (background light)) (:foreground "goldenrod" :bold t))
+ (((class color) (background dark)) (:foreground "goldenrod" :bold t)))
+ "Git mode face used to highlight unknown files."
+ :group 'git)
+
+(defface git-uptodate-face
+ '((((class color) (background light)) (:foreground "grey60"))
+ (((class color) (background dark)) (:foreground "grey40")))
+ "Git mode face used to highlight up-to-date files."
+ :group 'git)
+
+(defface git-ignored-face
+ '((((class color) (background light)) (:foreground "grey60"))
+ (((class color) (background dark)) (:foreground "grey40")))
+ "Git mode face used to highlight ignored files."
+ :group 'git)
+
+(defface git-mark-face
+ '((((class color) (background light)) (:foreground "red" :bold t))
+ (((class color) (background dark)) (:foreground "tomato" :bold t)))
+ "Git mode face used for the file marks."
+ :group 'git)
+
+(defface git-header-face
+ '((((class color) (background light)) (:foreground "blue"))
+ (((class color) (background dark)) (:foreground "blue")))
+ "Git mode face used for commit headers."
+ :group 'git)
+
+(defface git-separator-face
+ '((((class color) (background light)) (:foreground "brown"))
+ (((class color) (background dark)) (:foreground "brown")))
+ "Git mode face used for commit separator."
+ :group 'git)
+
+(defface git-permission-face
+ '((((class color) (background light)) (:foreground "green" :bold t))
+ (((class color) (background dark)) (:foreground "green" :bold t)))
+ "Git mode face used for permission changes."
+ :group 'git)
+
+
+;;;; Utilities
+;;;; ------------------------------------------------------------
+
+(defconst git-log-msg-separator "--- log message follows this line ---")
+
+(defvar git-log-edit-font-lock-keywords
+ `(("^\\(Author:\\|Date:\\|Merge:\\|Signed-off-by:\\)\\(.*\\)$"
+ (1 font-lock-keyword-face)
+ (2 font-lock-function-name-face))
+ (,(concat "^\\(" (regexp-quote git-log-msg-separator) "\\)$")
+ (1 font-lock-comment-face))))
+
+(defun git-get-env-strings (env)
+ "Build a list of NAME=VALUE strings from a list of environment strings."
+ (mapcar (lambda (entry) (concat (car entry) "=" (cdr entry))) env))
+
+(defun git-call-process (buffer &rest args)
+ "Wrapper for call-process that sets environment strings."
+ (apply #'call-process "git" nil buffer nil args))
+
+(defun git-call-process-display-error (&rest args)
+ "Wrapper for call-process that displays error messages."
+ (let* ((dir default-directory)
+ (buffer (get-buffer-create "*Git Command Output*"))
+ (ok (with-current-buffer buffer
+ (let ((default-directory dir)
+ (buffer-read-only nil))
+ (erase-buffer)
+ (eq 0 (apply #'git-call-process (list buffer t) args))))))
+ (unless ok (display-message-or-buffer buffer))
+ ok))
+
+(defun git-call-process-string (&rest args)
+ "Wrapper for call-process that returns the process output as a string,
+or nil if the git command failed."
+ (with-temp-buffer
+ (and (eq 0 (apply #'git-call-process t args))
+ (buffer-string))))
+
+(defun git-call-process-string-display-error (&rest args)
+ "Wrapper for call-process that displays error message and returns
+the process output as a string, or nil if the git command failed."
+ (with-temp-buffer
+ (if (eq 0 (apply #'git-call-process (list t t) args))
+ (buffer-string)
+ (display-message-or-buffer (current-buffer))
+ nil)))
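+
+;; Usage sketch for the wrappers above (the output shown is hypothetical):
+;; (git-call-process-string "rev-parse" "HEAD") returns the command's
+;; output as a string, e.g. "4f1a...\n", or nil if git exited non-zero.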
+
+(defun git-run-process-region (buffer start end program args)
+ "Run a git process with a buffer region as input."
+ (let ((output-buffer (current-buffer))
+ (dir default-directory))
+ (with-current-buffer buffer
+ (cd dir)
+ (apply #'call-process-region start end program
+ nil (list output-buffer t) nil args))))
+
+(defun git-run-command-buffer (buffer-name &rest args)
+ "Run a git command, sending the output to a buffer named BUFFER-NAME."
+ (let ((dir default-directory)
+ (buffer (get-buffer-create buffer-name)))
+ (message "Running git %s..." (car args))
+ (with-current-buffer buffer
+ (let ((default-directory dir)
+ (buffer-read-only nil))
+ (erase-buffer)
+ (apply #'git-call-process buffer args)))
+ (message "Running git %s...done" (car args))
+ buffer))
+
+(defun git-run-command-region (buffer start end env &rest args)
+ "Run a git command with specified buffer region as input."
+ (with-temp-buffer
+ (if (eq 0 (if env
+ (git-run-process-region
+ buffer start end "env"
+ (append (git-get-env-strings env) (list "git") args))
+ (git-run-process-region buffer start end "git" args)))
+ (buffer-string)
+ (display-message-or-buffer (current-buffer))
+ nil)))
+
+(defun git-run-hook (hook env &rest args)
+ "Run a git hook and display its output if any."
+ (let ((dir default-directory)
+ (hook-name (expand-file-name (concat ".git/hooks/" hook))))
+ (or (not (file-executable-p hook-name))
+ (let (status (buffer (get-buffer-create "*Git Hook Output*")))
+ (with-current-buffer buffer
+ (erase-buffer)
+ (cd dir)
+ (setq status
+ (if env
+ (apply #'call-process "env" nil (list buffer t) nil
+ (append (git-get-env-strings env) (list hook-name) args))
+ (apply #'call-process hook-name nil (list buffer t) nil args))))
+ (display-message-or-buffer buffer)
+ (eq 0 status)))))
+
+(defun git-get-string-sha1 (string)
+ "Read a SHA1 from the specified string."
+ (and string
+ (string-match "[0-9a-f]\\{40\\}" string)
+ (match-string 0 string)))
+
+(defun git-get-committer-name ()
+ "Return the name to use as GIT_COMMITTER_NAME."
+ ; copied from log-edit
+ (or git-committer-name
+ (git-config "user.name")
+ (and (boundp 'add-log-full-name) add-log-full-name)
+ (and (fboundp 'user-full-name) (user-full-name))
+ (and (boundp 'user-full-name) user-full-name)))
+
+(defun git-get-committer-email ()
+ "Return the email address to use as GIT_COMMITTER_EMAIL."
+ ; copied from log-edit
+ (or git-committer-email
+ (git-config "user.email")
+ (and (boundp 'add-log-mailing-address) add-log-mailing-address)
+ (and (fboundp 'user-mail-address) (user-mail-address))
+ (and (boundp 'user-mail-address) user-mail-address)))
+
+(defun git-get-commits-coding-system ()
+ "Return the coding system to use for commits."
+ (let ((repo-config (git-config "i18n.commitencoding")))
+ (or git-commits-coding-system
+ (and repo-config
+ (fboundp 'locale-charset-to-coding-system)
+ (locale-charset-to-coding-system repo-config))
+ 'utf-8)))
+
+(defun git-get-logoutput-coding-system ()
+ "Return the coding system used for git-log output."
+ (let ((repo-config (or (git-config "i18n.logoutputencoding")
+ (git-config "i18n.commitencoding"))))
+ (or git-commits-coding-system
+ (and repo-config
+ (fboundp 'locale-charset-to-coding-system)
+ (locale-charset-to-coding-system repo-config))
+ 'utf-8)))
+
+(defun git-escape-file-name (name)
+ "Escape a file name if necessary."
+ (if (string-match "[\n\t\"\\]" name)
+ (concat "\""
+ (mapconcat (lambda (c)
+ (case c
+ (?\n "\\n")
+ (?\t "\\t")
+ (?\\ "\\\\")
+ (?\" "\\\"")
+ (t (char-to-string c))))
+ name "")
+ "\"")
+ name))
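+
+;; For example (illustrative): (git-escape-file-name "a b") returns
+;; "a b" unchanged, while (git-escape-file-name "a\tb") returns the
+;; quoted form "\"a\\tb\"".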
+
+(defun git-success-message (text files)
+ "Print a success message after having handled FILES."
+ (let ((n (length files)))
+ (if (equal n 1)
+ (message "%s %s" text (car files))
+ (message "%s %d files" text n))))
+
+(defun git-get-top-dir (dir)
+ "Retrieve the top-level directory of a git tree."
+ (let ((cdup (with-output-to-string
+ (with-current-buffer standard-output
+ (cd dir)
+ (unless (eq 0 (git-call-process t "rev-parse" "--show-cdup"))
+ (error "cannot find top-level git tree for %s." dir))))))
+ (expand-file-name (concat (file-name-as-directory dir)
+ (car (split-string cdup "\n"))))))
+
+;stolen from pcl-cvs
+(defun git-append-to-ignore (file)
+ "Add a file name to the ignore file in its directory."
+ (let* ((fullname (expand-file-name file))
+ (dir (file-name-directory fullname))
+ (name (file-name-nondirectory fullname))
+ (ignore-name (expand-file-name git-per-dir-ignore-file dir))
+ (created (not (file-exists-p ignore-name))))
+ (save-window-excursion
+ (set-buffer (find-file-noselect ignore-name))
+ (goto-char (point-max))
+ (unless (zerop (current-column)) (insert "\n"))
+ (insert "/" name "\n")
+ (sort-lines nil (point-min) (point-max))
+ (save-buffer))
+ (when created
+ (git-call-process nil "update-index" "--add" "--" (file-relative-name ignore-name)))
+ (git-update-status-files (list (file-relative-name ignore-name)))))
+
+; propertize definition for XEmacs, stolen from erc-compat
+(eval-when-compile
+ (unless (fboundp 'propertize)
+ (defun propertize (string &rest props)
+ (let ((string (copy-sequence string)))
+ (while props
+ (put-text-property 0 (length string) (nth 0 props) (nth 1 props) string)
+ (setq props (cddr props)))
+ string))))
+
+;;;; Wrappers for basic git commands
+;;;; ------------------------------------------------------------
+
+(defun git-rev-parse (rev)
+ "Parse a revision name and return its SHA1."
+ (git-get-string-sha1
+ (git-call-process-string "rev-parse" rev)))
+
+(defun git-config (key)
+ "Retrieve the value associated to KEY in the git repository config file."
+ (let ((str (git-call-process-string "config" key)))
+ (and str (car (split-string str "\n")))))
+
+(defun git-symbolic-ref (ref)
+ "Wrapper for the git-symbolic-ref command."
+ (let ((str (git-call-process-string "symbolic-ref" ref)))
+ (and str (car (split-string str "\n")))))
+
+(defun git-update-ref (ref newval &optional oldval reason)
+ "Update a reference by calling git-update-ref."
+ (let ((args (and oldval (list oldval))))
+ (when newval (push newval args))
+ (push ref args)
+ (when reason
+ (push reason args)
+ (push "-m" args))
+ (unless newval (push "-d" args))
+ (apply 'git-call-process-display-error "update-ref" args)))
+
+(defun git-for-each-ref (&rest specs)
+ "Return a list of refs using git-for-each-ref.
+Each entry is a cons of (SHORT-NAME . FULL-NAME)."
+ (let (refs)
+ (with-temp-buffer
+ (apply #'git-call-process t "for-each-ref" "--format=%(refname)" specs)
+ (goto-char (point-min))
+ (while (re-search-forward "^[^/\n]+/[^/\n]+/\\(.+\\)$" nil t)
+ (push (cons (match-string 1) (match-string 0)) refs)))
+ (nreverse refs)))
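+
+;; Illustrative call: (git-for-each-ref "refs/heads") might return
+;; (("master" . "refs/heads/master") ("topic" . "refs/heads/topic"))
+;; in a repository with those two branches.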
+
+(defun git-read-tree (tree &optional index-file)
+ "Read a tree into the index file."
+ (let ((process-environment
+ (append (and index-file (list (concat "GIT_INDEX_FILE=" index-file))) process-environment)))
+ (apply 'git-call-process-display-error "read-tree" (if tree (list tree)))))
+
+(defun git-write-tree (&optional index-file)
+ "Call git-write-tree and return the resulting tree SHA1 as a string."
+ (let ((process-environment
+ (append (and index-file (list (concat "GIT_INDEX_FILE=" index-file))) process-environment)))
+ (git-get-string-sha1
+ (git-call-process-string-display-error "write-tree"))))
+
+(defun git-commit-tree (buffer tree parent)
+ "Create a commit and possibly update HEAD.
+Create a commit with the message in BUFFER using the tree with hash TREE.
+Use PARENT as the parent of the new commit. If PARENT is the current \"HEAD\",
+update the \"HEAD\" reference to the new commit."
+ (let ((author-name (git-get-committer-name))
+ (author-email (git-get-committer-email))
+ (subject "commit (initial): ")
+ author-date log-start log-end args coding-system-for-write)
+ (when parent
+ (setq subject "commit: ")
+ (push "-p" args)
+ (push parent args))
+ (with-current-buffer buffer
+ (goto-char (point-min))
+ (if
+ (setq log-start (re-search-forward (concat "^" (regexp-quote git-log-msg-separator) "\n") nil t))
+ (save-restriction
+ (narrow-to-region (point-min) log-start)
+ (goto-char (point-min))
+ (when (re-search-forward "^Author: +\\(.*?\\) *<\\(.*\\)> *$" nil t)
+ (setq author-name (match-string 1)
+ author-email (match-string 2)))
+ (goto-char (point-min))
+ (when (re-search-forward "^Date: +\\(.*\\)$" nil t)
+ (setq author-date (match-string 1)))
+ (goto-char (point-min))
+ (when (re-search-forward "^Merge: +\\(.*\\)" nil t)
+ (setq subject "commit (merge): ")
+ (dolist (parent (split-string (match-string 1) " +" t))
+ (push "-p" args)
+ (push parent args))))
+ (setq log-start (point-min)))
+ (setq log-end (point-max))
+ (goto-char log-start)
+ (when (re-search-forward ".*$" nil t)
+ (setq subject (concat subject (match-string 0))))
+ (setq coding-system-for-write buffer-file-coding-system))
+ (let ((commit
+ (git-get-string-sha1
+ (let ((env `(("GIT_AUTHOR_NAME" . ,author-name)
+ ("GIT_AUTHOR_EMAIL" . ,author-email)
+ ("GIT_COMMITTER_NAME" . ,(git-get-committer-name))
+ ("GIT_COMMITTER_EMAIL" . ,(git-get-committer-email)))))
+ (when author-date (push `("GIT_AUTHOR_DATE" . ,author-date) env))
+ (apply #'git-run-command-region
+ buffer log-start log-end env
+ "commit-tree" tree (nreverse args))))))
+ (when commit (git-update-ref "HEAD" commit parent subject))
+ commit)))
+
+(defun git-empty-db-p ()
+ "Check if the git db is empty (no commit done yet)."
+ (not (eq 0 (git-call-process nil "rev-parse" "--verify" "HEAD"))))
+
+(defun git-get-merge-heads ()
+ "Retrieve the merge heads from the MERGE_HEAD file if present."
+ (let (heads)
+ (when (file-readable-p ".git/MERGE_HEAD")
+ (with-temp-buffer
+ (insert-file-contents ".git/MERGE_HEAD" nil nil nil t)
+ (goto-char (point-min))
+ (while (re-search-forward "[0-9a-f]\\{40\\}" nil t)
+ (push (match-string 0) heads))))
+ (nreverse heads)))
+
+(defun git-get-commit-description (commit)
+ "Get a one-line description of COMMIT."
+ (let ((coding-system-for-read (git-get-logoutput-coding-system)))
+ (let ((descr (git-call-process-string "log" "--max-count=1" "--pretty=oneline" commit)))
+ (if (and descr (string-match "\\`\\([0-9a-f]\\{40\\}\\) *\\(.*\\)$" descr))
+ (concat (substring (match-string 1 descr) 0 10) " - " (match-string 2 descr))
+ descr))))
+
+;;;; File info structure
+;;;; ------------------------------------------------------------
+
+; fileinfo structure stolen from pcl-cvs
+(defstruct (git-fileinfo
+ (:copier nil)
+ (:constructor git-create-fileinfo (state name &optional old-perm new-perm rename-state orig-name marked))
+ (:conc-name git-fileinfo->))
+ marked ;; t/nil
+ state ;; current state
+ name ;; file name
+ old-perm new-perm ;; permission flags
+ rename-state ;; rename or copy state
+ orig-name ;; original name for renames or copies
+ needs-update ;; whether file needs to be updated
+ needs-refresh) ;; whether file needs to be refreshed
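+
+;; Illustrative use of the generated constructor, with made-up values:
+;; (git-create-fileinfo 'modified "Makefile" #o100644 #o100644)
+;; creates a fileinfo that `git-set-fileinfo-state' can later update.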
+
+(defvar git-status nil)
+
+(defun git-set-fileinfo-state (info state)
+ "Set the state of a file info."
+ (unless (eq (git-fileinfo->state info) state)
+ (setf (git-fileinfo->state info) state
+ (git-fileinfo->new-perm info) (git-fileinfo->old-perm info)
+ (git-fileinfo->rename-state info) nil
+ (git-fileinfo->orig-name info) nil
+ (git-fileinfo->needs-update info) nil
+ (git-fileinfo->needs-refresh info) t)))
+
+(defun git-status-filenames-map (status func files &rest args)
+ "Apply FUNC to the status files names in the FILES list.
+The list must be sorted."
+ (when files
+ (let ((file (pop files))
+ (node (ewoc-nth status 0)))
+ (while (and file node)
+ (let* ((info (ewoc-data node))
+ (name (git-fileinfo->name info)))
+ (if (string-lessp name file)
+ (setq node (ewoc-next status node))
+ (if (string-equal name file)
+ (apply func info args))
+ (setq file (pop files))))))))
+
+(defun git-set-filenames-state (status files state)
+ "Set the state of a list of named files. The list must be sorted"
+ (when files
+ (git-status-filenames-map status #'git-set-fileinfo-state files state)
+ (unless state ;; delete files whose state has been set to nil
+ (ewoc-filter status (lambda (info) (git-fileinfo->state info))))))
+
+(defun git-state-code (code)
+ "Convert from a string to a added/deleted/modified state."
+ (case (string-to-char code)
+ (?M 'modified)
+ (?? 'unknown)
+ (?A 'added)
+ (?D 'deleted)
+ (?U 'unmerged)
+ (?T 'modified)
+ (t nil)))
+
+(defun git-status-code-as-string (code)
+ "Format a git status code as string."
+ (case code
+ ('modified (propertize "Modified" 'face 'git-status-face))
+ ('unknown (propertize "Unknown " 'face 'git-unknown-face))
+ ('added (propertize "Added " 'face 'git-status-face))
+ ('deleted (propertize "Deleted " 'face 'git-status-face))
+ ('unmerged (propertize "Unmerged" 'face 'git-unmerged-face))
+ ('uptodate (propertize "Uptodate" 'face 'git-uptodate-face))
+ ('ignored (propertize "Ignored " 'face 'git-ignored-face))
+ (t "? ")))
+
+(defun git-file-type-as-string (old-perm new-perm)
+ "Return a string describing the file type based on its permissions."
+ (let* ((old-type (lsh (or old-perm 0) -9))
+ (new-type (lsh (or new-perm 0) -9))
+ (str (case new-type
+ (64 ;; file
+ (case old-type
+ (64 nil)
+ (80 " (type change symlink -> file)")
+ (112 " (type change subproject -> file)")))
+ (80 ;; symlink
+ (case old-type
+ (64 " (type change file -> symlink)")
+ (112 " (type change subproject -> symlink)")
+ (t " (symlink)")))
+ (112 ;; subproject
+ (case old-type
+ (64 " (type change file -> subproject)")
+ (80 " (type change symlink -> subproject)")
+ (t " (subproject)")))
+ (72 nil) ;; directory (internal, not a real git state)
+ (0 ;; deleted or unknown
+ (case old-type
+ (80 " (symlink)")
+ (112 " (subproject)")))
+ (t (format " (unknown type %o)" new-type)))))
+ (cond (str (propertize str 'face 'git-status-face))
+ ((eq new-type 72) "/")
+ (t ""))))
+
+(defun git-rename-as-string (info)
+ "Return a string describing the copy or rename associated with INFO, or an empty string if none."
+ (let ((state (git-fileinfo->rename-state info)))
+ (if state
+ (propertize
+ (concat " ("
+ (if (eq state 'copy) "copied from "
+ (if (eq (git-fileinfo->state info) 'added) "renamed from "
+ "renamed to "))
+ (git-escape-file-name (git-fileinfo->orig-name info))
+ ")") 'face 'git-status-face)
+ "")))
+
+(defun git-permissions-as-string (old-perm new-perm)
+ "Format a permission change as string."
+ (propertize
+ (if (or (not old-perm)
+ (not new-perm)
+ (eq 0 (logand ?\111 (logxor old-perm new-perm))))
+ " "
+ (if (eq 0 (logand ?\111 old-perm)) "+x" "-x"))
+ 'face 'git-permission-face))
+
+(defun git-fileinfo-prettyprint (info)
+ "Pretty-printer for the git-fileinfo structure."
+ (let ((old-perm (git-fileinfo->old-perm info))
+ (new-perm (git-fileinfo->new-perm info)))
+ (insert (concat " " (if (git-fileinfo->marked info) (propertize "*" 'face 'git-mark-face) " ")
+ " " (git-status-code-as-string (git-fileinfo->state info))
+ " " (git-permissions-as-string old-perm new-perm)
+ " " (git-escape-file-name (git-fileinfo->name info))
+ (git-file-type-as-string old-perm new-perm)
+ (git-rename-as-string info)))))
+
+(defun git-update-node-fileinfo (node info)
+ "Update the fileinfo of the specified node. The names are assumed to match already."
+ (let ((data (ewoc-data node)))
+ (setf
+ ;; preserve the marked flag
+ (git-fileinfo->marked info) (git-fileinfo->marked data)
+ (git-fileinfo->needs-update data) nil)
+ (when (not (equal info data))
+ (setf (git-fileinfo->needs-refresh info) t
+ (ewoc-data node) info))))
+
+(defun git-insert-info-list (status infolist files)
+ "Insert a sorted list of file infos in the status buffer, replacing existing ones if any."
+ (let* ((info (pop infolist))
+ (node (ewoc-nth status 0))
+ (name (and info (git-fileinfo->name info)))
+ remaining)
+ (while info
+ (let ((nodename (and node (git-fileinfo->name (ewoc-data node)))))
+ (while (and files (string-lessp (car files) name))
+ (push (pop files) remaining))
+ (when (and files (string-equal (car files) name))
+ (setq files (cdr files)))
+ (cond ((not nodename)
+ (setq node (ewoc-enter-last status info))
+ (setq info (pop infolist))
+ (setq name (and info (git-fileinfo->name info))))
+ ((string-lessp nodename name)
+ (setq node (ewoc-next status node)))
+ ((string-equal nodename name)
+ ;; preserve the marked flag
+ (git-update-node-fileinfo node info)
+ (setq info (pop infolist))
+ (setq name (and info (git-fileinfo->name info))))
+ (t
+ (setq node (ewoc-enter-before status node info))
+ (setq info (pop infolist))
+ (setq name (and info (git-fileinfo->name info)))))))
+ (nconc (nreverse remaining) files)))
+
+(defun git-run-diff-index (status files)
+ "Run git-diff-index on FILES and parse the results into STATUS.
+Return the list of files that haven't been handled."
+ (let (infolist)
+ (with-temp-buffer
+ (apply #'git-call-process t "diff-index" "-z" "-M" "HEAD" "--" files)
+ (goto-char (point-min))
+ (while (re-search-forward
+ ":\\([0-7]\\{6\\}\\) \\([0-7]\\{6\\}\\) [0-9a-f]\\{40\\} [0-9a-f]\\{40\\} \\(\\([ADMUT]\\)\0\\([^\0]+\\)\\|\\([CR]\\)[0-9]*\0\\([^\0]+\\)\0\\([^\0]+\\)\\)\0"
+ nil t 1)
+ (let ((old-perm (string-to-number (match-string 1) 8))
+ (new-perm (string-to-number (match-string 2) 8))
+ (state (or (match-string 4) (match-string 6)))
+ (name (or (match-string 5) (match-string 7)))
+ (new-name (match-string 8)))
+ (if new-name ; copy or rename
+ (if (eq ?C (string-to-char state))
+ (push (git-create-fileinfo 'added new-name old-perm new-perm 'copy name) infolist)
+ (push (git-create-fileinfo 'deleted name 0 0 'rename new-name) infolist)
+ (push (git-create-fileinfo 'added new-name old-perm new-perm 'rename name) infolist))
+ (push (git-create-fileinfo (git-state-code state) name old-perm new-perm) infolist)))))
+ (setq infolist (sort (nreverse infolist)
+ (lambda (info1 info2)
+ (string-lessp (git-fileinfo->name info1)
+ (git-fileinfo->name info2)))))
+ (git-insert-info-list status infolist files)))
+
+(defun git-find-status-file (status file)
+ "Find a given file in the status ewoc and return its node."
+ (let ((node (ewoc-nth status 0)))
+ (while (and node (not (string= file (git-fileinfo->name (ewoc-data node)))))
+ (setq node (ewoc-next status node)))
+ node))
+
+(defun git-run-ls-files (status files default-state &rest options)
+ "Run git-ls-files on FILES and parse the results into STATUS.
+Return the list of files that haven't been handled."
+ (let (infolist)
+ (with-temp-buffer
+ (apply #'git-call-process t "ls-files" "-z" (append options (list "--") files))
+ (goto-char (point-min))
+ (while (re-search-forward "\\([^\0]*?\\)\\(/?\\)\0" nil t 1)
+ (let ((name (match-string 1)))
+ (push (git-create-fileinfo default-state name 0
+ (if (string-equal "/" (match-string 2)) (lsh ?\110 9) 0))
+ infolist))))
+ (setq infolist (nreverse infolist)) ;; assume it is sorted already
+ (git-insert-info-list status infolist files)))
+
+(defun git-run-ls-files-cached (status files default-state)
+ "Run git-ls-files -c on FILES and parse the results into STATUS.
+Return the list of files that haven't been handled."
+ (let (infolist)
+ (with-temp-buffer
+ (apply #'git-call-process t "ls-files" "-z" "-s" "-c" "--" files)
+ (goto-char (point-min))
+ (while (re-search-forward "\\([0-7]\\{6\\}\\) [0-9a-f]\\{40\\} 0\t\\([^\0]+\\)\0" nil t)
+ (let* ((new-perm (string-to-number (match-string 1) 8))
+ (old-perm (if (eq default-state 'added) 0 new-perm))
+ (name (match-string 2)))
+ (push (git-create-fileinfo default-state name old-perm new-perm) infolist))))
+ (setq infolist (nreverse infolist)) ;; assume it is sorted already
+ (git-insert-info-list status infolist files)))
+
+(defun git-run-ls-unmerged (status files)
+ "Run git-ls-files -u on FILES and parse the results into STATUS."
+ (with-temp-buffer
+ (apply #'git-call-process t "ls-files" "-z" "-u" "--" files)
+ (goto-char (point-min))
+ (let (unmerged-files)
+ (while (re-search-forward "[0-7]\\{6\\} [0-9a-f]\\{40\\} [123]\t\\([^\0]+\\)\0" nil t)
+ (push (match-string 1) unmerged-files))
+ (setq unmerged-files (nreverse unmerged-files)) ;; assume it is sorted already
+ (git-set-filenames-state status unmerged-files 'unmerged))))
+
+(defun git-get-exclude-files ()
+ "Get the list of exclude files to pass to git-ls-files."
+ (let (files
+ (config (git-config "core.excludesfile")))
+ (when (file-readable-p ".git/info/exclude")
+ (push ".git/info/exclude" files))
+ (when (and config (file-readable-p config))
+ (push config files))
+ files))
+
+(defun git-run-ls-files-with-excludes (status files default-state &rest options)
+ "Run git-ls-files on FILES with appropriate --exclude-from options."
+ (let ((exclude-files (git-get-exclude-files)))
+ (apply #'git-run-ls-files status files default-state "--directory" "--no-empty-directory"
+ (concat "--exclude-per-directory=" git-per-dir-ignore-file)
+ (append options (mapcar (lambda (f) (concat "--exclude-from=" f)) exclude-files)))))
+
+(defun git-update-status-files (&optional files mark-files)
+ "Update the status of FILES from the index.
+The FILES list must be sorted."
+ (unless git-status (error "Not in git-status buffer."))
+ ;; set the needs-update flag on existing files
+ (if files
+ (git-status-filenames-map
+ git-status (lambda (info) (setf (git-fileinfo->needs-update info) t)) files)
+ (ewoc-map (lambda (info) (setf (git-fileinfo->needs-update info) t) nil) git-status)
+ (git-call-process nil "update-index" "--refresh")
+ (when git-show-uptodate
+ (git-run-ls-files-cached git-status nil 'uptodate)))
+ (let ((remaining-files
+ (if (git-empty-db-p) ; we need some special handling for an empty db
+ (git-run-ls-files-cached git-status files 'added)
+ (git-run-diff-index git-status files))))
+ (git-run-ls-unmerged git-status files)
+ (when (or remaining-files (and git-show-unknown (not files)))
+ (setq remaining-files (git-run-ls-files-with-excludes git-status remaining-files 'unknown "-o")))
+ (when (or remaining-files (and git-show-ignored (not files)))
+ (setq remaining-files (git-run-ls-files-with-excludes git-status remaining-files 'ignored "-o" "-i")))
+ (unless files
+ (setq remaining-files (git-get-filenames (ewoc-collect git-status #'git-fileinfo->needs-update))))
+ (when remaining-files
+ (setq remaining-files (git-run-ls-files-cached git-status remaining-files 'uptodate)))
+ (git-set-filenames-state git-status remaining-files nil)
+ (when mark-files (git-mark-files git-status files))
+ (git-refresh-files)
+ (git-refresh-ewoc-hf git-status)))
+
+(defun git-mark-files (status files)
+ "Mark all the specified FILES, and unmark the others."
+ (let ((file (and files (pop files)))
+ (node (ewoc-nth status 0)))
+ (while node
+ (let ((info (ewoc-data node)))
+ (if (and file (string-equal (git-fileinfo->name info) file))
+ (progn
+ (unless (git-fileinfo->marked info)
+ (setf (git-fileinfo->marked info) t)
+ (setf (git-fileinfo->needs-refresh info) t))
+ (setq file (pop files))
+ (setq node (ewoc-next status node)))
+ (when (git-fileinfo->marked info)
+ (setf (git-fileinfo->marked info) nil)
+ (setf (git-fileinfo->needs-refresh info) t))
+ (if (and file (string-lessp file (git-fileinfo->name info)))
+ (setq file (pop files))
+ (setq node (ewoc-next status node))))))))
+
+(defun git-marked-files ()
+ "Return a list of all marked files, or if none a list containing just the file at cursor position."
+ (unless git-status (error "Not in git-status buffer."))
+ (or (ewoc-collect git-status (lambda (info) (git-fileinfo->marked info)))
+ (list (ewoc-data (ewoc-locate git-status)))))
+
+(defun git-marked-files-state (&rest states)
+ "Return a sorted list of marked files that are in the specified states."
+ (let ((files (git-marked-files))
+ result)
+ (dolist (info files)
+ (when (memq (git-fileinfo->state info) states)
+ (push info result)))
+ (nreverse result)))
+
+(defun git-refresh-files ()
+ "Refresh all files that need it and clear the needs-refresh flag."
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map
+ (lambda (info)
+ (let ((refresh (git-fileinfo->needs-refresh info)))
+ (setf (git-fileinfo->needs-refresh info) nil)
+ refresh))
+ git-status)
+ ; move back to goal column
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-refresh-ewoc-hf (status)
+ "Refresh the ewoc header and footer."
+ (let ((branch (git-symbolic-ref "HEAD"))
+ (head (if (git-empty-db-p) "Nothing committed yet"
+ (git-get-commit-description "HEAD")))
+ (merge-heads (git-get-merge-heads)))
+ (ewoc-set-hf status
+ (format "Directory: %s\nBranch: %s\nHead: %s%s\n"
+ default-directory
+ (if branch
+ (if (string-match "^refs/heads/" branch)
+ (substring branch (match-end 0))
+ branch)
+ "none (detached HEAD)")
+ head
+ (if merge-heads
+ (concat "\nMerging: "
+ (mapconcat (lambda (str) (git-get-commit-description str)) merge-heads "\n "))
+ ""))
+ (if (ewoc-nth status 0) "" " No changes."))))
+
+(defun git-get-filenames (files)
+ (mapcar (lambda (info) (git-fileinfo->name info)) files))
+
+(defun git-update-index (index-file files)
+ "Run git-update-index on a list of files."
+ (let ((process-environment (append (and index-file (list (concat "GIT_INDEX_FILE=" index-file)))
+ process-environment))
+ added deleted modified)
+ (dolist (info files)
+ (case (git-fileinfo->state info)
+ ('added (push info added))
+ ('deleted (push info deleted))
+ ('modified (push info modified))))
+ (and
+ (or (not added) (apply #'git-call-process-display-error "update-index" "--add" "--" (git-get-filenames added)))
+ (or (not deleted) (apply #'git-call-process-display-error "update-index" "--remove" "--" (git-get-filenames deleted)))
+ (or (not modified) (apply #'git-call-process-display-error "update-index" "--" (git-get-filenames modified))))))
+
+(defun git-run-pre-commit-hook ()
+ "Run the pre-commit hook if any."
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((files (git-marked-files-state 'added 'deleted 'modified)))
+ (or (not files)
+ (not (file-executable-p ".git/hooks/pre-commit"))
+ (let ((index-file (make-temp-file "gitidx")))
+ (unwind-protect
+ (let ((head-tree (unless (git-empty-db-p) (git-rev-parse "HEAD^{tree}"))))
+ (git-read-tree head-tree index-file)
+ (git-update-index index-file files)
+ (git-run-hook "pre-commit" `(("GIT_INDEX_FILE" . ,index-file))))
+ (delete-file index-file))))))
+
+(defun git-do-commit ()
+ "Perform the actual commit using the current buffer as log message."
+ (interactive)
+ (let ((buffer (current-buffer))
+ (index-file (make-temp-file "gitidx")))
+ (with-current-buffer log-edit-parent-buffer
+ (if (git-marked-files-state 'unmerged)
+ (message "You cannot commit unmerged files, resolve them first.")
+ (unwind-protect
+ (let ((files (git-marked-files-state 'added 'deleted 'modified))
+ head tree head-tree)
+ (unless (git-empty-db-p)
+ (setq head (git-rev-parse "HEAD")
+ head-tree (git-rev-parse "HEAD^{tree}")))
+ (message "Running git commit...")
+ (when
+ (and
+ (git-read-tree head-tree index-file)
+ (git-update-index nil files) ;update both the default index
+ (git-update-index index-file files) ;and the temporary one
+ (setq tree (git-write-tree index-file)))
+ (if (or (not (string-equal tree head-tree))
+ (yes-or-no-p "The tree was not modified, do you really want to perform an empty commit? "))
+ (let ((commit (git-commit-tree buffer tree head)))
+ (when commit
+ (condition-case nil (delete-file ".git/MERGE_HEAD") (error nil))
+ (condition-case nil (delete-file ".git/MERGE_MSG") (error nil))
+ (with-current-buffer buffer (erase-buffer))
+ (git-update-status-files (git-get-filenames files))
+ (git-call-process nil "rerere")
+ (git-call-process nil "gc" "--auto")
+ (message "Committed %s." commit)
+ (git-run-hook "post-commit" nil)))
+ (message "Commit aborted."))))
+ (delete-file index-file))))))
+
+
+;;;; Interactive functions
+;;;; ------------------------------------------------------------
+
+(defun git-mark-file ()
+ "Mark the file that the cursor is on and move to the next one."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((pos (ewoc-locate git-status))
+ (info (ewoc-data pos)))
+ (setf (git-fileinfo->marked info) t)
+ (ewoc-invalidate git-status pos)
+ (ewoc-goto-next git-status 1)))
+
+(defun git-unmark-file ()
+ "Unmark the file that the cursor is on and move to the next one."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((pos (ewoc-locate git-status))
+ (info (ewoc-data pos)))
+ (setf (git-fileinfo->marked info) nil)
+ (ewoc-invalidate git-status pos)
+ (ewoc-goto-next git-status 1)))
+
+(defun git-unmark-file-up ()
+ "Unmark the file that the cursor is on and move to the previous one."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((pos (ewoc-locate git-status))
+ (info (ewoc-data pos)))
+ (setf (git-fileinfo->marked info) nil)
+ (ewoc-invalidate git-status pos)
+ (ewoc-goto-prev git-status 1)))
+
+(defun git-mark-all ()
+ "Mark all files."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map (lambda (info) (unless (git-fileinfo->marked info)
+ (setf (git-fileinfo->marked info) t))) git-status)
+ ; move back to goal column after invalidate
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-unmark-all ()
+ "Unmark all files."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map (lambda (info) (when (git-fileinfo->marked info)
+ (setf (git-fileinfo->marked info) nil)
+ t)) git-status)
+ ; move back to goal column after invalidate
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-toggle-all-marks ()
+ "Toggle all file marks."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map (lambda (info) (setf (git-fileinfo->marked info) (not (git-fileinfo->marked info))) t) git-status)
+ ; move back to goal column after invalidate
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-next-file (&optional n)
+ "Move the selection down N files."
+ (interactive "p")
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-goto-next git-status n))
+
+(defun git-prev-file (&optional n)
+ "Move the selection up N files."
+ (interactive "p")
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-goto-prev git-status n))
+
+(defun git-next-unmerged-file (&optional n)
+ "Move the selection down N unmerged files."
+ (interactive "p")
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((last (ewoc-locate git-status))
+ (node (ewoc-next git-status last)))
+ (while (and node (> n 0))
+ (when (eq 'unmerged (git-fileinfo->state (ewoc-data node)))
+ (setq n (1- n))
+ (setq last node))
+ (setq node (ewoc-next git-status node)))
+ (ewoc-goto-node git-status last)))
+
+(defun git-prev-unmerged-file (&optional n)
+ "Move the selection up N unmerged files."
+ (interactive "p")
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((last (ewoc-locate git-status))
+ (node (ewoc-prev git-status last)))
+ (while (and node (> n 0))
+ (when (eq 'unmerged (git-fileinfo->state (ewoc-data node)))
+ (setq n (1- n))
+ (setq last node))
+ (setq node (ewoc-prev git-status node)))
+ (ewoc-goto-node git-status last)))
+
+(defun git-insert-file (file)
+ "Insert file(s) into the git-status buffer."
+ (interactive "fInsert file: ")
+ (git-update-status-files (list (file-relative-name file))))
+
+(defun git-add-file ()
+ "Add marked file(s) to the index cache."
+ (interactive)
+ (let ((files (git-get-filenames (git-marked-files-state 'unknown 'ignored 'unmerged))))
+ ;; FIXME: add support for directories
+ (unless files
+ (push (file-relative-name (read-file-name "File to add: " nil nil t)) files))
+ (when (apply 'git-call-process-display-error "update-index" "--add" "--" files)
+ (git-update-status-files files)
+ (git-success-message "Added" files))))
+
+(defun git-ignore-file ()
+ "Add marked file(s) to the ignore list."
+ (interactive)
+ (let ((files (git-get-filenames (git-marked-files-state 'unknown))))
+ (unless files
+ (push (file-relative-name (read-file-name "File to ignore: " nil nil t)) files))
+ (dolist (f files) (git-append-to-ignore f))
+ (git-update-status-files files)
+ (git-success-message "Ignored" files)))
+
+(defun git-remove-file ()
+ "Remove the marked file(s)."
+ (interactive)
+ (let ((files (git-get-filenames (git-marked-files-state 'added 'modified 'unknown 'uptodate 'ignored))))
+ (unless files
+ (push (file-relative-name (read-file-name "File to remove: " nil nil t)) files))
+ (if (yes-or-no-p
+ (if (cdr files)
+ (format "Remove %d files? " (length files))
+ (format "Remove %s? " (car files))))
+ (progn
+ (dolist (name files)
+ (ignore-errors
+ (if (file-directory-p name)
+ (delete-directory name)
+ (delete-file name))))
+ (when (apply 'git-call-process-display-error "update-index" "--remove" "--" files)
+ (git-update-status-files files)
+ (git-success-message "Removed" files)))
+ (message "Aborting"))))
+
+(defun git-revert-file ()
+ "Revert changes to the marked file(s)."
+ (interactive)
+ (let ((files (git-marked-files-state 'added 'deleted 'modified 'unmerged))
+ added modified)
+ (when (and files
+ (yes-or-no-p
+ (if (cdr files)
+ (format "Revert %d files? " (length files))
+ (format "Revert %s? " (git-fileinfo->name (car files))))))
+ (dolist (info files)
+ (case (git-fileinfo->state info)
+ ('added (push (git-fileinfo->name info) added))
+ ('deleted (push (git-fileinfo->name info) modified))
+ ('unmerged (push (git-fileinfo->name info) modified))
+ ('modified (push (git-fileinfo->name info) modified))))
+ ;; check if a buffer contains one of the files and isn't saved
+ (dolist (file modified)
+ (let ((buffer (get-file-buffer file)))
+ (when (and buffer (buffer-modified-p buffer))
+ (error "Buffer %s is modified. Please kill or save modified buffers before reverting." (buffer-name buffer)))))
+ (let ((ok (and
+ (or (not added)
+ (apply 'git-call-process-display-error "update-index" "--force-remove" "--" added))
+ (or (not modified)
+ (apply 'git-call-process-display-error "checkout" "HEAD" modified))))
+ (names (git-get-filenames files)))
+ (git-update-status-files names)
+ (when ok
+ (dolist (file modified)
+ (let ((buffer (get-file-buffer file)))
+ (when buffer (with-current-buffer buffer (revert-buffer t t t)))))
+ (git-success-message "Reverted" names))))))
+
+(defun git-remove-handled ()
+ "Remove handled files from the status list."
+ (interactive)
+ (ewoc-filter git-status
+ (lambda (info)
+ (case (git-fileinfo->state info)
+ ('ignored git-show-ignored)
+ ('uptodate git-show-uptodate)
+ ('unknown git-show-unknown)
+ (t t))))
+ (unless (ewoc-nth git-status 0) ; refresh header if list is empty
+ (git-refresh-ewoc-hf git-status)))
+
+(defun git-toggle-show-uptodate ()
+ "Toogle the option for showing up-to-date files."
+ (interactive)
+ (if (setq git-show-uptodate (not git-show-uptodate))
+ (git-refresh-status)
+ (git-remove-handled)))
+
+(defun git-toggle-show-ignored ()
+ "Toogle the option for showing ignored files."
+ (interactive)
+ (if (setq git-show-ignored (not git-show-ignored))
+ (progn
+ (message "Inserting ignored files...")
+ (git-run-ls-files-with-excludes git-status nil 'ignored "-o" "-i")
+ (git-refresh-files)
+ (git-refresh-ewoc-hf git-status)
+ (message "Inserting ignored files...done"))
+ (git-remove-handled)))
+
+(defun git-toggle-show-unknown ()
+ "Toogle the option for showing unknown files."
+ (interactive)
+ (if (setq git-show-unknown (not git-show-unknown))
+ (progn
+ (message "Inserting unknown files...")
+ (git-run-ls-files-with-excludes git-status nil 'unknown "-o")
+ (git-refresh-files)
+ (git-refresh-ewoc-hf git-status)
+ (message "Inserting unknown files...done"))
+ (git-remove-handled)))
+
+(defun git-expand-directory (info)
+ "Expand the directory represented by INFO to list its files."
+ (when (eq (lsh (git-fileinfo->new-perm info) -9) ?\110)
+ (let ((dir (git-fileinfo->name info)))
+ (git-set-filenames-state git-status (list dir) nil)
+ (git-run-ls-files-with-excludes git-status (list (concat dir "/")) 'unknown "-o")
+ (git-refresh-files)
+ (git-refresh-ewoc-hf git-status)
+ t)))
+
+(defun git-setup-diff-buffer (buffer)
+ "Setup a buffer for displaying a diff."
+ (let ((dir default-directory))
+ (with-current-buffer buffer
+ (diff-mode)
+ (goto-char (point-min))
+ (setq default-directory dir)
+ (setq buffer-read-only t)))
+ (display-buffer buffer)
+ ; shrink window only if it displays the status buffer
+ (when (eq (window-buffer) (current-buffer))
+ (shrink-window-if-larger-than-buffer)))
+
+(defun git-diff-file ()
+ "Diff the marked file(s) against HEAD."
+ (interactive)
+ (let ((files (git-marked-files)))
+ (git-setup-diff-buffer
+ (apply #'git-run-command-buffer "*git-diff*" "diff-index" "-p" "-M" "HEAD" "--" (git-get-filenames files)))))
+
+(defun git-diff-file-merge-head (arg)
+ "Diff the marked file(s) against the first merge head (or the nth one with a numeric prefix)."
+ (interactive "p")
+ (let ((files (git-marked-files))
+ (merge-heads (git-get-merge-heads)))
+ (unless merge-heads (error "No merge in progress"))
+ (git-setup-diff-buffer
+ (apply #'git-run-command-buffer "*git-diff*" "diff-index" "-p" "-M"
+ (or (nth (1- arg) merge-heads) "HEAD") "--" (git-get-filenames files)))))
+
+(defun git-diff-unmerged-file (stage)
+ "Diff the marked unmerged file(s) against the specified stage."
+ (let ((files (git-marked-files)))
+ (git-setup-diff-buffer
+ (apply #'git-run-command-buffer "*git-diff*" "diff-files" "-p" stage "--" (git-get-filenames files)))))
+
+(defun git-diff-file-base ()
+ "Diff the marked unmerged file(s) against the common base file."
+ (interactive)
+ (git-diff-unmerged-file "-1"))
+
+(defun git-diff-file-mine ()
+ "Diff the marked unmerged file(s) against my pre-merge version."
+ (interactive)
+ (git-diff-unmerged-file "-2"))
+
+(defun git-diff-file-other ()
+ "Diff the marked unmerged file(s) against the other's pre-merge version."
+ (interactive)
+ (git-diff-unmerged-file "-3"))
+
+(defun git-diff-file-combined ()
+ "Do a combined diff of the marked unmerged file(s)."
+ (interactive)
+ (git-diff-unmerged-file "-c"))
+
+(defun git-diff-file-idiff ()
+ "Perform an interactive diff on the current file."
+ (interactive)
+ (let ((files (git-marked-files-state 'added 'deleted 'modified)))
+ (unless (eq 1 (length files))
+ (error "Cannot perform an interactive diff on multiple files."))
+ (let* ((filename (car (git-get-filenames files)))
+ (buff1 (find-file-noselect filename))
+ (buff2 (git-run-command-buffer (concat filename ".~HEAD~") "cat-file" "blob" (concat "HEAD:" filename))))
+ (ediff-buffers buff1 buff2))))
+
+(defun git-log-file ()
+ "Display a log of changes to the marked file(s)."
+ (interactive)
+ (let* ((files (git-marked-files))
+ (coding-system-for-read git-commits-coding-system)
+ (buffer (apply #'git-run-command-buffer "*git-log*" "rev-list" "--pretty" "HEAD" "--" (git-get-filenames files))))
+ (with-current-buffer buffer
+ ; (git-log-mode) FIXME: implement log mode
+ (goto-char (point-min))
+ (setq buffer-read-only t))
+ (display-buffer buffer)))
+
+(defun git-log-edit-files ()
+ "Return a list of marked files for use in the log-edit buffer."
+ (with-current-buffer log-edit-parent-buffer
+ (git-get-filenames (git-marked-files-state 'added 'deleted 'modified))))
+
+(defun git-log-edit-diff ()
+ "Run a diff of the current files being committed from a log-edit buffer."
+ (with-current-buffer log-edit-parent-buffer
+ (git-diff-file)))
+
+(defun git-append-sign-off (name email)
+ "Append a Signed-off-by entry to the current buffer, avoiding duplicates."
+ (let ((sign-off (format "Signed-off-by: %s <%s>" name email))
+ (case-fold-search t))
+ (goto-char (point-min))
+ (unless (re-search-forward (concat "^" (regexp-quote sign-off)) nil t)
+ (goto-char (point-min))
+ (unless (re-search-forward "^Signed-off-by: " nil t)
+ (setq sign-off (concat "\n" sign-off)))
+ (goto-char (point-max))
+ (insert sign-off "\n"))))
+
+(defun git-setup-log-buffer (buffer &optional merge-heads author-name author-email subject date msg)
+ "Setup the log buffer for a commit."
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((dir default-directory)
+ (committer-name (git-get-committer-name))
+ (committer-email (git-get-committer-email))
+ (sign-off git-append-signed-off-by))
+ (with-current-buffer buffer
+ (cd dir)
+ (erase-buffer)
+ (insert
+ (propertize
+ (format "Author: %s <%s>\n%s%s"
+ (or author-name committer-name)
+ (or author-email committer-email)
+ (if date (format "Date: %s\n" date) "")
+ (if merge-heads
+ (format "Merge: %s\n"
+ (mapconcat 'identity merge-heads " "))
+ ""))
+ 'face 'git-header-face)
+ (propertize git-log-msg-separator 'face 'git-separator-face)
+ "\n")
+ (when subject (insert subject "\n\n"))
+ (cond (msg (insert msg "\n"))
+ ((file-readable-p ".git/rebase-apply/msg")
+ (insert-file-contents ".git/rebase-apply/msg"))
+ ((file-readable-p ".git/MERGE_MSG")
+ (insert-file-contents ".git/MERGE_MSG")))
+ ; delete empty lines at end
+ (goto-char (point-min))
+ (when (re-search-forward "\n+\\'" nil t)
+ (replace-match "\n" t t))
+ (when sign-off (git-append-sign-off committer-name committer-email)))
+ buffer))
+
+(define-derived-mode git-log-edit-mode log-edit-mode "Git-Log-Edit"
+ "Major mode for editing git log messages.
+
+Set up git-specific `font-lock-keywords' for `log-edit-mode'."
+ (set (make-local-variable 'font-lock-defaults)
+ '(git-log-edit-font-lock-keywords t t)))
+
+(defun git-commit-file ()
+ "Commit the marked file(s), asking for a commit message."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (when (git-run-pre-commit-hook)
+ (let ((buffer (get-buffer-create "*git-commit*"))
+ (coding-system (git-get-commits-coding-system))
+ author-name author-email subject date)
+ (when (eq 0 (buffer-size buffer))
+ (when (file-readable-p ".git/rebase-apply/info")
+ (with-temp-buffer
+ (insert-file-contents ".git/rebase-apply/info")
+ (goto-char (point-min))
+ (when (re-search-forward "^Author: \\(.*\\)\nEmail: \\(.*\\)$" nil t)
+ (setq author-name (match-string 1))
+ (setq author-email (match-string 2)))
+ (goto-char (point-min))
+ (when (re-search-forward "^Subject: \\(.*\\)$" nil t)
+ (setq subject (match-string 1)))
+ (goto-char (point-min))
+ (when (re-search-forward "^Date: \\(.*\\)$" nil t)
+ (setq date (match-string 1)))))
+ (git-setup-log-buffer buffer (git-get-merge-heads) author-name author-email subject date))
+ (if (boundp 'log-edit-diff-function)
+ (log-edit 'git-do-commit nil '((log-edit-listfun . git-log-edit-files)
+ (log-edit-diff-function . git-log-edit-diff)) buffer 'git-log-edit-mode)
+ (log-edit 'git-do-commit nil 'git-log-edit-files buffer
+ 'git-log-edit-mode))
+ (setq paragraph-separate (concat (regexp-quote git-log-msg-separator) "$\\|Author: \\|Date: \\|Merge: \\|Signed-off-by: \\|\f\\|[ ]*$"))
+ (setq buffer-file-coding-system coding-system)
+ (re-search-forward (regexp-quote (concat git-log-msg-separator "\n")) nil t))))
+
+(defun git-setup-commit-buffer (commit)
+ "Setup the commit buffer with the contents of COMMIT."
+ (let (parents author-name author-email subject date msg)
+ (with-temp-buffer
+ (let ((coding-system (git-get-logoutput-coding-system)))
+ (git-call-process t "log" "-1" "--pretty=medium" "--abbrev=40" commit)
+ (goto-char (point-min))
+ (when (re-search-forward "^Merge: *\\(.*\\)$" nil t)
+ (setq parents (cdr (split-string (match-string 1) " +"))))
+ (when (re-search-forward "^Author: *\\(.*\\) <\\(.*\\)>$" nil t)
+ (setq author-name (match-string 1))
+ (setq author-email (match-string 2)))
+ (when (re-search-forward "^Date: *\\(.*\\)$" nil t)
+ (setq date (match-string 1)))
+ (while (re-search-forward "^ \\(.*\\)$" nil t)
+ (push (match-string 1) msg))
+ (setq msg (nreverse msg))
+ (setq subject (pop msg))
+ (while (and msg (zerop (length (car msg))) (pop msg)))))
+ (git-setup-log-buffer (get-buffer-create "*git-commit*")
+ parents author-name author-email subject date
+ (mapconcat #'identity msg "\n"))))
+
+(defun git-get-commit-files (commit)
+ "Retrieve a sorted list of files modified by COMMIT."
+ (let (files)
+ (with-temp-buffer
+ (git-call-process t "diff-tree" "-m" "-r" "-z" "--name-only" "--no-commit-id" "--root" commit)
+ (goto-char (point-min))
+ (while (re-search-forward "\\([^\0]*\\)\0" nil t 1)
+ (push (match-string 1) files)))
+ (sort files #'string-lessp)))
+
+(defun git-read-commit-name (prompt &optional default)
+ "Ask for a commit name, with completion for local branch, remote branch and tag."
+ (completing-read prompt
+ (list* "HEAD" "ORIG_HEAD" "FETCH_HEAD" (mapcar #'car (git-for-each-ref)))
+ nil nil nil nil default))
+
+(defun git-checkout (branch &optional merge)
+ "Checkout a branch, tag, or any commit.
+Use a prefix arg if git should merge while checking out."
+ (interactive
+ (list (git-read-commit-name "Checkout: ")
+ current-prefix-arg))
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((args (list branch "--")))
+ (when merge (push "-m" args))
+ (when (apply #'git-call-process-display-error "checkout" args)
+ (git-update-status-files))))
+
+(defun git-branch (branch)
+ "Create a branch from the current HEAD and switch to it."
+ (interactive (list (git-read-commit-name "Branch: ")))
+ (unless git-status (error "Not in git-status buffer."))
+ (if (git-rev-parse (concat "refs/heads/" branch))
+ (if (yes-or-no-p (format "Branch %s already exists, replace it? " branch))
+ (and (git-call-process-display-error "branch" "-f" branch)
+ (git-call-process-display-error "checkout" branch))
+ (message "Canceled."))
+ (git-call-process-display-error "checkout" "-b" branch))
+ (git-refresh-ewoc-hf git-status))
+
+(defun git-amend-commit ()
+ "Undo the last commit on HEAD, and set things up to commit an
+amended version of it."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (when (git-empty-db-p) (error "No commit to amend."))
+ (let* ((commit (git-rev-parse "HEAD"))
+ (files (git-get-commit-files commit)))
+ (when (if (git-rev-parse "HEAD^")
+ (git-call-process-display-error "reset" "--soft" "HEAD^")
+ (and (git-update-ref "ORIG_HEAD" commit)
+ (git-update-ref "HEAD" nil commit)))
+ (git-update-status-files files t)
+ (git-setup-commit-buffer commit)
+ (git-commit-file))))
+
+(defun git-cherry-pick-commit (arg)
+ "Cherry-pick a commit."
+ (interactive (list (git-read-commit-name "Cherry-pick commit: ")))
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((commit (git-rev-parse (concat arg "^0"))))
+ (unless commit (error "Not a valid commit '%s'." arg))
+ (when (git-rev-parse (concat commit "^2"))
+ (error "Cannot cherry-pick a merge commit."))
+ (let ((files (git-get-commit-files commit))
+ (ok (git-call-process-display-error "cherry-pick" "-n" commit)))
+ (git-update-status-files files ok)
+ (with-current-buffer (git-setup-commit-buffer commit)
+ (goto-char (point-min))
+ (if (re-search-forward "^\n*Signed-off-by:" nil t 1)
+ (goto-char (match-beginning 0))
+ (goto-char (point-max)))
+ (insert "(cherry picked from commit " commit ")\n"))
+ (when ok (git-commit-file)))))
+
+(defun git-revert-commit (arg)
+ "Revert a commit."
+ (interactive (list (git-read-commit-name "Revert commit: ")))
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((commit (git-rev-parse (concat arg "^0"))))
+ (unless commit (error "Not a valid commit '%s'." arg))
+ (when (git-rev-parse (concat commit "^2"))
+ (error "Cannot revert a merge commit."))
+ (let ((files (git-get-commit-files commit))
+ (subject (git-get-commit-description commit))
+ (ok (git-call-process-display-error "revert" "-n" commit)))
+ (git-update-status-files files ok)
+ (when (string-match "^[0-9a-f]+ - \\(.*\\)$" subject)
+ (setq subject (match-string 1 subject)))
+ (git-setup-log-buffer (get-buffer-create "*git-commit*")
+ (git-get-merge-heads) nil nil (format "Revert \"%s\"" subject) nil
+ (format "This reverts commit %s.\n" commit))
+ (when ok (git-commit-file)))))
+
+(defun git-find-file ()
+ "Visit the current file in its own buffer."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((info (ewoc-data (ewoc-locate git-status))))
+ (unless (git-expand-directory info)
+ (find-file (git-fileinfo->name info))
+ (when (eq 'unmerged (git-fileinfo->state info))
+ (smerge-mode 1)))))
+
+(defun git-find-file-other-window ()
+ "Visit the current file in its own buffer in another window."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((info (ewoc-data (ewoc-locate git-status))))
+ (find-file-other-window (git-fileinfo->name info))
+ (when (eq 'unmerged (git-fileinfo->state info))
+ (smerge-mode))))
+
+(defun git-find-file-imerge ()
+ "Visit the current file in interactive merge mode."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((info (ewoc-data (ewoc-locate git-status))))
+ (find-file (git-fileinfo->name info))
+ (smerge-ediff)))
+
+(defun git-view-file ()
+ "View the current file in its own buffer."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((info (ewoc-data (ewoc-locate git-status))))
+ (view-file (git-fileinfo->name info))))
+
+(defun git-refresh-status ()
+ "Refresh the git status buffer."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (message "Refreshing git status...")
+ (git-update-status-files)
+ (message "Refreshing git status...done"))
+
+(defun git-status-quit ()
+ "Quit git-status mode."
+ (interactive)
+ (bury-buffer))
+
+;;;; Major Mode
+;;;; ------------------------------------------------------------
+
+(defvar git-status-mode-hook nil
+ "Run after `git-status-mode' is setup.")
+
+(defvar git-status-mode-map nil
+ "Keymap for git major mode.")
+
+(defvar git-status nil
+ "List of all files managed by the git-status mode.")
+
+(unless git-status-mode-map
+ (let ((map (make-keymap))
+ (commit-map (make-sparse-keymap))
+ (diff-map (make-sparse-keymap))
+ (toggle-map (make-sparse-keymap)))
+ (suppress-keymap map)
+ (define-key map "?" 'git-help)
+ (define-key map "h" 'git-help)
+ (define-key map " " 'git-next-file)
+ (define-key map "a" 'git-add-file)
+ (define-key map "c" 'git-commit-file)
+ (define-key map "\C-c" commit-map)
+ (define-key map "d" diff-map)
+ (define-key map "=" 'git-diff-file)
+ (define-key map "f" 'git-find-file)
+ (define-key map "\r" 'git-find-file)
+ (define-key map "g" 'git-refresh-status)
+ (define-key map "i" 'git-ignore-file)
+ (define-key map "I" 'git-insert-file)
+ (define-key map "l" 'git-log-file)
+ (define-key map "m" 'git-mark-file)
+ (define-key map "M" 'git-mark-all)
+ (define-key map "n" 'git-next-file)
+ (define-key map "N" 'git-next-unmerged-file)
+ (define-key map "o" 'git-find-file-other-window)
+ (define-key map "p" 'git-prev-file)
+ (define-key map "P" 'git-prev-unmerged-file)
+ (define-key map "q" 'git-status-quit)
+ (define-key map "r" 'git-remove-file)
+ (define-key map "t" toggle-map)
+ (define-key map "T" 'git-toggle-all-marks)
+ (define-key map "u" 'git-unmark-file)
+ (define-key map "U" 'git-revert-file)
+ (define-key map "v" 'git-view-file)
+ (define-key map "x" 'git-remove-handled)
+ (define-key map "\C-?" 'git-unmark-file-up)
+ (define-key map "\M-\C-?" 'git-unmark-all)
+ ; the commit submap
+ (define-key commit-map "\C-a" 'git-amend-commit)
+ (define-key commit-map "\C-b" 'git-branch)
+ (define-key commit-map "\C-o" 'git-checkout)
+ (define-key commit-map "\C-p" 'git-cherry-pick-commit)
+ (define-key commit-map "\C-v" 'git-revert-commit)
+ ; the diff submap
+ (define-key diff-map "b" 'git-diff-file-base)
+ (define-key diff-map "c" 'git-diff-file-combined)
+ (define-key diff-map "=" 'git-diff-file)
+ (define-key diff-map "e" 'git-diff-file-idiff)
+ (define-key diff-map "E" 'git-find-file-imerge)
+ (define-key diff-map "h" 'git-diff-file-merge-head)
+ (define-key diff-map "m" 'git-diff-file-mine)
+ (define-key diff-map "o" 'git-diff-file-other)
+ ; the toggle submap
+ (define-key toggle-map "u" 'git-toggle-show-uptodate)
+ (define-key toggle-map "i" 'git-toggle-show-ignored)
+ (define-key toggle-map "k" 'git-toggle-show-unknown)
+ (define-key toggle-map "m" 'git-toggle-all-marks)
+ (setq git-status-mode-map map))
+ (easy-menu-define git-menu git-status-mode-map
+ "Git Menu"
+ `("Git"
+ ["Refresh" git-refresh-status t]
+ ["Commit" git-commit-file t]
+ ["Checkout..." git-checkout t]
+ ["New Branch..." git-branch t]
+ ["Cherry-pick Commit..." git-cherry-pick-commit t]
+ ["Revert Commit..." git-revert-commit t]
+ ("Merge"
+ ["Next Unmerged File" git-next-unmerged-file t]
+ ["Prev Unmerged File" git-prev-unmerged-file t]
+ ["Interactive Merge File" git-find-file-imerge t]
+ ["Diff Against Common Base File" git-diff-file-base t]
+ ["Diff Combined" git-diff-file-combined t]
+ ["Diff Against Merge Head" git-diff-file-merge-head t]
+ ["Diff Against Mine" git-diff-file-mine t]
+ ["Diff Against Other" git-diff-file-other t])
+ "--------"
+ ["Add File" git-add-file t]
+ ["Revert File" git-revert-file t]
+ ["Ignore File" git-ignore-file t]
+ ["Remove File" git-remove-file t]
+ ["Insert File" git-insert-file t]
+ "--------"
+ ["Find File" git-find-file t]
+ ["View File" git-view-file t]
+ ["Diff File" git-diff-file t]
+ ["Interactive Diff File" git-diff-file-idiff t]
+ ["Log" git-log-file t]
+ "--------"
+ ["Mark" git-mark-file t]
+ ["Mark All" git-mark-all t]
+ ["Unmark" git-unmark-file t]
+ ["Unmark All" git-unmark-all t]
+ ["Toggle All Marks" git-toggle-all-marks t]
+ ["Hide Handled Files" git-remove-handled t]
+ "--------"
+ ["Show Uptodate Files" git-toggle-show-uptodate :style toggle :selected git-show-uptodate]
+ ["Show Ignored Files" git-toggle-show-ignored :style toggle :selected git-show-ignored]
+ ["Show Unknown Files" git-toggle-show-unknown :style toggle :selected git-show-unknown]
+ "--------"
+ ["Quit" git-status-quit t])))
+
+
+;; git mode should only run in the *git status* buffer
+(put 'git-status-mode 'mode-class 'special)
+
+(defun git-status-mode ()
+ "Major mode for interacting with Git.
+Commands:
+\\{git-status-mode-map}"
+ (kill-all-local-variables)
+ (buffer-disable-undo)
+ (setq mode-name "git status"
+ major-mode 'git-status-mode
+ goal-column 17
+ buffer-read-only t)
+ (use-local-map git-status-mode-map)
+ (let ((buffer-read-only nil))
+ (erase-buffer)
+ (let ((status (ewoc-create 'git-fileinfo-prettyprint "" "")))
+ (set (make-local-variable 'git-status) status))
+ (set (make-local-variable 'list-buffers-directory) default-directory)
+ (make-local-variable 'git-show-uptodate)
+ (make-local-variable 'git-show-ignored)
+ (make-local-variable 'git-show-unknown)
+ (run-hooks 'git-status-mode-hook)))
+
+(defun git-find-status-buffer (dir)
+ "Find the git status buffer handling a specified directory."
+ (let ((list (buffer-list))
+ (fulldir (expand-file-name dir))
+ found)
+ (while (and list (not found))
+ (let ((buffer (car list)))
+ (with-current-buffer buffer
+ (when (and list-buffers-directory
+ (string-equal fulldir (expand-file-name list-buffers-directory))
+ (eq major-mode 'git-status-mode))
+ (setq found buffer))))
+ (setq list (cdr list)))
+ found))
+
+(defun git-status (dir)
+ "Entry point into git-status mode."
+ (interactive "DSelect directory: ")
+ (setq dir (git-get-top-dir dir))
+ (if (file-exists-p (concat (file-name-as-directory dir) ".git"))
+ (let ((buffer (or (and git-reuse-status-buffer (git-find-status-buffer dir))
+ (create-file-buffer (expand-file-name "*git-status*" dir)))))
+ (switch-to-buffer buffer)
+ (cd dir)
+ (git-status-mode)
+ (git-refresh-status)
+ (goto-char (point-min))
+ (add-hook 'after-save-hook 'git-update-saved-file))
+ (message "%s is not a git working tree." dir)))
+
+(defun git-update-saved-file ()
+ "Update the corresponding git-status buffer when a file is saved.
+Meant to be used in `after-save-hook'."
+ (let* ((file (expand-file-name buffer-file-name))
+ (dir (condition-case nil (git-get-top-dir (file-name-directory file)) (error nil)))
+ (buffer (and dir (git-find-status-buffer dir))))
+ (when buffer
+ (with-current-buffer buffer
+ (let ((filename (file-relative-name file dir)))
+ ; skip files located inside the .git directory
+ (unless (string-match "^\\.git/" filename)
+ (git-call-process nil "add" "--refresh" "--" filename)
+ (git-update-status-files (list filename))))))))
+
+(defun git-help ()
+ "Display help for Git mode."
+ (interactive)
+ (describe-function 'git-status-mode))
+
+(provide 'git)
+;;; git.el ends here
diff --git a/contrib/examples/README b/contrib/examples/README
new file mode 100644
index 0000000..6946f3d
--- /dev/null
+++ b/contrib/examples/README
@@ -0,0 +1,3 @@
+These are original scripted implementations, kept primarily for their
+reference value to any aspiring plumbing users who want to learn how
+pieces can be fit together.
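The README above presents these scripts as worked examples of how plumbing commands fit together. As a minimal, hand-run sketch of that idea (the same sequence git-commit.sh further down performs; the commit message is a placeholder and an existing HEAD is assumed):

    # Turn the current index into a commit and advance the branch, by hand.
    tree=$(git write-tree)                      # snapshot the index as a tree object
    parent=$(git rev-parse --verify HEAD)       # current tip; drop -p below for a root commit
    commit=$(echo "plumbing walk-through" |
             git commit-tree "$tree" -p "$parent")
    git update-ref HEAD "$commit" "$parent"     # move the branch, guarded by the old value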
diff --git a/contrib/examples/builtin-fetch--tool.c b/contrib/examples/builtin-fetch--tool.c
new file mode 100644
index 0000000..8bc8c75
--- /dev/null
+++ b/contrib/examples/builtin-fetch--tool.c
@@ -0,0 +1,574 @@
+#include "builtin.h"
+#include "cache.h"
+#include "refs.h"
+#include "commit.h"
+#include "sigchain.h"
+
+static char *get_stdin(void)
+{
+ struct strbuf buf = STRBUF_INIT;
+ if (strbuf_read(&buf, 0, 1024) < 0) {
+ die_errno("error reading standard input");
+ }
+ return strbuf_detach(&buf, NULL);
+}
+
+static void show_new(enum object_type type, unsigned char *sha1_new)
+{
+ fprintf(stderr, " %s: %s\n", typename(type),
+ find_unique_abbrev(sha1_new, DEFAULT_ABBREV));
+}
+
+static int update_ref_env(const char *action,
+ const char *refname,
+ unsigned char *sha1,
+ unsigned char *oldval)
+{
+ char msg[1024];
+ const char *rla = getenv("GIT_REFLOG_ACTION");
+
+ if (!rla)
+ rla = "(reflog update)";
+ if (snprintf(msg, sizeof(msg), "%s: %s", rla, action) >= sizeof(msg))
+ warning("reflog message too long: %.*s...", 50, msg);
+ return update_ref(msg, refname, sha1, oldval, 0, QUIET_ON_ERR);
+}
+
+static int update_local_ref(const char *name,
+ const char *new_head,
+ const char *note,
+ int verbose, int force)
+{
+ unsigned char sha1_old[20], sha1_new[20];
+ char oldh[41], newh[41];
+ struct commit *current, *updated;
+ enum object_type type;
+
+ if (get_sha1_hex(new_head, sha1_new))
+ die("malformed object name %s", new_head);
+
+ type = sha1_object_info(sha1_new, NULL);
+ if (type < 0)
+ die("object %s not found", new_head);
+
+ if (!*name) {
+ /* Not storing */
+ if (verbose) {
+ fprintf(stderr, "* fetched %s\n", note);
+ show_new(type, sha1_new);
+ }
+ return 0;
+ }
+
+ if (get_sha1(name, sha1_old)) {
+ const char *msg;
+ just_store:
+ /* new ref */
+ if (!strncmp(name, "refs/tags/", 10))
+ msg = "storing tag";
+ else
+ msg = "storing head";
+ fprintf(stderr, "* %s: storing %s\n",
+ name, note);
+ show_new(type, sha1_new);
+ return update_ref_env(msg, name, sha1_new, NULL);
+ }
+
+ if (!hashcmp(sha1_old, sha1_new)) {
+ if (verbose) {
+ fprintf(stderr, "* %s: same as %s\n", name, note);
+ show_new(type, sha1_new);
+ }
+ return 0;
+ }
+
+ if (!strncmp(name, "refs/tags/", 10)) {
+ fprintf(stderr, "* %s: updating with %s\n", name, note);
+ show_new(type, sha1_new);
+ return update_ref_env("updating tag", name, sha1_new, NULL);
+ }
+
+ current = lookup_commit_reference(sha1_old);
+ updated = lookup_commit_reference(sha1_new);
+ if (!current || !updated)
+ goto just_store;
+
+ strcpy(oldh, find_unique_abbrev(current->object.sha1, DEFAULT_ABBREV));
+ strcpy(newh, find_unique_abbrev(sha1_new, DEFAULT_ABBREV));
+
+ if (in_merge_bases(current, updated)) {
+ fprintf(stderr, "* %s: fast-forward to %s\n",
+ name, note);
+ fprintf(stderr, " old..new: %s..%s\n", oldh, newh);
+ return update_ref_env("fast-forward", name, sha1_new, sha1_old);
+ }
+ if (!force) {
+ fprintf(stderr,
+ "* %s: not updating to non-fast-forward %s\n",
+ name, note);
+ fprintf(stderr,
+ " old...new: %s...%s\n", oldh, newh);
+ return 1;
+ }
+ fprintf(stderr,
+ "* %s: forcing update to non-fast-forward %s\n",
+ name, note);
+ fprintf(stderr, " old...new: %s...%s\n", oldh, newh);
+ return update_ref_env("forced-update", name, sha1_new, sha1_old);
+}
+
+static int append_fetch_head(FILE *fp,
+ const char *head, const char *remote,
+ const char *remote_name, const char *remote_nick,
+ const char *local_name, int not_for_merge,
+ int verbose, int force)
+{
+ struct commit *commit;
+ int remote_len, i, note_len;
+ unsigned char sha1[20];
+ char note[1024];
+ const char *what, *kind;
+
+ if (get_sha1(head, sha1))
+ return error("Not a valid object name: %s", head);
+ commit = lookup_commit_reference_gently(sha1, 1);
+ if (!commit)
+ not_for_merge = 1;
+
+ if (!strcmp(remote_name, "HEAD")) {
+ kind = "";
+ what = "";
+ }
+ else if (!strncmp(remote_name, "refs/heads/", 11)) {
+ kind = "branch";
+ what = remote_name + 11;
+ }
+ else if (!strncmp(remote_name, "refs/tags/", 10)) {
+ kind = "tag";
+ what = remote_name + 10;
+ }
+ else if (!strncmp(remote_name, "refs/remotes/", 13)) {
+ kind = "remote-tracking branch";
+ what = remote_name + 13;
+ }
+ else {
+ kind = "";
+ what = remote_name;
+ }
+
+ remote_len = strlen(remote);
+ for (i = remote_len - 1; 0 <= i && remote[i] == '/'; i--)
+ ;
+ remote_len = i + 1;
+ if (4 < i && !strncmp(".git", remote + i - 3, 4))
+ remote_len = i - 3;
+
+ note_len = 0;
+ if (*what) {
+ if (*kind)
+ note_len += sprintf(note + note_len, "%s ", kind);
+ note_len += sprintf(note + note_len, "'%s' of ", what);
+ }
+ note_len += sprintf(note + note_len, "%.*s", remote_len, remote);
+ fprintf(fp, "%s\t%s\t%s\n",
+ sha1_to_hex(commit ? commit->object.sha1 : sha1),
+ not_for_merge ? "not-for-merge" : "",
+ note);
+ return update_local_ref(local_name, head, note, verbose, force);
+}
+
+static char *keep;
+static void remove_keep(void)
+{
+ if (keep && *keep)
+ unlink(keep);
+}
+
+static void remove_keep_on_signal(int signo)
+{
+ remove_keep();
+ sigchain_pop(signo);
+ raise(signo);
+}
+
+static char *find_local_name(const char *remote_name, const char *refs,
+ int *force_p, int *not_for_merge_p)
+{
+ const char *ref = refs;
+ int len = strlen(remote_name);
+
+ while (ref) {
+ const char *next;
+ int single_force, not_for_merge;
+
+ while (*ref == '\n')
+ ref++;
+ if (!*ref)
+ break;
+ next = strchr(ref, '\n');
+
+ single_force = not_for_merge = 0;
+ if (*ref == '+') {
+ single_force = 1;
+ ref++;
+ }
+ if (*ref == '.') {
+ not_for_merge = 1;
+ ref++;
+ if (*ref == '+') {
+ single_force = 1;
+ ref++;
+ }
+ }
+ if (!strncmp(remote_name, ref, len) && ref[len] == ':') {
+ const char *local_part = ref + len + 1;
+ int retlen;
+
+ if (!next)
+ retlen = strlen(local_part);
+ else
+ retlen = next - local_part;
+ *force_p = single_force;
+ *not_for_merge_p = not_for_merge;
+ return xmemdupz(local_part, retlen);
+ }
+ ref = next;
+ }
+ return NULL;
+}
+
+static int fetch_native_store(FILE *fp,
+ const char *remote,
+ const char *remote_nick,
+ const char *refs,
+ int verbose, int force)
+{
+ char buffer[1024];
+ int err = 0;
+
+ sigchain_push_common(remove_keep_on_signal);
+ atexit(remove_keep);
+
+ while (fgets(buffer, sizeof(buffer), stdin)) {
+ int len;
+ char *cp;
+ char *local_name;
+ int single_force, not_for_merge;
+
+ for (cp = buffer; *cp && !isspace(*cp); cp++)
+ ;
+ if (*cp)
+ *cp++ = 0;
+ len = strlen(cp);
+ if (len && cp[len-1] == '\n')
+ cp[--len] = 0;
+ if (!strcmp(buffer, "failed"))
+ die("Fetch failure: %s", remote);
+ if (!strcmp(buffer, "pack"))
+ continue;
+ if (!strcmp(buffer, "keep")) {
+ char *od = get_object_directory();
+ int len = strlen(od) + strlen(cp) + 50;
+ keep = xmalloc(len);
+ sprintf(keep, "%s/pack/pack-%s.keep", od, cp);
+ continue;
+ }
+
+ local_name = find_local_name(cp, refs,
+ &single_force, &not_for_merge);
+ if (!local_name)
+ continue;
+ err |= append_fetch_head(fp,
+ buffer, remote, cp, remote_nick,
+ local_name, not_for_merge,
+ verbose, force || single_force);
+ }
+ return err;
+}
+
+static int parse_reflist(const char *reflist)
+{
+ const char *ref;
+
+ printf("refs='");
+ for (ref = reflist; ref; ) {
+ const char *next;
+ while (*ref && isspace(*ref))
+ ref++;
+ if (!*ref)
+ break;
+ for (next = ref; *next && !isspace(*next); next++)
+ ;
+ printf("\n%.*s", (int)(next - ref), ref);
+ ref = next;
+ }
+ printf("'\n");
+
+ printf("rref='");
+ for (ref = reflist; ref; ) {
+ const char *next, *colon;
+ while (*ref && isspace(*ref))
+ ref++;
+ if (!*ref)
+ break;
+ for (next = ref; *next && !isspace(*next); next++)
+ ;
+ if (*ref == '.')
+ ref++;
+ if (*ref == '+')
+ ref++;
+ colon = strchr(ref, ':');
+ putchar('\n');
+ printf("%.*s", (int)((colon ? colon : next) - ref), ref);
+ ref = next;
+ }
+ printf("'\n");
+ return 0;
+}
+
+static int expand_refs_wildcard(const char *ls_remote_result, int numrefs,
+ const char **refs)
+{
+ int i, matchlen, replacelen;
+ int found_one = 0;
+ const char *remote = *refs++;
+ numrefs--;
+
+ if (numrefs == 0) {
+ fprintf(stderr, "Nothing specified for fetching with remote.%s.fetch\n",
+ remote);
+ printf("empty\n");
+ }
+
+ for (i = 0; i < numrefs; i++) {
+ const char *ref = refs[i];
+ const char *lref = ref;
+ const char *colon;
+ const char *tail;
+ const char *ls;
+ const char *next;
+
+ if (*lref == '+')
+ lref++;
+ colon = strchr(lref, ':');
+ tail = lref + strlen(lref);
+ if (!(colon &&
+ 2 < colon - lref &&
+ colon[-1] == '*' &&
+ colon[-2] == '/' &&
+ 2 < tail - (colon + 1) &&
+ tail[-1] == '*' &&
+ tail[-2] == '/')) {
+ /* not a glob */
+ if (!found_one++)
+ printf("explicit\n");
+ printf("%s\n", ref);
+ continue;
+ }
+
+ /* glob */
+ if (!found_one++)
+ printf("glob\n");
+
+ /* lref to colon-2 is remote hierarchy name;
+ * colon+1 to tail-2 is local.
+ */
+ matchlen = (colon-1) - lref;
+ replacelen = (tail-1) - (colon+1);
+ for (ls = ls_remote_result; ls; ls = next) {
+ const char *eol;
+ unsigned char sha1[20];
+ int namelen;
+
+ while (*ls && isspace(*ls))
+ ls++;
+ next = strchr(ls, '\n');
+ eol = !next ? (ls + strlen(ls)) : next;
+ if (!memcmp("^{}", eol-3, 3))
+ continue;
+ if (eol - ls < 40)
+ continue;
+ if (get_sha1_hex(ls, sha1))
+ continue;
+ ls += 40;
+ while (ls < eol && isspace(*ls))
+ ls++;
+ /* ls to next (or eol) is the name.
+ * is it identical to lref to colon-2?
+ */
+ if ((eol - ls) <= matchlen ||
+ strncmp(ls, lref, matchlen))
+ continue;
+
+ /* Yes, it is a match */
+ namelen = eol - ls;
+ if (lref != ref)
+ putchar('+');
+ printf("%.*s:%.*s%.*s\n",
+ namelen, ls,
+ replacelen, colon + 1,
+ namelen - matchlen, ls + matchlen);
+ }
+ }
+ return 0;
+}
+
+static int pick_rref(int sha1_only, const char *rref, const char *ls_remote_result)
+{
+ int err = 0;
+ int lrr_count = lrr_count, i, pass;
+ const char *cp;
+ struct lrr {
+ const char *line;
+ const char *name;
+ int namelen;
+ int shown;
+ } *lrr_list = lrr_list;
+
+ for (pass = 0; pass < 2; pass++) {
+ /* pass 0 counts and allocates, pass 1 fills... */
+ cp = ls_remote_result;
+ i = 0;
+ while (1) {
+ const char *np;
+ while (*cp && isspace(*cp))
+ cp++;
+ if (!*cp)
+ break;
+ np = strchrnul(cp, '\n');
+ if (pass) {
+ lrr_list[i].line = cp;
+ lrr_list[i].name = cp + 41;
+ lrr_list[i].namelen = np - (cp + 41);
+ }
+ i++;
+ cp = np;
+ }
+ if (!pass) {
+ lrr_count = i;
+ lrr_list = xcalloc(lrr_count, sizeof(*lrr_list));
+ }
+ }
+
+ while (1) {
+ const char *next;
+ int rreflen;
+ int i;
+
+ while (*rref && isspace(*rref))
+ rref++;
+ if (!*rref)
+ break;
+ next = strchrnul(rref, '\n');
+ rreflen = next - rref;
+
+ for (i = 0; i < lrr_count; i++) {
+ struct lrr *lrr = &(lrr_list[i]);
+
+ if (rreflen == lrr->namelen &&
+ !memcmp(lrr->name, rref, rreflen)) {
+ if (!lrr->shown)
+ printf("%.*s\n",
+ sha1_only ? 40 : lrr->namelen + 41,
+ lrr->line);
+ lrr->shown = 1;
+ break;
+ }
+ }
+ if (lrr_count <= i) {
+ error("pick-rref: %.*s not found", rreflen, rref);
+ err = 1;
+ }
+ rref = next;
+ }
+ free(lrr_list);
+ return err;
+}
+
+int cmd_fetch__tool(int argc, const char **argv, const char *prefix)
+{
+ int verbose = 0;
+ int force = 0;
+ int sopt = 0;
+
+ while (1 < argc) {
+ const char *arg = argv[1];
+ if (!strcmp("-v", arg))
+ verbose = 1;
+ else if (!strcmp("-f", arg))
+ force = 1;
+ else if (!strcmp("-s", arg))
+ sopt = 1;
+ else
+ break;
+ argc--;
+ argv++;
+ }
+
+ if (argc <= 1)
+ return error("Missing subcommand");
+
+ if (!strcmp("append-fetch-head", argv[1])) {
+ int result;
+ FILE *fp;
+ char *filename;
+
+ if (argc != 8)
+ return error("append-fetch-head takes 6 args");
+ filename = git_path("FETCH_HEAD");
+ fp = fopen(filename, "a");
+ if (!fp)
+ return error("cannot open %s: %s", filename, strerror(errno));
+ result = append_fetch_head(fp, argv[2], argv[3],
+ argv[4], argv[5],
+ argv[6], !!argv[7][0],
+ verbose, force);
+ fclose(fp);
+ return result;
+ }
+ if (!strcmp("native-store", argv[1])) {
+ int result;
+ FILE *fp;
+ char *filename;
+
+ if (argc != 5)
+ return error("fetch-native-store takes 3 args");
+ filename = git_path("FETCH_HEAD");
+ fp = fopen(filename, "a");
+ if (!fp)
+ return error("cannot open %s: %s", filename, strerror(errno));
+ result = fetch_native_store(fp, argv[2], argv[3], argv[4],
+ verbose, force);
+ fclose(fp);
+ return result;
+ }
+ if (!strcmp("parse-reflist", argv[1])) {
+ const char *reflist;
+ if (argc != 3)
+ return error("parse-reflist takes 1 arg");
+ reflist = argv[2];
+ if (!strcmp(reflist, "-"))
+ reflist = get_stdin();
+ return parse_reflist(reflist);
+ }
+ if (!strcmp("pick-rref", argv[1])) {
+ const char *ls_remote_result;
+ if (argc != 4)
+ return error("pick-rref takes 2 args");
+ ls_remote_result = argv[3];
+ if (!strcmp(ls_remote_result, "-"))
+ ls_remote_result = get_stdin();
+ return pick_rref(sopt, argv[2], ls_remote_result);
+ }
+ if (!strcmp("expand-refs-wildcard", argv[1])) {
+ const char *reflist;
+ if (argc < 4)
+ return error("expand-refs-wildcard takes at least 2 args");
+ reflist = argv[2];
+ if (!strcmp(reflist, "-"))
+ reflist = get_stdin();
+ return expand_refs_wildcard(reflist, argc - 3, argv + 3);
+ }
+
+ return error("Unknown subcommand: %s", argv[1]);
+}
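The subcommands above were driven by the scripted git-fetch; their call sites are outside this excerpt, so the following is only an illustrative sketch of the parse-reflist interface, assuming the builtin is invoked as `git fetch--tool` as it was historically. It hands two refspecs to the command and evals the refs=/rref= assignments it prints:

    # Illustration only.  A leading '+' marks a forced ref and a leading '.'
    # marks not-for-merge, as find_local_name() above expects.
    eval "$(git fetch--tool parse-reflist \
        '+refs/heads/master:refs/remotes/origin/master
        .refs/heads/maint:refs/remotes/origin/maint')"
    echo "refspecs:$refs"       # the full refspecs, one per line
    echo "remote refs:$rref"    # source ref names only, markers and :dst stripped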
diff --git a/contrib/examples/git-checkout.sh b/contrib/examples/git-checkout.sh
new file mode 100755
index 0000000..1a7689a
--- /dev/null
+++ b/contrib/examples/git-checkout.sh
@@ -0,0 +1,302 @@
+#!/bin/sh
+
+OPTIONS_KEEPDASHDASH=t
+OPTIONS_SPEC="\
+git-checkout [options] [<branch>] [<paths>...]
+--
+b= create a new branch started at <branch>
+l create the new branch's reflog
+track arrange that the new branch tracks the remote branch
+f proceed even if the index or working tree is not HEAD
+m merge local modifications into the new branch
+q,quiet be quiet
+"
+SUBDIRECTORY_OK=Sometimes
+. git-sh-setup
+require_work_tree
+
+old_name=HEAD
+old=$(git rev-parse --verify $old_name 2>/dev/null)
+oldbranch=$(git symbolic-ref $old_name 2>/dev/null)
+new=
+new_name=
+force=
+branch=
+track=
+newbranch=
+newbranch_log=
+merge=
+quiet=
+v=-v
+LF='
+'
+
+while test $# != 0; do
+ case "$1" in
+ -b)
+ shift
+ newbranch="$1"
+ [ -z "$newbranch" ] &&
+ die "git checkout: -b needs a branch name"
+ git show-ref --verify --quiet -- "refs/heads/$newbranch" &&
+ die "git checkout: branch $newbranch already exists"
+ git check-ref-format "heads/$newbranch" ||
+ die "git checkout: we do not like '$newbranch' as a branch name."
+ ;;
+ -l)
+ newbranch_log=-l
+ ;;
+ --track|--no-track)
+ track="$1"
+ ;;
+ -f)
+ force=1
+ ;;
+ -m)
+ merge=1
+ ;;
+ -q|--quiet)
+ quiet=1
+ v=
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ ;;
+ esac
+ shift
+done
+
+arg="$1"
+rev=$(git rev-parse --verify "$arg" 2>/dev/null)
+if rev=$(git rev-parse --verify "$rev^0" 2>/dev/null)
+then
+ [ -z "$rev" ] && die "unknown flag $arg"
+ new_name="$arg"
+ if git show-ref --verify --quiet -- "refs/heads/$arg"
+ then
+ rev=$(git rev-parse --verify "refs/heads/$arg^0")
+ branch="$arg"
+ fi
+ new="$rev"
+ shift
+elif rev=$(git rev-parse --verify "$rev^{tree}" 2>/dev/null)
+then
+ # checking out selected paths from a tree-ish.
+ new="$rev"
+ new_name="$rev^{tree}"
+ shift
+fi
+[ "$1" = "--" ] && shift
+
+case "$newbranch,$track" in
+,--*)
+ die "git checkout: --track and --no-track require -b"
+esac
+
+case "$force$merge" in
+11)
+ die "git checkout: -f and -m are incompatible"
+esac
+
+# The behaviour of the command with and without explicit path
+# parameters is quite different.
+#
+# Without paths, we are checking out everything in the work tree,
+# possibly switching branches. This is the traditional behaviour.
+#
+# With paths, we are _never_ switching branch, but checking out
+# the named paths from either index (when no rev is given),
+# or the named tree-ish (when rev is given).
+
+if test "$#" -ge 1
+then
+ hint=
+ if test "$#" -eq 1
+ then
+ hint="
+Did you intend to checkout '$@' which can not be resolved as commit?"
+ fi
+ if test '' != "$newbranch$force$merge"
+ then
+ die "git checkout: updating paths is incompatible with switching branches/forcing$hint"
+ fi
+ if test '' != "$new"
+ then
+ # from a specific tree-ish; note that this is for
+ # rescuing paths and is never meant to remove what
+ # is not in the named tree-ish.
+ git ls-tree --full-name -r "$new" "$@" |
+ git update-index --index-info || exit $?
+ fi
+
+ # Make sure the request is about existing paths.
+ git ls-files --full-name --error-unmatch -- "$@" >/dev/null || exit
+ git ls-files --full-name -- "$@" |
+ (cd_to_toplevel && git checkout-index -f -u --stdin)
+
+ # Run a post-checkout hook -- the HEAD does not change so the
+ # current HEAD is passed in for both args
+ if test -x "$GIT_DIR"/hooks/post-checkout; then
+ "$GIT_DIR"/hooks/post-checkout $old $old 0
+ fi
+
+ exit $?
+else
+ # Make sure we did not fall back on $arg^{tree} codepath
+ # since we are not checking out from an arbitrary tree-ish,
+ # but switching branches.
+ if test '' != "$new"
+ then
+ git rev-parse --verify "$new^{commit}" >/dev/null 2>&1 ||
+ die "Cannot switch branch to a non-commit."
+ fi
+fi
+
+# We are switching branches and checking out trees, so
+# we *NEED* to be at the toplevel.
+cd_to_toplevel
+
+[ -z "$new" ] && new=$old && new_name="$old_name"
+
+# If we don't have an existing branch that we're switching to,
+# and we don't have a new branch name for the target we
+# are switching to, then we are detaching our HEAD from any
+# branch. However, if "git checkout HEAD" detaches the HEAD
+# from the current branch, even though that may be logically
+# correct, it feels somewhat funny. More importantly, we do not
+# want "git checkout" nor "git checkout -f" to detach HEAD.
+
+detached=
+detach_warn=
+
+describe_detached_head () {
+ test -n "$quiet" || {
+ printf >&2 "$1 "
+ GIT_PAGER= git log >&2 -1 --pretty=oneline --abbrev-commit "$2" --
+ }
+}
+
+if test -z "$branch$newbranch" && test "$new_name" != "$old_name"
+then
+ detached="$new"
+ if test -n "$oldbranch" && test -z "$quiet"
+ then
+ detach_warn="Note: moving to \"$new_name\" which isn't a local branch
+If you want to create a new branch from this checkout, you may do so
+(now or later) by using -b with the checkout command again. Example:
+ git checkout -b <new_branch_name>"
+ fi
+elif test -z "$oldbranch" && test "$new" != "$old"
+then
+ describe_detached_head 'Previous HEAD position was' "$old"
+fi
+
+if [ "X$old" = X ]
+then
+ if test -z "$quiet"
+ then
+ echo >&2 "warning: You appear to be on a branch yet to be born."
+ echo >&2 "warning: Forcing checkout of $new_name."
+ fi
+ force=1
+fi
+
+if [ "$force" ]
+then
+ git read-tree $v --reset -u $new
+else
+ git update-index --refresh >/dev/null
+ git read-tree $v -m -u --exclude-per-directory=.gitignore $old $new || (
+ case "$merge,$v" in
+ ,*)
+ exit 1 ;;
+ 1,)
+ ;; # quiet
+ *)
+ echo >&2 "Falling back to 3-way merge..." ;;
+ esac
+
+ # Match the index to the working tree, and do a three-way.
+ git diff-files --name-only | git update-index --remove --stdin &&
+ work=`git write-tree` &&
+ git read-tree $v --reset -u $new || exit
+
+ eval GITHEAD_$new='${new_name:-${branch:-$new}}' &&
+ eval GITHEAD_$work=local &&
+ export GITHEAD_$new GITHEAD_$work &&
+ git merge-recursive $old -- $new $work
+
+ # Do not register the cleanly merged paths in the index yet.
+ # this is not a real merge before committing, but just carrying
+ # the working tree changes along.
+ unmerged=`git ls-files -u`
+ git read-tree $v --reset $new
+ case "$unmerged" in
+ '') ;;
+ *)
+ (
+ z40=0000000000000000000000000000000000000000
+ echo "$unmerged" |
+ sed -e 's/^[0-7]* [0-9a-f]* /'"0 $z40 /"
+ echo "$unmerged"
+ ) | git update-index --index-info
+ ;;
+ esac
+ exit 0
+ )
+ saved_err=$?
+ if test "$saved_err" = 0 && test -z "$quiet"
+ then
+ git diff-index --name-status "$new"
+ fi
+ (exit $saved_err)
+fi
+
+#
+# Switch the HEAD pointer to the new branch if we
+# checked out a branch head, and remove any potential
+# old MERGE_HEAD's (subsequent commits will clearly not
+# be based on them, since we re-set the index)
+#
+if [ "$?" -eq 0 ]; then
+ if [ "$newbranch" ]; then
+ git branch $track $newbranch_log "$newbranch" "$new_name" || exit
+ branch="$newbranch"
+ fi
+ if test -n "$branch"
+ then
+ old_branch_name=`expr "z$oldbranch" : 'zrefs/heads/\(.*\)'`
+ GIT_DIR="$GIT_DIR" git symbolic-ref -m "checkout: moving from ${old_branch_name:-$old} to $branch" HEAD "refs/heads/$branch"
+ if test -n "$quiet"
+ then
+ true # nothing
+ elif test "refs/heads/$branch" = "$oldbranch"
+ then
+ echo >&2 "Already on branch \"$branch\""
+ else
+ echo >&2 "Switched to${newbranch:+ a new} branch \"$branch\""
+ fi
+ elif test -n "$detached"
+ then
+ old_branch_name=`expr "z$oldbranch" : 'zrefs/heads/\(.*\)'`
+ git update-ref --no-deref -m "checkout: moving from ${old_branch_name:-$old} to $arg" HEAD "$detached" ||
+ die "Cannot detach HEAD"
+ if test -n "$detach_warn"
+ then
+ echo >&2 "$detach_warn"
+ fi
+ describe_detached_head 'HEAD is now at' HEAD
+ fi
+ rm -f "$GIT_DIR/MERGE_HEAD"
+else
+ exit 1
+fi
+
+# Run a post-checkout hook
+if test -x "$GIT_DIR"/hooks/post-checkout; then
+ "$GIT_DIR"/hooks/post-checkout $old $new 1
+fi
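The long comment in the middle of git-checkout.sh distinguishes checking out paths from a tree-ish from switching branches. A minimal sketch of the first case, using the same plumbing the script pipes together (the revision and the path "Documentation" are placeholders; run from the top of the working tree):

    # Populate the index with the named paths from an arbitrary tree-ish,
    # then force those paths out into the working tree -- no branch switch.
    rev=HEAD~1
    git ls-tree --full-name -r "$rev" Documentation |
            git update-index --index-info &&
    git ls-files --full-name -- Documentation |
            git checkout-index -f -u --stdin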
diff --git a/contrib/examples/git-clean.sh b/contrib/examples/git-clean.sh
new file mode 100755
index 0000000..01c95e9
--- /dev/null
+++ b/contrib/examples/git-clean.sh
@@ -0,0 +1,118 @@
+#!/bin/sh
+#
+# Copyright (c) 2005-2006 Pavel Roskin
+#
+
+OPTIONS_KEEPDASHDASH=
+OPTIONS_SPEC="\
+git-clean [options] <paths>...
+
+Clean untracked files from the working directory
+
+When optional <paths>... arguments are given, the paths
+affected are further limited to those that match them.
+--
+d remove directories as well
+f override clean.requireForce and clean anyway
+n don't remove anything, just show what would be done
+q be quiet, only report errors
+x remove ignored files as well
+X remove only ignored files"
+
+SUBDIRECTORY_OK=Yes
+. git-sh-setup
+require_work_tree
+
+ignored=
+ignoredonly=
+cleandir=
+rmf="rm -f --"
+rmrf="rm -rf --"
+rm_refuse="echo Not removing"
+echo1="echo"
+
+disabled=$(git config --bool clean.requireForce)
+
+while test $# != 0
+do
+ case "$1" in
+ -d)
+ cleandir=1
+ ;;
+ -f)
+ disabled=false
+ ;;
+ -n)
+ disabled=false
+ rmf="echo Would remove"
+ rmrf="echo Would remove"
+ rm_refuse="echo Would not remove"
+ echo1=":"
+ ;;
+ -q)
+ echo1=":"
+ ;;
+ -x)
+ ignored=1
+ ;;
+ -X)
+ ignoredonly=1
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage # should not happen
+ ;;
+ esac
+ shift
+done
+
+# requireForce used to default to false but now it defaults to true.
+# IOW, lack of explicit "clean.requireForce = false" is taken as
+# "clean.requireForce = true".
+case "$disabled" in
+"")
+ die "clean.requireForce not set and -n or -f not given; refusing to clean"
+ ;;
+"true")
+ die "clean.requireForce set and -n or -f not given; refusing to clean"
+ ;;
+esac
+
+if [ "$ignored,$ignoredonly" = "1,1" ]; then
+ die "-x and -X cannot be set together"
+fi
+
+if [ -z "$ignored" ]; then
+ excl="--exclude-per-directory=.gitignore"
+ excl_info= excludes_file=
+ if [ -f "$GIT_DIR/info/exclude" ]; then
+ excl_info="--exclude-from=$GIT_DIR/info/exclude"
+ fi
+ if cfg_excl=$(git config core.excludesfile) && test -f "$cfg_excl"
+ then
+ excludes_file="--exclude-from=$cfg_excl"
+ fi
+ if [ "$ignoredonly" ]; then
+ excl="$excl --ignored"
+ fi
+fi
+
+git ls-files --others --directory \
+ $excl ${excl_info:+"$excl_info"} ${excludes_file:+"$excludes_file"} \
+ -- "$@" |
+while read -r file; do
+ if [ -d "$file" -a ! -L "$file" ]; then
+ if [ -z "$cleandir" ]; then
+ $rm_refuse "$file"
+ continue
+ fi
+ $echo1 "Removing $file"
+ $rmrf "$file"
+ else
+ $echo1 "Removing $file"
+ $rmf "$file"
+ fi
+done
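git-clean.sh derives everything it removes from a single `git ls-files --others --directory` listing with the usual exclude sources. A dry-run sketch of just that listing, assuming it is run from the top of a working tree whose repository directory is .git (core.excludesfile handling is omitted for brevity):

    # Print what the removal loop above would consider: untracked files and
    # directories, honouring .gitignore and .git/info/exclude when present.
    excl_info=
    test -f .git/info/exclude && excl_info=--exclude-from=.git/info/exclude
    git ls-files --others --directory \
            --exclude-per-directory=.gitignore ${excl_info:+"$excl_info"}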
diff --git a/contrib/examples/git-clone.sh b/contrib/examples/git-clone.sh
new file mode 100755
index 0000000..547228e
--- /dev/null
+++ b/contrib/examples/git-clone.sh
@@ -0,0 +1,525 @@
+#!/bin/sh
+#
+# Copyright (c) 2005, Linus Torvalds
+# Copyright (c) 2005, Junio C Hamano
+#
+# Clone a repository into a different directory that does not yet exist.
+
+# See git-sh-setup for why.
+unset CDPATH
+
+OPTIONS_SPEC="\
+git-clone [options] [--] <repo> [<dir>]
+--
+n,no-checkout don't create a checkout
+bare create a bare repository
+naked create a bare repository
+l,local to clone from a local repository
+no-hardlinks don't use local hardlinks, always copy
+s,shared setup as a shared repository
+template= path to the template directory
+q,quiet be quiet
+reference= reference repository
+o,origin= use <name> instead of 'origin' to track upstream
+u,upload-pack= path to git-upload-pack on the remote
+depth= create a shallow clone of that depth
+
+use-separate-remote compatibility, do not use
+no-separate-remote compatibility, do not use"
+
+die() {
+ echo >&2 "$@"
+ exit 1
+}
+
+usage() {
+ exec "$0" -h
+}
+
+eval "$(echo "$OPTIONS_SPEC" | git rev-parse --parseopt -- "$@" || echo exit $?)"
+
+get_repo_base() {
+ (
+ cd "`/bin/pwd`" &&
+ cd "$1" || cd "$1.git" &&
+ {
+ cd .git
+ pwd
+ }
+ ) 2>/dev/null
+}
+
+if [ -n "$GIT_SSL_NO_VERIFY" -o \
+ "`git config --bool http.sslVerify`" = false ]; then
+ curl_extra_args="-k"
+fi
+
+http_fetch () {
+ # $1 = Remote, $2 = Local
+ curl -nsfL $curl_extra_args "$1" >"$2"
+ curl_exit_status=$?
+ case $curl_exit_status in
+ 126|127) exit ;;
+ *) return $curl_exit_status ;;
+ esac
+}
+
+clone_dumb_http () {
+ # $1 - remote, $2 - local
+ cd "$2" &&
+ clone_tmp="$GIT_DIR/clone-tmp" &&
+ mkdir -p "$clone_tmp" || exit 1
+ if [ -n "$GIT_CURL_FTP_NO_EPSV" -o \
+ "`git config --bool http.noEPSV`" = true ]; then
+ curl_extra_args="${curl_extra_args} --disable-epsv"
+ fi
+ http_fetch "$1/info/refs" "$clone_tmp/refs" ||
+ die "Cannot get remote repository information.
+Perhaps git-update-server-info needs to be run there?"
+ test "z$quiet" = z && v=-v || v=
+ while read sha1 refname
+ do
+ name=`expr "z$refname" : 'zrefs/\(.*\)'` &&
+ case "$name" in
+ *^*) continue;;
+ esac
+ case "$bare,$name" in
+ yes,* | ,heads/* | ,tags/*) ;;
+ *) continue ;;
+ esac
+ if test -n "$use_separate_remote" &&
+ branch_name=`expr "z$name" : 'zheads/\(.*\)'`
+ then
+ tname="remotes/$origin/$branch_name"
+ else
+ tname=$name
+ fi
+ git-http-fetch $v -a -w "$tname" "$sha1" "$1" || exit 1
+ done <"$clone_tmp/refs"
+ rm -fr "$clone_tmp"
+ http_fetch "$1/HEAD" "$GIT_DIR/REMOTE_HEAD" ||
+ rm -f "$GIT_DIR/REMOTE_HEAD"
+ if test -f "$GIT_DIR/REMOTE_HEAD"; then
+ head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
+ case "$head_sha1" in
+ 'ref: refs/'*)
+ ;;
+ *)
+ git-http-fetch $v -a "$head_sha1" "$1" ||
+ rm -f "$GIT_DIR/REMOTE_HEAD"
+ ;;
+ esac
+ fi
+}
+
+quiet=
+local=no
+use_local_hardlink=yes
+local_shared=no
+unset template
+no_checkout=
+upload_pack=
+bare=
+reference=
+origin=
+origin_override=
+use_separate_remote=t
+depth=
+no_progress=
+local_explicitly_asked_for=
+test -t 1 || no_progress=--no-progress
+
+while test $# != 0
+do
+ case "$1" in
+ -n|--no-checkout)
+ no_checkout=yes ;;
+ --naked|--bare)
+ bare=yes ;;
+ -l|--local)
+ local_explicitly_asked_for=yes
+ use_local_hardlink=yes
+ ;;
+ --no-hardlinks)
+ use_local_hardlink=no ;;
+ -s|--shared)
+ local_shared=yes ;;
+ --template)
+ shift; template="--template=$1" ;;
+ -q|--quiet)
+ quiet=-q ;;
+ --use-separate-remote|--no-separate-remote)
+ die "clones are always made with separate-remote layout" ;;
+ --reference)
+ shift; reference="$1" ;;
+ -o|--origin)
+ shift;
+ case "$1" in
+ '')
+ usage ;;
+ */*)
+ die "'$1' is not suitable for an origin name"
+ esac
+ git check-ref-format "heads/$1" ||
+ die "'$1' is not suitable for a branch name"
+ test -z "$origin_override" ||
+ die "Do not give more than one --origin options."
+ origin_override=yes
+ origin="$1"
+ ;;
+ -u|--upload-pack)
+ shift
+ upload_pack="--upload-pack=$1" ;;
+ --depth)
+ shift
+ depth="--depth=$1" ;;
+ --)
+ shift
+ break ;;
+ *)
+ usage ;;
+ esac
+ shift
+done
+
+repo="$1"
+test -n "$repo" ||
+ die 'you must specify a repository to clone.'
+
+# --bare implies --no-checkout and --no-separate-remote
+if test yes = "$bare"
+then
+ if test yes = "$origin_override"
+ then
+ die '--bare and --origin $origin options are incompatible.'
+ fi
+ no_checkout=yes
+ use_separate_remote=
+fi
+
+if test -z "$origin"
+then
+ origin=origin
+fi
+
+# Turn the source into an absolute path if
+# it is local
+if base=$(get_repo_base "$repo"); then
+ repo="$base"
+ if test -z "$depth"
+ then
+ local=yes
+ fi
+elif test -f "$repo"
+then
+ case "$repo" in /*) ;; *) repo="$PWD/$repo" ;; esac
+fi
+
+# Decide the directory name of the new repository
+if test -n "$2"
+then
+ dir="$2"
+ test $# = 2 || die "excess parameter to git-clone"
+else
+ # Derive one from the repository name
+ # Try using "humanish" part of source repo if user didn't specify one
+ if test -f "$repo"
+ then
+ # Cloning from a bundle
+ dir=$(echo "$repo" | sed -e 's|/*\.bundle$||' -e 's|.*/||g')
+ else
+ dir=$(echo "$repo" |
+ sed -e 's|/$||' -e 's|:*/*\.git$||' -e 's|.*[/:]||g')
+ fi
+fi
+
+[ -e "$dir" ] && die "destination directory '$dir' already exists."
+[ yes = "$bare" ] && unset GIT_WORK_TREE
+[ -n "$GIT_WORK_TREE" ] && [ -e "$GIT_WORK_TREE" ] &&
+die "working tree '$GIT_WORK_TREE' already exists."
+D=
+W=
+cleanup() {
+ test -z "$D" && rm -rf "$dir"
+ test -z "$W" && test -n "$GIT_WORK_TREE" && rm -rf "$GIT_WORK_TREE"
+ cd ..
+ test -n "$D" && rm -rf "$D"
+ test -n "$W" && rm -rf "$W"
+ exit $err
+}
+trap 'err=$?; cleanup' 0
+mkdir -p "$dir" && D=$(cd "$dir" && pwd) || usage
+test -n "$GIT_WORK_TREE" && mkdir -p "$GIT_WORK_TREE" &&
+W=$(cd "$GIT_WORK_TREE" && pwd) && GIT_WORK_TREE="$W" && export GIT_WORK_TREE
+if test yes = "$bare" || test -n "$GIT_WORK_TREE"; then
+ GIT_DIR="$D"
+else
+ GIT_DIR="$D/.git"
+fi &&
+export GIT_DIR &&
+GIT_CONFIG="$GIT_DIR/config" git-init $quiet ${template+"$template"} || usage
+
+if test -n "$bare"
+then
+ GIT_CONFIG="$GIT_DIR/config" git config core.bare true
+fi
+
+if test -n "$reference"
+then
+ ref_git=
+ if test -d "$reference"
+ then
+ if test -d "$reference/.git/objects"
+ then
+ ref_git="$reference/.git"
+ elif test -d "$reference/objects"
+ then
+ ref_git="$reference"
+ fi
+ fi
+ if test -n "$ref_git"
+ then
+ ref_git=$(cd "$ref_git" && pwd)
+ echo "$ref_git/objects" >"$GIT_DIR/objects/info/alternates"
+ (
+ GIT_DIR="$ref_git" git for-each-ref \
+ --format='%(objectname) %(*objectname)'
+ ) |
+ while read a b
+ do
+ test -z "$a" ||
+ git update-ref "refs/reference-tmp/$a" "$a"
+ test -z "$b" ||
+ git update-ref "refs/reference-tmp/$b" "$b"
+ done
+ else
+ die "reference repository '$reference' is not a local directory."
+ fi
+fi
+
+rm -f "$GIT_DIR/CLONE_HEAD"
+
+# We do local magic only when the user tells us to.
+case "$local" in
+yes)
+ ( cd "$repo/objects" ) ||
+ die "cannot chdir to local '$repo/objects'."
+
+ if test "$local_shared" = yes
+ then
+ mkdir -p "$GIT_DIR/objects/info"
+ echo "$repo/objects" >>"$GIT_DIR/objects/info/alternates"
+ else
+ cpio_quiet_flag=""
+ cpio --help 2>&1 | grep -- --quiet >/dev/null && \
+ cpio_quiet_flag=--quiet
+ l= &&
+ if test "$use_local_hardlink" = yes
+ then
+ # See if we can hardlink and drop "l" if not.
+ sample_file=$(cd "$repo" && \
+ find objects -type f -print | sed -e 1q)
+ # objects directory should not be empty because
+ # we are cloning!
+ test -f "$repo/$sample_file" ||
+ die "fatal: cannot clone empty repository"
+ if ln "$repo/$sample_file" "$GIT_DIR/objects/sample" 2>/dev/null
+ then
+ rm -f "$GIT_DIR/objects/sample"
+ l=l
+ elif test -n "$local_explicitly_asked_for"
+ then
+ echo >&2 "Warning: -l asked but cannot hardlink to $repo"
+ fi
+ fi &&
+ cd "$repo" &&
+ # Create dirs using umask and permissions and destination
+ find objects -type d -print | (cd "$GIT_DIR" && xargs mkdir -p) &&
+ # Copy existing 0444 permissions on content
+ find objects ! -type d -print | cpio $cpio_quiet_flag -pumd$l "$GIT_DIR/" || \
+ exit 1
+ fi
+ git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD" || exit 1
+ ;;
+*)
+ case "$repo" in
+ rsync://*)
+ case "$depth" in
+ "") ;;
+ *) die "shallow over rsync not supported" ;;
+ esac
+ rsync $quiet -av --ignore-existing \
+ --exclude info "$repo/objects/" "$GIT_DIR/objects/" ||
+ exit
+ # Look at objects/info/alternates for rsync -- http will
+ # support it natively and git native ones will do it on the
+ # remote end. Not having that file is not a crime.
+ rsync -q "$repo/objects/info/alternates" \
+ "$GIT_DIR/TMP_ALT" 2>/dev/null ||
+ rm -f "$GIT_DIR/TMP_ALT"
+ if test -f "$GIT_DIR/TMP_ALT"
+ then
+ ( cd "$D" &&
+ . git-parse-remote &&
+ resolve_alternates "$repo" <"$GIT_DIR/TMP_ALT" ) |
+ while read alt
+ do
+ case "$alt" in 'bad alternate: '*) die "$alt";; esac
+ case "$quiet" in
+ '') echo >&2 "Getting alternate: $alt" ;;
+ esac
+ rsync $quiet -av --ignore-existing \
+ --exclude info "$alt" "$GIT_DIR/objects" || exit
+ done
+ rm -f "$GIT_DIR/TMP_ALT"
+ fi
+ git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD" || exit 1
+ ;;
+ https://*|http://*|ftp://*)
+ case "$depth" in
+ "") ;;
+ *) die "shallow over http or ftp not supported" ;;
+ esac
+ if test -z "@@NO_CURL@@"
+ then
+ clone_dumb_http "$repo" "$D"
+ else
+ die "http transport not supported, rebuild Git with curl support"
+ fi
+ ;;
+ *)
+ if [ -f "$repo" ] ; then
+ git bundle unbundle "$repo" > "$GIT_DIR/CLONE_HEAD" ||
+ die "unbundle from '$repo' failed."
+ else
+ case "$upload_pack" in
+ '') git-fetch-pack --all -k $quiet $depth $no_progress "$repo";;
+ *) git-fetch-pack --all -k \
+ $quiet "$upload_pack" $depth $no_progress "$repo" ;;
+ esac >"$GIT_DIR/CLONE_HEAD" ||
+ die "fetch-pack from '$repo' failed."
+ fi
+ ;;
+ esac
+ ;;
+esac
+test -d "$GIT_DIR/refs/reference-tmp" && rm -fr "$GIT_DIR/refs/reference-tmp"
+
+if test -f "$GIT_DIR/CLONE_HEAD"
+then
+ # Read git-fetch-pack -k output and store the remote branches.
+ if [ -n "$use_separate_remote" ]
+ then
+ branch_top="remotes/$origin"
+ else
+ branch_top="heads"
+ fi
+ tag_top="tags"
+ while read sha1 name
+ do
+ case "$name" in
+ *'^{}')
+ continue ;;
+ HEAD)
+ destname="REMOTE_HEAD" ;;
+ refs/heads/*)
+ destname="refs/$branch_top/${name#refs/heads/}" ;;
+ refs/tags/*)
+ destname="refs/$tag_top/${name#refs/tags/}" ;;
+ *)
+ continue ;;
+ esac
+ git update-ref -m "clone: from $repo" "$destname" "$sha1" ""
+ done < "$GIT_DIR/CLONE_HEAD"
+fi
+
+if test -n "$W"; then
+ cd "$W" || exit
+else
+ cd "$D" || exit
+fi
+
+if test -z "$bare"
+then
+ # a non-bare repository is always in separate-remote layout
+ remote_top="refs/remotes/$origin"
+ head_sha1=
+ test ! -r "$GIT_DIR/REMOTE_HEAD" || head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
+ case "$head_sha1" in
+ 'ref: refs/'*)
+ # Uh-oh, the remote told us (http transport done against
+ # new style repository with a symref HEAD).
+ # Ideally we should skip the guesswork but for now
+ # opt for minimum change.
+ head_sha1=`expr "z$head_sha1" : 'zref: refs/heads/\(.*\)'`
+ head_sha1=`cat "$GIT_DIR/$remote_top/$head_sha1"`
+ ;;
+ esac
+
+ # The name under $remote_top the remote HEAD seems to point at.
+ head_points_at=$(
+ (
+ test -f "$GIT_DIR/$remote_top/master" && echo "master"
+ cd "$GIT_DIR/$remote_top" &&
+ find . -type f -print | sed -e 's/^\.\///'
+ ) | (
+ done=f
+ while read name
+ do
+ test t = $done && continue
+ branch_tip=`cat "$GIT_DIR/$remote_top/$name"`
+ if test "$head_sha1" = "$branch_tip"
+ then
+ echo "$name"
+ done=t
+ fi
+ done
+ )
+ )
+
+ # Upstream URL
+ git config remote."$origin".url "$repo" &&
+
+ # Set up the mappings to track the remote branches.
+ git config remote."$origin".fetch \
+ "+refs/heads/*:$remote_top/*" '^$' &&
+
+ # Write out remote.$origin config, and update our "$head_points_at".
+ case "$head_points_at" in
+ ?*)
+ # Local default branch
+ git symbolic-ref HEAD "refs/heads/$head_points_at" &&
+
+ # Tracking branch for the primary branch at the remote.
+ git update-ref HEAD "$head_sha1" &&
+
+ rm -f "refs/remotes/$origin/HEAD"
+ git symbolic-ref "refs/remotes/$origin/HEAD" \
+ "refs/remotes/$origin/$head_points_at" &&
+
+ git config branch."$head_points_at".remote "$origin" &&
+ git config branch."$head_points_at".merge "refs/heads/$head_points_at"
+ ;;
+ '')
+ if test -z "$head_sha1"
+ then
+ # Source had nonexistent ref in HEAD
+ echo >&2 "Warning: Remote HEAD refers to nonexistent ref, unable to checkout."
+ no_checkout=t
+ else
+ # Source had detached HEAD pointing nowhere
+ git update-ref --no-deref HEAD "$head_sha1" &&
+ rm -f "refs/remotes/$origin/HEAD"
+ fi
+ ;;
+ esac
+
+ case "$no_checkout" in
+ '')
+ test "z$quiet" = z -a "z$no_progress" = z && v=-v || v=
+ git read-tree -m -u $v HEAD HEAD
+ esac
+fi
+rm -f "$GIT_DIR/CLONE_HEAD" "$GIT_DIR/REMOTE_HEAD"
+
+trap - 0
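For a non-bare clone, the tail of git-clone.sh wires up the separate-remote layout through plain git config and git symbolic-ref calls. Spelled out by hand for the default case (the URL is a placeholder, and "master" stands in for whatever branch the remote HEAD pointed at):

    repo_url=git://example.com/project.git
    git config remote.origin.url "$repo_url"
    git config remote.origin.fetch '+refs/heads/*:refs/remotes/origin/*'
    git config branch.master.remote origin
    git config branch.master.merge refs/heads/master
    git symbolic-ref HEAD refs/heads/master
    git symbolic-ref refs/remotes/origin/HEAD refs/remotes/origin/master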
diff --git a/contrib/examples/git-commit.sh b/contrib/examples/git-commit.sh
new file mode 100755
index 0000000..23ffb02
--- /dev/null
+++ b/contrib/examples/git-commit.sh
@@ -0,0 +1,639 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Linus Torvalds
+# Copyright (c) 2006 Junio C Hamano
+
+USAGE='[-a | --interactive] [-s] [-v] [--no-verify] [-m <message> | -F <logfile> | (-C|-c) <commit> | --amend] [-u] [-e] [--author <author>] [--template <file>] [[-i | -o] <path>...]'
+SUBDIRECTORY_OK=Yes
+OPTIONS_SPEC=
+. git-sh-setup
+require_work_tree
+
+git rev-parse --verify HEAD >/dev/null 2>&1 || initial_commit=t
+
+case "$0" in
+*status)
+ status_only=t
+ ;;
+*commit)
+ status_only=
+ ;;
+esac
+
+refuse_partial () {
+ echo >&2 "$1"
+ echo >&2 "You might have meant to say 'git commit -i paths...', perhaps?"
+ exit 1
+}
+
+TMP_INDEX=
+THIS_INDEX="${GIT_INDEX_FILE:-$GIT_DIR/index}"
+NEXT_INDEX="$GIT_DIR/next-index$$"
+rm -f "$NEXT_INDEX"
+save_index () {
+ cp -p "$THIS_INDEX" "$NEXT_INDEX"
+}
+
+run_status () {
+ # If TMP_INDEX is defined, that means we are doing
+ # "--only" partial commit, and that index file is used
+ # to build the tree for the commit. Otherwise, if
+ # NEXT_INDEX exists, that is the index file used to
+ # make the commit. Otherwise we are using as-is commit
+ # so the regular index file is what we use to compare.
+ if test '' != "$TMP_INDEX"
+ then
+ GIT_INDEX_FILE="$TMP_INDEX"
+ export GIT_INDEX_FILE
+ elif test -f "$NEXT_INDEX"
+ then
+ GIT_INDEX_FILE="$NEXT_INDEX"
+ export GIT_INDEX_FILE
+ fi
+
+ if test "$status_only" = "t" -o "$use_status_color" = "t"; then
+ color=
+ else
+ color=--nocolor
+ fi
+ git runstatus ${color} \
+ ${verbose:+--verbose} \
+ ${amend:+--amend} \
+ ${untracked_files:+--untracked}
+}
+
+trap '
+ test -z "$TMP_INDEX" || {
+ test -f "$TMP_INDEX" && rm -f "$TMP_INDEX"
+ }
+ rm -f "$NEXT_INDEX"
+' 0
+
+################################################################
+# Command line argument parsing and sanity checking
+
+all=
+also=
+allow_empty=f
+interactive=
+only=
+logfile=
+use_commit=
+amend=
+edit_flag=
+no_edit=
+log_given=
+log_message=
+verify=t
+quiet=
+verbose=
+signoff=
+force_author=
+only_include_assumed=
+untracked_files=
+templatefile="`git config commit.template`"
+while test $# != 0
+do
+ case "$1" in
+ -F|--F|-f|--f|--fi|--fil|--file)
+ case "$#" in 1) usage ;; esac
+ shift
+ no_edit=t
+ log_given=t$log_given
+ logfile="$1"
+ ;;
+ -F*|-f*)
+ no_edit=t
+ log_given=t$log_given
+ logfile="${1#-[Ff]}"
+ ;;
+ --F=*|--f=*|--fi=*|--fil=*|--file=*)
+ no_edit=t
+ log_given=t$log_given
+ logfile="${1#*=}"
+ ;;
+ -a|--a|--al|--all)
+ all=t
+ ;;
+ --allo|--allow|--allow-|--allow-e|--allow-em|--allow-emp|\
+ --allow-empt|--allow-empty)
+ allow_empty=t
+ ;;
+ --au=*|--aut=*|--auth=*|--autho=*|--author=*)
+ force_author="${1#*=}"
+ ;;
+ --au|--aut|--auth|--autho|--author)
+ case "$#" in 1) usage ;; esac
+ shift
+ force_author="$1"
+ ;;
+ -e|--e|--ed|--edi|--edit)
+ edit_flag=t
+ ;;
+ -i|--i|--in|--inc|--incl|--inclu|--includ|--include)
+ also=t
+ ;;
+ --int|--inte|--inter|--intera|--interac|--interact|--interacti|\
+ --interactiv|--interactive)
+ interactive=t
+ ;;
+ -o|--o|--on|--onl|--only)
+ only=t
+ ;;
+ -m|--m|--me|--mes|--mess|--messa|--messag|--message)
+ case "$#" in 1) usage ;; esac
+ shift
+ log_given=m$log_given
+ log_message="${log_message:+${log_message}
+
+}$1"
+ no_edit=t
+ ;;
+ -m*)
+ log_given=m$log_given
+ log_message="${log_message:+${log_message}
+
+}${1#-m}"
+ no_edit=t
+ ;;
+ --m=*|--me=*|--mes=*|--mess=*|--messa=*|--messag=*|--message=*)
+ log_given=m$log_given
+ log_message="${log_message:+${log_message}
+
+}${1#*=}"
+ no_edit=t
+ ;;
+ -n|--n|--no|--no-|--no-v|--no-ve|--no-ver|--no-veri|--no-verif|\
+ --no-verify)
+ verify=
+ ;;
+ --a|--am|--ame|--amen|--amend)
+ amend=t
+ use_commit=HEAD
+ ;;
+ -c)
+ case "$#" in 1) usage ;; esac
+ shift
+ log_given=t$log_given
+ use_commit="$1"
+ no_edit=
+ ;;
+ --ree=*|--reed=*|--reedi=*|--reedit=*|--reedit-=*|--reedit-m=*|\
+ --reedit-me=*|--reedit-mes=*|--reedit-mess=*|--reedit-messa=*|\
+ --reedit-messag=*|--reedit-message=*)
+ log_given=t$log_given
+ use_commit="${1#*=}"
+ no_edit=
+ ;;
+ --ree|--reed|--reedi|--reedit|--reedit-|--reedit-m|--reedit-me|\
+ --reedit-mes|--reedit-mess|--reedit-messa|--reedit-messag|\
+ --reedit-message)
+ case "$#" in 1) usage ;; esac
+ shift
+ log_given=t$log_given
+ use_commit="$1"
+ no_edit=
+ ;;
+ -C)
+ case "$#" in 1) usage ;; esac
+ shift
+ log_given=t$log_given
+ use_commit="$1"
+ no_edit=t
+ ;;
+ --reu=*|--reus=*|--reuse=*|--reuse-=*|--reuse-m=*|--reuse-me=*|\
+ --reuse-mes=*|--reuse-mess=*|--reuse-messa=*|--reuse-messag=*|\
+ --reuse-message=*)
+ log_given=t$log_given
+ use_commit="${1#*=}"
+ no_edit=t
+ ;;
+ --reu|--reus|--reuse|--reuse-|--reuse-m|--reuse-me|--reuse-mes|\
+ --reuse-mess|--reuse-messa|--reuse-messag|--reuse-message)
+ case "$#" in 1) usage ;; esac
+ shift
+ log_given=t$log_given
+ use_commit="$1"
+ no_edit=t
+ ;;
+ -s|--s|--si|--sig|--sign|--signo|--signof|--signoff)
+ signoff=t
+ ;;
+ -t|--t|--te|--tem|--temp|--templ|--templa|--templat|--template)
+ case "$#" in 1) usage ;; esac
+ shift
+ templatefile="$1"
+ no_edit=
+ ;;
+ -q|--q|--qu|--qui|--quie|--quiet)
+ quiet=t
+ ;;
+ -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose)
+ verbose=t
+ ;;
+ -u|--u|--un|--unt|--untr|--untra|--untrac|--untrack|--untracke|\
+ --untracked|--untracked-|--untracked-f|--untracked-fi|--untracked-fil|\
+ --untracked-file|--untracked-files)
+ untracked_files=t
+ ;;
+ --)
+ shift
+ break
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+done
+case "$edit_flag" in t) no_edit= ;; esac
+
+################################################################
+# Sanity check options
+
+case "$amend,$initial_commit" in
+t,t)
+ die "You do not have anything to amend." ;;
+t,)
+ if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
+ die "You are in the middle of a merge -- cannot amend."
+ fi ;;
+esac
+
+case "$log_given" in
+tt*)
+ die "Only one of -c/-C/-F can be used." ;;
+*tm*|*mt*)
+ die "Option -m cannot be combined with -c/-C/-F." ;;
+esac
+
+case "$#,$also,$only,$amend" in
+*,t,t,*)
+ die "Only one of --include/--only can be used." ;;
+0,t,,* | 0,,t,)
+ die "No paths with --include/--only does not make sense." ;;
+0,,t,t)
+ only_include_assumed="# Clever... amending the last one with dirty index." ;;
+0,,,*)
+ ;;
+*,,,*)
+ only_include_assumed="# Explicit paths specified without -i nor -o; assuming --only paths..."
+ also=
+ ;;
+esac
+unset only
+case "$all,$interactive,$also,$#" in
+*t,*t,*)
+ die "Cannot use -a, --interactive or -i at the same time." ;;
+t,,,[1-9]*)
+ die "Paths with -a does not make sense." ;;
+,t,,[1-9]*)
+ die "Paths with --interactive does not make sense." ;;
+,,t,0)
+ die "No paths with -i does not make sense." ;;
+esac
+
+if test ! -z "$templatefile" -a -z "$log_given"
+then
+ if test ! -f "$templatefile"
+ then
+ die "Commit template file does not exist."
+ fi
+fi
+
+################################################################
+# Prepare index to have a tree to be committed
+
+case "$all,$also" in
+t,)
+ if test ! -f "$THIS_INDEX"
+ then
+ die 'nothing to commit (use "git add file1 file2" to include for commit)'
+ fi
+ save_index &&
+ (
+ cd_to_toplevel &&
+ GIT_INDEX_FILE="$NEXT_INDEX" &&
+ export GIT_INDEX_FILE &&
+ git diff-files --name-only -z |
+ git update-index --remove -z --stdin
+ ) || exit
+ ;;
+,t)
+ save_index &&
+ git ls-files --error-unmatch -- "$@" >/dev/null || exit
+
+ git diff-files --name-only -z -- "$@" |
+ (
+ cd_to_toplevel &&
+ GIT_INDEX_FILE="$NEXT_INDEX" &&
+ export GIT_INDEX_FILE &&
+ git update-index --remove -z --stdin
+ ) || exit
+ ;;
+,)
+ if test "$interactive" = t; then
+ git add --interactive || exit
+ fi
+ case "$#" in
+ 0)
+ ;; # commit as-is
+ *)
+ if test -f "$GIT_DIR/MERGE_HEAD"
+ then
+ refuse_partial "Cannot do a partial commit during a merge."
+ fi
+
+ TMP_INDEX="$GIT_DIR/tmp-index$$"
+ W=
+ test -z "$initial_commit" && W=--with-tree=HEAD
+ commit_only=`git ls-files --error-unmatch $W -- "$@"` || exit
+
+ # Build a temporary index and update the real index
+ # the same way.
+ if test -z "$initial_commit"
+ then
+ GIT_INDEX_FILE="$THIS_INDEX" \
+ git read-tree --index-output="$TMP_INDEX" -i -m HEAD
+ else
+ rm -f "$TMP_INDEX"
+ fi || exit
+
+ printf '%s\n' "$commit_only" |
+ GIT_INDEX_FILE="$TMP_INDEX" \
+ git update-index --add --remove --stdin &&
+
+ save_index &&
+ printf '%s\n' "$commit_only" |
+ (
+ GIT_INDEX_FILE="$NEXT_INDEX"
+ export GIT_INDEX_FILE
+ git update-index --add --remove --stdin
+ ) || exit
+ ;;
+ esac
+ ;;
+esac
+
+################################################################
+# If we do as-is commit, the index file will be THIS_INDEX,
+# otherwise NEXT_INDEX after we make this commit. We leave
+# the index as is if we abort.
+
+if test -f "$NEXT_INDEX"
+then
+ USE_INDEX="$NEXT_INDEX"
+else
+ USE_INDEX="$THIS_INDEX"
+fi
+
+case "$status_only" in
+t)
+ # This will silently fail in a read-only repository, which is
+ # what we want.
+ GIT_INDEX_FILE="$USE_INDEX" git update-index -q --unmerged --refresh
+ run_status
+ exit $?
+ ;;
+'')
+ GIT_INDEX_FILE="$USE_INDEX" git update-index -q --refresh || exit
+ ;;
+esac
+
+################################################################
+# Grab commit message, write out tree and make commit.
+
+if test t = "$verify" && test -x "$GIT_DIR"/hooks/pre-commit
+then
+ GIT_INDEX_FILE="${TMP_INDEX:-${USE_INDEX}}" "$GIT_DIR"/hooks/pre-commit \
+ || exit
+fi
+
+if test "$log_message" != ''
+then
+ printf '%s\n' "$log_message"
+elif test "$logfile" != ""
+then
+ if test "$logfile" = -
+ then
+ test -t 0 &&
+ echo >&2 "(reading log message from standard input)"
+ cat
+ else
+ cat <"$logfile"
+ fi
+elif test "$use_commit" != ""
+then
+ encoding=$(git config i18n.commitencoding || echo UTF-8)
+ git show -s --pretty=raw --encoding="$encoding" "$use_commit" |
+ sed -e '1,/^$/d' -e 's/^ //'
+elif test -f "$GIT_DIR/MERGE_MSG"
+then
+ cat "$GIT_DIR/MERGE_MSG"
+elif test -f "$GIT_DIR/SQUASH_MSG"
+then
+ cat "$GIT_DIR/SQUASH_MSG"
+elif test "$templatefile" != ""
+then
+ cat "$templatefile"
+fi | git stripspace >"$GIT_DIR"/COMMIT_EDITMSG
+
+case "$signoff" in
+t)
+ sign=$(git var GIT_COMMITTER_IDENT | sed -e '
+ s/>.*/>/
+ s/^/Signed-off-by: /
+ ')
+ blank_before_signoff=
+ tail -n 1 "$GIT_DIR"/COMMIT_EDITMSG |
+ grep 'Signed-off-by:' >/dev/null || blank_before_signoff='
+'
+ tail -n 1 "$GIT_DIR"/COMMIT_EDITMSG |
+ grep "$sign"$ >/dev/null ||
+ printf '%s%s\n' "$blank_before_signoff" "$sign" \
+ >>"$GIT_DIR"/COMMIT_EDITMSG
+ ;;
+esac
+
+if test -f "$GIT_DIR/MERGE_HEAD" && test -z "$no_edit"; then
+ echo "#"
+ echo "# It looks like you may be committing a MERGE."
+ echo "# If this is not correct, please remove the file"
+ printf '%s\n' "# $GIT_DIR/MERGE_HEAD"
+ echo "# and try again"
+ echo "#"
+fi >>"$GIT_DIR"/COMMIT_EDITMSG
+
+# Author
+if test '' != "$use_commit"
+then
+ eval "$(get_author_ident_from_commit "$use_commit")"
+ export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_AUTHOR_DATE
+fi
+if test '' != "$force_author"
+then
+ GIT_AUTHOR_NAME=`expr "z$force_author" : 'z\(.*[^ ]\) *<.*'` &&
+ GIT_AUTHOR_EMAIL=`expr "z$force_author" : '.*\(<.*\)'` &&
+ test '' != "$GIT_AUTHOR_NAME" &&
+ test '' != "$GIT_AUTHOR_EMAIL" ||
+ die "malformed --author parameter"
+ export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL
+fi
+
+PARENTS="-p HEAD"
+if test -z "$initial_commit"
+then
+ rloga='commit'
+ if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
+ rloga='commit (merge)'
+ PARENTS="-p HEAD "`sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD"`
+ elif test -n "$amend"; then
+ rloga='commit (amend)'
+ PARENTS=$(git cat-file commit HEAD |
+ sed -n -e '/^$/q' -e 's/^parent /-p /p')
+ fi
+ current="$(git rev-parse --verify HEAD)"
+else
+ if [ -z "$(git ls-files)" ]; then
+ echo >&2 'nothing to commit (use "git add file1 file2" to include for commit)'
+ exit 1
+ fi
+ PARENTS=""
+ rloga='commit (initial)'
+ current=''
+fi
+set_reflog_action "$rloga"
+
+if test -z "$no_edit"
+then
+ {
+ echo ""
+ echo "# Please enter the commit message for your changes."
+ echo "# (Comment lines starting with '#' will not be included)"
+ test -z "$only_include_assumed" || echo "$only_include_assumed"
+ run_status
+ } >>"$GIT_DIR"/COMMIT_EDITMSG
+else
+ # we need to check if there is anything to commit
+ run_status >/dev/null
+fi
+case "$allow_empty,$?,$PARENTS" in
+t,* | ?,0,* | ?,*,-p' '?*-p' '?*)
+ # an explicit --allow-empty, or a merge commit can record the
+ # same tree as its parent. Otherwise having committable paths
+ # is required.
+ ;;
+*)
+ rm -f "$GIT_DIR/COMMIT_EDITMSG" "$GIT_DIR/SQUASH_MSG"
+ use_status_color=t
+ run_status
+ exit 1
+esac
+
+case "$no_edit" in
+'')
+ git var GIT_AUTHOR_IDENT > /dev/null || die
+ git var GIT_COMMITTER_IDENT > /dev/null || die
+ git_editor "$GIT_DIR/COMMIT_EDITMSG"
+ ;;
+esac
+
+case "$verify" in
+t)
+ if test -x "$GIT_DIR"/hooks/commit-msg
+ then
+ "$GIT_DIR"/hooks/commit-msg "$GIT_DIR"/COMMIT_EDITMSG || exit
+ fi
+esac
+
+if test -z "$no_edit"
+then
+ sed -e '
+ /^diff --git a\/.*/{
+ s///
+ q
+ }
+ /^#/d
+ ' "$GIT_DIR"/COMMIT_EDITMSG
+else
+ cat "$GIT_DIR"/COMMIT_EDITMSG
+fi |
+git stripspace >"$GIT_DIR"/COMMIT_MSG
+
+# Test whether the commit message has any content we didn't supply.
+have_commitmsg=
+grep -v -i '^Signed-off-by' "$GIT_DIR"/COMMIT_MSG |
+ git stripspace > "$GIT_DIR"/COMMIT_BAREMSG
+
+# Is the commit message totally empty?
+if test -s "$GIT_DIR"/COMMIT_BAREMSG
+then
+ if test "$templatefile" != ""
+ then
+ # Test whether this is just the unaltered template.
+ if cnt=`sed -e '/^#/d' < "$templatefile" |
+ git stripspace |
+ diff "$GIT_DIR"/COMMIT_BAREMSG - |
+ wc -l` &&
+ test 0 -lt $cnt
+ then
+ have_commitmsg=t
+ fi
+ else
+ # No template, so the content in the commit message must
+ # have come from the user.
+ have_commitmsg=t
+ fi
+fi
+
+rm -f "$GIT_DIR"/COMMIT_BAREMSG
+
+if test "$have_commitmsg" = "t"
+then
+ if test -z "$TMP_INDEX"
+ then
+ tree=$(GIT_INDEX_FILE="$USE_INDEX" git write-tree)
+ else
+ tree=$(GIT_INDEX_FILE="$TMP_INDEX" git write-tree) &&
+ rm -f "$TMP_INDEX"
+ fi &&
+ commit=$(git commit-tree $tree $PARENTS <"$GIT_DIR/COMMIT_MSG") &&
+ rlogm=$(sed -e 1q "$GIT_DIR"/COMMIT_MSG) &&
+ git update-ref -m "$GIT_REFLOG_ACTION: $rlogm" HEAD $commit "$current" &&
+ rm -f -- "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/MERGE_MSG" &&
+ if test -f "$NEXT_INDEX"
+ then
+ mv "$NEXT_INDEX" "$THIS_INDEX"
+ else
+ : ;# happy
+ fi
+else
+ echo >&2 "* no commit message? aborting commit."
+ false
+fi
+ret="$?"
+rm -f "$GIT_DIR/COMMIT_MSG" "$GIT_DIR/COMMIT_EDITMSG" "$GIT_DIR/SQUASH_MSG"
+
+cd_to_toplevel
+
+git rerere
+
+if test "$ret" = 0
+then
+ git gc --auto
+ if test -x "$GIT_DIR"/hooks/post-commit
+ then
+ "$GIT_DIR"/hooks/post-commit
+ fi
+ if test -z "$quiet"
+ then
+ commit=`git diff-tree --always --shortstat --pretty="format:%h: %s"\
+ --abbrev --summary --root HEAD --`
+ echo "Created${initial_commit:+ initial} commit $commit"
+ fi
+fi
+
+exit "$ret"
diff --git a/contrib/examples/git-fetch.sh b/contrib/examples/git-fetch.sh
new file mode 100755
index 0000000..a314273
--- /dev/null
+++ b/contrib/examples/git-fetch.sh
@@ -0,0 +1,379 @@
+#!/bin/sh
+#
+
+USAGE='<fetch-options> <repository> <refspec>...'
+SUBDIRECTORY_OK=Yes
+. git-sh-setup
+set_reflog_action "fetch $*"
+cd_to_toplevel ;# probably unnecessary...
+
+. git-parse-remote
+_x40='[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]'
+_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40"
+
+LF='
+'
+IFS="$LF"
+
+no_tags=
+tags=
+append=
+force=
+verbose=
+update_head_ok=
+exec=
+keep=
+shallow_depth=
+no_progress=
+test -t 1 || no_progress=--no-progress
+quiet=
+while test $# != 0
+do
+ case "$1" in
+ -a|--a|--ap|--app|--appe|--appen|--append)
+ append=t
+ ;;
+ --upl|--uplo|--uploa|--upload|--upload-|--upload-p|\
+ --upload-pa|--upload-pac|--upload-pack)
+ shift
+ exec="--upload-pack=$1"
+ ;;
+ --upl=*|--uplo=*|--uploa=*|--upload=*|\
+ --upload-=*|--upload-p=*|--upload-pa=*|--upload-pac=*|--upload-pack=*)
+ exec=--upload-pack=$(expr "z$1" : 'z-[^=]*=\(.*\)')
+ shift
+ ;;
+ -f|--f|--fo|--for|--forc|--force)
+ force=t
+ ;;
+ -t|--t|--ta|--tag|--tags)
+ tags=t
+ ;;
+ -n|--n|--no|--no-|--no-t|--no-ta|--no-tag|--no-tags)
+ no_tags=t
+ ;;
+ -u|--u|--up|--upd|--upda|--updat|--update|--update-|--update-h|\
+ --update-he|--update-hea|--update-head|--update-head-|\
+ --update-head-o|--update-head-ok)
+ update_head_ok=t
+ ;;
+ -q|--q|--qu|--qui|--quie|--quiet)
+ quiet=--quiet
+ ;;
+ -v|--verbose)
+ verbose="$verbose"Yes
+ ;;
+ -k|--k|--ke|--kee|--keep)
+ keep='-k -k'
+ ;;
+ --depth=*)
+ shallow_depth="--depth=`expr "z$1" : 'z-[^=]*=\(.*\)'`"
+ ;;
+ --depth)
+ shift
+ shallow_depth="--depth=$1"
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+done
+
+case "$#" in
+0)
+ origin=$(get_default_remote)
+ test -n "$(get_remote_url ${origin})" ||
+ die "Where do you want to fetch from today?"
+ set x $origin ; shift ;;
+esac
+
+if test -z "$exec"
+then
+ # No command line override and we have configuration for the remote.
+ exec="--upload-pack=$(get_uploadpack $1)"
+fi
+
+remote_nick="$1"
+remote=$(get_remote_url "$@")
+refs=
+rref=
+rsync_slurped_objects=
+
+if test "" = "$append"
+then
+ : >"$GIT_DIR/FETCH_HEAD"
+fi
+
+# Global that is reused later
+ls_remote_result=$(git ls-remote $exec "$remote") ||
+ die "Cannot get the repository state from $remote"
+
+append_fetch_head () {
+ flags=
+ test -n "$verbose" && flags="$flags$LF-v"
+ test -n "$force$single_force" && flags="$flags$LF-f"
+ GIT_REFLOG_ACTION="$GIT_REFLOG_ACTION" \
+ git fetch--tool $flags append-fetch-head "$@"
+}
+
+# updating the current HEAD with git-fetch in a bare
+# repository is always fine.
+if test -z "$update_head_ok" && test $(is_bare_repository) = false
+then
+ orig_head=$(git rev-parse --verify HEAD 2>/dev/null)
+fi
+
+# Allow --tags/--no-tags from remote.$1.tagopt
+case "$tags$no_tags" in
+'')
+ case "$(git config --get "remote.$1.tagopt")" in
+ --tags)
+ tags=t ;;
+ --no-tags)
+ no_tags=t ;;
+ esac
+esac
+
+# If --tags (and later --heads or --all) is specified, then we are
+# not talking about the defaults stored in the Pull: line of remotes or
+# branches files; we fetch only those and the refspecs explicitly given.
+# Otherwise we do what we always did.
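+# Illustrative invocations (the remote name "origin" is an assumption,
+# not taken from this script):
+#   git fetch origin --tags        # fetch only tags and explicit refspecs
+#   git fetch origin               # fall back to the configured defaults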
+
+reflist=$(get_remote_refs_for_fetch "$@")
+if test "$tags"
+then
+ taglist=`IFS=' ' &&
+ echo "$ls_remote_result" |
+ git show-ref --exclude-existing=refs/tags/ |
+ while read sha1 name
+ do
+ echo ".${name}:${name}"
+ done` || exit
+ if test "$#" -gt 1
+ then
+ # remote URL plus explicit refspecs; we need to merge them.
+ reflist="$reflist$LF$taglist"
+ else
+ # No explicit refspecs; fetch tags only.
+ reflist=$taglist
+ fi
+fi
+
+fetch_all_at_once () {
+
+ eval=$(echo "$1" | git fetch--tool parse-reflist "-")
+ eval "$eval"
+
+ ( : subshell because we muck with IFS
+ IFS=" $LF"
+ (
+ if test "$remote" = . ; then
+ git show-ref $rref || echo failed "$remote"
+ elif test -f "$remote" ; then
+ test -n "$shallow_depth" &&
+ die "shallow clone with bundle is not supported"
+ git bundle unbundle "$remote" $rref ||
+ echo failed "$remote"
+ else
+ if test -d "$remote" &&
+
+ # The remote might be our alternate. With
+ # this optimization we will bypass fetch-pack
+ # altogether, which means we cannot be doing
+ # the shallow stuff at all.
+ test ! -f "$GIT_DIR/shallow" &&
+ test -z "$shallow_depth" &&
+
+ # See if all of what we are going to fetch are
+ # connected to our repository's tips, in which
+ # case we do not have to do any fetch.
+ theirs=$(echo "$ls_remote_result" | \
+ git fetch--tool -s pick-rref "$rref" "-") &&
+
+ # This will barf when $theirs reach an object that
+ # we do not have in our repository. Otherwise,
+ # we already have everything the fetch would bring in.
+ git rev-list --objects $theirs --not --all \
+ >/dev/null 2>/dev/null
+ then
+ echo "$ls_remote_result" | \
+ git fetch--tool pick-rref "$rref" "-"
+ else
+ flags=
+ case $verbose in
+ YesYes*)
+ flags="-v"
+ ;;
+ esac
+ git-fetch-pack --thin $exec $keep $shallow_depth \
+ $quiet $no_progress $flags "$remote" $rref ||
+ echo failed "$remote"
+ fi
+ fi
+ ) |
+ (
+ flags=
+ test -n "$verbose" && flags="$flags -v"
+ test -n "$force" && flags="$flags -f"
+ GIT_REFLOG_ACTION="$GIT_REFLOG_ACTION" \
+ git fetch--tool $flags native-store \
+ "$remote" "$remote_nick" "$refs"
+ )
+ ) || exit
+
+}
+
+fetch_per_ref () {
+ reflist="$1"
+ refs=
+ rref=
+
+ for ref in $reflist
+ do
+ refs="$refs$LF$ref"
+
+	# These are relative paths from $GIT_DIR, typically starting at refs/
+	# but they may be HEAD.
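+	# Illustrative refspec values (assumed examples, not from this script):
+	#   ".refs/heads/next"                                marked not-for-merge
+	#   "+refs/heads/master:refs/remotes/origin/master"   forced update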
+ if expr "z$ref" : 'z\.' >/dev/null
+ then
+ not_for_merge=t
+ ref=$(expr "z$ref" : 'z\.\(.*\)')
+ else
+ not_for_merge=
+ fi
+ if expr "z$ref" : 'z+' >/dev/null
+ then
+ single_force=t
+ ref=$(expr "z$ref" : 'z+\(.*\)')
+ else
+ single_force=
+ fi
+ remote_name=$(expr "z$ref" : 'z\([^:]*\):')
+ local_name=$(expr "z$ref" : 'z[^:]*:\(.*\)')
+
+ rref="$rref$LF$remote_name"
+
+ # There are transports that can fetch only one head at a time...
+ case "$remote" in
+ http://* | https://* | ftp://*)
+ test -n "$shallow_depth" &&
+ die "shallow clone with http not supported"
+ proto=`expr "$remote" : '\([^:]*\):'`
+ if [ -n "$GIT_SSL_NO_VERIFY" ]; then
+ curl_extra_args="-k"
+ fi
+ if [ -n "$GIT_CURL_FTP_NO_EPSV" -o \
+ "`git config --bool http.noEPSV`" = true ]; then
+ noepsv_opt="--disable-epsv"
+ fi
+
+ # Find $remote_name from ls-remote output.
+ head=$(echo "$ls_remote_result" | \
+ git fetch--tool -s pick-rref "$remote_name" "-")
+ expr "z$head" : "z$_x40\$" >/dev/null ||
+ die "No such ref $remote_name at $remote"
+ echo >&2 "Fetching $remote_name from $remote using $proto"
+ case "$quiet" in '') v=-v ;; *) v= ;; esac
+ git-http-fetch $v -a "$head" "$remote" || exit
+ ;;
+ rsync://*)
+ test -n "$shallow_depth" &&
+ die "shallow clone with rsync not supported"
+ TMP_HEAD="$GIT_DIR/TMP_HEAD"
+ rsync -L -q "$remote/$remote_name" "$TMP_HEAD" || exit 1
+ head=$(git rev-parse --verify TMP_HEAD)
+ rm -f "$TMP_HEAD"
+ case "$quiet" in '') v=-v ;; *) v= ;; esac
+ test "$rsync_slurped_objects" || {
+ rsync -a $v --ignore-existing --exclude info \
+ "$remote/objects/" "$GIT_OBJECT_DIRECTORY/" || exit
+
+ # Look at objects/info/alternates for rsync -- http will
+ # support it natively and git native ones will do it on
+ # the remote end. Not having that file is not a crime.
+ rsync -q "$remote/objects/info/alternates" \
+ "$GIT_DIR/TMP_ALT" 2>/dev/null ||
+ rm -f "$GIT_DIR/TMP_ALT"
+ if test -f "$GIT_DIR/TMP_ALT"
+ then
+ resolve_alternates "$remote" <"$GIT_DIR/TMP_ALT" |
+ while read alt
+ do
+ case "$alt" in 'bad alternate: '*) die "$alt";; esac
+ echo >&2 "Getting alternate: $alt"
+ rsync -av --ignore-existing --exclude info \
+ "$alt" "$GIT_OBJECT_DIRECTORY/" || exit
+ done
+ rm -f "$GIT_DIR/TMP_ALT"
+ fi
+ rsync_slurped_objects=t
+ }
+ ;;
+ esac
+
+ append_fetch_head "$head" "$remote" \
+ "$remote_name" "$remote_nick" "$local_name" "$not_for_merge" || exit
+
+ done
+
+}
+
+fetch_main () {
+ case "$remote" in
+ http://* | https://* | ftp://* | rsync://* )
+ fetch_per_ref "$@"
+ ;;
+ *)
+ fetch_all_at_once "$@"
+ ;;
+ esac
+}
+
+fetch_main "$reflist" || exit
+
+# automated tag following
+case "$no_tags$tags" in
+'')
+ case "$reflist" in
+ *:refs/*)
+ # effective only when we are following remote branch
+ # using local tracking branch.
+ taglist=$(IFS=' ' &&
+ echo "$ls_remote_result" |
+ git show-ref --exclude-existing=refs/tags/ |
+ while read sha1 name
+ do
+ git cat-file -t "$sha1" >/dev/null 2>&1 || continue
+ echo >&2 "Auto-following $name"
+ echo ".${name}:${name}"
+ done)
+ esac
+ case "$taglist" in
+ '') ;;
+ ?*)
+ # do not deepen a shallow tree when following tags
+ shallow_depth=
+ fetch_main "$taglist" || exit ;;
+ esac
+esac
+
+# If the original head was empty (i.e. no "master" yet), or
+# if we were told not to worry, we do not have to check.
+case "$orig_head" in
+'')
+ ;;
+?*)
+ curr_head=$(git rev-parse --verify HEAD 2>/dev/null)
+ if test "$curr_head" != "$orig_head"
+ then
+ git update-ref \
+ -m "$GIT_REFLOG_ACTION: Undoing incorrectly fetched HEAD." \
+ HEAD "$orig_head"
+ die "Cannot fetch into the current branch."
+ fi
+ ;;
+esac
diff --git a/contrib/examples/git-gc.sh b/contrib/examples/git-gc.sh
new file mode 100755
index 0000000..1597e9f
--- /dev/null
+++ b/contrib/examples/git-gc.sh
@@ -0,0 +1,37 @@
+#!/bin/sh
+#
+# Copyright (c) 2006, Shawn O. Pearce
+#
+# Cleanup unreachable files and optimize the repository.
+
+USAGE='[--prune]'
+SUBDIRECTORY_OK=Yes
+. git-sh-setup
+
+no_prune=:
+while test $# != 0
+do
+ case "$1" in
+ --prune)
+ no_prune=
+ ;;
+ --)
+ usage
+ ;;
+ esac
+ shift
+done
+
+case "$(git config --get gc.packrefs)" in
+notbare|"")
+ test $(is_bare_repository) = true || pack_refs=true;;
+*)
+ pack_refs=$(git config --bool --get gc.packrefs)
+esac
+
+test "true" != "$pack_refs" ||
+git pack-refs --prune &&
+git reflog expire --all &&
+git-repack -a -d -l &&
+$no_prune git prune &&
+git rerere gc || exit
diff --git a/contrib/examples/git-ls-remote.sh b/contrib/examples/git-ls-remote.sh
new file mode 100755
index 0000000..fec70bb
--- /dev/null
+++ b/contrib/examples/git-ls-remote.sh
@@ -0,0 +1,142 @@
+#!/bin/sh
+#
+
+usage () {
+ echo >&2 "usage: $0 [--heads] [--tags] [-u|--upload-pack <upload-pack>]"
+ echo >&2 " <repository> <refs>..."
+ exit 1;
+}
+
+die () {
+ echo >&2 "$*"
+ exit 1
+}
+
+exec=
+while test $# != 0
+do
+ case "$1" in
+ -h|--h|--he|--hea|--head|--heads)
+ heads=heads; shift ;;
+ -t|--t|--ta|--tag|--tags)
+ tags=tags; shift ;;
+ -u|--u|--up|--upl|--uploa|--upload|--upload-|--upload-p|--upload-pa|\
+ --upload-pac|--upload-pack)
+ shift
+ exec="--upload-pack=$1"
+ shift;;
+ -u=*|--u=*|--up=*|--upl=*|--uplo=*|--uploa=*|--upload=*|\
+ --upload-=*|--upload-p=*|--upload-pa=*|--upload-pac=*|--upload-pack=*)
+ exec=--upload-pack=$(expr "z$1" : 'z-[^=]*=\(.*\)')
+ shift;;
+ --)
+ shift; break ;;
+ -*)
+ usage ;;
+ *)
+ break ;;
+ esac
+done
+
+case "$#" in 0) usage ;; esac
+
+case ",$heads,$tags," in
+,,,) heads=heads tags=tags other=other ;;
+esac
+
+. git-parse-remote
+peek_repo="$(get_remote_url "$@")"
+shift
+
+tmp=.ls-remote-$$
+trap "rm -fr $tmp-*" 0 1 2 3 15
+tmpdir=$tmp-d
+
+case "$peek_repo" in
+http://* | https://* | ftp://* )
+ if [ -n "$GIT_SSL_NO_VERIFY" -o \
+ "`git config --bool http.sslVerify`" = false ]; then
+ curl_extra_args="-k"
+ fi
+ if [ -n "$GIT_CURL_FTP_NO_EPSV" -o \
+ "`git config --bool http.noEPSV`" = true ]; then
+ curl_extra_args="${curl_extra_args} --disable-epsv"
+ fi
+ curl -nsf $curl_extra_args --header "Pragma: no-cache" "$peek_repo/info/refs" ||
+ echo "failed slurping"
+ ;;
+
+rsync://* )
+ mkdir $tmpdir &&
+ rsync -rlq "$peek_repo/HEAD" $tmpdir &&
+ rsync -rq "$peek_repo/refs" $tmpdir || {
+ echo "failed slurping"
+ exit
+ }
+ head=$(cat "$tmpdir/HEAD") &&
+ case "$head" in
+ ref:' '*)
+ head=$(expr "z$head" : 'zref: \(.*\)') &&
+ head=$(cat "$tmpdir/$head") || exit
+ esac &&
+ echo "$head HEAD"
+ (cd $tmpdir && find refs -type f) |
+ while read path
+ do
+ tr -d '\012' <"$tmpdir/$path"
+ echo " $path"
+ done &&
+ rm -fr $tmpdir
+ ;;
+
+* )
+ if test -f "$peek_repo" ; then
+ git bundle list-heads "$peek_repo" ||
+ echo "failed slurping"
+ else
+ git-peek-remote $exec "$peek_repo" ||
+ echo "failed slurping"
+ fi
+ ;;
+esac |
+sort -t ' ' -k 2 |
+while read sha1 path
+do
+ case "$sha1" in
+ failed)
+ exit 1 ;;
+ esac
+ case "$path" in
+ refs/heads/*)
+ group=heads ;;
+ refs/tags/*)
+ group=tags ;;
+ *)
+ group=other ;;
+ esac
+ case ",$heads,$tags,$other," in
+ *,$group,*)
+ ;;
+ *)
+ continue;;
+ esac
+ case "$#" in
+ 0)
+ match=yes ;;
+ *)
+ match=no
+ for pat
+ do
+ case "/$path" in
+ */$pat )
+ match=yes
+ break ;;
+ esac
+ done
+ esac
+ case "$match" in
+ no)
+ continue ;;
+ esac
+ echo "$sha1 $path"
+done
diff --git a/contrib/examples/git-merge-ours.sh b/contrib/examples/git-merge-ours.sh
new file mode 100755
index 0000000..29dba4b
--- /dev/null
+++ b/contrib/examples/git-merge-ours.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Junio C Hamano
+#
+# Pretend we resolved the heads, but declare our tree trumps everybody else.
+#
+
+# We need to exit with 2 if the index does not match our HEAD tree,
+# because the current index is what we will be committing as the
+# merge result.
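+# Illustrative invocation of this strategy (the branch name "topic" is
+# an assumption):
+#   git merge -s ours topic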
+
+git diff-index --quiet --cached HEAD -- || exit 2
+
+exit 0
diff --git a/contrib/examples/git-merge.sh b/contrib/examples/git-merge.sh
new file mode 100755
index 0000000..7b922c3
--- /dev/null
+++ b/contrib/examples/git-merge.sh
@@ -0,0 +1,620 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Junio C Hamano
+#
+
+OPTIONS_KEEPDASHDASH=
+OPTIONS_SPEC="\
+git merge [options] <remote>...
+git merge [options] <msg> HEAD <remote>
+--
+stat show a diffstat at the end of the merge
+n don't show a diffstat at the end of the merge
+summary (synonym to --stat)
+log add list of one-line log to merge commit message
+squash create a single commit instead of doing a merge
+commit perform a commit if the merge succeeds (default)
+ff allow fast-forward (default)
+ff-only abort if fast-forward is not possible
+rerere-autoupdate update index with any reused conflict resolution
+s,strategy= merge strategy to use
+X= option for selected merge strategy
+m,message= message to be used for the merge commit (if any)
+"
+
+SUBDIRECTORY_OK=Yes
+. git-sh-setup
+require_work_tree
+cd_to_toplevel
+
+test -z "$(git ls-files -u)" ||
+ die "Merge is not possible because you have unmerged files."
+
+! test -e "$GIT_DIR/MERGE_HEAD" ||
+ die 'You have not concluded your merge (MERGE_HEAD exists).'
+
+LF='
+'
+
+all_strategies='recur recursive octopus resolve stupid ours subtree'
+all_strategies="$all_strategies recursive-ours recursive-theirs"
+not_strategies='base file index tree'
+default_twohead_strategies='recursive'
+default_octopus_strategies='octopus'
+no_fast_forward_strategies='subtree ours'
+no_trivial_strategies='recursive recur subtree ours recursive-ours recursive-theirs'
+use_strategies=
+xopt=
+
+allow_fast_forward=t
+fast_forward_only=
+allow_trivial_merge=t
+squash= no_commit= log_arg= rr_arg=
+
+dropsave() {
+ rm -f -- "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/MERGE_MSG" \
+ "$GIT_DIR/MERGE_STASH" "$GIT_DIR/MERGE_MODE" || exit 1
+}
+
+savestate() {
+ # Stash away any local modifications.
+ git stash create >"$GIT_DIR/MERGE_STASH"
+}
+
+restorestate() {
+ if test -f "$GIT_DIR/MERGE_STASH"
+ then
+ git reset --hard $head >/dev/null
+ git stash apply $(cat "$GIT_DIR/MERGE_STASH")
+ git update-index --refresh >/dev/null
+ fi
+}
+
+finish_up_to_date () {
+ case "$squash" in
+ t)
+ echo "$1 (nothing to squash)" ;;
+ '')
+ echo "$1" ;;
+ esac
+ dropsave
+}
+
+squash_message () {
+ echo Squashed commit of the following:
+ echo
+ git log --no-merges --pretty=medium ^"$head" $remoteheads
+}
+
+finish () {
+ if test '' = "$2"
+ then
+ rlogm="$GIT_REFLOG_ACTION"
+ else
+ echo "$2"
+ rlogm="$GIT_REFLOG_ACTION: $2"
+ fi
+ case "$squash" in
+ t)
+ echo "Squash commit -- not updating HEAD"
+ squash_message >"$GIT_DIR/SQUASH_MSG"
+ ;;
+ '')
+ case "$merge_msg" in
+ '')
+ echo "No merge message -- not updating HEAD"
+ ;;
+ *)
+ git update-ref -m "$rlogm" HEAD "$1" "$head" || exit 1
+ git gc --auto
+ ;;
+ esac
+ ;;
+ esac
+ case "$1" in
+ '')
+ ;;
+ ?*)
+ if test "$show_diffstat" = t
+ then
+ # We want color (if set), but no pager
+ GIT_PAGER='' git diff --stat --summary -M "$head" "$1"
+ fi
+ ;;
+ esac
+
+ # Run a post-merge hook
+ if test -x "$GIT_DIR"/hooks/post-merge
+ then
+ case "$squash" in
+ t)
+ "$GIT_DIR"/hooks/post-merge 1
+ ;;
+ '')
+ "$GIT_DIR"/hooks/post-merge 0
+ ;;
+ esac
+ fi
+}
+
+merge_name () {
+ remote="$1"
+ rh=$(git rev-parse --verify "$remote^0" 2>/dev/null) || return
+ if truname=$(expr "$remote" : '\(.*\)~[0-9]*$') &&
+ git show-ref -q --verify "refs/heads/$truname" 2>/dev/null
+ then
+ echo "$rh branch '$truname' (early part) of ."
+ return
+ fi
+ if found_ref=$(git rev-parse --symbolic-full-name --verify \
+ "$remote" 2>/dev/null)
+ then
+ expanded=$(git check-ref-format --branch "$remote") ||
+ exit
+ if test "${found_ref#refs/heads/}" != "$found_ref"
+ then
+ echo "$rh branch '$expanded' of ."
+ return
+ elif test "${found_ref#refs/remotes/}" != "$found_ref"
+ then
+ echo "$rh remote branch '$expanded' of ."
+ return
+ fi
+ fi
+ if test "$remote" = "FETCH_HEAD" -a -r "$GIT_DIR/FETCH_HEAD"
+ then
+ sed -e 's/ not-for-merge / /' -e 1q \
+ "$GIT_DIR/FETCH_HEAD"
+ return
+ fi
+ echo "$rh commit '$remote'"
+}
+
+parse_config () {
+ while test $# != 0; do
+ case "$1" in
+ -n|--no-stat|--no-summary)
+ show_diffstat=false ;;
+ --stat|--summary)
+ show_diffstat=t ;;
+ --log|--no-log)
+ log_arg=$1 ;;
+ --squash)
+ test "$allow_fast_forward" = t ||
+ die "You cannot combine --squash with --no-ff."
+ squash=t no_commit=t ;;
+ --no-squash)
+ squash= no_commit= ;;
+ --commit)
+ no_commit= ;;
+ --no-commit)
+ no_commit=t ;;
+ --ff)
+ allow_fast_forward=t ;;
+ --no-ff)
+ test "$squash" != t ||
+ die "You cannot combine --squash with --no-ff."
+ test "$fast_forward_only" != t ||
+ die "You cannot combine --ff-only with --no-ff."
+ allow_fast_forward=f ;;
+ --ff-only)
+ test "$allow_fast_forward" != f ||
+ die "You cannot combine --ff-only with --no-ff."
+ fast_forward_only=t ;;
+ --rerere-autoupdate|--no-rerere-autoupdate)
+ rr_arg=$1 ;;
+ -s|--strategy)
+ shift
+ case " $all_strategies " in
+ *" $1 "*)
+ use_strategies="$use_strategies$1 "
+ ;;
+ *)
+ case " $not_strategies " in
+ *" $1 "*)
+ false
+ esac &&
+ type "git-merge-$1" >/dev/null 2>&1 ||
+ die "available strategies are: $all_strategies"
+ use_strategies="$use_strategies$1 "
+ ;;
+ esac
+ ;;
+ -X)
+ shift
+ xopt="${xopt:+$xopt }$(git rev-parse --sq-quote "--$1")"
+ ;;
+ -m|--message)
+ shift
+ merge_msg="$1"
+ have_message=t
+ ;;
+ --)
+ shift
+ break ;;
+ *) usage ;;
+ esac
+ shift
+ done
+ args_left=$#
+}
+
+test $# != 0 || usage
+
+have_message=
+
+if branch=$(git-symbolic-ref -q HEAD)
+then
+ mergeopts=$(git config "branch.${branch#refs/heads/}.mergeoptions")
+ if test -n "$mergeopts"
+ then
+ parse_config $mergeopts --
+ fi
+fi
+
+parse_config "$@"
+while test $args_left -lt $#; do shift; done
+
+if test -z "$show_diffstat"; then
+ test "$(git config --bool merge.diffstat)" = false && show_diffstat=false
+ test "$(git config --bool merge.stat)" = false && show_diffstat=false
+ test -z "$show_diffstat" && show_diffstat=t
+fi
+
+# This could be the traditional "merge <msg> HEAD <commit>..." form; the
+# way we can tell is to see if the second token is HEAD, but some
+# people might have misused the interface and used a committish that
+# is the same as HEAD there instead. The traditional format would never
+# take "-m", so checking for it is an additional safety measure.
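+# Illustrative invocations (the branch name "topic" is an assumption):
+#   git merge "Merge work in topic" HEAD topic   # traditional form
+#   git merge -m "Merge work in topic" topic     # first-class UI form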
+
+if test -z "$have_message" &&
+ second_token=$(git rev-parse --verify "$2^0" 2>/dev/null) &&
+ head_commit=$(git rev-parse --verify "HEAD" 2>/dev/null) &&
+ test "$second_token" = "$head_commit"
+then
+ merge_msg="$1"
+ shift
+ head_arg="$1"
+ shift
+elif ! git rev-parse --verify HEAD >/dev/null 2>&1
+then
+ # If the merged head is a valid one there is no reason to
+ # forbid "git merge" into a branch yet to be born. We do
+ # the same for "git pull".
+ if test 1 -ne $#
+ then
+ echo >&2 "Can merge only exactly one commit into empty head"
+ exit 1
+ fi
+
+ test "$squash" != t ||
+ die "Squash commit into empty head not supported yet"
+ test "$allow_fast_forward" = t ||
+ die "Non-fast-forward into an empty head does not make sense"
+ rh=$(git rev-parse --verify "$1^0") ||
+ die "$1 - not something we can merge"
+
+ git update-ref -m "initial pull" HEAD "$rh" "" &&
+ git read-tree --reset -u HEAD
+ exit
+
+else
+ # We are invoked directly as the first-class UI.
+ head_arg=HEAD
+
+ # All the rest are the commits being merged; prepare
+ # the standard merge summary message to be appended to
+ # the given message. If remote is invalid we will die
+ # later in the common codepath so we discard the error
+ # in this loop.
+ merge_msg="$(
+ for remote
+ do
+ merge_name "$remote"
+ done |
+ if test "$have_message" = t
+ then
+ git fmt-merge-msg -m "$merge_msg" $log_arg
+ else
+ git fmt-merge-msg $log_arg
+ fi
+ )"
+fi
+head=$(git rev-parse --verify "$head_arg"^0) || usage
+
+# All the rest are remote heads
+test "$#" = 0 && usage ;# we need at least one remote head.
+set_reflog_action "merge $*"
+
+remoteheads=
+for remote
+do
+ remotehead=$(git rev-parse --verify "$remote"^0 2>/dev/null) ||
+ die "$remote - not something we can merge"
+ remoteheads="${remoteheads}$remotehead "
+ eval GITHEAD_$remotehead='"$remote"'
+ export GITHEAD_$remotehead
+done
+set x $remoteheads ; shift
+
+case "$use_strategies" in
+'')
+ case "$#" in
+ 1)
+ var="`git config --get pull.twohead`"
+ if test -n "$var"
+ then
+ use_strategies="$var"
+ else
+ use_strategies="$default_twohead_strategies"
+ fi ;;
+ *)
+ var="`git config --get pull.octopus`"
+ if test -n "$var"
+ then
+ use_strategies="$var"
+ else
+ use_strategies="$default_octopus_strategies"
+ fi ;;
+ esac
+ ;;
+esac
+
+for s in $use_strategies
+do
+ for ss in $no_fast_forward_strategies
+ do
+ case " $s " in
+ *" $ss "*)
+ allow_fast_forward=f
+ break
+ ;;
+ esac
+ done
+ for ss in $no_trivial_strategies
+ do
+ case " $s " in
+ *" $ss "*)
+ allow_trivial_merge=f
+ break
+ ;;
+ esac
+ done
+done
+
+case "$#" in
+1)
+ common=$(git merge-base --all $head "$@")
+ ;;
+*)
+ common=$(git merge-base --all --octopus $head "$@")
+ ;;
+esac
+echo "$head" >"$GIT_DIR/ORIG_HEAD"
+
+case "$allow_fast_forward,$#,$common,$no_commit" in
+?,*,'',*)
+ # No common ancestors found. We need a real merge.
+ ;;
+?,1,"$1",*)
+	# If head can reach all of the merge heads, we are up to date,
+	# but first handle the most common case of merging one remote.
+ finish_up_to_date "Already up-to-date."
+ exit 0
+ ;;
+t,1,"$head",*)
+ # Again the most common case of merging one remote.
+ echo "Updating $(git rev-parse --short $head)..$(git rev-parse --short $1)"
+ git update-index --refresh 2>/dev/null
+ msg="Fast-forward"
+ if test -n "$have_message"
+ then
+ msg="$msg (no commit created; -m option ignored)"
+ fi
+ new_head=$(git rev-parse --verify "$1^0") &&
+ git read-tree -v -m -u --exclude-per-directory=.gitignore $head "$new_head" &&
+ finish "$new_head" "$msg" || exit
+ dropsave
+ exit 0
+ ;;
+?,1,?*"$LF"?*,*)
+ # We are not doing octopus and not fast-forward. Need a
+ # real merge.
+ ;;
+?,1,*,)
+ # We are not doing octopus, not fast-forward, and have only
+ # one common.
+ git update-index --refresh 2>/dev/null
+ case "$allow_trivial_merge,$fast_forward_only" in
+ t,)
+ # See if it is really trivial.
+ git var GIT_COMMITTER_IDENT >/dev/null || exit
+ echo "Trying really trivial in-index merge..."
+ if git read-tree --trivial -m -u -v $common $head "$1" &&
+ result_tree=$(git write-tree)
+ then
+ echo "Wonderful."
+ result_commit=$(
+ printf '%s\n' "$merge_msg" |
+ git commit-tree $result_tree -p HEAD -p "$1"
+ ) || exit
+ finish "$result_commit" "In-index merge"
+ dropsave
+ exit 0
+ fi
+ echo "Nope."
+ esac
+ ;;
+*)
+	# An octopus. If we can reach all the remotes, we are up to date.
+ up_to_date=t
+ for remote
+ do
+ common_one=$(git merge-base --all $head $remote)
+ if test "$common_one" != "$remote"
+ then
+ up_to_date=f
+ break
+ fi
+ done
+ if test "$up_to_date" = t
+ then
+ finish_up_to_date "Already up-to-date. Yeeah!"
+ exit 0
+ fi
+ ;;
+esac
+
+if test "$fast_forward_only" = t
+then
+ die "Not possible to fast-forward, aborting."
+fi
+
+# We are going to make a new commit.
+git var GIT_COMMITTER_IDENT >/dev/null || exit
+
+# At this point, we need a real merge. No matter what strategy
+# we use, it would operate on the index, possibly affecting the
+# working tree, and when resolved cleanly, have the desired tree
+# in the index -- this means that the index must be in sync with
+# the $head commit. The strategies are responsible for ensuring this.
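+# Illustrative invocation (branch name and strategy option are assumptions):
+#   git merge -s recursive -X ours topic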
+
+case "$use_strategies" in
+?*' '?*)
+ # Stash away the local changes so that we can try more than one.
+ savestate
+ single_strategy=no
+ ;;
+*)
+ rm -f "$GIT_DIR/MERGE_STASH"
+ single_strategy=yes
+ ;;
+esac
+
+result_tree= best_cnt=-1 best_strategy= wt_strategy=
+merge_was_ok=
+for strategy in $use_strategies
+do
+ test "$wt_strategy" = '' || {
+ echo "Rewinding the tree to pristine..."
+ restorestate
+ }
+ case "$single_strategy" in
+ no)
+ echo "Trying merge strategy $strategy..."
+ ;;
+ esac
+
+ # Remember which strategy left the state in the working tree
+ wt_strategy=$strategy
+
+ eval 'git-merge-$strategy '"$xopt"' $common -- "$head_arg" "$@"'
+ exit=$?
+ if test "$no_commit" = t && test "$exit" = 0
+ then
+ merge_was_ok=t
+ exit=1 ;# pretend it left conflicts.
+ fi
+
+ test "$exit" = 0 || {
+
+ # The backend exits with 1 when conflicts are left to be resolved,
+ # with 2 when it does not handle the given merge at all.
+
+ if test "$exit" -eq 1
+ then
+ cnt=`{
+ git diff-files --name-only
+ git ls-files --unmerged
+ } | wc -l`
+ if test $best_cnt -le 0 -o $cnt -le $best_cnt
+ then
+ best_strategy=$strategy
+ best_cnt=$cnt
+ fi
+ fi
+ continue
+ }
+
+ # Automerge succeeded.
+ result_tree=$(git write-tree) && break
+done
+
+# If we have a resulting tree, that means the strategy module
+# auto resolved the merge cleanly.
+if test '' != "$result_tree"
+then
+ if test "$allow_fast_forward" = "t"
+ then
+ parents=$(git merge-base --independent "$head" "$@")
+ else
+ parents=$(git rev-parse "$head" "$@")
+ fi
+ parents=$(echo "$parents" | sed -e 's/^/-p /')
+ result_commit=$(printf '%s\n' "$merge_msg" | git commit-tree $result_tree $parents) || exit
+ finish "$result_commit" "Merge made by $wt_strategy."
+ dropsave
+ exit 0
+fi
+
+# Pick the result from the best strategy and have the user fix it up.
+case "$best_strategy" in
+'')
+ restorestate
+ case "$use_strategies" in
+ ?*' '?*)
+ echo >&2 "No merge strategy handled the merge."
+ ;;
+ *)
+ echo >&2 "Merge with strategy $use_strategies failed."
+ ;;
+ esac
+ exit 2
+ ;;
+"$wt_strategy")
+ # We already have its result in the working tree.
+ ;;
+*)
+ echo "Rewinding the tree to pristine..."
+ restorestate
+ echo "Using the $best_strategy to prepare resolving by hand."
+ git-merge-$best_strategy $common -- "$head_arg" "$@"
+ ;;
+esac
+
+if test "$squash" = t
+then
+ finish
+else
+ for remote
+ do
+ echo $remote
+ done >"$GIT_DIR/MERGE_HEAD"
+ printf '%s\n' "$merge_msg" >"$GIT_DIR/MERGE_MSG" ||
+ die "Could not write to $GIT_DIR/MERGE_MSG"
+ if test "$allow_fast_forward" != t
+ then
+ printf "%s" no-ff
+ else
+ :
+ fi >"$GIT_DIR/MERGE_MODE" ||
+ die "Could not write to $GIT_DIR/MERGE_MODE"
+fi
+
+if test "$merge_was_ok" = t
+then
+ echo >&2 \
+ "Automatic merge went well; stopped before committing as requested"
+ exit 0
+else
+ {
+ echo '
+Conflicts:
+'
+ git ls-files --unmerged |
+ sed -e 's/^[^ ]* / /' |
+ uniq
+ } >>"$GIT_DIR/MERGE_MSG"
+ git rerere $rr_arg
+ die "Automatic merge failed; fix conflicts and then commit the result."
+fi
diff --git a/contrib/examples/git-notes.sh b/contrib/examples/git-notes.sh
new file mode 100755
index 0000000..e642e47
--- /dev/null
+++ b/contrib/examples/git-notes.sh
@@ -0,0 +1,121 @@
+#!/bin/sh
+
+USAGE="(edit [-F <file> | -m <msg>] | show) [commit]"
+. git-sh-setup
+
+test -z "$1" && usage
+ACTION="$1"; shift
+
+test -z "$GIT_NOTES_REF" && GIT_NOTES_REF="$(git config core.notesref)"
+test -z "$GIT_NOTES_REF" && GIT_NOTES_REF="refs/notes/commits"
+
+MESSAGE=
+while test $# != 0
+do
+ case "$1" in
+ -m)
+ test "$ACTION" = "edit" || usage
+ shift
+ if test "$#" = "0"; then
+ die "error: option -m needs an argument"
+ else
+ if [ -z "$MESSAGE" ]; then
+ MESSAGE="$1"
+ else
+ MESSAGE="$MESSAGE
+
+$1"
+ fi
+ shift
+ fi
+ ;;
+ -F)
+ test "$ACTION" = "edit" || usage
+ shift
+ if test "$#" = "0"; then
+ die "error: option -F needs an argument"
+ else
+ if [ -z "$MESSAGE" ]; then
+ MESSAGE="$(cat "$1")"
+ else
+ MESSAGE="$MESSAGE
+
+$(cat "$1")"
+ fi
+ shift
+ fi
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+done
+
+COMMIT=$(git rev-parse --verify --default HEAD "$@") ||
+die "Invalid commit: $@"
+
+case "$ACTION" in
+edit)
+ if [ "${GIT_NOTES_REF#refs/notes/}" = "$GIT_NOTES_REF" ]; then
+ die "Refusing to edit notes in $GIT_NOTES_REF (outside of refs/notes/)"
+ fi
+
+ MSG_FILE="$GIT_DIR/new-notes-$COMMIT"
+ GIT_INDEX_FILE="$MSG_FILE.idx"
+ export GIT_INDEX_FILE
+
+ trap '
+ test -f "$MSG_FILE" && rm "$MSG_FILE"
+ test -f "$GIT_INDEX_FILE" && rm "$GIT_INDEX_FILE"
+ ' 0
+
+ CURRENT_HEAD=$(git show-ref "$GIT_NOTES_REF" | cut -f 1 -d ' ')
+ if [ -z "$CURRENT_HEAD" ]; then
+ PARENT=
+ else
+ PARENT="-p $CURRENT_HEAD"
+ git read-tree "$GIT_NOTES_REF" || die "Could not read index"
+ fi
+
+ if [ -z "$MESSAGE" ]; then
+ GIT_NOTES_REF= git log -1 $COMMIT | sed "s/^/#/" > "$MSG_FILE"
+ if [ ! -z "$CURRENT_HEAD" ]; then
+ git cat-file blob :$COMMIT >> "$MSG_FILE" 2> /dev/null
+ fi
+ core_editor="$(git config core.editor)"
+ ${GIT_EDITOR:-${core_editor:-${VISUAL:-${EDITOR:-vi}}}} "$MSG_FILE"
+ else
+ echo "$MESSAGE" > "$MSG_FILE"
+ fi
+
+ grep -v ^# < "$MSG_FILE" | git stripspace > "$MSG_FILE".processed
+ mv "$MSG_FILE".processed "$MSG_FILE"
+ if [ -s "$MSG_FILE" ]; then
+ BLOB=$(git hash-object -w "$MSG_FILE") ||
+ die "Could not write into object database"
+ git update-index --add --cacheinfo 0644 $BLOB $COMMIT ||
+ die "Could not write index"
+ else
+ test -z "$CURRENT_HEAD" &&
+ die "Will not initialise with empty tree"
+ git update-index --force-remove $COMMIT ||
+ die "Could not update index"
+ fi
+
+ TREE=$(git write-tree) || die "Could not write tree"
+ NEW_HEAD=$(echo Annotate $COMMIT | git commit-tree $TREE $PARENT) ||
+ die "Could not annotate"
+ git update-ref -m "Annotate $COMMIT" \
+ "$GIT_NOTES_REF" $NEW_HEAD $CURRENT_HEAD
+;;
+show)
+ git rev-parse -q --verify "$GIT_NOTES_REF":$COMMIT > /dev/null ||
+ die "No note for commit $COMMIT."
+ git show "$GIT_NOTES_REF":$COMMIT
+;;
+*)
+ usage
+esac
diff --git a/contrib/examples/git-remote.perl b/contrib/examples/git-remote.perl
new file mode 100755
index 0000000..d42df7b
--- /dev/null
+++ b/contrib/examples/git-remote.perl
@@ -0,0 +1,474 @@
+#!/usr/bin/perl -w
+
+use strict;
+use Git;
+my $git = Git->repository();
+
+sub add_remote_config {
+ my ($hash, $name, $what, $value) = @_;
+ if ($what eq 'url') {
+ # Having more than one is Ok -- it is used for push.
+ if (! exists $hash->{'URL'}) {
+		if (! exists $hash->{$name}{'URL'}) {
+ }
+ }
+ elsif ($what eq 'fetch') {
+ $hash->{$name}{'FETCH'} ||= [];
+ push @{$hash->{$name}{'FETCH'}}, $value;
+ }
+ elsif ($what eq 'push') {
+ $hash->{$name}{'PUSH'} ||= [];
+ push @{$hash->{$name}{'PUSH'}}, $value;
+ }
+ if (!exists $hash->{$name}{'SOURCE'}) {
+ $hash->{$name}{'SOURCE'} = 'config';
+ }
+}
+
+sub add_remote_remotes {
+ my ($hash, $file, $name) = @_;
+
+ if (exists $hash->{$name}) {
+ $hash->{$name}{'WARNING'} = 'ignored due to config';
+ return;
+ }
+
+ my $fh;
+ if (!open($fh, '<', $file)) {
+ print STDERR "Warning: cannot open $file\n";
+ return;
+ }
+ my $it = { 'SOURCE' => 'remotes' };
+ $hash->{$name} = $it;
+ while (<$fh>) {
+ chomp;
+ if (/^URL:\s*(.*)$/) {
+ # Having more than one is Ok -- it is used for push.
+ if (! exists $it->{'URL'}) {
+ $it->{'URL'} = $1;
+ }
+ }
+ elsif (/^Push:\s*(.*)$/) {
+ $it->{'PUSH'} ||= [];
+ push @{$it->{'PUSH'}}, $1;
+ }
+ elsif (/^Pull:\s*(.*)$/) {
+ $it->{'FETCH'} ||= [];
+ push @{$it->{'FETCH'}}, $1;
+ }
+ elsif (/^\#/) {
+ ; # ignore
+ }
+ else {
+ print STDERR "Warning: funny line in $file: $_\n";
+ }
+ }
+ close($fh);
+}
+
+sub list_remote {
+ my ($git) = @_;
+ my %seen = ();
+ my @remotes = eval {
+ $git->command(qw(config --get-regexp), '^remote\.');
+ };
+ for (@remotes) {
+ if (/^remote\.(\S+?)\.([^.\s]+)\s+(.*)$/) {
+ add_remote_config(\%seen, $1, $2, $3);
+ }
+ }
+
+ my $dir = $git->repo_path() . "/remotes";
+ if (opendir(my $dh, $dir)) {
+ local $_;
+ while ($_ = readdir($dh)) {
+ chomp;
+ next if (! -f "$dir/$_" || ! -r _);
+ add_remote_remotes(\%seen, "$dir/$_", $_);
+ }
+ }
+
+ return \%seen;
+}
+
+sub add_branch_config {
+ my ($hash, $name, $what, $value) = @_;
+ if ($what eq 'remote') {
+ if (exists $hash->{$name}{'REMOTE'}) {
+ print STDERR "Warning: more than one branch.$name.remote\n";
+ }
+ $hash->{$name}{'REMOTE'} = $value;
+ }
+ elsif ($what eq 'merge') {
+ $hash->{$name}{'MERGE'} ||= [];
+ push @{$hash->{$name}{'MERGE'}}, $value;
+ }
+}
+
+sub list_branch {
+ my ($git) = @_;
+ my %seen = ();
+ my @branches = eval {
+ $git->command(qw(config --get-regexp), '^branch\.');
+ };
+ for (@branches) {
+ if (/^branch\.([^.]*)\.(\S*)\s+(.*)$/) {
+ add_branch_config(\%seen, $1, $2, $3);
+ }
+ }
+
+ return \%seen;
+}
+
+my $remote = list_remote($git);
+my $branch = list_branch($git);
+
+sub update_ls_remote {
+ my ($harder, $info) = @_;
+
+ return if (($harder == 0) ||
+ (($harder == 1) && exists $info->{'LS_REMOTE'}));
+
+ my @ref = map { s|refs/heads/||; $_; } keys %{$git->remote_refs($info->{'URL'}, [ 'heads' ])};
+ $info->{'LS_REMOTE'} = \@ref;
+}
+
+sub list_wildcard_mapping {
+ my ($forced, $ours, $ls) = @_;
+ my %refs;
+ for (@$ls) {
+ $refs{$_} = 01; # bit #0 to say "they have"
+ }
+ for ($git->command('for-each-ref', "refs/remotes/$ours")) {
+ chomp;
+ next unless (s|^[0-9a-f]{40}\s[a-z]+\srefs/remotes/$ours/||);
+ next if ($_ eq 'HEAD');
+ $refs{$_} ||= 0;
+ $refs{$_} |= 02; # bit #1 to say "we have"
+ }
+ my (@new, @stale, @tracked);
+ for (sort keys %refs) {
+ my $have = $refs{$_};
+ if ($have == 1) {
+ push @new, $_;
+ }
+ elsif ($have == 2) {
+ push @stale, $_;
+ }
+ elsif ($have == 3) {
+ push @tracked, $_;
+ }
+ }
+ return \@new, \@stale, \@tracked;
+}
+
+sub list_mapping {
+ my ($name, $info) = @_;
+ my $fetch = $info->{'FETCH'};
+ my $ls = $info->{'LS_REMOTE'};
+ my (@new, @stale, @tracked);
+
+ for (@$fetch) {
+ next unless (/(\+)?([^:]+):(.*)/);
+ my ($forced, $theirs, $ours) = ($1, $2, $3);
+ if ($theirs eq 'refs/heads/*' &&
+ $ours =~ /^refs\/remotes\/(.*)\/\*$/) {
+ # wildcard mapping
+ my ($w_new, $w_stale, $w_tracked)
+ = list_wildcard_mapping($forced, $1, $ls);
+ push @new, @$w_new;
+ push @stale, @$w_stale;
+ push @tracked, @$w_tracked;
+ }
+ elsif ($theirs =~ /\*/ || $ours =~ /\*/) {
+ print STDERR "Warning: unrecognized mapping in remotes.$name.fetch: $_\n";
+ }
+ elsif ($theirs =~ s|^refs/heads/||) {
+ if (!grep { $_ eq $theirs } @$ls) {
+ push @stale, $theirs;
+ }
+ elsif ($ours ne '') {
+ push @tracked, $theirs;
+ }
+ }
+ }
+ return \@new, \@stale, \@tracked;
+}
+
+sub show_mapping {
+ my ($name, $info) = @_;
+ my ($new, $stale, $tracked) = list_mapping($name, $info);
+ if (@$new) {
+ print " New remote branches (next fetch will store in remotes/$name)\n";
+ print " @$new\n";
+ }
+ if (@$stale) {
+ print " Stale tracking branches in remotes/$name (use 'git remote prune')\n";
+ print " @$stale\n";
+ }
+ if (@$tracked) {
+ print " Tracked remote branches\n";
+ print " @$tracked\n";
+ }
+}
+
+sub prune_remote {
+ my ($name, $ls_remote) = @_;
+ if (!exists $remote->{$name}) {
+ print STDERR "No such remote $name\n";
+ return 1;
+ }
+ my $info = $remote->{$name};
+ update_ls_remote($ls_remote, $info);
+
+ my ($new, $stale, $tracked) = list_mapping($name, $info);
+ my $prefix = "refs/remotes/$name";
+ foreach my $to_prune (@$stale) {
+ my @v = $git->command(qw(rev-parse --verify), "$prefix/$to_prune");
+ $git->command(qw(update-ref -d), "$prefix/$to_prune", $v[0]);
+ }
+ return 0;
+}
+
+sub show_remote {
+ my ($name, $ls_remote) = @_;
+ if (!exists $remote->{$name}) {
+ print STDERR "No such remote $name\n";
+ return 1;
+ }
+ my $info = $remote->{$name};
+ update_ls_remote($ls_remote, $info);
+
+ print "* remote $name\n";
+ print " URL: $info->{'URL'}\n";
+ for my $branchname (sort keys %$branch) {
+ next unless (defined $branch->{$branchname}{'REMOTE'} &&
+ $branch->{$branchname}{'REMOTE'} eq $name);
+ my @merged = map {
+ s|^refs/heads/||;
+ $_;
+ } split(' ',"@{$branch->{$branchname}{'MERGE'}}");
+ next unless (@merged);
+ print " Remote branch(es) merged with 'git pull' while on branch $branchname\n";
+ print " @merged\n";
+ }
+ if ($info->{'LS_REMOTE'}) {
+ show_mapping($name, $info);
+ }
+ if ($info->{'PUSH'}) {
+ my @pushed = map {
+ s|^refs/heads/||;
+ s|^\+refs/heads/|+|;
+ s|:refs/heads/|:|;
+ $_;
+ } @{$info->{'PUSH'}};
+ print " Local branch(es) pushed with 'git push'\n";
+ print " @pushed\n";
+ }
+ return 0;
+}
+
+sub add_remote {
+ my ($name, $url, $opts) = @_;
+ if (exists $remote->{$name}) {
+ print STDERR "remote $name already exists.\n";
+ exit(1);
+ }
+ $git->command('config', "remote.$name.url", $url);
+ my $track = $opts->{'track'} || ["*"];
+
+ for (@$track) {
+ $git->command('config', '--add', "remote.$name.fetch",
+ $opts->{'mirror'} ?
+ "+refs/$_:refs/$_" :
+ "+refs/heads/$_:refs/remotes/$name/$_");
+ }
+ if ($opts->{'fetch'}) {
+ $git->command('fetch', $name);
+ }
+ if (exists $opts->{'master'}) {
+ $git->command('symbolic-ref', "refs/remotes/$name/HEAD",
+ "refs/remotes/$name/$opts->{'master'}");
+ }
+}
+
+sub update_remote {
+ my ($name) = @_;
+ my @remotes;
+
+ my $conf = $git->config("remotes." . $name);
+ if (defined($conf)) {
+ @remotes = split(' ', $conf);
+ } elsif ($name eq 'default') {
+ @remotes = ();
+ for (sort keys %$remote) {
+ my $do_fetch = $git->config_bool("remote." . $_ .
+ ".skipDefaultUpdate");
+ unless ($do_fetch) {
+ push @remotes, $_;
+ }
+ }
+ } else {
+ print STDERR "Remote group $name does not exist.\n";
+ exit(1);
+ }
+ for (@remotes) {
+ print "Updating $_\n";
+ $git->command('fetch', "$_");
+ }
+}
+
+sub rm_remote {
+ my ($name) = @_;
+ if (!exists $remote->{$name}) {
+ print STDERR "No such remote $name\n";
+ return 1;
+ }
+
+ $git->command('config', '--remove-section', "remote.$name");
+
+ eval {
+ my @trackers = $git->command('config', '--get-regexp',
+ 'branch.*.remote', $name);
+ for (@trackers) {
+ /^branch\.(.*)?\.remote/;
+ $git->config('--unset', "branch.$1.remote");
+ $git->config('--unset', "branch.$1.merge");
+ }
+ };
+
+ my @refs = $git->command('for-each-ref',
+ '--format=%(refname) %(objectname)', "refs/remotes/$name");
+ for (@refs) {
+ my ($ref, $object) = split;
+ $git->command(qw(update-ref -d), $ref, $object);
+ }
+ return 0;
+}
+
+sub add_usage {
+ print STDERR "usage: git remote add [-f] [-t track]* [-m master] <name> <url>\n";
+ exit(1);
+}
+
+my $VERBOSE = 0;
+@ARGV = grep {
+ if ($_ eq '-v' or $_ eq '--verbose') {
+ $VERBOSE=1;
+ 0
+ } else {
+ 1
+ }
+} @ARGV;
+
+if (!@ARGV) {
+ for (sort keys %$remote) {
+ print "$_";
+ print "\t$remote->{$_}->{URL}" if $VERBOSE;
+ print "\n";
+ }
+}
+elsif ($ARGV[0] eq 'show') {
+ my $ls_remote = 1;
+ my $i;
+ for ($i = 1; $i < @ARGV; $i++) {
+ if ($ARGV[$i] eq '-n') {
+ $ls_remote = 0;
+ }
+ else {
+ last;
+ }
+ }
+ if ($i >= @ARGV) {
+ print STDERR "usage: git remote show <remote>\n";
+ exit(1);
+ }
+ my $status = 0;
+ for (; $i < @ARGV; $i++) {
+ $status |= show_remote($ARGV[$i], $ls_remote);
+ }
+ exit($status);
+}
+elsif ($ARGV[0] eq 'update') {
+ if (@ARGV <= 1) {
+ update_remote("default");
+ exit(1);
+ }
+ for (my $i = 1; $i < @ARGV; $i++) {
+ update_remote($ARGV[$i]);
+ }
+}
+elsif ($ARGV[0] eq 'prune') {
+ my $ls_remote = 1;
+ my $i;
+ for ($i = 1; $i < @ARGV; $i++) {
+ if ($ARGV[$i] eq '-n') {
+ $ls_remote = 0;
+ }
+ else {
+ last;
+ }
+ }
+ if ($i >= @ARGV) {
+ print STDERR "usage: git remote prune <remote>\n";
+ exit(1);
+ }
+ my $status = 0;
+ for (; $i < @ARGV; $i++) {
+ $status |= prune_remote($ARGV[$i], $ls_remote);
+ }
+ exit($status);
+}
+elsif ($ARGV[0] eq 'add') {
+ my %opts = ();
+ while (1 < @ARGV && $ARGV[1] =~ /^-/) {
+ my $opt = $ARGV[1];
+ shift @ARGV;
+ if ($opt eq '-f' || $opt eq '--fetch') {
+ $opts{'fetch'} = 1;
+ next;
+ }
+ if ($opt eq '-t' || $opt eq '--track') {
+ if (@ARGV < 1) {
+ add_usage();
+ }
+ $opts{'track'} ||= [];
+ push @{$opts{'track'}}, $ARGV[1];
+ shift @ARGV;
+ next;
+ }
+ if ($opt eq '-m' || $opt eq '--master') {
+ if ((@ARGV < 1) || exists $opts{'master'}) {
+ add_usage();
+ }
+ $opts{'master'} = $ARGV[1];
+ shift @ARGV;
+ next;
+ }
+ if ($opt eq '--mirror') {
+ $opts{'mirror'} = 1;
+ next;
+ }
+ add_usage();
+ }
+ if (@ARGV != 3) {
+ add_usage();
+ }
+ add_remote($ARGV[1], $ARGV[2], \%opts);
+}
+elsif ($ARGV[0] eq 'rm') {
+ if (@ARGV <= 1) {
+ print STDERR "usage: git remote rm <remote>\n";
+ exit(1);
+ }
+ exit(rm_remote($ARGV[1]));
+}
+else {
+ print STDERR "usage: git remote\n";
+ print STDERR " git remote add <name> <url>\n";
+ print STDERR " git remote rm <name>\n";
+ print STDERR " git remote show <name>\n";
+ print STDERR " git remote prune <name>\n";
+ print STDERR " git remote update [group]\n";
+ exit(1);
+}
diff --git a/contrib/examples/git-rerere.perl b/contrib/examples/git-rerere.perl
new file mode 100755
index 0000000..4f69209
--- /dev/null
+++ b/contrib/examples/git-rerere.perl
@@ -0,0 +1,284 @@
+#!/usr/bin/perl
+#
+# REuse REcorded REsolve. This tool records a conflicted automerge
+# result and its hand resolution, and helps to resolve future
+# automerge that results in the same conflict.
+#
+# To enable this feature, create a directory 'rr-cache' under your
+# .git/ directory.
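+# Illustrative setup and sub-commands (handled further below; paths assumed):
+#   mkdir .git/rr-cache
+#   git rerere            # record preimages / replay recorded resolutions
+#   git rerere status     # list paths with recorded conflicts
+#   git rerere gc         # expire stale records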
+
+use Digest;
+use File::Path;
+use File::Copy;
+
+my $git_dir = $::ENV{GIT_DIR} || ".git";
+my $rr_dir = "$git_dir/rr-cache";
+my $merge_rr = "$git_dir/rr-cache/MERGE_RR";
+
+my %merge_rr = ();
+
+sub read_rr {
+ if (!-f $merge_rr) {
+ %merge_rr = ();
+ return;
+ }
+ my $in;
+ local $/ = "\0";
+ open $in, "<$merge_rr" or die "$!: $merge_rr";
+ while (<$in>) {
+ chomp;
+ my ($name, $path) = /^([0-9a-f]{40})\t(.*)$/s;
+ $merge_rr{$path} = $name;
+ }
+ close $in;
+}
+
+sub write_rr {
+ my $out;
+ open $out, ">$merge_rr" or die "$!: $merge_rr";
+ for my $path (sort keys %merge_rr) {
+ my $name = $merge_rr{$path};
+ print $out "$name\t$path\0";
+ }
+ close $out;
+}
+
+sub compute_conflict_name {
+ my ($path) = @_;
+ my @side = ();
+ my $in;
+ open $in, "<$path" or die "$!: $path";
+
+ my $sha1 = Digest->new("SHA-1");
+ my $hunk = 0;
+ while (<$in>) {
+ if (/^<<<<<<< .*/) {
+ $hunk++;
+ @side = ([], undef);
+ }
+ elsif (/^=======$/) {
+ $side[1] = [];
+ }
+ elsif (/^>>>>>>> .*/) {
+ my ($one, $two);
+ $one = join('', @{$side[0]});
+ $two = join('', @{$side[1]});
+ if ($two le $one) {
+ ($one, $two) = ($two, $one);
+ }
+ $sha1->add($one);
+ $sha1->add("\0");
+ $sha1->add($two);
+ $sha1->add("\0");
+ @side = ();
+ }
+ elsif (@side == 0) {
+ next;
+ }
+ elsif (defined $side[1]) {
+ push @{$side[1]}, $_;
+ }
+ else {
+ push @{$side[0]}, $_;
+ }
+ }
+ close $in;
+ return ($sha1->hexdigest, $hunk);
+}
+
+sub record_preimage {
+ my ($path, $name) = @_;
+ my @side = ();
+ my ($in, $out);
+ open $in, "<$path" or die "$!: $path";
+ open $out, ">$name" or die "$!: $name";
+
+ while (<$in>) {
+ if (/^<<<<<<< .*/) {
+ @side = ([], undef);
+ }
+ elsif (/^=======$/) {
+ $side[1] = [];
+ }
+ elsif (/^>>>>>>> .*/) {
+ my ($one, $two);
+ $one = join('', @{$side[0]});
+ $two = join('', @{$side[1]});
+ if ($two le $one) {
+ ($one, $two) = ($two, $one);
+ }
+ print $out "<<<<<<<\n";
+ print $out $one;
+ print $out "=======\n";
+ print $out $two;
+ print $out ">>>>>>>\n";
+ @side = ();
+ }
+ elsif (@side == 0) {
+ print $out $_;
+ }
+ elsif (defined $side[1]) {
+ push @{$side[1]}, $_;
+ }
+ else {
+ push @{$side[0]}, $_;
+ }
+ }
+ close $out;
+ close $in;
+}
+
+sub find_conflict {
+ my $in;
+ local $/ = "\0";
+ my $pid = open($in, '-|');
+ die "$!" unless defined $pid;
+ if (!$pid) {
+ exec(qw(git ls-files -z -u)) or die "$!: ls-files";
+ }
+ my %path = ();
+ my @path = ();
+ while (<$in>) {
+ chomp;
+ my ($mode, $sha1, $stage, $path) =
+ /^([0-7]+) ([0-9a-f]{40}) ([123])\t(.*)$/s;
+ $path{$path} |= (1 << $stage);
+ }
+ close $in;
+ while (my ($path, $status) = each %path) {
+ if ($status == 14) { push @path, $path; }
+ }
+ return @path;
+}
+
+sub merge {
+ my ($name, $path) = @_;
+ record_preimage($path, "$rr_dir/$name/thisimage");
+ unless (system('git', 'merge-file', map { "$rr_dir/$name/${_}image" }
+ qw(this pre post))) {
+ my $in;
+ open $in, "<$rr_dir/$name/thisimage" or
+ die "$!: $name/thisimage";
+ my $out;
+ open $out, ">$path" or die "$!: $path";
+ while (<$in>) { print $out $_; }
+ close $in;
+ close $out;
+ return 1;
+ }
+ return 0;
+}
+
+sub garbage_collect_rerere {
+ # We should allow specifying these from the command line and
+ # that is why the caller gives @ARGV to us, but I am lazy.
+
+ my $cutoff_noresolve = 15; # two weeks
+ my $cutoff_resolve = 60; # two months
+ my @to_remove;
+ while (<$rr_dir/*/preimage>) {
+ my ($dir) = /^(.*)\/preimage$/;
+ my $cutoff = ((-f "$dir/postimage")
+ ? $cutoff_resolve
+ : $cutoff_noresolve);
+ my $age = -M "$_";
+ if ($cutoff <= $age) {
+ push @to_remove, $dir;
+ }
+ }
+ if (@to_remove) {
+ rmtree(\@to_remove);
+ }
+}
+
+-d "$rr_dir" || exit(0);
+
+read_rr();
+
+if (@ARGV) {
+ my $arg = shift @ARGV;
+ if ($arg eq 'clear') {
+ for my $path (keys %merge_rr) {
+ my $name = $merge_rr{$path};
+ if (-d "$rr_dir/$name" &&
+ ! -f "$rr_dir/$name/postimage") {
+ rmtree(["$rr_dir/$name"]);
+ }
+ }
+ unlink $merge_rr;
+ }
+ elsif ($arg eq 'status') {
+ for my $path (keys %merge_rr) {
+ print $path, "\n";
+ }
+ }
+ elsif ($arg eq 'diff') {
+ for my $path (keys %merge_rr) {
+ my $name = $merge_rr{$path};
+ system('diff', ((@ARGV == 0) ? ('-u') : @ARGV),
+ '-L', "a/$path", '-L', "b/$path",
+ "$rr_dir/$name/preimage", $path);
+ }
+ }
+ elsif ($arg eq 'gc') {
+ garbage_collect_rerere(@ARGV);
+ }
+ else {
+ die "$0 unknown command: $arg\n";
+ }
+ exit 0;
+}
+
+my %conflict = map { $_ => 1 } find_conflict();
+
+# MERGE_RR records paths with conflicts immediately after merge
+# failed. Some of the conflicted paths might have been hand resolved
+# in the working tree since then, but the initial run would catch all
+# and register their preimages.
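+# Each record in MERGE_RR pairs the 40-hex conflict id with the path,
+# separated by a tab and terminated by NUL (see read_rr/write_rr above).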
+
+for my $path (keys %conflict) {
+ # This path has conflict. If it is not recorded yet,
+ # record the pre-image.
+ if (!exists $merge_rr{$path}) {
+ my ($name, $hunk) = compute_conflict_name($path);
+ next unless ($hunk);
+ $merge_rr{$path} = $name;
+ if (! -d "$rr_dir/$name") {
+ mkpath("$rr_dir/$name", 0, 0777);
+ print STDERR "Recorded preimage for '$path'\n";
+ record_preimage($path, "$rr_dir/$name/preimage");
+ }
+ }
+}
+
+# Now some of the paths that had conflicts earlier might have been
+# hand resolved. Others may be similar to a conflict that was
+# resolved before.
+
+for my $path (keys %merge_rr) {
+ my $name = $merge_rr{$path};
+
+ # We could resolve this automatically if we have images.
+ if (-f "$rr_dir/$name/preimage" &&
+ -f "$rr_dir/$name/postimage") {
+ if (merge($name, $path)) {
+ print STDERR "Resolved '$path' using previous resolution.\n";
+ # Then we do not have to worry about this path
+ # anymore.
+ delete $merge_rr{$path};
+ next;
+ }
+ }
+
+ # Let's see if we have resolved it.
+ (undef, my $hunk) = compute_conflict_name($path);
+ next if ($hunk);
+
+ print STDERR "Recorded resolution for '$path'.\n";
+ copy($path, "$rr_dir/$name/postimage");
+ # And we do not have to worry about this path anymore.
+ delete $merge_rr{$path};
+}
+
+# Write out the rest.
+write_rr();
diff --git a/contrib/examples/git-reset.sh b/contrib/examples/git-reset.sh
new file mode 100755
index 0000000..bafeb52
--- /dev/null
+++ b/contrib/examples/git-reset.sh
@@ -0,0 +1,106 @@
+#!/bin/sh
+#
+# Copyright (c) 2005, 2006 Linus Torvalds and Junio C Hamano
+#
+USAGE='[--mixed | --soft | --hard] [<commit-ish>] [ [--] <paths>...]'
+SUBDIRECTORY_OK=Yes
+. git-sh-setup
+set_reflog_action "reset $*"
+require_work_tree
+
+update= reset_type=--mixed
+unset rev
+
+while test $# != 0
+do
+ case "$1" in
+ --mixed | --soft | --hard)
+ reset_type="$1"
+ ;;
+ --)
+ break
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ rev=$(git rev-parse --verify "$1") || exit
+ shift
+ break
+ ;;
+ esac
+ shift
+done
+
+: ${rev=HEAD}
+rev=$(git rev-parse --verify $rev^0) || exit
+
+# Skip -- in "git reset HEAD -- foo" and "git reset -- foo".
+case "$1" in --) shift ;; esac
+
+# git reset --mixed tree [--] paths... can be used to
+# load chosen paths from the tree into the index without
+# affecting the working tree or HEAD.
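+# Illustrative invocation (the path "Makefile" is an assumption):
+#   git reset HEAD -- Makefile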
+if test $# != 0
+then
+ test "$reset_type" = "--mixed" ||
+ die "Cannot do partial $reset_type reset."
+
+ git diff-index --cached $rev -- "$@" |
+ sed -e 's/^:\([0-7][0-7]*\) [0-7][0-7]* \([0-9a-f][0-9a-f]*\) [0-9a-f][0-9a-f]* [A-Z] \(.*\)$/\1 \2 \3/' |
+ git update-index --add --remove --index-info || exit
+ git update-index --refresh
+ exit
+fi
+
+cd_to_toplevel
+
+if test "$reset_type" = "--hard"
+then
+ update=-u
+fi
+
+# Soft reset does not touch the index file or the working tree
+# at all, but requires them to be in a good order. Other resets reset
+# the index file to the tree object we are switching to.
+if test "$reset_type" = "--soft"
+then
+ if test -f "$GIT_DIR/MERGE_HEAD" ||
+ test "" != "$(git ls-files --unmerged)"
+ then
+ die "Cannot do a soft reset in the middle of a merge."
+ fi
+else
+ git read-tree -v --reset $update "$rev" || exit
+fi
+
+# Any resets update HEAD to the head being switched to.
+if orig=$(git rev-parse --verify HEAD 2>/dev/null)
+then
+ echo "$orig" >"$GIT_DIR/ORIG_HEAD"
+else
+ rm -f "$GIT_DIR/ORIG_HEAD"
+fi
+git update-ref -m "$GIT_REFLOG_ACTION" HEAD "$rev"
+update_ref_status=$?
+
+case "$reset_type" in
+--hard )
+ test $update_ref_status = 0 && {
+ printf "HEAD is now at "
+ GIT_PAGER= git log --max-count=1 --pretty=oneline \
+ --abbrev-commit HEAD
+ }
+ ;;
+--soft )
+ ;; # Nothing else to do
+--mixed )
+ # Report what has not been updated.
+ git update-index --refresh
+ ;;
+esac
+
+rm -f "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/rr-cache/MERGE_RR" \
+ "$GIT_DIR/SQUASH_MSG" "$GIT_DIR/MERGE_MSG"
+
+exit $update_ref_status
diff --git a/contrib/examples/git-resolve.sh b/contrib/examples/git-resolve.sh
new file mode 100755
index 0000000..8f98142
--- /dev/null
+++ b/contrib/examples/git-resolve.sh
@@ -0,0 +1,112 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Linus Torvalds
+#
+# Resolve two trees.
+#
+
+echo 'WARNING: This command is DEPRECATED and will be removed very soon.' >&2
+echo 'WARNING: Please use git-merge or git-pull instead.' >&2
+sleep 2
+
+USAGE='<head> <remote> <merge-message>'
+. git-sh-setup
+
+dropheads() {
+ rm -f -- "$GIT_DIR/MERGE_HEAD" \
+ "$GIT_DIR/LAST_MERGE" || exit 1
+}
+
+head=$(git rev-parse --verify "$1"^0) &&
+merge=$(git rev-parse --verify "$2"^0) &&
+merge_name="$2" &&
+merge_msg="$3" || usage
+
+#
+# The remote name is just used for the message,
+# but we do want it.
+#
+if [ -z "$head" -o -z "$merge" -o -z "$merge_msg" ]; then
+ usage
+fi
+
+dropheads
+echo $head > "$GIT_DIR"/ORIG_HEAD
+echo $merge > "$GIT_DIR"/LAST_MERGE
+
+common=$(git merge-base $head $merge)
+if [ -z "$common" ]; then
+ die "Unable to find common commit between" $merge $head
+fi
+
+case "$common" in
+"$merge")
+ echo "Already up-to-date. Yeeah!"
+ dropheads
+ exit 0
+ ;;
+"$head")
+ echo "Updating $(git rev-parse --short $head)..$(git rev-parse --short $merge)"
+ git read-tree -u -m $head $merge || exit 1
+ git update-ref -m "resolve $merge_name: Fast-forward" \
+ HEAD "$merge" "$head"
+ git diff-tree -p $head $merge | git apply --stat
+ dropheads
+ exit 0
+ ;;
+esac
+
+# We are going to make a new commit.
+git var GIT_COMMITTER_IDENT >/dev/null || exit
+
+# Find an optimum merge base if there are more than one candidates.
+LF='
+'
+common=$(git merge-base -a $head $merge)
+case "$common" in
+?*"$LF"?*)
+ echo "Trying to find the optimum merge base."
+ G=.tmp-index$$
+ best=
+ best_cnt=-1
+ for c in $common
+ do
+ rm -f $G
+ GIT_INDEX_FILE=$G git read-tree -m $c $head $merge \
+ 2>/dev/null || continue
+ # Count the paths that are unmerged.
+ cnt=`GIT_INDEX_FILE=$G git ls-files --unmerged | wc -l`
+ if test $best_cnt -le 0 -o $cnt -le $best_cnt
+ then
+ best=$c
+ best_cnt=$cnt
+ if test "$best_cnt" -eq 0
+ then
+ # Cannot do any better than all trivial merge.
+ break
+ fi
+ fi
+ done
+ rm -f $G
+ common="$best"
+esac
+
+echo "Trying to merge $merge into $head using $common."
+git update-index --refresh 2>/dev/null
+git read-tree -u -m $common $head $merge || exit 1
+result_tree=$(git write-tree 2> /dev/null)
+if [ $? -ne 0 ]; then
+ echo "Simple merge failed, trying Automatic merge"
+ git-merge-index -o git-merge-one-file -a
+ if [ $? -ne 0 ]; then
+ echo $merge > "$GIT_DIR"/MERGE_HEAD
+ die "Automatic merge failed, fix up by hand"
+ fi
+ result_tree=$(git write-tree) || exit 1
+fi
+result_commit=$(echo "$merge_msg" | git commit-tree $result_tree -p $head -p $merge)
+echo "Committed merge $result_commit"
+git update-ref -m "resolve $merge_name: In-index merge" \
+ HEAD "$result_commit" "$head"
+git diff-tree -p $head $result_commit | git apply --stat
+dropheads
diff --git a/contrib/examples/git-revert.sh b/contrib/examples/git-revert.sh
new file mode 100755
index 0000000..6bf155c
--- /dev/null
+++ b/contrib/examples/git-revert.sh
@@ -0,0 +1,207 @@
+#!/bin/sh
+#
+# Copyright (c) 2005 Linus Torvalds
+# Copyright (c) 2005 Junio C Hamano
+#
+
+case "$0" in
+*-revert* )
+ test -t 0 && edit=-e
+ replay=
+ me=revert
+ USAGE='[--edit | --no-edit] [-n] <commit-ish>' ;;
+*-cherry-pick* )
+ replay=t
+ edit=
+ me=cherry-pick
+ USAGE='[--edit] [-n] [-r] [-x] <commit-ish>' ;;
+* )
+ echo >&2 "What are you talking about?"
+ exit 1 ;;
+esac
+
+SUBDIRECTORY_OK=Yes ;# we will cd up
+. git-sh-setup
+require_work_tree
+cd_to_toplevel
+
+no_commit=
+xopt=
+while case "$#" in 0) break ;; esac
+do
+ case "$1" in
+ -n|--n|--no|--no-|--no-c|--no-co|--no-com|--no-comm|\
+ --no-commi|--no-commit)
+ no_commit=t
+ ;;
+ -e|--e|--ed|--edi|--edit)
+ edit=-e
+ ;;
+ --n|--no|--no-|--no-e|--no-ed|--no-edi|--no-edit)
+ edit=
+ ;;
+ -r)
+ : no-op ;;
+ -x|--i-really-want-to-expose-my-private-commit-object-name)
+ replay=
+ ;;
+ -X?*)
+ xopt="$xopt$(git rev-parse --sq-quote "--${1#-X}")"
+ ;;
+ --strategy-option=*)
+ xopt="$xopt$(git rev-parse --sq-quote "--${1#--strategy-option=}")"
+ ;;
+ -X|--strategy-option)
+ shift
+ xopt="$xopt$(git rev-parse --sq-quote "--$1")"
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+done
+
+set_reflog_action "$me"
+
+test "$me,$replay" = "revert,t" && usage
+
+case "$no_commit" in
+t)
+ # We do not intend to commit immediately. We just want to
+ # merge the differences in.
+ head=$(git-write-tree) ||
+ die "Your index file is unmerged."
+ ;;
+*)
+ head=$(git-rev-parse --verify HEAD) ||
+ die "You do not have a valid HEAD"
+ files=$(git-diff-index --cached --name-only $head) || exit
+ if [ "$files" ]; then
+ die "Dirty index: cannot $me (dirty: $files)"
+ fi
+ ;;
+esac
+
+rev=$(git-rev-parse --verify "$@") &&
+commit=$(git-rev-parse --verify "$rev^0") ||
+ die "Not a single commit $@"
+prev=$(git-rev-parse --verify "$commit^1" 2>/dev/null) ||
+ die "Cannot run $me on a root commit"
+git-rev-parse --verify "$commit^2" >/dev/null 2>&1 &&
+ die "Cannot run $me on a multi-parent commit."
+
+encoding=$(git config i18n.commitencoding || echo UTF-8)
+
+# "commit" is an existing commit. We would want to apply
+# the difference it introduces since its first parent "prev"
+# on top of the current HEAD if we are cherry-pick. Or the
+# reverse of it if we are revert.
+
+case "$me" in
+revert)
+ git show -s --pretty=oneline --encoding="$encoding" $commit |
+ sed -e '
+ s/^[^ ]* /Revert "/
+ s/$/"/
+ '
+ echo
+ echo "This reverts commit $commit."
+ test "$rev" = "$commit" ||
+ echo "(original 'git revert' arguments: $@)"
+ base=$commit next=$prev
+ ;;
+
+cherry-pick)
+ pick_author_script='
+ /^author /{
+ s/'\''/'\''\\'\'\''/g
+ h
+ s/^author \([^<]*\) <[^>]*> .*$/\1/
+ s/'\''/'\''\'\'\''/g
+ s/.*/GIT_AUTHOR_NAME='\''&'\''/p
+
+ g
+ s/^author [^<]* <\([^>]*\)> .*$/\1/
+ s/'\''/'\''\'\'\''/g
+ s/.*/GIT_AUTHOR_EMAIL='\''&'\''/p
+
+ g
+ s/^author [^<]* <[^>]*> \(.*\)$/\1/
+ s/'\''/'\''\'\'\''/g
+ s/.*/GIT_AUTHOR_DATE='\''&'\''/p
+
+ q
+ }'
+
+ logmsg=`git show -s --pretty=raw --encoding="$encoding" "$commit"`
+ set_author_env=`echo "$logmsg" |
+ LANG=C LC_ALL=C sed -ne "$pick_author_script"`
+ eval "$set_author_env"
+ export GIT_AUTHOR_NAME
+ export GIT_AUTHOR_EMAIL
+ export GIT_AUTHOR_DATE
+
+ echo "$logmsg" |
+ sed -e '1,/^$/d' -e 's/^ //'
+ case "$replay" in
+ '')
+ echo "(cherry picked from commit $commit)"
+ test "$rev" = "$commit" ||
+ echo "(original 'git cherry-pick' arguments: $@)"
+ ;;
+ esac
+ base=$prev next=$commit
+ ;;
+
+esac >.msg
+
+eval GITHEAD_$head=HEAD
+eval GITHEAD_$next='`git show -s \
+ --pretty=oneline --encoding="$encoding" "$commit" |
+ sed -e "s/^[^ ]* //"`'
+export GITHEAD_$head GITHEAD_$next
+
+# This three way merge is an interesting one. We are at
+# $head, and would want to apply the change between $commit
+# and $prev on top of us (when reverting), or the change between
+# $prev and $commit on top of us (when cherry-picking or replaying).
+
+eval "git merge-recursive $xopt $base -- $head $next" &&
+result=$(git-write-tree 2>/dev/null) || {
+ mv -f .msg "$GIT_DIR/MERGE_MSG"
+ {
+ echo '
+Conflicts:
+'
+ git ls-files --unmerged |
+ sed -e 's/^[^ ]* / /' |
+ uniq
+ } >>"$GIT_DIR/MERGE_MSG"
+ echo >&2 "Automatic $me failed. After resolving the conflicts,"
+ echo >&2 "mark the corrected paths with 'git-add <paths>'"
+ echo >&2 "and commit the result."
+ case "$me" in
+ cherry-pick)
+ echo >&2 "You may choose to use the following when making"
+ echo >&2 "the commit:"
+ echo >&2 "$set_author_env"
+ esac
+ exit 1
+}
+
+# If we are cherry-pick, and if the merge did not result in
+# hand-editing, we will hit this commit and inherit the original
+# author date and name.
+# If we are revert, or if our cherry-pick results in a hand merge,
+# we had better say that the current user is responsible for that.
+
+case "$no_commit" in
+'')
+ git-commit -n -F .msg $edit
+ rm -f .msg
+ ;;
+esac
diff --git a/contrib/examples/git-svnimport.perl b/contrib/examples/git-svnimport.perl
new file mode 100755
index 0000000..c414f0d
--- /dev/null
+++ b/contrib/examples/git-svnimport.perl
@@ -0,0 +1,976 @@
+#!/usr/bin/perl
+
+# This tool is copyright (c) 2005, Matthias Urlichs.
+# It is released under the Gnu Public License, version 2.
+#
+# The basic idea is to pull and analyze SVN changes.
+#
+# Checking out the files is done by a single long-running SVN connection.
+#
+# The head revision is on branch "origin" by default.
+# You can change that with the '-o' option.
+
+use strict;
+use warnings;
+use Getopt::Std;
+use File::Copy;
+use File::Spec;
+use File::Temp qw(tempfile);
+use File::Path qw(mkpath);
+use File::Basename qw(basename dirname);
+use Time::Local;
+use IO::Pipe;
+use POSIX qw(strftime dup2);
+use IPC::Open2;
+use SVN::Core;
+use SVN::Ra;
+
+die "Need SVN::Core 1.2.1 or better" if $SVN::Core::VERSION lt "1.2.1";
+
+$SIG{'PIPE'}="IGNORE";
+$ENV{'TZ'}="UTC";
+
+our($opt_h,$opt_o,$opt_v,$opt_u,$opt_C,$opt_i,$opt_m,$opt_M,$opt_t,$opt_T,
+ $opt_b,$opt_r,$opt_I,$opt_A,$opt_s,$opt_l,$opt_d,$opt_D,$opt_S,$opt_F,
+ $opt_P,$opt_R);
+
+sub usage() {
+ print STDERR <<END;
+usage: ${\basename $0} # fetch/update GIT from SVN
+ [-o branch-for-HEAD] [-h] [-v] [-l max_rev] [-R repack_each_revs]
+ [-C GIT_repository] [-t tagname] [-T trunkname] [-b branchname]
+ [-d|-D] [-i] [-u] [-r] [-I ignorefilename] [-s start_chg]
+ [-m] [-M regex] [-A author_file] [-S] [-F] [-P project_name] [SVN_URL]
+END
+ exit(1);
+}
+
+getopts("A:b:C:dDFhiI:l:mM:o:rs:t:T:SP:R:uv") or usage();
+usage if $opt_h;
+
+my $tag_name = $opt_t || "tags";
+my $trunk_name = defined $opt_T ? $opt_T : "trunk";
+my $branch_name = $opt_b || "branches";
+my $project_name = $opt_P || "";
+$project_name = "/" . $project_name if ($project_name);
+my $repack_after = $opt_R || 1000;
+my $root_pool = SVN::Pool->new_default;
+
+@ARGV == 1 or @ARGV == 2 or usage();
+
+$opt_o ||= "origin";
+$opt_s ||= 1;
+my $git_tree = $opt_C;
+$git_tree ||= ".";
+
+my $svn_url = $ARGV[0];
+my $svn_dir = $ARGV[1];
+
+our @mergerx = ();
+if ($opt_m) {
+ my $branch_esc = quotemeta ($branch_name);
+ my $trunk_esc = quotemeta ($trunk_name);
+ @mergerx =
+ (
+ qr!\b(?:merg(?:ed?|ing))\b.*?\b((?:(?<=$branch_esc/)[\w\.\-]+)|(?:$trunk_esc))\b!i,
+ qr!\b(?:from|of)\W+((?:(?<=$branch_esc/)[\w\.\-]+)|(?:$trunk_esc))\b!i,
+ qr!\b(?:from|of)\W+(?:the )?([\w\.\-]+)[-\s]branch\b!i
+ );
+}
+if ($opt_M) {
+ unshift (@mergerx, qr/$opt_M/);
+}
+
+# Absolutize filename now, since we will have chdir'ed by the time we
+# get around to opening it.
+$opt_A = File::Spec->rel2abs($opt_A) if $opt_A;
+
+our %users = ();
+our $users_file = undef;
+sub read_users($) {
+ $users_file = File::Spec->rel2abs(@_);
+ die "Cannot open $users_file\n" unless -f $users_file;
+ open(my $authors,$users_file);
+ while(<$authors>) {
+ chomp;
+ next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
+ (my $user,my $name,my $email) = ($1,$2,$3);
+ $users{$user} = [$name,$email];
+ }
+ close($authors);
+}
+
+select(STDERR); $|=1; select(STDOUT);
+
+
+package SVNconn;
+# Basic SVN connection.
+# We're only interested in connecting and downloading, so ...
+
+use File::Spec;
+use File::Temp qw(tempfile);
+use POSIX qw(strftime dup2);
+use Fcntl qw(SEEK_SET);
+
+sub new {
+ my($what,$repo) = @_;
+ $what=ref($what) if ref($what);
+
+ my $self = {};
+ $self->{'buffer'} = "";
+ bless($self,$what);
+
+ $repo =~ s#/+$##;
+ $self->{'fullrep'} = $repo;
+ $self->conn();
+
+ return $self;
+}
+
+sub conn {
+ my $self = shift;
+ my $repo = $self->{'fullrep'};
+ my $auth = SVN::Core::auth_open ([SVN::Client::get_simple_provider,
+ SVN::Client::get_ssl_server_trust_file_provider,
+ SVN::Client::get_username_provider]);
+ my $s = SVN::Ra->new(url => $repo, auth => $auth, pool => $root_pool);
+ die "SVN connection to $repo: $!\n" unless defined $s;
+ $self->{'svn'} = $s;
+ $self->{'repo'} = $repo;
+ $self->{'maxrev'} = $s->get_latest_revnum();
+}
+
+sub file {
+ my($self,$path,$rev) = @_;
+
+ my ($fh, $name) = tempfile('gitsvn.XXXXXX',
+ DIR => File::Spec->tmpdir(), UNLINK => 1);
+
+ print "... $rev $path ...\n" if $opt_v;
+ my (undef, $properties);
+ $path =~ s#^/*##;
+ my $subpool = SVN::Pool::new_default_sub;
+ eval { (undef, $properties)
+ = $self->{'svn'}->get_file($path,$rev,$fh); };
+ if($@) {
+ return undef if $@ =~ /Attempted to get checksum/;
+ die $@;
+ }
+ my $mode;
+ if (exists $properties->{'svn:executable'}) {
+ $mode = '100755';
+ } elsif (exists $properties->{'svn:special'}) {
+ my ($special_content, $filesize);
+ $filesize = tell $fh;
+ seek $fh, 0, SEEK_SET;
+ read $fh, $special_content, $filesize;
+ if ($special_content =~ s/^link //) {
+ $mode = '120000';
+ seek $fh, 0, SEEK_SET;
+ truncate $fh, 0;
+ print $fh $special_content;
+ } else {
+ die "unexpected svn:special file encountered";
+ }
+ } else {
+ $mode = '100644';
+ }
+ close ($fh);
+
+ return ($name, $mode);
+}
+
+sub ignore {
+ my($self,$path,$rev) = @_;
+
+ print "... $rev $path ...\n" if $opt_v;
+ $path =~ s#^/*##;
+ my $subpool = SVN::Pool::new_default_sub;
+ my (undef,undef,$properties)
+ = $self->{'svn'}->get_dir($path,$rev,undef);
+ if (exists $properties->{'svn:ignore'}) {
+ my ($fh, $name) = tempfile('gitsvn.XXXXXX',
+ DIR => File::Spec->tmpdir(),
+ UNLINK => 1);
+ print $fh $properties->{'svn:ignore'};
+ close($fh);
+ return $name;
+ } else {
+ return undef;
+ }
+}
+
+sub dir_list {
+ my($self,$path,$rev) = @_;
+ $path =~ s#^/*##;
+ my $subpool = SVN::Pool::new_default_sub;
+ my ($dirents,undef,$properties)
+ = $self->{'svn'}->get_dir($path,$rev,undef);
+ return $dirents;
+}
+
+package main;
+use URI;
+
+our $svn = $svn_url;
+$svn .= "/$svn_dir" if defined $svn_dir;
+my $svn2 = SVNconn->new($svn);
+$svn = SVNconn->new($svn);
+
+my $lwp_ua;
+if($opt_d or $opt_D) {
+ $svn_url = URI->new($svn_url)->canonical;
+ if($opt_D) {
+ $svn_dir =~ s#/*$#/#;
+ } else {
+ $svn_dir = "";
+ }
+ if ($svn_url->scheme eq "http") {
+ use LWP::UserAgent;
+ $lwp_ua = LWP::UserAgent->new(keep_alive => 1, requests_redirectable => []);
+ } else {
+ print STDERR "Warning: not HTTP; turning off direct file access\n";
+ $opt_d=0;
+ }
+}
+
+sub pdate($) {
+ my($d) = @_;
+ $d =~ m#(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)#
+ or die "Unparseable date: $d\n";
+ my $y=$1; $y-=1900 if $y>1900;
+ return timegm($6||0,$5,$4,$3,$2-1,$y);
+}
+
+sub getwd() {
+ my $pwd = `pwd`;
+ chomp $pwd;
+ return $pwd;
+}
+
+
+sub get_headref($$) {
+ my $name = shift;
+ my $git_dir = shift;
+ my $sha;
+
+ if (open(C,"$git_dir/refs/heads/$name")) {
+ chomp($sha = <C>);
+ close(C);
+ length($sha) == 40
+ or die "Cannot get head id for $name ($sha): $!\n";
+ }
+ return $sha;
+}
+
+
+-d $git_tree
+ or mkdir($git_tree,0777)
+ or die "Could not create $git_tree: $!";
+chdir($git_tree);
+
+my $orig_branch = "";
+my $forward_master = 0;
+my %branches;
+
+my $git_dir = $ENV{"GIT_DIR"} || ".git";
+$git_dir = getwd()."/".$git_dir unless $git_dir =~ m#^/#;
+$ENV{"GIT_DIR"} = $git_dir;
+my $orig_git_index;
+$orig_git_index = $ENV{GIT_INDEX_FILE} if exists $ENV{GIT_INDEX_FILE};
+my ($git_ih, $git_index) = tempfile('gitXXXXXX', SUFFIX => '.idx',
+ DIR => File::Spec->tmpdir());
+close ($git_ih);
+$ENV{GIT_INDEX_FILE} = $git_index;
+my $maxnum = 0;
+my $last_rev = "";
+my $last_branch;
+my $current_rev = $opt_s || 1;
+unless(-d $git_dir) {
+ system("git init");
+ die "Cannot init the GIT db at $git_tree: $?\n" if $?;
+ system("git read-tree --empty");
+ die "Cannot init an empty tree: $?\n" if $?;
+
+ $last_branch = $opt_o;
+ $orig_branch = "";
+} else {
+ -f "$git_dir/refs/heads/$opt_o"
+ or die "Branch '$opt_o' does not exist.\n".
+ "Either use the correct '-o branch' option,\n".
+ "or import to a new repository.\n";
+
+ -f "$git_dir/svn2git"
+ or die "'$git_dir/svn2git' does not exist.\n".
+ "You need that file for incremental imports.\n";
+ open(F, "git symbolic-ref HEAD |") or
+ die "Cannot run git-symbolic-ref: $!\n";
+ chomp ($last_branch = <F>);
+ $last_branch = basename($last_branch);
+ close(F);
+ unless($last_branch) {
+ warn "Cannot read the last branch name: $! -- assuming 'master'\n";
+ $last_branch = "master";
+ }
+ $orig_branch = $last_branch;
+ $last_rev = get_headref($orig_branch, $git_dir);
+ if (-f "$git_dir/SVN2GIT_HEAD") {
+ die <<EOM;
+SVN2GIT_HEAD exists.
+Make sure your working directory corresponds to HEAD and remove SVN2GIT_HEAD.
+You may need to run
+
+ git-read-tree -m -u SVN2GIT_HEAD HEAD
+EOM
+ }
+ system('cp', "$git_dir/HEAD", "$git_dir/SVN2GIT_HEAD");
+
+ $forward_master =
+ $opt_o ne 'master' && -f "$git_dir/refs/heads/master" &&
+ system('cmp', '-s', "$git_dir/refs/heads/master",
+ "$git_dir/refs/heads/$opt_o") == 0;
+
+ # populate index
+ system('git', 'read-tree', $last_rev);
+ die "read-tree failed: $?\n" if $?;
+
+ # Get the last import timestamps
+ open my $B,"<", "$git_dir/svn2git";
+ while(<$B>) {
+ chomp;
+ my($num,$branch,$ref) = split;
+ $branches{$branch}{$num} = $ref;
+ $branches{$branch}{"LAST"} = $ref;
+ $current_rev = $num+1 if $current_rev <= $num;
+ }
+ close($B);
+}
+-d $git_dir
+ or die "Could not create git subdir ($git_dir).\n";
+
+my $default_authors = "$git_dir/svn-authors";
+if ($opt_A) {
+ read_users($opt_A);
+ copy($opt_A,$default_authors) or die "Copy failed: $!";
+} else {
+ read_users($default_authors) if -f $default_authors;
+}
+
+open BRANCHES,">>", "$git_dir/svn2git";
+
+sub node_kind($$) {
+ my ($svnpath, $revision) = @_;
+ $svnpath =~ s#^/*##;
+ my $subpool = SVN::Pool::new_default_sub;
+ my $kind = $svn->{'svn'}->check_path($svnpath,$revision);
+ return $kind;
+}
+
+sub get_file($$$) {
+ my($svnpath,$rev,$path) = @_;
+
+ # now get it
+ my ($name,$mode);
+ if($opt_d) {
+ my($req,$res);
+
+ # /svn/!svn/bc/2/django/trunk/django-docs/build.py
+ my $url=$svn_url->clone();
+ $url->path($url->path."/!svn/bc/$rev/$svn_dir$svnpath");
+ print "... $path...\n" if $opt_v;
+ $req = HTTP::Request->new(GET => $url);
+ $res = $lwp_ua->request($req);
+ if ($res->is_success) {
+ my $fh;
+ ($fh, $name) = tempfile('gitsvn.XXXXXX',
+ DIR => File::Spec->tmpdir(), UNLINK => 1);
+ print $fh $res->content;
+ close($fh) or die "Could not write $name: $!\n";
+ } else {
+ return undef if $res->code == 301; # directory?
+ die $res->status_line." at $url\n";
+ }
+ $mode = '0644'; # can't obtain mode via direct http request?
+ } else {
+ ($name,$mode) = $svn->file("$svnpath",$rev);
+ return undef unless defined $name;
+ }
+
+ my $pid = open(my $F, '-|');
+ die $! unless defined $pid;
+ if (!$pid) {
+ exec("git", "hash-object", "-w", $name)
+ or die "Cannot create object: $!\n";
+ }
+ my $sha = <$F>;
+ chomp $sha;
+ close $F;
+ unlink $name;
+ return [$mode, $sha, $path];
+}
+
+sub get_ignore($$$$$) {
+ my($new,$old,$rev,$path,$svnpath) = @_;
+
+ return unless $opt_I;
+ my $name = $svn->ignore("$svnpath",$rev);
+ if ($path eq '/') {
+ $path = $opt_I;
+ } else {
+ $path = File::Spec->catfile($path,$opt_I);
+ }
+ if (defined $name) {
+ my $pid = open(my $F, '-|');
+ die $! unless defined $pid;
+ if (!$pid) {
+ exec("git", "hash-object", "-w", $name)
+ or die "Cannot create object: $!\n";
+ }
+ my $sha = <$F>;
+ chomp $sha;
+ close $F;
+ unlink $name;
+ push(@$new,['0644',$sha,$path]);
+ } elsif (defined $old) {
+ push(@$old,$path);
+ }
+}
+
+sub project_path($$)
+{
+ my ($path, $project) = @_;
+
+ $path = "/".$path unless ($path =~ m#^\/#) ;
+ return $1 if ($path =~ m#^$project\/(.*)$#);
+
+ $path =~ s#\.#\\\.#g;
+ $path =~ s#\+#\\\+#g;
+ return "/" if ($project =~ m#^$path.*$#);
+
+ return undef;
+}
+
+sub split_path($$) {
+ my($rev,$path) = @_;
+ my $branch;
+
+ if($path =~ s#^/\Q$tag_name\E/([^/]+)/?##) {
+ $branch = "/$1";
+ } elsif($path =~ s#^/\Q$trunk_name\E/?##) {
+ $branch = "/";
+ } elsif($path =~ s#^/\Q$branch_name\E/([^/]+)/?##) {
+ $branch = $1;
+ } else {
+ my %no_error = (
+ "/" => 1,
+ "/$tag_name" => 1,
+ "/$branch_name" => 1
+ );
+ print STDERR "$rev: Unrecognized path: $path\n" unless (defined $no_error{$path});
+ return ()
+ }
+ if ($path eq "") {
+ $path = "/";
+ } elsif ($project_name) {
+ $path = project_path($path, $project_name);
+ }
+ return ($branch,$path);
+}
+
+sub branch_rev($$) {
+
+ my ($srcbranch,$uptorev) = @_;
+
+ my $bbranches = $branches{$srcbranch};
+ my @revs = reverse sort { ($a eq 'LAST' ? 0 : $a) <=> ($b eq 'LAST' ? 0 : $b) } keys %$bbranches;
+ my $therev;
+ foreach my $arev(@revs) {
+ next if ($arev eq 'LAST');
+ if ($arev <= $uptorev) {
+ $therev = $arev;
+ last;
+ }
+ }
+ return $therev;
+}
+
+sub expand_svndir($$$);
+
+sub expand_svndir($$$)
+{
+ my ($svnpath, $rev, $path) = @_;
+ my @list;
+ get_ignore(\@list, undef, $rev, $path, $svnpath);
+ my $dirents = $svn->dir_list($svnpath, $rev);
+ foreach my $p(keys %$dirents) {
+ my $kind = node_kind($svnpath.'/'.$p, $rev);
+ if ($kind eq $SVN::Node::file) {
+ my $f = get_file($svnpath.'/'.$p, $rev, $path.'/'.$p);
+ push(@list, $f) if $f;
+ } elsif ($kind eq $SVN::Node::dir) {
+ push(@list,
+ expand_svndir($svnpath.'/'.$p, $rev, $path.'/'.$p));
+ }
+ }
+ return @list;
+}
+
+sub copy_path($$$$$$$$) {
+ # Somebody copied a whole subdirectory.
+ # We need to find the index entries from the old version which the
+ # SVN log entry points to, and add them to the new place.
+
+ my($newrev,$newbranch,$path,$oldpath,$rev,$node_kind,$new,$parents) = @_;
+
+ my($srcbranch,$srcpath) = split_path($rev,$oldpath);
+ unless(defined $srcbranch && defined $srcpath) {
+ print "Path not found when copying from $oldpath @ $rev.\n".
+ "Will try to copy from original SVN location...\n"
+ if $opt_v;
+ push (@$new, expand_svndir($oldpath, $rev, $path));
+ return;
+ }
+ my $therev = branch_rev($srcbranch, $rev);
+ my $gitrev = $branches{$srcbranch}{$therev};
+ unless($gitrev) {
+ print STDERR "$newrev:$newbranch: could not find $oldpath \@ $rev\n";
+ return;
+ }
+ if ($srcbranch ne $newbranch) {
+ push(@$parents, $branches{$srcbranch}{'LAST'});
+ }
+ print "$newrev:$newbranch:$path: copying from $srcbranch:$srcpath @ $rev\n" if $opt_v;
+ if ($node_kind eq $SVN::Node::dir) {
+ $srcpath =~ s#/*$#/#;
+ }
+
+ my $pid = open my $f,'-|';
+ die $! unless defined $pid;
+ if (!$pid) {
+ exec("git","ls-tree","-r","-z",$gitrev,$srcpath)
+ or die $!;
+ }
+ local $/ = "\0";
+ while(<$f>) {
+ chomp;
+ my($m,$p) = split(/\t/,$_,2);
+ my($mode,$type,$sha1) = split(/ /,$m);
+ next if $type ne "blob";
+ if ($node_kind eq $SVN::Node::dir) {
+ $p = $path . substr($p,length($srcpath)-1);
+ } else {
+ $p = $path;
+ }
+ push(@$new,[$mode,$sha1,$p]);
+ }
+ close($f) or
+ print STDERR "$newrev:$newbranch: could not list files in $oldpath \@ $rev\n";
+}
+
+sub commit {
+ my($branch, $changed_paths, $revision, $author, $date, $message) = @_;
+ my($committer_name,$committer_email,$dest);
+ my($author_name,$author_email);
+ my(@old,@new,@parents);
+
+ if (not defined $author or $author eq "") {
+ $committer_name = $committer_email = "unknown";
+ } elsif (defined $users_file) {
+ die "User $author is not listed in $users_file\n"
+ unless exists $users{$author};
+ ($committer_name,$committer_email) = @{$users{$author}};
+ } elsif ($author =~ /^(.*?)\s+<(.*)>$/) {
+ ($committer_name, $committer_email) = ($1, $2);
+ } else {
+ $author =~ s/^<(.*)>$/$1/;
+ $committer_name = $committer_email = $author;
+ }
+
+ if ($opt_F && $message =~ /From:\s+(.*?)\s+<(.*)>\s*\n/) {
+ ($author_name, $author_email) = ($1, $2);
+ print "Author from From: $1 <$2>\n" if ($opt_v);
+ } elsif ($opt_S && $message =~ /Signed-off-by:\s+(.*?)\s+<(.*)>\s*\n/) {
+ ($author_name, $author_email) = ($1, $2);
+ print "Author from Signed-off-by: $1 <$2>\n" if ($opt_v);
+ } else {
+ $author_name = $committer_name;
+ $author_email = $committer_email;
+ }
+
+ $date = pdate($date);
+
+ my $tag;
+ my $parent;
+ if($branch eq "/") { # trunk
+ $parent = $opt_o;
+ } elsif($branch =~ m#^/(.+)#) { # tag
+ $tag = 1;
+ $parent = $1;
+ } else { # "normal" branch
+ # nothing to do
+ $parent = $branch;
+ }
+ $dest = $parent;
+
+ my $prev = $changed_paths->{"/"};
+ if($prev and $prev->[0] eq "A") {
+ delete $changed_paths->{"/"};
+ my $oldpath = $prev->[1];
+ my $rev;
+ if(defined $oldpath) {
+ my $p;
+ ($parent,$p) = split_path($revision,$oldpath);
+ if(defined $parent) {
+ if($parent eq "/") {
+ $parent = $opt_o;
+ } else {
+ $parent =~ s#^/##; # if it's a tag
+ }
+ }
+ } else {
+ $parent = undef;
+ }
+ }
+
+ my $rev;
+ if($revision > $opt_s and defined $parent) {
+ open(H,'-|',"git","rev-parse","--verify",$parent);
+ $rev = <H>;
+ close(H) or do {
+ print STDERR "$revision: cannot find commit '$parent'!\n";
+ return;
+ };
+ chop $rev;
+ if(length($rev) != 40) {
+ print STDERR "$revision: cannot find commit '$parent'!\n";
+ return;
+ }
+ $rev = $branches{($parent eq $opt_o) ? "/" : $parent}{"LAST"};
+ if($revision != $opt_s and not $rev) {
+ print STDERR "$revision: do not know ancestor for '$parent'!\n";
+ return;
+ }
+ } else {
+ $rev = undef;
+ }
+
+# if($prev and $prev->[0] eq "A") {
+# if(not $tag) {
+# unless(open(H,"> $git_dir/refs/heads/$branch")) {
+# print STDERR "$revision: Could not create branch $branch: $!\n";
+# $state=11;
+# next;
+# }
+# print H "$rev\n"
+# or die "Could not write branch $branch: $!";
+# close(H)
+# or die "Could not write branch $branch: $!";
+# }
+# }
+ if(not defined $rev) {
+ unlink($git_index);
+ } elsif ($rev ne $last_rev) {
+ print "Switching from $last_rev to $rev ($branch)\n" if $opt_v;
+ system("git", "read-tree", $rev);
+ die "read-tree failed for $rev: $?\n" if $?;
+ $last_rev = $rev;
+ }
+
+ push (@parents, $rev) if defined $rev;
+
+ my $cid;
+ if($tag and not %$changed_paths) {
+ $cid = $rev;
+ } else {
+ my @paths = sort keys %$changed_paths;
+ foreach my $path(@paths) {
+ my $action = $changed_paths->{$path};
+
+ if ($action->[0] eq "R") {
+ # refer to a file/tree in an earlier commit
+ push(@old,$path); # remove any old stuff
+ }
+ if(($action->[0] eq "A") || ($action->[0] eq "R")) {
+ my $node_kind = node_kind($action->[3], $revision);
+ if ($node_kind eq $SVN::Node::file) {
+ my $f = get_file($action->[3],
+ $revision, $path);
+ if ($f) {
+ push(@new,$f) if $f;
+ } else {
+ my $opath = $action->[3];
+ print STDERR "$revision: $branch: could not fetch '$opath'\n";
+ }
+ } elsif ($node_kind eq $SVN::Node::dir) {
+ if($action->[1]) {
+ copy_path($revision, $branch,
+ $path, $action->[1],
+ $action->[2], $node_kind,
+ \@new, \@parents);
+ } else {
+ get_ignore(\@new, \@old, $revision,
+ $path, $action->[3]);
+ }
+ }
+ } elsif ($action->[0] eq "D") {
+ push(@old,$path);
+ } elsif ($action->[0] eq "M") {
+ my $node_kind = node_kind($action->[3], $revision);
+ if ($node_kind eq $SVN::Node::file) {
+ my $f = get_file($action->[3],
+ $revision, $path);
+ push(@new,$f) if $f;
+ } elsif ($node_kind eq $SVN::Node::dir) {
+ get_ignore(\@new, \@old, $revision,
+ $path, $action->[3]);
+ }
+ } else {
+ die "$revision: unknown action '".$action->[0]."' for $path\n";
+ }
+ }
+
+ while(@old) {
+ my @o1;
+ if(@old > 55) {
+ @o1 = splice(@old,0,50);
+ } else {
+ @o1 = @old;
+ @old = ();
+ }
+ my $pid = open my $F, "-|";
+ die "$!" unless defined $pid;
+ if (!$pid) {
+ exec("git", "ls-files", "-z", @o1) or die $!;
+ }
+ @o1 = ();
+ local $/ = "\0";
+ while(<$F>) {
+ chomp;
+ push(@o1,$_);
+ }
+ close($F);
+
+ while(@o1) {
+ my @o2;
+ if(@o1 > 55) {
+ @o2 = splice(@o1,0,50);
+ } else {
+ @o2 = @o1;
+ @o1 = ();
+ }
+ system("git","update-index","--force-remove","--",@o2);
+ die "Cannot remove files: $?\n" if $?;
+ }
+ }
+ while(@new) {
+ my @n2;
+ if(@new > 12) {
+ @n2 = splice(@new,0,10);
+ } else {
+ @n2 = @new;
+ @new = ();
+ }
+ system("git","update-index","--add",
+ (map { ('--cacheinfo', @$_) } @n2));
+ die "Cannot add files: $?\n" if $?;
+ }
+
+ my $pid = open(C,"-|");
+ die "Cannot fork: $!" unless defined $pid;
+ unless($pid) {
+ exec("git","write-tree");
+ die "Cannot exec git-write-tree: $!\n";
+ }
+ chomp(my $tree = <C>);
+ length($tree) == 40
+ or die "Cannot get tree id ($tree): $!\n";
+ close(C)
+ or die "Error running git-write-tree: $?\n";
+ print "Tree ID $tree\n" if $opt_v;
+
+ my $pr = IO::Pipe->new() or die "Cannot open pipe: $!\n";
+ my $pw = IO::Pipe->new() or die "Cannot open pipe: $!\n";
+ $pid = fork();
+ die "Fork: $!\n" unless defined $pid;
+ unless($pid) {
+ $pr->writer();
+ $pw->reader();
+ open(OUT,">&STDOUT");
+ dup2($pw->fileno(),0);
+ dup2($pr->fileno(),1);
+ $pr->close();
+ $pw->close();
+
+ my @par = ();
+
+ # loose detection of merges
+ # based on the commit msg
+ foreach my $rx (@mergerx) {
+ if ($message =~ $rx) {
+ my $mparent = $1;
+ if ($mparent eq 'HEAD') { $mparent = $opt_o };
+ if ( -e "$git_dir/refs/heads/$mparent") {
+ $mparent = get_headref($mparent, $git_dir);
+ push (@parents, $mparent);
+ print OUT "Merge parent branch: $mparent\n" if $opt_v;
+ }
+ }
+ }
+ my %seen_parents = ();
+ my @unique_parents = grep { ! $seen_parents{$_} ++ } @parents;
+ foreach my $bparent (@unique_parents) {
+ push @par, '-p', $bparent;
+ print OUT "Merge parent branch: $bparent\n" if $opt_v;
+ }
+
+ exec("env",
+ "GIT_AUTHOR_NAME=$author_name",
+ "GIT_AUTHOR_EMAIL=$author_email",
+ "GIT_AUTHOR_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)),
+ "GIT_COMMITTER_NAME=$committer_name",
+ "GIT_COMMITTER_EMAIL=$committer_email",
+ "GIT_COMMITTER_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)),
+ "git", "commit-tree", $tree,@par);
+ die "Cannot exec git-commit-tree: $!\n";
+ }
+ $pw->writer();
+ $pr->reader();
+
+ $message =~ s/[\s\n]+\z//;
+ $message = "r$revision: $message" if $opt_r;
+
+ print $pw "$message\n"
+ or die "Error writing to git-commit-tree: $!\n";
+ $pw->close();
+
+ print "Committed change $revision:$branch (".strftime("%Y-%m-%d %H:%M:%S",gmtime($date)).")\n" if $opt_v;
+ chomp($cid = <$pr>);
+ length($cid) == 40
+ or die "Cannot get commit id ($cid): $!\n";
+ print "Commit ID $cid\n" if $opt_v;
+ $pr->close();
+
+ waitpid($pid,0);
+ die "Error running git-commit-tree: $?\n" if $?;
+ }
+
+ if (not defined $cid) {
+ $cid = $branches{"/"}{"LAST"};
+ }
+
+ if(not defined $dest) {
+ print "... no known parent\n" if $opt_v;
+ } elsif(not $tag) {
+ print "Writing to refs/heads/$dest\n" if $opt_v;
+ open(C,">$git_dir/refs/heads/$dest") and
+ print C ("$cid\n") and
+ close(C)
+ or die "Cannot write branch $dest for update: $!\n";
+ }
+
+ if ($tag) {
+ $last_rev = "-" if %$changed_paths;
+ # the tag was 'complex', i.e. did not refer to a "real" revision
+
+ $dest =~ tr/_/\./ if $opt_u;
+
+ system('git', 'tag', '-f', $dest, $cid) == 0
+ or die "Cannot create tag $dest: $!\n";
+
+ print "Created tag '$dest' on '$branch'\n" if $opt_v;
+ }
+ $branches{$branch}{"LAST"} = $cid;
+ $branches{$branch}{$revision} = $cid;
+ $last_rev = $cid;
+ print BRANCHES "$revision $branch $cid\n";
+ print "DONE: $revision $dest $cid\n" if $opt_v;
+}
+
+sub commit_all {
+ # Recursive use of the SVN connection does not work
+ local $svn = $svn2;
+
+ my ($changed_paths, $revision, $author, $date, $message) = @_;
+ my %p;
+ while(my($path,$action) = each %$changed_paths) {
+ $p{$path} = [ $action->action,$action->copyfrom_path, $action->copyfrom_rev, $path ];
+ }
+ $changed_paths = \%p;
+
+ my %done;
+ my @col;
+ my $pref;
+ my $branch;
+
+ while(my($path,$action) = each %$changed_paths) {
+ ($branch,$path) = split_path($revision,$path);
+ next if not defined $branch;
+ next if not defined $path;
+ $done{$branch}{$path} = $action;
+ }
+ while(($branch,$changed_paths) = each %done) {
+ commit($branch, $changed_paths, $revision, $author, $date, $message);
+ }
+}
+
+$opt_l = $svn->{'maxrev'} if not defined $opt_l or $opt_l > $svn->{'maxrev'};
+
+if ($opt_l < $current_rev) {
+ print "Up to date: no new revisions to fetch!\n" if $opt_v;
+ unlink("$git_dir/SVN2GIT_HEAD");
+ exit;
+}
+
+print "Processing from $current_rev to $opt_l ...\n" if $opt_v;
+
+my $from_rev;
+my $to_rev = $current_rev - 1;
+
+my $subpool = SVN::Pool::new_default_sub;
+while ($to_rev < $opt_l) {
+ $subpool->clear;
+ $from_rev = $to_rev + 1;
+ $to_rev = $from_rev + $repack_after;
+ $to_rev = $opt_l if $opt_l < $to_rev;
+ print "Fetching from $from_rev to $to_rev ...\n" if $opt_v;
+ $svn->{'svn'}->get_log("",$from_rev,$to_rev,0,1,1,\&commit_all);
+ my $pid = fork();
+ die "Fork: $!\n" unless defined $pid;
+ unless($pid) {
+ exec("git", "repack", "-d")
+ or die "Cannot repack: $!\n";
+ }
+ waitpid($pid, 0);
+}
+
+
+unlink($git_index);
+
+if (defined $orig_git_index) {
+ $ENV{GIT_INDEX_FILE} = $orig_git_index;
+} else {
+ delete $ENV{GIT_INDEX_FILE};
+}
+
+# Now switch back to the branch we were in before all of this happened
+if($orig_branch) {
+ print "DONE\n" if $opt_v and (not defined $opt_l or $opt_l > 0);
+ system("cp","$git_dir/refs/heads/$opt_o","$git_dir/refs/heads/master")
+ if $forward_master;
+ unless ($opt_i) {
+ system('git', 'read-tree', '-m', '-u', 'SVN2GIT_HEAD', 'HEAD');
+ die "read-tree failed: $?\n" if $?;
+ }
+} else {
+ $orig_branch = "master";
+ print "DONE; creating $orig_branch branch\n" if $opt_v and (not defined $opt_l or $opt_l > 0);
+ system("cp","$git_dir/refs/heads/$opt_o","$git_dir/refs/heads/master")
+ unless -f "$git_dir/refs/heads/master";
+ system('git', 'update-ref', 'HEAD', "$orig_branch");
+ unless ($opt_i) {
+ system('git checkout');
+ die "checkout failed: $?\n" if $?;
+ }
+}
+unlink("$git_dir/SVN2GIT_HEAD");
+close(BRANCHES);
diff --git a/contrib/examples/git-svnimport.txt b/contrib/examples/git-svnimport.txt
new file mode 100644
index 0000000..3bb871e
--- /dev/null
+++ b/contrib/examples/git-svnimport.txt
@@ -0,0 +1,179 @@
+git-svnimport(1)
+================
+v0.1, July 2005
+
+NAME
+----
+git-svnimport - Import an SVN repository into git
+
+
+SYNOPSIS
+--------
+[verse]
+'git-svnimport' [ -o <branch-for-HEAD> ] [ -h ] [ -v ] [ -d | -D ]
+ [ -C <GIT_repository> ] [ -i ] [ -u ] [-l limit_rev]
+ [ -b branch_subdir ] [ -T trunk_subdir ] [ -t tag_subdir ]
+ [ -s start_chg ] [ -m ] [ -r ] [ -M regex ]
+ [ -I <ignorefile_name> ] [ -A <author_file> ]
+ [ -R <repack_each_revs>] [ -P <path_from_trunk> ]
+ <SVN_repository_URL> [ <path> ]
+
+
+DESCRIPTION
+-----------
+Imports an SVN repository into git. It will either create a new
+repository or incrementally import into an existing one.
+
+SVN access is done via the Subversion Perl bindings (the SVN::Core and
+SVN::Ra modules).
+
+git-svnimport assumes that SVN repositories are organized into one
+"trunk" directory where the main development happens, "branches/FOO"
+directories for branches, and "tags/FOO" directories for tags.
+Other subdirectories are ignored.
+
+git-svnimport creates a file ".git/svn2git", which is required for
+incremental SVN imports.
+
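+For illustration, a first-time import of a repository that uses the
+default trunk/branches/tags layout might look like this (the URL and
+the target directory are placeholders only):
+
+------
+ git-svnimport -v -C project http://svn.example.com/svn/project
+------
+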
+OPTIONS
+-------
+-C <target-dir>::
+ The GIT repository to import to. If the directory doesn't
+ exist, it will be created. Default is the current directory.
+
+-s <start_rev>::
+ Start importing at this SVN change number. The default is 1.
++
+When importing incrementally, you might need to edit the .git/svn2git file.
+
+-i::
+ Import-only: don't perform a checkout after importing. This option
+ ensures the working directory and index remain untouched and will
+ not create them if they do not exist.
+
+-T <trunk_subdir>::
+ Name the SVN trunk. Default "trunk".
+
+-t <tag_subdir>::
+ Name the SVN subdirectory for tags. Default "tags".
+
+-b <branch_subdir>::
+ Name the SVN subdirectory for branches. Default "branches".
+
+-o <branch-for-HEAD>::
+ The 'trunk' branch from SVN is imported to the 'origin' branch within
+ the git repository. Use this option if you want to import into a
+ different branch.
+
+-r::
+ Prepend 'rX: ' to commit messages, where X is the imported
+ subversion revision.
+
+-u::
+ Replace underscores in tag names with periods.
+
+-I <ignorefile_name>::
+ Import the svn:ignore directory property to files with this
+ name in each directory. (The Subversion and GIT ignore
+ syntaxes are similar enough that using the Subversion patterns
+ directly with "-I .gitignore" will almost always just work.)
+
+-A <author_file>::
+ Read a file with lines of the form
++
+------
+ username = User's Full Name <email@addr.es>
+
+------
++
+and use "User's Full Name <email@addr.es>" as the GIT
+author and committer for Subversion commits made by
+"username". If a commit made by a user not in the
+list is encountered, the import aborts.
++
+For convenience, this data is saved to $GIT_DIR/svn-authors
+each time the -A option is provided, and read from that same
+file each time git-svnimport is run with an existing GIT
+repository without -A.
+
+-m::
+ Attempt to detect merges based on the commit message. This option
+ will enable default regexes that try to capture the source
+ branch name from the commit message.
+
+-M <regex>::
+ Attempt to detect merges based on the commit message with a custom
+ regex. It can be combined with -m to enable the default regexes as well.
+ You must escape forward slashes.
+
+-l <max_rev>::
+ Specify a maximum revision number to pull.
++
+Formerly, this option controlled how many revisions to pull,
+due to SVN memory leaks. (These have been worked around.)
+
+-R <repack_each_revs>::
+ Specify how often the git repository should be repacked.
++
+The default value is 1000. git-svnimport will do imports in chunks of 1000
+revisions; after each chunk, the git repository will be repacked. To disable
+this behavior, specify a value larger than the number of
+revisions to import.
+
+-P <path_from_trunk>::
+ Partial import of the SVN tree.
++
+By default, the whole tree on the SVN trunk (/trunk) is imported.
+'-P my/proj' will import starting only from '/trunk/my/proj'.
+This option is useful when you want to import one project from an
+SVN repository that hosts multiple projects under the same trunk.
+
+-v::
+ Verbosity: let 'svnimport' report what it is doing.
+
+-d::
+ Use direct HTTP requests if possible. The "<path>" argument is used
+ only for retrieving the SVN logs; the path to the contents is
+ included in the SVN log.
+
+-D::
+ Use direct HTTP requests if possible. The "<path>" argument is used
+ for retrieving the logs, as well as for the contents.
++
+There's no safe way to automatically find out which of these options to
+use, so you need to try both. Usually, the one that's wrong will die
+with a 40x error pretty quickly.
+
+<SVN_repository_URL>::
+ The URL of the SVN module you want to import. For local
+ repositories, use "file:///absolute/path".
++
+If you're using the "-d" or "-D" option, this is the URL of the SVN
+repository itself; it usually ends in "/svn".
+
+<path>::
+ The path to the module you want to check out.
+
+-h::
+ Print a short usage message and exit.
+
+OUTPUT
+------
+If '-v' is specified, the script reports what it is doing.
+
+Otherwise, success is indicated the Unix way, i.e. by simply exiting with
+a zero exit status.
+
+Author
+------
+Written by Matthias Urlichs <smurf@smurf.noris.de>, with help from
+various participants of the git-list <git@vger.kernel.org>.
+
+Based on a cvs2git script by the same author.
+
+Documentation
+-------------
+Documentation by Matthias Urlichs <smurf@smurf.noris.de>.
+
+GIT
+---
+Part of the gitlink:git[7] suite
diff --git a/contrib/examples/git-tag.sh b/contrib/examples/git-tag.sh
new file mode 100755
index 0000000..2c15bc9
--- /dev/null
+++ b/contrib/examples/git-tag.sh
@@ -0,0 +1,205 @@
+#!/bin/sh
+# Copyright (c) 2005 Linus Torvalds
+
+USAGE='[-n [<num>]] -l [<pattern>] | [-a | -s | -u <key-id>] [-f | -d | -v] [-m <msg>] <tagname> [<head>]'
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+message_given=
+annotate=
+signed=
+force=
+message=
+username=
+list=
+verify=
+LINES=0
+while test $# != 0
+do
+ case "$1" in
+ -a)
+ annotate=1
+ shift
+ ;;
+ -s)
+ annotate=1
+ signed=1
+ shift
+ ;;
+ -f)
+ force=1
+ shift
+ ;;
+ -n)
+ case "$#,$2" in
+ 1,* | *,-*)
+ LINES=1 # no argument
+ ;;
+ *) shift
+ LINES=$(expr "$1" : '\([0-9]*\)')
+ [ -z "$LINES" ] && LINES=1 # 1 line is default when -n is used
+ ;;
+ esac
+ shift
+ ;;
+ -l)
+ list=1
+ shift
+ case $# in
+ 0) PATTERN=
+ ;;
+ *)
+ PATTERN="$1" # select tags by shell pattern, not re
+ shift
+ ;;
+ esac
+ git rev-parse --symbolic --tags | sort |
+ while read TAG
+ do
+ case "$TAG" in
+ *$PATTERN*) ;;
+ *) continue ;;
+ esac
+ [ "$LINES" -le 0 ] && { echo "$TAG"; continue ;}
+ OBJTYPE=$(git cat-file -t "$TAG")
+ case $OBJTYPE in
+ tag)
+ ANNOTATION=$(git cat-file tag "$TAG" |
+ sed -e '1,/^$/d' |
+ sed -n -e "
+ /^-----BEGIN PGP SIGNATURE-----\$/q
+ 2,\$s/^/ /
+ p
+ ${LINES}q
+ ")
+ printf "%-15s %s\n" "$TAG" "$ANNOTATION"
+ ;;
+ *) echo "$TAG"
+ ;;
+ esac
+ done
+ ;;
+ -m)
+ annotate=1
+ shift
+ message="$1"
+ if test "$#" = "0"; then
+ die "error: option -m needs an argument"
+ else
+ message="$1"
+ message_given=1
+ shift
+ fi
+ ;;
+ -F)
+ annotate=1
+ shift
+ if test "$#" = "0"; then
+ die "error: option -F needs an argument"
+ else
+ message="$(cat "$1")"
+ message_given=1
+ shift
+ fi
+ ;;
+ -u)
+ annotate=1
+ signed=1
+ shift
+ if test "$#" = "0"; then
+ die "error: option -u needs an argument"
+ else
+ username="$1"
+ shift
+ fi
+ ;;
+ -d)
+ shift
+ had_error=0
+ for tag
+ do
+ cur=$(git show-ref --verify --hash -- "refs/tags/$tag") || {
+ echo >&2 "Seriously, what tag are you talking about?"
+ had_error=1
+ continue
+ }
+ git update-ref -m 'tag: delete' -d "refs/tags/$tag" "$cur" || {
+ had_error=1
+ continue
+ }
+ echo "Deleted tag $tag."
+ done
+ exit $had_error
+ ;;
+ -v)
+ shift
+ tag_name="$1"
+ tag=$(git show-ref --verify --hash -- "refs/tags/$tag_name") ||
+ die "Seriously, what tag are you talking about?"
+ git-verify-tag -v "$tag"
+ exit $?
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+done
+
+[ -n "$list" ] && exit 0
+
+name="$1"
+[ "$name" ] || usage
+prev=0000000000000000000000000000000000000000
+if git show-ref --verify --quiet -- "refs/tags/$name"
+then
+ test -n "$force" || die "tag '$name' already exists"
+ prev=`git rev-parse "refs/tags/$name"`
+fi
+shift
+git check-ref-format "tags/$name" ||
+ die "we do not like '$name' as a tag name."
+
+object=$(git rev-parse --verify --default HEAD "$@") || exit 1
+type=$(git cat-file -t $object) || exit 1
+tagger=$(git var GIT_COMMITTER_IDENT) || exit 1
+
+test -n "$username" ||
+ username=$(git config user.signingkey) ||
+ username=$(expr "z$tagger" : 'z\(.*>\)')
+
+trap 'rm -f "$GIT_DIR"/TAG_TMP* "$GIT_DIR"/TAG_FINALMSG "$GIT_DIR"/TAG_EDITMSG' 0
+
+if [ "$annotate" ]; then
+ if [ -z "$message_given" ]; then
+ ( echo "#"
+ echo "# Write a tag message"
+ echo "#" ) > "$GIT_DIR"/TAG_EDITMSG
+ git_editor "$GIT_DIR"/TAG_EDITMSG || exit
+ else
+ printf '%s\n' "$message" >"$GIT_DIR"/TAG_EDITMSG
+ fi
+
+ grep -v '^#' <"$GIT_DIR"/TAG_EDITMSG |
+ git stripspace >"$GIT_DIR"/TAG_FINALMSG
+
+ [ -s "$GIT_DIR"/TAG_FINALMSG -o -n "$message_given" ] || {
+ echo >&2 "No tag message?"
+ exit 1
+ }
+
+ ( printf 'object %s\ntype %s\ntag %s\ntagger %s\n\n' \
+ "$object" "$type" "$name" "$tagger";
+ cat "$GIT_DIR"/TAG_FINALMSG ) >"$GIT_DIR"/TAG_TMP
+ rm -f "$GIT_DIR"/TAG_TMP.asc "$GIT_DIR"/TAG_FINALMSG
+ if [ "$signed" ]; then
+ gpg -bsa -u "$username" "$GIT_DIR"/TAG_TMP &&
+ cat "$GIT_DIR"/TAG_TMP.asc >>"$GIT_DIR"/TAG_TMP ||
+ die "failed to sign the tag with GPG."
+ fi
+ object=$(git-mktag < "$GIT_DIR"/TAG_TMP)
+fi
+
+git update-ref "refs/tags/$name" "$object" "$prev"
diff --git a/contrib/examples/git-verify-tag.sh b/contrib/examples/git-verify-tag.sh
new file mode 100755
index 0000000..0902a5c
--- /dev/null
+++ b/contrib/examples/git-verify-tag.sh
@@ -0,0 +1,45 @@
+#!/bin/sh
+
+USAGE='<tag>'
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+verbose=
+while test $# != 0
+do
+ case "$1" in
+ -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose)
+ verbose=t ;;
+ *)
+ break ;;
+ esac
+ shift
+done
+
+if [ "$#" != "1" ]
+then
+ usage
+fi
+
+type="$(git cat-file -t "$1" 2>/dev/null)" ||
+ die "$1: no such object."
+
+test "$type" = tag ||
+ die "$1: cannot verify a non-tag object of type $type."
+
+case "$verbose" in
+t)
+ git cat-file -p "$1" |
+ sed -n -e '/^-----BEGIN PGP SIGNATURE-----/q' -e p
+ ;;
+esac
+
+trap 'rm -f "$GIT_DIR/.tmp-vtag"' 0
+
+git cat-file tag "$1" >"$GIT_DIR/.tmp-vtag" || exit 1
+sed -n -e '
+ /^-----BEGIN PGP SIGNATURE-----$/q
+ p
+' <"$GIT_DIR/.tmp-vtag" |
+gpg --verify "$GIT_DIR/.tmp-vtag" - || exit 1
+rm -f "$GIT_DIR/.tmp-vtag"
diff --git a/contrib/fast-import/git-import.perl b/contrib/fast-import/git-import.perl
new file mode 100755
index 0000000..0891b9e
--- /dev/null
+++ b/contrib/fast-import/git-import.perl
@@ -0,0 +1,64 @@
+#!/usr/bin/perl
+#
+# Performs an initial import of a directory. This is the equivalent
+# of doing 'git init; git add .; git commit'. It's a little slower,
+# but is meant to be a simple fast-import example.
+
+use strict;
+use File::Find;
+
+my $USAGE = 'usage: git-import branch import-message';
+my $branch = shift or die "$USAGE\n";
+my $message = shift or die "$USAGE\n";
+
+chomp(my $username = `git config user.name`);
+chomp(my $email = `git config user.email`);
+die 'You need to set user name and email'
+ unless $username && $email;
+
+system('git init');
+open(my $fi, '|-', qw(git fast-import --date-format=now))
+ or die "unable to spawn fast-import: $!";
+
+print $fi <<EOF;
+commit refs/heads/$branch
+committer $username <$email> now
+data <<MSGEOF
+$message
+MSGEOF
+
+EOF
+
+find(
+ sub {
+ if($File::Find::name eq './.git') {
+ $File::Find::prune = 1;
+ return;
+ }
+ return unless -f $_;
+
+ my $fn = $File::Find::name;
+ $fn =~ s#^.\/##;
+
+ open(my $in, '<', $_)
+ or die "unable to open $fn: $!";
+ my @st = stat($in)
+ or die "unable to stat $fn: $!";
+ my $len = $st[7];
+
+ print $fi "M 644 inline $fn\n";
+ print $fi "data $len\n";
+ while($len > 0) {
+ my $r = read($in, my $buf, $len < 4096 ? $len : 4096);
+ defined($r) or die "read error from $fn: $!";
+ $r > 0 or die "premature EOF from $fn: $!";
+ print $fi $buf;
+ $len -= $r;
+ }
+ print $fi "\n";
+
+ }, '.'
+);
+
+close($fi);
+exit $?;
diff --git a/contrib/fast-import/git-import.sh b/contrib/fast-import/git-import.sh
new file mode 100755
index 0000000..f8d803c
--- /dev/null
+++ b/contrib/fast-import/git-import.sh
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# Performs an initial import of a directory. This is the equivalent
+# of doing 'git init; git add .; git commit'. It's a lot slower,
+# but is meant to be a simple fast-import example.
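+#
+# A possible invocation (the branch name and commit message below are
+# only examples):
+#
+#   cd /path/to/project && sh git-import.sh master 'Initial import'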
+
+if [ -z "$1" -o -z "$2" ]; then
+ echo "usage: git-import branch import-message"
+ exit 1
+fi
+
+USERNAME="$(git config user.name)"
+EMAIL="$(git config user.email)"
+
+if [ -z "$USERNAME" -o -z "$EMAIL" ]; then
+ echo "You need to set user name and email"
+ exit 1
+fi
+
+git init
+
+(
+ cat <<EOF
+commit refs/heads/$1
+committer $USERNAME <$EMAIL> now
+data <<MSGEOF
+$2
+MSGEOF
+
+EOF
+ find * -type f|while read i;do
+ echo "M 100644 inline $i"
+ echo data $(stat -c '%s' "$i")
+ cat "$i"
+ echo
+ done
+ echo
+) | git fast-import --date-format=now
diff --git a/contrib/fast-import/git-p4.README b/contrib/fast-import/git-p4.README
new file mode 100644
index 0000000..cec5ecf
--- /dev/null
+++ b/contrib/fast-import/git-p4.README
@@ -0,0 +1,12 @@
+The git-p4 script moved to the top level of the git source directory.
+
+Invoke it as any other git command, like "git p4 clone", for instance.
+
+Note that the top-level git-p4.py script is now the source. It is
+built by make into git-p4, which is what gets installed.
+
+Windows users can copy the git-p4.py source script directly, possibly
+invoking it through a batch file called "git-p4.bat" in the same folder.
+It should contain just one line:
+
+ @python "%~d0%~p0git-p4.py" %*
diff --git a/contrib/fast-import/import-directories.perl b/contrib/fast-import/import-directories.perl
new file mode 100755
index 0000000..7f3afa5
--- /dev/null
+++ b/contrib/fast-import/import-directories.perl
@@ -0,0 +1,417 @@
+#!/usr/bin/perl
+#
+# Copyright 2008-2009 Peter Krefting <peter@softwolves.pp.se>
+#
+# ------------------------------------------------------------------------
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+#
+# ------------------------------------------------------------------------
+
+=pod
+
+=head1 NAME
+
+import-directories - Import bits and pieces to Git.
+
+=head1 SYNOPSIS
+
+B<import-directories.perl> F<configfile> F<outputfile>
+
+=head1 DESCRIPTION
+
+Script to import projects whose "version control" consisted of copying
+the source directory to a new location and editing it there. It handles
+projects with arbitrary branching
+and version trees, taking a file describing the inputs and generating a
+file compatible with the L<git-fast-import(1)> format.
+
+=head1 CONFIGURATION FILE
+
+=head2 Format
+
+The configuration file is based on the standard I<.ini> format.
+
+ ; Comments start with semi-colons
+ [section]
+ key=value
+
+Please see below for information on how to escape special characters.
+
+=head2 Global configuration
+
+Global configuration is done in the B<[config]> section, which should be
+the first section in the file. Configuration can be changed by
+repeating configuration sections later on.
+
+ [config]
+ ; configure conversion of CRLFs. "convert" means that all CRLFs
+ ; should be converted into LFs (suitable for the core.autocrlf
+ ; setting set to true in Git). "none" means that all data is
+ ; treated as binary.
+ crlf=convert
+
+=head2 Revision configuration
+
+Each revision that is to be imported is described in three
+sections. Revisions should be defined in topological order, so
+that a revision's parent has always been defined when a new revision
+is introduced. All the sections for one revision must be defined
+before defining the next revision.
+
+Each revision is assigned a unique numerical identifier. The
+numbers do not need to be consecutive, nor monotonically
+increasing.
+
+For instance, if your configuration file contains only the two
+revisions 4711 and 42, where 4711 is the initial commit, the
+only requirement is that 4711 is completely defined before 42.
+
+=pod
+
+=head3 Revision description section
+
+A section whose section name is just an integer gives meta-data
+about the revision.
+
+ [3]
+ ; author sets the author of the revisions
+ author=Peter Krefting <peter@softwolves.pp.se>
+ ; branch sets the branch that the revision should be committed to
+ branch=master
+ ; parent describes the revision that is the parent of this commit
+ ; (optional)
+ parent=1
+ ; merges describes a revision that is merged into this commit
+ ; (optional; can be repeated)
+ merges=2
+ ; selects one file to take the timestamp from
+ ; (optional; if unspecified, the most recent file from the .files
+ ; section is used)
+ timestamp=3/source.c
+
+=head3 Revision contents section
+
+A section whose section name is an integer followed by B<.files>
+describe all the files included in this revision. If a file that
+was available previously is not included in this revision, it will
+be removed.
+
+If an on-disk revision is incomplete, you can point to files from
+a previous revision. There are no restrictions on where the source
+files are located, nor on what they are named.
+
+ [3.files]
+ ; the key is the path inside the repository, the value is the path
+ ; as seen from the importer script.
+ source.c=ver-3.00/source.c
+ source.h=ver-2.99/source.h
+ readme.txt=ver-3.00/introduction to the project.txt
+
+File names are treated as byte strings (but please see below on
+quoting rules), and should be stored in the configuration file in
+the encoding that should be used in the generated repository.
+
+=head3 Revision commit message section
+
+A section whose section name is an integer followed by B<.message>
+gives the commit message. This section is read verbatim, up until
+the beginning of the next section. As such, a commit message may not
+contain a line that begins with an opening square bracket ("[") and
+ends with a closing square bracket ("]"), unless they are surrounded
+by whitespace or other characters.
+
+ [3.message]
+ Implement foobar.
+ ; trailing blank lines are ignored.
+
+=cut
+
+# Globals
+use strict;
+use warnings;
+use integer;
+my $crlfmode = 0;
+my @revs;
+my (%revmap, %message, %files, %author, %branch, %parent, %merges, %time, %timesource);
+my $sectiontype = 0;
+my $rev = 0;
+my $mark = 1;
+
+# Check command line
+if ($#ARGV < 1 || $ARGV[0] =~ /^--?h/)
+{
+ exec('perldoc', $0);
+ exit 1;
+}
+
+# Open configuration
+my $config = $ARGV[0];
+open CFG, '<', $config or die "Cannot open configuration file \"$config\": ";
+
+# Open output
+my $output = $ARGV[1];
+open OUT, '>', $output or die "Cannot create output file \"$output\": ";
+binmode OUT;
+
+LINE: while (my $line = <CFG>)
+{
+ $line =~ s/\r?\n$//;
+ next LINE if $sectiontype != 4 && $line eq '';
+ next LINE if $line =~ /^;/;
+ my $oldsectiontype = $sectiontype;
+ my $oldrev = $rev;
+
+ # Sections
+ if ($line =~ m"^\[(config|(\d+)(|\.files|\.message))\]$")
+ {
+ if ($1 eq 'config')
+ {
+ $sectiontype = 1;
+ }
+ elsif ($3 eq '')
+ {
+ $sectiontype = 2;
+ $rev = $2;
+ # Create a new revision
+ die "Duplicate rev: $line\n " if defined $revmap{$rev};
+ print "Reading revision $rev\n";
+ push @revs, $rev;
+ $revmap{$rev} = $mark ++;
+ $time{$revmap{$rev}} = 0;
+ }
+ elsif ($3 eq '.files')
+ {
+ $sectiontype = 3;
+ $rev = $2;
+ die "Revision mismatch: $line\n " unless $rev == $oldrev;
+ }
+ elsif ($3 eq '.message')
+ {
+ $sectiontype = 4;
+ $rev = $2;
+ die "Revision mismatch: $line\n " unless $rev == $oldrev;
+ }
+ else
+ {
+ die "Internal parse error: $line\n ";
+ }
+ next LINE;
+ }
+
+ # Parse data
+ if ($sectiontype != 4)
+ {
+ # Key and value
+ if ($line =~ m"^\s*([^\s].*=.*[^\s])\s*$")
+ {
+ my ($key, $value) = &parsekeyvaluepair($1);
+ # Global configuration
+ if (1 == $sectiontype)
+ {
+ if ($key eq 'crlf')
+ {
+ $crlfmode = 1, next LINE if $value eq 'convert';
+ $crlfmode = 0, next LINE if $value eq 'none';
+ }
+ die "Unknown configuration option: $line\n ";
+ }
+ # Revision specification
+ if (2 == $sectiontype)
+ {
+ my $current = $revmap{$rev};
+ $author{$current} = $value, next LINE if $key eq 'author';
+ $branch{$current} = $value, next LINE if $key eq 'branch';
+ $parent{$current} = $value, next LINE if $key eq 'parent';
+ $timesource{$current} = $value, next LINE if $key eq 'timestamp';
+ push(@{$merges{$current}}, $value), next LINE if $key eq 'merges';
+ die "Unknown revision option: $line\n ";
+ }
+ # Filespecs
+ if (3 == $sectiontype)
+ {
+ # Add the file and create a marker
+ die "File not found: $line\n " unless -f $value;
+ my $current = $revmap{$rev};
+ ${$files{$current}}{$key} = $mark;
+ my $time = &fileblob($value, $crlfmode, $mark ++);
+
+ # Update revision timestamp if more recent than other
+ # files seen, or if this is the file we have selected
+ # to take the time stamp from using the "timestamp"
+ # directive.
+ if ((defined $timesource{$current} && $timesource{$current} eq $value)
+ || $time > $time{$current})
+ {
+ $time{$current} = $time;
+ }
+ }
+ }
+ else
+ {
+ die "Parse error: $line\n ";
+ }
+ }
+ else
+ {
+ # Commit message
+ my $current = $revmap{$rev};
+ if (defined $message{$current})
+ {
+ $message{$current} .= "\n";
+ }
+ $message{$current} .= $line;
+ }
+}
+close CFG;
+
+# Start spewing out data for git-fast-import
+foreach my $commit (@revs)
+{
+ # Progress
+ print OUT "progress Creating revision $commit\n";
+
+ # Create commit header
+ my $mark = $revmap{$commit};
+
+ # Branch and commit id
+ print OUT "commit refs/heads/", $branch{$mark}, "\nmark :", $mark, "\n";
+
+ # Author and timestamp
+ die "No timestamp defined for $commit (no files?)\n" unless defined $time{$mark};
+ print OUT "committer ", $author{$mark}, " ", $time{$mark}, " +0100\n";
+
+ # Commit message
+ die "No message defined for $commit\n" unless defined $message{$mark};
+ my $message = $message{$mark};
+ $message =~ s/\n$//; # Kill trailing empty line
+ print OUT "data ", length($message), "\n", $message, "\n";
+
+ # Parent and any merges
+ print OUT "from :", $revmap{$parent{$mark}}, "\n" if defined $parent{$mark};
+ if (defined $merges{$mark})
+ {
+ foreach my $merge (@{$merges{$mark}})
+ {
+ print OUT "merge :", $revmap{$merge}, "\n";
+ }
+ }
+
+ # Output file marks
+ print OUT "deleteall\n"; # start from scratch
+ foreach my $file (sort keys %{$files{$mark}})
+ {
+ print OUT "M 644 :", ${$files{$mark}}{$file}, " $file\n";
+ }
+ print OUT "\n";
+}
+
+# Create one file blob
+sub fileblob
+{
+ my ($filename, $crlfmode, $mark) = @_;
+
+ # Import the file
+ print OUT "progress Importing $filename\nblob\nmark :$mark\n";
+ open FILE, '<', $filename or die "Cannot read $filename\n ";
+ binmode FILE;
+ my ($size, $mtime) = (stat(FILE))[7,9];
+ my $file;
+ read FILE, $file, $size;
+ close FILE;
+ $file =~ s/\r\n/\n/g if $crlfmode;
+ print OUT "data ", length($file), "\n", $file, "\n";
+
+ return $mtime;
+}
+
+# Parse a key=value pair
+sub parsekeyvaluepair
+{
+=pod
+
+=head2 Escaping special characters
+
+Key and value strings may be enclosed in quotes, in which case
+whitespace inside the quotes is preserved. Additionally, an equal
+sign may be included in the key by preceding it with a backslash.
+For example:
+
+ "key1 "=value1
+ key2=" value2"
+ key\=3=value3
+ key4=value=4
+ "key5""=value5
+
+Here the first key is "key1 " (note the trailing white-space) and the
+second value is " value2" (note the leading white-space). The third
+key contains an equal sign "key=3" and so does the fourth value, which
+does not need to be escaped. The fifth key contains a trailing quote,
+which does not need to be escaped since it is inside a surrounding
+quote.
+
+=cut
+ my $pair = shift;
+
+ # Separate key and value by the first non-quoted equal sign
+ my ($key, $value);
+ if ($pair =~ /^(.*[^\\])=(.*)$/)
+ {
+ ($key, $value) = ($1, $2)
+ }
+ else
+ {
+ die "Parse error: $pair\n ";
+ }
+
+ # Unquote and unescape the key and value separately
+ return (&unescape($key), &unescape($value));
+}
+
+# Unquote and unescape
+sub unescape
+{
+ my $string = shift;
+
+ # First remove enclosing quotes. Backslash before the trailing
+ # quote leaves both.
+ if ($string =~ /^"(.*[^\\])"$/)
+ {
+ $string = $1;
+ }
+
+ # Second remove any backslashes inside the unquoted string.
+ # For later: Handle special sequences like \t ?
+ $string =~ s/\\(.)/$1/g;
+
+ return $string;
+}
+
+__END__
+
+=pod
+
+=head1 EXAMPLES
+
+B<import-directories.perl> F<project.import> F<project.fi>
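+
+A sketch of a complete run, assuming the configuration is in
+F<project.import> and the fast-import stream is written to F<project.fi>
+(both file names are only examples):
+
+ import-directories.perl project.import project.fi
+ mkdir project && cd project && git init
+ git fast-import <../project.fi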
+
+=head1 AUTHOR
+
+Copyright 2008-2009 Peter Krefting E<lt>peter@softwolves.pp.seE<gt>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation.
+
+=cut
diff --git a/contrib/fast-import/import-tars.perl b/contrib/fast-import/import-tars.perl
new file mode 100755
index 0000000..95438e1
--- /dev/null
+++ b/contrib/fast-import/import-tars.perl
@@ -0,0 +1,189 @@
+#!/usr/bin/perl
+
+## tar archive frontend for git-fast-import
+##
+## For example:
+##
+## mkdir project; cd project; git init
+## perl import-tars.perl *.tar.bz2
+## git whatchanged import-tars
+##
+## Use --metainfo to specify the extension for a meta data file, where
+## import-tars can read the commit message and optionally author and
+## committer information.
+##
+## echo 'This is the commit message' > myfile.tar.bz2.msg
+## perl import-tars.perl --metainfo=msg myfile.tar.bz2
+
+use strict;
+use Getopt::Long;
+
+my $metaext = '';
+
+die "usage: import-tars [--metainfo=extension] *.tar.{gz,bz2,lzma,xz,Z}\n"
+ unless GetOptions('metainfo=s' => \$metaext) && @ARGV;
+
+my $branch_name = 'import-tars';
+my $branch_ref = "refs/heads/$branch_name";
+my $author_name = $ENV{'GIT_AUTHOR_NAME'} || 'T Ar Creator';
+my $author_email = $ENV{'GIT_AUTHOR_EMAIL'} || 'tar@example.com';
+my $committer_name = $ENV{'GIT_COMMITTER_NAME'} || `git config --get user.name`;
+my $committer_email = $ENV{'GIT_COMMITTER_EMAIL'} || `git config --get user.email`;
+
+chomp($committer_name, $committer_email);
+
+open(FI, '|-', 'git', 'fast-import', '--quiet')
+ or die "Unable to start git fast-import: $!\n";
+foreach my $tar_file (@ARGV)
+{
+ my $commit_time = time;
+ $tar_file =~ m,([^/]+)$,;
+ my $tar_name = $1;
+
+ if ($tar_name =~ s/\.(tar\.gz|tgz)$//) {
+ open(I, '-|', 'gunzip', '-c', $tar_file)
+ or die "Unable to gunzip -c $tar_file: $!\n";
+ } elsif ($tar_name =~ s/\.(tar\.bz2|tbz2)$//) {
+ open(I, '-|', 'bunzip2', '-c', $tar_file)
+ or die "Unable to bunzip2 -c $tar_file: $!\n";
+ } elsif ($tar_name =~ s/\.tar\.Z$//) {
+ open(I, '-|', 'uncompress', '-c', $tar_file)
+ or die "Unable to uncompress -c $tar_file: $!\n";
+ } elsif ($tar_name =~ s/\.(tar\.(lzma|xz)|(tlz|txz))$//) {
+ open(I, '-|', 'xz', '-dc', $tar_file)
+ or die "Unable to xz -dc $tar_file: $!\n";
+ } elsif ($tar_name =~ s/\.tar$//) {
+ open(I, $tar_file) or die "Unable to open $tar_file: $!\n";
+ } else {
+ die "Unrecognized compression format: $tar_file\n";
+ }
+
+ my $author_time = 0;
+ my $next_mark = 1;
+ my $have_top_dir = 1;
+ my ($top_dir, %files);
+
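+	# Each tar entry begins with a 512-byte header block; the unpack
+	# template below follows the ustar layout (name, mode, uid, gid,
+	# size, mtime, checksum, type flag, link name, magic, version,
+	# user/group name, device numbers, prefix).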
+ while (read(I, $_, 512) == 512) {
+ my ($name, $mode, $uid, $gid, $size, $mtime,
+ $chksum, $typeflag, $linkname, $magic,
+ $version, $uname, $gname, $devmajor, $devminor,
+ $prefix) = unpack 'Z100 Z8 Z8 Z8 Z12 Z12
+ Z8 Z1 Z100 Z6
+ Z2 Z32 Z32 Z8 Z8 Z*', $_;
+ last unless length($name);
+ if ($name eq '././@LongLink') {
+ # GNU tar extension
+ if (read(I, $_, 512) != 512) {
+ die ('Short archive');
+ }
+ $name = unpack 'Z257', $_;
+ next unless $name;
+
+ my $dummy;
+ if (read(I, $_, 512) != 512) {
+ die ('Short archive');
+ }
+ ($dummy, $mode, $uid, $gid, $size, $mtime,
+ $chksum, $typeflag, $linkname, $magic,
+ $version, $uname, $gname, $devmajor, $devminor,
+ $prefix) = unpack 'Z100 Z8 Z8 Z8 Z12 Z12
+ Z8 Z1 Z100 Z6
+ Z2 Z32 Z32 Z8 Z8 Z*', $_;
+ }
+ next if $name =~ m{/\z};
+ $mode = oct $mode;
+ $size = oct $size;
+ $mtime = oct $mtime;
+ next if $typeflag == 5; # directory
+
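+		# Emit the entry as a fast-import "blob" command and mark it,
+		# so the commit below can refer to it as ":<mark>".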
+ print FI "blob\n", "mark :$next_mark\n";
+ if ($typeflag == 2) { # symbolic link
+ print FI "data ", length($linkname), "\n", $linkname;
+ $mode = 0120000;
+ } else {
+ print FI "data $size\n";
+ while ($size > 0 && read(I, $_, 512) == 512) {
+ print FI substr($_, 0, $size);
+ $size -= 512;
+ }
+ }
+ print FI "\n";
+
+ my $path;
+ if ($prefix) {
+ $path = "$prefix/$name";
+ } else {
+ $path = "$name";
+ }
+ $files{$path} = [$next_mark++, $mode];
+
+ $author_time = $mtime if $mtime > $author_time;
+ $path =~ m,^([^/]+)/,;
+ $top_dir = $1 unless $top_dir;
+ $have_top_dir = 0 if $top_dir ne $1;
+ }
+
+ my $commit_msg = "Imported from $tar_file.";
+ my $this_committer_name = $committer_name;
+ my $this_committer_email = $committer_email;
+ my $this_author_name = $author_name;
+ my $this_author_email = $author_email;
+ if ($metaext ne '') {
+		# Optionally read a commit message from <tarfile>.<metainfo extension>.
+		# A line of the form "Committer: name <e-mail>" overrides the
+		# committer and "Author: name <e-mail>" overrides the author
+		# for this tarball.
+ if (open MSG, '<', "${tar_file}.${metaext}") {
+ my $header_done = 0;
+ $commit_msg = '';
+ while (<MSG>) {
+ if (!$header_done && /^Committer:\s+([^<>]*)\s+<(.*)>\s*$/i) {
+ $this_committer_name = $1;
+ $this_committer_email = $2;
+ } elsif (!$header_done && /^Author:\s+([^<>]*)\s+<(.*)>\s*$/i) {
+ $this_author_name = $1;
+ $this_author_email = $2;
+ } elsif (!$header_done && /^$/) { # empty line ends header.
+ $header_done = 1;
+ } else {
+ $commit_msg .= $_;
+ $header_done = 1;
+ }
+ }
+ close MSG;
+ }
+ }
+
+ print FI <<EOF;
+commit $branch_ref
+author $this_author_name <$this_author_email> $author_time +0000
+committer $this_committer_name <$this_committer_email> $commit_time +0000
+data <<END_OF_COMMIT_MESSAGE
+$commit_msg
+END_OF_COMMIT_MESSAGE
+
+deleteall
+EOF
+
+ foreach my $path (keys %files)
+ {
+ my ($mark, $mode) = @{$files{$path}};
+ $path =~ s,^([^/]+)/,, if $have_top_dir;
+ $mode = $mode & 0111 ? 0755 : 0644 unless $mode == 0120000;
+ printf FI "M %o :%i %s\n", $mode, $mark, $path;
+ }
+ print FI "\n";
+
+ print FI <<EOF;
+tag $tar_name
+from $branch_ref
+tagger $author_name <$author_email> $author_time +0000
+data <<END_OF_TAG_MESSAGE
+Package $tar_name
+END_OF_TAG_MESSAGE
+
+EOF
+
+ close I;
+}
+close FI;
diff --git a/contrib/fast-import/import-zips.py b/contrib/fast-import/import-zips.py
new file mode 100755
index 0000000..d12c296
--- /dev/null
+++ b/contrib/fast-import/import-zips.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+## zip archive frontend for git-fast-import
+##
+## For example:
+##
+## mkdir project; cd project; git init
+## python import-zips.py *.zip
+## git log --stat import-zips
+
+from os import popen, path
+from sys import argv, exit, hexversion, stderr
+from time import mktime
+from zipfile import ZipFile
+
+if hexversion < 0x01060000:
+ # The limiter is the zipfile module
+ stderr.write("import-zips.py: requires Python 1.6.0 or later.\n")
+ exit(1)
+
+if len(argv) < 2:
+ print 'usage:', argv[0], '<zipfile>...'
+ exit(1)
+
+branch_ref = 'refs/heads/import-zips'
+committer_name = 'Z Ip Creator'
+committer_email = 'zip@example.com'
+
+fast_import = popen('git fast-import --quiet', 'w')
+def printlines(lines):
+	for line in lines:
+		fast_import.write(line + "\n")
+
+for zipfile in argv[1:]:
+ commit_time = 0
+ next_mark = 1
+ common_prefix = None
+ mark = dict()
+
+ zip = ZipFile(zipfile, 'r')
+ for name in zip.namelist():
+ if name.endswith('/'):
+ continue
+ info = zip.getinfo(name)
+
+ if commit_time < info.date_time:
+ commit_time = info.date_time
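+		# Track the longest leading directory prefix shared by all
+		# entries; it is stripped from the paths recorded in the
+		# commit further below.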
+ if common_prefix == None:
+ common_prefix = name[:name.rfind('/') + 1]
+ else:
+ while not name.startswith(common_prefix):
+ last_slash = common_prefix[:-1].rfind('/') + 1
+ common_prefix = common_prefix[:last_slash]
+
+ mark[name] = ':' + str(next_mark)
+ next_mark += 1
+
+ printlines(('blob', 'mark ' + mark[name], \
+ 'data ' + str(info.file_size)))
+ fast_import.write(zip.read(name) + "\n")
+
+ committer = committer_name + ' <' + committer_email + '> %d +0000' % \
+ mktime(commit_time + (0, 0, 0))
+
+ printlines(('commit ' + branch_ref, 'committer ' + committer, \
+ 'data <<EOM', 'Imported from ' + zipfile + '.', 'EOM', \
+ '', 'deleteall'))
+
+ for name in mark.keys():
+ fast_import.write('M 100644 ' + mark[name] + ' ' +
+ name[len(common_prefix):] + "\n")
+
+ printlines(('', 'tag ' + path.basename(zipfile), \
+ 'from ' + branch_ref, 'tagger ' + committer, \
+ 'data <<EOM', 'Package ' + zipfile, 'EOM', ''))
+
+if fast_import.close():
+ exit(1)
diff --git a/contrib/git-jump/README b/contrib/git-jump/README
new file mode 100644
index 0000000..1cebc32
--- /dev/null
+++ b/contrib/git-jump/README
@@ -0,0 +1,92 @@
+git-jump
+========
+
+Git-jump is a script for helping you jump to "interesting" parts of your
+project in your editor. It works by outputting a set of interesting
+spots in the "quickfix" format, which editors like vim can use as a
+queue of places to visit (this feature is usually used to jump to errors
+produced by a compiler). For example, given a diff like this:
+
+------------------------------------
+diff --git a/foo.c b/foo.c
+index a655540..5a59044 100644
+--- a/foo.c
++++ b/foo.c
+@@ -1,3 +1,3 @@
+ int main(void) {
+- printf("hello word!\n");
++ printf("hello world!\n");
+ }
+-----------------------------------
+
+git-jump will feed this to the editor:
+
+-----------------------------------
+foo.c:2: printf("hello word!\n");
+-----------------------------------
+
+Obviously this trivial case isn't that interesting; you could just open
+`foo.c` yourself. But when you have many changes scattered across a
+project, you can use the editor's support to "jump" from point to point.
+
+Git-jump can generate three types of interesting lists:
+
+ 1. The beginning of any diff hunks.
+
+ 2. The beginning of any merge conflict markers.
+
+ 3. Any grep matches.
+
+
+Using git-jump
+--------------
+
+To use it, just drop git-jump in your PATH, and then invoke it like
+this:
+
+--------------------------------------------------
+# jump to changes not yet staged for commit
+git jump diff
+
+# jump to changes that are staged for commit; you can give
+# arbitrary diff options
+git jump diff --cached
+
+# jump to merge conflicts
+git jump merge
+
+# jump to all instances of foo_bar
+git jump grep foo_bar
+
+# same as above, but case-insensitive; you can give
+# arbitrary grep options
+git jump grep -i foo_bar
+--------------------------------------------------
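+
+If you use one of these modes a lot, a git alias can shorten it further;
+a possible setup (the alias name `jd` is arbitrary):
+
+--------------------------------------------------
+git config --global alias.jd 'jump diff'
+
+# afterwards this is equivalent to "git jump diff"
+git jd
+--------------------------------------------------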
+
+
+Related Programs
+----------------
+
+You can accomplish some of the same things with individual tools. For
+example, you can use `git mergetool` to start vimdiff on each unmerged
+file. `git jump merge` is for the vim-wielding luddite who just wants to
+jump straight to the conflict text with no fanfare.
+
+As of git v1.7.2, `git grep` knows the `--open-files-in-pager` option,
+which does something similar to `git jump grep`. However, it is limited
+to positioning the cursor to the correct line in only the first file,
+leaving you to locate subsequent hits in that file or other files using
+the editor or pager. By contrast, git-jump provides the editor with a
+complete list of files and line numbers for each match.
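+
+For example, the two commands below are roughly comparable (`foo_bar` is
+just a placeholder pattern); the first positions the cursor only in the
+first matching file, while the second hands the editor the whole list of
+matches:
+
+--------------------------------------------------
+git grep --open-files-in-pager=vim foo_bar
+git jump grep foo_bar
+--------------------------------------------------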
+
+
+Limitations
+-----------
+
+This script was written and tested with vim. Given that the quickfix
+format is the same as what gcc produces, I expect emacs users have a
+similar feature for iterating through the list, but I know nothing about
+how to activate it.
+
+The shell snippets to generate the quickfix lines will almost certainly
+choke on filenames with exotic characters (like newlines).
diff --git a/contrib/git-jump/git-jump b/contrib/git-jump/git-jump
new file mode 100755
index 0000000..dc90cd6
--- /dev/null
+++ b/contrib/git-jump/git-jump
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+usage() {
+ cat <<\EOF
+usage: git jump <mode> [<args>]
+
+Jump to interesting elements in an editor.
+The <mode> parameter is one of:
+
+diff: elements are diff hunks. Arguments are given to diff.
+
+merge: elements are merge conflicts. Arguments are ignored.
+
+grep: elements are grep hits. Arguments are given to grep.
+EOF
+}
+
+open_editor() {
+ editor=`git var GIT_EDITOR`
+ eval "$editor -q \$1"
+}
+
+mode_diff() {
+ git diff --no-prefix --relative "$@" |
+ perl -ne '
+ if (m{^\+\+\+ (.*)}) { $file = $1; next }
+ defined($file) or next;
+ if (m/^@@ .*\+(\d+)/) { $line = $1; next }
+ defined($line) or next;
+ if (/^ /) { $line++; next }
+ if (/^[-+]\s*(.*)/) {
+ print "$file:$line: $1\n";
+ $line = undef;
+ }
+ '
+}
+
+mode_merge() {
+ git ls-files -u |
+ perl -pe 's/^.*?\t//' |
+ sort -u |
+ while IFS= read fn; do
+ grep -Hn '^<<<<<<<' "$fn"
+ done
+}
+
+# Grep -n generates nice quickfix-looking lines by itself,
+# but let's clean up extra whitespace, so they look better if the
+# editor shows them to us in the status bar.
+mode_grep() {
+ git grep -n "$@" |
+ perl -pe '
+ s/[ \t]+/ /g;
+ s/^ *//;
+ '
+}
+
+if test $# -lt 1; then
+ usage >&2
+ exit 1
+fi
+mode=$1; shift
+
+trap 'rm -f "$tmp"' 0 1 2 3 15
+tmp=`mktemp -t git-jump.XXXXXX` || exit 1
+type "mode_$mode" >/dev/null 2>&1 || { usage >&2; exit 1; }
+"mode_$mode" "$@" >"$tmp"
+test -s "$tmp" || exit 0
+open_editor "$tmp"
diff --git a/contrib/git-resurrect.sh b/contrib/git-resurrect.sh
new file mode 100755
index 0000000..a4ed4c3
--- /dev/null
+++ b/contrib/git-resurrect.sh
@@ -0,0 +1,181 @@
+#!/bin/sh
+
+USAGE="[-a] [-r] [-m] [-t] [-n] [-b <newname>] <name>"
+LONG_USAGE="git-resurrect attempts to find traces of a branch tip
+called <name>, and tries to resurrect it. Currently, the reflog is
+searched for checkout messages, and with -r also merge messages. With
+-m and -t, the history of all refs is scanned for Merge <name> into
+other/Merge <other> into <name> (respectively) commit subjects, which
+is rather slow but allows you to resurrect other people's topic
+branches."
+
+OPTIONS_KEEPDASHDASH=
+OPTIONS_SPEC="\
+git resurrect $USAGE
+--
+b,branch= save branch as <newname> instead of <name>
+a,all same as -l -r -m -t
+k,keep-going full rev-list scan (instead of first match)
+l,reflog scan reflog for checkouts (enabled by default)
+r,reflog-merges scan for merges recorded in reflog
+m,merges scan for merges into other branches (slow)
+t,merge-targets scan for merges of other branches into <name>
+n,dry-run don't recreate the branch"
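+
+# For example (the branch name "topic/lost" is only an illustration),
+# with this script installed on your $PATH as "git-resurrect":
+#
+#	git resurrect -a -n topic/lost	# only list candidate tips
+#	git resurrect -a -b topic/saved topic/lost
+#					# recreate it as branch "topic/saved"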
+
+. git-sh-setup
+
+search_reflog () {
+ sed -ne 's~^\([^ ]*\) .*\tcheckout: moving from '"$1"' .*~\1~p' \
+ < "$GIT_DIR"/logs/HEAD
+}
+
+search_reflog_merges () {
+ git rev-parse $(
+ sed -ne 's~^[^ ]* \([^ ]*\) .*\tmerge '"$1"':.*~\1^2~p' \
+ < "$GIT_DIR"/logs/HEAD
+ )
+}
+
+_x40="[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]"
+_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40"
+
+search_merges () {
+ git rev-list --all --grep="Merge branch '$1'" \
+ --pretty=tformat:"%P %s" |
+ sed -ne "/^$_x40 \($_x40\) Merge .*/ {s//\1/p;$early_exit}"
+}
+
+search_merge_targets () {
+	git rev-list --all --grep="Merge branch '[^']*' into $branch\$" \
+		--pretty=tformat:"%H %s" |
+ sed -ne "/^\($_x40\) Merge .*/ {s//\1/p;$early_exit} "
+}
+
+dry_run=
+early_exit=q
+scan_reflog=t
+scan_reflog_merges=
+scan_merges=
+scan_merge_targets=
+new_name=
+
+while test "$#" != 0; do
+ case "$1" in
+ -b|--branch)
+ shift
+ new_name="$1"
+ ;;
+ -n|--dry-run)
+ dry_run=t
+ ;;
+ --no-dry-run)
+ dry_run=
+ ;;
+ -k|--keep-going)
+ early_exit=
+ ;;
+ --no-keep-going)
+ early_exit=q
+ ;;
+ -m|--merges)
+ scan_merges=t
+ ;;
+ --no-merges)
+ scan_merges=
+ ;;
+ -l|--reflog)
+ scan_reflog=t
+ ;;
+ --no-reflog)
+ scan_reflog=
+ ;;
+	-r|--reflog-merges)
+		scan_reflog_merges=t
+		;;
+	--no-reflog-merges)
+ scan_reflog_merges=
+ ;;
+ -t|--merge-targets)
+ scan_merge_targets=t
+ ;;
+ --no-merge-targets)
+ scan_merge_targets=
+ ;;
+ -a|--all)
+ scan_reflog=t
+ scan_reflog_merges=t
+ scan_merges=t
+ scan_merge_targets=t
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ ;;
+ esac
+ shift
+done
+
+test "$#" = 1 || usage
+
+all_strategies="$scan_reflog$scan_reflog_merges$scan_merges$scan_merge_targets"
+if test -z "$all_strategies"; then
+ die "must enable at least one of -lrmt"
+fi
+
+branch="$1"
+test -z "$new_name" && new_name="$branch"
+
+if test ! -z "$scan_reflog"; then
+ if test -r "$GIT_DIR"/logs/HEAD; then
+ candidates="$(search_reflog $branch)"
+ else
+ die 'reflog scanning requested, but' \
+ '$GIT_DIR/logs/HEAD not readable'
+ fi
+fi
+if test ! -z "$scan_reflog_merges"; then
+ if test -r "$GIT_DIR"/logs/HEAD; then
+ candidates="$candidates $(search_reflog_merges $branch)"
+ else
+ die 'reflog scanning requested, but' \
+ '$GIT_DIR/logs/HEAD not readable'
+ fi
+fi
+if test ! -z "$scan_merges"; then
+ candidates="$candidates $(search_merges $branch)"
+fi
+if test ! -z "$scan_merge_targets"; then
+ candidates="$candidates $(search_merge_targets $branch)"
+fi
+
+candidates="$(git rev-parse $candidates | sort -u)"
+
+if test -z "$candidates"; then
+ hint=
+ test "z$all_strategies" != "ztttt" \
+ && hint=" (maybe try again with -a)"
+ die "no candidates for $branch found$hint"
+fi
+
+echo "** Candidates for $branch **"
+for cmt in $candidates; do
+ git --no-pager log --pretty=tformat:"%ct:%h [%cr] %s" --abbrev-commit -1 $cmt
+done \
+| sort -n | cut -d: -f2-
+
+newest="$(git rev-list -1 $candidates)"
+if test ! -z "$dry_run"; then
+ printf "** Most recent: "
+ git --no-pager log -1 --pretty=tformat:"%h %s" $newest
+elif ! git rev-parse --verify --quiet $new_name >/dev/null; then
+ printf "** Restoring $new_name to "
+ git --no-pager log -1 --pretty=tformat:"%h %s" $newest
+ git branch $new_name $newest
+else
+ printf "Most recent: "
+ git --no-pager log -1 --pretty=tformat:"%h %s" $newest
+ echo "** $new_name already exists, doing nothing"
+fi
diff --git a/contrib/git-shell-commands/README b/contrib/git-shell-commands/README
new file mode 100644
index 0000000..438463b
--- /dev/null
+++ b/contrib/git-shell-commands/README
@@ -0,0 +1,18 @@
+Sample programs callable through git-shell. Place a directory named
+'git-shell-commands' in the home directory of a user whose shell is
+git-shell. Then anyone logging in as that user will be able to run
+executables in the 'git-shell-commands' directory.
+
+Provided commands:
+
+help: Prints out the names of available commands. When run
+interactively, git-shell will automatically run 'help' on startup,
+provided it exists.
+
+list: Displays any bare repository whose name ends with ".git" under
+the user's home directory. No other git repositories are visible,
+although they might be clonable through git-shell. 'list' is designed
+to minimize the number of calls to git that must be made in finding
+available repositories; if your setup has additional repositories that
+should be user-discoverable, you may wish to modify 'list'
+accordingly.
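+
+A rough setup sketch (the user name 'git', the shell path and the use of
+'useradd' are only examples for a typical Linux system):
+
+    useradd -m -s /usr/bin/git-shell git
+    mkdir ~git/git-shell-commands
+    install -m 0755 help list ~git/git-shell-commands/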
diff --git a/contrib/git-shell-commands/help b/contrib/git-shell-commands/help
new file mode 100755
index 0000000..535770c
--- /dev/null
+++ b/contrib/git-shell-commands/help
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+if tty -s
+then
+ echo "Run 'help' for help, or 'exit' to leave. Available commands:"
+else
+ echo "Run 'help' for help. Available commands:"
+fi
+
+cd "$(dirname "$0")"
+
+for cmd in *
+do
+ case "$cmd" in
+ help) ;;
+ *) [ -f "$cmd" ] && [ -x "$cmd" ] && echo "$cmd" ;;
+ esac
+done
diff --git a/contrib/git-shell-commands/list b/contrib/git-shell-commands/list
new file mode 100755
index 0000000..6f89938
--- /dev/null
+++ b/contrib/git-shell-commands/list
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+print_if_bare_repo='
+	if test "$(git --git-dir="$1" rev-parse --is-bare-repository)" = true
+ then
+ printf "%s\n" "${1#./}"
+ fi
+'
+
+find . -type d -name "*.git" -exec sh -c "$print_if_bare_repo" -- \{} \; -prune 2>/dev/null
diff --git a/contrib/gitview/gitview b/contrib/gitview/gitview
new file mode 100755
index 0000000..4c99dfb
--- /dev/null
+++ b/contrib/gitview/gitview
@@ -0,0 +1,1305 @@
+#! /usr/bin/env python
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+""" gitview
+GUI browser for git repository
+This program is based on bzrk by Scott James Remnant <scott@ubuntu.com>
+"""
+__copyright__ = "Copyright (C) 2006 Hewlett-Packard Development Company, L.P."
+__copyright__ = "Copyright (C) 2007 Aneesh Kumar K.V <aneesh.kumar@gmail.com>"
+__author__ = "Aneesh Kumar K.V <aneesh.kumar@gmail.com>"
+
+
+import sys
+import os
+import gtk
+import pygtk
+import pango
+import re
+import time
+import gobject
+import cairo
+import math
+import string
+import fcntl
+
+have_gtksourceview2 = False
+have_gtksourceview = False
+try:
+ import gtksourceview2
+ have_gtksourceview2 = True
+except ImportError:
+ try:
+ import gtksourceview
+ have_gtksourceview = True
+ except ImportError:
+ print "Running without gtksourceview2 or gtksourceview module"
+
+re_ident = re.compile('(author|committer) (?P<ident>.*) (?P<epoch>\d+) (?P<tz>[+-]\d{4})')
+
+def list_to_string(args, skip):
+ count = len(args)
+ i = skip
+ str_arg=" "
+ while (i < count ):
+ str_arg = str_arg + args[i]
+ str_arg = str_arg + " "
+ i = i+1
+
+ return str_arg
+
+def show_date(epoch, tz):
+ secs = float(epoch)
+ tzsecs = float(tz[1:3]) * 3600
+ tzsecs += float(tz[3:5]) * 60
+ if (tz[0] == "+"):
+ secs += tzsecs
+ else:
+ secs -= tzsecs
+
+ return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(secs))
+
+def get_source_buffer_and_view():
+ if have_gtksourceview2:
+ buffer = gtksourceview2.Buffer()
+ slm = gtksourceview2.LanguageManager()
+ gsl = slm.get_language("diff")
+ buffer.set_highlight_syntax(True)
+ buffer.set_language(gsl)
+ view = gtksourceview2.View(buffer)
+ elif have_gtksourceview:
+ buffer = gtksourceview.SourceBuffer()
+ slm = gtksourceview.SourceLanguagesManager()
+ gsl = slm.get_language_from_mime_type("text/x-patch")
+ buffer.set_highlight(True)
+ buffer.set_language(gsl)
+ view = gtksourceview.SourceView(buffer)
+ else:
+ buffer = gtk.TextBuffer()
+ view = gtk.TextView(buffer)
+ return (buffer, view)
+
+
+class CellRendererGraph(gtk.GenericCellRenderer):
+ """Cell renderer for directed graph.
+
+ This module contains the implementation of a custom GtkCellRenderer that
+ draws part of the directed graph based on the lines suggested by the code
+ in graph.py.
+
+ Because we're shiny, we use Cairo to do this, and because we're naughty
+ we cheat and draw over the bits of the TreeViewColumn that are supposed to
+ just be for the background.
+
+ Properties:
+ node (column, colour, [ names ]) tuple to draw revision node,
+ in_lines (start, end, colour) tuple list to draw inward lines,
+ out_lines (start, end, colour) tuple list to draw outward lines.
+ """
+
+ __gproperties__ = {
+ "node": ( gobject.TYPE_PYOBJECT, "node",
+ "revision node instruction",
+ gobject.PARAM_WRITABLE
+ ),
+ "in-lines": ( gobject.TYPE_PYOBJECT, "in-lines",
+ "instructions to draw lines into the cell",
+ gobject.PARAM_WRITABLE
+ ),
+ "out-lines": ( gobject.TYPE_PYOBJECT, "out-lines",
+ "instructions to draw lines out of the cell",
+ gobject.PARAM_WRITABLE
+ ),
+ }
+
+ def do_set_property(self, property, value):
+ """Set properties from GObject properties."""
+ if property.name == "node":
+ self.node = value
+ elif property.name == "in-lines":
+ self.in_lines = value
+ elif property.name == "out-lines":
+ self.out_lines = value
+ else:
+ raise AttributeError, "no such property: '%s'" % property.name
+
+ def box_size(self, widget):
+ """Calculate box size based on widget's font.
+
+ Cache this as it's probably expensive to get. It ensures that we
+ draw the graph at least as large as the text.
+ """
+ try:
+ return self._box_size
+ except AttributeError:
+ pango_ctx = widget.get_pango_context()
+ font_desc = widget.get_style().font_desc
+ metrics = pango_ctx.get_metrics(font_desc)
+
+ ascent = pango.PIXELS(metrics.get_ascent())
+ descent = pango.PIXELS(metrics.get_descent())
+
+ self._box_size = ascent + descent + 6
+ return self._box_size
+
+ def set_colour(self, ctx, colour, bg, fg):
+ """Set the context source colour.
+
+ Picks a distinct colour based on an internal wheel; the bg
+ parameter provides the value that should be assigned to the 'zero'
+ colours and the fg parameter provides the multiplier that should be
+ applied to the foreground colours.
+ """
+ colours = [
+ ( 1.0, 0.0, 0.0 ),
+ ( 1.0, 1.0, 0.0 ),
+ ( 0.0, 1.0, 0.0 ),
+ ( 0.0, 1.0, 1.0 ),
+ ( 0.0, 0.0, 1.0 ),
+ ( 1.0, 0.0, 1.0 ),
+ ]
+
+ colour %= len(colours)
+ red = (colours[colour][0] * fg) or bg
+ green = (colours[colour][1] * fg) or bg
+ blue = (colours[colour][2] * fg) or bg
+
+ ctx.set_source_rgb(red, green, blue)
+
+ def on_get_size(self, widget, cell_area):
+ """Return the size we need for this cell.
+
+ Each cell is drawn individually and is only as wide as it needs
+ to be, we let the TreeViewColumn take care of making them all
+ line up.
+ """
+ box_size = self.box_size(widget)
+
+ cols = self.node[0]
+ for start, end, colour in self.in_lines + self.out_lines:
+ cols = int(max(cols, start, end))
+
+ (column, colour, names) = self.node
+ names_len = 0
+ if (len(names) != 0):
+ for item in names:
+ names_len += len(item)
+
+ width = box_size * (cols + 1 ) + names_len
+ height = box_size
+
+ # FIXME I have no idea how to use cell_area properly
+ return (0, 0, width, height)
+
+ def on_render(self, window, widget, bg_area, cell_area, exp_area, flags):
+ """Render an individual cell.
+
+ Draws the cell contents using cairo, taking care to clip what we
+ do to within the background area so we don't draw over other cells.
+ Note that we're a bit naughty there and should really be drawing
+ in the cell_area (or even the exposed area), but we explicitly don't
+ want any gutter.
+
+ We try and be a little clever, if the line we need to draw is going
+ to cross other columns we actually draw it as in the .---' style
+ instead of a pure diagonal ... this reduces confusion by an
+ incredible amount.
+ """
+ ctx = window.cairo_create()
+ ctx.rectangle(bg_area.x, bg_area.y, bg_area.width, bg_area.height)
+ ctx.clip()
+
+ box_size = self.box_size(widget)
+
+ ctx.set_line_width(box_size / 8)
+ ctx.set_line_cap(cairo.LINE_CAP_SQUARE)
+
+ # Draw lines into the cell
+ for start, end, colour in self.in_lines:
+ ctx.move_to(cell_area.x + box_size * start + box_size / 2,
+ bg_area.y - bg_area.height / 2)
+
+ if start - end > 1:
+ ctx.line_to(cell_area.x + box_size * start, bg_area.y)
+ ctx.line_to(cell_area.x + box_size * end + box_size, bg_area.y)
+ elif start - end < -1:
+ ctx.line_to(cell_area.x + box_size * start + box_size,
+ bg_area.y)
+ ctx.line_to(cell_area.x + box_size * end, bg_area.y)
+
+ ctx.line_to(cell_area.x + box_size * end + box_size / 2,
+ bg_area.y + bg_area.height / 2)
+
+ self.set_colour(ctx, colour, 0.0, 0.65)
+ ctx.stroke()
+
+ # Draw lines out of the cell
+ for start, end, colour in self.out_lines:
+ ctx.move_to(cell_area.x + box_size * start + box_size / 2,
+ bg_area.y + bg_area.height / 2)
+
+ if start - end > 1:
+ ctx.line_to(cell_area.x + box_size * start,
+ bg_area.y + bg_area.height)
+ ctx.line_to(cell_area.x + box_size * end + box_size,
+ bg_area.y + bg_area.height)
+ elif start - end < -1:
+ ctx.line_to(cell_area.x + box_size * start + box_size,
+ bg_area.y + bg_area.height)
+ ctx.line_to(cell_area.x + box_size * end,
+ bg_area.y + bg_area.height)
+
+ ctx.line_to(cell_area.x + box_size * end + box_size / 2,
+ bg_area.y + bg_area.height / 2 + bg_area.height)
+
+ self.set_colour(ctx, colour, 0.0, 0.65)
+ ctx.stroke()
+
+ # Draw the revision node in the right column
+ (column, colour, names) = self.node
+ ctx.arc(cell_area.x + box_size * column + box_size / 2,
+ cell_area.y + cell_area.height / 2,
+ box_size / 4, 0, 2 * math.pi)
+
+
+ self.set_colour(ctx, colour, 0.0, 0.5)
+ ctx.stroke_preserve()
+
+ self.set_colour(ctx, colour, 0.5, 1.0)
+ ctx.fill_preserve()
+
+ if (len(names) != 0):
+ name = " "
+ for item in names:
+ name = name + item + " "
+
+ ctx.set_font_size(13)
+ if (flags & 1):
+ self.set_colour(ctx, colour, 0.5, 1.0)
+ else:
+ self.set_colour(ctx, colour, 0.0, 0.5)
+ ctx.show_text(name)
+
+class Commit(object):
+	""" This represents a commit object obtained after parsing the git-rev-list
+ output """
+
+ __slots__ = ['children_sha1', 'message', 'author', 'date', 'committer',
+ 'commit_date', 'commit_sha1', 'parent_sha1']
+
+ children_sha1 = {}
+
+ def __init__(self, commit_lines):
+ self.message = ""
+ self.author = ""
+ self.date = ""
+ self.committer = ""
+ self.commit_date = ""
+ self.commit_sha1 = ""
+ self.parent_sha1 = [ ]
+ self.parse_commit(commit_lines)
+
+
+ def parse_commit(self, commit_lines):
+
+		# The first line contains the sha1 of this commit followed by its parents
+ line = string.strip(commit_lines[0])
+ sha1 = re.split(" ", line)
+ self.commit_sha1 = sha1[0]
+ self.parent_sha1 = sha1[1:]
+
+ #build the child list
+ for parent_id in self.parent_sha1:
+ try:
+ Commit.children_sha1[parent_id].append(self.commit_sha1)
+ except KeyError:
+ Commit.children_sha1[parent_id] = [self.commit_sha1]
+
+		# If this commit has no parent
+ if (len(self.parent_sha1) == 0):
+ self.parent_sha1 = [0]
+
+ for line in commit_lines[1:]:
+ m = re.match("^ ", line)
+ if (m != None):
+ # First line of the commit message used for short log
+ if self.message == "":
+ self.message = string.strip(line)
+ continue
+
+ m = re.match("tree", line)
+ if (m != None):
+ continue
+
+ m = re.match("parent", line)
+ if (m != None):
+ continue
+
+ m = re_ident.match(line)
+ if (m != None):
+ date = show_date(m.group('epoch'), m.group('tz'))
+ if m.group(1) == "author":
+ self.author = m.group('ident')
+ self.date = date
+ elif m.group(1) == "committer":
+ self.committer = m.group('ident')
+ self.commit_date = date
+
+ continue
+
+ def get_message(self, with_diff=0):
+ if (with_diff == 1):
+ message = self.diff_tree()
+ else:
+ fp = os.popen("git cat-file commit " + self.commit_sha1)
+ message = fp.read()
+ fp.close()
+
+ return message
+
+ def diff_tree(self):
+ fp = os.popen("git diff-tree --pretty --cc -v -p --always " + self.commit_sha1)
+ diff = fp.read()
+ fp.close()
+ return diff
+
+class AnnotateWindow(object):
+ """Annotate window.
+ This object represents and manages a single window containing the
+ annotate information of the file
+ """
+
+ def __init__(self):
+ self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+ self.window.set_border_width(0)
+ self.window.set_title("Git repository browser annotation window")
+ self.prev_read = ""
+
+ # Use two thirds of the screen by default
+ screen = self.window.get_screen()
+ monitor = screen.get_monitor_geometry(0)
+ width = int(monitor.width * 0.66)
+ height = int(monitor.height * 0.66)
+ self.window.set_default_size(width, height)
+
+ def add_file_data(self, filename, commit_sha1, line_num):
+ fp = os.popen("git cat-file blob " + commit_sha1 +":"+filename)
+ i = 1;
+ for line in fp.readlines():
+ line = string.rstrip(line)
+ self.model.append(None, ["HEAD", filename, line, i])
+ i = i+1
+ fp.close()
+
+ # now set the cursor position
+ self.treeview.set_cursor(line_num-1)
+ self.treeview.grab_focus()
+
+ def _treeview_cursor_cb(self, *args):
+ """Callback for when the treeview cursor changes."""
+ (path, col) = self.treeview.get_cursor()
+ commit_sha1 = self.model[path][0]
+ commit_msg = ""
+ fp = os.popen("git cat-file commit " + commit_sha1)
+ for line in fp.readlines():
+ commit_msg = commit_msg + line
+ fp.close()
+
+ self.commit_buffer.set_text(commit_msg)
+
+ def _treeview_row_activated(self, *args):
+ """Callback for when the treeview row gets selected."""
+ (path, col) = self.treeview.get_cursor()
+ commit_sha1 = self.model[path][0]
+ filename = self.model[path][1]
+ line_num = self.model[path][3]
+
+ window = AnnotateWindow();
+ fp = os.popen("git rev-parse "+ commit_sha1 + "~1")
+ commit_sha1 = string.strip(fp.readline())
+ fp.close()
+ window.annotate(filename, commit_sha1, line_num)
+
+ def data_ready(self, source, condition):
+ while (1):
+ try :
+ # A simple readline doesn't work
+ # a readline bug ??
+ buffer = source.read(100)
+
+ except:
+				# resource temporarily not available
+ return True
+
+ if (len(buffer) == 0):
+ gobject.source_remove(self.io_watch_tag)
+ source.close()
+ return False
+
+ if (self.prev_read != ""):
+ buffer = self.prev_read + buffer
+ self.prev_read = ""
+
+ if (buffer[len(buffer) -1] != '\n'):
+ try:
+ newline_index = buffer.rindex("\n")
+ except ValueError:
+ newline_index = 0
+
+ self.prev_read = buffer[newline_index:(len(buffer))]
+ buffer = buffer[0:newline_index]
+
+ for buff in buffer.split("\n"):
+ annotate_line = re.compile('^([0-9a-f]{40}) (.+) (.+) (.+)$')
+ m = annotate_line.match(buff)
+ if not m:
+ annotate_line = re.compile('^(filename) (.+)$')
+ m = annotate_line.match(buff)
+ if not m:
+ continue
+ filename = m.group(2)
+ else:
+ self.commit_sha1 = m.group(1)
+ self.source_line = int(m.group(2))
+ self.result_line = int(m.group(3))
+ self.count = int(m.group(4))
+ #set the details only when we have the file name
+ continue
+
+ while (self.count > 0):
+ # set at result_line + count-1 the sha1 as commit_sha1
+ self.count = self.count - 1
+ iter = self.model.iter_nth_child(None, self.result_line + self.count-1)
+ self.model.set(iter, 0, self.commit_sha1, 1, filename, 3, self.source_line)
+
+
+ def annotate(self, filename, commit_sha1, line_num):
+ # verify the commit_sha1 specified has this filename
+
+ fp = os.popen("git ls-tree "+ commit_sha1 + " -- " + filename)
+ line = string.strip(fp.readline())
+ if line == '':
+			# pop up a message saying the file is not part of this commit
+ fp.close()
+ dialog = gtk.MessageDialog(parent=None, flags=0,
+ type=gtk.MESSAGE_WARNING, buttons=gtk.BUTTONS_CLOSE,
+ message_format=None)
+ dialog.set_markup("The file %s is not present in the parent commit %s" % (filename, commit_sha1))
+ dialog.run()
+ dialog.destroy()
+ return
+
+ fp.close()
+
+ vpan = gtk.VPaned();
+ self.window.add(vpan);
+ vpan.show()
+
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ vpan.pack1(scrollwin, True, True);
+ scrollwin.show()
+
+ self.model = gtk.TreeStore(str, str, str, int)
+ self.treeview = gtk.TreeView(self.model)
+ self.treeview.set_rules_hint(True)
+ self.treeview.set_search_column(0)
+ self.treeview.connect("cursor-changed", self._treeview_cursor_cb)
+ self.treeview.connect("row-activated", self._treeview_row_activated)
+ scrollwin.add(self.treeview)
+ self.treeview.show()
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 10)
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Commit")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 0)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 20)
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("File Name")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 1)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 20)
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Data")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 2)
+ self.treeview.append_column(column)
+
+ # The commit message window
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ vpan.pack2(scrollwin, True, True);
+ scrollwin.show()
+
+ commit_text = gtk.TextView()
+ self.commit_buffer = gtk.TextBuffer()
+ commit_text.set_buffer(self.commit_buffer)
+ scrollwin.add(commit_text)
+ commit_text.show()
+
+ self.window.show()
+
+ self.add_file_data(filename, commit_sha1, line_num)
+
+ fp = os.popen("git blame --incremental -C -C -- " + filename + " " + commit_sha1)
+ flags = fcntl.fcntl(fp.fileno(), fcntl.F_GETFL)
+ fcntl.fcntl(fp.fileno(), fcntl.F_SETFL, flags | os.O_NONBLOCK)
+ self.io_watch_tag = gobject.io_add_watch(fp, gobject.IO_IN, self.data_ready)
+
+
+class DiffWindow(object):
+ """Diff window.
+ This object represents and manages a single window containing the
+ differences between two revisions on a branch.
+ """
+
+ def __init__(self):
+ self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+ self.window.set_border_width(0)
+ self.window.set_title("Git repository browser diff window")
+
+ # Use two thirds of the screen by default
+ screen = self.window.get_screen()
+ monitor = screen.get_monitor_geometry(0)
+ width = int(monitor.width * 0.66)
+ height = int(monitor.height * 0.66)
+ self.window.set_default_size(width, height)
+
+
+ self.construct()
+
+ def construct(self):
+ """Construct the window contents."""
+ vbox = gtk.VBox()
+ self.window.add(vbox)
+ vbox.show()
+
+ menu_bar = gtk.MenuBar()
+ save_menu = gtk.ImageMenuItem(gtk.STOCK_SAVE)
+ save_menu.connect("activate", self.save_menu_response, "save")
+ save_menu.show()
+ menu_bar.append(save_menu)
+ vbox.pack_start(menu_bar, expand=False, fill=True)
+ menu_bar.show()
+
+ hpan = gtk.HPaned()
+
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ hpan.pack1(scrollwin, True, True)
+ scrollwin.show()
+
+ (self.buffer, sourceview) = get_source_buffer_and_view()
+
+ sourceview.set_editable(False)
+ sourceview.modify_font(pango.FontDescription("Monospace"))
+ scrollwin.add(sourceview)
+ sourceview.show()
+
+ # The file hierarchy: a scrollable treeview
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ scrollwin.set_size_request(20, -1)
+ hpan.pack2(scrollwin, True, True)
+ scrollwin.show()
+
+ self.model = gtk.TreeStore(str, str, str)
+ self.treeview = gtk.TreeView(self.model)
+ self.treeview.set_search_column(1)
+ self.treeview.connect("cursor-changed", self._treeview_clicked)
+ scrollwin.add(self.treeview)
+ self.treeview.show()
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 20)
+ column = gtk.TreeViewColumn("Select to annotate")
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 0)
+ self.treeview.append_column(column)
+
+ vbox.pack_start(hpan, expand=True, fill=True)
+ hpan.show()
+
+ def _treeview_clicked(self, *args):
+ """Callback for when the treeview cursor changes."""
+ (path, col) = self.treeview.get_cursor()
+ specific_file = self.model[path][1]
+ commit_sha1 = self.model[path][2]
+ if specific_file == None :
+ return
+ elif specific_file == "" :
+ specific_file = None
+
+ window = AnnotateWindow();
+ window.annotate(specific_file, commit_sha1, 1)
+
+
+ def commit_files(self, commit_sha1, parent_sha1):
+ self.model.clear()
+ add = self.model.append(None, [ "Added", None, None])
+ dele = self.model.append(None, [ "Deleted", None, None])
+ mod = self.model.append(None, [ "Modified", None, None])
+ diff_tree = re.compile('^(:.{6}) (.{6}) (.{40}) (.{40}) (A|D|M)\s(.+)$')
+ fp = os.popen("git diff-tree -r --no-commit-id " + parent_sha1 + " " + commit_sha1)
+ while 1:
+ line = string.strip(fp.readline())
+ if line == '':
+ break
+ m = diff_tree.match(line)
+ if not m:
+ continue
+
+ attr = m.group(5)
+ filename = m.group(6)
+ if attr == "A":
+ self.model.append(add, [filename, filename, commit_sha1])
+ elif attr == "D":
+ self.model.append(dele, [filename, filename, commit_sha1])
+ elif attr == "M":
+ self.model.append(mod, [filename, filename, commit_sha1])
+ fp.close()
+
+ self.treeview.expand_all()
+
+ def set_diff(self, commit_sha1, parent_sha1, encoding):
+ """Set the differences showed by this window.
+ Compares the two trees and populates the window with the
+ differences.
+ """
+ # Diff with the first commit or the last commit shows nothing
+ if (commit_sha1 == 0 or parent_sha1 == 0 ):
+ return
+
+ fp = os.popen("git diff-tree -p " + parent_sha1 + " " + commit_sha1)
+ self.buffer.set_text(unicode(fp.read(), encoding).encode('utf-8'))
+ fp.close()
+ self.commit_files(commit_sha1, parent_sha1)
+ self.window.show()
+
+ def save_menu_response(self, widget, string):
+ dialog = gtk.FileChooserDialog("Save..", None, gtk.FILE_CHOOSER_ACTION_SAVE,
+ (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
+ gtk.STOCK_SAVE, gtk.RESPONSE_OK))
+ dialog.set_default_response(gtk.RESPONSE_OK)
+ response = dialog.run()
+ if response == gtk.RESPONSE_OK:
+ patch_buffer = self.buffer.get_text(self.buffer.get_start_iter(),
+ self.buffer.get_end_iter())
+ fp = open(dialog.get_filename(), "w")
+ fp.write(patch_buffer)
+ fp.close()
+ dialog.destroy()
+
+class GitView(object):
+ """ This is the main class
+ """
+ version = "0.9"
+
+ def __init__(self, with_diff=0):
+ self.with_diff = with_diff
+ self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+ self.window.set_border_width(0)
+ self.window.set_title("Git repository browser")
+
+ self.get_encoding()
+ self.get_bt_sha1()
+
+ # Use three-quarters of the screen by default
+ screen = self.window.get_screen()
+ monitor = screen.get_monitor_geometry(0)
+ width = int(monitor.width * 0.75)
+ height = int(monitor.height * 0.75)
+ self.window.set_default_size(width, height)
+
+ # FIXME AndyFitz!
+ icon = self.window.render_icon(gtk.STOCK_INDEX, gtk.ICON_SIZE_BUTTON)
+ self.window.set_icon(icon)
+
+ self.accel_group = gtk.AccelGroup()
+ self.window.add_accel_group(self.accel_group)
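+		# Key bindings (GDK keysyms): F5 refresh, F4 maximize,
+		# F11 enter full screen, F12 leave full screen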
+ self.accel_group.connect_group(0xffc2, 0, gtk.ACCEL_LOCKED, self.refresh);
+ self.accel_group.connect_group(0xffc1, 0, gtk.ACCEL_LOCKED, self.maximize);
+ self.accel_group.connect_group(0xffc8, 0, gtk.ACCEL_LOCKED, self.fullscreen);
+ self.accel_group.connect_group(0xffc9, 0, gtk.ACCEL_LOCKED, self.unfullscreen);
+
+ self.window.add(self.construct())
+
+ def refresh(self, widget, event=None, *arguments, **keywords):
+ self.get_encoding()
+ self.get_bt_sha1()
+ Commit.children_sha1 = {}
+ self.set_branch(sys.argv[without_diff:])
+ self.window.show()
+ return True
+
+ def maximize(self, widget, event=None, *arguments, **keywords):
+ self.window.maximize()
+ return True
+
+ def fullscreen(self, widget, event=None, *arguments, **keywords):
+ self.window.fullscreen()
+ return True
+
+ def unfullscreen(self, widget, event=None, *arguments, **keywords):
+ self.window.unfullscreen()
+ return True
+
+ def get_bt_sha1(self):
+ """ Update the bt_sha1 dictionary with the
+ respective sha1 details """
+
+ self.bt_sha1 = { }
+ ls_remote = re.compile('^(.{40})\trefs/([^^]+)(?:\\^(..))?$');
+ fp = os.popen('git ls-remote "${GIT_DIR-.git}"')
+ while 1:
+ line = string.strip(fp.readline())
+ if line == '':
+ break
+ m = ls_remote.match(line)
+ if not m:
+ continue
+ (sha1, name) = (m.group(1), m.group(2))
+ if not self.bt_sha1.has_key(sha1):
+ self.bt_sha1[sha1] = []
+ self.bt_sha1[sha1].append(name)
+ fp.close()
+
+ def get_encoding(self):
+ fp = os.popen("git config --get i18n.commitencoding")
+ self.encoding=string.strip(fp.readline())
+ fp.close()
+ if (self.encoding == ""):
+ self.encoding = "utf-8"
+
+
+ def construct(self):
+ """Construct the window contents."""
+ vbox = gtk.VBox()
+ paned = gtk.VPaned()
+ paned.pack1(self.construct_top(), resize=False, shrink=True)
+ paned.pack2(self.construct_bottom(), resize=False, shrink=True)
+ menu_bar = gtk.MenuBar()
+ menu_bar.set_pack_direction(gtk.PACK_DIRECTION_RTL)
+ help_menu = gtk.MenuItem("Help")
+ menu = gtk.Menu()
+ about_menu = gtk.MenuItem("About")
+ menu.append(about_menu)
+ about_menu.connect("activate", self.about_menu_response, "about")
+ about_menu.show()
+ help_menu.set_submenu(menu)
+ help_menu.show()
+ menu_bar.append(help_menu)
+ menu_bar.show()
+ vbox.pack_start(menu_bar, expand=False, fill=True)
+ vbox.pack_start(paned, expand=True, fill=True)
+ paned.show()
+ vbox.show()
+ return vbox
+
+
+ def construct_top(self):
+ """Construct the top-half of the window."""
+ vbox = gtk.VBox(spacing=6)
+ vbox.set_border_width(12)
+ vbox.show()
+
+
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ vbox.pack_start(scrollwin, expand=True, fill=True)
+ scrollwin.show()
+
+ self.treeview = gtk.TreeView()
+ self.treeview.set_rules_hint(True)
+ self.treeview.set_search_column(4)
+ self.treeview.connect("cursor-changed", self._treeview_cursor_cb)
+ scrollwin.add(self.treeview)
+ self.treeview.show()
+
+ cell = CellRendererGraph()
+ column = gtk.TreeViewColumn()
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "node", 1)
+ column.add_attribute(cell, "in-lines", 2)
+ column.add_attribute(cell, "out-lines", 3)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 65)
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Message")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 4)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 40)
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Author")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 5)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Date")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 6)
+ self.treeview.append_column(column)
+
+ return vbox
+
+ def about_menu_response(self, widget, string):
+ dialog = gtk.AboutDialog()
+ dialog.set_name("Gitview")
+ dialog.set_version(GitView.version)
+ dialog.set_authors(["Aneesh Kumar K.V <aneesh.kumar@gmail.com>"])
+ dialog.set_website("http://www.kernel.org/pub/software/scm/git/")
+ dialog.set_copyright("Use and distribute under the terms of the GNU General Public License")
+ dialog.set_wrap_license(True)
+ dialog.run()
+ dialog.destroy()
+
+
+ def construct_bottom(self):
+ """Construct the bottom half of the window."""
+ vbox = gtk.VBox(False, spacing=6)
+ vbox.set_border_width(12)
+ (width, height) = self.window.get_size()
+ vbox.set_size_request(width, int(height / 2.5))
+ vbox.show()
+
+ self.table = gtk.Table(rows=4, columns=4)
+ self.table.set_row_spacings(6)
+ self.table.set_col_spacings(6)
+ vbox.pack_start(self.table, expand=False, fill=True)
+ self.table.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Revision:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 0, 1, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ self.revid_label = gtk.Label()
+ self.revid_label.set_selectable(True)
+ align.add(self.revid_label)
+ self.table.attach(align, 1, 2, 0, 1, gtk.EXPAND | gtk.FILL, gtk.FILL)
+ self.revid_label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Committer:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 1, 2, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ self.committer_label = gtk.Label()
+ self.committer_label.set_selectable(True)
+ align.add(self.committer_label)
+ self.table.attach(align, 1, 2, 1, 2, gtk.EXPAND | gtk.FILL, gtk.FILL)
+ self.committer_label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Timestamp:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 2, 3, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ self.timestamp_label = gtk.Label()
+ self.timestamp_label.set_selectable(True)
+ align.add(self.timestamp_label)
+ self.table.attach(align, 1, 2, 2, 3, gtk.EXPAND | gtk.FILL, gtk.FILL)
+ self.timestamp_label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Parents:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 3, 4, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+ self.parents_widgets = []
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Children:</b>")
+ align.add(label)
+ self.table.attach(align, 2, 3, 3, 4, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+ self.children_widgets = []
+
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ vbox.pack_start(scrollwin, expand=True, fill=True)
+ scrollwin.show()
+
+ (self.message_buffer, sourceview) = get_source_buffer_and_view()
+
+ sourceview.set_editable(False)
+ sourceview.modify_font(pango.FontDescription("Monospace"))
+ scrollwin.add(sourceview)
+ sourceview.show()
+
+ return vbox
+
+ def _treeview_cursor_cb(self, *args):
+ """Callback for when the treeview cursor changes."""
+ (path, col) = self.treeview.get_cursor()
+ commit = self.model[path][0]
+
+ if commit.committer is not None:
+ committer = commit.committer
+ timestamp = commit.commit_date
+ message = commit.get_message(self.with_diff)
+ revid_label = commit.commit_sha1
+ else:
+ committer = ""
+ timestamp = ""
+ message = ""
+ revid_label = ""
+
+ self.revid_label.set_text(revid_label)
+ self.committer_label.set_text(committer)
+ self.timestamp_label.set_text(timestamp)
+ self.message_buffer.set_text(unicode(message, self.encoding).encode('utf-8'))
+
+ for widget in self.parents_widgets:
+ self.table.remove(widget)
+
+ self.parents_widgets = []
+ self.table.resize(4 + len(commit.parent_sha1) - 1, 4)
+ for idx, parent_id in enumerate(commit.parent_sha1):
+ self.table.set_row_spacing(idx + 3, 0)
+
+ align = gtk.Alignment(0.0, 0.0)
+ self.parents_widgets.append(align)
+ self.table.attach(align, 1, 2, idx + 3, idx + 4,
+ gtk.EXPAND | gtk.FILL, gtk.FILL)
+ align.show()
+
+ hbox = gtk.HBox(False, 0)
+ align.add(hbox)
+ hbox.show()
+
+ label = gtk.Label(parent_id)
+ label.set_selectable(True)
+ hbox.pack_start(label, expand=False, fill=True)
+ label.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_JUMP_TO, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.connect("clicked", self._go_clicked_cb, parent_id)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_FIND, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.set_sensitive(True)
+ button.connect("clicked", self._show_clicked_cb,
+ commit.commit_sha1, parent_id, self.encoding)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ # Populate with child details
+ for widget in self.children_widgets:
+ self.table.remove(widget)
+
+ self.children_widgets = []
+ try:
+ child_sha1 = Commit.children_sha1[commit.commit_sha1]
+ except KeyError:
+			# This commit has no children
+ child_sha1 = [ 0 ]
+
+ if ( len(child_sha1) > len(commit.parent_sha1)):
+ self.table.resize(4 + len(child_sha1) - 1, 4)
+
+ for idx, child_id in enumerate(child_sha1):
+ self.table.set_row_spacing(idx + 3, 0)
+
+ align = gtk.Alignment(0.0, 0.0)
+ self.children_widgets.append(align)
+ self.table.attach(align, 3, 4, idx + 3, idx + 4,
+ gtk.EXPAND | gtk.FILL, gtk.FILL)
+ align.show()
+
+ hbox = gtk.HBox(False, 0)
+ align.add(hbox)
+ hbox.show()
+
+ label = gtk.Label(child_id)
+ label.set_selectable(True)
+ hbox.pack_start(label, expand=False, fill=True)
+ label.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_JUMP_TO, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.connect("clicked", self._go_clicked_cb, child_id)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_FIND, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.set_sensitive(True)
+ button.connect("clicked", self._show_clicked_cb,
+ child_id, commit.commit_sha1, self.encoding)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ def _destroy_cb(self, widget):
+ """Callback for when a window we manage is destroyed."""
+ self.quit()
+
+
+ def quit(self):
+ """Stop the GTK+ main loop."""
+ gtk.main_quit()
+
+ def run(self, args):
+ self.set_branch(args)
+ self.window.connect("destroy", self._destroy_cb)
+ self.window.show()
+ gtk.main()
+
+ def set_branch(self, args):
+		"""Fill in the different windows with info from the repository"""
+ fp = os.popen("git rev-parse --sq --default HEAD " + list_to_string(args, 1))
+ git_rev_list_cmd = fp.read()
+ fp.close()
+ fp = os.popen("git rev-list --header --topo-order --parents " + git_rev_list_cmd)
+ self.update_window(fp)
+
+ def update_window(self, fp):
+ commit_lines = []
+
+ self.model = gtk.ListStore(gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT,
+ gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT, str, str, str)
+
+ # used for cursor positioning
+ self.index = {}
+
+ self.colours = {}
+ self.nodepos = {}
+ self.incomplete_line = {}
+ self.commits = []
+
+ index = 0
+ last_colour = 0
+ last_nodepos = -1
+ out_line = []
+ input_line = fp.readline()
+ while (input_line != ""):
+ # The commit header ends with '\0'
+ # This NULL is immediately followed by the sha1 of the
+ # next commit
+ if (input_line[0] != '\0'):
+ commit_lines.append(input_line)
+ input_line = fp.readline()
+ continue;
+
+ commit = Commit(commit_lines)
+ if (commit != None ):
+ self.commits.append(commit)
+
+			# Skip the '\0'
+ commit_lines = []
+ commit_lines.append(input_line[1:])
+ input_line = fp.readline()
+
+ fp.close()
+
+ for commit in self.commits:
+ (out_line, last_colour, last_nodepos) = self.draw_graph(commit,
+ index, out_line,
+ last_colour,
+ last_nodepos)
+ self.index[commit.commit_sha1] = index
+ index += 1
+
+ self.treeview.set_model(self.model)
+ self.treeview.show()
+
+ def draw_graph(self, commit, index, out_line, last_colour, last_nodepos):
+ in_line=[]
+
+ # | -> outline
+ # X
+ # |\ <- inline
+
+		# Reset the node position
+ if (last_nodepos > 5):
+ last_nodepos = -1
+
+		# Pick the colour and node position for this commit, reusing
+		# any values already assigned to its sha1
+ try:
+ colour = self.colours[commit.commit_sha1]
+ except KeyError:
+ self.colours[commit.commit_sha1] = last_colour+1
+ last_colour = self.colours[commit.commit_sha1]
+ colour = self.colours[commit.commit_sha1]
+
+ try:
+ node_pos = self.nodepos[commit.commit_sha1]
+ except KeyError:
+ self.nodepos[commit.commit_sha1] = last_nodepos+1
+ last_nodepos = self.nodepos[commit.commit_sha1]
+ node_pos = self.nodepos[commit.commit_sha1]
+
+		# The first parent always continues on the same line
+ try:
+			# check whether we already have the value
+ tmp_node_pos = self.nodepos[commit.parent_sha1[0]]
+ except KeyError:
+ self.colours[commit.parent_sha1[0]] = colour
+ self.nodepos[commit.parent_sha1[0]] = node_pos
+
+ for sha1 in self.incomplete_line.keys():
+ if (sha1 != commit.commit_sha1):
+ self.draw_incomplete_line(sha1, node_pos,
+ out_line, in_line, index)
+ else:
+ del self.incomplete_line[sha1]
+
+
+ for parent_id in commit.parent_sha1:
+ try:
+ tmp_node_pos = self.nodepos[parent_id]
+ except KeyError:
+ self.colours[parent_id] = last_colour+1
+ last_colour = self.colours[parent_id]
+ self.nodepos[parent_id] = last_nodepos+1
+ last_nodepos = self.nodepos[parent_id]
+
+ in_line.append((node_pos, self.nodepos[parent_id],
+ self.colours[parent_id]))
+ self.add_incomplete_line(parent_id)
+
+ try:
+ branch_tag = self.bt_sha1[commit.commit_sha1]
+ except KeyError:
+ branch_tag = [ ]
+
+
+ node = (node_pos, colour, branch_tag)
+
+ self.model.append([commit, node, out_line, in_line,
+ commit.message, commit.author, commit.date])
+
+ return (in_line, last_colour, last_nodepos)
+
+ def add_incomplete_line(self, sha1):
+ try:
+ self.incomplete_line[sha1].append(self.nodepos[sha1])
+ except KeyError:
+ self.incomplete_line[sha1] = [self.nodepos[sha1]]
+
+ def draw_incomplete_line(self, sha1, node_pos, out_line, in_line, index):
+ for idx, pos in enumerate(self.incomplete_line[sha1]):
+ if(pos == node_pos):
+ #remove the straight line and add a slash
+ if ((pos, pos, self.colours[sha1]) in out_line):
+ out_line.remove((pos, pos, self.colours[sha1]))
+ out_line.append((pos, pos+0.5, self.colours[sha1]))
+ self.incomplete_line[sha1][idx] = pos = pos+0.5
+ try:
+ next_commit = self.commits[index+1]
+ if (next_commit.commit_sha1 == sha1 and pos != int(pos)):
+ # join the line back to the node point
+					# This needs to be done only if we modified it
+ in_line.append((pos, pos-0.5, self.colours[sha1]))
+ continue;
+ except IndexError:
+ pass
+ in_line.append((pos, pos, self.colours[sha1]))
+
+
+ def _go_clicked_cb(self, widget, revid):
+ """Callback for when the go button for a parent is clicked."""
+ try:
+ self.treeview.set_cursor(self.index[revid])
+ except KeyError:
+ dialog = gtk.MessageDialog(parent=None, flags=0,
+ type=gtk.MESSAGE_WARNING, buttons=gtk.BUTTONS_CLOSE,
+ message_format=None)
+ dialog.set_markup("Revision <b>%s</b> not present in the list" % revid)
+ # revid == 0 is the parent of the first commit
+ if (revid != 0 ):
+ dialog.format_secondary_text("Try running gitview without any options")
+ dialog.run()
+ dialog.destroy()
+
+ self.treeview.grab_focus()
+
+ def _show_clicked_cb(self, widget, commit_sha1, parent_sha1, encoding):
+ """Callback for when the show button for a parent is clicked."""
+ window = DiffWindow()
+ window.set_diff(commit_sha1, parent_sha1, encoding)
+ self.treeview.grab_focus()
+
+without_diff = 0
+if __name__ == "__main__":
+
+ if (len(sys.argv) > 1 ):
+ if (sys.argv[1] == "--without-diff"):
+ without_diff = 1
+
+ view = GitView( without_diff != 1)
+ view.run(sys.argv[without_diff:])
diff --git a/contrib/gitview/gitview.txt b/contrib/gitview/gitview.txt
new file mode 100644
index 0000000..9e12f97
--- /dev/null
+++ b/contrib/gitview/gitview.txt
@@ -0,0 +1,57 @@
+gitview(1)
+==========
+
+NAME
+----
+gitview - A GTK based repository browser for git
+
+SYNOPSIS
+--------
+[verse]
+'gitview' [options] [args]
+
+DESCRIPTION
+-----------
+
+gitview is a GTK based browser for the commit history of a git
+repository.
+
+Dependencies:
+
+* Python 2.4
+* PyGTK 2.8 or later
+* PyCairo 1.0 or later
+
+OPTIONS
+-------
+--without-diff::
+
+	Do not show the commit diffs in the main window. This may speed up
+	repository browsing.
+
+<args>::
+
+	All the valid options for gitlink:git-rev-list[1].
+
+Key Bindings
+------------
+F4::
+ To maximize the window
+
+F5::
+ To reread references.
+
+F11::
+ Full screen
+
+F12::
+ Leave full screen
+
+EXAMPLES
+--------
+
+gitview v2.6.12.. include/scsi drivers/scsi::
+
+	Show the changes since version v2.6.12 that modified any file in the
+	include/scsi or drivers/scsi subdirectories
+
+gitview --since=2.weeks.ago::
+
+ Show the changes during the last two weeks
diff --git a/contrib/hg-to-git/hg-to-git.py b/contrib/hg-to-git/hg-to-git.py
new file mode 100755
index 0000000..232625a
--- /dev/null
+++ b/contrib/hg-to-git/hg-to-git.py
@@ -0,0 +1,255 @@
+#!/usr/bin/env python
+
+""" hg-to-git.py - A Mercurial to GIT converter
+
+ Copyright (C)2007 Stelian Pop <stelian@popies.net>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2, or (at your option)
+ any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+"""
+
+import os, os.path, sys
+import tempfile, pickle, getopt
+import re
+
+if sys.hexversion < 0x02030000:
+ # The behavior of the pickle module changed significantly in 2.3
+ sys.stderr.write("hg-to-git.py: requires Python 2.3 or later.\n")
+ sys.exit(1)
+
+# Maps hg version -> git version
+hgvers = {}
+# List of children for each hg revision
+hgchildren = {}
+# List of parents for each hg revision
+hgparents = {}
+# Current branch for each hg revision
+hgbranch = {}
+# Number of new changesets converted from hg
+hgnewcsets = 0
+
+#------------------------------------------------------------------------------
+
+def usage():
+
+ print """\
+%s: [OPTIONS] <hgprj>
+
+options:
+ -s, --gitstate=FILE: name of the state to be saved/read
+ for incrementals
+ -n, --nrepack=INT: number of changesets that will trigger
+ a repack (default=0, -1 to deactivate)
+ -v, --verbose: be verbose
+
+required:
+ hgprj: name of the HG project to import (directory)
+""" % sys.argv[0]
+
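+# Typical invocation (illustrative paths; options as described above):
+#
+#   hg-to-git.py --gitstate=/tmp/myproj.state -v /path/to/hg/myproj
+#
+# Re-running with the same state file converts only the new hg changesets.
+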
+#------------------------------------------------------------------------------
+
+def getgitenv(user, date):
+ env = ''
+ elems = re.compile('(.*?)\s+<(.*)>').match(user)
+ if elems:
+ env += 'export GIT_AUTHOR_NAME="%s" ;' % elems.group(1)
+ env += 'export GIT_COMMITTER_NAME="%s" ;' % elems.group(1)
+ env += 'export GIT_AUTHOR_EMAIL="%s" ;' % elems.group(2)
+ env += 'export GIT_COMMITTER_EMAIL="%s" ;' % elems.group(2)
+ else:
+ env += 'export GIT_AUTHOR_NAME="%s" ;' % user
+ env += 'export GIT_COMMITTER_NAME="%s" ;' % user
+ env += 'export GIT_AUTHOR_EMAIL= ;'
+ env += 'export GIT_COMMITTER_EMAIL= ;'
+
+ env += 'export GIT_AUTHOR_DATE="%s" ;' % date
+ env += 'export GIT_COMMITTER_DATE="%s" ;' % date
+ return env
+
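+# For example (illustrative values), getgitenv('John Doe <jd@example.com>',
+# date) returns a string of 'export GIT_AUTHOR_NAME="John Doe" ; ...'
+# assignments that is prefixed to the git commands run below, so the
+# converted commits carry the original hg author and date.
+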
+#------------------------------------------------------------------------------
+
+state = ''
+opt_nrepack = 0
+verbose = False
+
+try:
+ opts, args = getopt.getopt(sys.argv[1:], 's:t:n:v', ['gitstate=', 'tempdir=', 'nrepack=', 'verbose'])
+ for o, a in opts:
+ if o in ('-s', '--gitstate'):
+ state = a
+ state = os.path.abspath(state)
+ if o in ('-n', '--nrepack'):
+ opt_nrepack = int(a)
+ if o in ('-v', '--verbose'):
+ verbose = True
+ if len(args) != 1:
+ raise Exception('params')
+except:
+ usage()
+ sys.exit(1)
+
+hgprj = args[0]
+os.chdir(hgprj)
+
+if state:
+ if os.path.exists(state):
+ if verbose:
+ print 'State does exist, reading'
+ f = open(state, 'r')
+ hgvers = pickle.load(f)
+ else:
+ print 'State does not exist, first run'
+
+sock = os.popen('hg tip --template "{rev}"')
+tip = sock.read()
+if sock.close():
+ sys.exit(1)
+if verbose:
+ print 'tip is', tip
+
+# Calculate the branches
+if verbose:
+ print 'analysing the branches...'
+hgchildren["0"] = ()
+hgparents["0"] = (None, None)
+hgbranch["0"] = "master"
+for cset in range(1, int(tip) + 1):
+ hgchildren[str(cset)] = ()
+ prnts = os.popen('hg log -r %d --template "{parents}"' % cset).read().strip().split(' ')
+ prnts = map(lambda x: x[:x.find(':')], prnts)
+ if prnts[0] != '':
+ parent = prnts[0].strip()
+ else:
+ parent = str(cset - 1)
+ hgchildren[parent] += ( str(cset), )
+ if len(prnts) > 1:
+ mparent = prnts[1].strip()
+ hgchildren[mparent] += ( str(cset), )
+ else:
+ mparent = None
+
+ hgparents[str(cset)] = (parent, mparent)
+
+ if mparent:
+ # For merge changesets, take either one, preferably the 'master' branch
+ if hgbranch[mparent] == 'master':
+ hgbranch[str(cset)] = 'master'
+ else:
+ hgbranch[str(cset)] = hgbranch[parent]
+ else:
+ # Normal changesets
+ # For first children, take the parent branch, for the others create a new branch
+ if hgchildren[parent][0] == str(cset):
+ hgbranch[str(cset)] = hgbranch[parent]
+ else:
+ hgbranch[str(cset)] = "branch-" + str(cset)
+
+if not hgvers.has_key("0"):
+ print 'creating repository'
+ os.system('git init')
+
+# loop through every hg changeset
+for cset in range(int(tip) + 1):
+
+ # incremental, already seen
+ if hgvers.has_key(str(cset)):
+ continue
+ hgnewcsets += 1
+
+ # get info
+ log_data = os.popen('hg log -r %d --template "{tags}\n{date|date}\n{author}\n"' % cset).readlines()
+ tag = log_data[0].strip()
+ date = log_data[1].strip()
+ user = log_data[2].strip()
+ parent = hgparents[str(cset)][0]
+ mparent = hgparents[str(cset)][1]
+
+ #get comment
+ (fdcomment, filecomment) = tempfile.mkstemp()
+ csetcomment = os.popen('hg log -r %d --template "{desc}"' % cset).read().strip()
+ os.write(fdcomment, csetcomment)
+ os.close(fdcomment)
+
+ print '-----------------------------------------'
+ print 'cset:', cset
+ print 'branch:', hgbranch[str(cset)]
+ print 'user:', user
+ print 'date:', date
+ print 'comment:', csetcomment
+ if parent:
+ print 'parent:', parent
+ if mparent:
+ print 'mparent:', mparent
+ if tag:
+ print 'tag:', tag
+ print '-----------------------------------------'
+
+ # checkout the parent if necessary
+ if cset != 0:
+ if hgbranch[str(cset)] == "branch-" + str(cset):
+ print 'creating new branch', hgbranch[str(cset)]
+ os.system('git checkout -b %s %s' % (hgbranch[str(cset)], hgvers[parent]))
+ else:
+ print 'checking out branch', hgbranch[str(cset)]
+ os.system('git checkout %s' % hgbranch[str(cset)])
+
+ # merge
+ if mparent:
+ if hgbranch[parent] == hgbranch[str(cset)]:
+ otherbranch = hgbranch[mparent]
+ else:
+ otherbranch = hgbranch[parent]
+ print 'merging', otherbranch, 'into', hgbranch[str(cset)]
+ os.system(getgitenv(user, date) + 'git merge --no-commit -s ours "" %s %s' % (hgbranch[str(cset)], otherbranch))
+
+ # remove everything except .git and .hg directories
+ os.system('find . \( -path "./.hg" -o -path "./.git" \) -prune -o ! -name "." -print | xargs rm -rf')
+
+    # repopulate with checked-out files
+ os.system('hg update -C %d' % cset)
+
+ # add new files
+ os.system('git ls-files -x .hg --others | git update-index --add --stdin')
+ # delete removed files
+ os.system('git ls-files -x .hg --deleted | git update-index --remove --stdin')
+
+ # commit
+ os.system(getgitenv(user, date) + 'git commit --allow-empty -a -F %s' % filecomment)
+ os.unlink(filecomment)
+
+ # tag
+ if tag and tag != 'tip':
+ os.system(getgitenv(user, date) + 'git tag %s' % tag)
+
+ # delete branch if not used anymore...
+ if mparent and len(hgchildren[str(cset)]):
+ print "Deleting unused branch:", otherbranch
+ os.system('git branch -d %s' % otherbranch)
+
+ # retrieve and record the version
+ vvv = os.popen('git show --quiet --pretty=format:%H').read()
+ print 'record', cset, '->', vvv
+ hgvers[str(cset)] = vvv
+
+if hgnewcsets >= opt_nrepack and opt_nrepack != -1:
+ os.system('git repack -a -d')
+
+# write the state for incrementals
+if state:
+ if verbose:
+ print 'Writing state'
+ f = open(state, 'w')
+ pickle.dump(hgvers, f)
+
+# vim: et ts=8 sw=4 sts=4
diff --git a/contrib/hg-to-git/hg-to-git.txt b/contrib/hg-to-git/hg-to-git.txt
new file mode 100644
index 0000000..91f8fe6
--- /dev/null
+++ b/contrib/hg-to-git/hg-to-git.txt
@@ -0,0 +1,21 @@
+hg-to-git.py is able to convert a Mercurial repository into a git one,
+and preserves the branches in the process (unlike tailor).
+
+hg-to-git.py can probably be greatly improved (it's a rather crude
+combination of shell and python) but it does already work quite well for
+me. Features:
+ - supports incremental conversion
+ (for keeping a git repo in sync with a hg one)
+ - supports hg branches
+ - converts hg tags
+
+Note that the git repository will be created 'in place' (at the same
+location as the source hg repo). You will have to manually remove the
+'.hg' directory after the conversion.
+
+Also note that the incremental conversion uses 'simple' hg changesets
+identifiers (ordinals, as opposed to SHA-1 ids), and since these ids
+are not stable across different repositories the hg-to-git.py state file
+is forever tied to one hg repository.
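+
+An example run might look like this (illustrative paths; see the usage
+message in hg-to-git.py for the options):
+
+    hg-to-git.py --gitstate=/tmp/myproj.state /path/to/hg/myproj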
+
+Stelian Pop <stelian@popies.net>
diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email
new file mode 100755
index 0000000..0e5b72d
--- /dev/null
+++ b/contrib/hooks/post-receive-email
@@ -0,0 +1,748 @@
+#!/bin/sh
+#
+# Copyright (c) 2007 Andy Parkins
+#
+# An example hook script to mail out commit update information. This hook
+# sends emails listing new revisions to the repository introduced by the
+# change being reported. The rule is that (for branch updates) each commit
+# will appear in one email and one email only.
+#
+# This hook is stored in the contrib/hooks directory. Your distribution
+# will have put this somewhere standard. You should make this script
+# executable then link to it in the repository you would like to use it in.
+# For example, on debian the hook is stored in
+# /usr/share/git-core/contrib/hooks/post-receive-email:
+#
+# chmod a+x post-receive-email
+# cd /path/to/your/repository.git
+# ln -sf /usr/share/git-core/contrib/hooks/post-receive-email hooks/post-receive
+#
+# This hook script assumes it is enabled on the central repository of a
+# project, with all users pushing only to it and not between each other. It
+# will still work if you don't operate in that style, but it would become
+# possible for the email to be from someone other than the person doing the
+# push.
+#
+# To help with debugging and use on pre-v1.5.1 git servers, this script will
+# also obey the interface of hooks/update, taking its arguments on the
+# command line. Unfortunately, hooks/update is called once for each ref.
+# To avoid firing one email per ref, this script just prints its output to
+# the screen when used in this mode. The output can then be redirected if
+# wanted.
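+#
+# For example, to regenerate the notification for a single ref update by
+# hand (illustrative placeholder values, using the update-hook argument
+# order refname, oldrev, newrev):
+#
+#   ./post-receive-email refs/heads/master $oldrev $newrev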
+#
+# Config
+# ------
+# hooks.mailinglist
+# This is the list that all pushes will go to; leave it blank to not send
+# emails for every ref update.
+# hooks.announcelist
+# This is the list that all pushes of annotated tags will go to. Leave it
+#   blank to default to the mailinglist field. The announce emails list
+#   the short log summary of the changes since the last annotated tag.
+# hooks.envelopesender
+# If set then the -f option is passed to sendmail to allow the envelope
+# sender address to be set
+# hooks.emailprefix
+# All emails have their subjects prefixed with this prefix, or "[SCM]"
+# if emailprefix is unset, to aid filtering
+# hooks.showrev
+# The shell command used to format each revision in the email, with
+# "%s" replaced with the commit id. Defaults to "git rev-list -1
+# --pretty %s", displaying the commit id, author, date and log
+# message. To list full patches separated by a blank line, you
+# could set this to "git show -C %s; echo".
+# To list a gitweb/cgit URL *and* a full patch for each change set, use this:
+# "t=%s; printf 'http://.../?id=%%s' \$t; echo;echo; git show -C \$t; echo"
+# Be careful if "..." contains things that will be expanded by shell "eval"
+# or printf.
+# hooks.emailmaxlines
+# The maximum number of lines that should be included in the generated
+# email body. If not specified, there is no limit.
+# Lines beyond the limit are suppressed and counted, and a final
+# line is added indicating the number of suppressed lines.
+# hooks.diffopts
+# Alternate options for the git diff-tree invocation that shows changes.
+# Default is "--stat --summary --find-copies-harder". Add -p to those
+# options to include a unified diff of changes in addition to the usual
+# summary output.
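+#
+# Example setup (hypothetical addresses), run inside the repository:
+#
+#   git config hooks.mailinglist "commits@example.com"
+#   git config hooks.announcelist "announce@example.com"
+#   git config hooks.emailprefix "[myproject] "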
+#
+# Notes
+# -----
+# All emails include the headers "X-Git-Refname", "X-Git-Oldrev",
+# "X-Git-Newrev", and "X-Git-Reftype" to enable fine tuned filtering and
+# give information for debugging.
+#
+
+# ---------------------------- Functions
+
+#
+# Function to prepare for email generation. This decides what type
+# of update this is and whether an email should even be generated.
+#
+prep_for_email()
+{
+ # --- Arguments
+ oldrev=$(git rev-parse $1)
+ newrev=$(git rev-parse $2)
+ refname="$3"
+
+ # --- Interpret
+ # 0000->1234 (create)
+ # 1234->2345 (update)
+ # 2345->0000 (delete)
+ if expr "$oldrev" : '0*$' >/dev/null
+ then
+ change_type="create"
+ else
+ if expr "$newrev" : '0*$' >/dev/null
+ then
+ change_type="delete"
+ else
+ change_type="update"
+ fi
+ fi
+
+ # --- Get the revision types
+ newrev_type=$(git cat-file -t $newrev 2> /dev/null)
+ oldrev_type=$(git cat-file -t "$oldrev" 2> /dev/null)
+ case "$change_type" in
+ create|update)
+ rev="$newrev"
+ rev_type="$newrev_type"
+ ;;
+ delete)
+ rev="$oldrev"
+ rev_type="$oldrev_type"
+ ;;
+ esac
+
+	# The revision type tells us what type the commit is; combined with
+ # the location of the ref we can decide between
+ # - working branch
+ # - tracking branch
+	#  - unannotated tag
+ # - annotated tag
+ case "$refname","$rev_type" in
+ refs/tags/*,commit)
+ # un-annotated tag
+ refname_type="tag"
+ short_refname=${refname##refs/tags/}
+ ;;
+ refs/tags/*,tag)
+ # annotated tag
+ refname_type="annotated tag"
+ short_refname=${refname##refs/tags/}
+ # change recipients
+ if [ -n "$announcerecipients" ]; then
+ recipients="$announcerecipients"
+ fi
+ ;;
+ refs/heads/*,commit)
+ # branch
+ refname_type="branch"
+ short_refname=${refname##refs/heads/}
+ ;;
+ refs/remotes/*,commit)
+ # tracking branch
+ refname_type="tracking branch"
+ short_refname=${refname##refs/remotes/}
+ echo >&2 "*** Push-update of tracking branch, $refname"
+ echo >&2 "*** - no email generated."
+ return 1
+ ;;
+ *)
+ # Anything else (is there anything else?)
+ echo >&2 "*** Unknown type of update to $refname ($rev_type)"
+ echo >&2 "*** - no email generated"
+ return 1
+ ;;
+ esac
+
+ # Check if we've got anyone to send to
+ if [ -z "$recipients" ]; then
+ case "$refname_type" in
+ "annotated tag")
+ config_name="hooks.announcelist"
+ ;;
+ *)
+ config_name="hooks.mailinglist"
+ ;;
+ esac
+ echo >&2 "*** $config_name is not set so no email will be sent"
+ echo >&2 "*** for $refname update $oldrev->$newrev"
+ return 1
+ fi
+
+ return 0
+}
+
+#
+# Top level email generation function. This calls the appropriate
+# body-generation routine after outputting the common header.
+#
+# Note this function doesn't actually generate any email output; that is
+# taken care of by the functions it calls:
+# - generate_email_header
+# - generate_create_XXXX_email
+# - generate_update_XXXX_email
+# - generate_delete_XXXX_email
+# - generate_email_footer
+#
+# Note also that this function cannot 'exit' from the script; when this
+# function is running (in hook script mode), the send_mail() function
+# is already executing in another process, connected via a pipe, and
+# if this function exits prematurely, whatever has been generated to that
+# point will be sent as an email... even if nothing has been generated.
+#
+generate_email()
+{
+ # Email parameters
+ # The email subject will contain the best description of the ref
+ # that we can build from the parameters
+ describe=$(git describe $rev 2>/dev/null)
+ if [ -z "$describe" ]; then
+ describe=$rev
+ fi
+
+ generate_email_header
+
+ # Call the correct body generation function
+ fn_name=general
+ case "$refname_type" in
+ "tracking branch"|branch)
+ fn_name=branch
+ ;;
+ "annotated tag")
+ fn_name=atag
+ ;;
+ esac
+
+ if [ -z "$maxlines" ]; then
+ generate_${change_type}_${fn_name}_email
+ else
+ generate_${change_type}_${fn_name}_email | limit_lines $maxlines
+ fi
+
+ generate_email_footer
+}
+
+generate_email_header()
+{
+ # --- Email (all stdout will be the email)
+ # Generate header
+ cat <<-EOF
+ To: $recipients
+ Subject: ${emailprefix}$projectdesc $refname_type $short_refname ${change_type}d. $describe
+ X-Git-Refname: $refname
+ X-Git-Reftype: $refname_type
+ X-Git-Oldrev: $oldrev
+ X-Git-Newrev: $newrev
+ Auto-Submitted: auto-generated
+
+ This is an automated email from the git hooks/post-receive script. It was
+ generated because a ref change was pushed to the repository containing
+ the project "$projectdesc".
+
+ The $refname_type, $short_refname has been ${change_type}d
+ EOF
+}
+
+generate_email_footer()
+{
+ SPACE=" "
+ cat <<-EOF
+
+
+ hooks/post-receive
+ --${SPACE}
+ $projectdesc
+ EOF
+}
+
+# --------------- Branches
+
+#
+# Called for the creation of a branch
+#
+generate_create_branch_email()
+{
+ # This is a new branch and so oldrev is not valid
+ echo " at $newrev ($newrev_type)"
+ echo ""
+
+ echo $LOGBEGIN
+ show_new_revisions
+ echo $LOGEND
+}
+
+#
+# Called for the change of a pre-existing branch
+#
+generate_update_branch_email()
+{
+ # Consider this:
+ # 1 --- 2 --- O --- X --- 3 --- 4 --- N
+ #
+ # O is $oldrev for $refname
+ # N is $newrev for $refname
+ # X is a revision pointed to by some other ref, for which we may
+ # assume that an email has already been generated.
+ # In this case we want to issue an email containing only revisions
+ # 3, 4, and N. Given (almost) by
+ #
+ # git rev-list N ^O --not --all
+ #
+	# The reason for the "almost" is that the "--not --all" will take
+ # precedence over the "N", and effectively will translate to
+ #
+ # git rev-list N ^O ^X ^N
+ #
+ # So, we need to build up the list more carefully. git rev-parse
+ # will generate a list of revs that may be fed into git rev-list.
+ # We can get it to make the "--not --all" part and then filter out
+ # the "^N" with:
+ #
+ # git rev-parse --not --all | grep -v N
+ #
+ # Then, using the --stdin switch to git rev-list we have effectively
+ # manufactured
+ #
+ # git rev-list N ^O ^X
+ #
+ # This leaves a problem when someone else updates the repository
+ # while this script is running. Their new value of the ref we're
+ # working on would be included in the "--not --all" output; and as
+ # our $newrev would be an ancestor of that commit, it would exclude
+ # all of our commits. What we really want is to exclude the current
+ # value of $refname from the --not list, rather than N itself. So:
+ #
+ # git rev-parse --not --all | grep -v $(git rev-parse $refname)
+ #
+	# Gets us to something pretty safe (apart from the small time
+ # between refname being read, and git rev-parse running - for that,
+ # I give up)
+ #
+ #
+ # Next problem, consider this:
+ # * --- B --- * --- O ($oldrev)
+ # \
+ # * --- X --- * --- N ($newrev)
+ #
+ # That is to say, there is no guarantee that oldrev is a strict
+ # subset of newrev (it would have required a --force, but that's
+ # allowed). So, we can't simply say rev-list $oldrev..$newrev.
+ # Instead we find the common base of the two revs and list from
+ # there.
+ #
+ # As above, we need to take into account the presence of X; if
+ # another branch is already in the repository and points at some of
+ # the revisions that we are about to output - we don't want them.
+ # The solution is as before: git rev-parse output filtered.
+ #
+ # Finally, tags: 1 --- 2 --- O --- T --- 3 --- 4 --- N
+ #
+ # Tags pushed into the repository generate nice shortlog emails that
+ # summarise the commits between them and the previous tag. However,
+ # those emails don't include the full commit messages that we output
+ # for a branch update. Therefore we still want to output revisions
+ # that have been output on a tag email.
+ #
+ # Luckily, git rev-parse includes just the tool. Instead of using
+ # "--all" we use "--branches"; this has the added benefit that
+ # "remotes/" will be ignored as well.
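+	#
+	# Putting the pieces above together, the new revisions shown later
+	# in this email are produced by roughly this pipeline (see
+	# show_new_revisions below):
+	#
+	#   git rev-parse --not --branches |
+	#   grep -v $(git rev-parse $refname) |
+	#   git rev-list --pretty --stdin $oldrev..$newrev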
+
+	# List all of the revisions that were removed by this update. In a
+ # fast-forward update, this list will be empty, because rev-list O
+ # ^N is empty. For a non-fast-forward, O ^N is the list of removed
+ # revisions
+ fast_forward=""
+ rev=""
+ for rev in $(git rev-list $newrev..$oldrev)
+ do
+ revtype=$(git cat-file -t "$rev")
+ echo " discards $rev ($revtype)"
+ done
+ if [ -z "$rev" ]; then
+ fast_forward=1
+ fi
+
+ # List all the revisions from baserev to newrev in a kind of
+ # "table-of-contents"; note this list can include revisions that
+ # have already had notification emails and is present to show the
+ # full detail of the change from rolling back the old revision to
+ # the base revision and then forward to the new revision
+ for rev in $(git rev-list $oldrev..$newrev)
+ do
+ revtype=$(git cat-file -t "$rev")
+ echo " via $rev ($revtype)"
+ done
+
+ if [ "$fast_forward" ]; then
+ echo " from $oldrev ($oldrev_type)"
+ else
+ # 1. Existing revisions were removed. In this case newrev
+ # is a subset of oldrev - this is the reverse of a
+ # fast-forward, a rewind
+ # 2. New revisions were added on top of an old revision,
+ # this is a rewind and addition.
+
+ # (1) certainly happened, (2) possibly. When (2) hasn't
+ # happened, we set a flag to indicate that no log printout
+ # is required.
+
+ echo ""
+
+ # Find the common ancestor of the old and new revisions and
+ # compare it with newrev
+ baserev=$(git merge-base $oldrev $newrev)
+ rewind_only=""
+ if [ "$baserev" = "$newrev" ]; then
+ echo "This update discarded existing revisions and left the branch pointing at"
+ echo "a previous point in the repository history."
+ echo ""
+ echo " * -- * -- N ($newrev)"
+ echo " \\"
+ echo " O -- O -- O ($oldrev)"
+ echo ""
+ echo "The removed revisions are not necessarily gone - if another reference"
+ echo "still refers to them they will stay in the repository."
+ rewind_only=1
+ else
+ echo "This update added new revisions after undoing existing revisions. That is"
+ echo "to say, the old revision is not a strict subset of the new revision. This"
+ echo "situation occurs when you --force push a change and generate a repository"
+ echo "containing something like this:"
+ echo ""
+ echo " * -- * -- B -- O -- O -- O ($oldrev)"
+ echo " \\"
+ echo " N -- N -- N ($newrev)"
+ echo ""
+ echo "When this happens we assume that you've already had alert emails for all"
+ echo "of the O revisions, and so we here report only the revisions in the N"
+ echo "branch from the common base, B."
+ fi
+ fi
+
+ echo ""
+ if [ -z "$rewind_only" ]; then
+ echo "Those revisions listed above that are new to this repository have"
+ echo "not appeared on any other notification email; so we list those"
+ echo "revisions in full, below."
+
+ echo ""
+ echo $LOGBEGIN
+ show_new_revisions
+
+ # XXX: Need a way of detecting whether git rev-list actually
+ # outputted anything, so that we can issue a "no new
+ # revisions added by this update" message
+
+ echo $LOGEND
+ else
+ echo "No new revisions were added by this update."
+ fi
+
+ # The diffstat is shown from the old revision to the new revision.
+ # This is to show the truth of what happened in this change.
+ # There's no point showing the stat from the base to the new
+ # revision because the base is effectively a random revision at this
+ # point - the user will be interested in what this revision changed
+ # - including the undoing of previous revisions in the case of
+ # non-fast-forward updates.
+ echo ""
+ echo "Summary of changes:"
+ git diff-tree $diffopts $oldrev..$newrev
+}
+
+#
+# Called for the deletion of a branch
+#
+generate_delete_branch_email()
+{
+ echo " was $oldrev"
+ echo ""
+ echo $LOGBEGIN
+ git show -s --pretty=oneline $oldrev
+ echo $LOGEND
+}
+
+# --------------- Annotated tags
+
+#
+# Called for the creation of an annotated tag
+#
+generate_create_atag_email()
+{
+ echo " at $newrev ($newrev_type)"
+
+ generate_atag_email
+}
+
+#
+# Called for the update of an annotated tag (this is probably a rare event
+# and may not even be allowed)
+#
+generate_update_atag_email()
+{
+ echo " to $newrev ($newrev_type)"
+ echo " from $oldrev (which is now obsolete)"
+
+ generate_atag_email
+}
+
+#
+# Called when an annotated tag is created or changed
+#
+generate_atag_email()
+{
+ # Use git for-each-ref to pull out the individual fields from the
+ # tag
+ eval $(git for-each-ref --shell --format='
+ tagobject=%(*objectname)
+ tagtype=%(*objecttype)
+ tagger=%(taggername)
+ tagged=%(taggerdate)' $refname
+ )
+
+ echo " tagging $tagobject ($tagtype)"
+ case "$tagtype" in
+ commit)
+
+ # If the tagged object is a commit, then we assume this is a
+ # release, and so we calculate which tag this tag is
+ # replacing
+ prevtag=$(git describe --abbrev=0 $newrev^ 2>/dev/null)
+
+ if [ -n "$prevtag" ]; then
+ echo " replaces $prevtag"
+ fi
+ ;;
+ *)
+ echo " length $(git cat-file -s $tagobject) bytes"
+ ;;
+ esac
+ echo " tagged by $tagger"
+ echo " on $tagged"
+
+ echo ""
+ echo $LOGBEGIN
+
+ # Show the content of the tag message; this might contain a change
+ # log or release notes so is worth displaying.
+ git cat-file tag $newrev | sed -e '1,/^$/d'
+
+ echo ""
+ case "$tagtype" in
+ commit)
+ # Only commit tags make sense to have rev-list operations
+ # performed on them
+ if [ -n "$prevtag" ]; then
+ # Show changes since the previous release
+ git rev-list --pretty=short "$prevtag..$newrev" | git shortlog
+ else
+ # No previous tag, show all the changes since time
+ # began
+ git rev-list --pretty=short $newrev | git shortlog
+ fi
+ ;;
+ *)
+ # XXX: Is there anything useful we can do for non-commit
+ # objects?
+ ;;
+ esac
+
+ echo $LOGEND
+}
+
+#
+# Called for the deletion of an annotated tag
+#
+generate_delete_atag_email()
+{
+ echo " was $oldrev"
+ echo ""
+ echo $LOGBEGIN
+ git show -s --pretty=oneline $oldrev
+ echo $LOGEND
+}
+
+# --------------- General references
+
+#
+# Called when any other type of reference is created (most likely a
+# non-annotated tag)
+#
+generate_create_general_email()
+{
+ echo " at $newrev ($newrev_type)"
+
+ generate_general_email
+}
+
+#
+# Called when any other type of reference is updated (most likely a
+# non-annotated tag)
+#
+generate_update_general_email()
+{
+ echo " to $newrev ($newrev_type)"
+ echo " from $oldrev"
+
+ generate_general_email
+}
+
+#
+# Called for creation or update of any other type of reference
+#
+generate_general_email()
+{
+ # Unannotated tags are more about marking a point than releasing a
+ # version; therefore we don't do the shortlog summary that we do for
+ # annotated tags above - we simply show that the point has been
+ # marked, and print the log message for the marked point for
+ # reference purposes
+ #
+ # Note this section also catches any other reference type (although
+ # there aren't any) and deals with them in the same way.
+
+ echo ""
+ if [ "$newrev_type" = "commit" ]; then
+ echo $LOGBEGIN
+ git show --no-color --root -s --pretty=medium $newrev
+ echo $LOGEND
+ else
+ # What can we do here? The tag marks an object that is not
+ # a commit, so there is no log for us to display. It's
+ # probably not wise to output git cat-file as it could be a
+ # binary blob. We'll just say how big it is
+ echo "$newrev is a $newrev_type, and is $(git cat-file -s $newrev) bytes long."
+ fi
+}
+
+#
+# Called for the deletion of any other type of reference
+#
+generate_delete_general_email()
+{
+ echo " was $oldrev"
+ echo ""
+ echo $LOGBEGIN
+ git show -s --pretty=oneline $oldrev
+ echo $LOGEND
+}
+
+
+# --------------- Miscellaneous utilities
+
+#
+# Show new revisions as the user would like to see them in the email.
+#
+show_new_revisions()
+{
+ # This shows all log entries that are not already covered by
+ # another ref - i.e. commits that are now accessible from this
+ # ref that were previously not accessible
+ # (see generate_update_branch_email for the explanation of this
+ # command)
+
+ # Revision range passed to rev-list differs for new vs. updated
+ # branches.
+ if [ "$change_type" = create ]
+ then
+ # Show all revisions exclusive to this (new) branch.
+ revspec=$newrev
+ else
+ # Branch update; show revisions not part of $oldrev.
+ revspec=$oldrev..$newrev
+ fi
+
+ other_branches=$(git for-each-ref --format='%(refname)' refs/heads/ |
+ grep -F -v $refname)
+ git rev-parse --not $other_branches |
+ if [ -z "$custom_showrev" ]
+ then
+ git rev-list --pretty --stdin $revspec
+ else
+ git rev-list --stdin $revspec |
+ while read onerev
+ do
+ eval $(printf "$custom_showrev" $onerev)
+ done
+ fi
+}
+
+
+limit_lines()
+{
+ lines=0
+ skipped=0
+ while IFS="" read -r line; do
+ lines=$((lines + 1))
+ if [ $lines -gt $1 ]; then
+ skipped=$((skipped + 1))
+ else
+ printf "%s\n" "$line"
+ fi
+ done
+ if [ $skipped -ne 0 ]; then
+ echo "... $skipped lines suppressed ..."
+ fi
+}
+
+
+send_mail()
+{
+ if [ -n "$envelopesender" ]; then
+ /usr/sbin/sendmail -t -f "$envelopesender"
+ else
+ /usr/sbin/sendmail -t
+ fi
+}
+
+# ---------------------------- main()
+
+# --- Constants
+LOGBEGIN="- Log -----------------------------------------------------------------"
+LOGEND="-----------------------------------------------------------------------"
+
+# --- Config
+# Set GIT_DIR either from the working directory, or from the environment
+# variable.
+GIT_DIR=$(git rev-parse --git-dir 2>/dev/null)
+if [ -z "$GIT_DIR" ]; then
+ echo >&2 "fatal: post-receive: GIT_DIR not set"
+ exit 1
+fi
+
+projectdesc=$(sed -ne '1p' "$GIT_DIR/description" 2>/dev/null)
+# Check if the description is unchanged from its default, and shorten it to
+# a more manageable length if it is
+if expr "$projectdesc" : "Unnamed repository.*$" >/dev/null
+then
+ projectdesc="UNNAMED PROJECT"
+fi
+
+recipients=$(git config hooks.mailinglist)
+announcerecipients=$(git config hooks.announcelist)
+envelopesender=$(git config hooks.envelopesender)
+emailprefix=$(git config hooks.emailprefix || echo '[SCM] ')
+custom_showrev=$(git config hooks.showrev)
+maxlines=$(git config hooks.emailmaxlines)
+diffopts=$(git config hooks.diffopts)
+: ${diffopts:="--stat --summary --find-copies-harder"}
+
+# --- Main loop
+# Allow dual mode: run from the command line just like the update hook, or
+# if no arguments are given then run as a hook script
+if [ -n "$1" -a -n "$2" -a -n "$3" ]; then
+ # Output to the terminal in command line mode - if someone wanted to
+ # resend an email; they could redirect the output to sendmail
+ # themselves
+ prep_for_email $2 $3 $1 && PAGER= generate_email
+else
+ while read oldrev newrev refname
+ do
+ prep_for_email $oldrev $newrev $refname || continue
+ generate_email $maxlines | send_mail
+ done
+fi
diff --git a/contrib/hooks/pre-auto-gc-battery b/contrib/hooks/pre-auto-gc-battery
new file mode 100644
index 0000000..1f914c9
--- /dev/null
+++ b/contrib/hooks/pre-auto-gc-battery
@@ -0,0 +1,43 @@
+#!/bin/sh
+#
+# An example hook script to verify if you are on battery, in case you
+# are running Linux or OS X. Called by git-gc --auto with no arguments.
+# The hook should exit with non-zero status after issuing an appropriate
+# message if it wants to stop the auto repacking.
+#
+# This hook is stored in the contrib/hooks directory. Your distribution
+# may have put this somewhere else. If you want to use this hook, you
+# should make this script executable then link to it in the repository
+# you would like to use it in.
+#
+# For example, if the hook is stored in
+# /usr/share/git-core/contrib/hooks/pre-auto-gc-battery:
+#
+# chmod a+x pre-auto-gc-battery
+# cd /path/to/your/repository.git
+# ln -sf /usr/share/git-core/contrib/hooks/pre-auto-gc-battery \
+# hooks/pre-auto-gc
+
+if test -x /sbin/on_ac_power && /sbin/on_ac_power
+then
+ exit 0
+elif test "$(cat /sys/class/power_supply/AC/online 2>/dev/null)" = 1
+then
+ exit 0
+elif grep -q 'on-line' /proc/acpi/ac_adapter/AC/state 2>/dev/null
+then
+ exit 0
+elif grep -q '0x01$' /proc/apm 2>/dev/null
+then
+ exit 0
+elif grep -q "AC Power \+: 1" /proc/pmu/info 2>/dev/null
+then
+ exit 0
+elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
+ grep -q "Currently drawing from 'AC Power'"
+then
+ exit 0
+fi
+
+echo "Auto packing deferred; not on AC"
+exit 1
diff --git a/contrib/hooks/setgitperms.perl b/contrib/hooks/setgitperms.perl
new file mode 100644
index 0000000..2770a1b
--- /dev/null
+++ b/contrib/hooks/setgitperms.perl
@@ -0,0 +1,214 @@
+#!/usr/bin/perl
+#
+# Copyright (c) 2006 Josh England
+#
+# This script can be used to save/restore full permissions and ownership data
+# within a git working tree.
+#
+# To save permissions/ownership data, place this script in your .git/hooks
+# directory and enable a `pre-commit` hook with the following lines:
+# #!/bin/sh
+# SUBDIRECTORY_OK=1 . git-sh-setup
+# $GIT_DIR/hooks/setgitperms.perl -r
+#
+# To restore permissions/ownership data, place this script in your .git/hooks
+# directory and enable a `post-merge` and `post-checkout` hook with the
+# following lines:
+# #!/bin/sh
+# SUBDIRECTORY_OK=1 . git-sh-setup
+# $GIT_DIR/hooks/setgitperms.perl -w
+#
+use strict;
+use Getopt::Long;
+use File::Find;
+use File::Basename;
+
+my $usage =
+"usage: setgitperms.perl [OPTION]... <--read|--write>
+This program uses a file `.gitmeta` to store/restore permissions and uid/gid
+info for all files/dirs tracked by git in the repository.
+
+---------------------------------Read Mode-------------------------------------
+-r, --read Reads perms/etc from working dir into a .gitmeta file
+-s, --stdout Output to stdout instead of .gitmeta
+-d, --diff Show unified diff of perms file (XOR with --stdout)
+
+---------------------------------Write Mode------------------------------------
+-w, --write Modify perms/etc in working dir to match the .gitmeta file
+-v, --verbose Be verbose
+
+\n";
+
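+# A line in .gitmeta looks like this (illustrative values; see the format
+# written by printstats() at the bottom of this script):
+#
+#   some/dir/file mode=0644 uid=1000 gid=1000
+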
+my ($stdout, $showdiff, $verbose, $read_mode, $write_mode);
+
+if ((@ARGV < 0) || !GetOptions(
+ "stdout", \$stdout,
+ "diff", \$showdiff,
+ "read", \$read_mode,
+ "write", \$write_mode,
+ "verbose", \$verbose,
+ )) { die $usage; }
+die $usage unless ($read_mode xor $write_mode);
+
+my $topdir = `git rev-parse --show-cdup` or die "\n"; chomp $topdir;
+my $gitdir = $topdir . '.git';
+my $gitmeta = $topdir . '.gitmeta';
+
+if ($write_mode) {
+ # Update the working dir permissions/ownership based on data from .gitmeta
+ open (IN, "<$gitmeta") or die "Could not open $gitmeta for reading: $!\n";
+ while (defined ($_ = <IN>)) {
+ chomp;
+ if (/^(.*) mode=(\S+)\s+uid=(\d+)\s+gid=(\d+)/) {
+ # Compare recorded perms to actual perms in the working dir
+ my ($path, $mode, $uid, $gid) = ($1, $2, $3, $4);
+ my $fullpath = $topdir . $path;
+ my (undef,undef,$wmode,undef,$wuid,$wgid) = lstat($fullpath);
+ $wmode = sprintf "%04o", $wmode & 07777;
+ if ($mode ne $wmode) {
+ $verbose && print "Updating permissions on $path: old=$wmode, new=$mode\n";
+ chmod oct($mode), $fullpath;
+ }
+ if ($uid != $wuid || $gid != $wgid) {
+ if ($verbose) {
+ # Print out user/group names instead of uid/gid
+ my $pwname = getpwuid($uid);
+ my $grpname = getgrgid($gid);
+ my $wpwname = getpwuid($wuid);
+ my $wgrpname = getgrgid($wgid);
+ $pwname = $uid if !defined $pwname;
+ $grpname = $gid if !defined $grpname;
+ $wpwname = $wuid if !defined $wpwname;
+ $wgrpname = $wgid if !defined $wgrpname;
+
+ print "Updating uid/gid on $path: old=$wpwname/$wgrpname, new=$pwname/$grpname\n";
+ }
+ chown $uid, $gid, $fullpath;
+ }
+ }
+ else {
+ warn "Invalid input format in $gitmeta:\n\t$_\n";
+ }
+ }
+ close IN;
+}
+elsif ($read_mode) {
+    # Handle merge conflicts in the .gitmeta file
+ if (-e "$gitdir/MERGE_MSG") {
+ if (`grep ====== $gitmeta`) {
+ # Conflict not resolved -- abort the commit
+ print "PERMISSIONS/OWNERSHIP CONFLICT\n";
+ print " Resolve the conflict in the $gitmeta file and then run\n";
+ print " `.git/hooks/setgitperms.perl --write` to reconcile.\n";
+ exit 1;
+ }
+ elsif (`grep $gitmeta $gitdir/MERGE_MSG`) {
+ # A conflict in .gitmeta has been manually resolved. Verify that
+ # the working dir perms matches the current .gitmeta perms for
+ # each file/dir that conflicted.
+ # This is here because a `setgitperms.perl --write` was not
+ # performed due to a merge conflict, so permissions/ownership
+ # may not be consistent with the manually merged .gitmeta file.
+ my @conflict_diff = `git show \$(cat $gitdir/MERGE_HEAD)`;
+ my @conflict_files;
+ my $metadiff = 0;
+
+ # Build a list of files that conflicted from the .gitmeta diff
+ foreach my $line (@conflict_diff) {
+ if ($line =~ m|^diff --git a/$gitmeta b/$gitmeta|) {
+ $metadiff = 1;
+ }
+ elsif ($line =~ /^diff --git/) {
+ $metadiff = 0;
+ }
+ elsif ($metadiff && $line =~ /^\+(.*) mode=/) {
+ push @conflict_files, $1;
+ }
+ }
+
+ # Verify that each conflict file now has permissions consistent
+ # with the .gitmeta file
+ foreach my $file (@conflict_files) {
+ my $absfile = $topdir . $file;
+ my $gm_entry = `grep "^$file mode=" $gitmeta`;
+ if ($gm_entry =~ /mode=(\d+) uid=(\d+) gid=(\d+)/) {
+ my ($gm_mode, $gm_uid, $gm_gid) = ($1, $2, $3);
+ my (undef,undef,$mode,undef,$uid,$gid) = lstat("$absfile");
+ $mode = sprintf("%04o", $mode & 07777);
+ if (($gm_mode ne $mode) || ($gm_uid != $uid)
+ || ($gm_gid != $gid)) {
+ print "PERMISSIONS/OWNERSHIP CONFLICT\n";
+ print " Mismatch found for file: $file\n";
+ print " Run `.git/hooks/setgitperms.perl --write` to reconcile.\n";
+ exit 1;
+ }
+ }
+ else {
+ print "Warning! Permissions/ownership no longer being tracked for file: $file\n";
+ }
+ }
+ }
+ }
+
+ # No merge conflicts -- write out perms/ownership data to .gitmeta file
+ unless ($stdout) {
+ open (OUT, ">$gitmeta.tmp") or die "Could not open $gitmeta.tmp for writing: $!\n";
+ }
+
+ my @files = `git ls-files`;
+ my %dirs;
+
+ foreach my $path (@files) {
+ chomp $path;
+ # We have to manually add stats for parent directories
+ my $parent = dirname($path);
+ while (!exists $dirs{$parent}) {
+ $dirs{$parent} = 1;
+ next if $parent eq '.';
+ printstats($parent);
+ $parent = dirname($parent);
+ }
+ # Now the git-tracked file
+ printstats($path);
+ }
+
+ # diff the temporary metadata file to see if anything has changed
+ # If no metadata has changed, don't overwrite the real file
+ # This is just so `git commit -a` doesn't try to commit a bogus update
+ unless ($stdout) {
+ if (! -e $gitmeta) {
+ rename "$gitmeta.tmp", $gitmeta;
+ }
+ else {
+ my $diff = `diff -U 0 $gitmeta $gitmeta.tmp`;
+ if ($diff ne '') {
+ rename "$gitmeta.tmp", $gitmeta;
+ }
+ else {
+ unlink "$gitmeta.tmp";
+ }
+ if ($showdiff) {
+ print $diff;
+ }
+ }
+ close OUT;
+ }
+ # Make sure the .gitmeta file is tracked
+ system("git add $gitmeta");
+}
+
+
+sub printstats {
+ my $path = $_[0];
+ $path =~ s/@/\@/g;
+ my (undef,undef,$mode,undef,$uid,$gid) = lstat($path);
+ $path =~ s/%/\%/g;
+ if ($stdout) {
+ print $path;
+ printf " mode=%04o uid=$uid gid=$gid\n", $mode & 07777;
+ }
+ else {
+ print OUT $path;
+ printf OUT " mode=%04o uid=$uid gid=$gid\n", $mode & 07777;
+ }
+}
diff --git a/contrib/hooks/update-paranoid b/contrib/hooks/update-paranoid
new file mode 100644
index 0000000..d18b317
--- /dev/null
+++ b/contrib/hooks/update-paranoid
@@ -0,0 +1,421 @@
+#!/usr/bin/perl
+
+use strict;
+use File::Spec;
+
+$ENV{PATH} = '/opt/git/bin';
+my $acl_git = '/vcs/acls.git';
+my $acl_branch = 'refs/heads/master';
+my $debug = 0;
+
+=doc
+Invoked as: update refname old-sha1 new-sha1
+
+This script is run by git-receive-pack once for each ref that the
+client is trying to modify. If we exit with a non-zero exit value
+then the update for that particular ref is denied, but updates for
+other refs in the same run of receive-pack may still be allowed.
+
+We are run after the objects have been uploaded, but before the
+ref is actually modified. We take advantage of that fact when we
+look for "new" commits and tags (the new objects won't show up in
+`rev-list --all`).
+
+This script loads and parses the content of the config file
+"users/$this_user.acl" from the $acl_branch commit of $acl_git ODB.
+The acl file is a git-config style file, but uses a slightly more
+restricted syntax as the Perl parser contained within this script
+is not nearly as permissive as git-config.
+
+Example:
+
+ [user]
+ committer = John Doe <john.doe@example.com>
+ committer = John R. Doe <john.doe@example.com>
+
+ [repository "acls"]
+ allow = heads/master
+ allow = CDUR for heads/jd/
+ allow = C for ^tags/v\\d+$
+
+For all new commit or tag objects the committer (or tagger) line
+within the object must exactly match one of the user.committer
+values listed in the acl file ("HEAD:users/$this_user.acl").
+
+For a branch to be modified an allow line within the matching
+repository section must be matched for both the refname and the
+opcode.
+
+Repository sections are matched on the basename of the repository
+(after removing the .git suffix).
+
+The opcode abbreviations are:
+
+ C: create new ref
+ D: delete existing ref
+ U: fast-forward existing ref (no commit loss)
+ R: rewind/rebase existing ref (commit loss)
+
+If no opcodes are listed before the "for" keyword then "U" (for
+fast-forward update only) is assumed, as this is the most common
+usage.
+
+Refnames are matched by always assuming a prefix of "refs/".
+This hook forbids pushing or deleting anything not under "refs/".
+
+Refnames that start with ^ are Perl regular expressions, and the ^
+is kept as part of the regexp. \\ is needed to get just one \, so
+\\d expands to \d in Perl. The 3rd allow line above is an example.
+
+Refnames that don't start with ^ but that end with / are prefix
+matches (2nd allow line above); all other refnames are strict
+equality matches (1st allow line).
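+
+For example, "heads/jd/" matches "refs/heads/jd/topic" (prefix match),
+"^tags/v\\d+$" matches "refs/tags/v7" but not "refs/tags/vNext" (regexp
+match), and "heads/master" matches only that exact ref.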
+
+Anything pushed to "heads/" (ok, really "refs/heads/") must be
+a commit. Tags are not permitted here.
+
+Anything pushed to "tags/" (err, really "refs/tags/") must be an
+annotated tag. Commits, blobs, trees, etc. are not permitted here.
+Annotated tag signatures aren't checked, nor are they required.
+
+The special subrepository of 'info/new-commit-check' can
+be created and used to allow users to push new commits and
+tags from another local repository to this one, even if they
+aren't the committer/tagger of those objects. In a nutshell
+the info/new-commit-check directory is a Git repository whose
+objects/info/alternates file lists this repository and all other
+possible sources, and whose refs subdirectory contains symlinks
+to this repository's refs subdirectory, and to all other possible
+sources' refs subdirectories. Yes, this means that you cannot
+use packed-refs in those repositories as they won't be resolved
+correctly.
+
+=cut
+
+my $git_dir = $ENV{GIT_DIR};
+my $new_commit_check = "$git_dir/info/new-commit-check";
+my $ref = $ARGV[0];
+my $old = $ARGV[1];
+my $new = $ARGV[2];
+my $new_type;
+my ($this_user) = getpwuid $<; # REAL_USER_ID
+my $repository_name;
+my %user_committer;
+my @allow_rules;
+my @path_rules;
+my %diff_cache;
+
+sub deny ($) {
+ print STDERR "-Deny- $_[0]\n" if $debug;
+ print STDERR "\ndenied: $_[0]\n\n";
+ exit 1;
+}
+
+sub grant ($) {
+ print STDERR "-Grant- $_[0]\n" if $debug;
+ exit 0;
+}
+
+sub info ($) {
+ print STDERR "-Info- $_[0]\n" if $debug;
+}
+
+sub git_value (@) {
+ open(T,'-|','git',@_); local $_ = <T>; chop; close T; $_;
+}
+
+sub match_string ($$) {
+ my ($acl_n, $ref) = @_;
+ ($acl_n eq $ref)
+ || ($acl_n =~ m,/$, && substr($ref,0,length $acl_n) eq $acl_n)
+ || ($acl_n =~ m,^\^, && $ref =~ m:$acl_n:);
+}
+
+sub parse_config ($$$$) {
+ my $data = shift;
+ local $ENV{GIT_DIR} = shift;
+ my $br = shift;
+ my $fn = shift;
+ return unless git_value('rev-list','--max-count=1',$br,'--',$fn);
+ info "Loading $br:$fn";
+ open(I,'-|','git','cat-file','blob',"$br:$fn");
+ my $section = '';
+ while (<I>) {
+ chomp;
+ if (/^\s*$/ || /^\s*#/) {
+ } elsif (/^\[([a-z]+)\]$/i) {
+ $section = lc $1;
+ } elsif (/^\[([a-z]+)\s+"(.*)"\]$/i) {
+ $section = join('.',lc $1,$2);
+ } elsif (/^\s*([a-z][a-z0-9]+)\s*=\s*(.*?)\s*$/i) {
+ push @{$data->{join('.',$section,lc $1)}}, $2;
+ } else {
+ deny "bad config file line $. in $br:$fn";
+ }
+ }
+ close I;
+}
+
+sub all_new_committers () {
+ local $ENV{GIT_DIR} = $git_dir;
+ $ENV{GIT_DIR} = $new_commit_check if -d $new_commit_check;
+
+ info "Getting committers of new commits.";
+ my %used;
+ open(T,'-|','git','rev-list','--pretty=raw',$new,'--not','--all');
+ while (<T>) {
+ next unless s/^committer //;
+ chop;
+ s/>.*$/>/;
+ info "Found $_." unless $used{$_}++;
+ }
+ close T;
+ info "No new commits." unless %used;
+ keys %used;
+}
+
+sub all_new_taggers () {
+ my %exists;
+ open(T,'-|','git','for-each-ref','--format=%(objectname)','refs/tags');
+ while (<T>) {
+ chop;
+ $exists{$_} = 1;
+ }
+ close T;
+
+ info "Getting taggers of new tags.";
+ my %used;
+ my $obj = $new;
+ my $obj_type = $new_type;
+ while ($obj_type eq 'tag') {
+ last if $exists{$obj};
+ $obj_type = '';
+ open(T,'-|','git','cat-file','tag',$obj);
+ while (<T>) {
+ chop;
+ if (/^object ([a-z0-9]{40})$/) {
+ $obj = $1;
+ } elsif (/^type (.+)$/) {
+ $obj_type = $1;
+ } elsif (s/^tagger //) {
+ s/>.*$/>/;
+ info "Found $_." unless $used{$_}++;
+ last;
+ }
+ }
+ close T;
+ }
+ info "No new tags." unless %used;
+ keys %used;
+}
+
+sub check_committers (@) {
+ my @bad;
+ foreach (@_) { push @bad, $_ unless $user_committer{$_}; }
+ if (@bad) {
+ print STDERR "\n";
+ print STDERR "You are not $_.\n" foreach (sort @bad);
+ deny "You cannot push changes not committed by you.";
+ }
+}
+
+sub load_diff ($) {
+ my $base = shift;
+ my $d = $diff_cache{$base};
+ unless ($d) {
+ local $/ = "\0";
+ my %this_diff;
+ if ($base =~ /^0{40}$/) {
+ # Don't load the diff at all; we are making the
+ # branch and have no base to compare to in this
+ # case. A file level ACL makes no sense in this
+ # context. Having an empty diff will allow the
+ # branch creation.
+ #
+ } else {
+ open(T,'-|','git','diff-tree',
+ '-r','--name-status','-z',
+ $base,$new) or return undef;
+ while (<T>) {
+ my $op = $_;
+ chop $op;
+
+ my $path = <T>;
+ chop $path;
+
+ $this_diff{$path} = $op;
+ }
+ close T or return undef;
+ }
+ $d = \%this_diff;
+ $diff_cache{$base} = $d;
+ }
+ return $d;
+}
+
+deny "No GIT_DIR inherited from caller" unless $git_dir;
+deny "Need a ref name" unless $ref;
+deny "Refusing funny ref $ref" unless $ref =~ s,^refs/,,;
+deny "Bad old value $old" unless $old =~ /^[a-z0-9]{40}$/;
+deny "Bad new value $new" unless $new =~ /^[a-z0-9]{40}$/;
+deny "Cannot determine who you are." unless $this_user;
+grant "No change requested." if $old eq $new;
+
+$repository_name = File::Spec->rel2abs($git_dir);
+$repository_name =~ m,/([^/]+)(?:\.git|/\.git)$,;
+$repository_name = $1;
+info "Updating in '$repository_name'.";
+
+my $op;
+if ($old =~ /^0{40}$/) { $op = 'C'; }
+elsif ($new =~ /^0{40}$/) { $op = 'D'; }
+else { $op = 'R'; }
+
+# This is really an update (fast-forward) if the
+# merge base of $old and $new is $old.
+#
+$op = 'U' if ($op eq 'R'
+ && $ref =~ m,^heads/,
+ && $old eq git_value('merge-base',$old,$new));
+
+# Load the user's ACL file. Expand groups (user.memberof) one level.
+{
+ my %data = ('user.committer' => []);
+ parse_config(\%data,$acl_git,$acl_branch,"external/$repository_name.acl");
+
+ %data = (
+ 'user.committer' => $data{'user.committer'},
+ 'user.memberof' => [],
+ );
+ parse_config(\%data,$acl_git,$acl_branch,"users/$this_user.acl");
+
+ %user_committer = map {$_ => $_} @{$data{'user.committer'}};
+ my $rule_key = "repository.$repository_name.allow";
+ my $rules = $data{$rule_key} || [];
+
+ foreach my $group (@{$data{'user.memberof'}}) {
+ my %g;
+ parse_config(\%g,$acl_git,$acl_branch,"groups/$group.acl");
+ my $group_rules = $g{$rule_key};
+ push @$rules, @$group_rules if $group_rules;
+ }
+
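+	# Besides the "opcodes for refname" rules documented above, an
+	# allow line can also restrict changes by file path, e.g.
+	# (illustrative):
+	#
+	#   allow = AMD of docs/ for heads/master
+	#
+	# which limits adds/modifications/deletions to paths under docs/
+	# on that branch.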
+RULE:
+ foreach (@$rules) {
+ while (/\${user\.([a-z][a-zA-Z0-9]+)}/) {
+ my $k = lc $1;
+ my $v = $data{"user.$k"};
+ next RULE unless defined $v;
+ next RULE if @$v != 1;
+ next RULE unless defined $v->[0];
+ s/\${user\.$k}/$v->[0]/g;
+ }
+
+ if (/^([AMD ]+)\s+of\s+([^\s]+)\s+for\s+([^\s]+)\s+diff\s+([^\s]+)$/) {
+ my ($ops, $pth, $ref, $bst) = ($1, $2, $3, $4);
+ $ops =~ s/ //g;
+ $pth =~ s/\\\\/\\/g;
+ $ref =~ s/\\\\/\\/g;
+ push @path_rules, [$ops, $pth, $ref, $bst];
+ } elsif (/^([AMD ]+)\s+of\s+([^\s]+)\s+for\s+([^\s]+)$/) {
+ my ($ops, $pth, $ref) = ($1, $2, $3);
+ $ops =~ s/ //g;
+ $pth =~ s/\\\\/\\/g;
+ $ref =~ s/\\\\/\\/g;
+ push @path_rules, [$ops, $pth, $ref, $old];
+ } elsif (/^([CDRU ]+)\s+for\s+([^\s]+)$/) {
+ my $ops = $1;
+ my $ref = $2;
+ $ops =~ s/ //g;
+ $ref =~ s/\\\\/\\/g;
+ push @allow_rules, [$ops, $ref];
+ } elsif (/^for\s+([^\s]+)$/) {
+ # Mentioned, but nothing granted?
+ } elsif (/^[^\s]+$/) {
+ s/\\\\/\\/g;
+ push @allow_rules, ['U', $_];
+ }
+ }
+}
+
+if ($op ne 'D') {
+ $new_type = git_value('cat-file','-t',$new);
+
+ if ($ref =~ m,^heads/,) {
+ deny "$ref must be a commit." unless $new_type eq 'commit';
+ } elsif ($ref =~ m,^tags/,) {
+ deny "$ref must be an annotated tag." unless $new_type eq 'tag';
+ }
+
+ check_committers (all_new_committers);
+ check_committers (all_new_taggers) if $new_type eq 'tag';
+}
+
+info "$this_user wants $op for $ref";
+foreach my $acl_entry (@allow_rules) {
+ my ($acl_ops, $acl_n) = @$acl_entry;
+ next unless $acl_ops =~ /^[CDRU]+$/; # Uhh.... shouldn't happen.
+ next unless $acl_n;
+ next unless $op =~ /^[$acl_ops]$/;
+ next unless match_string $acl_n, $ref;
+
+ # Don't test path rules on branch deletes.
+ #
+ grant "Allowed by: $acl_ops for $acl_n" if $op eq 'D';
+
+ # Aggregate matching path rules; allow if there aren't
+ # any matching this ref.
+ #
+ my %pr;
+ foreach my $p_entry (@path_rules) {
+ my ($p_ops, $p_n, $p_ref, $p_bst) = @$p_entry;
+ next unless $p_ref;
+ push @{$pr{$p_bst}}, $p_entry if match_string $p_ref, $ref;
+ }
+ grant "Allowed by: $acl_ops for $acl_n" unless %pr;
+
+ # Allow only if all changes against a single base are
+ # allowed by file path rules.
+ #
+ my @bad;
+ foreach my $p_bst (keys %pr) {
+ my $diff_ref = load_diff $p_bst;
+ deny "Cannot difference trees." unless ref $diff_ref;
+
+ my %fd = %$diff_ref;
+ foreach my $p_entry (@{$pr{$p_bst}}) {
+ my ($p_ops, $p_n, $p_ref, $p_bst) = @$p_entry;
+ next unless $p_ops =~ /^[AMD]+$/;
+ next unless $p_n;
+
+ foreach my $f_n (keys %fd) {
+ my $f_op = $fd{$f_n};
+ next unless $f_op;
+ next unless $f_op =~ /^[$p_ops]$/;
+ delete $fd{$f_n} if match_string $p_n, $f_n;
+ }
+ last unless %fd;
+ }
+
+ if (%fd) {
+ push @bad, [$p_bst, \%fd];
+ } else {
+ # All changes relative to $p_bst were allowed.
+ #
+ grant "Allowed by: $acl_ops for $acl_n diff $p_bst";
+ }
+ }
+
+ foreach my $bad_ref (@bad) {
+ my ($p_bst, $fd) = @$bad_ref;
+ print STDERR "\n";
+ print STDERR "Not allowed to make the following changes:\n";
+ print STDERR "(base: $p_bst)\n";
+ foreach my $f_n (sort keys %$fd) {
+ print STDERR " $fd->{$f_n} $f_n\n";
+ }
+ }
+ deny "You are not permitted to $op $ref";
+}
+close A;
+deny "You are not permitted to $op $ref";
diff --git a/contrib/mw-to-git/.gitignore b/contrib/mw-to-git/.gitignore
new file mode 100644
index 0000000..b919655
--- /dev/null
+++ b/contrib/mw-to-git/.gitignore
@@ -0,0 +1 @@
+git-remote-mediawiki
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
new file mode 100644
index 0000000..f149719
--- /dev/null
+++ b/contrib/mw-to-git/Makefile
@@ -0,0 +1,17 @@
+#
+# Copyright (C) 2013
+# Matthieu Moy <Matthieu.Moy@imag.fr>
+#
+## Build git-remote-mediawiki
+
+SCRIPT_PERL=git-remote-mediawiki.perl
+GIT_ROOT_DIR=../..
+HERE=contrib/mw-to-git/
+
+SCRIPT_PERL_FULL=$(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
+
+all: build
+
+build install clean:
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL=$(SCRIPT_PERL_FULL) \
+ $@-perl-script
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
new file mode 100755
index 0000000..7173872
--- /dev/null
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -0,0 +1,1322 @@
+#! /usr/bin/perl
+
+# Copyright (C) 2011
+# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
+# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
+# Claire Fousse <claire.fousse@ensimag.imag.fr>
+# David Amouyal <david.amouyal@ensimag.imag.fr>
+# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
+# License: GPL v2 or later
+
+# Gateway between Git and MediaWiki.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+
+use strict;
+use MediaWiki::API;
+use Git;
+use DateTime::Format::ISO8601;
+
+# By default, use UTF-8 to communicate with Git and the user
+binmode STDERR, ":utf8";
+binmode STDOUT, ":utf8";
+
+use URI::Escape;
+use IPC::Open2;
+
+use warnings;
+
+# Mediawiki filenames can contain forward slashes. This constant decides
+# what they should be replaced with.
+use constant SLASH_REPLACEMENT => "%2F";
+
+# It's not always possible to delete pages (may require some
+# privileges). Deleted pages are replaced with this content.
+use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
+
+# It's not possible to create empty pages. New empty files in Git are
+# sent with this content instead.
+use constant EMPTY_CONTENT => "<!-- empty page -->\n";
+
+# used to reflect file creation or deletion in diff.
+use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+
+# Used on Git's side to reflect empty edit messages on the wiki
+use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+
+my $remotename = $ARGV[0];
+my $url = $ARGV[1];
+
+# Accept both space-separated values and multiple keys in the config file.
+# Spaces should be written as _ anyway because we'll use chomp.
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
+chomp(@tracked_pages);
+
+# Just like @tracked_pages, but for MediaWiki categories.
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
+chomp(@tracked_categories);
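+
+# For example (hypothetical remote name and page titles):
+#   git config remote.origin.pages "Main_Page Project_news"
+#   git config remote.origin.categories "Foo"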
+
+# Import media files on pull
+my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+chomp($import_media);
+$import_media = ($import_media eq "true");
+
+# Export media files on push
+my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+chomp($export_media);
+$export_media = !($export_media eq "false");
+
+my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
+# Note: mwPassword is discouraged. Use the credential system instead.
+my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
+my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
+chomp($wiki_login);
+chomp($wiki_passwd);
+chomp($wiki_domain);
+
+# Import only last revisions (both for clone and fetch)
+my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
+chomp($shallow_import);
+$shallow_import = ($shallow_import eq "true");
+
+# Fetch (clone and pull) by revisions instead of by pages. This behavior
+# is more efficient when we have a wiki with lots of pages and we fetch
+# the revisions quite often so that they concern only a few pages.
+# Possible values:
+# - by_rev: perform one query per new revision on the remote wiki
+# - by_page: query each tracked page for new revision
+my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
+unless ($fetch_strategy) {
+ $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+}
+chomp($fetch_strategy);
+unless ($fetch_strategy) {
+ $fetch_strategy = "by_page";
+}
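+
+# For example, to fetch by revision instead (hypothetical remote name):
+#   git config remote.origin.fetchStrategy by_rev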
+
+# Dumb push: don't update notes and mediawiki ref to reflect the last push.
+#
+# Configurable with mediawiki.dumbPush, or per-remote with
+# remote.<remotename>.dumbPush.
+#
+# This means the user will have to re-import the just-pushed
+# revisions. On the other hand, this means that the Git revisions
+# corresponding to MediaWiki revisions are all imported from the wiki,
+# regardless of whether they were initially created in Git or from the
+# web interface, hence all users will get the same history (i.e. if
+# the push from Git to MediaWiki loses some information, everybody
+# will get the history with information lost). If the import is
+# deterministic, this means everybody gets the same sha1 for each
+# MediaWiki revision.
+my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
+unless ($dumb_push) {
+ $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
+}
+chomp($dumb_push);
+$dumb_push = ($dumb_push eq "true");
+
+my $wiki_name = $url;
+$wiki_name =~ s/[^\/]*:\/\///;
+# If URL is like http://user:password@example.com/, we clearly don't
+# want the password in $wiki_name. While we're there, also remove user
+# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
+$wiki_name =~ s/^.*@//;
+
+# Commands parser
+my $entry;
+my @cmd;
+while (<STDIN>) {
+ chomp;
+ @cmd = split(/ /);
+ if (defined($cmd[0])) {
+ # Line not blank
+ if ($cmd[0] eq "capabilities") {
+ die("Too many arguments for capabilities") unless (!defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq "list") {
+ die("Too many arguments for list") unless (!defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq "import") {
+ die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq "option") {
+ die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq "push") {
+ mw_push($cmd[1]);
+ } else {
+ print STDERR "Unknown command. Aborting...\n";
+ last;
+ }
+ } else {
+ # blank line: we should terminate
+ last;
+ }
+
+ BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
+ # command is fully processed.
+}
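+
+# A session driven by git typically looks like this on stdin (illustrative;
+# a blank line terminates the loop above):
+#
+#   capabilities
+#   list
+#   import refs/heads/master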
+
+########################## Functions ##############################
+
+# MediaWiki API instance, created lazily.
+my $mediawiki;
+
+sub mw_connect_maybe {
+ if ($mediawiki) {
+ return;
+ }
+ $mediawiki = MediaWiki::API->new;
+ $mediawiki->{config}->{api_url} = "$url/api.php";
+ if ($wiki_login) {
+ my %credential = (
+ 'url' => $url,
+ 'username' => $wiki_login,
+ 'password' => $wiki_passwd
+ );
+ Git::credential(\%credential);
+ my $request = {lgname => $credential{username},
+ lgpassword => $credential{password},
+ lgdomain => $wiki_domain};
+ if ($mediawiki->login($request)) {
+ Git::credential(\%credential, 'approve');
+ print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
+ } else {
+ print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
+ print STDERR " (error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ Git::credential(\%credential, 'reject');
+ exit 1;
+ }
+ }
+}
+
+sub fatal_mw_error {
+ my $action = shift;
+ print STDERR "fatal: could not $action.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ if ($url =~ /^https/) {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
+ print STDERR "fatal: and the SSL certificate is correct.\n";
+ } else {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ }
+ print STDERR "fatal: (error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ exit 1;
+}
+
+## Functions for listing pages on the remote wiki
+sub get_mw_tracked_pages {
+ my $pages = shift;
+ get_mw_page_list(\@tracked_pages, $pages);
+}
+
+sub get_mw_page_list {
+ my $page_list = shift;
+ my $pages = shift;
+ my @some_pages = @$page_list;
+ while (@some_pages) {
+ my $last = 50;
+ if ($#some_pages < $last) {
+ $last = $#some_pages;
+ }
+ my @slice = @some_pages[0..$last];
+ get_mw_first_pages(\@slice, $pages);
+ @some_pages = @some_pages[51..$#some_pages];
+ }
+}
+
+sub get_mw_tracked_categories {
+ my $pages = shift;
+ foreach my $category (@tracked_categories) {
+ if (index($category, ':') < 0) {
+ # Mediawiki requires the Category
+ # prefix, but let's not force the user
+ # to specify it.
+ $category = "Category:" . $category;
+ }
+ my $mw_pages = $mediawiki->list( {
+ action => 'query',
+ list => 'categorymembers',
+ cmtitle => $category,
+ cmlimit => 'max' } )
+ || die $mediawiki->{error}->{code} . ': '
+ . $mediawiki->{error}->{details};
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+}
+
+sub get_mw_all_pages {
+ my $pages = shift;
+ # No user-provided list, get the list of pages from the API.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ fatal_mw_error("get the list of wiki pages");
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+}
+
+# Queries the wiki for a set of pages. Meant to be used within a loop
+# querying the wiki for slices of the page list.
+sub get_mw_first_pages {
+ my $some_pages = shift;
+ my @some_pages = @{$some_pages};
+
+ my $pages = shift;
+
+ # pattern 'page1|page2|...' required by the API
+ my $titles = join('|', @some_pages);
+
+ my $mw_pages = $mediawiki->api({
+ action => 'query',
+ titles => $titles,
+ });
+ if (!defined($mw_pages)) {
+ fatal_mw_error("query the list of wiki pages");
+ }
+ while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
+ if ($id < 0) {
+ print STDERR "Warning: page $page->{title} not found on wiki\n";
+ } else {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+}
+
+# Get the list of pages to be fetched according to configuration.
+sub get_mw_pages {
+ mw_connect_maybe();
+
+ print STDERR "Listing pages on remote wiki...\n";
+
+ my %pages; # hash on page titles to avoid duplicates
+ my $user_defined;
+ if (@tracked_pages) {
+ $user_defined = 1;
+		# The user provided a list of page titles, but we
+ # still need to query the API to get the page IDs.
+ get_mw_tracked_pages(\%pages);
+ }
+ if (@tracked_categories) {
+ $user_defined = 1;
+ get_mw_tracked_categories(\%pages);
+ }
+ if (!$user_defined) {
+ get_mw_all_pages(\%pages);
+ }
+ if ($import_media) {
+ print STDERR "Getting media files for selected pages...\n";
+ if ($user_defined) {
+ get_linked_mediafiles(\%pages);
+ } else {
+ get_all_mediafiles(\%pages);
+ }
+ }
+	print STDERR scalar(keys %pages) . " pages found.\n";
+ return %pages;
+}
+
+# usage: $out = run_git("command args");
+# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
+sub run_git {
+ my $args = shift;
+ my $encoding = (shift || "encoding(UTF-8)");
+ open(my $git, "-|:$encoding", "git " . $args);
+ my $res = do { local $/; <$git> };
+ close($git);
+
+ return $res;
+}
+
+
+sub get_all_mediafiles {
+ my $pages = shift;
+	# Attach the list of media-file pages from the API;
+	# they are in a different namespace, and only one namespace
+	# can be queried at a time.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ apnamespace => get_mw_namespace_id("File"),
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not get the list of pages for media files.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+}
+
+sub get_linked_mediafiles {
+ my $pages = shift;
+ my @titles = map $_->{title}, values(%{$pages});
+
+	# The query is split into small batches because of the MW API limit on
+	# the number of links to be returned (500 links max).
+ my $batch = 10;
+ while (@titles) {
+ if ($#titles < $batch) {
+ $batch = $#titles;
+ }
+ my @slice = @titles[0..$batch];
+
+ # pattern 'page1|page2|...' required by the API
+ my $mw_titles = join('|', @slice);
+
+		# Media files can be included in or linked from
+		# a page; get all related files.
+ my $query = {
+ action => 'query',
+ prop => 'links|images',
+ titles => $mw_titles,
+ plnamespace => get_mw_namespace_id("File"),
+ pllimit => 'max'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
+ my @media_titles;
+ if (defined($page->{links})) {
+ my @link_titles = map $_->{title}, @{$page->{links}};
+ push(@media_titles, @link_titles);
+ }
+ if (defined($page->{images})) {
+ my @image_titles = map $_->{title}, @{$page->{images}};
+ push(@media_titles, @image_titles);
+ }
+ if (@media_titles) {
+ get_mw_page_list(\@media_titles, $pages);
+ }
+ }
+
+ @titles = @titles[($batch+1)..$#titles];
+ }
+}
+
+sub get_mw_mediafile_for_page_revision {
+ # Name of the file on Wiki, with the prefix.
+ my $filename = shift;
+ my $timestamp = shift;
+ my %mediafile;
+
+	# Check whether a media file with the given timestamp exists on
+	# MediaWiki. If so, download the file.
+ my $query = {
+ action => 'query',
+ prop => 'imageinfo',
+ titles => "File:" . $filename,
+ iistart => $timestamp,
+ iiend => $timestamp,
+ iiprop => 'timestamp|archivename|url',
+ iilimit => 1
+ };
+ my $result = $mediawiki->api($query);
+
+ my ($fileid, $file) = each( %{$result->{query}->{pages}} );
+	# If not defined, it means there is no revision of the file for
+	# the given timestamp.
+ if (defined($file->{imageinfo})) {
+ $mediafile{title} = $filename;
+
+ my $fileinfo = pop(@{$file->{imageinfo}});
+ $mediafile{timestamp} = $fileinfo->{timestamp};
+ # Mediawiki::API's download function doesn't support https URLs
+ # and can't download old versions of files.
+ print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ $mediafile{content} = download_mw_mediafile($fileinfo->{url});
+ }
+ return %mediafile;
+}
+
+sub download_mw_mediafile {
+ my $url = shift;
+
+ my $response = $mediawiki->{ua}->get($url);
+ if ($response->code == 200) {
+ return $response->decoded_content;
+ } else {
+		print STDERR "Error downloading mediafile from:\n";
+ print STDERR "URL: $url\n";
+ print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+ exit 1;
+ }
+}
+
+sub get_last_local_revision {
+ # Get note regarding last mediawiki revision
+ my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
+ my @note_info = split(/ /, $note);
+
+ my $lastrevision_number;
+ if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
+ print STDERR "No previous mediawiki revision found";
+ $lastrevision_number = 0;
+ } else {
+		# Notes are formatted: mediawiki_revision: #number
+ $lastrevision_number = $note_info[1];
+ chomp($lastrevision_number);
+ print STDERR "Last local mediawiki revision found is $lastrevision_number";
+ }
+ return $lastrevision_number;
+}
+
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
+# Get the last remote revision without taking into account which pages
+# are tracked or not. This function makes a single request to the wiki,
+# thus avoiding a loop over all tracked pages. This is useful for the
+# fetch-by-rev option.
+sub get_last_global_remote_rev {
+ mw_connect_maybe();
+
+ my $query = {
+ action => 'query',
+ list => 'recentchanges',
+ prop => 'revisions',
+ rclimit => '1',
+ rcdir => 'older',
+ };
+ my $result = $mediawiki->api($query);
+ return $result->{query}->{recentchanges}[0]->{revid};
+}
+
+# Get the last remote revision concerning the tracked pages and the tracked
+# categories.
+sub get_last_remote_revision {
+ mw_connect_maybe();
+
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
+
+ my $max_rev_num = 0;
+
+ print STDERR "Getting last revision id on tracked pages...\n";
+
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids|timestamp',
+ pageids => $id,
+ };
+
+ my $result = $mediawiki->api($query);
+
+ my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});
+
+ $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};
+
+ $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
+ }
+
+ print STDERR "Last remote revision found is $max_rev_num.\n";
+ return $max_rev_num;
+}
+
+# Clean content before sending it to MediaWiki
+sub mediawiki_clean {
+ my $string = shift;
+ my $page_created = shift;
+	# MediaWiki does not allow blank space at the end of a page; pages end with a single \n.
+	# This function right-trims the string and appends a \n to follow this rule.
+ $string =~ s/\s+$//;
+ if ($string eq "" && $page_created) {
+ # Creating empty pages is forbidden.
+ $string = EMPTY_CONTENT;
+ }
+ return $string."\n";
+}
+
+# Filter applied on MediaWiki data before adding them to Git
+sub mediawiki_smudge {
+ my $string = shift;
+ if ($string eq EMPTY_CONTENT) {
+ $string = "";
+ }
+	# This \n is important. This is due to mediawiki's way of handling end of file.
+ return $string."\n";
+}
+
+sub mediawiki_clean_filename {
+ my $filename = shift;
+ $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
+ # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
+ # Do a variant of URL-encoding, i.e. looks like URL-encoding,
+ # but with _ added to prevent MediaWiki from thinking this is
+ # an actual special character.
+ $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
+	# If we had used URI escaping earlier, we would need to
+	# unescape here, before anything else.
+
+ return $filename;
+}
+
+sub mediawiki_smudge_filename {
+ my $filename = shift;
+ $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
+ $filename =~ s/ /_/g;
+ # Decode forbidden characters encoded in mediawiki_clean_filename
+ $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
+ return $filename;
+}
+
+sub literal_data {
+ my ($content) = @_;
+ print STDOUT "data ", bytes::length($content), "\n", $content;
+}
+
+sub literal_data_raw {
+ # Output possibly binary content.
+ my ($content) = @_;
+ # Avoid confusion between size in bytes and in characters
+ utf8::downgrade($content);
+ binmode STDOUT, ":raw";
+ print STDOUT "data ", bytes::length($content), "\n", $content;
+ binmode STDOUT, ":utf8";
+}
+
+sub mw_capabilities {
+ # Revisions are imported to the private namespace
+ # refs/mediawiki/$remotename/ by the helper and fetched into
+ # refs/remotes/$remotename later by fetch.
+ print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
+ print STDOUT "import\n";
+ print STDOUT "list\n";
+ print STDOUT "push\n";
+ print STDOUT "\n";
+}
+
+sub mw_list {
+	# MediaWiki does not have branches; we consider one branch arbitrarily
+	# called master, with HEAD pointing to it.
+ print STDOUT "? refs/heads/master\n";
+ print STDOUT "\@refs/heads/master HEAD\n";
+ print STDOUT "\n";
+}
+
+sub mw_option {
+ print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
+ print STDOUT "unsupported\n";
+}
+
+sub fetch_mw_revisions_for_page {
+ my $page = shift;
+ my $id = shift;
+ my $fetch_from = shift;
+ my @page_revs = ();
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids',
+ rvdir => 'newer',
+ rvstartid => $fetch_from,
+ rvlimit => 500,
+ pageids => $id,
+ };
+
+ my $revnum = 0;
+ # Get 500 revisions at a time due to the mediawiki api limit
+ while (1) {
+ my $result = $mediawiki->api($query);
+
+ # Parse each of those 500 revisions
+ foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
+ my $page_rev_ids;
+ $page_rev_ids->{pageid} = $page->{pageid};
+ $page_rev_ids->{revid} = $revision->{revid};
+ push(@page_revs, $page_rev_ids);
+ $revnum++;
+ }
+ last unless $result->{'query-continue'};
+ $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+ }
+ if ($shallow_import && @page_revs) {
+ print STDERR " Found 1 revision (shallow import).\n";
+ @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
+ return $page_revs[0];
+ }
+ print STDERR " Found ", $revnum, " revision(s).\n";
+ return @page_revs;
+}
+
+sub fetch_mw_revisions {
+ my $pages = shift; my @pages = @{$pages};
+ my $fetch_from = shift;
+
+ my @revisions = ();
+ my $n = 1;
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+
+ print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
+ $n++;
+ my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
+ @revisions = (@page_revs, @revisions);
+ }
+
+ return ($n, @revisions);
+}
+
+sub fe_escape_path {
+ my $path = shift;
+ $path =~ s/\\/\\\\/g;
+ $path =~ s/"/\\"/g;
+ $path =~ s/\n/\\n/g;
+ return '"' . $path . '"';
+}
+
+sub import_file_revision {
+ my $commit = shift;
+ my %commit = %{$commit};
+ my $full_import = shift;
+ my $n = shift;
+ my $mediafile = shift;
+ my %mediafile;
+ if ($mediafile) {
+ %mediafile = %{$mediafile};
+ }
+
+ my $title = $commit{title};
+ my $comment = $commit{comment};
+ my $content = $commit{content};
+ my $author = $commit{author};
+ my $date = $commit{date};
+
+ print STDOUT "commit refs/mediawiki/$remotename/master\n";
+ print STDOUT "mark :$n\n";
+ print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ literal_data($comment);
+
+ # If it's not a clone, we need to know where to start from
+ if (!$full_import && $n == 1) {
+ print STDOUT "from refs/mediawiki/$remotename/master^0\n";
+ }
+ if ($content ne DELETED_CONTENT) {
+ print STDOUT "M 644 inline " .
+ fe_escape_path($title . ".mw") . "\n";
+ literal_data($content);
+ if (%mediafile) {
+ print STDOUT "M 644 inline "
+ . fe_escape_path($mediafile{title}) . "\n";
+ literal_data_raw($mediafile{content});
+ }
+ print STDOUT "\n\n";
+ } else {
+ print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
+ }
+
+ # mediawiki revision number in the git note
+ if ($full_import && $n == 1) {
+ print STDOUT "reset refs/notes/$remotename/mediawiki\n";
+ }
+ print STDOUT "commit refs/notes/$remotename/mediawiki\n";
+ print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ literal_data("Note added by git-mediawiki during import");
+ if (!$full_import && $n == 1) {
+ print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
+ }
+ print STDOUT "N inline :$n\n";
+ literal_data("mediawiki_revision: " . $commit{mw_revision});
+ print STDOUT "\n\n";
+}
+
+# Parse a sequence of
+# <cmd> <arg1>
+# <cmd> <arg2>
+# \n
+# (like a batch sequence of import statements or a sequence of push
+# statements)
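+#
+# For example, an import batch received from Git typically looks like
+# this (illustrative sketch; the exact refs depend on the remote):
+#   import refs/heads/master
+#   import HEAD
+#   <blank line>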
+sub get_more_refs {
+ my $cmd = shift;
+ my @refs;
+ while (1) {
+ my $line = <STDIN>;
+ if ($line =~ m/^$cmd (.*)$/) {
+ push(@refs, $1);
+ } elsif ($line eq "\n") {
+ return @refs;
+ } else {
+			die("Invalid command in a '$cmd' batch: ". $line);
+ }
+ }
+}
+
+sub mw_import {
+ # multiple import commands can follow each other.
+ my @refs = (shift, get_more_refs("import"));
+ foreach my $ref (@refs) {
+ mw_import_ref($ref);
+ }
+ print STDOUT "done\n";
+}
+
+sub mw_import_ref {
+ my $ref = shift;
+	# The remote helper will call "import HEAD" and
+	# "import refs/heads/master".
+	# Since HEAD is a symbolic ref to master (by convention, as
+	# reflected in the output of the "list" command we gave), we
+	# don't need to do anything in this case.
+ if ($ref eq "HEAD") {
+ return;
+ }
+
+ mw_connect_maybe();
+
+ print STDERR "Searching revisions...\n";
+ my $last_local = get_last_local_revision();
+ my $fetch_from = $last_local + 1;
+ if ($fetch_from == 1) {
+ print STDERR ", fetching from beginning.\n";
+ } else {
+ print STDERR ", fetching from here.\n";
+ }
+
+ my $n = 0;
+ if ($fetch_strategy eq "by_rev") {
+ print STDERR "Fetching & writing export data by revs...\n";
+ $n = mw_import_ref_by_revs($fetch_from);
+ } elsif ($fetch_strategy eq "by_page") {
+ print STDERR "Fetching & writing export data by pages...\n";
+ $n = mw_import_ref_by_pages($fetch_from);
+ } else {
+ print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
+ print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+ exit 1;
+ }
+
+ if ($fetch_from == 1 && $n == 0) {
+ print STDERR "You appear to have cloned an empty MediaWiki.\n";
+		# Something has to be done on the remote-helper side. If nothing is
+		# done, an error is thrown saying that HEAD refers to the unknown
+		# object 0000000000000000000 and the clone fails.
+ }
+}
+
+sub mw_import_ref_by_pages {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
+
+ my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
+
+ @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
+ my @revision_ids = map $_->{revid}, @revisions;
+
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+sub mw_import_ref_by_revs {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+
+ my $last_remote = get_last_global_remote_rev();
+ my @revision_ids = $fetch_from..$last_remote;
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+# Import revisions given in second argument (array of integers).
+# Only pages appearing in the third argument (hash indexed by page titles)
+# will be imported.
+sub mw_import_revids {
+ my $fetch_from = shift;
+ my $revision_ids = shift;
+ my $pages = shift;
+
+ my $n = 0;
+ my $n_actual = 0;
+	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
+
+ foreach my $pagerevid (@$revision_ids) {
+ # Count page even if we skip it, since we display
+ # $n/$total and $total includes skipped pages.
+ $n++;
+
+ # fetch the content of the pages
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'content|timestamp|comment|user|ids',
+ revids => $pagerevid,
+ };
+
+ my $result = $mediawiki->api($query);
+
+ if (!$result) {
+ die "Failed to retrieve modified page for revision $pagerevid";
+ }
+
+ if (defined($result->{query}->{badrevids}->{$pagerevid})) {
+ # The revision id does not exist on the remote wiki.
+ next;
+ }
+
+ if (!defined($result->{query}->{pages})) {
+ die "Invalid revision $pagerevid.";
+ }
+
+ my @result_pages = values(%{$result->{query}->{pages}});
+ my $result_page = $result_pages[0];
+ my $rev = $result_pages[0]->{revisions}->[0];
+
+ my $page_title = $result_page->{title};
+
+ if (!exists($pages->{$page_title})) {
+ print STDERR "$n/", scalar(@$revision_ids),
+ ": Skipping revision #$rev->{revid} of $page_title\n";
+ next;
+ }
+
+ $n_actual++;
+
+ my %commit;
+ $commit{author} = $rev->{user} || 'Anonymous';
+ $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
+ $commit{title} = mediawiki_smudge_filename($page_title);
+ $commit{mw_revision} = $rev->{revid};
+ $commit{content} = mediawiki_smudge($rev->{'*'});
+
+ if (!defined($rev->{timestamp})) {
+ $last_timestamp++;
+ } else {
+ $last_timestamp = $rev->{timestamp};
+ }
+ $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
+
+		# Differentiate between classic pages and media files.
+ my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
+ my %mediafile;
+ if ($namespace) {
+ my $id = get_mw_namespace_id($namespace);
+ if ($id && $id == get_mw_namespace_id("File")) {
+ %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+ }
+ }
+		# If this is a revision of the media page for a new version
+		# of a file, do one common commit for both the file and the
+		# media page. Otherwise, commit only that page.
+ print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
+ }
+
+ return $n_actual;
+}
+
+sub error_non_fast_forward {
+ my $advice = run_git("config --bool advice.pushNonFastForward");
+ chomp($advice);
+ if ($advice ne "false") {
+ # Native git-push would show this after the summary.
+ # We can't ask it to display it cleanly, so print it
+ # ourselves before.
+ print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ }
+ print STDOUT "error $_[0] \"non-fast-forward\"\n";
+ return 0;
+}
+
+sub mw_upload_file {
+ my $complete_file_name = shift;
+ my $new_sha1 = shift;
+ my $extension = shift;
+ my $file_deleted = shift;
+ my $summary = shift;
+ my $newrevid;
+ my $path = "File:" . $complete_file_name;
+ my %hashFiles = get_allowed_file_extensions();
+ if (!exists($hashFiles{$extension})) {
+ print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
+ print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ return $newrevid;
+ }
+	# Deleting and uploading a file requires a privileged user
+ if ($file_deleted) {
+ mw_connect_maybe();
+ my $query = {
+ action => 'delete',
+ title => $path,
+ reason => $summary
+ };
+ if (!$mediawiki->edit($query)) {
+ print STDERR "Failed to delete file on remote wiki\n";
+ print STDERR "Check your permissions on the remote site. Error code:\n";
+ print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ exit 1;
+ }
+ } else {
+ # Don't let perl try to interpret file content as UTF-8 => use "raw"
+ my $content = run_git("cat-file blob $new_sha1", "raw");
+ if ($content ne "") {
+ mw_connect_maybe();
+ $mediawiki->{config}->{upload_url} =
+ "$url/index.php/Special:Upload";
+ $mediawiki->edit({
+ action => 'upload',
+ filename => $complete_file_name,
+ comment => $summary,
+ file => [undef,
+ $complete_file_name,
+ Content => $content],
+ ignorewarnings => 1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mediawiki->{error}->{code} . ':'
+ . $mediawiki->{error}->{details};
+ my $last_file_page = $mediawiki->get_page({title => $path});
+ $newrevid = $last_file_page->{revid};
+ print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+ } else {
+ print STDERR "Empty file $complete_file_name not pushed.\n";
+ }
+ }
+ return $newrevid;
+}
+
+sub mw_push_file {
+ my $diff_info = shift;
+ # $diff_info contains a string in this format:
+ # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
+ my @diff_info_split = split(/[ \t]/, $diff_info);
+
+ # Filename, including .mw extension
+ my $complete_file_name = shift;
+ # Commit message
+ my $summary = shift;
+ # MediaWiki revision number. Keep the previous one by default,
+ # in case there's no edit to perform.
+ my $oldrevid = shift;
+ my $newrevid;
+
+ if ($summary eq EMPTY_MESSAGE) {
+ $summary = '';
+ }
+
+ my $new_sha1 = $diff_info_split[3];
+ my $old_sha1 = $diff_info_split[2];
+ my $page_created = ($old_sha1 eq NULL_SHA1);
+ my $page_deleted = ($new_sha1 eq NULL_SHA1);
+ $complete_file_name = mediawiki_clean_filename($complete_file_name);
+
+ my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
+ if (!defined($extension)) {
+ $extension = "";
+ }
+ if ($extension eq "mw") {
+ my $ns = get_mw_namespace_id_for_page($complete_file_name);
+ if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
+ print STDERR "Ignoring media file related page: $complete_file_name\n";
+ return ($oldrevid, "ok");
+ }
+ my $file_content;
+ if ($page_deleted) {
+ # Deleting a page usually requires
+ # special privileges. A common
+ # convention is to replace the page
+ # with this content instead:
+ $file_content = DELETED_CONTENT;
+ } else {
+ $file_content = run_git("cat-file blob $new_sha1");
+ }
+
+ mw_connect_maybe();
+
+ my $result = $mediawiki->edit( {
+ action => 'edit',
+ summary => $summary,
+ title => $title,
+ basetimestamp => $basetimestamps{$oldrevid},
+ text => mediawiki_clean($file_content, $page_created),
+ }, {
+			skip_encoding => 1 # Helps with names with accented characters
+ });
+ if (!$result) {
+ if ($mediawiki->{error}->{code} == 3) {
+ # edit conflicts, considered as non-fast-forward
+ print STDERR 'Warning: Error ' .
+ $mediawiki->{error}->{code} .
+			    ' from mediawiki: ' . $mediawiki->{error}->{details} .
+ ".\n";
+ return ($oldrevid, "non-fast-forward");
+ } else {
+ # Other errors. Shouldn't happen => just die()
+ die 'Fatal: Error ' .
+ $mediawiki->{error}->{code} .
+				    ' from mediawiki: ' . $mediawiki->{error}->{details};
+ }
+ }
+ $newrevid = $result->{edit}->{newrevid};
+ print STDERR "Pushed file: $new_sha1 - $title\n";
+ } elsif ($export_media) {
+ $newrevid = mw_upload_file($complete_file_name, $new_sha1,
+ $extension, $page_deleted,
+ $summary);
+ } else {
+ print STDERR "Ignoring media file $title\n";
+ }
+ $newrevid = ($newrevid or $oldrevid);
+ return ($newrevid, "ok");
+}
+
+sub mw_push {
+ # multiple push statements can follow each other
+ my @refsspecs = (shift, get_more_refs("push"));
+ my $pushed;
+ for my $refspec (@refsspecs) {
+ my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+ if ($force) {
+ print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ }
+ if ($local eq "") {
+ print STDERR "Cannot delete remote branch on a MediaWiki\n";
+ print STDOUT "error $remote cannot delete\n";
+ next;
+ }
+ if ($remote ne "refs/heads/master") {
+ print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print STDOUT "error $remote only master allowed\n";
+ next;
+ }
+ if (mw_push_revision($local, $remote)) {
+ $pushed = 1;
+ }
+ }
+
+ # Notify Git that the push is done
+ print STDOUT "\n";
+
+ if ($pushed && $dumb_push) {
+ print STDERR "Just pushed some revisions to MediaWiki.\n";
+ print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
+ print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
+ print STDERR "\n";
+ print STDERR " git pull --rebase\n";
+ print STDERR "\n";
+ }
+}
+
+sub mw_push_revision {
+ my $local = shift;
+ my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
+ my $last_local_revid = get_last_local_revision();
+ print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ my $last_remote_revid = get_last_remote_revision();
+ my $mw_revision = $last_remote_revid;
+
+ # Get sha1 of commit pointed by local HEAD
+ my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
+ # Get sha1 of commit pointed by remotes/$remotename/master
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+ chomp($remoteorigin_sha1);
+
+ if ($last_local_revid > 0 &&
+ $last_local_revid < $last_remote_revid) {
+ return error_non_fast_forward($remote);
+ }
+
+ if ($HEAD_sha1 eq $remoteorigin_sha1) {
+ # nothing to push
+ return 0;
+ }
+
+ # Get every commit in between HEAD and refs/remotes/origin/master,
+ # including HEAD and refs/remotes/origin/master
+ my @commit_pairs = ();
+ if ($last_local_revid > 0) {
+ my $parsed_sha1 = $remoteorigin_sha1;
+ # Find a path from last MediaWiki commit to pushed commit
+ print STDERR "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
+ my %local_ancestry;
+ foreach my $line (@local_ancestry) {
+ if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(' ', $parents)) {
+ $local_ancestry{$parent} = $child;
+ }
+			} elsif ($line !~ m/^([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: $line";
+ }
+ }
+ while ($parsed_sha1 ne $HEAD_sha1) {
+ my $child = $local_ancestry{$parsed_sha1};
+ if (!$child) {
+ printf STDERR "Cannot find a path in history from remote commit to last commit\n";
+ return error_non_fast_forward($remote);
+ }
+ push(@commit_pairs, [$parsed_sha1, $child]);
+ $parsed_sha1 = $child;
+ }
+ } else {
+ # No remote mediawiki revision. Export the whole
+ # history (linearized with --first-parent)
+ print STDERR "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children $local");
+ my @history = split('\n', $history);
+ @history = @history[1..$#history];
+ foreach my $line (reverse @history) {
+ my @commit_info_split = split(/ |\n/, $line);
+ push(@commit_pairs, \@commit_info_split);
+ }
+ }
+
+ foreach my $commit_info_split (@commit_pairs) {
+ my $sha1_child = @{$commit_info_split}[0];
+ my $sha1_commit = @{$commit_info_split}[1];
+ my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+		# TODO: we could detect renames, and encode them with a #redirect on the wiki.
+ # TODO: for now, it's just a delete+add
+ my @diff_info_list = split(/\0/, $diff_infos);
+ # Keep the subject line of the commit message as mediawiki comment for the revision
+ my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
+ chomp($commit_msg);
+ # Push every blob
+ while (@diff_info_list) {
+ my $status;
+ # git diff-tree -z gives an output like
+ # <metadata>\0<filename1>\0
+ # <metadata>\0<filename2>\0
+ # and we've split on \0.
+ my $info = shift(@diff_info_list);
+ my $file = shift(@diff_info_list);
+ ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
+ if ($status eq "non-fast-forward") {
+ # we may already have sent part of the
+ # commit to MediaWiki, but it's too
+ # late to cancel it. Stop the push in
+ # the middle, but still give an
+ # accurate error message.
+ return error_non_fast_forward($remote);
+ }
+ if ($status ne "ok") {
+ die("Unknown error from mw_push_file()");
+ }
+ }
+ unless ($dumb_push) {
+ run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
+ run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+ }
+ }
+
+ print STDOUT "ok $remote\n";
+ return 1;
+}
+
+sub get_allowed_file_extensions {
+ mw_connect_maybe();
+
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'fileextensions'
+ };
+ my $result = $mediawiki->api($query);
+	my @file_extensions = map $_->{ext}, @{$result->{query}->{fileextensions}};
+	my %hashFile = map {$_ => 1} @file_extensions;
+
+ return %hashFile;
+}
+
+# In memory cache for MediaWiki namespace ids.
+my %namespace_id;
+
+# Namespaces whose id is cached in the configuration file
+# (to avoid duplicates)
+my %cached_mw_namespace_id;
+
+# Return MediaWiki id for a canonical namespace name.
+# Ex.: "File", "Project".
+sub get_mw_namespace_id {
+ mw_connect_maybe();
+ my $name = shift;
+
+ if (!exists $namespace_id{$name}) {
+		# Look in the configuration file to see if the record for that
+		# namespace is already cached. Namespaces are stored in the form
+		# "Name_of_namespace:Id_namespace", e.g. "File:6".
+ my @temp = split(/[\n]/, run_git("config --get-all remote."
+ . $remotename .".namespaceCache"));
+ chomp(@temp);
+ foreach my $ns (@temp) {
+ my ($n, $id) = split(/:/, $ns);
+ if ($id eq 'notANameSpace') {
+ $namespace_id{$n} = {is_namespace => 0};
+ } else {
+ $namespace_id{$n} = {is_namespace => 1, id => $id};
+ }
+ $cached_mw_namespace_id{$n} = 1;
+ }
+ }
+
+ if (!exists $namespace_id{$name}) {
+ print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+ # NS not found => get namespace id from MW and store it in
+ # configuration file.
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'namespaces'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
+ if (defined($ns->{id}) && defined($ns->{canonical})) {
+ $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
+ if ($ns->{'*'}) {
+				# alias (e.g. French Fichier: as alias for canonical File:)
+ $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
+ }
+ }
+ }
+ }
+
+ my $ns = $namespace_id{$name};
+ my $id;
+
+ unless (defined $ns) {
+ print STDERR "No such namespace $name on MediaWiki.\n";
+ $ns = {is_namespace => 0};
+ $namespace_id{$name} = $ns;
+ }
+
+ if ($ns->{is_namespace}) {
+ $id = $ns->{id};
+ }
+
+	# Store "notANameSpace" as a special value for nonexistent namespaces
+ my $store_id = ($id || 'notANameSpace');
+
+	# Store explicitly requested namespaces on disk
+ if (!exists $cached_mw_namespace_id{$name}) {
+ run_git("config --add remote.". $remotename
+ .".namespaceCache \"". $name .":". $store_id ."\"");
+ $cached_mw_namespace_id{$name} = 1;
+ }
+ return $id;
+}
+
+sub get_mw_namespace_id_for_page {
+ if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+ return get_mw_namespace_id($namespace);
+ } else {
+ return;
+ }
+}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.txt b/contrib/mw-to-git/git-remote-mediawiki.txt
new file mode 100644
index 0000000..23b7ef9
--- /dev/null
+++ b/contrib/mw-to-git/git-remote-mediawiki.txt
@@ -0,0 +1,7 @@
+Git-Mediawiki is a project which aims to create a gateway
+between Git and MediaWiki, allowing Git users to push and pull
+objects to and from a MediaWiki just as one would do with a classic Git
+repository, thanks to remote helpers.
+
+For more information, visit the wiki at
+https://github.com/moy/Git-Mediawiki/wiki
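+
+For example (the URL below is only a placeholder for a real wiki), a
+wiki can be cloned with:
+
+  git clone mediawiki::http://example.com/wiki
+
+and then kept in sync with the usual git pull and git push commands.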
diff --git a/contrib/mw-to-git/t/.gitignore b/contrib/mw-to-git/t/.gitignore
new file mode 100644
index 0000000..a7a40b4
--- /dev/null
+++ b/contrib/mw-to-git/t/.gitignore
@@ -0,0 +1,4 @@
+WEB/
+wiki/
+trash directory.t*/
+test-results/
diff --git a/contrib/mw-to-git/t/Makefile b/contrib/mw-to-git/t/Makefile
new file mode 100644
index 0000000..f422203
--- /dev/null
+++ b/contrib/mw-to-git/t/Makefile
@@ -0,0 +1,31 @@
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Test git-remote-mediawiki
+
+all: test
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+.PHONY: help test clean all
+
+help:
+ @echo 'Run "$(MAKE) test" to launch test scripts'
+ @echo 'Run "$(MAKE) clean" to remove trash folders'
+
+test:
+ @for t in $(T); do \
+ echo "$$t"; \
+ "./$$t" || exit 1; \
+ done
+
+clean:
+ $(RM) -r 'trash directory'.*
diff --git a/contrib/mw-to-git/t/README b/contrib/mw-to-git/t/README
new file mode 100644
index 0000000..03f6ee5
--- /dev/null
+++ b/contrib/mw-to-git/t/README
@@ -0,0 +1,124 @@
+Tests for Mediawiki-to-Git
+==========================
+
+Introduction
+------------
+This manual describes how to install the git-remote-mediawiki test
+environment on a machine with Git installed.
+
+Prerequisite
+------------
+
+In order to run this test environment correctly, you will need to
+install the following packages (Debian/Ubuntu names, may need to be
+adapted for another distribution):
+
+* lighttpd
+* php5
+* php5-cgi
+* php5-cli
+* php5-curl
+* php5-sqlite
+
+Principles and Technical Choices
+--------------------------------
+
+The test environment makes it easy to install and manipulate one or
+several MediaWiki instances. To allow developers to run the testsuite
+easily, the environment does not require root privilege (except to
+install the required packages if needed). It starts a webserver
+instance on the user's account (using lighttpd greatly helps for
+that), and does not need a separate database daemon (thanks to the use
+of sqlite).
+
+Run the test environment
+------------------------
+
+Install a new wiki
+~~~~~~~~~~~~~~~~~~
+
+Once you have all the prerequisites, you need to install a MediaWiki
+instance on your machine. Even if you already have one, it is still
+strongly recommended to install a new one with the provided script.
+Here's how it works:
+
+a. change directory to contrib/mw-to-git/t/
+b. if needed, edit test.config to choose your installation parameters
+c. run `./install-wiki.sh install`
+d. check in your favourite web browser that your wiki is correctly
+ installed.
+
+Remove an existing wiki
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Edit the file test.config to fit the wiki you want to delete, and then
+execute the command `./install-wiki.sh delete` from the
+contrib/mw-to-git/t directory.
+
+Run the existing tests
+~~~~~~~~~~~~~~~~~~~~~~
+
+The provided tests are currently in the `contrib/mw-to-git/t` directory.
+The files are all the t936[0-9]-*.sh shell scripts.
+
+a. Run all tests:
+To do so, run "make test" from the contrib/mw-to-git/ directory.
+
+b. Run a specific test:
+To run a given test <test_name>, run ./<test_name> from the
+contrib/mw-to-git/t directory.
+
+How to create new tests
+-----------------------
+
+Available functions
+~~~~~~~~~~~~~~~~~~~
+
+The test environment of git-remote-mediawiki provides some functions
+useful to test its behaviour. For more details about the functions'
+parameters, please refer to the `test-gitmw-lib.sh` and
+`test-gitmw.pl` files.
+
+** `test_check_wiki_precond`:
+Check if the tests must be skipped or not. Please use this function
+at the beginning of each new test file.
+
+** `wiki_getpage`:
+Fetch a given page from the wiki and put its content in the
+directory given as a parameter.
+
+** `wiki_delete_page`:
+Delete a given page from the wiki.
+
+** `wiki_edit_page`:
+Create or modify a given page in the wiki. You can specify several
+parameters, like a summary for the edit, or add the page to a
+given category.
+See test-gitmw.pl for more details.
+
+** `wiki_getallpage`:
+Fetch all pages from the wiki into a given directory. The directory
+is created if it does not exist.
+
+** `test_diff_directories`:
+Compare the content of two directories. The content must be the same.
+Use this function to compare the content of a git directory and a wiki
+one created by wiki_getallpage.
+
+** `test_contains_N_files`:
+Check if the given directory contains a given number of files.
+
+** `wiki_page_exists`:
+Tests if a given page exists on the wiki.
+
+** `wiki_reset`:
+Reset the wiki, i.e. flush the database. Use this function at the
+beginning of each new test, except if the test re-uses the same wiki
+(and history) as the previous test.
+
+How to write a new test
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Please follow the standards given by git; see git/t/README.
+New files should be named t936[0-9]-*.sh.
+Be sure to reset your wiki regularly with the function `wiki_reset`.
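+
+As an illustration only (the test title, page name and helper calls
+below reuse the functions described above and may need adjusting), a
+new test file could be sketched roughly as follows:
+
+  #!/bin/sh
+  test_description='Example: clone a wiki containing one page'
+
+  . ./test-gitmw-lib.sh
+  . $TEST_DIRECTORY/test-lib.sh
+
+  test_check_wiki_precond
+
+  test_expect_success 'clone fetches the page' '
+      wiki_reset &&
+      wiki_editpage Foo "some content" false &&
+      git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+      test_path_is_file mw_dir/Foo.mw
+  '
+
+  test_done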
diff --git a/contrib/mw-to-git/t/install-wiki.sh b/contrib/mw-to-git/t/install-wiki.sh
new file mode 100755
index 0000000..70a53f6
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki.sh
@@ -0,0 +1,45 @@
+#!/bin/sh
+
+# This script installs or deletes a MediaWiki on your computer.
+# It requires a web server with PHP and SQLite running. In addition, if you
+# do not have MediaWiki sources on your computer, the option 'install'
+# downloads them for you.
+# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh
+
+WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)
+
+if test -z "$WIKI_TEST_DIR"
+then
+ WIKI_TEST_DIR=.
+fi
+
+. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
+usage () {
+ echo "usage: "
+ echo " ./install-wiki.sh <install | delete | --help>"
+ echo " install | -i : Install a wiki on your computer."
+ echo " delete | -d : Delete the wiki and all its pages and "
+ echo " content."
+}
+
+
+# Argument: install, delete, --help | -h
+case "$1" in
+ "install" | "-i")
+ wiki_install
+ exit 0
+ ;;
+ "delete" | "-d")
+ wiki_delete
+ exit 0
+ ;;
+ "--help" | "-h")
+ usage
+ exit 0
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+esac
diff --git a/contrib/mw-to-git/t/install-wiki/.gitignore b/contrib/mw-to-git/t/install-wiki/.gitignore
new file mode 100644
index 0000000..b5a2a44
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/.gitignore
@@ -0,0 +1 @@
+wikidb.sqlite
diff --git a/contrib/mw-to-git/t/install-wiki/LocalSettings.php b/contrib/mw-to-git/t/install-wiki/LocalSettings.php
new file mode 100644
index 0000000..745e47e
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/LocalSettings.php
@@ -0,0 +1,129 @@
+<?php
+# This file was automatically generated by the MediaWiki 1.19.0
+# installer. If you make manual changes, please keep track in case you
+# need to recreate them later.
+#
+# See includes/DefaultSettings.php for all configurable settings
+# and their default values, but don't forget to make changes in _this_
+# file, not there.
+#
+# Further documentation for configuration settings may be found at:
+# http://www.mediawiki.org/wiki/Manual:Configuration_settings
+
+# Protect against web entry
+if ( !defined( 'MEDIAWIKI' ) ) {
+ exit;
+}
+
+## Uncomment this to disable output compression
+# $wgDisableOutputCompression = true;
+
+$wgSitename = "Git-MediaWiki-Test";
+$wgMetaNamespace = "Git-MediaWiki-Test";
+
+## The URL base path to the directory containing the wiki;
+## defaults for all runtime URL paths are based off of this.
+## For more information on customizing the URLs please see:
+## http://www.mediawiki.org/wiki/Manual:Short_URL
+$wgScriptPath = "@WG_SCRIPT_PATH@";
+$wgScriptExtension = ".php";
+
+## The protocol and server name to use in fully-qualified URLs
+$wgServer = "@WG_SERVER@";
+
+## The relative URL path to the skins directory
+$wgStylePath = "$wgScriptPath/skins";
+
+## The relative URL path to the logo. Make sure you change this from the default,
+## or else you'll overwrite your logo when you upgrade!
+$wgLogo = "$wgStylePath/common/images/wiki.png";
+
+## UPO means: this is also a user preference option
+
+$wgEnableEmail = true;
+$wgEnableUserEmail = true; # UPO
+
+$wgEmergencyContact = "apache@localhost";
+$wgPasswordSender = "apache@localhost";
+
+$wgEnotifUserTalk = false; # UPO
+$wgEnotifWatchlist = false; # UPO
+$wgEmailAuthentication = true;
+
+## Database settings
+$wgDBtype = "sqlite";
+$wgDBserver = "";
+$wgDBname = "@WG_SQLITE_DATAFILE@";
+$wgDBuser = "";
+$wgDBpassword = "";
+
+# SQLite-specific settings
+$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@";
+
+
+## Shared memory settings
+$wgMainCacheType = CACHE_NONE;
+$wgMemCachedServers = array();
+
+## To enable image uploads, make sure the 'images' directory
+## is writable, then set this to true:
+$wgEnableUploads = true;
+$wgUseImageMagick = true;
+$wgImageMagickConvertCommand ="@CONVERT@";
+$wgFileExtensions[] = 'txt';
+
+# InstantCommons allows wiki to use images from http://commons.wikimedia.org
+$wgUseInstantCommons = false;
+
+## If you use ImageMagick (or any other shell command) on a
+## Linux server, this will need to be set to the name of an
+## available UTF-8 locale
+$wgShellLocale = "en_US.utf8";
+
+## If you want to use image uploads under safe mode,
+## create the directories images/archive, images/thumb and
+## images/temp, and make them all writable. Then uncomment
+## this, if it's not already uncommented:
+#$wgHashedUploadDirectory = false;
+
+## Set $wgCacheDirectory to a writable directory on the web server
+## to make your wiki go slightly faster. The directory should not
+## be publicly accessible from the web.
+#$wgCacheDirectory = "$IP/cache";
+
+# Site language code, should be one of the list in ./languages/Names.php
+$wgLanguageCode = "en";
+
+$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
+#$wgSecretKey = "@SECRETKEY@";
+
+
+# Site upgrade key. Must be set to a string (default provided) to turn on the
+# web installer while LocalSettings.php is in place
+$wgUpgradeKey = "ddae7dc87cd0a645";
+
+## Default skin: you can change the default skin. Use the internal symbolic
+## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
+$wgDefaultSkin = "vector";
+
+## For attaching licensing metadata to pages, and displaying an
+## appropriate copyright notice / icon. GNU Free Documentation
+## License and Creative Commons licenses are supported so far.
+$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
+$wgRightsUrl = "";
+$wgRightsText = "";
+$wgRightsIcon = "";
+
+# Path to the GNU diff3 utility. Used for conflict resolution.
+$wgDiff3 = "/usr/bin/diff3";
+
+# Query string length limit for ResourceLoader. You should only set this if
+# your web server has a query string length limit (then set it to that limit),
+# or if you have suhosin.get.max_value_length set in php.ini (then set it to
+# that value)
+$wgResourceLoaderMaxQueryLength = -1;
+
+
+
+# End of automatically generated settings.
+# Add more configuration options below.
diff --git a/contrib/mw-to-git/t/install-wiki/db_install.php b/contrib/mw-to-git/t/install-wiki/db_install.php
new file mode 100644
index 0000000..0f3f4e0
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/db_install.php
@@ -0,0 +1,120 @@
+<?php
+/**
+ * This script generates a SQLite database for MediaWiki version 1.19.0.
+ * You must specify the login of the admin (argument 1), the admin's
+ * password (argument 2) and the folder where the database file
+ * is located (absolute path in argument 3).
+ * It is used by the script install-wiki.sh in order to ease the
+ * installation of a MediaWiki.
+ *
+ * In order to generate a SQLite database file, MediaWiki asks the user
+ * to submit some forms in its web browser. This script simulates this
+ * behavior through the functions <get> and <submit>.
+ *
+ */
+$argc = $_SERVER['argc'];
+$argv = $_SERVER['argv'];
+
+$login = $argv[2];
+$pass = $argv[3];
+$tmp = $argv[4];
+$port = $argv[5];
+
+$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
+$db_dir = urlencode($tmp);
+$tmp_cookie = tempnam($tmp, "COOKIE_");
+/*
+ * Fetches a page with cURL.
+ */
+function get($page_name = "") {
+ $curl = curl_init();
+ $page_name_add = "";
+ if ($page_name != "") {
+ $page_name_add = '?page='.$page_name;
+ }
+ $url = $GLOBALS['url'].$page_name_add;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_HEADER, true);
+ curl_setopt($curl, CURLOPT_URL, $url);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return $page;
+}
+
+/*
+ * Submits a form with cURL.
+ */
+function submit($page_name, $option = "") {
+ $curl = curl_init();
+ $datapost = 'submit-continue=Continue+%E2%86%92';
+ if ($option != "") {
+ $datapost = $option.'&'.$datapost;
+ }
+ $url = $GLOBALS['url'].'?page='.$page_name;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_URL, $url);
+ curl_setopt($curl, CURLOPT_POST, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return "$page";
+}
+
+/*
+ * Here the script starts: it simulates the behavior of a user
+ * submitting forms to generate the database file.
+ * Note this simulation was made for MediaWiki version 1.19.0;
+ * we can't assume it works with other versions.
+ *
+ */
+
+$page = get();
+if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
+ $page, $matches)) {
+ echo "Unexpected content for page downloaded:\n";
+ echo "$page";
+ die;
+};
+$timestamp = $matches[1];
+$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
+$page = submit('Language', $language);
+
+submit('Welcome');
+
+$db_config = 'DBType=sqlite';
+$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
+$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
+submit('DBConnect', $db_config);
+
+$wiki_config = 'config_wgSitename=TEST';
+$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
+$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
+$wiki_config = $wiki_config.'&config__AdminName='.$login;
+
+$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
+$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
+
+$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
+$wiki_config = $wiki_config.'&config__SkipOptional=skip';
+submit('Name', $wiki_config);
+submit('Install');
+submit('Install');
+
+unlink($tmp_cookie);
+?>
diff --git a/contrib/mw-to-git/t/push-pull-tests.sh b/contrib/mw-to-git/t/push-pull-tests.sh
new file mode 100644
index 0000000..9da2dc5
--- /dev/null
+++ b/contrib/mw-to-git/t/push-pull-tests.sh
@@ -0,0 +1,144 @@
+test_push_pull () {
+
+ test_expect_success 'Git pull works after adding a new wiki page' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_editpage Foo "page created after the git clone" false &&
+
+ (
+ cd mw_dir_1 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+ '
+
+ test_expect_success 'Git pull works after editing a wiki page' '
+ wiki_reset &&
+
+ wiki_editpage Foo "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage Foo "new line added on the wiki" true &&
+
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+ '
+
+ test_expect_success 'git pull works on conflict handled by auto-merge' '
+ wiki_reset &&
+
+ wiki_editpage Foo "1 init
+3
+5
+ " false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+
+ wiki_editpage Foo "1 init
+2 content added on wiki after clone
+3
+5
+ " false &&
+
+ (
+ cd mw_dir_3 &&
+ echo "1 init
+3
+4 content added on git after clone
+5
+" >Foo.mw &&
+ git commit -am "conflicting change on foo" &&
+ git pull &&
+ git push
+ )
+ '
+
+ test_expect_success 'Git push works after adding a file .mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ wiki_getallpage ref_page_4 &&
+ (
+ cd mw_dir_4 &&
+ test_path_is_missing Foo.mw &&
+ touch Foo.mw &&
+ echo "hello world" >>Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Foo" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_4 &&
+ test_diff_directories mw_dir_4 ref_page_4
+ '
+
+ test_expect_success 'Git push works after editing a file .mw' '
+ wiki_reset &&
+ wiki_editpage "Foo" "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+
+ (
+ cd mw_dir_5 &&
+ echo "new line added in the file Foo.mw" >>Foo.mw &&
+ git commit -am "edit file Foo.mw" &&
+ git push
+ ) &&
+
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5
+ '
+
+ test_expect_failure 'Git push works after deleting a file' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+
+ (
+ cd mw_dir_6 &&
+ git rm Foo.mw &&
+ git commit -am "page Foo.mw deleted" &&
+ git push
+ ) &&
+
+ test_must_fail wiki_page_exist Foo
+ '
+
+ test_expect_success 'Merge conflict expected and solving it' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_editpage Foo "1 conflict
+3 wiki
+4" false &&
+
+ (
+ cd mw_dir_7 &&
+ echo "1 conflict
+2 git
+4" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "conflict created" &&
+ test_must_fail git pull &&
+ "$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw &&
+ git commit -am "merge conflict solved" &&
+ git push
+ )
+ '
+
+ test_expect_failure 'git pull works after deleting a wiki page' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+
+ wiki_delete_page Foo &&
+ (
+ cd mw_dir_8 &&
+ git pull &&
+ test_path_is_missing Foo.mw
+ )
+ '
+}
diff --git a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
new file mode 100755
index 0000000..811a90c
--- /dev/null
+++ b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
@@ -0,0 +1,257 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+
+test_description='Test the Git Mediawiki remote helper: git clone'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone creates the expected git log with one file' '
+ wiki_reset &&
+ wiki_editpage foo "this is not important" false -c cat -s "this must be the same" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ (
+ cd mw_dir_1 &&
+ git log --format=%s HEAD^..HEAD >log.tmp
+ ) &&
+ echo "this must be the same" >msg.tmp &&
+ diff -b mw_dir_1/log.tmp msg.tmp
+'
+
+
+test_expect_success 'Git clone creates the expected git log with multiple files' '
+ wiki_reset &&
+ wiki_editpage daddy "this is not important" false -s="this must be the same" &&
+ wiki_editpage daddy "neither is this" true -s="this must also be the same" &&
+ wiki_editpage daddy "neither is this" true -s="same same same" &&
+ wiki_editpage dj "dont care" false -s="identical" &&
+ wiki_editpage dj "dont care either" true -s="identical too" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ (
+ cd mw_dir_2 &&
+ git log --format=%s Daddy.mw >logDaddy.tmp &&
+ git log --format=%s Dj.mw >logDj.tmp
+ ) &&
+ echo "same same same" >msgDaddy.tmp &&
+ echo "this must also be the same" >>msgDaddy.tmp &&
+ echo "this must be the same" >>msgDaddy.tmp &&
+ echo "identical too" >msgDj.tmp &&
+ echo "identical" >>msgDj.tmp &&
+ diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
+ diff -b mw_dir_2/logDj.tmp msgDj.tmp
+'
+
+
+test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ test_contains_N_files mw_dir_3 1 &&
+ test_path_is_file mw_dir_3/Main_Page.mw
+'
+
+test_expect_success 'Git clone does not fetch a deleted page' '
+ wiki_reset &&
+ wiki_editpage foo "this page must be deleted before the clone" false &&
+ wiki_delete_page foo &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 1 &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ test_path_is_missing mw_dir_4/Foo.mw
+'
+
+test_expect_success 'Git clone works with page added' '
+ wiki_reset &&
+ wiki_editpage foo " I will be cloned" false &&
+ wiki_editpage bar "I will be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5 &&
+ wiki_delete_page foo &&
+ wiki_delete_page bar
+'
+
+test_expect_success 'Git clone works with an edited page ' '
+ wiki_reset &&
+ wiki_editpage foo "this page will be edited" \
+ false -s "first edition of page foo"&&
+ wiki_editpage foo "this page has been edited and must be on the clone " true &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ test_path_is_file mw_dir_6/Foo.mw &&
+ test_path_is_file mw_dir_6/Main_Page.mw &&
+ wiki_getallpage mw_dir_6/page_ref_6 &&
+ test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 &&
+ (
+ cd mw_dir_6 &&
+ git log --format=%s HEAD^ Foo.mw > ../Foo.log
+ ) &&
+ echo "first edition of page foo" > FooExpect.log &&
+ diff FooExpect.log Foo.log
+'
+
+
+test_expect_success 'Git clone works with several pages and some deleted ' '
+ wiki_reset &&
+ wiki_editpage foo "this page will not be deleted" false &&
+ wiki_editpage bar "I must not be erased" false &&
+ wiki_editpage namnam "I will not be there at the end" false &&
+ wiki_editpage nyancat "nyan nyan nyan delete me" false &&
+ wiki_delete_page namnam &&
+ wiki_delete_page nyancat &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ test_path_is_file mw_dir_7/Foo.mw &&
+ test_path_is_file mw_dir_7/Bar.mw &&
+ test_path_is_missing mw_dir_7/Namnam.mw &&
+ test_path_is_missing mw_dir_7/Nyancat.mw &&
+ wiki_getallpage mw_dir_7/page_ref_7 &&
+ test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
+'
+
+
+test_expect_success 'Git clone works with one specific page cloned ' '
+ wiki_reset &&
+ wiki_editpage foo "I will not be cloned" false &&
+ wiki_editpage bar "Do not clone me" false &&
+ wiki_editpage namnam "I will be cloned :)" false -s="this log must stay" &&
+ wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
+ git clone -c remote.origin.pages=namnam \
+ mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_contains_N_files mw_dir_8 1 &&
+ test_path_is_file mw_dir_8/Namnam.mw &&
+ test_path_is_missing mw_dir_8/Main_Page.mw &&
+ (
+ cd mw_dir_8 &&
+ echo "this log must stay" >msg.tmp &&
+ git log --format=%s >log.tmp &&
+ diff -b msg.tmp log.tmp
+ ) &&
+ wiki_check_content mw_dir_8/Namnam.mw Namnam
+'
+
+test_expect_success 'Git clone works with multiple specific pages cloned' '
+ wiki_reset &&
+ wiki_editpage foo "I will be there" false &&
+ wiki_editpage bar "I will not disappear" false &&
+ wiki_editpage namnam "I will be erased" false &&
+ wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
+ wiki_delete_page namnam &&
+ git clone -c remote.origin.pages="foo bar nyancat namnam" \
+ mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ test_contains_N_files mw_dir_9 3 &&
+ test_path_is_missing mw_dir_9/Namnam.mw &&
+ test_path_is_file mw_dir_9/Foo.mw &&
+ test_path_is_file mw_dir_9/Nyancat.mw &&
+ test_path_is_file mw_dir_9/Bar.mw &&
+ wiki_check_content mw_dir_9/Foo.mw Foo &&
+ wiki_check_content mw_dir_9/Bar.mw Bar &&
+ wiki_check_content mw_dir_9/Nyancat.mw Nyancat
+'
+
+test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
+ wiki_reset &&
+ wiki_editpage foo "foo 1" false &&
+ wiki_editpage bar "bar 1" false &&
+ wiki_editpage dummy "dummy 1" false &&
+ wiki_editpage cloned_1 "cloned_1 1" false &&
+ wiki_editpage cloned_2 "cloned_2 2" false &&
+ wiki_editpage cloned_3 "cloned_3 3" false &&
+ mkdir -p ref_page_10 &&
+ wiki_getpage cloned_1 ref_page_10 &&
+ wiki_getpage cloned_2 ref_page_10 &&
+ wiki_getpage cloned_3 ref_page_10 &&
+ git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
+ mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+test_expect_success 'Git clone works with the shallow option' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, should be cloned" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ test_contains_N_files mw_dir_11 4 &&
+ test_path_is_file mw_dir_11/Nyan.mw &&
+ test_path_is_file mw_dir_11/Foo.mw &&
+ test_path_is_file mw_dir_11/Bar.mw &&
+ test_path_is_file mw_dir_11/Main_Page.mw &&
+ (
+ cd mw_dir_11 &&
+ test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+ test `git log --oneline Foo.mw | wc -l` -eq 1 &&
+ test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_11/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_11/Foo.mw Foo &&
+ wiki_check_content mw_dir_11/Bar.mw Bar &&
+ wiki_check_content mw_dir_11/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Git clone works with the shallow option and a deleted page' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, will be deleted" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ wiki_delete_page foo &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ test_contains_N_files mw_dir_12 3 &&
+ test_path_is_file mw_dir_12/Nyan.mw &&
+ test_path_is_missing mw_dir_12/Foo.mw &&
+ test_path_is_file mw_dir_12/Bar.mw &&
+ test_path_is_file mw_dir_12/Main_Page.mw &&
+ (
+ cd mw_dir_12 &&
+ test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+ test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_12/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_12/Bar.mw Bar &&
+ wiki_check_content mw_dir_12/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Test of fetching a category' '
+ wiki_reset &&
+ wiki_editpage Foo "I will be cloned" false -c=Category &&
+ wiki_editpage Bar "Meet me on the repository" false -c=Category &&
+ wiki_editpage Dummy "I will not come" false &&
+ wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
+ git clone -c remote.origin.categories="Category" \
+ mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ wiki_getallpage ref_page_13 Category &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+test_expect_success 'Test of resistance to modification of category on wiki for clone' '
+ wiki_reset &&
+ wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
+ wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
+ wiki_editpage Normalone "this page will not be modified and will be on git" false -c=Catone &&
+ wiki_editpage Notconsidered "this page will not appear on local" false &&
+ wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
+ wiki_editpage Tobeedited "this page has been modified" true -c=Catone &&
+ wiki_delete_page Tobedeleted &&
+ git clone -c remote.origin.categories="Catone" \
+ mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ wiki_getallpage ref_page_14 Catone &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
new file mode 100755
index 0000000..9ea2014
--- /dev/null
+++ b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_push_pull
+
+test_done
diff --git a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
new file mode 100755
index 0000000..37021e2
--- /dev/null
+++ b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
@@ -0,0 +1,347 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test git-mediawiki with special characters in filenames'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone works for a wiki with accents in the page names' '
+ wiki_reset &&
+ wiki_editpage féé "This page must be délétéd before clone" false &&
+ wiki_editpage kèè "This page must be deleted before clone" false &&
+ wiki_editpage hàà "This page must be deleted before clone" false &&
+ wiki_editpage kîî "This page must be deleted before clone" false &&
+ wiki_editpage foo "This page must be deleted before clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+'
+
+
+test_expect_success 'Git pull works with a wiki with accents in the page names' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage éàîôû "This page must be pulled" false &&
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+'
+
+
+test_expect_success 'Cloning a chosen page works with accents' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=kîî \
+ mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ wiki_check_content mw_dir_3/Kîî.mw Kîî &&
+ test_path_is_file mw_dir_3/Kîî.mw &&
+ rm -rf mw_dir_3
+'
+
+
+test_expect_success 'The shallow option works with accents' '
+ wiki_reset &&
+ wiki_editpage néoà "1st revision, should not be cloned" false &&
+ wiki_editpage néoà "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 2 &&
+ test_path_is_file mw_dir_4/Néoà.mw &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ (
+ cd mw_dir_4 &&
+ test `git log --oneline Néoà.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_4/Néoà.mw Néoà &&
+ wiki_check_content mw_dir_4/Main_Page.mw Main_Page
+'
+
+
+test_expect_success 'Cloning works when page name first letter has an accent' '
+ wiki_reset &&
+ wiki_editpage îî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=îî \
+ mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ test_path_is_file mw_dir_5/Îî.mw &&
+ wiki_check_content mw_dir_5/Îî.mw Îî
+'
+
+
+test_expect_success 'Git push works with a wiki with accents' '
+ wiki_reset &&
+ wiki_editpage féé "lots of accents : éèàÖ" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ (
+ cd mw_dir_6 &&
+ echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw &&
+ git add Pîkächû.mw &&
+ git commit -m "A new page appears" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_6 &&
+ test_diff_directories mw_dir_6 ref_page_6
+'
+
+test_expect_success 'Git clone works with accents and spaces' '
+ wiki_reset &&
+ wiki_editpage "é à î" "this page must be délété before the clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_getallpage ref_page_7 &&
+ test_diff_directories mw_dir_7 ref_page_7
+'
+
+test_expect_success 'character $ in page name (mw -> git)' '
+ wiki_reset &&
+ wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_path_is_file mw_dir_8/File_\$_foo.mw &&
+ wiki_getallpage ref_page_8 &&
+ test_diff_directories mw_dir_8 ref_page_8
+'
+
+
+
+test_expect_success 'character $ in file name (git -> mw) ' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ (
+ cd mw_dir_9 &&
+ echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
+ git add . &&
+ git commit -am "file File_\$_foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_9 &&
+ test_diff_directories mw_dir_9 ref_page_9
+'
+
+
+test_expect_failure 'capital at the beginning of file names' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ (
+ cd mw_dir_10 &&
+ echo "my new file foo" >foo.mw &&
+ echo "my new file Foo... Finger crossed" >Foo.mw &&
+ git add . &&
+ git commit -am "file foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+
+test_expect_failure 'special character at the beginning of file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ wiki_editpage {char_1 "expect to be renamed {char_1" false &&
+ wiki_editpage [char_2 "expect to be renamed [char_2" false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/{char_1 &&
+ test_path_is_file mw_dir_11/[char_2
+'
+
+test_expect_success 'Pull page with title containing ":" other than namespace separator' '
+ wiki_editpage Foo:Bar content false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/Foo:Bar.mw
+'
+
+test_expect_success 'Push page with title containing ":" other than namespace separator' '
+ (
+ cd mw_dir_11 &&
+ echo content >NotANameSpace:Page.mw &&
+ git add NotANameSpace:Page.mw &&
+ git commit -m "add page with colon" &&
+ git push
+ ) &&
+ wiki_page_exist NotANameSpace:Page
+'
+
+test_expect_success 'test of correct formatting for file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
+ wiki_editpage char_%_5b_2 "expect to be renamed char{_2" false &&
+ (
+ cd mw_dir_12 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_12/Char\{_1.mw &&
+ test_path_is_file mw_dir_12/Char\[_2.mw &&
+ wiki_getallpage ref_page_12 &&
+ mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
+ mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
+ test_diff_directories mw_dir_12 ref_page_12
+'
+
+
+test_expect_failure 'test of correct formatting for file name beginning with special character' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ (
+ cd mw_dir_13 &&
+ echo "my new file {char_1" >\{char_1.mw &&
+ echo "my new file [char_2" >\[char_2.mw &&
+ git add . &&
+ git commit -am "committing some exotic file name..." &&
+ git push &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_13 &&
+ test_path_is_file ref_page_13/{char_1.mw &&
+ test_path_is_file ref_page_13/[char_2.mw &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+
+test_expect_success 'test of correct formatting for file name from git to mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ (
+ cd mw_dir_14 &&
+ echo "my new file char{_1" >Char\{_1.mw &&
+ echo "my new file char[_2" >Char\[_2.mw &&
+ git add . &&
+ git commit -m "committing some exotic file name..." &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_14 &&
+ mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
+ mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+
+test_expect_success 'git clone with /' '
+ wiki_reset &&
+ wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
+ wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
+'
+
+
+test_expect_success 'git push with /' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
+ (
+ cd mw_dir_16 &&
+ git add %2Ffo%2Fo.mw &&
+ git commit -m " %2Ffo%2Fo added" &&
+ git push
+ ) &&
+ wiki_page_exist \/fo\/o &&
+ wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o
+
+'
+
+
+test_expect_success 'git clone with \' '
+ wiki_reset &&
+ wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_path_is_file mw_dir_17/\\ko\\o.mw &&
+ wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
+'
+
+
+test_expect_success 'git push with \' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
+ echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
+ (
+ cd mw_dir_18 &&
+ git add \\ko\\o.mw &&
+ git commit -m " \\ko\\o added" &&
+ git push
+ )&&
+ wiki_page_exist \\ko\\o &&
+ wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o
+
+'
+
+test_expect_success 'git clone with \ in format control' '
+ wiki_reset &&
+ wiki_editpage \\no\\o "this is not important" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
+ test_path_is_file mw_dir_19/\\no\\o.mw &&
+ wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
+'
+
+
+test_expect_success 'git push with \ in format control' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
+ echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
+ (
+ cd mw_dir_20 &&
+ git add \\fo\\o.mw &&
+ git commit -m " \\fo\\o added" &&
+ git push
+ )&&
+ wiki_page_exist \\fo\\o &&
+ wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o
+
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (mw -> git)' '
+ wiki_reset &&
+ wiki_editpage \"file\"_\\_foo "expect to be called \"file\"_\\_foo" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_21 &&
+ test_path_is_file mw_dir_21/\"file\"_\\_foo.mw &&
+ wiki_getallpage ref_page_21 &&
+ test_diff_directories mw_dir_21 ref_page_21
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (git -> mw) ' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_22 &&
+ (
+ cd mw_dir_22 &&
+ echo "this file is called \"file\"_\\_foo.mw" >\"file\"_\\_foo &&
+ git add . &&
+ git commit -am "file \"file\"_\\_foo" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_22 &&
+ test_diff_directories mw_dir_22 ref_page_22
+'
+
+
+test_done
diff --git a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
new file mode 100755
index 0000000..5a03739
--- /dev/null
+++ b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
@@ -0,0 +1,198 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_git_reimport () {
+ git -c remote.origin.dumbPush=true push &&
+ git -c remote.origin.mediaImport=true pull --rebase
+}
+
+# Don't bother with permissions, be administrator by default
+test_expect_success 'setup config' '
+ git config --global remote.origin.mwLogin WikiAdmin &&
+ git config --global remote.origin.mwPassword AdminPass &&
+ test_might_fail git config --global --unset remote.origin.mediaImport
+'
+
+test_expect_success 'git push can upload media (File:) files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file" &&
+ git push &&
+ "$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file with binary content" &&
+ git push
+ )
+'
+
+test_expect_success 'git clone works on previously created wiki with media files' '
+ test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+ git clone -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
+ (cd mw_dir_clone && git checkout HEAD^) &&
+ (cd mw_dir && git checkout HEAD^) &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
+'
+
+test_expect_success 'git push & pull work with locally renamed media files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a file" &&
+ git mv Foo.txt Bar.txt &&
+ git commit -m "Rename a file" &&
+ test_git_reimport &&
+ echo "A File" >expect &&
+ test_cmp expect Bar.txt &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push can propagate local page deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ test_path_is_missing Foo.mw &&
+ echo "hello world" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Add the page Foo" &&
+ git push &&
+ rm -f Foo.mw &&
+ git commit -am "Delete the page Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.mw
+ )
+'
+
+test_expect_success 'git push can propagate local media file deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+# Known failure: the file is correctly uploaded and then deleted, but
+# since no page links to it, the import (which looks at page revisions)
+# doesn't notice the file deletion on the wiki. We fetch the list of
+# files from the wiki, but as the file is deleted, it no longer appears.
+test_expect_failure 'git pull correctly imports media file deletion when no page link to it' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git push &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push properly warns about insufficient permissions' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >foo.forbidden &&
+ git add foo.forbidden &&
+ git commit -m "add a file" &&
+ git push 2>actual &&
+ test_i18ngrep "foo.forbidden is not a permitted file" actual
+ )
+'
+
+test_expect_success 'setup a repository with media files' '
+ wiki_reset &&
+ wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
+ echo "File content" >File.txt &&
+ wiki_upload_file File.txt &&
+ echo "Another file content" >AnotherFile.txt &&
+ wiki_upload_file AnotherFile.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
+ git clone -c remote.origin.pages=testpage \
+ -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_when_finished "rm -rf mw_dir_15" &&
+ test_contains_N_files mw_dir_15 3 &&
+ test_path_is_file mw_dir_15/Testpage.mw &&
+ test_path_is_file mw_dir_15/File:File.txt.mw &&
+ test_path_is_file mw_dir_15/File.txt &&
+ test_path_is_missing mw_dir_15/Main_Page.mw &&
+ test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
+ test_path_is_missing mw_dir_15/AnotherFile.txt &&
+ wiki_check_content mw_dir_15/Testpage.mw Testpage &&
+ test_cmp mw_dir_15/File.txt File.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
+ test_when_finished "rm -rf mw_dir_16" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ test_contains_N_files mw_dir_16 1 &&
+ test_path_is_file mw_dir_16/Testpage.mw &&
+ test_path_is_missing mw_dir_16/File:File.txt.mw &&
+ test_path_is_missing mw_dir_16/File.txt &&
+ test_path_is_missing mw_dir_16/Main_Page.mw &&
+ wiki_check_content mw_dir_16/Testpage.mw Testpage
+'
+
+# should behave like mediaimport=false
+test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
+ test_when_finished "rm -fr mw_dir_17" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_contains_N_files mw_dir_17 1 &&
+ test_path_is_file mw_dir_17/Testpage.mw &&
+ test_path_is_missing mw_dir_17/File:File.txt.mw &&
+ test_path_is_missing mw_dir_17/File.txt &&
+ test_path_is_missing mw_dir_17/Main_Page.mw &&
+ wiki_check_content mw_dir_17/Testpage.mw Testpage
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/t9364-pull-by-rev.sh b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
new file mode 100755
index 0000000..5c22457
--- /dev/null
+++ b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: git pull by revision'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'configuration' '
+ git config --global mediawiki.fetchStrategy by_rev
+'
+
+test_push_pull
+
+test_done
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
new file mode 100755
index 0000000..3b2cfac
--- /dev/null
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -0,0 +1,435 @@
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+#
+# CONFIGURATION VARIABLES
+# You might want to change these ones
+#
+
+. ./test.config
+
+CURR_DIR=$(pwd)
+TEST_OUTPUT_DIRECTORY=$(pwd)
+TEST_DIRECTORY="$CURR_DIR"/../../../t
+
+export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
+
+if test "$LIGHTTPD" = "false" ; then
+ PORT=80
+else
+ WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
+fi
+
+# Compute WIKI_URL only after PORT has been adjusted for the non-lighttpd case.
+WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
+
+wiki_upload_file () {
+ "$CURR_DIR"/test-gitmw.pl upload_file "$@"
+}
+
+wiki_getpage () {
+ "$CURR_DIR"/test-gitmw.pl get_page "$@"
+}
+
+wiki_delete_page () {
+ "$CURR_DIR"/test-gitmw.pl delete_page "$@"
+}
+
+wiki_editpage () {
+ "$CURR_DIR"/test-gitmw.pl edit_page "$@"
+}
+
+die () {
+ die_with_status 1 "$@"
+}
+
+die_with_status () {
+ status=$1
+ shift
+ echo >&2 "$*"
+ exit "$status"
+}
+
+
+# Check the preconditions to run git-remote-mediawiki's tests
+test_check_precond () {
+ if ! test_have_prereq PERL
+ then
+ skip_all='skipping gateway git-mw tests, perl not available'
+ test_done
+ fi
+
+ if [ ! -f "$GIT_BUILD_DIR"/git-remote-mediawiki ];
+ then
+ echo "git-remote-mediawiki not found in $GIT_BUILD_DIR. Linking it from contrib/mw-to-git:"
+ echo "ln -s $GIT_BUILD_DIR/contrib/mw-to-git/git-remote-mediawiki $GIT_BUILD_DIR/"
+ ln -s "$GIT_BUILD_DIR"/contrib/mw-to-git/git-remote-mediawiki "$GIT_BUILD_DIR"
+ fi
+
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
+ then
+ skip_all='skipping gateway git-mw tests, no mediawiki found'
+ test_done
+ fi
+}
+
+# test_diff_directories <dir_git> <dir_wiki>
+#
+# Compare the contents of directories <dir_git> and <dir_wiki> with diff
+# and fail if they do not match. The comparison does not
+# look into .git in the process.
+# Warning: the first argument MUST be the directory containing the git data.
+test_diff_directories () {
+ rm -rf "$1_tmp"
+ mkdir -p "$1_tmp"
+ cp "$1"/*.mw "$1_tmp"
+ diff -r -b "$1_tmp" "$2"
+}
+
+# $1=<dir>
+# $2=<N>
+#
+# Check that <dir> contains exactly <N> files
+test_contains_N_files () {
+ if test `ls -- "$1" | wc -l` -ne "$2"; then
+ echo "directory $1 should contain $2 files"
+ echo "it contains these files:"
+ ls "$1"
+ false
+ fi
+}
+
+
+# wiki_check_content <file_name> <page_name>
+#
+# Compares the contents of the file <file_name> and the wiki page
+# <page_name> and exits with error 1 if they do not match.
+wiki_check_content () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$2" wiki_tmp
+ # replacement of forbidden character in file name
+ page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
+
+ diff -b "$1" wiki_tmp/"$page_name".mw
+ if test $? -ne 0
+ then
+ rm -rf wiki_tmp
+ error "ERROR: page $2 not found on wiki or differs from $1"
+ fi
+ rm -rf wiki_tmp
+}
+
+# wiki_page_exist <page_name>
+#
+# Check that the page <page_name> exists on the wiki and exit
+# with an error if it is absent.
+wiki_page_exist () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$1" wiki_tmp
+ page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
+ if test -f wiki_tmp/"$page_name".mw ; then
+ rm -rf wiki_tmp
+ else
+ rm -rf wiki_tmp
+ error "test failed: file $1 not found on wiki"
+ fi
+}
+
+# wiki_getallpagename
+#
+# Fetch the names of all pages on the wiki and write them to all.txt.
+wiki_getallpagename () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename
+}
+
+# wiki_getallpagecategory <category>
+#
+# Fetch the names of all pages belonging to <category> on the wiki and write them to all.txt.
+wiki_getallpagecategory () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename "$@"
+}
+
+# wiki_getallpage <dest_dir> [<category>]
+#
+# Fetch all the pages from the wiki and place them in the directory
+# <dest_dir>.
+# If <category> is defined, then wiki_getallpage fetches only the pages
+# belonging to <category>.
+wiki_getallpage () {
+ if test -z "$2";
+ then
+ wiki_getallpagename
+ else
+ wiki_getallpagecategory "$2"
+ fi
+ mkdir -p "$1"
+ while read -r line; do
+ wiki_getpage "$line" "$1"
+ done < all.txt
+}
+
+# ================= Install part =================
+
+error () {
+ echo "$@" >&2
+ exit 1
+}
+
+# config_lighttpd
+#
+# Create the configuration files and the folders necessary to start lighttpd.
+# Overwrite any existing file.
+config_lighttpd () {
+ mkdir -p $WEB
+ mkdir -p $WEB_TMP
+ mkdir -p $WEB_WWW
+ cat > $WEB/lighttpd.conf <<EOF
+ server.document-root = "$CURR_DIR/$WEB_WWW"
+ server.port = $PORT
+ server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
+
+ server.modules = (
+ "mod_rewrite",
+ "mod_redirect",
+ "mod_access",
+ "mod_accesslog",
+ "mod_fastcgi"
+ )
+
+ index-file.names = ("index.php" , "index.html")
+
+ mimetype.assign = (
+ ".pdf" => "application/pdf",
+ ".sig" => "application/pgp-signature",
+ ".spl" => "application/futuresplash",
+ ".class" => "application/octet-stream",
+ ".ps" => "application/postscript",
+ ".torrent" => "application/x-bittorrent",
+ ".dvi" => "application/x-dvi",
+ ".gz" => "application/x-gzip",
+ ".pac" => "application/x-ns-proxy-autoconfig",
+ ".swf" => "application/x-shockwave-flash",
+ ".tar.gz" => "application/x-tgz",
+ ".tgz" => "application/x-tgz",
+ ".tar" => "application/x-tar",
+ ".zip" => "application/zip",
+ ".mp3" => "audio/mpeg",
+ ".m3u" => "audio/x-mpegurl",
+ ".wma" => "audio/x-ms-wma",
+ ".wax" => "audio/x-ms-wax",
+ ".ogg" => "application/ogg",
+ ".wav" => "audio/x-wav",
+ ".gif" => "image/gif",
+ ".jpg" => "image/jpeg",
+ ".jpeg" => "image/jpeg",
+ ".png" => "image/png",
+ ".xbm" => "image/x-xbitmap",
+ ".xpm" => "image/x-xpixmap",
+ ".xwd" => "image/x-xwindowdump",
+ ".css" => "text/css",
+ ".html" => "text/html",
+ ".htm" => "text/html",
+ ".js" => "text/javascript",
+ ".asc" => "text/plain",
+ ".c" => "text/plain",
+ ".cpp" => "text/plain",
+ ".log" => "text/plain",
+ ".conf" => "text/plain",
+ ".text" => "text/plain",
+ ".txt" => "text/plain",
+ ".dtd" => "text/xml",
+ ".xml" => "text/xml",
+ ".mpeg" => "video/mpeg",
+ ".mpg" => "video/mpeg",
+ ".mov" => "video/quicktime",
+ ".qt" => "video/quicktime",
+ ".avi" => "video/x-msvideo",
+ ".asf" => "video/x-ms-asf",
+ ".asx" => "video/x-ms-asf",
+ ".wmv" => "video/x-ms-wmv",
+ ".bz2" => "application/x-bzip",
+ ".tbz" => "application/x-bzip-compressed-tar",
+ ".tar.bz2" => "application/x-bzip-compressed-tar",
+ "" => "text/plain"
+ )
+
+ fastcgi.server = ( ".php" =>
+ ("localhost" =>
+ ( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
+ "bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
+
+ )
+ )
+ )
+EOF
+
+ cat > $WEB/php.ini <<EOF
+ session.save_path ='$CURR_DIR/$WEB_TMP'
+EOF
+}
+
+# start_lighttpd
+#
+# Start or restart the lighttpd daemon, rewriting its configuration files first.
+start_lighttpd () {
+ if test -f "$WEB_TMP/pid"; then
+ echo "Instance already running. Restarting..."
+ stop_lighttpd
+ fi
+ config_lighttpd
+ "$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
+
+ if test $? -ne 0 ; then
+ echo "Could not start the lighttpd HTTP daemon"
+ exit 1
+ fi
+}
+
+# stop_lighttpd
+#
+# Kill the lighttpd daemon and remove the associated files and folders.
+stop_lighttpd () {
+ test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
+ rm -rf "$WEB"
+}
+
+# Create the SQLite database of the MediaWiki instance. If the database
+# file already exists, it is deleted first.
+# This function should be run from the directory where $FILES_FOLDER is
+# located.
+create_db () {
+ rm -f "$TMP/$DB_FILE"
+
+ echo "Generating the SQLite database file. It can take some time ..."
+ # Run the php script to generate the SQLite database file
+ # with cURL calls.
+ php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
+ "$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
+
+ if [ ! -f "$TMP/$DB_FILE" ] ; then
+ error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
+ fi
+
+ # Copy the generated database file into the directory the
+ # user indicated.
+ cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
+ error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
+}
+
+# Install a wiki in your web server directory.
+wiki_install () {
+ if test $LIGHTTPD = "true" ; then
+ start_lighttpd
+ fi
+
+ SERVER_ADDR=$SERVER_ADDR:$PORT
+ # In this part, we change directory to $TMP in order to download,
+ # unpack and copy the files of MediaWiki
+ (
+ mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
+ error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
+ Please create it and launch the script again."
+ fi
+
+ # Fetch MediaWiki's archive if not already present in the TMP directory
+ cd "$TMP"
+ if [ ! -f "$MW_VERSION.tar.gz" ] ; then
+ echo "Downloading $MW_VERSION sources ..."
+ wget "http://download.wikimedia.org/mediawiki/1.19/mediawiki-1.19.0.tar.gz" ||
+ error "Unable to download "\
+ "http://download.wikimedia.org/mediawiki/1.19/"\
+ "mediawiki-1.19.0.tar.gz. "\
+ "Please fix your connection and launch the script again."
+ echo "$MW_VERSION.tar.gz downloaded in `pwd`. "\
+ "You can delete it later if you want."
+ else
+ echo "Reusing existing $MW_VERSION.tar.gz downloaded in `pwd`."
+ fi
+ archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
+ cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
+ error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
+ tar xzf "$archive_abs_path" --strip-components=1 ||
+ error "Unable to extract MediaWiki's files from $archive_abs_path to "\
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ ) || exit 1
+
+ create_db
+
+ # Copy the generic LocalSettings.php into the web server's directory
+ # and modify its parameters according to the ones set at the top
+ # of this script.
+ # Note that the original LocalSettings.php is never modified.
+ if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
+ error "Can't find $FILES_FOLDER/LocalSettings.php " \
+ "in the current folder. "\
+ "Please run the script inside its folder."
+ fi
+ cp "$FILES_FOLDER/LocalSettings.php" \
+ "$FILES_FOLDER/LocalSettings-tmp.php" ||
+ error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
+ "to $FILES_FOLDER/LocalSettings-tmp.php"
+
+ # Parse and set the LocalSettings file of the user according to the
+ # CONFIGURATION VARIABLES section at the beginning of this script
+ file_swap="$FILES_FOLDER/LocalSettings-swap.php"
+ sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATADIR@,$TMP," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+
+ mv "$FILES_FOLDER/LocalSettings-tmp.php" \
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
+ error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
+ "in $WIKI_DIR_INST/$WIKI_DIR_NAME"
+ echo "File $FILES_FOLDER/LocalSettings.php is set in" \
+ " $WIKI_DIR_INST/$WIKI_DIR_NAME"
+
+ echo "Your wiki has been installed. You can check it at
+ http://$SERVER_ADDR/$WIKI_DIR_NAME"
+}
+
+# Reset the database of the wiki and the password of the admin
+#
+# Warning: this function must be called only from a subdirectory of the t/ directory
+wiki_reset () {
+ # Copy initial database of the wiki
+ if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
+ error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
+ fi
+ cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
+ error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
+ echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
+}
+
+# Delete the wiki created in the web server's directory and all its content
+# saved in the database.
+wiki_delete () {
+ if test $LIGHTTPD = "true"; then
+ stop_lighttpd
+ else
+ # Delete the wiki's directory.
+ rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
+ error "Wiki's directory $WIKI_DIR_INST/" \
+ "$WIKI_DIR_NAME could not be deleted"
+ # Delete the wiki's SQLite database.
+ rm -f "$TMP/$DB_FILE" ||
+ error "Database $TMP/$DB_FILE could not be deleted."
+ fi
+
+ # Delete the wiki's SQLite database
+ rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
+ rm -f "$FILES_FOLDER/$DB_FILE"
+ rm -rf "$TMP/$MW_VERSION"
+}
diff --git a/contrib/mw-to-git/t/test-gitmw.pl b/contrib/mw-to-git/t/test-gitmw.pl
new file mode 100755
index 0000000..0ff7625
--- /dev/null
+++ b/contrib/mw-to-git/t/test-gitmw.pl
@@ -0,0 +1,225 @@
+#!/usr/bin/perl -w -s
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Usage:
+# ./test-gitmw.pl <command> [argument]*
+# Run from a terminal with the name of the function to call as the first
+# parameter and the function's arguments as the following parameters
+#
+# Example:
+# ./test-gitmw.pl "get_page" foo .
+# will call <wiki_getpage> with arguments <foo> and <.>
+#
+# Available functions are:
+# "get_page"
+# "delete_page"
+# "edit_page"
+# "getallpagename"
+
+use MediaWiki::API;
+use Getopt::Long;
+use encoding 'utf8';
+use DateTime::Format::ISO8601;
+use open ':encoding(utf8)';
+use constant SLASH_REPLACEMENT => "%2F";
+
+# Parse the config file
+
+my $configfile = "$ENV{'CURR_DIR'}/test.config";
+my %config;
+open my $CONFIG, "<", $configfile or die "can't open $configfile: $!";
+while (<$CONFIG>)
+{
+ chomp;
+ s/#.*//;
+ s/^\s+//;
+ s/\s+$//;
+ next unless length;
+ my ($key, $value) = split (/\s*=\s*/,$_, 2);
+ $config{$key} = $value;
+ last if ($key eq 'LIGHTTPD' and $value eq 'false');
+ last if ($key eq 'PORT');
+}
+close $CONFIG or die "can't close $configfile: $!";
+
+my $wiki_address = "http://$config{'SERVER_ADDR'}".":"."$config{'PORT'}";
+my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
+my $wiki_admin = "$config{'WIKI_ADMIN'}";
+my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
+my $mw = MediaWiki::API->new;
+$mw->{config}->{api_url} = $wiki_url;
+
+
+# wiki_login <name> <password>
+#
+# Log in the user <name> with password <password> on the wiki
+# referenced by the global variable $mw
+sub wiki_login {
+ $mw->login( { lgname => "$_[0]",lgpassword => "$_[1]" } )
+ || die "getpage: login failed";
+}
+
+# wiki_getpage <wiki_page> <dest_path>
+#
+# Fetch the page <wiki_page> from the wiki referenced by the global variable
+# $mw and copy its content into the directory <dest_path>
+sub wiki_getpage {
+ my $pagename = $_[0];
+ my $destdir = $_[1];
+
+ my $page = $mw->get_page( { title => $pagename } );
+ if (!defined($page)) {
+ die "getpage: wiki does not exist";
+ }
+
+ my $content = $page->{'*'};
+ if (!defined($content)) {
+ die "getpage: page does not exist";
+ }
+
+ $pagename=$page->{'title'};
+ # Replace spaces with underscores and the forbidden character "/"
+ # with %2F in the page name
+ $pagename =~ s/ /_/g;
+ $pagename =~ s/\//%2F/g;
+ open(my $file, ">", "$destdir/$pagename.mw")
+ or die "getpage: could not open $destdir/$pagename.mw: $!";
+ print $file "$content";
+ close ($file);
+
+}
+
+# wiki_delete_page <page_name>
+#
+# Delete the page named <page_name> from the wiki referenced
+# by the global variable $mw
+sub wiki_delete_page {
+ my $pagename = $_[0];
+
+ my $exist=$mw->get_page({title => $pagename});
+
+ if (defined($exist->{'*'})){
+ $mw->edit({ action => 'delete',
+ title => $pagename})
+ || die $mw->{error}->{code} . ": " . $mw->{error}->{details};
+ } else {
+ die "no page with such name found: $pagename\n";
+ }
+}
+
+# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
+#
+# Edit the page named <wiki_page> with content <wiki_content> on the wiki
+# referenced by the global variable $mw.
+# If <wiki_append> is "true", append <wiki_content> to the end of the current
+# content of the page <wiki_page>.
+# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
+sub wiki_editpage {
+ my $wiki_page = $_[0];
+ my $wiki_content = $_[1];
+ my $wiki_append = $_[2];
+ my $summary = "";
+ my ($summ, $cat) = ();
+ GetOptions('s=s' => \$summ, 'c=s' => \$cat);
+
+ my $append = 0;
+ if (defined($wiki_append) && $wiki_append eq 'true') {
+ $append=1;
+ }
+
+ my $previous_text ="";
+
+ if ($append) {
+ my $ref = $mw->get_page( { title => $wiki_page } );
+ $previous_text = $ref->{'*'};
+ }
+
+ my $text = $wiki_content;
+ if (defined($previous_text)) {
+ $text="$previous_text$text";
+ }
+
+ # Optionally, add this page to a category.
+ if (defined($cat)) {
+ my $category_name="[[Category:$cat]]";
+ $text="$text\n $category_name";
+ }
+ if(defined($summ)){
+ $summary=$summ;
+ }
+
+ $mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
+}
+
+# wiki_getallpagename [<category>]
+#
+# Fetch all pages of the wiki referenced by the global variable $mw
+# and write their names to the file all.txt, one name per line.
+# If the argument <category> is defined, this function gets only the pages
+# belonging to <category>.
+sub wiki_getallpagename {
+ # fetch the pages of the wiki
+ if (defined($_[0])) {
+ my $mw_pages = $mw->list ( { action => 'query',
+ list => 'categorymembers',
+ cmtitle => "Category:$_[0]",
+ cmnamespace => 0,
+ cmlimit => 500 },
+ )
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ open(my $file, ">all.txt");
+ foreach my $page (@{$mw_pages}) {
+ print $file "$page->{title}\n";
+ }
+ close ($file);
+
+ } else {
+ my $mw_pages = $mw->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 500,
+ })
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ open(my $file, ">all.txt");
+ foreach my $page (@{$mw_pages}) {
+ print $file "$page->{title}\n";
+ }
+ close ($file);
+ }
+}
+
+sub wiki_upload_file {
+ my $file_name = $_[0];
+ my $resultat = $mw->edit ( {
+ action => 'upload',
+ filename => $file_name,
+ comment => 'upload a file',
+ file => [ $file_name ],
+ ignorewarnings=>1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
+}
+
+
+
+# Main part of this script: parse the command line arguments
+# and select which function to execute
+my $fct_to_call = shift;
+
+wiki_login($wiki_admin, $wiki_admin_pass);
+
+my %functions_to_call = qw(
+ upload_file wiki_upload_file
+ get_page wiki_getpage
+ delete_page wiki_delete_page
+ edit_page wiki_editpage
+ getallpagename wiki_getallpagename
+);
+die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
+&{$functions_to_call{$fct_to_call}}(@ARGV);
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
new file mode 100644
index 0000000..958b37b
--- /dev/null
+++ b/contrib/mw-to-git/t/test.config
@@ -0,0 +1,35 @@
+# WIKI_DIR_NAME is the name of the web server's directory dedicated to the wiki
+WIKI_DIR_NAME=wiki
+
+# Login and password of the wiki's admin
+WIKI_ADMIN=WikiAdmin
+WIKI_PASSW=AdminPass
+
+# Address of the web server
+SERVER_ADDR=localhost
+
+# SQLite database of the wiki, named DB_FILE, is located in TMP
+TMP=/tmp
+DB_FILE=wikidb.sqlite
+
+# If LIGHTTPD is not set to true, the script will use the default
+# web server running in WIKI_DIR_INST.
+WIKI_DIR_INST=/var/www
+
+# If LIGHTTPD is set to true, the script will use Lighttpd to run
+# the wiki.
+LIGHTTPD=true
+
+# The variables below are useful only if LIGHTTPD is set to true.
+PORT=1234
+PHP_DIR=/usr/bin
+LIGHTTPD_DIR=/usr/sbin
+WEB=WEB
+WEB_TMP=$WEB/tmp
+WEB_WWW=$WEB/www
+
+# The variables below are used by the script to install a wiki.
+# You should not modify these unless you are modifying the script itself.
+MW_VERSION=mediawiki-1.19.0
+FILES_FOLDER=install-wiki
+DB_INSTALL_SCRIPT=db_install.php
diff --git a/contrib/p4import/README b/contrib/p4import/README
new file mode 100644
index 0000000..b9892b6
--- /dev/null
+++ b/contrib/p4import/README
@@ -0,0 +1 @@
+Please see contrib/fast-import/git-p4 for a better Perforce importer.
diff --git a/contrib/p4import/git-p4import.py b/contrib/p4import/git-p4import.py
new file mode 100644
index 0000000..593d6a0
--- /dev/null
+++ b/contrib/p4import/git-p4import.py
@@ -0,0 +1,365 @@
+#!/usr/bin/env python
+#
+# This tool is copyright (c) 2006, Sean Estabrooks.
+# It is released under the Gnu Public License, version 2.
+#
+# Import Perforce branches into Git repositories.
+# Checking out the files is done by calling the standard p4
+# client which you must have properly configured yourself
+#
+
+import marshal
+import os
+import sys
+import time
+import getopt
+
+if sys.hexversion < 0x02020000:
+ # The behavior of the marshal module changed significantly in 2.2
+ sys.stderr.write("git-p4import.py: requires Python 2.2 or later.\n")
+ sys.exit(1)
+
+from signal import signal, \
+ SIGPIPE, SIGINT, SIG_DFL, \
+ default_int_handler
+
+signal(SIGPIPE, SIG_DFL)
+s = signal(SIGINT, SIG_DFL)
+if s != default_int_handler:
+ signal(SIGINT, s)
+
+def die(msg, *args):
+ for a in args:
+ msg = "%s %s" % (msg, a)
+ print "git-p4import fatal error:", msg
+ sys.exit(1)
+
+def usage():
+ print "USAGE: git-p4import [-q|-v] [--authors=<file>] [-t <timezone>] [//p4repo/path <branch>]"
+ sys.exit(1)
+
+verbosity = 1
+logfile = "/dev/null"
+ignore_warnings = False
+stitch = 0
+tagall = True
+
+def report(level, msg, *args):
+ global verbosity
+ global logfile
+ for a in args:
+ msg = "%s %s" % (msg, a)
+ fd = open(logfile, "a")
+ fd.writelines(msg)
+ fd.close()
+ if level <= verbosity:
+ print msg
+
+class p4_command:
+ def __init__(self, _repopath):
+ try:
+ global logfile
+ self.userlist = {}
+ if _repopath[-1] == '/':
+ self.repopath = _repopath[:-1]
+ else:
+ self.repopath = _repopath
+ if self.repopath[-4:] != "/...":
+ self.repopath= "%s/..." % self.repopath
+ f=os.popen('p4 -V 2>>%s'%logfile, 'rb')
+ a = f.readlines()
+ if f.close():
+ raise
+ except:
+ die("Could not find the \"p4\" command")
+
+ def p4(self, cmd, *args):
+ global logfile
+ cmd = "%s %s" % (cmd, ' '.join(args))
+ report(2, "P4:", cmd)
+ f=os.popen('p4 -G %s 2>>%s' % (cmd,logfile), 'rb')
+ list = []
+ while 1:
+ try:
+ list.append(marshal.load(f))
+ except EOFError:
+ break
+ self.ret = f.close()
+ return list
+
+ def sync(self, id, force=False, trick=False, test=False):
+ if force:
+ ret = self.p4("sync -f %s@%s"%(self.repopath, id))[0]
+ elif trick:
+ ret = self.p4("sync -k %s@%s"%(self.repopath, id))[0]
+ elif test:
+ ret = self.p4("sync -n %s@%s"%(self.repopath, id))[0]
+ else:
+ ret = self.p4("sync %s@%s"%(self.repopath, id))[0]
+ if ret['code'] == "error":
+ data = ret['data'].upper()
+ if data.find('VIEW') > 0:
+ die("Perforce reports %s is not in client view"% self.repopath)
+ elif data.find('UP-TO-DATE') < 0:
+ die("Could not sync files from perforce", self.repopath)
+
+ def changes(self, since=0):
+ try:
+ list = []
+ for rec in self.p4("changes %s@%s,#head" % (self.repopath, since+1)):
+ list.append(rec['change'])
+ list.reverse()
+ return list
+ except:
+ return []
+
+ def authors(self, filename):
+ f=open(filename)
+ for l in f.readlines():
+ self.userlist[l[:l.find('=')].rstrip()] = \
+ (l[l.find('=')+1:l.find('<')].rstrip(),l[l.find('<')+1:l.find('>')])
+ f.close()
+ for f,e in self.userlist.items():
+ report(2, f, ":", e[0], " <", e[1], ">")
+
+ def _get_user(self, id):
+ if not self.userlist.has_key(id):
+ try:
+ user = self.p4("users", id)[0]
+ self.userlist[id] = (user['FullName'], user['Email'])
+ except:
+ self.userlist[id] = (id, "")
+ return self.userlist[id]
+
+ def _format_date(self, ticks):
+ symbol='+'
+ name = time.tzname[0]
+ offset = time.timezone
+ if ticks[8]:
+ name = time.tzname[1]
+ offset = time.altzone
+ if offset < 0:
+ offset *= -1
+ symbol = '-'
+ localo = "%s%02d%02d %s" % (symbol, offset / 3600, (offset % 3600) / 60, name)
+ tickso = time.strftime("%a %b %d %H:%M:%S %Y", ticks)
+ return "%s %s" % (tickso, localo)
+
+ def where(self):
+ try:
+ return self.p4("where %s" % self.repopath)[-1]['path']
+ except:
+ return ""
+
+ def describe(self, num):
+ desc = self.p4("describe -s", num)[0]
+ self.msg = desc['desc']
+ self.author, self.email = self._get_user(desc['user'])
+ self.date = self._format_date(time.localtime(long(desc['time'])))
+ return self
+
+class git_command:
+ def __init__(self):
+ try:
+ self.version = self.git("--version")[0][12:].rstrip()
+ except:
+ die("Could not find the \"git\" command")
+ try:
+ self.gitdir = self.get_single("rev-parse --git-dir")
+ report(2, "gdir:", self.gitdir)
+ except:
+ die("Not a git repository... did you forget to \"git init\" ?")
+ try:
+ self.cdup = self.get_single("rev-parse --show-cdup")
+ if self.cdup != "":
+ os.chdir(self.cdup)
+ self.topdir = os.getcwd()
+ report(2, "topdir:", self.topdir)
+ except:
+ die("Could not find top git directory")
+
+ def git(self, cmd):
+ global logfile
+ report(2, "GIT:", cmd)
+ f=os.popen('git %s 2>>%s' % (cmd,logfile), 'rb')
+ r=f.readlines()
+ self.ret = f.close()
+ return r
+
+ def get_single(self, cmd):
+ return self.git(cmd)[0].rstrip()
+
+ def current_branch(self):
+ try:
+ testit = self.git("rev-parse --verify HEAD")[0]
+ return self.git("symbolic-ref HEAD")[0][11:].rstrip()
+ except:
+ return None
+
+ def get_config(self, variable):
+ try:
+ return self.git("config --get %s" % variable)[0].rstrip()
+ except:
+ return None
+
+ def set_config(self, variable, value):
+ try:
+ self.git("config %s %s"%(variable, value) )
+ except:
+ die("Could not set %s to " % variable, value)
+
+ def make_tag(self, name, head):
+ self.git("tag -f %s %s"%(name,head))
+
+ def top_change(self, branch):
+ try:
+ a=self.get_single("name-rev --tags refs/heads/%s" % branch)
+ loc = a.find(' tags/') + 6
+ if a[loc:loc+3] != "p4/":
+ raise
+ return int(a[loc+3:][:-2])
+ except:
+ return 0
+
+ def update_index(self):
+ self.git("ls-files -m -d -o -z | git update-index --add --remove -z --stdin")
+
+ def checkout(self, branch):
+ self.git("checkout %s" % branch)
+
+ def repoint_head(self, branch):
+ self.git("symbolic-ref HEAD refs/heads/%s" % branch)
+
+ def remove_files(self):
+ self.git("ls-files | xargs rm")
+
+ def clean_directories(self):
+ self.git("clean -d")
+
+ def fresh_branch(self, branch):
+ report(1, "Creating new branch", branch)
+ self.git("ls-files | xargs rm")
+ os.remove(".git/index")
+ self.repoint_head(branch)
+ self.git("clean -d")
+
+ def basedir(self):
+ return self.topdir
+
+ def commit(self, author, email, date, msg, id):
+ self.update_index()
+ fd=open(".msg", "w")
+ fd.writelines(msg)
+ fd.close()
+ try:
+ current = self.get_single("rev-parse --verify HEAD")
+ head = "-p HEAD"
+ except:
+ current = ""
+ head = ""
+ tree = self.get_single("write-tree")
+ for r,l in [('DATE',date),('NAME',author),('EMAIL',email)]:
+ os.environ['GIT_AUTHOR_%s'%r] = l
+ os.environ['GIT_COMMITTER_%s'%r] = l
+ commit = self.get_single("commit-tree %s %s < .msg" % (tree,head))
+ os.remove(".msg")
+ self.make_tag("p4/%s"%id, commit)
+ self.git("update-ref HEAD %s %s" % (commit, current) )
+
+try:
+ opts, args = getopt.getopt(sys.argv[1:], "qhvt:",
+ ["authors=","help","stitch=","timezone=","log=","ignore","notags"])
+except getopt.GetoptError:
+ usage()
+
+for o, a in opts:
+ if o == "-q":
+ verbosity = 0
+ if o == "-v":
+ verbosity += 1
+ if o in ("--log"):
+ logfile = a
+ if o in ("--notags"):
+ tagall = False
+ if o in ("-h", "--help"):
+ usage()
+ if o in ("--ignore"):
+ ignore_warnings = True
+
+git = git_command()
+branch=git.current_branch()
+
+for o, a in opts:
+ if o in ("-t", "--timezone"):
+ git.set_config("perforce.timezone", a)
+ if o in ("--stitch"):
+ git.set_config("perforce.%s.path" % branch, a)
+ stitch = 1
+
+if len(args) == 2:
+ branch = args[1]
+ git.checkout(branch)
+ if branch == git.current_branch():
+ die("Branch %s already exists!" % branch)
+ report(1, "Setting perforce to ", args[0])
+ git.set_config("perforce.%s.path" % branch, args[0])
+elif len(args) != 0:
+ die("You must specify the perforce //depot/path and git branch")
+
+p4path = git.get_config("perforce.%s.path" % branch)
+if p4path == None:
+ die("Do not know Perforce //depot/path for git branch", branch)
+
+p4 = p4_command(p4path)
+
+for o, a in opts:
+ if o in ("-a", "--authors"):
+ p4.authors(a)
+
+localdir = git.basedir()
+if p4.where()[:len(localdir)] != localdir:
+ report(1, "**WARNING** Appears p4 client is misconfigured")
+ report(1, " for sync from %s to %s" % (p4.repopath, localdir))
+ if ignore_warnings != True:
+ die("Reconfigure or use \"--ignore\" on command line")
+
+if stitch == 0:
+ top = git.top_change(branch)
+else:
+ top = 0
+changes = p4.changes(top)
+count = len(changes)
+if count == 0:
+ report(1, "Already up to date...")
+ sys.exit(0)
+
+ptz = git.get_config("perforce.timezone")
+if ptz:
+ report(1, "Setting timezone to", ptz)
+ os.environ['TZ'] = ptz
+ time.tzset()
+
+if stitch == 1:
+ git.remove_files()
+ git.clean_directories()
+ p4.sync(changes[0], force=True)
+elif top == 0 and branch != git.current_branch():
+ p4.sync(changes[0], test=True)
+ report(1, "Creating new initial commit")
+ git.fresh_branch(branch)
+ p4.sync(changes[0], force=True)
+else:
+ p4.sync(changes[0], trick=True)
+
+report(1, "processing %s changes from p4 (%s) to git (%s)" % (count, p4.repopath, branch))
+for id in changes:
+ report(1, "Importing changeset", id)
+ change = p4.describe(id)
+ p4.sync(id)
+ if tagall :
+ git.commit(change.author, change.email, change.date, change.msg, id)
+ else:
+ git.commit(change.author, change.email, change.date, change.msg, "import")
+ if stitch == 1:
+ git.clean_directories()
+ stitch = 0
diff --git a/contrib/p4import/git-p4import.txt b/contrib/p4import/git-p4import.txt
new file mode 100644
index 0000000..9967587
--- /dev/null
+++ b/contrib/p4import/git-p4import.txt
@@ -0,0 +1,167 @@
+git-p4import(1)
+===============
+
+NAME
+----
+git-p4import - Import a Perforce repository into git
+
+
+SYNOPSIS
+--------
+[verse]
+`git-p4import` [-q|-v] [--notags] [--authors <file>] [-t <timezone>]
+ <//p4repo/path> <branch>
+`git-p4import` --stitch <//p4repo/path>
+`git-p4import`
+
+
+DESCRIPTION
+-----------
+Import a Perforce repository into an existing git repository. When
+a <//p4repo/path> and <branch> are specified, a new branch with the
+given name will be created and the initial import will begin.
+
+Once the initial import is complete you can do an incremental import
+of new commits from the Perforce repository. You do this by checking
+out the appropriate git branch and then running `git-p4import` without
+any options.
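+
+For example, an incremental import might look like this (the branch name
+"jammy" is only illustrative; see the client example later in this document):
+
+------------
+$ git checkout jammy
+$ git p4import
+------------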
+
+The standard p4 client is used to communicate with the Perforce
+repository; it must be configured correctly in order for `git-p4import`
+to operate (see below).
+
+
+OPTIONS
+-------
+-q::
+ Do not display any progress information.
+
+-v::
+ Give extra progress information.
+
+\--authors::
+ Specify an authors file containing a mapping of Perforce user
+ ids to full names and email addresses (see Notes below).
+
+\--notags::
+ Do not create a tag for each imported commit.
+
+\--stitch::
+ Import the contents of the given perforce branch into the
+ currently checked out git branch.
+
+\--log::
+ Store debugging information in the specified file.
+
+-t::
+ Specify that the remote repository is in the specified timezone.
+ Timezone must be in the format "US/Pacific" or "Europe/London"
+ etc. You only need to specify this once; it will be saved in
+ the git config file for the repository.
+
+<//p4repo/path>::
+ The Perforce path that will be imported into the specified branch.
+
+<branch>::
+ The new branch that will be created to hold the Perforce imports.
+
+
+P4 Client
+---------
+You must make the `p4` client command available in your $PATH and
+configure it to communicate with the target Perforce repository.
+Typically this means you must set the "$P4PORT" and "$P4CLIENT"
+environment variables.
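+
+For example (the server address, port and client name below are placeholders):
+
+------------
+$ export P4PORT=perforce.example.com:1666
+$ export P4CLIENT=myhost
+------------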
+
+You must also configure a `p4` client "view" which maps the Perforce
+branch into the top level of your git repository, for example:
+
+------------
+Client: myhost
+
+Root: /home/sean/import
+
+Options: noallwrite clobber nocompress unlocked modtime rmdir
+
+View:
+ //public/jam/... //myhost/jam/...
+------------
+
+With the above `p4` client setup, you could import the "jam"
+Perforce branch into a branch named "jammy", like so:
+
+------------
+$ mkdir -p /home/sean/import/jam
+$ cd /home/sean/import/jam
+$ git init
+$ git p4import //public/jam jammy
+------------
+
+
+Multiple Branches
+-----------------
+Note that by creating multiple "views" you can use `git-p4import`
+to import additional branches into the same git repository.
+However, the `p4` client has a limitation in that it silently
+ignores all but the last "view" that maps into the same local
+directory. So the following will *not* work:
+
+------------
+View:
+ //public/jam/... //myhost/jam/...
+ //public/other/... //myhost/jam/...
+ //public/guest/... //myhost/jam/...
+------------
+
+If you want more than one Perforce branch to be imported into the
+same directory, you must employ a workaround. A simple option is
+to adjust your `p4` client before each import to include only a
+single view.
+
+Another option is to create multiple local symlinks that all point
+to the same directory in your git repository and then use one
+symlink per "view" instead of listing the actual directory, as in
+the sketch below.
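+
+For example (a sketch only; it reuses the "//public/other" path from
+above purely for illustration), with the client from the earlier
+example you could create a symlink next to the git work tree:
+
+------------
+$ cd /home/sean/import
+$ ln -s jam other
+------------
+
+and then give each depot path its own view line:
+
+------------
+View:
+ //public/jam/... //myhost/jam/...
+ //public/other/... //myhost/other/...
+------------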
+
+
+Tags
+----
+A git tag of the form p4/xx is created for every change imported from
+the Perforce repository, where xx is the Perforce changeset number.
+After the import you can therefore use git to access any commit by its
+Perforce number, e.g. `git show p4/327`.
+
+The tag associated with the HEAD commit is also how `git-p4import`
+determines if there are new changes to incrementally import from the
+Perforce repository.
+
+If you import from a repository with many thousands of changes,
+you will have an equal number of p4/xxxx git tags. Git tags can
+be expensive in terms of disk space and repository operations.
+If you don't need to perform further incremental imports, you
+may delete the tags.
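+
+One way to do that (a sketch; make sure the pattern matches only the
+imported tags before running it) is:
+
+------------
+$ git tag -l 'p4/*' | xargs -n 1 git tag -d
+------------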
+
+
+Notes
+-----
+You can interrupt the import (e.g. ctrl-c) at any time and restart it
+without worry.
+
+Author information is automatically determined by querying the
+Perforce "users" table using the id associated with each change.
+However, if you want to supply these mappings manually you can do
+so with the "--authors" option. It accepts a file containing a list
+of mappings, one per line, in the format:
+
+------------
+ perforce_id = Full Name <email@address.com>
+------------
+
+
+Author
+------
+Written by Sean Estabrooks <seanlkml@sympatico.ca>
+
+
+GIT
+---
+Part of the gitlink:git[7] suite
diff --git a/contrib/persistent-https/LICENSE b/contrib/persistent-https/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/contrib/persistent-https/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/persistent-https/Makefile b/contrib/persistent-https/Makefile
new file mode 100644
index 0000000..92baa3b
--- /dev/null
+++ b/contrib/persistent-https/Makefile
@@ -0,0 +1,38 @@
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+BUILD_LABEL=$(shell date +"%s")
+TAR_OUT=$(shell go env GOOS)_$(shell go env GOARCH).tar.gz
+
+all: git-remote-persistent-https git-remote-persistent-https--proxy \
+ git-remote-persistent-http
+
+git-remote-persistent-https--proxy: git-remote-persistent-https
+ ln -f -s git-remote-persistent-https git-remote-persistent-https--proxy
+
+git-remote-persistent-http: git-remote-persistent-https
+ ln -f -s git-remote-persistent-https git-remote-persistent-http
+
+git-remote-persistent-https:
+ go build -o git-remote-persistent-https \
+ -ldflags "-X main._BUILD_EMBED_LABEL $(BUILD_LABEL)"
+
+clean:
+ rm -f git-remote-persistent-http* *.tar.gz
+
+tar: clean all
+ @chmod 555 git-remote-persistent-https
+ @tar -czf $(TAR_OUT) git-remote-persistent-http* README LICENSE
+ @echo
+ @echo "Created $(TAR_OUT)"
diff --git a/contrib/persistent-https/README b/contrib/persistent-https/README
new file mode 100644
index 0000000..f784dd2
--- /dev/null
+++ b/contrib/persistent-https/README
@@ -0,0 +1,62 @@
+git-remote-persistent-https
+
+The git-remote-persistent-https binary speeds up SSL operations
+by running a daemon job (git-remote-persistent-https--proxy) that
+keeps a connection open to a server.
+
+
+PRE-BUILT BINARIES
+
+Darwin amd64:
+https://commondatastorage.googleapis.com/git-remote-persistent-https/darwin_amd64.tar.gz
+
+Linux amd64:
+https://commondatastorage.googleapis.com/git-remote-persistent-https/linux_amd64.tar.gz
+
+
+INSTALLING
+
+Move all of the git-remote-persistent-http* binaries to a directory
+in PATH.
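+
+For example, assuming ~/bin exists and is already on your PATH and the
+binaries have been built or unpacked (a sketch; "cp -a" keeps the
+--proxy and -http symlinks intact):
+
+cp -a git-remote-persistent-http* ~/bin/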
+
+
+USAGE
+
+HTTPS requests can be delegated to the proxy by using the
+"persistent-https" scheme, e.g.
+
+git clone persistent-https://kernel.googlesource.com/pub/scm/git/git
+
+Likewise, .gitconfig can be updated as follows to rewrite https URLs
+to use persistent-https:
+
+[url "persistent-https"]
+ insteadof = https
+[url "persistent-http"]
+ insteadof = http
+
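+Equivalently, the same rewrite rules can be added from the command
+line (a sketch; drop --global to set them for a single repository
+instead):
+
+git config --global url.persistent-https.insteadOf https
+git config --global url.persistent-http.insteadOf http
+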
+
+#####################################################################
+# BUILDING FROM SOURCE
+#####################################################################
+
+LOCATION
+
+The source is available in the contrib/persistent-https directory of
+the Git source repository. The Git source repository is available at
+git://git.kernel.org/pub/scm/git/git.git/
+https://kernel.googlesource.com/pub/scm/git/git
+
+
+PREREQUISITES
+
+The code is written in Go (http://golang.org/) and the Go compiler is
+required. Currently, the compiler must be built and installed from tip
+of source, in order to include a fix in the reverse http proxy:
+http://code.google.com/p/go/source/detail?r=a615b796570a2cd8591884767a7d67ede74f6648
+
+
+BUILDING
+
+Run "make" to build the binaries. See the section on
+INSTALLING above.
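+
+For example (a sketch; "make tar" is optional and just bundles the
+binaries into the <GOOS>_<GOARCH>.tar.gz described by the Makefile
+above):
+
+make
+make tar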
diff --git a/contrib/persistent-https/client.go b/contrib/persistent-https/client.go
new file mode 100644
index 0000000..71125b5
--- /dev/null
+++ b/contrib/persistent-https/client.go
@@ -0,0 +1,189 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "net"
+ "net/url"
+ "os"
+ "os/exec"
+ "strings"
+ "syscall"
+ "time"
+)
+
+type Client struct {
+ ProxyBin string
+ Args []string
+
+ insecure bool
+}
+
+func (c *Client) Run() error {
+ if err := c.resolveArgs(); err != nil {
+ return fmt.Errorf("resolveArgs() got error: %v", err)
+ }
+
+ // Connect to the proxy.
+ uconn, hconn, addr, err := c.connect()
+ if err != nil {
+ return fmt.Errorf("connect() got error: %v", err)
+ }
+ // Keep the unix socket connection open for the duration of the request.
+ defer uconn.Close()
+ // Keep a connection to the HTTP server open, so no other user can
+ // bind on the same address so long as the process is running.
+ defer hconn.Close()
+
+ // Start the git-remote-http subprocess.
+ cargs := []string{"-c", fmt.Sprintf("http.proxy=%v", addr), "remote-http"}
+ cargs = append(cargs, c.Args...)
+ cmd := exec.Command("git", cargs...)
+
+ for _, v := range os.Environ() {
+ if !strings.HasPrefix(v, "GIT_PERSISTENT_HTTPS_SECURE=") {
+ cmd.Env = append(cmd.Env, v)
+ }
+ }
+ // Set the GIT_PERSISTENT_HTTPS_SECURE environment variable when
+ // the proxy is using an SSL connection. This allows credential helpers
+ // to identify secure proxy connections, despite being passed an HTTP
+ // scheme.
+ if !c.insecure {
+ cmd.Env = append(cmd.Env, "GIT_PERSISTENT_HTTPS_SECURE=1")
+ }
+
+ cmd.Stdin = os.Stdin
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ if eerr, ok := err.(*exec.ExitError); ok {
+ if stat, ok := eerr.ProcessState.Sys().(syscall.WaitStatus); ok && stat.ExitStatus() != 0 {
+ os.Exit(stat.ExitStatus())
+ }
+ }
+ return fmt.Errorf("git-remote-http subprocess got error: %v", err)
+ }
+ return nil
+}
+
+func (c *Client) connect() (uconn net.Conn, hconn net.Conn, addr string, err error) {
+ uconn, err = DefaultSocket.Dial()
+ if err != nil {
+ if e, ok := err.(*net.OpError); ok && (os.IsNotExist(e.Err) || e.Err == syscall.ECONNREFUSED) {
+ if err = c.startProxy(); err == nil {
+ uconn, err = DefaultSocket.Dial()
+ }
+ }
+ if err != nil {
+ return
+ }
+ }
+
+ if addr, err = c.readAddr(uconn); err != nil {
+ return
+ }
+
+ // Open a tcp connection to the proxy.
+ if hconn, err = net.Dial("tcp", addr); err != nil {
+ return
+ }
+
+ // Verify the address hasn't changed ownership.
+ var addr2 string
+ if addr2, err = c.readAddr(uconn); err != nil {
+ return
+ } else if addr != addr2 {
+ err = fmt.Errorf("address changed after connect. got %q, want %q", addr2, addr)
+ return
+ }
+ return
+}
+
+func (c *Client) readAddr(conn net.Conn) (string, error) {
+ conn.SetDeadline(time.Now().Add(5 * time.Second))
+ data := make([]byte, 100)
+ n, err := conn.Read(data)
+ if err != nil {
+ return "", fmt.Errorf("error reading unix socket: %v", err)
+ } else if n == 0 {
+ return "", errors.New("empty data response")
+ }
+ conn.Write([]byte{1}) // Ack
+
+ var addr string
+ if addrs := strings.Split(string(data[:n]), "\n"); len(addrs) != 2 {
+ return "", fmt.Errorf("got %q, wanted 2 addresses", data[:n])
+ } else if c.insecure {
+ addr = addrs[1]
+ } else {
+ addr = addrs[0]
+ }
+ return addr, nil
+}
+
+func (c *Client) startProxy() error {
+ cmd := exec.Command(c.ProxyBin)
+ cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
+ stdout, err := cmd.StdoutPipe()
+ if err != nil {
+ return err
+ }
+ defer stdout.Close()
+ if err := cmd.Start(); err != nil {
+ return err
+ }
+ result := make(chan error)
+ go func() {
+ bytes, _, err := bufio.NewReader(stdout).ReadLine()
+ if line := string(bytes); err == nil && line != "OK" {
+ err = fmt.Errorf("proxy returned %q, want \"OK\"", line)
+ }
+ result <- err
+ }()
+ select {
+ case err := <-result:
+ return err
+ case <-time.After(5 * time.Second):
+ return errors.New("timeout waiting for proxy to start")
+ }
+ panic("not reachable")
+}
+
+func (c *Client) resolveArgs() error {
+ if nargs := len(c.Args); nargs == 0 {
+ return errors.New("remote needed")
+ } else if nargs > 2 {
+ return fmt.Errorf("want at most 2 args, got %v", c.Args)
+ }
+
+ // Rewrite the url scheme to be http.
+ idx := len(c.Args) - 1
+ rawurl := c.Args[idx]
+ rurl, err := url.Parse(rawurl)
+ if err != nil {
+ return fmt.Errorf("invalid remote: %v", err)
+ }
+ c.insecure = rurl.Scheme == "persistent-http"
+ rurl.Scheme = "http"
+ c.Args[idx] = rurl.String()
+ if idx != 0 && c.Args[0] == rawurl {
+ c.Args[0] = c.Args[idx]
+ }
+ return nil
+}
diff --git a/contrib/persistent-https/main.go b/contrib/persistent-https/main.go
new file mode 100644
index 0000000..fd1b107
--- /dev/null
+++ b/contrib/persistent-https/main.go
@@ -0,0 +1,82 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// The git-remote-persistent-https binary speeds up SSL operations by running
+// a daemon job that keeps a connection open to a Git server. It ensures that
+// git-remote-persistent-https--proxy is running and delegates execution
+// to the git-remote-http binary with http.proxy pointed at the daemon job.
+// A unix socket is used to authenticate the proxy and discover the
+// HTTP address. Note that both the client and proxy are included in the same
+// binary.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "log"
+ "os"
+ "strings"
+ "time"
+)
+
+var (
+ forceProxy = flag.Bool("proxy", false, "Whether to start the binary in proxy mode")
+ proxyBin = flag.String("proxy_bin", "git-remote-persistent-https--proxy", "Path to the proxy binary")
+ printLabel = flag.Bool("print_label", false, "Prints the build label for the binary")
+
+ // Variable that should be defined through the -X linker flag.
+ _BUILD_EMBED_LABEL string
+)
+
+const (
+ defaultMaxIdleDuration = 24 * time.Hour
+ defaultPollUpdateInterval = 15 * time.Minute
+)
+
+func main() {
+ flag.Parse()
+ if *printLabel {
+ // Short circuit execution to print the build label
+ fmt.Println(buildLabel())
+ return
+ }
+
+ var err error
+ if *forceProxy || strings.HasSuffix(os.Args[0], "--proxy") {
+ log.SetPrefix("git-remote-persistent-https--proxy: ")
+ proxy := &Proxy{
+ BuildLabel: buildLabel(),
+ MaxIdleDuration: defaultMaxIdleDuration,
+ PollUpdateInterval: defaultPollUpdateInterval,
+ }
+ err = proxy.Run()
+ } else {
+ log.SetPrefix("git-remote-persistent-https: ")
+ client := &Client{
+ ProxyBin: *proxyBin,
+ Args: flag.Args(),
+ }
+ err = client.Run()
+ }
+ if err != nil {
+ log.Fatalln(err)
+ }
+}
+
+func buildLabel() string {
+ if _BUILD_EMBED_LABEL == "" {
+ log.Println(`unlabeled build; build with "make" to label`)
+ }
+ return _BUILD_EMBED_LABEL
+}
diff --git a/contrib/persistent-https/proxy.go b/contrib/persistent-https/proxy.go
new file mode 100644
index 0000000..bb0cdba
--- /dev/null
+++ b/contrib/persistent-https/proxy.go
@@ -0,0 +1,190 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "fmt"
+ "log"
+ "net"
+ "net/http"
+ "net/http/httputil"
+ "os"
+ "os/exec"
+ "os/signal"
+ "sync"
+ "syscall"
+ "time"
+)
+
+type Proxy struct {
+ BuildLabel string
+ MaxIdleDuration time.Duration
+ PollUpdateInterval time.Duration
+
+ ul net.Listener
+ httpAddr string
+ httpsAddr string
+}
+
+func (p *Proxy) Run() error {
+ hl, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ return fmt.Errorf("http listen failed: %v", err)
+ }
+ defer hl.Close()
+
+ hsl, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ return fmt.Errorf("https listen failed: %v", err)
+ }
+ defer hsl.Close()
+
+ p.ul, err = DefaultSocket.Listen()
+ if err != nil {
+ c, derr := DefaultSocket.Dial()
+ if derr == nil {
+ c.Close()
+ fmt.Println("OK\nA proxy is already running... exiting")
+ return nil
+ } else if e, ok := derr.(*net.OpError); ok && e.Err == syscall.ECONNREFUSED {
+ // Nothing is listening on the socket; unlink it and try again.
+ syscall.Unlink(DefaultSocket.Path())
+ p.ul, err = DefaultSocket.Listen()
+ }
+ if err != nil {
+ return fmt.Errorf("unix listen failed on %v: %v", DefaultSocket.Path(), err)
+ }
+ }
+ defer p.ul.Close()
+ go p.closeOnSignal()
+ go p.closeOnUpdate()
+
+ p.httpAddr = hl.Addr().String()
+ p.httpsAddr = hsl.Addr().String()
+ fmt.Printf("OK\nListening on unix socket=%v http=%v https=%v\n",
+ p.ul.Addr(), p.httpAddr, p.httpsAddr)
+
+ result := make(chan error, 2)
+ go p.serveUnix(result)
+ go func() {
+ result <- http.Serve(hl, &httputil.ReverseProxy{
+ FlushInterval: 500 * time.Millisecond,
+ Director: func(r *http.Request) {},
+ })
+ }()
+ go func() {
+ result <- http.Serve(hsl, &httputil.ReverseProxy{
+ FlushInterval: 500 * time.Millisecond,
+ Director: func(r *http.Request) {
+ r.URL.Scheme = "https"
+ },
+ })
+ }()
+ return <-result
+}
+
+type socketContext struct {
+ sync.WaitGroup
+ mutex sync.Mutex
+ last time.Time
+}
+
+func (sc *socketContext) Done() {
+ sc.mutex.Lock()
+ defer sc.mutex.Unlock()
+ sc.last = time.Now()
+ sc.WaitGroup.Done()
+}
+
+func (p *Proxy) serveUnix(result chan<- error) {
+ sockCtx := &socketContext{}
+ go p.closeOnIdle(sockCtx)
+
+ var err error
+ for {
+ var uconn net.Conn
+ uconn, err = p.ul.Accept()
+ if err != nil {
+ err = fmt.Errorf("accept failed: %v", err)
+ break
+ }
+ sockCtx.Add(1)
+ go p.handleUnixConn(sockCtx, uconn)
+ }
+ sockCtx.Wait()
+ result <- err
+}
+
+func (p *Proxy) handleUnixConn(sockCtx *socketContext, uconn net.Conn) {
+ defer sockCtx.Done()
+ defer uconn.Close()
+ data := []byte(fmt.Sprintf("%v\n%v", p.httpsAddr, p.httpAddr))
+ uconn.SetDeadline(time.Now().Add(5 * time.Second))
+ for i := 0; i < 2; i++ {
+ if n, err := uconn.Write(data); err != nil {
+ log.Printf("error sending http addresses: %+v\n", err)
+ return
+ } else if n != len(data) {
+ log.Printf("sent %d data bytes, wanted %d\n", n, len(data))
+ return
+ }
+ if _, err := uconn.Read([]byte{0, 0, 0, 0}); err != nil {
+ log.Printf("error waiting for Ack: %+v\n", err)
+ return
+ }
+ }
+ // Wait without a deadline for the client to finish via EOF
+ uconn.SetDeadline(time.Time{})
+ uconn.Read([]byte{0, 0, 0, 0})
+}
+
+func (p *Proxy) closeOnIdle(sockCtx *socketContext) {
+ for d := p.MaxIdleDuration; d > 0; {
+ time.Sleep(d)
+ sockCtx.Wait()
+ sockCtx.mutex.Lock()
+ if d = sockCtx.last.Add(p.MaxIdleDuration).Sub(time.Now()); d <= 0 {
+ log.Println("graceful shutdown from idle timeout")
+ p.ul.Close()
+ }
+ sockCtx.mutex.Unlock()
+ }
+}
+
+func (p *Proxy) closeOnUpdate() {
+ for {
+ time.Sleep(p.PollUpdateInterval)
+ if out, err := exec.Command(os.Args[0], "--print_label").Output(); err != nil {
+ log.Printf("error polling for updated binary: %v\n", err)
+ } else if s := string(out[:len(out)-1]); p.BuildLabel != s {
+ log.Printf("graceful shutdown from updated binary: %q --> %q\n", p.BuildLabel, s)
+ p.ul.Close()
+ break
+ }
+ }
+}
+
+func (p *Proxy) closeOnSignal() {
+ ch := make(chan os.Signal, 10)
+ signal.Notify(ch, os.Interrupt, os.Kill, os.Signal(syscall.SIGTERM), os.Signal(syscall.SIGHUP))
+ sig := <-ch
+ p.ul.Close()
+ switch sig {
+ case os.Signal(syscall.SIGHUP):
+ log.Printf("graceful shutdown from signal: %v\n", sig)
+ default:
+ log.Fatalf("exiting from signal: %v\n", sig)
+ }
+}
diff --git a/contrib/persistent-https/socket.go b/contrib/persistent-https/socket.go
new file mode 100644
index 0000000..193b911
--- /dev/null
+++ b/contrib/persistent-https/socket.go
@@ -0,0 +1,97 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "fmt"
+ "log"
+ "net"
+ "os"
+ "path/filepath"
+ "syscall"
+)
+
+// A Socket is a wrapper around a Unix socket that verifies directory
+// permissions.
+type Socket struct {
+ Dir string
+}
+
+func defaultDir() string {
+ sockPath := ".git-credential-cache"
+ if home := os.Getenv("HOME"); home != "" {
+ return filepath.Join(home, sockPath)
+ }
+ log.Printf("socket: cannot find HOME path. using relative directory %q for socket", sockPath)
+ return sockPath
+}
+
+// DefaultSocket is a Socket in the $HOME/.git-credential-cache directory.
+var DefaultSocket = Socket{Dir: defaultDir()}
+
+// Listen announces the local network address of the unix socket. The
+// permissions on the socket directory are verified before attempting
+// the actual listen.
+func (s Socket) Listen() (net.Listener, error) {
+ network, addr := "unix", s.Path()
+ if err := s.mkdir(); err != nil {
+ return nil, &net.OpError{Op: "listen", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
+ }
+ return net.Listen(network, addr)
+}
+
+// Dial connects to the unix socket. The permissions on the socket directory
+// are verified before attempting the actual dial.
+func (s Socket) Dial() (net.Conn, error) {
+ network, addr := "unix", s.Path()
+ if err := s.checkPermissions(); err != nil {
+ return nil, &net.OpError{Op: "dial", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
+ }
+ return net.Dial(network, addr)
+}
+
+// Path returns the fully specified file name of the unix socket.
+func (s Socket) Path() string {
+ return filepath.Join(s.Dir, "persistent-https-proxy-socket")
+}
+
+func (s Socket) mkdir() error {
+ if err := s.checkPermissions(); err == nil {
+ return nil
+ } else if !os.IsNotExist(err) {
+ return err
+ }
+ if err := os.MkdirAll(s.Dir, 0700); err != nil {
+ return err
+ }
+ return s.checkPermissions()
+}
+
+func (s Socket) checkPermissions() error {
+ fi, err := os.Stat(s.Dir)
+ if err != nil {
+ return err
+ }
+ if !fi.IsDir() {
+ return fmt.Errorf("socket: got file, want directory for %q", s.Dir)
+ }
+ if fi.Mode().Perm() != 0700 {
+ return fmt.Errorf("socket: got perm %o, want 700 for %q", fi.Mode().Perm(), s.Dir)
+ }
+ if st := fi.Sys().(*syscall.Stat_t); int(st.Uid) != os.Getuid() {
+ return fmt.Errorf("socket: got uid %d, want %d for %q", st.Uid, os.Getuid(), s.Dir)
+ }
+ return nil
+}
diff --git a/contrib/remote-helpers/Makefile b/contrib/remote-helpers/Makefile
new file mode 100644
index 0000000..239161d
--- /dev/null
+++ b/contrib/remote-helpers/Makefile
@@ -0,0 +1,14 @@
+TESTS := $(wildcard test*.sh)
+
+export T := $(addprefix $(CURDIR)/,$(TESTS))
+export MAKE := $(MAKE) -e
+export PATH := $(CURDIR):$(PATH)
+export TEST_LINT := test-lint-executable test-lint-shell-syntax
+
+test:
+ $(MAKE) -C ../../t $@
+
+$(TESTS):
+ $(MAKE) -C ../../t $(CURDIR)/$@
+
+.PHONY: $(TESTS)
diff --git a/contrib/remote-helpers/git-remote-bzr b/contrib/remote-helpers/git-remote-bzr
new file mode 100755
index 0000000..c3a3cac
--- /dev/null
+++ b/contrib/remote-helpers/git-remote-bzr
@@ -0,0 +1,960 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 Felipe Contreras
+#
+
+#
+# Just copy to your ~/bin, or anywhere in your $PATH.
+# Then you can clone with:
+# % git clone bzr::/path/to/bzr/repo/or/url
+#
+# For example:
+# % git clone bzr::$HOME/myrepo
+# or
+# % git clone bzr::lp:myrepo
+#
+# If you want to specify which branches you want to track (per repo):
+# git config remote-bzr.branches 'trunk, devel, test'
+#
+
+import sys
+
+import bzrlib
+if hasattr(bzrlib, "initialize"):
+ bzrlib.initialize()
+
+import bzrlib.plugin
+bzrlib.plugin.load_plugins()
+
+import bzrlib.generate_ids
+import bzrlib.transport
+import bzrlib.errors
+import bzrlib.ui
+import bzrlib.urlutils
+import bzrlib.branch
+
+import sys
+import os
+import json
+import re
+import StringIO
+import atexit, shutil, hashlib, urlparse, subprocess
+
+NAME_RE = re.compile('^([^<>]+)')
+AUTHOR_RE = re.compile('^([^<>]+?)? ?<([^<>]*)>$')
+EMAIL_RE = re.compile('^([^<>]+[^ \\\t<>])?\\b(?:[ \\t<>]*?)\\b([^ \\t<>]+@[^ \\t<>]+)')
+RAW_AUTHOR_RE = re.compile('^(\w+) (.+)? <(.*)> (\d+) ([+-]\d+)')
+
+def die(msg, *args):
+ sys.stderr.write('ERROR: %s\n' % (msg % args))
+ sys.exit(1)
+
+def warn(msg, *args):
+ sys.stderr.write('WARNING: %s\n' % (msg % args))
+
+def gittz(tz):
+ return '%+03d%02d' % (tz / 3600, tz % 3600 / 60)
+
+def get_config(config):
+ cmd = ['git', 'config', '--get', config]
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, _ = process.communicate()
+ return output
+
+class Marks:
+
+ def __init__(self, path):
+ self.path = path
+ self.tips = {}
+ self.marks = {}
+ self.rev_marks = {}
+ self.last_mark = 0
+ self.load()
+
+ def load(self):
+ if not os.path.exists(self.path):
+ return
+
+ tmp = json.load(open(self.path))
+ self.tips = tmp['tips']
+ self.marks = tmp['marks']
+ self.last_mark = tmp['last-mark']
+
+ for rev, mark in self.marks.iteritems():
+ self.rev_marks[mark] = rev
+
+ def dict(self):
+ return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark }
+
+ def store(self):
+ json.dump(self.dict(), open(self.path, 'w'))
+
+ def __str__(self):
+ return str(self.dict())
+
+ def from_rev(self, rev):
+ return self.marks[rev]
+
+ def to_rev(self, mark):
+ return str(self.rev_marks[mark])
+
+ def next_mark(self):
+ self.last_mark += 1
+ return self.last_mark
+
+ def get_mark(self, rev):
+ self.last_mark += 1
+ self.marks[rev] = self.last_mark
+ return self.last_mark
+
+ def is_marked(self, rev):
+ return rev in self.marks
+
+ def new_mark(self, rev, mark):
+ self.marks[rev] = mark
+ self.rev_marks[mark] = rev
+ self.last_mark = mark
+
+ def get_tip(self, branch):
+ try:
+ return str(self.tips[branch])
+ except KeyError:
+ return None
+
+ def set_tip(self, branch, tip):
+ self.tips[branch] = tip
+
+class Parser:
+
+ def __init__(self, repo):
+ self.repo = repo
+ self.line = self.get_line()
+
+ def get_line(self):
+ return sys.stdin.readline().strip()
+
+ def __getitem__(self, i):
+ return self.line.split()[i]
+
+ def check(self, word):
+ return self.line.startswith(word)
+
+ def each_block(self, separator):
+ while self.line != separator:
+ yield self.line
+ self.line = self.get_line()
+
+ def __iter__(self):
+ return self.each_block('')
+
+ def next(self):
+ self.line = self.get_line()
+ if self.line == 'done':
+ self.line = None
+
+ def get_mark(self):
+ i = self.line.index(':') + 1
+ return int(self.line[i:])
+
+ def get_data(self):
+ if not self.check('data'):
+ return None
+ i = self.line.index(' ') + 1
+ size = int(self.line[i:])
+ return sys.stdin.read(size)
+
+ def get_author(self):
+ m = RAW_AUTHOR_RE.match(self.line)
+ if not m:
+ return None
+ _, name, email, date, tz = m.groups()
+ committer = '%s <%s>' % (name, email)
+ tz = int(tz)
+ tz = ((tz / 100) * 3600) + ((tz % 100) * 60)
+ return (committer, int(date), tz)
+
+def rev_to_mark(rev):
+ global marks
+ return marks.from_rev(rev)
+
+def mark_to_rev(mark):
+ global marks
+ return marks.to_rev(mark)
+
+def fixup_user(user):
+ name = mail = None
+ user = user.replace('"', '')
+ m = AUTHOR_RE.match(user)
+ if m:
+ name = m.group(1)
+ mail = m.group(2).strip()
+ else:
+ m = EMAIL_RE.match(user)
+ if m:
+ name = m.group(1)
+ mail = m.group(2)
+ else:
+ m = NAME_RE.match(user)
+ if m:
+ name = m.group(1).strip()
+
+ if not name:
+ name = 'unknown'
+ if not mail:
+ mail = 'Unknown'
+
+ return '%s <%s>' % (name, mail)
+
+def get_filechanges(cur, prev):
+ modified = {}
+ removed = {}
+
+ changes = cur.changes_from(prev)
+
+ def u(s):
+ return s.encode('utf-8')
+
+ for path, fid, kind in changes.added:
+ modified[u(path)] = fid
+ for path, fid, kind in changes.removed:
+ removed[u(path)] = None
+ for path, fid, kind, mod, _ in changes.modified:
+ modified[u(path)] = fid
+ for oldpath, newpath, fid, kind, mod, _ in changes.renamed:
+ removed[u(oldpath)] = None
+ if kind == 'directory':
+ lst = cur.list_files(from_dir=newpath, recursive=True)
+ for path, file_class, kind, fid, entry in lst:
+ if kind != 'directory':
+ modified[u(newpath + '/' + path)] = fid
+ else:
+ modified[u(newpath)] = fid
+
+ return modified, removed
+
+def export_files(tree, files):
+ global marks, filenodes
+
+ final = []
+ for path, fid in files.iteritems():
+ kind = tree.kind(fid)
+
+ h = tree.get_file_sha1(fid)
+
+ if kind == 'symlink':
+ d = tree.get_symlink_target(fid)
+ mode = '120000'
+ elif kind == 'file':
+
+ if tree.is_executable(fid):
+ mode = '100755'
+ else:
+ mode = '100644'
+
+ # is the blob already exported?
+ if h in filenodes:
+ mark = filenodes[h]
+ final.append((mode, mark, path))
+ continue
+
+ d = tree.get_file_text(fid)
+ elif kind == 'directory':
+ continue
+ else:
+ die("Unhandled kind '%s' for path '%s'" % (kind, path))
+
+ mark = marks.next_mark()
+ filenodes[h] = mark
+
+ print "blob"
+ print "mark :%u" % mark
+ print "data %d" % len(d)
+ print d
+
+ final.append((mode, mark, path))
+
+ return final
+
+def export_branch(repo, name):
+ global prefix
+
+ ref = '%s/heads/%s' % (prefix, name)
+ tip = marks.get_tip(name)
+
+ branch = get_remote_branch(name)
+ repo = branch.repository
+
+ branch.lock_read()
+ revs = branch.iter_merge_sorted_revisions(None, tip, 'exclude', 'forward')
+ try:
+ tip_revno = branch.revision_id_to_revno(tip)
+ last_revno, _ = branch.last_revision_info()
+ total = last_revno - tip_revno
+ except bzrlib.errors.NoSuchRevision:
+ tip_revno = 0
+ total = 0
+
+ for revid, _, seq, _ in revs:
+
+ if marks.is_marked(revid):
+ continue
+
+ rev = repo.get_revision(revid)
+ revno = seq[0]
+
+ parents = rev.parent_ids
+ time = rev.timestamp
+ tz = rev.timezone
+ committer = rev.committer.encode('utf-8')
+ committer = "%s %u %s" % (fixup_user(committer), time, gittz(tz))
+ authors = rev.get_apparent_authors()
+ if authors:
+ author = authors[0].encode('utf-8')
+ author = "%s %u %s" % (fixup_user(author), time, gittz(tz))
+ else:
+ author = committer
+ msg = rev.message.encode('utf-8')
+
+ msg += '\n'
+
+ if len(parents) == 0:
+ parent = bzrlib.revision.NULL_REVISION
+ else:
+ parent = parents[0]
+
+ cur_tree = repo.revision_tree(revid)
+ prev = repo.revision_tree(parent)
+ modified, removed = get_filechanges(cur_tree, prev)
+
+ modified_final = export_files(cur_tree, modified)
+
+ if len(parents) == 0:
+ print 'reset %s' % ref
+
+ print "commit %s" % ref
+ print "mark :%d" % (marks.get_mark(revid))
+ print "author %s" % (author)
+ print "committer %s" % (committer)
+ print "data %d" % (len(msg))
+ print msg
+
+ for i, p in enumerate(parents):
+ try:
+ m = rev_to_mark(p)
+ except KeyError:
+ # ghost?
+ continue
+ if i == 0:
+ print "from :%s" % m
+ else:
+ print "merge :%s" % m
+
+ for f in removed:
+ print "D %s" % (f,)
+ for f in modified_final:
+ print "M %s :%u %s" % f
+ print
+
+ if len(seq) > 1:
+ # let's skip branch revisions from the progress report
+ continue
+
+ progress = (revno - tip_revno)
+ if (progress % 100 == 0):
+ if total:
+ print "progress revision %d '%s' (%d/%d)" % (revno, name, progress, total)
+ else:
+ print "progress revision %d '%s' (%d)" % (revno, name, progress)
+
+ branch.unlock()
+
+ revid = branch.last_revision()
+
+ # make sure the ref is updated
+ print "reset %s" % ref
+ print "from :%u" % rev_to_mark(revid)
+ print
+
+ marks.set_tip(name, revid)
+
+def export_tag(repo, name):
+ global tags, prefix
+
+ ref = '%s/tags/%s' % (prefix, name)
+ print "reset %s" % ref
+ print "from :%u" % rev_to_mark(tags[name])
+ print
+
+def do_import(parser):
+ global dirname
+
+ repo = parser.repo
+ path = os.path.join(dirname, 'marks-git')
+
+ print "feature done"
+ if os.path.exists(path):
+ print "feature import-marks=%s" % path
+ print "feature export-marks=%s" % path
+ print "feature force"
+ sys.stdout.flush()
+
+ while parser.check('import'):
+ ref = parser[1]
+ if ref.startswith('refs/heads/'):
+ name = ref[len('refs/heads/'):]
+ export_branch(repo, name)
+ if ref.startswith('refs/tags/'):
+ name = ref[len('refs/tags/'):]
+ export_tag(repo, name)
+ parser.next()
+
+ print 'done'
+
+ sys.stdout.flush()
+
+def parse_blob(parser):
+ global blob_marks
+
+ parser.next()
+ mark = parser.get_mark()
+ parser.next()
+ data = parser.get_data()
+ blob_marks[mark] = data
+ parser.next()
+
+class CustomTree():
+
+ def __init__(self, branch, revid, parents, files):
+ global files_cache
+
+ self.updates = {}
+ self.branch = branch
+
+ def copy_tree(revid):
+ files = files_cache[revid] = {}
+ branch.lock_read()
+ tree = branch.repository.revision_tree(revid)
+ try:
+ for path, entry in tree.iter_entries_by_dir():
+ files[path] = [entry.file_id, None]
+ finally:
+ branch.unlock()
+ return files
+
+ if len(parents) == 0:
+ self.base_id = bzrlib.revision.NULL_REVISION
+ self.base_files = {}
+ else:
+ self.base_id = parents[0]
+ self.base_files = files_cache.get(self.base_id, None)
+ if not self.base_files:
+ self.base_files = copy_tree(self.base_id)
+
+ self.files = files_cache[revid] = self.base_files.copy()
+ self.rev_files = {}
+
+ for path, data in self.files.iteritems():
+ fid, mark = data
+ self.rev_files[fid] = [path, mark]
+
+ for path, f in files.iteritems():
+ fid, mark = self.files.get(path, [None, None])
+ if not fid:
+ fid = bzrlib.generate_ids.gen_file_id(path)
+ f['path'] = path
+ self.rev_files[fid] = [path, mark]
+ self.updates[fid] = f
+
+ def last_revision(self):
+ return self.base_id
+
+ def iter_changes(self):
+ changes = []
+
+ def get_parent(dirname, basename):
+ parent_fid, mark = self.base_files.get(dirname, [None, None])
+ if parent_fid:
+ return parent_fid
+ parent_fid, mark = self.files.get(dirname, [None, None])
+ if parent_fid:
+ return parent_fid
+ if basename == '':
+ return None
+ fid = bzrlib.generate_ids.gen_file_id(path)
+ add_entry(fid, dirname, 'directory')
+ return fid
+
+ def add_entry(fid, path, kind, mode = None):
+ dirname, basename = os.path.split(path)
+ parent_fid = get_parent(dirname, basename)
+
+ executable = False
+ if mode == '100755':
+ executable = True
+ elif mode == '120000':
+ kind = 'symlink'
+
+ change = (fid,
+ (None, path),
+ True,
+ (False, True),
+ (None, parent_fid),
+ (None, basename),
+ (None, kind),
+ (None, executable))
+ self.files[path] = [change[0], None]
+ changes.append(change)
+
+ def update_entry(fid, path, kind, mode = None):
+ dirname, basename = os.path.split(path)
+ parent_fid = get_parent(dirname, basename)
+
+ executable = False
+ if mode == '100755':
+ executable = True
+ elif mode == '120000':
+ kind = 'symlink'
+
+ change = (fid,
+ (path, path),
+ True,
+ (True, True),
+ (None, parent_fid),
+ (None, basename),
+ (None, kind),
+ (None, executable))
+ self.files[path] = [change[0], None]
+ changes.append(change)
+
+ def remove_entry(fid, path, kind):
+ dirname, basename = os.path.split(path)
+ parent_fid = get_parent(dirname, basename)
+ change = (fid,
+ (path, None),
+ True,
+ (True, False),
+ (parent_fid, None),
+ (None, None),
+ (None, None),
+ (None, None))
+ del self.files[path]
+ changes.append(change)
+
+ for fid, f in self.updates.iteritems():
+ path = f['path']
+
+ if 'deleted' in f:
+ remove_entry(fid, path, 'file')
+ continue
+
+ if path in self.base_files:
+ update_entry(fid, path, 'file', f['mode'])
+ else:
+ add_entry(fid, path, 'file', f['mode'])
+
+ self.files[path][1] = f['mark']
+ self.rev_files[fid][1] = f['mark']
+
+ return changes
+
+ def get_content(self, file_id):
+ path, mark = self.rev_files[file_id]
+ if mark:
+ return blob_marks[mark]
+
+ # last resort
+ tree = self.branch.repository.revision_tree(self.base_id)
+ return tree.get_file_text(file_id)
+
+ def get_file_with_stat(self, file_id, path=None):
+ content = self.get_content(file_id)
+ return (StringIO.StringIO(content), None)
+
+ def get_symlink_target(self, file_id):
+ return self.get_content(file_id)
+
+ def id2path(self, file_id):
+ path, mark = self.rev_files[file_id]
+ return path
+
+def c_style_unescape(string):
+ if string[0] == string[-1] == '"':
+ return string.decode('string-escape')[1:-1]
+ return string
+
+def parse_commit(parser):
+ global marks, blob_marks, parsed_refs
+ global mode
+
+ parents = []
+
+ ref = parser[1]
+ parser.next()
+
+ if ref.startswith('refs/heads/'):
+ name = ref[len('refs/heads/'):]
+ branch = get_remote_branch(name)
+ else:
+ die('unknown ref')
+
+ commit_mark = parser.get_mark()
+ parser.next()
+ author = parser.get_author()
+ parser.next()
+ committer = parser.get_author()
+ parser.next()
+ data = parser.get_data()
+ parser.next()
+ if parser.check('from'):
+ parents.append(parser.get_mark())
+ parser.next()
+ while parser.check('merge'):
+ parents.append(parser.get_mark())
+ parser.next()
+
+ # fast-export adds an extra newline
+ if data[-1] == '\n':
+ data = data[:-1]
+
+ files = {}
+
+ for line in parser:
+ if parser.check('M'):
+ t, m, mark_ref, path = line.split(' ', 3)
+ mark = int(mark_ref[1:])
+ f = { 'mode' : m, 'mark' : mark }
+ elif parser.check('D'):
+ t, path = line.split(' ', 1)
+ f = { 'deleted' : True }
+ else:
+ die('Unknown file command: %s' % line)
+ path = c_style_unescape(path).decode('utf-8')
+ files[path] = f
+
+ committer, date, tz = committer
+ parents = [mark_to_rev(p) for p in parents]
+ revid = bzrlib.generate_ids.gen_revision_id(committer, date)
+ props = {}
+ props['branch-nick'] = branch.nick
+
+ mtree = CustomTree(branch, revid, parents, files)
+ changes = mtree.iter_changes()
+
+ branch.lock_write()
+ try:
+ builder = branch.get_commit_builder(parents, None, date, tz, committer, props, revid)
+ try:
+ list(builder.record_iter_changes(mtree, mtree.last_revision(), changes))
+ builder.finish_inventory()
+ builder.commit(data.decode('utf-8', 'replace'))
+ except Exception, e:
+ builder.abort()
+ raise
+ finally:
+ branch.unlock()
+
+ parsed_refs[ref] = revid
+ marks.new_mark(revid, commit_mark)
+
+def parse_reset(parser):
+ global parsed_refs
+
+ ref = parser[1]
+ parser.next()
+
+ # ugh
+ if parser.check('commit'):
+ parse_commit(parser)
+ return
+ if not parser.check('from'):
+ return
+ from_mark = parser.get_mark()
+ parser.next()
+
+ parsed_refs[ref] = mark_to_rev(from_mark)
+
+def do_export(parser):
+ global parsed_refs, dirname
+
+ parser.next()
+
+ for line in parser.each_block('done'):
+ if parser.check('blob'):
+ parse_blob(parser)
+ elif parser.check('commit'):
+ parse_commit(parser)
+ elif parser.check('reset'):
+ parse_reset(parser)
+ elif parser.check('tag'):
+ pass
+ elif parser.check('feature'):
+ pass
+ else:
+ die('unhandled export command: %s' % line)
+
+ for ref, revid in parsed_refs.iteritems():
+ if ref.startswith('refs/heads/'):
+ name = ref[len('refs/heads/'):]
+ branch = get_remote_branch(name)
+ branch.generate_revision_history(revid, marks.get_tip(name))
+
+ if name in peers:
+ peer = bzrlib.branch.Branch.open(peers[name])
+ try:
+ peer.bzrdir.push_branch(branch, revision_id=revid)
+ except bzrlib.errors.DivergedBranches:
+ print "error %s non-fast forward" % ref
+ continue
+
+ try:
+ wt = branch.bzrdir.open_workingtree()
+ wt.update()
+ except bzrlib.errors.NoWorkingTree:
+ pass
+ elif ref.startswith('refs/tags/'):
+ # TODO: implement tag push
+ print "error %s pushing tags not supported" % ref
+ continue
+ else:
+ # transport-helper/fast-export bugs
+ continue
+
+ print "ok %s" % ref
+
+ print
+
+def do_capabilities(parser):
+ global dirname
+
+ print "import"
+ print "export"
+ print "refspec refs/heads/*:%s/heads/*" % prefix
+ print "refspec refs/tags/*:%s/tags/*" % prefix
+
+ path = os.path.join(dirname, 'marks-git')
+
+ if os.path.exists(path):
+ print "*import-marks %s" % path
+ print "*export-marks %s" % path
+
+ print
+
+def ref_is_valid(name):
+ return not any(c in name for c in '~^: \\')
+
+def do_list(parser):
+ global tags
+
+ master_branch = None
+
+ for name in branches:
+ if not master_branch:
+ master_branch = name
+ print "? refs/heads/%s" % name
+
+ branch = get_remote_branch(master_branch)
+ branch.lock_read()
+ for tag, revid in branch.tags.get_tag_dict().items():
+ try:
+ branch.revision_id_to_dotted_revno(revid)
+ except bzrlib.errors.NoSuchRevision:
+ continue
+ if not ref_is_valid(tag):
+ continue
+ print "? refs/tags/%s" % tag
+ tags[tag] = revid
+ branch.unlock()
+
+ print "@refs/heads/%s HEAD" % master_branch
+ print
+
+def clone(path, remote_branch):
+ try:
+ bdir = bzrlib.bzrdir.BzrDir.create(path)
+ except bzrlib.errors.AlreadyControlDirError:
+ bdir = bzrlib.bzrdir.BzrDir.open(path)
+ repo = bdir.find_repository()
+ repo.fetch(remote_branch.repository)
+ return remote_branch.sprout(bdir, repository=repo)
+
+def get_remote_branch(name):
+ global dirname, branches
+
+ remote_branch = bzrlib.branch.Branch.open(branches[name])
+ if isinstance(remote_branch.user_transport, bzrlib.transport.local.LocalTransport):
+ return remote_branch
+
+ branch_path = os.path.join(dirname, 'clone', name)
+
+ try:
+ branch = bzrlib.branch.Branch.open(branch_path)
+ except bzrlib.errors.NotBranchError:
+ # clone
+ branch = clone(branch_path, remote_branch)
+ else:
+ # pull
+ try:
+ branch.pull(remote_branch, overwrite=True)
+ except bzrlib.errors.DivergedBranches:
+ # use remote branch for now
+ return remote_branch
+
+ return branch
+
+def find_branches(repo):
+ transport = repo.bzrdir.root_transport
+
+ for fn in transport.iter_files_recursive():
+ if not fn.endswith('.bzr/branch-format'):
+ continue
+
+ name = subdir = fn[:-len('/.bzr/branch-format')]
+ name = name if name != '' else 'master'
+ name = name.replace('/', '+')
+
+ try:
+ cur = transport.clone(subdir)
+ branch = bzrlib.branch.Branch.open_from_transport(cur)
+ except bzrlib.errors.NotBranchError:
+ continue
+ else:
+ yield name, branch.base
+
+def get_repo(url, alias):
+ global dirname, peer, branches
+
+ normal_url = bzrlib.urlutils.normalize_url(url)
+ origin = bzrlib.bzrdir.BzrDir.open(url)
+ is_local = isinstance(origin.transport, bzrlib.transport.local.LocalTransport)
+
+ shared_path = os.path.join(gitdir, 'bzr')
+ try:
+ shared_dir = bzrlib.bzrdir.BzrDir.open(shared_path)
+ except bzrlib.errors.NotBranchError:
+ shared_dir = bzrlib.bzrdir.BzrDir.create(shared_path)
+ try:
+ shared_repo = shared_dir.open_repository()
+ except bzrlib.errors.NoRepositoryPresent:
+ shared_repo = shared_dir.create_repository(shared=True)
+
+ if not is_local:
+ clone_path = os.path.join(dirname, 'clone')
+ if not os.path.exists(clone_path):
+ os.mkdir(clone_path)
+ else:
+ # check and remove old organization
+ try:
+ bdir = bzrlib.bzrdir.BzrDir.open(clone_path)
+ bdir.destroy_repository()
+ except bzrlib.errors.NotBranchError:
+ pass
+ except bzrlib.errors.NoRepositoryPresent:
+ pass
+
+ wanted = get_config('remote-bzr.branches').rstrip().split(', ')
+ # stupid python
+ wanted = [e for e in wanted if e]
+
+ if not wanted:
+ try:
+ repo = origin.open_repository()
+ if not repo.user_transport.listable():
+ # this repository is not usable for us
+ raise bzrlib.errors.NoRepositoryPresent(repo.bzrdir)
+ except bzrlib.errors.NoRepositoryPresent:
+ wanted = ['master']
+
+ if wanted:
+ def list_wanted(url, wanted):
+ for name in wanted:
+ subdir = name if name != 'master' else ''
+ yield name, bzrlib.urlutils.join(url, subdir)
+
+ branch_list = list_wanted(url, wanted)
+ else:
+ branch_list = find_branches(repo)
+
+ for name, url in branch_list:
+ if not is_local:
+ peers[name] = url
+ branches[name] = url
+
+ return origin
+
+def fix_path(alias, orig_url):
+ url = urlparse.urlparse(orig_url, 'file')
+ if url.scheme != 'file' or os.path.isabs(url.path):
+ return
+ abs_url = urlparse.urljoin("%s/" % os.getcwd(), orig_url)
+ cmd = ['git', 'config', 'remote.%s.url' % alias, "bzr::%s" % abs_url]
+ subprocess.call(cmd)
+
+def main(args):
+ global marks, prefix, gitdir, dirname
+ global tags, filenodes
+ global blob_marks
+ global parsed_refs
+ global files_cache
+ global is_tmp
+ global branches, peers
+
+ alias = args[1]
+ url = args[2]
+
+ tags = {}
+ filenodes = {}
+ blob_marks = {}
+ parsed_refs = {}
+ files_cache = {}
+ marks = None
+ branches = {}
+ peers = {}
+
+ if alias[5:] == url:
+ is_tmp = True
+ alias = hashlib.sha1(alias).hexdigest()
+ else:
+ is_tmp = False
+
+ prefix = 'refs/bzr/%s' % alias
+ gitdir = os.environ['GIT_DIR']
+ dirname = os.path.join(gitdir, 'bzr', alias)
+
+ if not is_tmp:
+ fix_path(alias, url)
+
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ if hasattr(bzrlib.ui.ui_factory, 'be_quiet'):
+ bzrlib.ui.ui_factory.be_quiet(True)
+
+ repo = get_repo(url, alias)
+
+ marks_path = os.path.join(dirname, 'marks-int')
+ marks = Marks(marks_path)
+
+ parser = Parser(repo)
+ for line in parser:
+ if parser.check('capabilities'):
+ do_capabilities(parser)
+ elif parser.check('list'):
+ do_list(parser)
+ elif parser.check('import'):
+ do_import(parser)
+ elif parser.check('export'):
+ do_export(parser)
+ else:
+ die('unhandled command: %s' % line)
+ sys.stdout.flush()
+
+def bye():
+ if not marks:
+ return
+ if not is_tmp:
+ marks.store()
+ else:
+ shutil.rmtree(dirname)
+
+atexit.register(bye)
+sys.exit(main(sys.argv))
diff --git a/contrib/remote-helpers/git-remote-hg b/contrib/remote-helpers/git-remote-hg
new file mode 100755
index 0000000..0194c67
--- /dev/null
+++ b/contrib/remote-helpers/git-remote-hg
@@ -0,0 +1,1220 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 Felipe Contreras
+#
+
+# Inspired by Rocco Rutte's hg-fast-export
+
+# Just copy to your ~/bin, or anywhere in your $PATH.
+# Then you can clone with:
+# git clone hg::/path/to/mercurial/repo/
+#
+# For remote repositories a local clone is stored in
+# "$GIT_DIR/hg/origin/clone/.hg/".
+
+from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions, discovery, util
+
+import re
+import sys
+import os
+import json
+import shutil
+import subprocess
+import urllib
+import atexit
+import urlparse, hashlib
+
+#
+# If you are not in hg-git-compat mode and want to disable the tracking of
+# named branches:
+# git config --global remote-hg.track-branches false
+#
+# If you want the equivalent of hg's clone/pull --insecure option:
+# git config --global remote-hg.insecure true
+#
+# If you want to switch to hg-git compatibility mode:
+# git config --global remote-hg.hg-git-compat true
+#
+# git:
+# Sensible defaults for git.
+# hg bookmarks are exported as git branches, hg branches are prefixed
+# with 'branches/', HEAD is a special case.
+#
+# hg:
+# Emulate hg-git.
+# Only hg bookmarks are exported as git branches.
+# Commits are modified to preserve hg information and allow bidirectionality.
+#
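+# For example, in a hypothetical repository with a bookmark 'work' and a named
+# branch 'stable', a 'git'-mode clone ends up with the remote branches 'work'
+# and 'branches/stable', while an 'hg'-mode clone (as described above) only
+# gets 'work'.
+#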
+
+NAME_RE = re.compile('^([^<>]+)')
+AUTHOR_RE = re.compile('^([^<>]+?)? ?<([^<>]*)>$')
+EMAIL_RE = re.compile('^([^<>]+[^ \\\t<>])?\\b(?:[ \\t<>]*?)\\b([^ \\t<>]+@[^ \\t<>]+)')
+AUTHOR_HG_RE = re.compile('^(.*?) ?<(.*?)(?:>(.+)?)?$')
+RAW_AUTHOR_RE = re.compile('^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)')
+
+VERSION = 2
+
+def die(msg, *args):
+ sys.stderr.write('ERROR: %s\n' % (msg % args))
+ sys.exit(1)
+
+def warn(msg, *args):
+ sys.stderr.write('WARNING: %s\n' % (msg % args))
+
+def gitmode(flags):
+ return 'l' in flags and '120000' or 'x' in flags and '100755' or '100644'
+
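+# hg stores the timezone as an offset in seconds west of UTC; convert it to
+# git's signed [+-]HHMM notation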
+def gittz(tz):
+ return '%+03d%02d' % (-tz / 3600, -tz % 3600 / 60)
+
+def hgmode(mode):
+ m = { '100755': 'x', '120000': 'l' }
+ return m.get(mode, '')
+
+def hghex(n):
+ return node.hex(n)
+
+def hgbin(n):
+ return node.bin(n)
+
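+# git ref names cannot contain spaces, so encode them as '___' (and back)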
+def hgref(ref):
+ return ref.replace('___', ' ')
+
+def gitref(ref):
+ return ref.replace(' ', '___')
+
+def check_version(*check):
+ if not hg_version:
+ return True
+ return hg_version >= check
+
+def get_config(config):
+ cmd = ['git', 'config', '--get', config]
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, _ = process.communicate()
+ return output
+
+def get_config_bool(config, default=False):
+ value = get_config(config).rstrip('\n')
+ if value == "true":
+ return True
+ elif value == "false":
+ return False
+ else:
+ return default
+
+class Marks:
+
+ def __init__(self, path, repo):
+ self.path = path
+ self.repo = repo
+ self.clear()
+ self.load()
+
+ if self.version < VERSION:
+ if self.version == 1:
+ self.upgrade_one()
+
+            # if the upgrade did not reach the current version, start over
+ if self.version < VERSION:
+ self.clear()
+ self.version = VERSION
+
+ def clear(self):
+ self.tips = {}
+ self.marks = {}
+ self.rev_marks = {}
+ self.last_mark = 0
+ self.version = 0
+
+ def load(self):
+ if not os.path.exists(self.path):
+ return
+
+ tmp = json.load(open(self.path))
+
+ self.tips = tmp['tips']
+ self.marks = tmp['marks']
+ self.last_mark = tmp['last-mark']
+ self.version = tmp.get('version', 1)
+
+ for rev, mark in self.marks.iteritems():
+ self.rev_marks[mark] = rev
+
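+    # version 1 marks files stored hg revision numbers; rewrite them in terms
+    # of changeset hashes (version 2)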
+ def upgrade_one(self):
+ def get_id(rev):
+ return hghex(self.repo.changelog.node(int(rev)))
+ self.tips = dict((name, get_id(rev)) for name, rev in self.tips.iteritems())
+ self.marks = dict((get_id(rev), mark) for rev, mark in self.marks.iteritems())
+ self.rev_marks = dict((mark, get_id(rev)) for mark, rev in self.rev_marks.iteritems())
+ self.version = 2
+
+ def dict(self):
+ return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark, 'version' : self.version }
+
+ def store(self):
+ json.dump(self.dict(), open(self.path, 'w'))
+
+ def __str__(self):
+ return str(self.dict())
+
+ def from_rev(self, rev):
+ return self.marks[rev]
+
+ def to_rev(self, mark):
+ return str(self.rev_marks[mark])
+
+ def next_mark(self):
+ self.last_mark += 1
+ return self.last_mark
+
+ def get_mark(self, rev):
+ self.last_mark += 1
+ self.marks[rev] = self.last_mark
+ return self.last_mark
+
+ def new_mark(self, rev, mark):
+ self.marks[rev] = mark
+ self.rev_marks[mark] = rev
+ self.last_mark = mark
+
+ def is_marked(self, rev):
+ return rev in self.marks
+
+ def get_tip(self, branch):
+ return str(self.tips[branch])
+
+ def set_tip(self, branch, tip):
+ self.tips[branch] = tip
+
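+# minimal reader for the remote-helper / fast-export command stream on stdin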
+class Parser:
+
+ def __init__(self, repo):
+ self.repo = repo
+ self.line = self.get_line()
+
+ def get_line(self):
+ return sys.stdin.readline().strip()
+
+ def __getitem__(self, i):
+ return self.line.split()[i]
+
+ def check(self, word):
+ return self.line.startswith(word)
+
+ def each_block(self, separator):
+ while self.line != separator:
+ yield self.line
+ self.line = self.get_line()
+
+ def __iter__(self):
+ return self.each_block('')
+
+ def next(self):
+ self.line = self.get_line()
+ if self.line == 'done':
+ self.line = None
+
+ def get_mark(self):
+ i = self.line.index(':') + 1
+ return int(self.line[i:])
+
+ def get_data(self):
+ if not self.check('data'):
+ return None
+ i = self.line.index(' ') + 1
+ size = int(self.line[i:])
+ return sys.stdin.read(size)
+
+ def get_author(self):
+ global bad_mail
+
+ ex = None
+ m = RAW_AUTHOR_RE.match(self.line)
+ if not m:
+ return None
+ _, name, email, date, tz = m.groups()
+ if name and 'ext:' in name:
+ m = re.match('^(.+?) ext:\((.+)\)$', name)
+ if m:
+ name = m.group(1)
+ ex = urllib.unquote(m.group(2))
+
+ if email != bad_mail:
+ if name:
+ user = '%s <%s>' % (name, email)
+ else:
+ user = '<%s>' % (email)
+ else:
+ user = name
+
+ if ex:
+ user += ex
+
+ tz = int(tz)
+ tz = ((tz / 100) * 3600) + ((tz % 100) * 60)
+ return (user, int(date), -tz)
+
+def fix_file_path(path):
+ if not os.path.isabs(path):
+ return path
+ return os.path.relpath(path, '/')
+
+def export_files(files):
+ global marks, filenodes
+
+ final = []
+ for f in files:
+ fid = node.hex(f.filenode())
+
+ if fid in filenodes:
+ mark = filenodes[fid]
+ else:
+ mark = marks.next_mark()
+ filenodes[fid] = mark
+ d = f.data()
+
+ print "blob"
+ print "mark :%u" % mark
+ print "data %d" % len(d)
+ print d
+
+ path = fix_file_path(f.path())
+ final.append((gitmode(f.flags()), mark, path))
+
+ return final
+
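+# return (files added or modified, files removed) between 'parent' and 'ctx'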
+def get_filechanges(repo, ctx, parent):
+ modified = set()
+ added = set()
+ removed = set()
+
+ # load earliest manifest first for caching reasons
+ prev = parent.manifest().copy()
+ cur = ctx.manifest()
+
+ for fn in cur:
+ if fn in prev:
+ if (cur.flags(fn) != prev.flags(fn) or cur[fn] != prev[fn]):
+ modified.add(fn)
+ del prev[fn]
+ else:
+ added.add(fn)
+ removed |= set(prev.keys())
+
+ return added | modified, removed
+
+def fixup_user_git(user):
+ name = mail = None
+ user = user.replace('"', '')
+ m = AUTHOR_RE.match(user)
+ if m:
+ name = m.group(1)
+ mail = m.group(2).strip()
+ else:
+ m = EMAIL_RE.match(user)
+ if m:
+ name = m.group(1)
+ mail = m.group(2)
+ else:
+ m = NAME_RE.match(user)
+ if m:
+ name = m.group(1).strip()
+ return (name, mail)
+
+def fixup_user_hg(user):
+ def sanitize(name):
+ # stole this from hg-git
+ return re.sub('[<>\n]', '?', name.lstrip('< ').rstrip('> '))
+
+ m = AUTHOR_HG_RE.match(user)
+ if m:
+ name = sanitize(m.group(1))
+ mail = sanitize(m.group(2))
+ ex = m.group(3)
+ if ex:
+ name += ' ext:(' + urllib.quote(ex) + ')'
+ else:
+ name = sanitize(user)
+ if '@' in user:
+ mail = name
+ else:
+ mail = None
+
+ return (name, mail)
+
+def fixup_user(user):
+ global mode, bad_mail
+
+ if mode == 'git':
+ name, mail = fixup_user_git(user)
+ else:
+ name, mail = fixup_user_hg(user)
+
+ if not name:
+ name = bad_name
+ if not mail:
+ mail = bad_mail
+
+ return '%s <%s>' % (name, mail)
+
+def updatebookmarks(repo, peer):
+ remotemarks = peer.listkeys('bookmarks')
+ localmarks = repo._bookmarks
+
+ if not remotemarks:
+ return
+
+ for k, v in remotemarks.iteritems():
+ localmarks[k] = hgbin(v)
+
+ if hasattr(localmarks, 'write'):
+ localmarks.write()
+ else:
+ bookmarks.write(repo)
+
+def get_repo(url, alias):
+ global dirname, peer
+
+ myui = ui.ui()
+ myui.setconfig('ui', 'interactive', 'off')
+ myui.fout = sys.stderr
+
+ if get_config_bool('remote-hg.insecure'):
+ myui.setconfig('web', 'cacerts', '')
+
+ extensions.loadall(myui)
+
+ if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
+ repo = hg.repository(myui, url)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ else:
+ shared_path = os.path.join(gitdir, 'hg')
+ if not os.path.exists(shared_path):
+ try:
+ hg.clone(myui, {}, url, shared_path, update=False, pull=True)
+ except:
+ die('Repository error')
+
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ local_path = os.path.join(dirname, 'clone')
+ if not os.path.exists(local_path):
+ hg.share(myui, shared_path, local_path, update=False)
+
+ repo = hg.repository(myui, local_path)
+ try:
+ peer = hg.peer(myui, {}, url)
+ except:
+ die('Repository error')
+ repo.pull(peer, heads=None, force=True)
+
+ updatebookmarks(repo, peer)
+
+ return repo
+
+def rev_to_mark(rev):
+ global marks
+ return marks.from_rev(rev.hex())
+
+def mark_to_rev(mark):
+ global marks
+ return marks.to_rev(mark)
+
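+# emit a fast-import stream (blobs, commits and a final reset) for every
+# revision from the last exported tip up to 'head' that is not marked yet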
+def export_ref(repo, name, kind, head):
+ global prefix, marks, mode
+
+ ename = '%s/%s' % (kind, name)
+ try:
+ tip = marks.get_tip(ename)
+ tip = repo[tip].rev()
+ except:
+ tip = 0
+
+ revs = xrange(tip, head.rev() + 1)
+ total = len(revs)
+
+ for rev in revs:
+
+ c = repo[rev]
+ node = c.node()
+
+ if marks.is_marked(c.hex()):
+ continue
+
+ (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(node)
+ rev_branch = extra['branch']
+
+ author = "%s %d %s" % (fixup_user(user), time, gittz(tz))
+ if 'committer' in extra:
+ user, time, tz = extra['committer'].rsplit(' ', 2)
+ committer = "%s %s %s" % (user, time, gittz(int(tz)))
+ else:
+ committer = author
+
+ parents = [repo[p] for p in repo.changelog.parentrevs(rev) if p >= 0]
+
+ if len(parents) == 0:
+ modified = c.manifest().keys()
+ removed = []
+ else:
+ modified, removed = get_filechanges(repo, c, parents[0])
+
+ desc += '\n'
+
+ if mode == 'hg':
+ extra_msg = ''
+
+ if rev_branch != 'default':
+ extra_msg += 'branch : %s\n' % rev_branch
+
+ renames = []
+ for f in c.files():
+ if f not in c.manifest():
+ continue
+ rename = c.filectx(f).renamed()
+ if rename:
+ renames.append((rename[0], f))
+
+ for e in renames:
+ extra_msg += "rename : %s => %s\n" % e
+
+ for key, value in extra.iteritems():
+ if key in ('author', 'committer', 'encoding', 'message', 'branch', 'hg-git'):
+ continue
+ else:
+ extra_msg += "extra : %s : %s\n" % (key, urllib.quote(value))
+
+ if extra_msg:
+ desc += '\n--HG--\n' + extra_msg
+
+ if len(parents) == 0 and rev:
+ print 'reset %s/%s' % (prefix, ename)
+
+ modified_final = export_files(c.filectx(f) for f in modified)
+
+ print "commit %s/%s" % (prefix, ename)
+ print "mark :%d" % (marks.get_mark(c.hex()))
+ print "author %s" % (author)
+ print "committer %s" % (committer)
+ print "data %d" % (len(desc))
+ print desc
+
+ if len(parents) > 0:
+ print "from :%s" % (rev_to_mark(parents[0]))
+ if len(parents) > 1:
+ print "merge :%s" % (rev_to_mark(parents[1]))
+
+ for f in removed:
+ print "D %s" % (fix_file_path(f))
+ for f in modified_final:
+ print "M %s :%u %s" % f
+ print
+
+ progress = (rev - tip)
+ if (progress % 100 == 0):
+ print "progress revision %d '%s' (%d/%d)" % (rev, name, progress, total)
+
+ # make sure the ref is updated
+ print "reset %s/%s" % (prefix, ename)
+ print "from :%u" % rev_to_mark(head)
+ print
+
+ marks.set_tip(ename, head.hex())
+
+def export_tag(repo, tag):
+ export_ref(repo, tag, 'tags', repo[hgref(tag)])
+
+def export_bookmark(repo, bmark):
+ head = bmarks[hgref(bmark)]
+ export_ref(repo, bmark, 'bookmarks', head)
+
+def export_branch(repo, branch):
+ tip = get_branch_tip(repo, branch)
+ head = repo[tip]
+ export_ref(repo, branch, 'branches', head)
+
+def export_head(repo):
+ global g_head
+ export_ref(repo, g_head[0], 'bookmarks', g_head[1])
+
+def do_capabilities(parser):
+ global prefix, dirname
+
+ print "import"
+ print "export"
+ print "refspec refs/heads/branches/*:%s/branches/*" % prefix
+ print "refspec refs/heads/*:%s/bookmarks/*" % prefix
+ print "refspec refs/tags/*:%s/tags/*" % prefix
+
+ path = os.path.join(dirname, 'marks-git')
+
+ if os.path.exists(path):
+ print "*import-marks %s" % path
+ print "*export-marks %s" % path
+ print "option"
+
+ print
+
+def branch_tip(branch):
+ return branches[branch][-1]
+
+def get_branch_tip(repo, branch):
+ global branches
+
+ heads = branches.get(hgref(branch), None)
+ if not heads:
+ return None
+
+ # verify there's only one head
+ if (len(heads) > 1):
+ warn("Branch '%s' has more than one head, consider merging" % branch)
+ return branch_tip(hgref(branch))
+
+ return heads[0]
+
+def list_head(repo, cur):
+ global g_head, bmarks, fake_bmark
+
+ if 'default' not in branches:
+ # empty repo
+ return
+
+ node = repo[branch_tip('default')]
+ head = 'master' if not 'master' in bmarks else 'default'
+ fake_bmark = head
+ bmarks[head] = node
+
+ head = gitref(head)
+ print "@refs/heads/%s HEAD" % head
+ g_head = (head, node)
+
+def do_list(parser):
+ global branches, bmarks, track_branches
+
+ repo = parser.repo
+ for bmark, node in bookmarks.listbookmarks(repo).iteritems():
+ bmarks[bmark] = repo[node]
+
+ cur = repo.dirstate.branch()
+ orig = peer if peer else repo
+
+ for branch, heads in orig.branchmap().iteritems():
+ # only open heads
+ heads = [h for h in heads if 'close' not in repo.changelog.read(h)[5]]
+ if heads:
+ branches[branch] = heads
+
+ list_head(repo, cur)
+
+ if track_branches:
+ for branch in branches:
+ print "? refs/heads/branches/%s" % gitref(branch)
+
+ for bmark in bmarks:
+ print "? refs/heads/%s" % gitref(bmark)
+
+ for tag, node in repo.tagslist():
+ if tag == 'tip':
+ continue
+ print "? refs/tags/%s" % gitref(tag)
+
+ print
+
+def do_import(parser):
+ repo = parser.repo
+
+ path = os.path.join(dirname, 'marks-git')
+
+ print "feature done"
+ if os.path.exists(path):
+ print "feature import-marks=%s" % path
+ print "feature export-marks=%s" % path
+ print "feature force"
+ sys.stdout.flush()
+
+ tmp = encoding.encoding
+ encoding.encoding = 'utf-8'
+
+    # let's get all the import lines
+ while parser.check('import'):
+ ref = parser[1]
+
+ if (ref == 'HEAD'):
+ export_head(repo)
+ elif ref.startswith('refs/heads/branches/'):
+ branch = ref[len('refs/heads/branches/'):]
+ export_branch(repo, branch)
+ elif ref.startswith('refs/heads/'):
+ bmark = ref[len('refs/heads/'):]
+ export_bookmark(repo, bmark)
+ elif ref.startswith('refs/tags/'):
+ tag = ref[len('refs/tags/'):]
+ export_tag(repo, tag)
+
+ parser.next()
+
+ encoding.encoding = tmp
+
+ print 'done'
+
+def parse_blob(parser):
+ global blob_marks
+
+ parser.next()
+ mark = parser.get_mark()
+ parser.next()
+ data = parser.get_data()
+ blob_marks[mark] = data
+ parser.next()
+
+def get_merge_files(repo, p1, p2, files):
+ for e in repo[p1].files():
+ if e not in files:
+ if e not in repo[p1].manifest():
+ continue
+ f = { 'ctx' : repo[p1][e] }
+ files[e] = f
+
+def parse_commit(parser):
+ global marks, blob_marks, parsed_refs
+ global mode
+
+ from_mark = merge_mark = None
+
+ ref = parser[1]
+ parser.next()
+
+ commit_mark = parser.get_mark()
+ parser.next()
+ author = parser.get_author()
+ parser.next()
+ committer = parser.get_author()
+ parser.next()
+ data = parser.get_data()
+ parser.next()
+ if parser.check('from'):
+ from_mark = parser.get_mark()
+ parser.next()
+ if parser.check('merge'):
+ merge_mark = parser.get_mark()
+ parser.next()
+ if parser.check('merge'):
+ die('octopus merges are not supported yet')
+
+ # fast-export adds an extra newline
+ if data[-1] == '\n':
+ data = data[:-1]
+
+ files = {}
+
+ for line in parser:
+ if parser.check('M'):
+ t, m, mark_ref, path = line.split(' ', 3)
+ mark = int(mark_ref[1:])
+ f = { 'mode' : hgmode(m), 'data' : blob_marks[mark] }
+ elif parser.check('D'):
+ t, path = line.split(' ', 1)
+ f = { 'deleted' : True }
+ else:
+ die('Unknown file command: %s' % line)
+ files[path] = f
+
+    # in a dry run we only need to create the hg commits when pushing through
+    # an internal proxy repo (i.e. there is a remote peer); skip them otherwise
+ if dry_run and not peer:
+ parsed_refs[ref] = None
+ return
+
+ def getfilectx(repo, memctx, f):
+ of = files[f]
+ if 'deleted' in of:
+ raise IOError
+ if 'ctx' in of:
+ return of['ctx']
+ is_exec = of['mode'] == 'x'
+ is_link = of['mode'] == 'l'
+ rename = of.get('rename', None)
+ return context.memfilectx(f, of['data'],
+ is_link, is_exec, rename)
+
+ repo = parser.repo
+
+ user, date, tz = author
+ extra = {}
+
+ if committer != author:
+ extra['committer'] = "%s %u %u" % committer
+
+ if from_mark:
+ p1 = mark_to_rev(from_mark)
+ else:
+ p1 = '0' * 40
+
+ if merge_mark:
+ p2 = mark_to_rev(merge_mark)
+ else:
+ p2 = '0' * 40
+
+ #
+ # If files changed from any of the parents, hg wants to know, but in git if
+ # nothing changed from the first parent, nothing changed.
+ #
+ if merge_mark:
+ get_merge_files(repo, p1, p2, files)
+
+ # Check if the ref is supposed to be a named branch
+ if ref.startswith('refs/heads/branches/'):
+ branch = ref[len('refs/heads/branches/'):]
+ extra['branch'] = hgref(branch)
+
+ if mode == 'hg':
+ i = data.find('\n--HG--\n')
+ if i >= 0:
+ tmp = data[i + len('\n--HG--\n'):].strip()
+ for k, v in [e.split(' : ', 1) for e in tmp.split('\n')]:
+ if k == 'rename':
+ old, new = v.split(' => ', 1)
+ files[new]['rename'] = old
+ elif k == 'branch':
+ extra[k] = v
+ elif k == 'extra':
+ ek, ev = v.split(' : ', 1)
+ extra[ek] = urllib.unquote(ev)
+ data = data[:i]
+
+ ctx = context.memctx(repo, (p1, p2), data,
+ files.keys(), getfilectx,
+ user, (date, tz), extra)
+
+ tmp = encoding.encoding
+ encoding.encoding = 'utf-8'
+
+ node = hghex(repo.commitctx(ctx))
+
+ encoding.encoding = tmp
+
+ parsed_refs[ref] = node
+ marks.new_mark(node, commit_mark)
+
+def parse_reset(parser):
+ global parsed_refs
+
+ ref = parser[1]
+ parser.next()
+    # a reset without 'from' may be followed directly by a commit command
+ if parser.check('commit'):
+ parse_commit(parser)
+ return
+ if not parser.check('from'):
+ return
+ from_mark = parser.get_mark()
+ parser.next()
+
+ try:
+ rev = mark_to_rev(from_mark)
+ except KeyError:
+ rev = None
+ parsed_refs[ref] = rev
+
+def parse_tag(parser):
+ name = parser[1]
+ parser.next()
+ from_mark = parser.get_mark()
+ parser.next()
+ tagger = parser.get_author()
+ parser.next()
+ data = parser.get_data()
+ parser.next()
+
+ parsed_tags[name] = (tagger, data)
+
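+# record the tag the Mercurial way: commit a new '.hgtags' entry on the branch
+# of the tagged changeset and return the resulting node and branch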
+def write_tag(repo, tag, node, msg, author):
+ branch = repo[node].branch()
+ tip = branch_tip(branch)
+ tip = repo[tip]
+
+ def getfilectx(repo, memctx, f):
+ try:
+ fctx = tip.filectx(f)
+ data = fctx.data()
+ except error.ManifestLookupError:
+ data = ""
+ content = data + "%s %s\n" % (node, tag)
+ return context.memfilectx(f, content, False, False, None)
+
+ p1 = tip.hex()
+ p2 = '0' * 40
+ if author:
+ user, date, tz = author
+ date_tz = (date, tz)
+ else:
+ cmd = ['git', 'var', 'GIT_COMMITTER_IDENT']
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, _ = process.communicate()
+ m = re.match('^.* <.*>', output)
+ if m:
+ user = m.group(0)
+ else:
+ user = repo.ui.username()
+ date_tz = None
+
+ ctx = context.memctx(repo, (p1, p2), msg,
+ ['.hgtags'], getfilectx,
+ user, date_tz, {'branch' : branch})
+
+ tmp = encoding.encoding
+ encoding.encoding = 'utf-8'
+
+ tagnode = repo.commitctx(ctx)
+
+ encoding.encoding = tmp
+
+ return (tagnode, branch)
+
+def checkheads_bmark(repo, ref, ctx):
+ bmark = ref[len('refs/heads/'):]
+ if not bmark in bmarks:
+ # new bmark
+ return True
+
+ ctx_old = bmarks[bmark]
+ ctx_new = ctx
+ if not repo.changelog.descendant(ctx_old.rev(), ctx_new.rev()):
+ if force_push:
+ print "ok %s forced update" % ref
+ else:
+ print "error %s non-fast forward" % ref
+ return False
+
+ return True
+
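+# report a non-fast-forward error for every update that does not descend from
+# the current remote heads, unless a forced push was requested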
+def checkheads(repo, remote, p_revs):
+
+ remotemap = remote.branchmap()
+ if not remotemap:
+ # empty repo
+ return True
+
+ new = {}
+ ret = True
+
+ for node, ref in p_revs.iteritems():
+ ctx = repo[node]
+ branch = ctx.branch()
+ if not branch in remotemap:
+ # new branch
+ continue
+ if not ref.startswith('refs/heads/branches'):
+ if ref.startswith('refs/heads/'):
+ if not checkheads_bmark(repo, ref, ctx):
+ ret = False
+
+ # only check branches
+ continue
+ new.setdefault(branch, []).append(ctx.rev())
+
+ for branch, heads in new.iteritems():
+ old = [repo.changelog.rev(x) for x in remotemap[branch]]
+ for rev in heads:
+ if check_version(2, 3):
+ ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
+ else:
+ ancestors = repo.changelog.ancestors(rev)
+ found = False
+
+ for x in old:
+ if x in ancestors:
+ found = True
+ break
+
+ if found:
+ continue
+
+ node = repo.changelog.node(rev)
+ ref = p_revs[node]
+ if force_push:
+ print "ok %s forced update" % ref
+ else:
+ print "error %s non-fast forward" % ref
+ ret = False
+
+ return ret
+
+def push_unsafe(repo, remote, parsed_refs, p_revs):
+
+ force = force_push
+
+ fci = discovery.findcommonincoming
+ commoninc = fci(repo, remote, force=force)
+ common, _, remoteheads = commoninc
+
+ if not checkheads(repo, remote, p_revs):
+ return None
+
+ cg = repo.getbundle('push', heads=list(p_revs), common=common)
+
+ unbundle = remote.capable('unbundle')
+ if unbundle:
+ if force:
+ remoteheads = ['force']
+ return remote.unbundle(cg, remoteheads, 'push')
+ else:
+ return remote.addchangegroup(cg, 'push', repo.url())
+
+def push(repo, remote, parsed_refs, p_revs):
+ if hasattr(remote, 'canpush') and not remote.canpush():
+ print "error cannot push"
+
+ if not p_revs:
+ # nothing to push
+ return
+
+ lock = None
+ unbundle = remote.capable('unbundle')
+ if not unbundle:
+ lock = remote.lock()
+ try:
+ ret = push_unsafe(repo, remote, parsed_refs, p_revs)
+ finally:
+ if lock is not None:
+ lock.release()
+
+ return ret
+
+def check_tip(ref, kind, name, heads):
+ try:
+ ename = '%s/%s' % (kind, name)
+ tip = marks.get_tip(ename)
+ except KeyError:
+ return True
+ else:
+ return tip in heads
+
+def do_export(parser):
+ global parsed_refs, bmarks, peer
+
+ p_bmarks = []
+ p_revs = {}
+
+ parser.next()
+
+ for line in parser.each_block('done'):
+ if parser.check('blob'):
+ parse_blob(parser)
+ elif parser.check('commit'):
+ parse_commit(parser)
+ elif parser.check('reset'):
+ parse_reset(parser)
+ elif parser.check('tag'):
+ parse_tag(parser)
+ elif parser.check('feature'):
+ pass
+ else:
+ die('unhandled export command: %s' % line)
+
+ need_fetch = False
+
+ for ref, node in parsed_refs.iteritems():
+ bnode = hgbin(node) if node else None
+ if ref.startswith('refs/heads/branches'):
+ branch = ref[len('refs/heads/branches/'):]
+ if branch in branches and bnode in branches[branch]:
+ # up to date
+ continue
+
+ if peer:
+ remotemap = peer.branchmap()
+ if remotemap and branch in remotemap:
+ heads = [hghex(e) for e in remotemap[branch]]
+ if not check_tip(ref, 'branches', branch, heads):
+ print "error %s fetch first" % ref
+ need_fetch = True
+ continue
+
+ p_revs[bnode] = ref
+ print "ok %s" % ref
+ elif ref.startswith('refs/heads/'):
+ bmark = ref[len('refs/heads/'):]
+ new = node
+ old = bmarks[bmark].hex() if bmark in bmarks else ''
+
+ if old == new:
+ continue
+
+ print "ok %s" % ref
+ if bmark != fake_bmark and \
+ not (bmark == 'master' and bmark not in parser.repo._bookmarks):
+ p_bmarks.append((ref, bmark, old, new))
+
+ if peer:
+ remote_old = peer.listkeys('bookmarks').get(bmark)
+ if remote_old:
+ if not check_tip(ref, 'bookmarks', bmark, remote_old):
+ print "error %s fetch first" % ref
+ need_fetch = True
+ continue
+
+ p_revs[bnode] = ref
+ elif ref.startswith('refs/tags/'):
+ if dry_run:
+ print "ok %s" % ref
+ continue
+ tag = ref[len('refs/tags/'):]
+ tag = hgref(tag)
+ author, msg = parsed_tags.get(tag, (None, None))
+ if mode == 'git':
+ if not msg:
+ msg = 'Added tag %s for changeset %s' % (tag, node[:12]);
+ tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
+ p_revs[tagnode] = 'refs/heads/branches/' + gitref(branch)
+ else:
+ fp = parser.repo.opener('localtags', 'a')
+ fp.write('%s %s\n' % (node, tag))
+ fp.close()
+ p_revs[bnode] = ref
+ print "ok %s" % ref
+ else:
+ # transport-helper/fast-export bugs
+ continue
+
+ if need_fetch:
+ print
+ return
+
+ if dry_run:
+ if peer and not force_push:
+ checkheads(parser.repo, peer, p_revs)
+ print
+ return
+
+ if peer:
+ if not push(parser.repo, peer, parsed_refs, p_revs):
+ # do not update bookmarks
+ print
+ return
+
+ # update remote bookmarks
+ remote_bmarks = peer.listkeys('bookmarks')
+ for ref, bmark, old, new in p_bmarks:
+ if force_push:
+ old = remote_bmarks.get(bmark, '')
+ if not peer.pushkey('bookmarks', bmark, old, new):
+ print "error %s" % ref
+ else:
+ # update local bookmarks
+ for ref, bmark, old, new in p_bmarks:
+ if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
+ print "error %s" % ref
+
+ print
+
+def do_option(parser):
+ global dry_run, force_push
+ _, key, value = parser.line.split(' ')
+ if key == 'dry-run':
+ dry_run = (value == 'true')
+ print 'ok'
+ elif key == 'force':
+ force_push = (value == 'true')
+ print 'ok'
+ else:
+ print 'unsupported'
+
+def fix_path(alias, repo, orig_url):
+ url = urlparse.urlparse(orig_url, 'file')
+ if url.scheme != 'file' or os.path.isabs(url.path):
+ return
+ abs_url = urlparse.urljoin("%s/" % os.getcwd(), orig_url)
+ cmd = ['git', 'config', 'remote.%s.url' % alias, "hg::%s" % abs_url]
+ subprocess.call(cmd)
+
+def main(args):
+ global prefix, gitdir, dirname, branches, bmarks
+ global marks, blob_marks, parsed_refs
+ global peer, mode, bad_mail, bad_name
+ global track_branches, force_push, is_tmp
+ global parsed_tags
+ global filenodes
+ global fake_bmark, hg_version
+ global dry_run
+
+ alias = args[1]
+ url = args[2]
+ peer = None
+
+ hg_git_compat = get_config_bool('remote-hg.hg-git-compat')
+ track_branches = get_config_bool('remote-hg.track-branches', True)
+ force_push = False
+
+ if hg_git_compat:
+ mode = 'hg'
+ bad_mail = 'none@none'
+ bad_name = ''
+ else:
+ mode = 'git'
+ bad_mail = 'unknown'
+ bad_name = 'Unknown'
+
+ if alias[4:] == url:
+ is_tmp = True
+ alias = hashlib.sha1(alias).hexdigest()
+ else:
+ is_tmp = False
+
+ gitdir = os.environ['GIT_DIR']
+ dirname = os.path.join(gitdir, 'hg', alias)
+ branches = {}
+ bmarks = {}
+ blob_marks = {}
+ parsed_refs = {}
+ marks = None
+ parsed_tags = {}
+ filenodes = {}
+ fake_bmark = None
+ try:
+ hg_version = tuple(int(e) for e in util.version().split('.'))
+ except:
+ hg_version = None
+ dry_run = False
+
+ repo = get_repo(url, alias)
+ prefix = 'refs/hg/%s' % alias
+
+ if not is_tmp:
+ fix_path(alias, peer or repo, url)
+
+ marks_path = os.path.join(dirname, 'marks-hg')
+ marks = Marks(marks_path, repo)
+
+ if sys.platform == 'win32':
+ import msvcrt
+ msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
+
+ parser = Parser(repo)
+ for line in parser:
+ if parser.check('capabilities'):
+ do_capabilities(parser)
+ elif parser.check('list'):
+ do_list(parser)
+ elif parser.check('import'):
+ do_import(parser)
+ elif parser.check('export'):
+ do_export(parser)
+ elif parser.check('option'):
+ do_option(parser)
+ else:
+ die('unhandled command: %s' % line)
+ sys.stdout.flush()
+
+def bye():
+ if not marks:
+ return
+ if not is_tmp:
+ marks.store()
+ else:
+ shutil.rmtree(dirname)
+
+atexit.register(bye)
+sys.exit(main(sys.argv))
diff --git a/contrib/remote-helpers/test-bzr.sh b/contrib/remote-helpers/test-bzr.sh
new file mode 100755
index 0000000..dce281f
--- /dev/null
+++ b/contrib/remote-helpers/test-bzr.sh
@@ -0,0 +1,361 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Felipe Contreras
+#
+
+test_description='Test remote-bzr'
+
+. ./test-lib.sh
+
+if ! test_have_prereq PYTHON; then
+ skip_all='skipping remote-bzr tests; python not available'
+ test_done
+fi
+
+if ! python -c 'import bzrlib'; then
+ skip_all='skipping remote-bzr tests; bzr not available'
+ test_done
+fi
+
+check () {
+ echo $3 > expected &&
+	git --git-dir=$1/.git log --format='%s' -1 $2 > actual &&
+ test_cmp expected actual
+}
+
+bzr whoami "A U Thor <author@example.com>"
+
+test_expect_success 'cloning' '
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+ echo one > content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+ check gitrepo HEAD one
+'
+
+test_expect_success 'pulling' '
+ (
+ cd bzrrepo &&
+ echo two > content &&
+ bzr commit -m two
+ ) &&
+
+ (cd gitrepo && git pull) &&
+
+ check gitrepo HEAD two
+'
+
+test_expect_success 'pushing' '
+ (
+ cd gitrepo &&
+ echo three > content &&
+ git commit -a -m three &&
+ git push
+ ) &&
+
+ echo three > expected &&
+ cat bzrrepo/content > actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'roundtrip' '
+ (
+ cd gitrepo &&
+ git pull &&
+ git log --format="%s" -1 origin/master > actual
+ ) &&
+ echo three > expected &&
+ test_cmp expected actual &&
+
+ (cd gitrepo && git push && git pull) &&
+
+ (
+ cd bzrrepo &&
+ echo four > content &&
+ bzr commit -m four
+ ) &&
+
+ (cd gitrepo && git pull && git push) &&
+
+ check gitrepo HEAD four &&
+
+ (
+ cd gitrepo &&
+ echo five > content &&
+ git commit -a -m five &&
+ git push && git pull
+ ) &&
+
+ (cd bzrrepo && bzr revert) &&
+
+ echo five > expected &&
+ cat bzrrepo/content > actual &&
+ test_cmp expected actual
+'
+
+cat > expected <<EOF
+100644 blob 54f9d6da5c91d556e6b54340b1327573073030af content
+100755 blob 68769579c3eaadbe555379b9c3538e6628bae1eb executable
+120000 blob 6b584e8ece562ebffc15d38808cd6b98fc3d97ea link
+EOF
+
+test_expect_success 'special modes' '
+ (
+ cd bzrrepo &&
+	echo exec > executable &&
+	chmod +x executable &&
+	bzr add executable &&
+	bzr commit -m exec &&
+	ln -s content link &&
+	bzr add link &&
+ bzr commit -m link &&
+ mkdir dir &&
+ bzr add dir &&
+ bzr commit -m dir
+ ) &&
+
+ (
+ cd gitrepo &&
+	git pull &&
+ git ls-tree HEAD > ../actual
+ ) &&
+
+ test_cmp expected actual &&
+
+ (
+ cd gitrepo &&
+ git cat-file -p HEAD:link > ../actual
+ ) &&
+
+ printf content > expected &&
+ test_cmp expected actual
+'
+
+cat > expected <<EOF
+100644 blob 54f9d6da5c91d556e6b54340b1327573073030af content
+100755 blob 68769579c3eaadbe555379b9c3538e6628bae1eb executable
+120000 blob 6b584e8ece562ebffc15d38808cd6b98fc3d97ea link
+040000 tree 35c0caa46693cef62247ac89a680f0c5ce32b37b movedir-new
+EOF
+
+test_expect_success 'moving directory' '
+ (
+ cd bzrrepo &&
+ mkdir movedir &&
+ echo one > movedir/one &&
+ echo two > movedir/two &&
+ bzr add movedir &&
+ bzr commit -m movedir &&
+ bzr mv movedir movedir-new &&
+ bzr commit -m movedir-new
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull &&
+ git ls-tree HEAD > ../actual
+ ) &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'different authors' '
+ (
+ cd bzrrepo &&
+ echo john >> content &&
+ bzr commit -m john \
+ --author "Jane Rey <jrey@example.com>" \
+ --author "John Doe <jdoe@example.com>"
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull &&
+ git show --format="%an <%ae>, %cn <%ce>" --quiet > ../actual
+ ) &&
+
+ echo "Jane Rey <jrey@example.com>, A U Thor <author@example.com>" > expected &&
+ test_cmp expected actual
+'
+
+# cleanup previous stuff
+rm -rf bzrrepo gitrepo
+
+test_expect_success 'fetch utf-8 filenames' '
+ test_when_finished "rm -rf bzrrepo gitrepo && LC_ALL=C" &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+
+ echo test >> "ærø" &&
+ bzr add "ærø" &&
+ echo test >> "ø~?" &&
+ bzr add "ø~?" &&
+ bzr commit -m add-utf-8 &&
+ echo test >> "ærø" &&
+ bzr commit -m test-utf-8 &&
+ bzr rm "ø~?" &&
+ bzr mv "ærø" "ø~?" &&
+ bzr commit -m bzr-mv-utf-8
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+ git -c core.quotepath=false ls-files > ../actual
+ ) &&
+ echo "ø~?" > expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push utf-8 filenames' '
+ test_when_finished "rm -rf bzrrepo gitrepo && LC_ALL=C" &&
+
+ mkdir -p tmp && cd tmp &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+
+ echo one >> content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+
+ echo test >> "ærø" &&
+ git add "ærø" &&
+ git commit -m utf-8 &&
+
+ git push
+ ) &&
+
+ (cd bzrrepo && bzr ls > ../actual) &&
+ printf "content\nærø\n" > expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'pushing a merge' '
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+ echo one > content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+
+ (
+ cd bzrrepo &&
+ echo two > content &&
+ bzr commit -m two
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo three > content &&
+ git commit -a -m three &&
+ git fetch &&
+ git merge origin/master || true &&
+ echo three > content &&
+ git commit -a --no-edit &&
+ git push
+ ) &&
+
+ echo three > expected &&
+ cat bzrrepo/content > actual &&
+ test_cmp expected actual
+'
+
+cat > expected <<EOF
+origin/HEAD
+origin/branch
+origin/trunk
+EOF
+
+test_expect_success 'proper bzr repo' '
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ bzr init-repo bzrrepo &&
+
+ (
+ bzr init bzrrepo/trunk &&
+ cd bzrrepo/trunk &&
+ echo one >> content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ bzr branch bzrrepo/trunk bzrrepo/branch &&
+ cd bzrrepo/branch &&
+ echo two >> content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+ git for-each-ref --format "%(refname:short)" refs/remotes/origin > ../actual
+ ) &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'strip' '
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+
+ echo one >> content &&
+ bzr add content &&
+ bzr commit -m one &&
+
+ echo two >> content &&
+ bzr commit -m two
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+
+ (
+ cd bzrrepo &&
+ bzr uncommit --force &&
+
+ echo three >> content &&
+ bzr commit -m three &&
+
+ echo four >> content &&
+ bzr commit -m four &&
+ bzr log --line | sed -e "s/^[0-9][0-9]*: //" > ../expected
+ ) &&
+
+ (
+ cd gitrepo &&
+ git fetch &&
+ git log --format="%an %ad %s" --date=short origin/master > ../actual
+ ) &&
+
+ test_cmp expected actual
+'
+
+test_done
diff --git a/contrib/remote-helpers/test-hg-bidi.sh b/contrib/remote-helpers/test-hg-bidi.sh
new file mode 100755
index 0000000..f83d67d
--- /dev/null
+++ b/contrib/remote-helpers/test-hg-bidi.sh
@@ -0,0 +1,240 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Felipe Contreras
+#
+# Base commands from hg-git tests:
+# https://bitbucket.org/durin42/hg-git/src
+#
+
+test_description='Test bidirectionality of remote-hg'
+
+. ./test-lib.sh
+
+if ! test_have_prereq PYTHON; then
+ skip_all='skipping remote-hg tests; python not available'
+ test_done
+fi
+
+if ! python -c 'import mercurial'; then
+ skip_all='skipping remote-hg tests; mercurial not available'
+ test_done
+fi
+
+# clone to a git repo
+git_clone () {
+ git clone -q "hg::$1" $2
+}
+
+# clone to an hg repo
+hg_clone () {
+ (
+ hg init $2 &&
+ cd $1 &&
+ git push -q "hg::../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
+ ) &&
+
+ (cd $2 && hg -q update)
+}
+
+# push an hg repo's changes into a git repo (by fetching through the remote helper)
+hg_push () {
+ (
+ cd $2
+ git checkout -q -b tmp &&
+ git fetch -q "hg::../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
+ git checkout -q @{-1} &&
+ git branch -q -D tmp 2> /dev/null || true
+ )
+}
+
+hg_log () {
+ hg -R $1 log --graph --debug
+}
+
+setup () {
+ (
+ echo "[ui]"
+ echo "username = A U Thor <author@example.com>"
+ echo "[defaults]"
+ echo "backout = -d \"0 0\""
+ echo "commit = -d \"0 0\""
+ echo "debugrawcommit = -d \"0 0\""
+ echo "tag = -d \"0 0\""
+ echo "[extensions]"
+ echo "graphlog ="
+ ) >> "$HOME"/.hgrc &&
+ git config --global remote-hg.hg-git-compat true
+ git config --global remote-hg.track-branches true
+
+ HGEDITOR=/usr/bin/true
+ GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230"
+ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"
+ export HGEDITOR GIT_AUTHOR_DATE GIT_COMMITTER_DATE
+}
+
+setup
+
+test_expect_success 'encoding' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add älphà" &&
+
+ GIT_AUTHOR_NAME="tést èncödîng" &&
+ export GIT_AUTHOR_NAME &&
+ echo beta > beta &&
+ git add beta &&
+ git commit -m "add beta" &&
+
+ echo gamma > gamma &&
+ git add gamma &&
+ git commit -m "add gämmâ" &&
+
+ : TODO git config i18n.commitencoding latin-1 &&
+ echo delta > delta &&
+ git add delta &&
+ git commit -m "add déltà"
+ ) &&
+
+ hg_clone gitrepo hgrepo &&
+ git_clone hgrepo gitrepo2 &&
+ hg_clone gitrepo2 hgrepo2 &&
+
+ HGENCODING=utf-8 hg_log hgrepo > expected &&
+ HGENCODING=utf-8 hg_log hgrepo2 > actual &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'file removal' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ echo beta > beta &&
+ git add beta &&
+	git commit -m "add beta" &&
+ mkdir foo &&
+ echo blah > foo/bar &&
+ git add foo &&
+ git commit -m "add foo" &&
+ git rm alpha &&
+ git commit -m "remove alpha" &&
+ git rm foo/bar &&
+ git commit -m "remove foo/bar"
+ ) &&
+
+ hg_clone gitrepo hgrepo &&
+ git_clone hgrepo gitrepo2 &&
+ hg_clone gitrepo2 hgrepo2 &&
+
+ hg_log hgrepo > expected &&
+ hg_log hgrepo2 > actual &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'git tags' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+ git config receive.denyCurrentBranch ignore &&
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ git tag alpha &&
+
+ echo beta > beta &&
+ git add beta &&
+ git commit -m "add beta" &&
+ git tag -a -m "added tag beta" beta
+ ) &&
+
+ hg_clone gitrepo hgrepo &&
+ git_clone hgrepo gitrepo2 &&
+ hg_clone gitrepo2 hgrepo2 &&
+
+ hg_log hgrepo > expected &&
+ hg_log hgrepo2 > actual &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'hg branch' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -q -m "add alpha" &&
+ git checkout -q -b not-master
+ ) &&
+
+ (
+ hg_clone gitrepo hgrepo &&
+
+ cd hgrepo &&
+ hg -q co default &&
+ hg mv alpha beta &&
+ hg -q commit -m "rename alpha to beta" &&
+ hg branch gamma | grep -v "permanent and global" &&
+ hg -q commit -m "started branch gamma"
+ ) &&
+
+ hg_push hgrepo gitrepo &&
+ hg_clone gitrepo hgrepo2 &&
+
+ : Back to the common revision &&
+ (cd hgrepo && hg checkout default) &&
+
+ hg_log hgrepo > expected &&
+ hg_log hgrepo2 > actual &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'hg tags' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ git checkout -q -b not-master
+ ) &&
+
+ (
+ hg_clone gitrepo hgrepo &&
+
+ cd hgrepo &&
+ hg co default &&
+ hg tag alpha
+ ) &&
+
+ hg_push hgrepo gitrepo &&
+ hg_clone gitrepo hgrepo2 &&
+
+ hg_log hgrepo > expected &&
+ hg_log hgrepo2 > actual &&
+
+ test_cmp expected actual
+'
+
+test_done
diff --git a/contrib/remote-helpers/test-hg-hg-git.sh b/contrib/remote-helpers/test-hg-hg-git.sh
new file mode 100755
index 0000000..2219284
--- /dev/null
+++ b/contrib/remote-helpers/test-hg-hg-git.sh
@@ -0,0 +1,527 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Felipe Contreras
+#
+# Base commands from hg-git tests:
+# https://bitbucket.org/durin42/hg-git/src
+#
+
+test_description='Test remote-hg output compared to hg-git'
+
+. ./test-lib.sh
+
+if ! test_have_prereq PYTHON; then
+ skip_all='skipping remote-hg tests; python not available'
+ test_done
+fi
+
+if ! python -c 'import mercurial'; then
+ skip_all='skipping remote-hg tests; mercurial not available'
+ test_done
+fi
+
+if ! python -c 'import hggit'; then
+ skip_all='skipping remote-hg tests; hg-git not available'
+ test_done
+fi
+
+# clone to a git repo with git
+git_clone_git () {
+ git clone -q "hg::$1" $2 &&
+ (cd $2 && git checkout master && git branch -D default)
+}
+
+# clone to an hg repo with git
+hg_clone_git () {
+ (
+ hg init $2 &&
+ hg -R $2 bookmark -i master &&
+ cd $1 &&
+ git push -q "hg::../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
+ ) &&
+
+ (cd $2 && hg -q update)
+}
+
+# clone to a git repo with hg
+git_clone_hg () {
+ (
+ git init -q $2 &&
+ cd $1 &&
+ hg bookmark -i -f -r tip master &&
+ hg -q push -r master ../$2 || true
+ )
+}
+
+# clone to an hg repo with hg
+hg_clone_hg () {
+ hg -q clone $1 $2
+}
+
+# push an hg repo with git
+hg_push_git () {
+ (
+ cd $2
+ git checkout -q -b tmp &&
+ git fetch -q "hg::../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
+ git branch -D default &&
+ git checkout -q @{-1} &&
+ git branch -q -D tmp 2> /dev/null || true
+ )
+}
+
+# push an hg git repo with hg
+hg_push_hg () {
+ (
+ cd $1 &&
+ hg -q push ../$2 || true
+ )
+}
+
+hg_log () {
+ hg -R $1 log --graph --debug >log &&
+ grep -v 'tag: *default/' log
+}
+
+git_log () {
+ git --git-dir=$1/.git fast-export --branches
+}
+
+setup () {
+ (
+ echo "[ui]"
+ echo "username = A U Thor <author@example.com>"
+ echo "[defaults]"
+ echo "backout = -d \"0 0\""
+ echo "commit = -d \"0 0\""
+ echo "debugrawcommit = -d \"0 0\""
+ echo "tag = -d \"0 0\""
+ echo "[extensions]"
+ echo "hgext.bookmarks ="
+ echo "hggit ="
+ echo "graphlog ="
+ ) >> "$HOME"/.hgrc &&
+ git config --global receive.denycurrentbranch warn
+ git config --global remote-hg.hg-git-compat true
+ git config --global remote-hg.track-branches false
+
+ HGEDITOR=true
+ HGMERGE=true
+
+ GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230"
+ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"
+ export HGEDITOR HGMERGE GIT_AUTHOR_DATE GIT_COMMITTER_DATE
+}
+
+setup
+
+test_expect_success 'executable bit' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+ echo alpha > alpha &&
+ chmod 0644 alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ chmod 0755 alpha &&
+ git add alpha &&
+ git commit -m "set executable bit" &&
+ chmod 0644 alpha &&
+ git add alpha &&
+ git commit -m "clear executable bit"
+ ) &&
+
+ for x in hg git; do
+ (
+ hg_clone_$x gitrepo hgrepo-$x &&
+ cd hgrepo-$x &&
+ hg_log . &&
+ hg manifest -r 1 -v &&
+ hg manifest -v
+ ) > output-$x &&
+
+ git_clone_$x hgrepo-$x gitrepo2-$x &&
+ git_log gitrepo2-$x > log-$x
+ done &&
+
+ test_cmp output-hg output-git &&
+ test_cmp log-hg log-git
+'
+
+test_expect_success 'symlink' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ ln -s alpha beta &&
+ git add beta &&
+ git commit -m "add beta"
+ ) &&
+
+ for x in hg git; do
+ (
+ hg_clone_$x gitrepo hgrepo-$x &&
+ cd hgrepo-$x &&
+ hg_log . &&
+ hg manifest -v
+ ) > output-$x &&
+
+ git_clone_$x hgrepo-$x gitrepo2-$x &&
+ git_log gitrepo2-$x > log-$x
+ done &&
+
+ test_cmp output-hg output-git &&
+ test_cmp log-hg log-git
+'
+
+test_expect_success 'merge conflict 1' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ hg init hgrepo1 &&
+ cd hgrepo1 &&
+ echo A > afile &&
+ hg add afile &&
+ hg ci -m "origin" &&
+
+ echo B > afile &&
+ hg ci -m "A->B" &&
+
+ hg up -r0 &&
+ echo C > afile &&
+ hg ci -m "A->C" &&
+
+ hg merge -r1 &&
+ echo C > afile &&
+ hg resolve -m afile &&
+ hg ci -m "merge to C"
+ ) &&
+
+ for x in hg git; do
+ git_clone_$x hgrepo1 gitrepo-$x &&
+ hg_clone_$x gitrepo-$x hgrepo2-$x &&
+ hg_log hgrepo2-$x > hg-log-$x &&
+ git_log gitrepo-$x > git-log-$x
+ done &&
+
+ test_cmp hg-log-hg hg-log-git &&
+ test_cmp git-log-hg git-log-git
+'
+
+test_expect_success 'merge conflict 2' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ hg init hgrepo1 &&
+ cd hgrepo1 &&
+ echo A > afile &&
+ hg add afile &&
+ hg ci -m "origin" &&
+
+ echo B > afile &&
+ hg ci -m "A->B" &&
+
+ hg up -r0 &&
+ echo C > afile &&
+ hg ci -m "A->C" &&
+
+ hg merge -r1 || true &&
+ echo B > afile &&
+ hg resolve -m afile &&
+ hg ci -m "merge to B"
+ ) &&
+
+ for x in hg git; do
+ git_clone_$x hgrepo1 gitrepo-$x &&
+ hg_clone_$x gitrepo-$x hgrepo2-$x &&
+ hg_log hgrepo2-$x > hg-log-$x &&
+ git_log gitrepo-$x > git-log-$x
+ done &&
+
+ test_cmp hg-log-hg hg-log-git &&
+ test_cmp git-log-hg git-log-git
+'
+
+test_expect_success 'converged merge' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ hg init hgrepo1 &&
+ cd hgrepo1 &&
+ echo A > afile &&
+ hg add afile &&
+ hg ci -m "origin" &&
+
+ echo B > afile &&
+ hg ci -m "A->B" &&
+
+ echo C > afile &&
+ hg ci -m "B->C" &&
+
+ hg up -r0 &&
+ echo C > afile &&
+ hg ci -m "A->C" &&
+
+ hg merge -r2 || true &&
+ hg ci -m "merge"
+ ) &&
+
+ for x in hg git; do
+ git_clone_$x hgrepo1 gitrepo-$x &&
+ hg_clone_$x gitrepo-$x hgrepo2-$x &&
+ hg_log hgrepo2-$x > hg-log-$x &&
+ git_log gitrepo-$x > git-log-$x
+ done &&
+
+ test_cmp hg-log-hg hg-log-git &&
+ test_cmp git-log-hg git-log-git
+'
+
+test_expect_success 'encoding' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add älphà" &&
+
+ GIT_AUTHOR_NAME="tést èncödîng" &&
+ export GIT_AUTHOR_NAME &&
+ echo beta > beta &&
+ git add beta &&
+ git commit -m "add beta" &&
+
+ echo gamma > gamma &&
+ git add gamma &&
+ git commit -m "add gämmâ" &&
+
+ : TODO git config i18n.commitencoding latin-1 &&
+ echo delta > delta &&
+ git add delta &&
+ git commit -m "add déltà"
+ ) &&
+
+ for x in hg git; do
+ hg_clone_$x gitrepo hgrepo-$x &&
+ git_clone_$x hgrepo-$x gitrepo2-$x &&
+
+ HGENCODING=utf-8 hg_log hgrepo-$x > hg-log-$x &&
+ git_log gitrepo2-$x > git-log-$x
+ done &&
+
+ test_cmp hg-log-hg hg-log-git &&
+ test_cmp git-log-hg git-log-git
+'
+
+test_expect_success 'file removal' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ echo beta > beta &&
+ git add beta &&
+	git commit -m "add beta" &&
+ mkdir foo &&
+ echo blah > foo/bar &&
+ git add foo &&
+ git commit -m "add foo" &&
+ git rm alpha &&
+ git commit -m "remove alpha" &&
+ git rm foo/bar &&
+ git commit -m "remove foo/bar"
+ ) &&
+
+ for x in hg git; do
+ (
+ hg_clone_$x gitrepo hgrepo-$x &&
+ cd hgrepo-$x &&
+ hg_log . &&
+ hg manifest -r 3 &&
+ hg manifest
+ ) > output-$x &&
+
+ git_clone_$x hgrepo-$x gitrepo2-$x &&
+ git_log gitrepo2-$x > log-$x
+ done &&
+
+ test_cmp output-hg output-git &&
+ test_cmp log-hg log-git
+'
+
+test_expect_success 'git tags' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ (
+ git init -q gitrepo &&
+ cd gitrepo &&
+ git config receive.denyCurrentBranch ignore &&
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ git tag alpha &&
+
+ echo beta > beta &&
+ git add beta &&
+ git commit -m "add beta" &&
+ git tag -a -m "added tag beta" beta
+ ) &&
+
+ for x in hg git; do
+ hg_clone_$x gitrepo hgrepo-$x &&
+ hg_log hgrepo-$x > log-$x
+ done &&
+
+ test_cmp log-hg log-git
+'
+
+test_expect_success 'hg author' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ for x in hg git; do
+ (
+ git init -q gitrepo-$x &&
+ cd gitrepo-$x &&
+
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ git checkout -q -b not-master
+ ) &&
+
+ (
+ hg_clone_$x gitrepo-$x hgrepo-$x &&
+ cd hgrepo-$x &&
+
+ hg co master &&
+ echo beta > beta &&
+ hg add beta &&
+ hg commit -u "test" -m "add beta" &&
+
+ echo gamma >> beta &&
+ hg commit -u "test <test@example.com> (comment)" -m "modify beta" &&
+
+ echo gamma > gamma &&
+ hg add gamma &&
+ hg commit -u "<test@example.com>" -m "add gamma" &&
+
+ echo delta > delta &&
+ hg add delta &&
+ hg commit -u "name<test@example.com>" -m "add delta" &&
+
+ echo epsilon > epsilon &&
+ hg add epsilon &&
+ hg commit -u "name <test@example.com" -m "add epsilon" &&
+
+ echo zeta > zeta &&
+ hg add zeta &&
+ hg commit -u " test " -m "add zeta" &&
+
+ echo eta > eta &&
+ hg add eta &&
+ hg commit -u "test < test@example.com >" -m "add eta" &&
+
+ echo theta > theta &&
+ hg add theta &&
+ hg commit -u "test >test@example.com>" -m "add theta" &&
+
+ echo iota > iota &&
+ hg add iota &&
+ hg commit -u "test <test <at> example <dot> com>" -m "add iota"
+ ) &&
+
+ hg_push_$x hgrepo-$x gitrepo-$x &&
+ hg_clone_$x gitrepo-$x hgrepo2-$x &&
+
+ hg_log hgrepo2-$x > hg-log-$x &&
+ git_log gitrepo-$x > git-log-$x
+ done &&
+
+ test_cmp hg-log-hg hg-log-git &&
+ test_cmp git-log-hg git-log-git
+'
+
+test_expect_success 'hg branch' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ for x in hg git; do
+ (
+ git init -q gitrepo-$x &&
+ cd gitrepo-$x &&
+
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -q -m "add alpha" &&
+ git checkout -q -b not-master
+ ) &&
+
+ (
+ hg_clone_$x gitrepo-$x hgrepo-$x &&
+
+ cd hgrepo-$x &&
+ hg -q co master &&
+ hg mv alpha beta &&
+ hg -q commit -m "rename alpha to beta" &&
+ hg branch gamma | grep -v "permanent and global" &&
+ hg -q commit -m "started branch gamma"
+ ) &&
+
+ hg_push_$x hgrepo-$x gitrepo-$x &&
+ hg_clone_$x gitrepo-$x hgrepo2-$x &&
+
+ hg_log hgrepo2-$x > hg-log-$x &&
+ git_log gitrepo-$x > git-log-$x
+ done &&
+
+ test_cmp hg-log-hg hg-log-git &&
+ test_cmp git-log-hg git-log-git
+'
+
+test_expect_success 'hg tags' '
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
+
+ for x in hg git; do
+ (
+ git init -q gitrepo-$x &&
+ cd gitrepo-$x &&
+
+ echo alpha > alpha &&
+ git add alpha &&
+ git commit -m "add alpha" &&
+ git checkout -q -b not-master
+ ) &&
+
+ (
+ hg_clone_$x gitrepo-$x hgrepo-$x &&
+
+ cd hgrepo-$x &&
+ hg co master &&
+ hg tag alpha
+ ) &&
+
+ hg_push_$x hgrepo-$x gitrepo-$x &&
+ hg_clone_$x gitrepo-$x hgrepo2-$x &&
+
+ (
+ git --git-dir=gitrepo-$x/.git tag -l &&
+ hg_log hgrepo2-$x &&
+ cat hgrepo2-$x/.hgtags
+ ) > output-$x
+ done &&
+
+ test_cmp output-hg output-git
+'
+
+test_done
diff --git a/contrib/remote-helpers/test-hg.sh b/contrib/remote-helpers/test-hg.sh
new file mode 100755
index 0000000..f7ce8aa
--- /dev/null
+++ b/contrib/remote-helpers/test-hg.sh
@@ -0,0 +1,692 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Felipe Contreras
+#
+# Base commands from hg-git tests:
+# https://bitbucket.org/durin42/hg-git/src
+#
+
+test_description='Test remote-hg'
+
+. ./test-lib.sh
+
+if ! test_have_prereq PYTHON; then
+ skip_all='skipping remote-hg tests; python not available'
+ test_done
+fi
+
+if ! python -c 'import mercurial'; then
+ skip_all='skipping remote-hg tests; mercurial not available'
+ test_done
+fi
+
+check () {
+ echo $3 > expected &&
+	git --git-dir=$1/.git log --format='%s' -1 $2 > actual &&
+ test_cmp expected actual
+}
+
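+# check_branch <repo> <branch> [<msg>]: with <msg>, assert that the branch tip
+# has that commit message; without it, assert that the branch does not exist.
+# check_bookmark below does the same for bookmarks.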
+check_branch () {
+ if [ -n "$3" ]; then
+ echo $3 > expected &&
+ hg -R $1 log -r $2 --template '{desc}\n' > actual &&
+ test_cmp expected actual
+ else
+ hg -R $1 branches > out &&
+ ! grep $2 out
+ fi
+}
+
+check_bookmark () {
+ if [ -n "$3" ]; then
+ echo $3 > expected &&
+ hg -R $1 log -r "bookmark('$2')" --template '{desc}\n' > actual &&
+ test_cmp expected actual
+ else
+ hg -R $1 bookmarks > out &&
+ ! grep $2 out
+ fi
+}
+
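+# check_push <expected-exit-code> [<push-args>...]: run "git push origin" with
+# the given arguments, compare its exit code, and verify that every
+# "branch:kind" line read from stdin (new, non-fast-forward, fetch-first,
+# forced-update, or empty for a normal update) matches the push output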
+check_push () {
+ local expected_ret=$1 ret=0 ref_ret=0 IFS=':'
+
+ shift
+ git push origin "$@" 2> error
+ ret=$?
+ cat error
+
+ while read branch kind
+ do
+ case "$kind" in
+ 'new')
+ grep "^ \* \[new branch\] *${branch} -> ${branch}$" error || ref_ret=1
+ ;;
+ 'non-fast-forward')
+ grep "^ ! \[rejected\] *${branch} -> ${branch} (non-fast-forward)$" error || ref_ret=1
+ ;;
+ 'fetch-first')
+ grep "^ ! \[rejected\] *${branch} -> ${branch} (fetch first)$" error || ref_ret=1
+ ;;
+ 'forced-update')
+ grep "^ + [a-f0-9]*\.\.\.[a-f0-9]* *${branch} -> ${branch} (forced update)$" error || ref_ret=1
+ ;;
+ '')
+ grep "^ [a-f0-9]*\.\.[a-f0-9]* *${branch} -> ${branch}$" error || ref_ret=1
+ ;;
+ esac
+ let 'ref_ret' && echo "match for '$branch' failed" && break
+ done
+
+ if let 'expected_ret != ret || ref_ret'
+ then
+ return 1
+ fi
+
+ return 0
+}
+
+setup () {
+ (
+ echo "[ui]"
+ echo "username = H G Wells <wells@example.com>"
+ echo "[extensions]"
+ echo "mq ="
+ ) >> "$HOME"/.hgrc &&
+
+ GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230" &&
+ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" &&
+ export GIT_COMMITTER_DATE GIT_AUTHOR_DATE
+}
+
+setup
+
+test_expect_success 'cloning' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo HEAD zero
+'
+
+test_expect_success 'cloning with branches' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg branch next &&
+ echo next > content &&
+ hg commit -m next
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo origin/branches/next next
+'
+
+test_expect_success 'cloning with bookmarks' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg checkout default &&
+ hg bookmark feature-a &&
+ echo feature-a > content &&
+ hg commit -m feature-a
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo origin/feature-a feature-a
+'
+
+test_expect_success 'update bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg bookmark devel
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet devel &&
+ echo devel > content &&
+ git commit -a -m devel &&
+ git push --quiet
+ ) &&
+
+ check_bookmark hgrepo devel devel
+'
+
+test_expect_success 'new bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-b &&
+ echo feature-b > content &&
+ git commit -a -m feature-b &&
+ git push --quiet origin feature-b
+ ) &&
+
+ check_bookmark hgrepo feature-b feature-b
+'
+
+# cleanup previous stuff
+rm -rf hgrepo
+
+author_test () {
+ echo $1 >> content &&
+ hg commit -u "$2" -m "add $1" &&
+ echo "$3" >> ../expected
+}
+
+test_expect_success 'authors' '
+ test_when_finished "rm -rf hgrepo gitrepo" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ touch content &&
+ hg add content &&
+
+ > ../expected &&
+ author_test alpha "" "H G Wells <wells@example.com>" &&
+ author_test beta "test" "test <unknown>" &&
+ author_test beta "test <test@example.com> (comment)" "test <test@example.com>" &&
+ author_test gamma "<test@example.com>" "Unknown <test@example.com>" &&
+ author_test delta "name<test@example.com>" "name <test@example.com>" &&
+ author_test epsilon "name <test@example.com" "name <test@example.com>" &&
+ author_test zeta " test " "test <unknown>" &&
+ author_test eta "test < test@example.com >" "test <test@example.com>" &&
+ author_test theta "test >test@example.com>" "test <test@example.com>" &&
+ author_test iota "test < test <at> example <dot> com>" "test <unknown>" &&
+ author_test kappa "test@example.com" "Unknown <test@example.com>"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ git --git-dir=gitrepo/.git log --reverse --format="%an <%ae>" > actual &&
+
+ test_cmp expected actual
+'
+
+test_expect_success 'strip' '
+ test_when_finished "rm -rf hgrepo gitrepo" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ echo one >> content &&
+ hg add content &&
+ hg commit -m one &&
+
+ echo two >> content &&
+ hg commit -m two
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg strip 1 &&
+
+ echo three >> content &&
+ hg commit -m three &&
+
+ echo four >> content &&
+ hg commit -m four
+ ) &&
+
+ (
+ cd gitrepo &&
+ git fetch &&
+ git log --format="%s" origin/master > ../actual
+ ) &&
+
+ hg -R hgrepo log --template "{desc}\n" > expected &&
+ test_cmp actual expected
+'
+
+test_expect_success 'remote push with master bookmark' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark master &&
+ echo one > content &&
+ hg commit -m one
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ echo two > content &&
+ git commit -a -m two &&
+ git push
+ ) &&
+
+ check_branch hgrepo default two
+'
+
+cat > expected <<EOF
+changeset: 0:6e2126489d3d
+tag: tip
+user: A U Thor <author@example.com>
+date: Mon Jan 01 00:00:00 2007 +0230
+summary: one
+
+EOF
+
+test_expect_success 'remote push from master branch' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ hg init hgrepo &&
+
+ (
+ git init gitrepo &&
+ cd gitrepo &&
+ git remote add origin "hg::../hgrepo" &&
+ echo one > content &&
+ git add content &&
+ git commit -a -m one &&
+ git push origin master
+ ) &&
+
+ hg -R hgrepo log > actual &&
+ cat actual &&
+ test_cmp expected actual &&
+
+ check_branch hgrepo default one
+'
+
+GIT_REMOTE_HG_TEST_REMOTE=1
+export GIT_REMOTE_HG_TEST_REMOTE
+
+test_expect_success 'remote cloning' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo HEAD zero
+'
+
+test_expect_success 'remote update bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg bookmark devel
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet devel &&
+ echo devel > content &&
+ git commit -a -m devel &&
+ git push --quiet
+ ) &&
+
+ check_bookmark hgrepo devel devel
+'
+
+test_expect_success 'remote new bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-b &&
+ echo feature-b > content &&
+ git commit -a -m feature-b &&
+ git push --quiet origin feature-b
+ ) &&
+
+ check_bookmark hgrepo feature-b feature-b
+'
+
+test_expect_success 'remote push diverged' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg checkout default &&
+ echo bump > content &&
+ hg commit -m bump
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo diverge > content &&
+ git commit -a -m diverged &&
+ check_push 1 <<-EOF
+ master:non-fast-forward
+ EOF
+ ) &&
+
+ check_branch hgrepo default bump
+'
+
+test_expect_success 'remote update bookmark diverge' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg checkout tip^ &&
+ hg bookmark diverge
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ echo "bump bookmark" > content &&
+ hg commit -m "bump bookmark"
+ ) &&
+
+ (
+ cd gitrepo &&
+ git checkout --quiet diverge &&
+ echo diverge > content &&
+ git commit -a -m diverge &&
+ check_push 1 <<-EOF
+ diverge:fetch-first
+ EOF
+ ) &&
+
+ check_bookmark hgrepo diverge "bump bookmark"
+'
+
+test_expect_success 'remote new bookmark multiple branch head' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-c HEAD^ &&
+ echo feature-c > content &&
+ git commit -a -m feature-c &&
+ git push --quiet origin feature-c
+ ) &&
+
+ check_bookmark hgrepo feature-c feature-c
+'
+
+# cleanup previous stuff
+rm -rf hgrepo
+
+setup_big_push () {
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark bad_bmark1 &&
+ echo one > content &&
+ hg commit -m one &&
+ hg bookmark bad_bmark2 &&
+ hg bookmark good_bmark &&
+ hg bookmark -i good_bmark &&
+ hg -q branch good_branch &&
+ echo "good branch" > content &&
+ hg commit -m "good branch" &&
+ hg -q branch bad_branch &&
+ echo "bad branch" > content &&
+ hg commit -m "bad branch"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd gitrepo &&
+ echo two > content &&
+ git commit -q -a -m two &&
+
+ git checkout -q good_bmark &&
+ echo three > content &&
+ git commit -q -a -m three &&
+
+ git checkout -q bad_bmark1 &&
+ git reset --hard HEAD^ &&
+ echo four > content &&
+ git commit -q -a -m four &&
+
+ git checkout -q bad_bmark2 &&
+ git reset --hard HEAD^ &&
+ echo five > content &&
+ git commit -q -a -m five &&
+
+ git checkout -q -b new_bmark master &&
+ echo six > content &&
+ git commit -q -a -m six &&
+
+ git checkout -q branches/good_branch &&
+ echo seven > content &&
+ git commit -q -a -m seven &&
+ echo eight > content &&
+ git commit -q -a -m eight &&
+
+ git checkout -q branches/bad_branch &&
+ git reset --hard HEAD^ &&
+ echo nine > content &&
+ git commit -q -a -m nine &&
+
+ git checkout -q -b branches/new_branch master &&
+ echo ten > content &&
+ git commit -q -a -m ten
+ )
+}
+
+test_expect_success 'remote big push' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ setup_big_push &&
+
+ (
+ cd gitrepo &&
+
+ check_push 1 --all <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:non-fast-forward
+ bad_bmark2:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+ ) &&
+
+ check_branch hgrepo default one &&
+ check_branch hgrepo good_branch "good branch" &&
+ check_branch hgrepo bad_branch "bad branch" &&
+ check_branch hgrepo new_branch '' &&
+ check_bookmark hgrepo good_bmark one &&
+ check_bookmark hgrepo bad_bmark1 one &&
+ check_bookmark hgrepo bad_bmark2 one &&
+ check_bookmark hgrepo new_bmark ''
+'
+
+test_expect_success 'remote big push fetch first' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark bad_bmark &&
+ hg bookmark good_bmark &&
+ hg bookmark -i good_bmark &&
+ hg -q branch good_branch &&
+ echo "good branch" > content &&
+ hg commit -m "good branch" &&
+ hg -q branch bad_branch &&
+ echo "bad branch" > content &&
+ hg commit -m "bad branch"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg bookmark -f bad_bmark &&
+ echo update_bmark > content &&
+ hg commit -m "update bmark"
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo two > content &&
+ git commit -q -a -m two &&
+
+ git checkout -q good_bmark &&
+ echo three > content &&
+ git commit -q -a -m three &&
+
+ git checkout -q bad_bmark &&
+ echo four > content &&
+ git commit -q -a -m four &&
+
+ git checkout -q branches/bad_branch &&
+ echo five > content &&
+ git commit -q -a -m five &&
+
+ check_push 1 --all <<-EOF
+ master
+ good_bmark
+ new_bmark:new
+ new_branch:new
+ bad_bmark:fetch-first
+ branches/bad_branch:fetch-first
+ EOF
+
+ git fetch &&
+
+ check_push 1 --all <<-EOF
+ master
+ good_bmark
+ bad_bmark:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+ )
+'
+
+test_expect_failure 'remote big push force' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ setup_big_push &&
+
+ (
+ cd gitrepo &&
+
+ check_push 0 --force --all <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:forced-update
+ bad_bmark2:forced-update
+ branches/bad_branch:forced-update
+ EOF
+ ) &&
+
+ check_branch hgrepo default six &&
+ check_branch hgrepo good_branch eight &&
+ check_branch hgrepo bad_branch nine &&
+ check_branch hgrepo new_branch ten &&
+ check_bookmark hgrepo good_bmark three &&
+ check_bookmark hgrepo bad_bmark1 four &&
+ check_bookmark hgrepo bad_bmark2 five &&
+ check_bookmark hgrepo new_bmark six
+'
+
+test_expect_failure 'remote big push dry-run' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ setup_big_push &&
+
+ (
+ cd gitrepo &&
+
+ check_push 0 --dry-run --all <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:non-fast-forward
+ bad_bmark2:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+
+ check_push 0 --dry-run master good_bmark new_bmark branches/good_branch branches/new_branch <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ EOF
+ ) &&
+
+ check_branch hgrepo default one &&
+ check_branch hgrepo good_branch "good branch" &&
+ check_branch hgrepo bad_branch "bad branch" &&
+ check_branch hgrepo new_branch '' &&
+ check_bookmark hgrepo good_bmark one &&
+ check_bookmark hgrepo bad_bmark1 one &&
+ check_bookmark hgrepo bad_bmark2 one &&
+ check_bookmark hgrepo new_bmark ''
+'
+
+test_expect_success 'remote double failed push' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ echo one > content &&
+ hg commit -m one
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git reset --hard HEAD^ &&
+ echo two > content &&
+ git commit -a -m two &&
+ test_expect_code 1 git push &&
+ test_expect_code 1 git push
+ )
+'
+
+test_done
diff --git a/contrib/remotes2config.sh b/contrib/remotes2config.sh
new file mode 100755
index 0000000..1cda19f
--- /dev/null
+++ b/contrib/remotes2config.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+
+# Use this tool to rewrite your .git/remotes/ files into the config.
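+#
+# A minimal way to run it (a sketch; the path is illustrative) is from
+# inside a repository, with git's exec path prepended so that
+# "git-sh-setup" can be found:
+#
+#   PATH="$(git --exec-path):$PATH" sh /path/to/remotes2config.sh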
+
+. git-sh-setup
+
+if [ -d "$GIT_DIR"/remotes ]; then
+ echo "Rewriting $GIT_DIR/remotes" >&2
+ error=0
+ # rewrite into config
+ {
+ cd "$GIT_DIR"/remotes
+ ls | while read f; do
+ name=$(printf "$f" | tr -c "A-Za-z0-9-" ".")
+ sed -n \
+ -e "s/^URL:[ ]*\(.*\)$/remote.$name.url \1 ./p" \
+ -e "s/^Pull:[ ]*\(.*\)$/remote.$name.fetch \1 ^$ /p" \
+ -e "s/^Push:[ ]*\(.*\)$/remote.$name.push \1 ^$ /p" \
+ < "$f"
+ done
+ echo done
+ } | while read key value regex; do
+ case $key in
+ done)
+ if [ $error = 0 ]; then
+ mv "$GIT_DIR"/remotes "$GIT_DIR"/remotes.old
+ fi ;;
+ *)
+ echo "git config $key "$value" $regex"
+ git config $key "$value" $regex || error=1 ;;
+ esac
+ done
+fi
diff --git a/contrib/rerere-train.sh b/contrib/rerere-train.sh
new file mode 100755
index 0000000..36b6fee
--- /dev/null
+++ b/contrib/rerere-train.sh
@@ -0,0 +1,52 @@
+#!/bin/sh
+# Copyright (c) 2008, Nanako Shiraishi
+# Prime rerere database from existing merge commits
+
+me=rerere-train
+USAGE="$me rev-list-args"
+
+SUBDIRECTORY_OK=Yes
+OPTIONS_SPEC=
+. $(git --exec-path)/git-sh-setup
+require_work_tree
+cd_to_toplevel
+
+# Remember original branch
+branch=$(git symbolic-ref -q HEAD) ||
+original_HEAD=$(git rev-parse --verify HEAD) || {
+ echo >&2 "Not on any branch and no commit yet?"
+ exit 1
+}
+
+mkdir -p "$GIT_DIR/rr-cache" || exit
+
+git rev-list --parents "$@" |
+while read commit parent1 other_parents
+do
+ if test -z "$other_parents"
+ then
+ # Skip non-merges
+ continue
+ fi
+ git checkout -q "$parent1^0"
+ if git merge $other_parents >/dev/null 2>&1
+ then
+ # Cleanly merges
+ continue
+ fi
+ if test -s "$GIT_DIR/MERGE_RR"
+ then
+ git show -s --pretty=format:"Learning from %h %s" "$commit"
+ git rerere
+ git checkout -q $commit -- .
+ git rerere
+ fi
+ git reset -q --hard
+done
+
+if test -z "$branch"
+then
+ git checkout "$original_HEAD"
+else
+ git checkout "${branch#refs/heads/}"
+fi
diff --git a/contrib/stats/git-common-hash b/contrib/stats/git-common-hash
new file mode 100755
index 0000000..e27fd08
--- /dev/null
+++ b/contrib/stats/git-common-hash
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+# This script displays the distribution of longest common hash prefixes.
+# This can be used to determine the minimum prefix length to use
+# for object names to be unique.
+
+git rev-list --objects --all | sort | perl -lne '
+ substr($_, 40) = "";
+ # uncomment next line for a distribution of bits instead of hex chars
+ # $_ = unpack("B*",pack("H*",$_));
+ if (defined $p) {
+ ($p ^ $_) =~ /^(\0*)/;
+ $common = length $1;
+ if (defined $pcommon) {
+ $count[$pcommon > $common ? $pcommon : $common]++;
+ } else {
+ $count[$common]++; # first item
+ }
+ }
+ $p = $_;
+ $pcommon = $common;
+ END {
+ $count[$common]++; # last item
+ print "$_: $count[$_]" for 0..$#count;
+ }
+'
diff --git a/contrib/stats/mailmap.pl b/contrib/stats/mailmap.pl
new file mode 100755
index 0000000..9513f5e
--- /dev/null
+++ b/contrib/stats/mailmap.pl
@@ -0,0 +1,70 @@
+#!/usr/bin/perl
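+#
+# Scan "git log" for email addresses that appear under more than one
+# author name (the default, or -e/--emails), or for names that appear
+# with more than one email address (-n/--names), as an aid for writing
+# a .mailmap file.  Groups are ordered by commit count (-c, the default)
+# or by most recent commit time (-t).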
+
+use warnings 'all';
+use strict;
+use Getopt::Long;
+
+my $match_emails;
+my $match_names;
+my $order_by = 'count';
+Getopt::Long::Configure(qw(bundling));
+GetOptions(
+ 'emails|e!' => \$match_emails,
+ 'names|n!' => \$match_names,
+ 'count|c' => sub { $order_by = 'count' },
+ 'time|t' => sub { $order_by = 'stamp' },
+) or exit 1;
+$match_emails = 1 unless $match_names;
+
+my $email = {};
+my $name = {};
+
+open(my $fh, '-|', "git log --format='%at <%aE> %aN'");
+while(<$fh>) {
+ my ($t, $e, $n) = /(\S+) <(\S+)> (.*)/;
+ mark($email, $e, $n, $t);
+ mark($name, $n, $e, $t);
+}
+close($fh);
+
+if ($match_emails) {
+ foreach my $e (dups($email)) {
+ foreach my $n (vals($email->{$e})) {
+ show($n, $e, $email->{$e}->{$n});
+ }
+ print "\n";
+ }
+}
+if ($match_names) {
+ foreach my $n (dups($name)) {
+ foreach my $e (vals($name->{$n})) {
+ show($n, $e, $name->{$n}->{$e});
+ }
+ print "\n";
+ }
+}
+exit 0;
+
+sub mark {
+ my ($h, $k, $v, $t) = @_;
+ my $e = $h->{$k}->{$v} ||= { count => 0, stamp => 0 };
+ $e->{count}++;
+ $e->{stamp} = $t unless $t < $e->{stamp};
+}
+
+sub dups {
+ my $h = shift;
+ return grep { keys($h->{$_}) > 1 } keys($h);
+}
+
+sub vals {
+ my $h = shift;
+ return sort {
+ $h->{$b}->{$order_by} <=> $h->{$a}->{$order_by}
+ } keys($h);
+}
+
+sub show {
+ my ($n, $e, $h) = @_;
+ print "$n <$e> ($h->{$order_by})\n";
+}
diff --git a/contrib/stats/packinfo.pl b/contrib/stats/packinfo.pl
new file mode 100755
index 0000000..be188c0
--- /dev/null
+++ b/contrib/stats/packinfo.pl
@@ -0,0 +1,212 @@
+#!/usr/bin/perl
+#
+# This tool will print vaguely pretty information about a pack. It
+# expects the output of "git verify-pack -v" as input on stdin.
+#
+# $ git verify-pack -v | packinfo.pl
+#
+# This prints some full-pack statistics; currently "all sizes", "all
+# path sizes", "tree sizes", "tree path sizes", and "depths".
+#
+# * "all sizes" stats are across every object size in the file;
+# full sizes for base objects, and delta size for deltas.
+# * "all path sizes" stats are across all object's "path sizes".
+# A path size is the sum of the size of the delta chain, including the
+# base object. In other words, it's how many bytes need be read to
+# reassemble the file from deltas.
+# * "tree sizes" are object sizes grouped into delta trees.
+# * "tree path sizes" are path sizes grouped into delta trees.
+# * "depths" should be obvious.
+#
+# When run as:
+#
+# $ git verify-pack -v | packinfo.pl -tree
+#
+# the trees of objects are output along with the stats. This looks
+# like:
+#
+# 0 commit 031321c6... 803 803
+#
+# 0 blob 03156f21... 1767 1767
+# 1 blob f52a9d7f... 10 1777
+# 2 blob a8cc5739... 51 1828
+# 3 blob 660e90b1... 15 1843
+# 4 blob 0cb8e3bb... 33 1876
+# 2 blob e48607f0... 311 2088
+# size: count 6 total 2187 min 10 max 1767 mean 364.50 median 51 std_dev 635.85
+# path size: count 6 total 11179 min 1767 max 2088 mean 1863.17 median 1843 std_dev 107.26
+#
+# The first number after the sha1 is the object size, the second
+# number is the path size. The statistics are across all objects in
+# the previous delta tree. Obviously they are omitted for trees of
+# one object.
+#
+# When run as:
+#
+# $ git verify-pack -v | packinfo.pl -tree -filenames
+#
+# it adds filenames to the tree. Getting this information is slow:
+#
+# 0 blob 03156f21... 1767 1767 Documentation/git-lost-found.txt @ tags/v1.2.0~142
+# 1 blob f52a9d7f... 10 1777 Documentation/git-lost-found.txt @ tags/v1.5.0-rc1~74
+# 2 blob a8cc5739... 51 1828 Documentation/git-lost+found.txt @ tags/v0.99.9h^0
+# 3 blob 660e90b1... 15 1843 Documentation/git-lost+found.txt @ master~3222^2~2
+# 4 blob 0cb8e3bb... 33 1876 Documentation/git-lost+found.txt @ master~3222^2~3
+# 2 blob e48607f0... 311 2088 Documentation/git-lost-found.txt @ tags/v1.5.2-rc3~4
+# size: count 6 total 2187 min 10 max 1767 mean 364.50 median 51 std_dev 635.85
+# path size: count 6 total 11179 min 1767 max 2088 mean 1863.17 median 1843 std_dev 107.26
+#
+# When run as:
+#
+# $ git verify-pack -v | packinfo.pl -dump
+#
+# it prints out "sha1 size pathsize depth" for each sha1 in lexical
+# order.
+#
+# 000079a2eaef17b7eae70e1f0f635557ea67b644 30 472 7
+# 00013cafe6980411aa6fdd940784917b5ff50f0a 44 1542 4
+# 000182eacf99cde27d5916aa415921924b82972c 499 499 0
+# ...
+#
+# This is handy for comparing two packs. Adding "-filenames" will add
+# filenames, as per "-tree -filenames" above.
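+#
+# For instance (a sketch; the pack names are only illustrative), two
+# packs can be compared by dumping each and diffing the results:
+#
+#   $ git verify-pack -v .git/objects/pack/pack-aaaa.idx | packinfo.pl -dump > old.txt
+#   $ git verify-pack -v .git/objects/pack/pack-bbbb.idx | packinfo.pl -dump > new.txt
+#   $ diff old.txt new.txt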
+
+use strict;
+use Getopt::Long;
+
+my $filenames = 0;
+my $tree = 0;
+my $dump = 0;
+GetOptions("tree" => \$tree,
+ "filenames" => \$filenames,
+ "dump" => \$dump);
+
+my %parents;
+my %children;
+my %sizes;
+my @roots;
+my %paths;
+my %types;
+my @commits;
+my %names;
+my %depths;
+my @depths;
+
+while (<STDIN>) {
+ my ($sha1, $type, $size, $space, $offset, $depth, $parent) = split(/\s+/, $_);
+ next unless ($sha1 =~ /^[0-9a-f]{40}$/);
+ $depths{$sha1} = $depth || 0;
+ push(@depths, $depth || 0);
+ push(@commits, $sha1) if ($type eq 'commit');
+ push(@roots, $sha1) unless $parent;
+ $parents{$sha1} = $parent;
+ $types{$sha1} = $type;
+ push(@{$children{$parent}}, $sha1);
+ $sizes{$sha1} = $size;
+}
+
+if ($filenames && ($tree || $dump)) {
+ open(NAMES, "git name-rev --all|");
+ while (<NAMES>) {
+ if (/^(\S+)\s+(.*)$/) {
+ my ($sha1, $name) = ($1, $2);
+ $names{$sha1} = $name;
+ }
+ }
+ close NAMES;
+
+ for my $commit (@commits) {
+ my $name = $names{$commit};
+ open(TREE, "git ls-tree -t -r $commit|");
+ print STDERR "Plumbing tree $name\n";
+ while (<TREE>) {
+ if (/^(\S+)\s+(\S+)\s+(\S+)\s+(.*)$/) {
+ my ($mode, $type, $sha1, $path) = ($1, $2, $3, $4);
+ $paths{$sha1} = "$path @ $name";
+ }
+ }
+ close TREE;
+ }
+}
+
+sub stats {
+ my @data = sort {$a <=> $b} @_;
+ my $min = $data[0];
+ my $max = $data[$#data];
+ my $total = 0;
+ my $count = scalar @data;
+ for my $datum (@data) {
+ $total += $datum;
+ }
+ my $mean = $total / $count;
+ my $median = $data[int(@data / 2)];
+ my $diff_sum = 0;
+ for my $datum (@data) {
+ $diff_sum += ($datum - $mean)**2;
+ }
+ my $std_dev = sqrt($diff_sum / $count);
+ return ($count, $total, $min, $max, $mean, $median, $std_dev);
+}
+
+sub print_stats {
+ my $name = shift;
+ my ($count, $total, $min, $max, $mean, $median, $std_dev) = stats(@_);
+ printf("%s: count %s total %s min %s max %s mean %.2f median %s std_dev %.2f\n",
+ $name, $count, $total, $min, $max, $mean, $median, $std_dev);
+}
+
+my @sizes;
+my @path_sizes;
+my @all_sizes;
+my @all_path_sizes;
+my %path_sizes;
+
+sub dig {
+ my ($sha1, $depth, $path_size) = @_;
+ $path_size += $sizes{$sha1};
+ push(@sizes, $sizes{$sha1});
+ push(@all_sizes, $sizes{$sha1});
+ push(@path_sizes, $path_size);
+ push(@all_path_sizes, $path_size);
+ $path_sizes{$sha1} = $path_size;
+ if ($tree) {
+ printf("%3d%s %6s %s %8d %8d %s\n",
+ $depth, (" " x $depth), $types{$sha1},
+ $sha1, $sizes{$sha1}, $path_size, $paths{$sha1});
+ }
+ for my $child (@{$children{$sha1}}) {
+ dig($child, $depth + 1, $path_size);
+ }
+}
+
+my @tree_sizes;
+my @tree_path_sizes;
+
+for my $root (@roots) {
+ undef @sizes;
+ undef @path_sizes;
+ dig($root, 0, 0);
+ my ($aa, $sz_total) = stats(@sizes);
+ my ($bb, $psz_total) = stats(@path_sizes);
+ push(@tree_sizes, $sz_total);
+ push(@tree_path_sizes, $psz_total);
+ if ($tree) {
+ if (@sizes > 1) {
+ print_stats(" size", @sizes);
+ print_stats("path size", @path_sizes);
+ }
+ print "\n";
+ }
+}
+
+if ($dump) {
+ for my $sha1 (sort keys %sizes) {
+ print "$sha1 $sizes{$sha1} $path_sizes{$sha1} $depths{$sha1} $paths{$sha1}\n";
+ }
+} else {
+ print_stats(" all sizes", @all_sizes);
+ print_stats(" all path sizes", @all_path_sizes);
+ print_stats(" tree sizes", @tree_sizes);
+ print_stats("tree path sizes", @tree_path_sizes);
+ print_stats(" depths", @depths);
+}
diff --git a/contrib/subtree/.gitignore b/contrib/subtree/.gitignore
new file mode 100644
index 0000000..91360a3
--- /dev/null
+++ b/contrib/subtree/.gitignore
@@ -0,0 +1,6 @@
+*~
+git-subtree
+git-subtree.xml
+git-subtree.1
+mainline
+subproj
diff --git a/contrib/subtree/COPYING b/contrib/subtree/COPYING
new file mode 100644
index 0000000..d511905
--- /dev/null
+++ b/contrib/subtree/COPYING
@@ -0,0 +1,339 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/contrib/subtree/INSTALL b/contrib/subtree/INSTALL
new file mode 100644
index 0000000..7ab0cf4
--- /dev/null
+++ b/contrib/subtree/INSTALL
@@ -0,0 +1,28 @@
+HOW TO INSTALL git-subtree
+==========================
+
+First, build from the top source directory.
+
+Then, in contrib/subtree, run:
+
+ make
+ make install
+ make install-doc
+
+If you used configure to do the main build, the git-subtree build will
+pick up those settings. If not, you will likely have to provide a
+value for prefix:
+
+ make prefix=<some dir>
+ make prefix=<some dir> install
+ make prefix=<some dir> install-doc
+
+To run tests, first copy git-subtree to the main build area so the
+newly-built git can find it:
+
+ cp git-subtree ../..
+
+Then:
+
+ make test
+
diff --git a/contrib/subtree/Makefile b/contrib/subtree/Makefile
new file mode 100644
index 0000000..b507505
--- /dev/null
+++ b/contrib/subtree/Makefile
@@ -0,0 +1,53 @@
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+prefix ?= /usr/local
+mandir ?= $(prefix)/share/man
+libexecdir ?= $(prefix)/libexec/git-core
+gitdir ?= $(shell git --exec-path)
+man1dir ?= $(mandir)/man1
+
+gitver ?= $(word 3,$(shell git --version))
+
+# this should be set to a 'standard' bsd-type install program
+INSTALL ?= install
+
+ASCIIDOC_CONF = ../../Documentation/asciidoc.conf
+MANPAGE_NORMAL_XSL = ../../Documentation/manpage-normal.xsl
+
+GIT_SUBTREE_SH := git-subtree.sh
+GIT_SUBTREE := git-subtree
+
+GIT_SUBTREE_DOC := git-subtree.1
+GIT_SUBTREE_XML := git-subtree.xml
+GIT_SUBTREE_TXT := git-subtree.txt
+
+all: $(GIT_SUBTREE)
+
+$(GIT_SUBTREE): $(GIT_SUBTREE_SH)
+ cp $< $@ && chmod +x $@
+
+doc: $(GIT_SUBTREE_DOC)
+
+install: $(GIT_SUBTREE)
+ $(INSTALL) -m 755 $(GIT_SUBTREE) $(DESTDIR)$(libexecdir)
+
+install-doc: install-man
+
+install-man: $(GIT_SUBTREE_DOC)
+ $(INSTALL) -d -m 755 $(DESTDIR)$(man1dir)
+ $(INSTALL) -m 644 $^ $(DESTDIR)$(man1dir)
+
+$(GIT_SUBTREE_DOC): $(GIT_SUBTREE_XML)
+ xmlto -m $(MANPAGE_NORMAL_XSL) man $^
+
+$(GIT_SUBTREE_XML): $(GIT_SUBTREE_TXT)
+ asciidoc -b docbook -d manpage -f $(ASCIIDOC_CONF) \
+ -agit_version=$(gitver) $^
+
+test:
+ $(MAKE) -C t/ test
+
+clean:
+ rm -f *~ *.xml *.html *.1
+ rm -rf subproj mainline
diff --git a/contrib/subtree/README b/contrib/subtree/README
new file mode 100644
index 0000000..c686b4a
--- /dev/null
+++ b/contrib/subtree/README
@@ -0,0 +1,8 @@
+
+Please read git-subtree.txt for documentation.
+
+Please don't contact me using github mail; it's slow, ugly, and worst of
+all, redundant. Email me instead at apenwarr@gmail.com and I'll be happy to
+help.
+
+Avery
diff --git a/contrib/subtree/git-subtree.sh b/contrib/subtree/git-subtree.sh
new file mode 100755
index 0000000..51ae932
--- /dev/null
+++ b/contrib/subtree/git-subtree.sh
@@ -0,0 +1,725 @@
+#!/bin/sh
+#
+# git-subtree.sh: split/join git repositories in subdirectories of this one
+#
+# Copyright (C) 2009 Avery Pennarun <apenwarr@gmail.com>
+#
+if [ $# -eq 0 ]; then
+ set -- -h
+fi
+OPTS_SPEC="\
+git subtree add --prefix=<prefix> <commit>
+git subtree add --prefix=<prefix> <repository> <commit>
+git subtree merge --prefix=<prefix> <commit>
+git subtree pull --prefix=<prefix> <repository> <refspec...>
+git subtree push --prefix=<prefix> <repository> <refspec...>
+git subtree split --prefix=<prefix> <commit...>
+--
+h,help show the help
+q quiet
+d show debug messages
+P,prefix= the name of the subdir to split out
+m,message= use the given message as the commit message for the merge commit
+ options for 'split'
+annotate= add a prefix to commit message of new commits
+b,branch= create a new branch from the split subtree
+ignore-joins ignore prior --rejoin commits
+onto= try connecting new tree to an existing one
+rejoin merge the new branch back into HEAD
+ options for 'add', 'merge', 'pull' and 'push'
+squash merge subtree changes as a single commit
+"
+eval "$(echo "$OPTS_SPEC" | git rev-parse --parseopt -- "$@" || echo exit $?)"
+
+PATH=$PATH:$(git --exec-path)
+. git-sh-setup
+
+require_work_tree
+
+quiet=
+branch=
+debug=
+command=
+onto=
+rejoin=
+ignore_joins=
+annotate=
+squash=
+message=
+
+debug()
+{
+ if [ -n "$debug" ]; then
+ echo "$@" >&2
+ fi
+}
+
+say()
+{
+ if [ -z "$quiet" ]; then
+ echo "$@" >&2
+ fi
+}
+
+assert()
+{
+ if "$@"; then
+ :
+ else
+ die "assertion failed: " "$@"
+ fi
+}
+
+
+#echo "Options: $*"
+
+while [ $# -gt 0 ]; do
+ opt="$1"
+ shift
+ case "$opt" in
+ -q) quiet=1 ;;
+ -d) debug=1 ;;
+ --annotate) annotate="$1"; shift ;;
+ --no-annotate) annotate= ;;
+ -b) branch="$1"; shift ;;
+ -P) prefix="$1"; shift ;;
+ -m) message="$1"; shift ;;
+ --no-prefix) prefix= ;;
+ --onto) onto="$1"; shift ;;
+ --no-onto) onto= ;;
+ --rejoin) rejoin=1 ;;
+ --no-rejoin) rejoin= ;;
+ --ignore-joins) ignore_joins=1 ;;
+ --no-ignore-joins) ignore_joins= ;;
+ --squash) squash=1 ;;
+ --no-squash) squash= ;;
+ --) break ;;
+ *) die "Unexpected option: $opt" ;;
+ esac
+done
+
+command="$1"
+shift
+case "$command" in
+ add|merge|pull) default= ;;
+ split|push) default="--default HEAD" ;;
+ *) die "Unknown command '$command'" ;;
+esac
+
+if [ -z "$prefix" ]; then
+ die "You must provide the --prefix option."
+fi
+
+case "$command" in
+ add) [ -e "$prefix" ] &&
+ die "prefix '$prefix' already exists." ;;
+ *) [ -e "$prefix" ] ||
+ die "'$prefix' does not exist; use 'git subtree add'" ;;
+esac
+
+dir="$(dirname "$prefix/.")"
+
+if [ "$command" != "pull" -a "$command" != "add" -a "$command" != "push" ]; then
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ dirs="$(git rev-parse --no-revs --no-flags "$@")" || exit $?
+ if [ -n "$dirs" ]; then
+ die "Error: Use --prefix instead of bare filenames."
+ fi
+fi
+
+debug "command: {$command}"
+debug "quiet: {$quiet}"
+debug "revs: {$revs}"
+debug "dir: {$dir}"
+debug "opts: {$*}"
+debug
+
+cache_setup()
+{
+ cachedir="$GIT_DIR/subtree-cache/$$"
+ rm -rf "$cachedir" || die "Can't delete old cachedir: $cachedir"
+ mkdir -p "$cachedir" || die "Can't create new cachedir: $cachedir"
+ mkdir -p "$cachedir/notree" || die "Can't create new cachedir: $cachedir/notree"
+ debug "Using cachedir: $cachedir" >&2
+}
+
+cache_get()
+{
+ for oldrev in $*; do
+ if [ -r "$cachedir/$oldrev" ]; then
+ read newrev <"$cachedir/$oldrev"
+ echo $newrev
+ fi
+ done
+}
+
+cache_miss()
+{
+ for oldrev in $*; do
+ if [ ! -r "$cachedir/$oldrev" ]; then
+ echo $oldrev
+ fi
+ done
+}
+
+check_parents()
+{
+ missed=$(cache_miss $*)
+ for miss in $missed; do
+ if [ ! -r "$cachedir/notree/$miss" ]; then
+ debug " incorrect order: $miss"
+ fi
+ done
+}
+
+set_notree()
+{
+ echo "1" > "$cachedir/notree/$1"
+}
+
+cache_set()
+{
+ oldrev="$1"
+ newrev="$2"
+ if [ "$oldrev" != "latest_old" \
+ -a "$oldrev" != "latest_new" \
+ -a -e "$cachedir/$oldrev" ]; then
+ die "cache for $oldrev already exists!"
+ fi
+ echo "$newrev" >"$cachedir/$oldrev"
+}
+
+rev_exists()
+{
+ if git rev-parse "$1" >/dev/null 2>&1; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+rev_is_descendant_of_branch()
+{
+ newrev="$1"
+ branch="$2"
+ branch_hash=$(git rev-parse $branch)
+ match=$(git rev-list -1 $branch_hash ^$newrev)
+
+ if [ -z "$match" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+# if a commit doesn't have a parent, this might not work. But we only want
+# to remove the parent from the rev-list, and since it doesn't exist, it won't
+# be there anyway, so do nothing in that case.
+try_remove_previous()
+{
+ if rev_exists "$1^"; then
+ echo "^$1^"
+ fi
+}
+
+find_latest_squash()
+{
+ debug "Looking for latest squash ($dir)..."
+ dir="$1"
+ sq=
+ main=
+ sub=
+ git log --grep="^git-subtree-dir: $dir/*\$" \
+ --pretty=format:'START %H%n%s%n%n%b%nEND%n' HEAD |
+ while read a b junk; do
+ debug "$a $b $junk"
+ debug "{{$sq/$main/$sub}}"
+ case "$a" in
+ START) sq="$b" ;;
+ git-subtree-mainline:) main="$b" ;;
+ git-subtree-split:) sub="$b" ;;
+ END)
+ if [ -n "$sub" ]; then
+ if [ -n "$main" ]; then
+ # a rejoin commit?
+ # Pretend its sub was a squash.
+ sq="$sub"
+ fi
+ debug "Squash found: $sq $sub"
+ echo "$sq" "$sub"
+ break
+ fi
+ sq=
+ main=
+ sub=
+ ;;
+ esac
+ done
+}
+
+find_existing_splits()
+{
+ debug "Looking for prior splits..."
+ dir="$1"
+ revs="$2"
+ main=
+ sub=
+ git log --grep="^git-subtree-dir: $dir/*\$" \
+ --pretty=format:'START %H%n%s%n%n%b%nEND%n' $revs |
+ while read a b junk; do
+ case "$a" in
+ START) sq="$b" ;;
+ git-subtree-mainline:) main="$b" ;;
+ git-subtree-split:) sub="$b" ;;
+ END)
+ debug " Main is: '$main'"
+ if [ -z "$main" -a -n "$sub" ]; then
+ # squash commits refer to a subtree
+ debug " Squash: $sq from $sub"
+ cache_set "$sq" "$sub"
+ fi
+ if [ -n "$main" -a -n "$sub" ]; then
+ debug " Prior: $main -> $sub"
+ cache_set $main $sub
+ cache_set $sub $sub
+ try_remove_previous "$main"
+ try_remove_previous "$sub"
+ fi
+ main=
+ sub=
+ ;;
+ esac
+ done
+}
+
+copy_commit()
+{
+ # We're going to set some environment vars here, so
+ # do it in a subshell to get rid of them safely later
+ debug copy_commit "{$1}" "{$2}" "{$3}"
+ git log -1 --pretty=format:'%an%n%ae%n%ad%n%cn%n%ce%n%cd%n%B' "$1" |
+ (
+ read GIT_AUTHOR_NAME
+ read GIT_AUTHOR_EMAIL
+ read GIT_AUTHOR_DATE
+ read GIT_COMMITTER_NAME
+ read GIT_COMMITTER_EMAIL
+ read GIT_COMMITTER_DATE
+ export GIT_AUTHOR_NAME \
+ GIT_AUTHOR_EMAIL \
+ GIT_AUTHOR_DATE \
+ GIT_COMMITTER_NAME \
+ GIT_COMMITTER_EMAIL \
+ GIT_COMMITTER_DATE
+ (echo -n "$annotate"; cat ) |
+ git commit-tree "$2" $3 # reads the rest of stdin
+ ) || die "Can't copy commit $1"
+}
+
+add_msg()
+{
+ dir="$1"
+ latest_old="$2"
+ latest_new="$3"
+ if [ -n "$message" ]; then
+ commit_message="$message"
+ else
+ commit_message="Add '$dir/' from commit '$latest_new'"
+ fi
+ cat <<-EOF
+ $commit_message
+
+ git-subtree-dir: $dir
+ git-subtree-mainline: $latest_old
+ git-subtree-split: $latest_new
+ EOF
+}
+
+add_squashed_msg()
+{
+ if [ -n "$message" ]; then
+ echo "$message"
+ else
+ echo "Merge commit '$1' as '$2'"
+ fi
+}
+
+rejoin_msg()
+{
+ dir="$1"
+ latest_old="$2"
+ latest_new="$3"
+ if [ -n "$message" ]; then
+ commit_message="$message"
+ else
+ commit_message="Split '$dir/' into commit '$latest_new'"
+ fi
+ cat <<-EOF
+ $commit_message
+
+ git-subtree-dir: $dir
+ git-subtree-mainline: $latest_old
+ git-subtree-split: $latest_new
+ EOF
+}
+
+squash_msg()
+{
+ dir="$1"
+ oldsub="$2"
+ newsub="$3"
+ newsub_short=$(git rev-parse --short "$newsub")
+
+ if [ -n "$oldsub" ]; then
+ oldsub_short=$(git rev-parse --short "$oldsub")
+ echo "Squashed '$dir/' changes from $oldsub_short..$newsub_short"
+ echo
+ git log --pretty=tformat:'%h %s' "$oldsub..$newsub"
+ git log --pretty=tformat:'REVERT: %h %s' "$newsub..$oldsub"
+ else
+ echo "Squashed '$dir/' content from commit $newsub_short"
+ fi
+
+ echo
+ echo "git-subtree-dir: $dir"
+ echo "git-subtree-split: $newsub"
+}
+
+toptree_for_commit()
+{
+ commit="$1"
+ git log -1 --pretty=format:'%T' "$commit" -- || exit $?
+}
+
+subtree_for_commit()
+{
+ commit="$1"
+ dir="$2"
+ git ls-tree "$commit" -- "$dir" |
+ while read mode type tree name; do
+ assert [ "$name" = "$dir" ]
+ assert [ "$type" = "tree" -o "$type" = "commit" ]
+ [ "$type" = "commit" ] && continue # ignore submodules
+ echo $tree
+ break
+ done
+}
+
+tree_changed()
+{
+ tree=$1
+ shift
+ if [ $# -ne 1 ]; then
+ return 0 # weird parents, consider it changed
+ else
+ ptree=$(toptree_for_commit $1)
+ if [ "$ptree" != "$tree" ]; then
+ return 0 # changed
+ else
+ return 1 # not changed
+ fi
+ fi
+}
+
+new_squash_commit()
+{
+ old="$1"
+ oldsub="$2"
+ newsub="$3"
+ tree=$(toptree_for_commit $newsub) || exit $?
+ if [ -n "$old" ]; then
+ squash_msg "$dir" "$oldsub" "$newsub" |
+ git commit-tree "$tree" -p "$old" || exit $?
+ else
+ squash_msg "$dir" "" "$newsub" |
+ git commit-tree "$tree" || exit $?
+ fi
+}
+
+copy_or_skip()
+{
+ rev="$1"
+ tree="$2"
+ newparents="$3"
+ assert [ -n "$tree" ]
+
+ identical=
+ nonidentical=
+ p=
+ gotparents=
+ for parent in $newparents; do
+ ptree=$(toptree_for_commit $parent) || exit $?
+ [ -z "$ptree" ] && continue
+ if [ "$ptree" = "$tree" ]; then
+ # an identical parent could be used in place of this rev.
+ identical="$parent"
+ else
+ nonidentical="$parent"
+ fi
+
+ # sometimes both old parents map to the same newparent;
+ # eliminate duplicates
+ is_new=1
+ for gp in $gotparents; do
+ if [ "$gp" = "$parent" ]; then
+ is_new=
+ break
+ fi
+ done
+ if [ -n "$is_new" ]; then
+ gotparents="$gotparents $parent"
+ p="$p -p $parent"
+ fi
+ done
+
+ if [ -n "$identical" ]; then
+ echo $identical
+ else
+ copy_commit $rev $tree "$p" || exit $?
+ fi
+}
+
+ensure_clean()
+{
+ if ! git diff-index HEAD --exit-code --quiet 2>&1; then
+ die "Working tree has modifications. Cannot add."
+ fi
+ if ! git diff-index --cached HEAD --exit-code --quiet 2>&1; then
+ die "Index has modifications. Cannot add."
+ fi
+}
+
+cmd_add()
+{
+ if [ -e "$dir" ]; then
+ die "'$dir' already exists. Cannot add."
+ fi
+
+ ensure_clean
+
+ if [ $# -eq 1 ]; then
+ git rev-parse -q --verify "$1^{commit}" >/dev/null ||
+ die "'$1' does not refer to a commit"
+
+ "cmd_add_commit" "$@"
+ elif [ $# -eq 2 ]; then
+ # Technically we could accept a refspec here but we're
+ # just going to turn around and add FETCH_HEAD under the
+ # specified directory. Allowing a refspec might be
+ # misleading because we won't do anything with any other
+ # branches fetched via the refspec.
+ git rev-parse -q --verify "$2^{commit}" >/dev/null ||
+ die "'$2' does not refer to a commit"
+
+ "cmd_add_repository" "$@"
+ else
+ say "error: parameters were '$@'"
+ die "Provide either a commit or a repository and commit."
+ fi
+}
+
+cmd_add_repository()
+{
+ echo "git fetch" "$@"
+ repository=$1
+ refspec=$2
+ git fetch "$@" || exit $?
+ revs=FETCH_HEAD
+ set -- $revs
+ cmd_add_commit "$@"
+}
+
+cmd_add_commit()
+{
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ set -- $revs
+ rev="$1"
+
+ debug "Adding $dir as '$rev'..."
+ git read-tree --prefix="$dir" $rev || exit $?
+ git checkout -- "$dir" || exit $?
+ tree=$(git write-tree) || exit $?
+
+ headrev=$(git rev-parse HEAD) || exit $?
+ if [ -n "$headrev" -a "$headrev" != "$rev" ]; then
+ headp="-p $headrev"
+ else
+ headp=
+ fi
+
+ if [ -n "$squash" ]; then
+ rev=$(new_squash_commit "" "" "$rev") || exit $?
+ commit=$(add_squashed_msg "$rev" "$dir" |
+ git commit-tree $tree $headp -p "$rev") || exit $?
+ else
+ commit=$(add_msg "$dir" "$headrev" "$rev" |
+ git commit-tree $tree $headp -p "$rev") || exit $?
+ fi
+ git reset "$commit" || exit $?
+
+ say "Added dir '$dir'"
+}
+
+cmd_split()
+{
+ debug "Splitting $dir..."
+ cache_setup || exit $?
+
+ if [ -n "$onto" ]; then
+ debug "Reading history for --onto=$onto..."
+ git rev-list $onto |
+ while read rev; do
+ # the 'onto' history is already just the subdir, so
+ # any parent we find there can be used verbatim
+ debug " cache: $rev"
+ cache_set $rev $rev
+ done
+ fi
+
+ if [ -n "$ignore_joins" ]; then
+ unrevs=
+ else
+ unrevs="$(find_existing_splits "$dir" "$revs")"
+ fi
+
+ # We can't restrict rev-list to only $dir here, because some of our
+ # parents have the $dir contents at the root, and those won't match.
+ # (and rev-list --follow doesn't seem to solve this)
+ grl='git rev-list --topo-order --reverse --parents $revs $unrevs'
+ revmax=$(eval "$grl" | wc -l)
+ revcount=0
+ createcount=0
+ eval "$grl" |
+ while read rev parents; do
+ revcount=$(($revcount + 1))
+ say -n "$revcount/$revmax ($createcount) "
+ debug "Processing commit: $rev"
+ exists=$(cache_get $rev)
+ if [ -n "$exists" ]; then
+ debug " prior: $exists"
+ continue
+ fi
+ createcount=$(($createcount + 1))
+ debug " parents: $parents"
+ newparents=$(cache_get $parents)
+ debug " newparents: $newparents"
+
+ tree=$(subtree_for_commit $rev "$dir")
+ debug " tree is: $tree"
+
+ check_parents $parents
+
+ # ugly. is there no better way to tell if this is a subtree
+ # vs. a mainline commit? Does it matter?
+ if [ -z $tree ]; then
+ set_notree $rev
+ if [ -n "$newparents" ]; then
+ cache_set $rev $rev
+ fi
+ continue
+ fi
+
+ newrev=$(copy_or_skip "$rev" "$tree" "$newparents") || exit $?
+ debug " newrev is: $newrev"
+ cache_set $rev $newrev
+ cache_set latest_new $newrev
+ cache_set latest_old $rev
+ done || exit $?
+ latest_new=$(cache_get latest_new)
+ if [ -z "$latest_new" ]; then
+ die "No new revisions were found"
+ fi
+
+ if [ -n "$rejoin" ]; then
+ debug "Merging split branch into HEAD..."
+ latest_old=$(cache_get latest_old)
+ git merge -s ours \
+ -m "$(rejoin_msg $dir $latest_old $latest_new)" \
+ $latest_new >&2 || exit $?
+ fi
+ if [ -n "$branch" ]; then
+ if rev_exists "refs/heads/$branch"; then
+ if ! rev_is_descendant_of_branch $latest_new $branch; then
+ die "Branch '$branch' is not an ancestor of commit '$latest_new'."
+ fi
+ action='Updated'
+ else
+ action='Created'
+ fi
+ git update-ref -m 'subtree split' "refs/heads/$branch" $latest_new || exit $?
+ say "$action branch '$branch'"
+ fi
+ echo $latest_new
+ exit 0
+}
+
+cmd_merge()
+{
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ ensure_clean
+
+ set -- $revs
+ if [ $# -ne 1 ]; then
+ die "You must provide exactly one revision. Got: '$revs'"
+ fi
+ rev="$1"
+
+ if [ -n "$squash" ]; then
+ first_split="$(find_latest_squash "$dir")"
+ if [ -z "$first_split" ]; then
+ die "Can't squash-merge: '$dir' was never added."
+ fi
+ set $first_split
+ old=$1
+ sub=$2
+ if [ "$sub" = "$rev" ]; then
+ say "Subtree is already at commit $rev."
+ exit 0
+ fi
+ new=$(new_squash_commit "$old" "$sub" "$rev") || exit $?
+ debug "New squash commit: $new"
+ rev="$new"
+ fi
+
+ version=$(git version)
+ if [ "$version" \< "git version 1.7" ]; then
+ if [ -n "$message" ]; then
+ git merge -s subtree --message="$message" $rev
+ else
+ git merge -s subtree $rev
+ fi
+ else
+ if [ -n "$message" ]; then
+ git merge -Xsubtree="$prefix" --message="$message" $rev
+ else
+ git merge -Xsubtree="$prefix" $rev
+ fi
+ fi
+}
+
+cmd_pull()
+{
+ ensure_clean
+ git fetch "$@" || exit $?
+ revs=FETCH_HEAD
+ set -- $revs
+ cmd_merge "$@"
+}
+
+cmd_push()
+{
+ if [ $# -ne 2 ]; then
+ die "You must provide <repository> <refspec>"
+ fi
+ if [ -e "$dir" ]; then
+ repository=$1
+ refspec=$2
+ echo "git push using: " $repository $refspec
+ localrev=$(git subtree split --prefix="$prefix") || die
+ git push $repository $localrev:refs/heads/$refspec
+ else
+ die "'$dir' must already exist. Try 'git subtree add'."
+ fi
+}
+
+"cmd_$command" "$@"
diff --git a/contrib/subtree/git-subtree.txt b/contrib/subtree/git-subtree.txt
new file mode 100644
index 0000000..7ba853e
--- /dev/null
+++ b/contrib/subtree/git-subtree.txt
@@ -0,0 +1,367 @@
+git-subtree(1)
+==============
+
+NAME
+----
+git-subtree - Merge subtrees together and split repository into subtrees
+
+
+SYNOPSIS
+--------
+[verse]
+'git subtree' add -P <prefix> <refspec>
+'git subtree' add -P <prefix> <repository> <refspec>
+'git subtree' pull -P <prefix> <repository> <refspec...>
+'git subtree' push -P <prefix> <repository> <refspec...>
+'git subtree' merge -P <prefix> <commit>
+'git subtree' split -P <prefix> [OPTIONS] [<commit>]
+
+
+DESCRIPTION
+-----------
+Subtrees allow subprojects to be included within a subdirectory
+of the main project, optionally including the subproject's
+entire history.
+
+For example, you could include the source code for a library
+as a subdirectory of your application.
+
+Subtrees are not to be confused with submodules, which are meant for
+the same task. Unlike submodules, subtrees do not need any special
+constructions (like .gitmodules files or gitlinks) to be present in
+your repository, and do not force end-users of your
+repository to do anything special or to understand how subtrees
+work. A subtree is just a subdirectory that can be
+committed to, branched, and merged along with your project in
+any way you want.
+
+They are also not to be confused with using the subtree merge
+strategy. The main difference is that, besides merging
+the other project as a subdirectory, you can also extract the
+entire history of a subdirectory from your project and make it
+into a standalone project. Unlike the subtree merge strategy
+you can alternate back and forth between these
+two operations. If the standalone library gets updated, you can
+automatically merge the changes into your project; if you
+update the library inside your project, you can "split" the
+changes back out again and merge them back into the library
+project.
+
+For example, if a library you made for one application ends up being
+useful elsewhere, you can extract its entire history and publish
+that as its own git repository, without accidentally
+intermingling the history of your application project.
+
+[TIP]
+In order to keep your commit messages clean, we recommend that
+people split their commits between the subtrees and the main
+project as much as possible. That is, if you make a change that
+affects both the library and the main application, commit it in
+two pieces. That way, when you split the library commits out
+later, their descriptions will still make sense. But if this
+isn't important to you, it's not *necessary*. git subtree will
+simply leave out the non-library-related parts of the commit
+when it splits it out into the subproject later.
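+
+For example, a change that touches both the library (here assumed to
+live under 'lib/'; the paths and messages are only illustrative) and
+the rest of the application could be committed in two pieces like this:
+
+	$ git commit -m "lib: add frobnicate helper" lib/
+	$ git commit -m "app: use frobnicate" src/main.c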
+
+
+COMMANDS
+--------
+add::
+ Create the <prefix> subtree by importing its contents
+ from the given <refspec> or <repository> and remote <refspec>.
+ A new commit is created automatically, joining the imported
+ project's history with your own. With '--squash', imports
+ only a single commit from the subproject, rather than its
+ entire history.
+
+merge::
+ Merge recent changes up to <commit> into the <prefix>
+ subtree. As with normal 'git merge', this doesn't
+ remove your own local changes; it just merges those
+ changes into the latest <commit>. With '--squash',
+ creates only one commit that contains all the changes,
+ rather than merging in the entire history.
+
+ If you use '--squash', the merge direction doesn't
+ always have to be forward; you can use this command to
+ go back in time from v2.5 to v2.4, for example. If your
+ merge introduces a conflict, you can resolve it in the
+ usual ways.
+
+pull::
+ Exactly like 'merge', but parallels 'git pull' in that
+ it fetches the given commit from the specified remote
+ repository.
+
+push::
+ Does a 'split' (see below) using the <prefix> supplied
+ and then does a 'git push' to push the result to the
+ repository and refspec. This can be used to push your
+ subtree to different branches of the remote repository.
+
+split::
+ Extract a new, synthetic project history from the
+ history of the <prefix> subtree. The new history
+ includes only the commits (including merges) that
+ affected <prefix>, and each of those commits now has the
+ contents of <prefix> at the root of the project instead
+ of in a subdirectory. Thus, the newly created history
+ is suitable for export as a separate git repository.
+
+ After splitting successfully, a single commit id is
+ printed to stdout. This corresponds to the HEAD of the
+ newly created tree, which you can manipulate however you
+ want.
+
+ Repeated splits of exactly the same history are
+ guaranteed to be identical (ie. to produce the same
+ commit ids). Because of this, if you add new commits
+ and then re-split, the new commits will be attached as
+ commits on top of the history you generated last time,
+ so 'git merge' and friends will work as expected.
+
+	Note that if you use '--squash' when you merge, you
+	should usually not use '--rejoin' when you split.
+
+
+OPTIONS
+-------
+-q::
+--quiet::
+ Suppress unnecessary output messages on stderr.
+
+-d::
+--debug::
+ Produce even more unnecessary output messages on stderr.
+
+-P <prefix>::
+--prefix=<prefix>::
+ Specify the path in the repository to the subtree you
+ want to manipulate. This option is mandatory
+ for all commands.
+
+-m <message>::
+--message=<message>::
+ This option is only valid for add, merge and pull (unsure).
+ Specify <message> as the commit message for the merge commit.
+
+
+OPTIONS FOR add, merge, push, pull
+----------------------------------
+--squash::
+ This option is only valid for add, merge, push and pull
+ commands.
+
+ Instead of merging the entire history from the subtree
+ project, produce only a single commit that contains all
+ the differences you want to merge, and then merge that
+ new commit into your project.
+
+ Using this option helps to reduce log clutter. People
+ rarely want to see every change that happened between
+ v1.0 and v1.1 of the library they're using, since none of the
+ interim versions were ever included in their application.
+
+ Using '--squash' also helps avoid problems when the same
+ subproject is included multiple times in the same
+ project, or is removed and then re-added. In such a
+ case, it doesn't make sense to combine the histories
+ anyway, since it's unclear which part of the history
+ belongs to which subtree.
+
+ Furthermore, with '--squash', you can switch back and
+ forth between different versions of a subtree, rather
+ than strictly forward. 'git subtree merge --squash'
+	always adjusts the subtree to match exactly the
+	specified commit, even if getting to that commit would
+ require undoing some changes that were added earlier.
+
+ Whether or not you use '--squash', changes made in your
+	local repository remain intact and can later be split
+	and sent upstream to the subproject.
+
+
+OPTIONS FOR split
+-----------------
+--annotate=<annotation>::
+ This option is only valid for the split command.
+
+ When generating synthetic history, add <annotation> as a
+ prefix to each commit message. Since we're creating new
+ commits with the same commit message, but possibly
+ different content, from the original commits, this can help
+ to differentiate them and avoid confusion.
+
+	Whenever you split, you need to use the same
+	<annotation>; otherwise you have no guarantee that the
+	newly re-created history will be identical to the old
+	one, which will prevent merging from working correctly.
+ git subtree tries to make it work anyway, particularly
+ if you use --rejoin, but it may not always be effective.
+
+-b <branch>::
+--branch=<branch>::
+ This option is only valid for the split command.
+
+ After generating the synthetic history, create a new
+ branch called <branch> that contains the new history.
+ This is suitable for immediate pushing upstream.
+ <branch> must not already exist.
+
+--ignore-joins::
+ This option is only valid for the split command.
+
+ If you use '--rejoin', git subtree attempts to optimize
+ its history reconstruction to generate only the new
+	commits since the last '--rejoin'. '--ignore-joins'
+ disables this behaviour, forcing it to regenerate the
+ entire history. In a large project, this can take a
+ long time.
+
+--onto=<onto>::
+ This option is only valid for the split command.
+
+ If your subtree was originally imported using something
+ other than git subtree, its history may not match what
+ git subtree is expecting. In that case, you can specify
+ the commit id <onto> that corresponds to the first
+ revision of the subproject's history that was imported
+ into your project, and git subtree will attempt to build
+ its history from there.
+
+ If you used 'git subtree add', you should never need
+ this option.
+
+--rejoin::
+ This option is only valid for the split command.
+
+ After splitting, merge the newly created synthetic
+ history back into your main project. That way, future
+ splits can search only the part of history that has
+ been added since the most recent --rejoin.
+
+ If your split commits end up merged into the upstream
+ subproject, and then you want to get the latest upstream
+ version, this will allow git's merge algorithm to more
+ intelligently avoid conflicts (since it knows these
+ synthetic commits are already part of the upstream
+ repository).
+
+ Unfortunately, using this option results in 'git log'
+ showing an extra copy of every new commit that was
+ created (the original, and the synthetic one).
+
+ If you do all your merges with '--squash', don't use
+ '--rejoin' when you split, because you don't want the
+ subproject's history to be part of your project anyway.
+
+
+EXAMPLE 1. Add command
+----------------------
+Let's assume that you have a local repository that you would like
+to add an external vendor library to. In this case we will add the
+git-subtree repository as a subdirectory of your already existing
+git-extensions repository in ~/git-extensions/:
+
+ $ git subtree add --prefix=git-subtree --squash \
+ git://github.com/apenwarr/git-subtree.git master
+
+Here 'master' needs to be a valid remote ref; any other valid branch
+name of the remote repository can be used instead.
+
+You can omit the --squash flag, but doing so will increase the number
+of commits that are included in your local repository.
+
+We now have a ~/git-extensions/git-subtree directory containing code
+from the master branch of git://github.com/apenwarr/git-subtree.git
+in our git-extensions repository.
+
+EXAMPLE 2. Extract a subtree using commit, merge and pull
+---------------------------------------------------------
+Let's use the repository for the git source code as an example.
+First, get your own copy of the git.git repository:
+
+ $ git clone git://git.kernel.org/pub/scm/git/git.git test-git
+ $ cd test-git
+
+gitweb (commit 1130ef3) was merged into git as of commit
+0a8f4f0, after which it was no longer maintained separately.
+But imagine it had been maintained separately, and we wanted to
+extract git's changes to gitweb since that time, to share with
+the upstream. You could do this:
+
+ $ git subtree split --prefix=gitweb --annotate='(split) ' \
+ 0a8f4f0^.. --onto=1130ef3 --rejoin \
+ --branch gitweb-latest
+ $ gitk gitweb-latest
+ $ git push git@github.com:whatever/gitweb.git gitweb-latest:master
+
+(We use '0a8f4f0^..' because that means "all the changes from
+0a8f4f0 to the current version, including 0a8f4f0 itself.")
+
+If gitweb had originally been merged using 'git subtree add' (or
+a previous split had already been done with --rejoin specified)
+then you can do all your splits without having to remember any
+weird commit ids:
+
+ $ git subtree split --prefix=gitweb --annotate='(split) ' --rejoin \
+ --branch gitweb-latest2
+
+And you can merge changes back in from the upstream project just
+as easily:
+
+ $ git subtree pull --prefix=gitweb \
+ git@github.com:whatever/gitweb.git master
+
+Or, using '--squash', you can actually rewind to an earlier
+version of gitweb:
+
+ $ git subtree merge --prefix=gitweb --squash gitweb-latest~10
+
+Then make some changes:
+
+ $ date >gitweb/myfile
+ $ git add gitweb/myfile
+ $ git commit -m 'created myfile'
+
+And fast forward again:
+
+ $ git subtree merge --prefix=gitweb --squash gitweb-latest
+
+And notice that your change is still intact:
+
+ $ ls -l gitweb/myfile
+
+And you can split it out and look at your changes versus
+the standard gitweb:
+
+	$ git log gitweb-latest..$(git subtree split --prefix=gitweb)
+
+EXAMPLE 3. Extract a subtree using branch
+-----------------------------------------
+Suppose you have a source directory with many files and
+subdirectories, and you want to extract the lib directory to its own
+git project. Here's a short way to do it:
+
+First, make the new repository wherever you want:
+
+ $ <go to the new location>
+ $ git init --bare
+
+Back in your original directory:
+
+ $ git subtree split --prefix=lib --annotate="(split)" -b split
+
+Then push the new branch onto the new empty repository:
+
+ $ git push <new-repo> split:master
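+
+If later changes to lib/ are made inside the original project, they can
+be split and pushed in a single step with 'git subtree push' (using the
+same placeholder <new-repo> as above):
+
+	$ git subtree push --prefix=lib <new-repo> master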
+
+
+AUTHOR
+------
+Written by Avery Pennarun <apenwarr@gmail.com>
+
+
+GIT
+---
+Part of the linkgit:git[1] suite
diff --git a/contrib/subtree/t/Makefile b/contrib/subtree/t/Makefile
new file mode 100644
index 0000000..c864810
--- /dev/null
+++ b/contrib/subtree/t/Makefile
@@ -0,0 +1,69 @@
+# Run tests
+#
+# Copyright (c) 2005 Junio C Hamano
+#
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+#GIT_TEST_OPTS=--verbose --debug
+SHELL_PATH ?= $(SHELL)
+PERL_PATH ?= /usr/bin/perl
+TAR ?= $(TAR)
+RM ?= rm -f
+PROVE ?= prove
+DEFAULT_TEST_TARGET ?= test
+
+# Shell quote;
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+all: $(DEFAULT_TEST_TARGET)
+
+test: pre-clean $(TEST_LINT)
+ $(MAKE) aggregate-results-and-cleanup
+
+prove: pre-clean $(TEST_LINT)
+ @echo "*** prove ***"; GIT_CONFIG=.git/config $(PROVE) --exec '$(SHELL_PATH_SQ)' $(GIT_PROVE_OPTS) $(T) :: $(GIT_TEST_OPTS)
+ $(MAKE) clean
+
+$(T):
+ @echo "*** $@ ***"; GIT_CONFIG=.git/config '$(SHELL_PATH_SQ)' $@ $(GIT_TEST_OPTS)
+
+pre-clean:
+ $(RM) -r test-results
+
+clean:
+ $(RM) -r 'trash directory'.* test-results
+ $(RM) -r valgrind/bin
+ $(RM) .prove
+
+test-lint: test-lint-duplicates test-lint-executable
+
+test-lint-duplicates:
+ @dups=`echo $(T) | tr ' ' '\n' | sed 's/-.*//' | sort | uniq -d` && \
+ test -z "$$dups" || { \
+ echo >&2 "duplicate test numbers:" $$dups; exit 1; }
+
+test-lint-executable:
+ @bad=`for i in $(T); do test -x "$$i" || echo $$i; done` && \
+ test -z "$$bad" || { \
+ echo >&2 "non-executable tests:" $$bad; exit 1; }
+
+aggregate-results-and-cleanup: $(T)
+ $(MAKE) aggregate-results
+ $(MAKE) clean
+
+aggregate-results:
+ for f in ../../../t/test-results/t*-*.counts; do \
+ echo "$$f"; \
+ done | '$(SHELL_PATH_SQ)' ../../../t/aggregate-results.sh
+
+valgrind:
+ $(MAKE) GIT_TEST_OPTS="$(GIT_TEST_OPTS) --valgrind"
+
+test-results:
+ mkdir -p test-results
+
+.PHONY: pre-clean $(T) aggregate-results clean valgrind
diff --git a/contrib/subtree/t/t7900-subtree.sh b/contrib/subtree/t/t7900-subtree.sh
new file mode 100755
index 0000000..b0f8536
--- /dev/null
+++ b/contrib/subtree/t/t7900-subtree.sh
@@ -0,0 +1,468 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Avery Pennarun
+#
+test_description='Basic porcelain support for subtrees
+
+This test verifies the basic operation of the merge, pull, add
+and split subcommands of git subtree.
+'
+
+export TEST_DIRECTORY=$(pwd)/../../../t
+
+. ../../../t/test-lib.sh
+
+create()
+{
+ echo "$1" >"$1"
+ git add "$1"
+}
+
+
+check_equal()
+{
+ test_debug 'echo'
+ test_debug "echo \"check a:\" \"{$1}\""
+ test_debug "echo \" b:\" \"{$2}\""
+ if [ "$1" = "$2" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+fixnl()
+{
+ t=""
+ while read x; do
+ t="$t$x "
+ done
+ echo $t
+}
+
+multiline()
+{
+ while read x; do
+ set -- $x
+ for d in "$@"; do
+ echo "$d"
+ done
+ done
+}
+
+undo()
+{
+ git reset --hard HEAD~
+}
+
+last_commit_message()
+{
+ git log --pretty=format:%s -1
+}
+
+test_expect_success 'init subproj' '
+ test_create_repo subproj
+'
+
+# To the subproject!
+cd subproj
+
+test_expect_success 'add sub1' '
+ create sub1 &&
+ git commit -m "sub1" &&
+ git branch sub1 &&
+ git branch -m master subproj
+'
+
+# Save this hash for testing later.
+
+subdir_hash=`git rev-parse HEAD`
+
+test_expect_success 'add sub2' '
+ create sub2 &&
+ git commit -m "sub2" &&
+ git branch sub2
+'
+
+test_expect_success 'add sub3' '
+ create sub3 &&
+ git commit -m "sub3" &&
+ git branch sub3
+'
+
+# Back to mainline
+cd ..
+
+test_expect_success 'add main4' '
+ create main4 &&
+ git commit -m "main4" &&
+ git branch -m master mainline &&
+ git branch subdir
+'
+
+test_expect_success 'fetch subproj history' '
+ git fetch ./subproj sub1 &&
+ git branch sub1 FETCH_HEAD
+'
+
+test_expect_success 'no subtree exists in main tree' '
+ test_must_fail git subtree merge --prefix=subdir sub1
+'
+
+test_expect_success 'no pull from non-existent subtree' '
+ test_must_fail git subtree pull --prefix=subdir ./subproj sub1
+'
+
+test_expect_success 'check if --message works for add' '
+ git subtree add --prefix=subdir --message="Added subproject" sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject" &&
+ undo
+'
+
+test_expect_success 'check if --message works as -m and --prefix as -P' '
+ git subtree add -P subdir -m "Added subproject using git subtree" sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject using git subtree" &&
+ undo
+'
+
+test_expect_success 'check if --message works with squash too' '
+ git subtree add -P subdir -m "Added subproject with squash" --squash sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject with squash" &&
+ undo
+'
+
+test_expect_success 'add subproj to mainline' '
+ git subtree add --prefix=subdir/ FETCH_HEAD &&
+ check_equal ''"$(last_commit_message)"'' "Add '"'subdir/'"' from commit '"'"'''"$(git rev-parse sub1)"'''"'"'"
+'
+
+# this shouldn't actually do anything, since FETCH_HEAD is already a parent
+test_expect_success 'merge fetched subproj' '
+ git merge -m "merge -s -ours" -s ours FETCH_HEAD
+'
+
+test_expect_success 'add main-sub5' '
+ create subdir/main-sub5 &&
+ git commit -m "main-sub5"
+'
+
+test_expect_success 'add main6' '
+ create main6 &&
+ git commit -m "main6 boring"
+'
+
+test_expect_success 'add main-sub7' '
+ create subdir/main-sub7 &&
+ git commit -m "main-sub7"
+'
+
+test_expect_success 'fetch new subproj history' '
+ git fetch ./subproj sub2 &&
+ git branch sub2 FETCH_HEAD
+'
+
+test_expect_success 'check if --message works for merge' '
+ git subtree merge --prefix=subdir -m "Merged changes from subproject" sub2 &&
+ check_equal ''"$(last_commit_message)"'' "Merged changes from subproject" &&
+ undo
+'
+
+test_expect_success 'check if --message for merge works with squash too' '
+ git subtree merge --prefix subdir -m "Merged changes from subproject using squash" --squash sub2 &&
+ check_equal ''"$(last_commit_message)"'' "Merged changes from subproject using squash" &&
+ undo
+'
+
+test_expect_success 'merge new subproj history into subdir' '
+ git subtree merge --prefix=subdir FETCH_HEAD &&
+ git branch pre-split &&
+ check_equal ''"$(last_commit_message)"'' "Merge commit '"'"'"$(git rev-parse sub2)"'"'"' into mainline"
+'
+
+test_expect_success 'Check that prefix argument is required for split' '
+ echo "You must provide the --prefix option." > expected &&
+ test_must_fail git subtree split > actual 2>&1 &&
+ test_debug "echo -n expected: " &&
+ test_debug "cat expected" &&
+ test_debug "echo -n actual: " &&
+ test_debug "cat actual" &&
+ test_cmp expected actual &&
+ rm -f expected actual
+'
+
+test_expect_success 'Check that the <prefix> exists for a split' '
+ echo "'"'"'non-existent-directory'"'"'" does not exist\; use "'"'"'git subtree add'"'"'" > expected &&
+ test_must_fail git subtree split --prefix=non-existent-directory > actual 2>&1 &&
+ test_debug "echo -n expected: " &&
+ test_debug "cat expected" &&
+ test_debug "echo -n actual: " &&
+ test_debug "cat actual" &&
+ test_cmp expected actual
+# rm -f expected actual
+'
+
+test_expect_success 'check if --message works for split+rejoin' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ git branch spl1 "$spl1" &&
+ check_equal ''"$(last_commit_message)"'' "Split & rejoin" &&
+ undo
+'
+
+test_expect_success 'check split with --branch' '
+ spl1=$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin) &&
+ undo &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --branch splitbr1 &&
+ check_equal ''"$(git rev-parse splitbr1)"'' "$spl1"
+'
+
+test_expect_success 'check hash of split' '
+ spl1=$(git subtree split --prefix subdir) &&
+ undo &&
+ git subtree split --prefix subdir --branch splitbr1test &&
+	check_equal ''"$(git rev-parse splitbr1test)"'' "$spl1" &&
+ git checkout splitbr1test &&
+ new_hash=$(git rev-parse HEAD~2) &&
+ git checkout mainline &&
+ check_equal ''"$new_hash"'' "$subdir_hash"
+'
+
+test_expect_success 'check split with --branch for an existing branch' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ undo &&
+ git branch splitbr2 sub1 &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --branch splitbr2 &&
+ check_equal ''"$(git rev-parse splitbr2)"'' "$spl1"
+'
+
+test_expect_success 'check split with --branch for an incompatible branch' '
+ test_must_fail git subtree split --prefix subdir --onto FETCH_HEAD --branch subdir
+'
+
+test_expect_success 'check split+rejoin' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ undo &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --rejoin &&
+ check_equal ''"$(last_commit_message)"'' "Split '"'"'subdir/'"'"' into commit '"'"'"$spl1"'"'"'"
+'
+
+test_expect_success 'add main-sub8' '
+ create subdir/main-sub8 &&
+ git commit -m "main-sub8"
+'
+
+# To the subproject!
+cd ./subproj
+
+test_expect_success 'merge split into subproj' '
+ git fetch .. spl1 &&
+ git branch spl1 FETCH_HEAD &&
+ git merge FETCH_HEAD
+'
+
+test_expect_success 'add sub9' '
+ create sub9 &&
+ git commit -m "sub9"
+'
+
+# Back to mainline
+cd ..
+
+test_expect_success 'split for sub8' '
+	split2=''"$(git subtree split --annotate='"'*'"' --prefix subdir/ --rejoin)"'' &&
+ git branch split2 "$split2"
+'
+
+test_expect_success 'add main-sub10' '
+ create subdir/main-sub10 &&
+ git commit -m "main-sub10"
+'
+
+test_expect_success 'split for sub10' '
+ spl3=''"$(git subtree split --annotate='"'*'"' --prefix subdir --rejoin)"'' &&
+ git branch spl3 "$spl3"
+'
+
+# To the subproject!
+cd ./subproj
+
+test_expect_success 'merge split into subproj' '
+ git fetch .. spl3 &&
+ git branch spl3 FETCH_HEAD &&
+ git merge FETCH_HEAD &&
+ git branch subproj-merge-spl3
+'
+
+chkm="main4 main6"
+chkms="main-sub10 main-sub5 main-sub7 main-sub8"
+chkms_sub=$(echo $chkms | multiline | sed 's,^,subdir/,' | fixnl)
+chks="sub1 sub2 sub3 sub9"
+chks_sub=$(echo $chks | multiline | sed 's,^,subdir/,' | fixnl)
+
+test_expect_success 'make sure exactly the right set of files ends up in the subproj' '
+ subfiles=''"$(git ls-files | fixnl)"'' &&
+ check_equal "$subfiles" "$chkms $chks"
+'
+
+test_expect_success 'make sure the subproj history *only* contains commits that affect the subdir' '
+ allchanges=''"$(git log --name-only --pretty=format:'"''"' | sort | fixnl)"'' &&
+ check_equal "$allchanges" "$chkms $chks"
+'
+
+# Back to mainline
+cd ..
+
+test_expect_success 'pull from subproj' '
+ git fetch ./subproj subproj-merge-spl3 &&
+ git branch subproj-merge-spl3 FETCH_HEAD &&
+ git subtree pull --prefix=subdir ./subproj subproj-merge-spl3
+'
+
+test_expect_success 'make sure exactly the right set of files ends up in the mainline' '
+ mainfiles=''"$(git ls-files | fixnl)"'' &&
+ check_equal "$mainfiles" "$chkm $chkms_sub $chks_sub"
+'
+
+test_expect_success 'make sure each filename changed exactly once in the entire history' '
+ # main-sub?? and /subdir/main-sub?? both change, because those are the
+ # changes that were split into their own history. And subdir/sub?? never
+ # change, since they were *only* changed in the subtree branch.
+ allchanges=''"$(git log --name-only --pretty=format:'"''"' | sort | fixnl)"'' &&
+ check_equal "$allchanges" ''"$(echo $chkms $chkm $chks $chkms_sub | multiline | sort | fixnl)"''
+'
+
+test_expect_success 'make sure the --rejoin commits never make it into subproj' '
+ check_equal ''"$(git log --pretty=format:'"'%s'"' HEAD^2 | grep -i split)"'' ""
+'
+
+test_expect_success 'make sure no "git subtree" tagged commits make it into subproj' '
+ # They are meaningless to subproj since one side of the merge refers to the mainline
+ check_equal ''"$(git log --pretty=format:'"'%s%n%b'"' HEAD^2 | grep "git-subtree.*:")"'' ""
+'
+
+# prepare second pair of repositories
+mkdir test2
+cd test2
+
+test_expect_success 'init main' '
+ test_create_repo main
+'
+
+cd main
+
+test_expect_success 'add main1' '
+ create main1 &&
+ git commit -m "main1"
+'
+
+cd ..
+
+test_expect_success 'init sub' '
+ test_create_repo sub
+'
+
+cd sub
+
+test_expect_success 'add sub2' '
+ create sub2 &&
+ git commit -m "sub2"
+'
+
+cd ../main
+
+# check if split can find proper base without --onto
+
+test_expect_success 'add sub as subdir in main' '
+ git fetch ../sub master &&
+ git branch sub2 FETCH_HEAD &&
+ git subtree add --prefix subdir sub2
+'
+
+cd ../sub
+
+test_expect_success 'add sub3' '
+ create sub3 &&
+ git commit -m "sub3"
+'
+
+cd ../main
+
+test_expect_success 'merge from sub' '
+ git fetch ../sub master &&
+ git branch sub3 FETCH_HEAD &&
+ git subtree merge --prefix subdir sub3
+'
+
+test_expect_success 'add main-sub4' '
+ create subdir/main-sub4 &&
+ git commit -m "main-sub4"
+'
+
+test_expect_success 'split for main-sub4 without --onto' '
+ git subtree split --prefix subdir --branch mainsub4
+'
+
+# At this point, the new commit's parent should be sub3. If it is not,
+# something went wrong (the "newparent" of the "master~" commit should
+# have been sub3, but it was not, because its cache was not set to
+# itself).
+
+test_expect_success 'check that the commit parent is sub3' '
+ check_equal ''"$(git log --pretty=format:%P -1 mainsub4)"'' ''"$(git rev-parse sub3)"''
+'
+
+test_expect_success 'add main-sub5' '
+ mkdir subdir2 &&
+ create subdir2/main-sub5 &&
+ git commit -m "main-sub5"
+'
+
+test_expect_success 'split for main-sub5 without --onto' '
+ # also test that we still can split out an entirely new subtree
+ # if the parent of the first commit in the tree is not empty,
+ # then the new subtree has accidentally been attached to something
+ git subtree split --prefix subdir2 --branch mainsub5 &&
+ check_equal ''"$(git log --pretty=format:%P -1 mainsub5)"'' ""
+'
+
+# make sure no patch changes more than one file. The original set of commits
+# changed only one file each. A multi-file change would imply that we pruned
+# commits too aggressively.
+joincommits()
+{
+ commit=
+ all=
+ while read x y; do
+ #echo "{$x}" >&2
+ if [ -z "$x" ]; then
+ continue
+ elif [ "$x" = "commit:" ]; then
+ if [ -n "$commit" ]; then
+ echo "$commit $all"
+ all=
+ fi
+ commit="$y"
+ else
+ all="$all $y"
+ fi
+ done
+ echo "$commit $all"
+}
+
+test_expect_success 'verify one file change per commit' '
+ x= &&
+ list=''"$(git log --pretty=format:'"'commit: %H'"' | joincommits)"'' &&
+# test_debug "echo HERE" &&
+# test_debug "echo ''"$list"''" &&
+ (git log --pretty=format:'"'commit: %H'"' | joincommits |
+ ( while read commit a b; do
+ test_debug "echo Verifying commit "''"$commit"''
+ test_debug "echo a: "''"$a"''
+ test_debug "echo b: "''"$b"''
+ check_equal "$b" ""
+ x=1
+ done
+ check_equal "$x" 1
+ ))
+'
+
+test_done
diff --git a/contrib/subtree/todo b/contrib/subtree/todo
new file mode 100644
index 0000000..7e44b00
--- /dev/null
+++ b/contrib/subtree/todo
@@ -0,0 +1,50 @@
+
+ delete tempdir
+
+ 'git subtree rejoin' option to do the same as --rejoin, eg. after a
+ rebase
+
+ --prefix doesn't force the subtree correctly in merge/pull:
+ "-s subtree" should be given an explicit subtree option?
+ There doesn't seem to be a way to do this. We'd have to
+ patch git-merge-subtree. Ugh.
+ (but we could avoid this problem by generating squashes with
+ exactly the right subtree structure, rather than using
+ subtree merge...)
+
+ add a 'push' subcommand to parallel 'pull'
+
+ add a 'log' subcommand to see what's new in a subtree?
+
+ add to-submodule and from-submodule commands
+
+ automated tests for --squash stuff
+
+ "add" command non-obviously requires a commitid; would be easier if
+ it had a "pull" sort of mode instead
+
+ "pull" and "merge" commands should fail if you've never merged
+ that --prefix before
+
+ docs should provide an example of "add"
+
+ note that the initial split doesn't *have* to have a commitid
+ specified... that's just an optimization
+
+ if you try to add (or maybe merge?) with an invalid commitid, you
+ get a misleading "prefix must end with /" message from
+ one of the other git tools that git-subtree calls. Should
+ detect this situation and print the *real* problem.
+
+ "pull --squash" should do fetch-synthesize-merge, but instead just
+ does "pull" directly, which doesn't work at all.
+
+ make a 'force-update' that does what 'add' does even if the subtree
+ already exists. That way we can help people who imported
+ subtrees "incorrectly" (eg. by just copying in the files) in
+ the past.
+
+ guess --prefix automatically if possible based on pwd
+
+ make a 'git subtree grafts' that automatically expands --squash'd
+ commits so you can see the full history if you want it.
diff --git a/contrib/svn-fe/.gitignore b/contrib/svn-fe/.gitignore
new file mode 100644
index 0000000..02a7791
--- /dev/null
+++ b/contrib/svn-fe/.gitignore
@@ -0,0 +1,4 @@
+/*.xml
+/*.1
+/*.html
+/svn-fe
diff --git a/contrib/svn-fe/Makefile b/contrib/svn-fe/Makefile
new file mode 100644
index 0000000..360d8da
--- /dev/null
+++ b/contrib/svn-fe/Makefile
@@ -0,0 +1,63 @@
+all:: svn-fe$X
+
+CC = gcc
+RM = rm -f
+MV = mv
+
+CFLAGS = -g -O2 -Wall
+LDFLAGS =
+ALL_CFLAGS = $(CFLAGS)
+ALL_LDFLAGS = $(LDFLAGS)
+EXTLIBS =
+
+GIT_LIB = ../../libgit.a
+VCSSVN_LIB = ../../vcs-svn/lib.a
+LIBS = $(VCSSVN_LIB) $(GIT_LIB) $(EXTLIBS)
+
+QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
+QUIET_SUBDIR1 =
+
+ifneq ($(findstring $(MAKEFLAGS),w),w)
+PRINT_DIR = --no-print-directory
+else # "make -w"
+NO_SUBDIR = :
+endif
+
+ifneq ($(findstring $(MAKEFLAGS),s),s)
+ifndef V
+ QUIET_CC = @echo ' ' CC $@;
+ QUIET_LINK = @echo ' ' LINK $@;
+ QUIET_SUBDIR0 = +@subdir=
+ QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \
+ $(MAKE) $(PRINT_DIR) -C $$subdir
+endif
+endif
+
+svn-fe$X: svn-fe.o $(VCSSVN_LIB) $(GIT_LIB)
+ $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ svn-fe.o \
+ $(ALL_LDFLAGS) $(LIBS)
+
+svn-fe.o: svn-fe.c ../../vcs-svn/svndump.h
+ $(QUIET_CC)$(CC) -I../../vcs-svn -o $*.o -c $(ALL_CFLAGS) $<
+
+svn-fe.html: svn-fe.txt
+ $(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
+ MAN_TXT=../contrib/svn-fe/svn-fe.txt \
+ ../contrib/svn-fe/$@
+
+svn-fe.1: svn-fe.txt
+ $(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
+ MAN_TXT=../contrib/svn-fe/svn-fe.txt \
+ ../contrib/svn-fe/$@
+ $(MV) ../../Documentation/svn-fe.1 .
+
+../../vcs-svn/lib.a: FORCE
+ $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) vcs-svn/lib.a
+
+../../libgit.a: FORCE
+ $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) libgit.a
+
+clean:
+ $(RM) svn-fe$X svn-fe.o svn-fe.html svn-fe.xml svn-fe.1
+
+.PHONY: all clean FORCE
diff --git a/contrib/svn-fe/svn-fe.c b/contrib/svn-fe/svn-fe.c
new file mode 100644
index 0000000..f363505
--- /dev/null
+++ b/contrib/svn-fe/svn-fe.c
@@ -0,0 +1,18 @@
+/*
+ * This file is in the public domain.
+ * You may freely use, modify, distribute, and relicense it.
+ */
+
+#include <stdlib.h>
+#include "svndump.h"
+
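+/*
+ * Read an SVN dump stream from stdin and emit a git fast-import stream
+ * on stdout.  The optional command-line argument is a URL that is only
+ * used to add "git-svn-id:" lines to the commit messages (see
+ * svn-fe.txt).
+ */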
+int main(int argc, char **argv)
+{
+ if (svndump_init(NULL))
+ return 1;
+ svndump_read((argc > 1) ? argv[1] : NULL, "refs/heads/master",
+ "refs/notes/svn/revs");
+ svndump_deinit();
+ svndump_reset();
+ return 0;
+}
diff --git a/contrib/svn-fe/svn-fe.txt b/contrib/svn-fe/svn-fe.txt
new file mode 100644
index 0000000..1128ab2
--- /dev/null
+++ b/contrib/svn-fe/svn-fe.txt
@@ -0,0 +1,71 @@
+svn-fe(1)
+=========
+
+NAME
+----
+svn-fe - convert an SVN "dumpfile" to a fast-import stream
+
+SYNOPSIS
+--------
+[verse]
+mkfifo backchannel &&
+svnadmin dump --deltas REPO |
+ svn-fe [url] 3<backchannel |
+ git fast-import --cat-blob-fd=3 3>backchannel
+
+DESCRIPTION
+-----------
+
+Converts a Subversion dumpfile into input suitable for
+git-fast-import(1) and similar importers. REPO is a path to a
+Subversion repository mirrored on the local disk. Remote Subversion
+repositories can be mirrored on local disk using the `svnsync`
+command.
+
+Note: this tool is very young. The details of its command-line
+interface may change in backward-incompatible ways.
+
+INPUT FORMAT
+------------
+Subversion's repository dump format is documented in full in
+`notes/dump-load-format.txt` from the Subversion source tree.
+Files in this format can be generated using the 'svnadmin dump' or
+'svk admin dump' command.
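+
+As a rough sketch (the paths and URL are placeholders, and the mirror
+repository must allow revision property changes via its
+pre-revprop-change hook), a remote repository can be mirrored and
+dumped like this:
+
+---------
+svnadmin create /path/to/mirror
+svnsync init file:///path/to/mirror http://svn.example.com/repo
+svnsync sync file:///path/to/mirror
+svnadmin dump --deltas /path/to/mirror >repo.dump
+---------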
+
+OUTPUT FORMAT
+-------------
+The fast-import format is documented by the git-fast-import(1)
+manual page.
+
+NOTES
+-----
+Subversion dumps do not record a separate author and committer for
+each revision, nor a separate display name and email address for
+each author. Like git-svn(1), 'svn-fe' will use the name
+
+---------
+user <user@UUID>
+---------
+
+as committer, where 'user' is the value of the `svn:author` property
+and 'UUID' the repository's identifier.
+
+To support incremental imports, 'svn-fe' puts a `git-svn-id` line at
+the end of each commit log message if passed a URL on the command
+line. This line has the form `git-svn-id: URL@REVNO UUID`.
+
+The resulting repository will generally require further processing
+to put each project in its own repository and to separate the history
+of each branch. The 'git filter-branch --subdirectory-filter' command
+may be useful for this purpose.
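+
+For instance (the directory name is only a placeholder), the history of
+one project directory can be extracted on all branches with:
+
+---------
+git filter-branch --subdirectory-filter project1 -- --all
+---------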
+
+BUGS
+----
+Empty directories and unknown properties are silently discarded.
+
+The exit status does not reflect whether an error was detected.
+
+SEE ALSO
+--------
+git-svn(1), svn2git(1), svk(1), git-filter-branch(1), git-fast-import(1),
+https://svn.apache.org/repos/asf/subversion/trunk/notes/dump-load-format.txt
diff --git a/contrib/svn-fe/svnrdump_sim.py b/contrib/svn-fe/svnrdump_sim.py
new file mode 100755
index 0000000..4e78a1c
--- /dev/null
+++ b/contrib/svn-fe/svnrdump_sim.py
@@ -0,0 +1,57 @@
+#!/usr/bin/python
+"""
+Simulates svnrdump by replaying an existing dump from a file, taking care
+of the specified revision range.
+To simulate incremental imports the environment variable SVNRMAX can be set
+to the highest revision that should be available.
+"""
+import sys, os
+
+if sys.hexversion < 0x02040000:
+ # The limiter is the ValueError() calls. This may be too conservative
+ sys.stderr.write("svnrdump-sim.py: requires Python 2.4 or later.\n")
+ sys.exit(1)
+
+def getrevlimit():
+ var = 'SVNRMAX'
+ if var in os.environ:
+ return os.environ[var]
+ return None
+
+def writedump(url, lower, upper):
+ if url.startswith('sim://'):
+ filename = url[6:]
+ if filename[-1] == '/': filename = filename[:-1] #remove terminating slash
+ else:
+ raise ValueError('sim:// url required')
+ f = open(filename, 'r');
+ state = 'header'
+ wroterev = False
+ while(True):
+ l = f.readline()
+ if l == '': break
+ if state == 'header' and l.startswith('Revision-number: '):
+ state = 'prefix'
+ if state == 'prefix' and l == 'Revision-number: %s\n' % lower:
+ state = 'selection'
+ if not upper == 'HEAD' and state == 'selection' and l == 'Revision-number: %s\n' % upper:
+ break;
+
+ if state == 'header' or state == 'selection':
+ if state == 'selection': wroterev = True
+ sys.stdout.write(l)
+ return wroterev
+
+if __name__ == "__main__":
+ if not (len(sys.argv) in (3, 4, 5)):
+		print("usage: %s dump URL -rLOWER:UPPER" % sys.argv[0])
+ sys.exit(1)
+	if not sys.argv[1] == 'dump': raise NotImplementedError('only "dump" is supported.')
+ url = sys.argv[2]
+ r = ('0', 'HEAD')
+ if len(sys.argv) == 4 and sys.argv[3][0:2] == '-r':
+ r = sys.argv[3][2:].lstrip().split(':')
+ if not getrevlimit() is None: r[1] = getrevlimit()
+ if writedump(url, r[0], r[1]): ret = 0
+ else: ret = 1
+ sys.exit(ret)
diff --git a/contrib/thunderbird-patch-inline/README b/contrib/thunderbird-patch-inline/README
new file mode 100644
index 0000000..000147b
--- /dev/null
+++ b/contrib/thunderbird-patch-inline/README
@@ -0,0 +1,20 @@
+appp.sh is a script intended to be used together with ExternalEditor
+for Mozilla Thunderbird. It lets you easily include patches inline
+in e-mails.
+
+Usage:
+- Generate the patch with git format-patch (see the example below this list).
+- Start writing a new e-mail in Thunderbird.
+- Press the external editor button (or Ctrl-E) to run appp.sh.
+- Select the previously generated patch file.
+- Finish editing the e-mail.
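+
+For example, a patch for just the most recent commit can be generated
+with:
+
+  $ git format-patch -1 HEAD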
+
+Any text that is entered into the message editor before appp.sh is called
+will be moved to the section between the --- and the diffstat.
+
+All Signed-off-by: and Cc: addresses found in the patch will be added
+to the Cc list.
+
+To set it up, just install External Editor and tell it to use appp.sh as the
+editor.
+
+Zenity is a required dependency.
diff --git a/contrib/thunderbird-patch-inline/appp.sh b/contrib/thunderbird-patch-inline/appp.sh
new file mode 100755
index 0000000..5eb4a51
--- /dev/null
+++ b/contrib/thunderbird-patch-inline/appp.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+# Copyright 2008 Lukas Sandström <luksan@gmail.com>
+#
+# AppendPatch - A script to be used together with ExternalEditor
+# for Mozilla Thunderbird to properly include patches inline in e-mails.
+
+# ExternalEditor can be downloaded at http://globs.org/articles.php?lng=en&pg=2
+
+CONFFILE=~/.appprc
+
+SEP="-=-=-=-=-=-=-=-=-=# Don't remove this line #=-=-=-=-=-=-=-=-=-"
+if [ -e "$CONFFILE" ] ; then
+ LAST_DIR=`grep -m 1 "^LAST_DIR=" "${CONFFILE}"|sed -e 's/^LAST_DIR=//'`
+ cd "${LAST_DIR}"
+else
+ cd > /dev/null
+fi
+
+PATCH=$(zenity --file-selection)
+
+if [ "$?" != "0" ] ; then
+ #zenity --error --text "No patchfile given."
+ exit 1
+fi
+
+cd - > /dev/null
+
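+# Split the input into its parts: the subject, commit message and diff
+# come from the selected patch file; the existing headers and body come
+# from the mail buffer ($1), separated by $SEP.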
+SUBJECT=`sed -n -e '/^Subject: /p' "${PATCH}"`
+HEADERS=`sed -e '/^'"${SEP}"'$/,$d' $1`
+BODY=`sed -e "1,/${SEP}/d" $1`
+CMT_MSG=`sed -e '1,/^$/d' -e '/^---$/,$d' "${PATCH}"`
+DIFF=`sed -e '1,/^---$/d' "${PATCH}"`
+
+CCS=`echo -e "$CMT_MSG\n$HEADERS" | sed -n -e 's/^Cc: \(.*\)$/\1,/gp' \
+ -e 's/^Signed-off-by: \(.*\)/\1,/gp'`
+
+echo "$SUBJECT" > $1
+echo "Cc: $CCS" >> $1
+echo "$HEADERS" | sed -e '/^Subject: /d' -e '/^Cc: /d' >> $1
+echo "$SEP" >> $1
+
+echo "$CMT_MSG" >> $1
+echo "---" >> $1
+if [ "x${BODY}x" != "xx" ] ; then
+ echo >> $1
+ echo "$BODY" >> $1
+ echo >> $1
+fi
+echo "$DIFF" >> $1
+
+LAST_DIR=`dirname "${PATCH}"`
+
+grep -v "^LAST_DIR=" "${CONFFILE}" > "${CONFFILE}_"
+echo "LAST_DIR=${LAST_DIR}" >> "${CONFFILE}_"
+mv "${CONFFILE}_" "${CONFFILE}"
diff --git a/contrib/vim/README b/contrib/vim/README
new file mode 100644
index 0000000..8f16d06
--- /dev/null
+++ b/contrib/vim/README
@@ -0,0 +1,22 @@
+Syntax highlighting for git commit messages, config files, etc. is
+included with the vim distribution as of vim 7.2, and should work
+automatically.
+
+If you have an older version of vim, you can get the latest syntax
+files from the vim project:
+
+ http://ftp.vim.org/pub/vim/runtime/syntax/git.vim
+ http://ftp.vim.org/pub/vim/runtime/syntax/gitcommit.vim
+ http://ftp.vim.org/pub/vim/runtime/syntax/gitconfig.vim
+ http://ftp.vim.org/pub/vim/runtime/syntax/gitrebase.vim
+ http://ftp.vim.org/pub/vim/runtime/syntax/gitsendemail.vim
+
+These files are also available via FTP at the same location.
+
+To install:
+
+ 1. Copy these files to vim's syntax directory $HOME/.vim/syntax
+ 2. To auto-detect the editing of various git-related filetypes:
+
+ $ curl http://ftp.vim.org/pub/vim/runtime/filetype.vim |
+ sed -ne '/^" Git$/, /^$/ p' >>$HOME/.vim/filetype.vim
diff --git a/contrib/workdir/git-new-workdir b/contrib/workdir/git-new-workdir
new file mode 100755
index 0000000..75e8b25
--- /dev/null
+++ b/contrib/workdir/git-new-workdir
@@ -0,0 +1,82 @@
+#!/bin/sh
+
+usage () {
+ echo "usage:" $@
+ exit 127
+}
+
+die () {
+ echo $@
+ exit 128
+}
+
+if test $# -lt 2 || test $# -gt 3
+then
+ usage "$0 <repository> <new_workdir> [<branch>]"
+fi
+
+orig_git=$1
+new_workdir=$2
+branch=$3
+
+# want to make sure that what is pointed to has a .git directory ...
+git_dir=$(cd "$orig_git" 2>/dev/null &&
+ git rev-parse --git-dir 2>/dev/null) ||
+ die "Not a git repository: \"$orig_git\""
+
+case "$git_dir" in
+.git)
+ git_dir="$orig_git/.git"
+ ;;
+.)
+ git_dir=$orig_git
+ ;;
+esac
+
+# don't link to a configured bare repository
+isbare=$(git --git-dir="$git_dir" config --bool --get core.bare)
+if test ztrue = z$isbare
+then
+ die "\"$git_dir\" has core.bare set to true," \
+ " remove from \"$git_dir/config\" to use $0"
+fi
+
+# don't link to a workdir
+if test -h "$git_dir/config"
+then
+ die "\"$orig_git\" is a working directory only, please specify" \
+ "a complete repository."
+fi
+
+# don't recreate a workdir over an existing repository
+if test -e "$new_workdir"
+then
+ die "destination directory '$new_workdir' already exists."
+fi
+
+# make sure the links use full paths
+git_dir=$(cd "$git_dir"; pwd)
+
+# create the workdir
+mkdir -p "$new_workdir/.git" || die "unable to create \"$new_workdir\"!"
+
+# create the links to the original repo. explicitly exclude index, HEAD and
+# logs/HEAD from the list since they are purely related to the current working
+# directory, and should not be shared.
+for x in config refs logs/refs objects info hooks packed-refs remotes rr-cache svn
+do
+ case $x in
+ */*)
+ mkdir -p "$(dirname "$new_workdir/.git/$x")"
+ ;;
+ esac
+ ln -s "$git_dir/$x" "$new_workdir/.git/$x"
+done
+
+# now setup the workdir
+cd "$new_workdir"
+# copy the HEAD from the original repository as a default branch
+cp "$git_dir/HEAD" .git/HEAD
+# checkout the branch (either the same as HEAD from the original repository, or
+# the one that was asked for)
+git checkout -f $branch