-rw-r--r--  .gitignore | 11
-rw-r--r--  Documentation/Makefile | 6
-rw-r--r--  Documentation/SubmittingPatches | 30
-rw-r--r--  Documentation/asciidoc.conf | 10
-rw-r--r--  Documentation/git-add.txt | 5
-rw-r--r--  Documentation/git-am.txt | 9
-rw-r--r--  Documentation/git-apply.txt | 30
-rw-r--r--  Documentation/git-applypatch.txt | 2
-rw-r--r--  Documentation/git-archimport.txt | 2
-rw-r--r--  Documentation/git-branch.txt | 12
-rw-r--r--  Documentation/git-check-ref-format.txt | 2
-rw-r--r--  Documentation/git-checkout-index.txt | 69
-rw-r--r--  Documentation/git-checkout.txt | 25
-rw-r--r--  Documentation/git-cherry-pick.txt | 2
-rw-r--r--  Documentation/git-cherry.txt | 2
-rw-r--r--  Documentation/git-clone-pack.txt | 2
-rw-r--r--  Documentation/git-clone.txt | 2
-rw-r--r--  Documentation/git-commit.txt | 23
-rw-r--r--  Documentation/git-count-objects.txt | 2
-rw-r--r--  Documentation/git-cvsimport.txt | 33
-rw-r--r--  Documentation/git-cvsserver.txt | 148
-rw-r--r--  Documentation/git-daemon.txt | 2
-rw-r--r--  Documentation/git-describe.txt | 2
-rw-r--r--  Documentation/git-diff-stages.txt | 2
-rw-r--r--  Documentation/git-diff.txt | 2
-rw-r--r--  Documentation/git-fetch-pack.txt | 4
-rw-r--r--  Documentation/git-fetch.txt | 2
-rw-r--r--  Documentation/git-format-patch.txt | 16
-rw-r--r--  Documentation/git-fsck-objects.txt | 15
-rw-r--r--  Documentation/git-get-tar-commit-id.txt | 2
-rw-r--r--  Documentation/git-grep.txt | 16
-rw-r--r--  Documentation/git-hash-object.txt | 2
-rw-r--r--  Documentation/git-http-push.txt | 2
-rw-r--r--  Documentation/git-init-db.txt | 18
-rw-r--r--  Documentation/git-lost-found.txt | 2
-rw-r--r--  Documentation/git-ls-files.txt | 24
-rw-r--r--  Documentation/git-ls-remote.txt | 2
-rw-r--r--  Documentation/git-ls-tree.txt | 11
-rw-r--r--  Documentation/git-mailinfo.txt | 2
-rw-r--r--  Documentation/git-mailsplit.txt | 2
-rw-r--r--  Documentation/git-mv.txt | 2
-rw-r--r--  Documentation/git-name-rev.txt | 2
-rw-r--r--  Documentation/git-pack-objects.txt | 4
-rw-r--r--  Documentation/git-pack-redundant.txt | 6
-rw-r--r--  Documentation/git-patch-id.txt | 2
-rw-r--r--  Documentation/git-peek-remote.txt | 2
-rw-r--r--  Documentation/git-prune-packed.txt | 2
-rw-r--r--  Documentation/git-pull.txt | 2
-rw-r--r--  Documentation/git-push.txt | 2
-rw-r--r--  Documentation/git-read-tree.txt | 15
-rw-r--r--  Documentation/git-rebase.txt | 60
-rw-r--r--  Documentation/git-relink.txt | 2
-rw-r--r--  Documentation/git-repack.txt | 2
-rw-r--r--  Documentation/git-repo-config.txt | 3
-rw-r--r--  Documentation/git-request-pull.txt | 2
-rw-r--r--  Documentation/git-reset.txt | 2
-rw-r--r--  Documentation/git-rev-list.txt | 59
-rw-r--r--  Documentation/git-rev-parse.txt | 4
-rw-r--r--  Documentation/git-revert.txt | 2
-rw-r--r--  Documentation/git-rm.txt | 92
-rw-r--r--  Documentation/git-send-email.txt | 10
-rw-r--r--  Documentation/git-send-pack.txt | 2
-rw-r--r--  Documentation/git-sh-setup.txt | 2
-rw-r--r--  Documentation/git-shell.txt | 2
-rw-r--r--  Documentation/git-shortlog.txt | 4
-rw-r--r--  Documentation/git-show-branch.txt | 10
-rw-r--r--  Documentation/git-show.txt | 2
-rw-r--r--  Documentation/git-status.txt | 2
-rw-r--r--  Documentation/git-stripspace.txt | 2
-rw-r--r--  Documentation/git-svnimport.txt | 38
-rw-r--r--  Documentation/git-tag.txt | 12
-rw-r--r--  Documentation/git-tools.txt | 97
-rw-r--r--  Documentation/git-unpack-objects.txt | 2
-rw-r--r--  Documentation/git-update-index.txt | 78
-rw-r--r--  Documentation/git-update-ref.txt | 2
-rw-r--r--  Documentation/git-upload-pack.txt | 2
-rw-r--r--  Documentation/git-var.txt | 2
-rw-r--r--  Documentation/git-verify-pack.txt | 2
-rw-r--r--  Documentation/git-verify-tag.txt | 2
-rw-r--r--  Documentation/git-whatchanged.txt | 8
-rw-r--r--  Documentation/git.txt | 109
-rw-r--r--  Documentation/hooks.txt | 51
-rw-r--r--  Documentation/repository-layout.txt | 2
-rw-r--r--  Documentation/tutorial.txt | 2
-rwxr-xr-x  GIT-VERSION-GEN | 7
-rw-r--r--  INSTALL | 4
-rw-r--r--  Makefile | 209
-rw-r--r--  apply.c | 37
-rw-r--r--  blame.c | 892
-rw-r--r--  blob.c | 3
-rw-r--r--  cache.h | 19
-rw-r--r--  cat-file.c | 122
-rw-r--r--  checkout-index.c | 126
-rw-r--r--  combine-diff.c | 5
-rw-r--r--  commit-tree.c | 15
-rw-r--r--  commit.c | 52
-rw-r--r--  commit.h | 24
-rw-r--r--  config.c | 10
-rw-r--r--  contrib/README | 44
-rw-r--r--  contrib/emacs/.gitignore | 1
-rw-r--r--  contrib/emacs/Makefile | 20
-rw-r--r--  contrib/emacs/git.el | 1012
-rw-r--r--  contrib/emacs/vc-git.el | 135
-rw-r--r--  contrib/git-svn/.gitignore | 4
-rw-r--r--  contrib/git-svn/Makefile | 35
-rwxr-xr-x  contrib/git-svn/git-svn.perl | 1180
-rw-r--r--  contrib/git-svn/git-svn.txt | 296
-rw-r--r--  contrib/git-svn/t/t0000-contrib-git-svn.sh | 216
-rwxr-xr-x  contrib/gitview/gitview | 1000
-rw-r--r--  contrib/gitview/gitview.txt | 38
-rw-r--r--  convert-objects.c | 22
-rw-r--r--  count-delta.c | 72
-rw-r--r--  count-delta.h | 10
-rw-r--r--  date.c | 4
-rw-r--r--  diff-delta.c | 309
-rw-r--r--  diff-files.c | 6
-rw-r--r--  diff-index.c | 6
-rw-r--r--  diff-tree.c | 2
-rw-r--r--  diff.c | 353
-rw-r--r--  diff.h | 12
-rw-r--r--  diffcore-break.c | 63
-rw-r--r--  diffcore-delta.c | 213
-rw-r--r--  diffcore-rename.c | 53
-rw-r--r--  diffcore.h | 13
-rw-r--r--  entry.c | 47
-rw-r--r--  environment.c | 2
-rw-r--r--  epoch.c | 639
-rw-r--r--  epoch.h | 21
-rw-r--r--  exec_cmd.c | 17
-rw-r--r--  exec_cmd.h | 4
-rw-r--r--  fetch-pack.c | 33
-rw-r--r--  fsck-objects.c | 22
-rwxr-xr-x  generate-cmdlist.sh | 52
-rwxr-xr-x  git-add.sh | 11
-rwxr-xr-x  git-am.sh | 3
-rwxr-xr-x  git-annotate.perl | 508
-rwxr-xr-x  git-archimport.perl | 4
-rwxr-xr-x  git-bisect.sh | 17
-rwxr-xr-x  git-branch.sh | 10
-rwxr-xr-x  git-clone.sh | 233
-rwxr-xr-x  git-commit.sh | 70
-rw-r--r--  git-compat-util.h | 2
-rwxr-xr-x  git-cvsimport.perl | 54
-rwxr-xr-x  git-cvsserver.perl | 2616
-rwxr-xr-x  git-diff.sh | 4
-rwxr-xr-x  git-fetch.sh | 14
-rwxr-xr-x  git-fmt-merge-msg.perl | 53
-rwxr-xr-x  git-format-patch.sh | 63
-rwxr-xr-x  git-ls-remote.sh | 2
-rwxr-xr-x  git-merge.sh | 71
-rwxr-xr-x  git-mv.perl | 17
-rwxr-xr-x  git-parse-remote.sh | 8
-rwxr-xr-x  git-pull.sh | 26
-rwxr-xr-x  git-push.sh | 6
-rwxr-xr-x  git-rebase.sh | 100
-rwxr-xr-x  git-repack.sh | 5
-rwxr-xr-x  git-rerere.perl | 6
-rwxr-xr-x  git-resolve.sh | 2
-rwxr-xr-x  git-revert.sh | 5
-rwxr-xr-x  git-rm.sh | 70
-rwxr-xr-x  git-send-email.perl | 153
-rwxr-xr-x  git-svnimport.perl | 135
-rwxr-xr-x  git-tag.sh | 14
-rwxr-xr-x  git-verify-tag.sh | 21
-rw-r--r--  git.c | 281
-rwxr-xr-x  gitk | 2099
-rw-r--r--  hash-object.c | 5
-rw-r--r--  http-fetch.c | 12
-rw-r--r--  http-push.c | 1786
-rw-r--r--  http.c | 10
-rw-r--r--  http.h | 1
-rw-r--r--  imap-send.c | 1359
-rw-r--r--  index-pack.c | 12
-rw-r--r--  ls-files.c | 140
-rw-r--r--  ls-tree.c | 24
-rw-r--r--  mailinfo.c | 4
-rw-r--r--  merge-base.c | 1
-rw-r--r--  merge-tree.c | 178
-rw-r--r--  mktag.c | 3
-rw-r--r--  mktree.c | 138
-rw-r--r--  name-rev.c | 1
-rw-r--r--  object.c | 14
-rw-r--r--  pack-check.c | 24
-rw-r--r--  pack-objects.c | 411
-rw-r--r--  pack-redundant.c | 4
-rw-r--r--  pager.c | 48
-rw-r--r--  read-cache.c | 28
-rw-r--r--  read-tree.c | 16
-rw-r--r--  receive-pack.c | 10
-rw-r--r--  refs.c | 9
-rw-r--r--  repo-config.c | 5
-rw-r--r--  rev-list.c | 755
-rw-r--r--  rev-parse.c | 33
-rw-r--r--  revision.c | 811
-rw-r--r--  revision.h | 76
-rw-r--r--  run-command.c | 6
-rw-r--r--  run-command.h | 4
-rw-r--r--  send-pack.c | 54
-rw-r--r--  sha1_file.c | 62
-rw-r--r--  sha1_name.c | 35
-rw-r--r--  shell.c | 2
-rw-r--r--  show-branch.c | 22
-rw-r--r--  t/.gitignore | 1
-rw-r--r--  t/Makefile | 11
-rw-r--r--  t/annotate-tests.sh | 121
-rwxr-xr-x  t/t0000-basic.sh | 2
-rwxr-xr-x  t/t1200-tutorial.sh | 2
-rwxr-xr-x  t/t1300-repo-config.sh | 8
-rwxr-xr-x  t/t2004-checkout-cache-temp.sh | 212
-rwxr-xr-x  t/t3020-ls-files-error-unmatch.sh | 27
-rwxr-xr-x  t/t3600-rm.sh | 71
-rwxr-xr-x  t/t5000-tar-tree.sh | 3
-rwxr-xr-x  t/t5600-clone-fail-cleanup.sh | 36
-rwxr-xr-x  t/t6001-rev-list-merge-order.sh | 462
-rwxr-xr-x  t/t6021-merge-criss-cross.sh | 6
-rwxr-xr-x  t/t6022-merge-rename.sh | 6
-rwxr-xr-x  t/t7001-mv.sh | 18
-rwxr-xr-x  t/t8001-annotate.sh | 9
-rwxr-xr-x  t/t8002-blame.sh | 9
-rwxr-xr-x  t/test-lib.sh | 2
-rw-r--r--  tag.c | 3
-rw-r--r--  tar-tree.c | 389
-rw-r--r--  tar.h | 25
-rw-r--r--  templates/hooks--pre-rebase | 150
-rw-r--r--  tree-diff.c | 35
-rw-r--r--  tree-walk.c | 117
-rw-r--r--  tree-walk.h | 25
-rw-r--r--  tree.c | 7
-rw-r--r--  unpack-file.c | 4
-rw-r--r--  unpack-objects.c | 12
-rw-r--r--  update-index.c | 93
-rw-r--r--  update-ref.c | 1
-rw-r--r--  upload-pack.c | 17
-rw-r--r--  write-tree.c | 5
-rw-r--r--  xdiff/xdiff.h | 94
-rw-r--r--  xdiff/xdiffi.c | 464
-rw-r--r--  xdiff/xdiffi.h | 60
-rw-r--r--  xdiff/xemit.c | 180
-rw-r--r--  xdiff/xemit.h | 34
-rw-r--r--  xdiff/xinclude.h | 43
-rw-r--r--  xdiff/xmacros.h | 53
-rw-r--r--  xdiff/xprepare.c | 465
-rw-r--r--  xdiff/xprepare.h | 35
-rw-r--r--  xdiff/xtypes.h | 68
-rw-r--r--  xdiff/xutils.c | 299
-rw-r--r--  xdiff/xutils.h | 46
246 files changed, 20303 insertions, 4961 deletions
diff --git a/.gitignore b/.gitignore
index d7e8d2a..75891c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,7 @@ GIT-VERSION-FILE
git
git-add
git-am
+git-annotate
git-apply
git-applymbox
git-applypatch
@@ -22,6 +23,7 @@ git-convert-objects
git-count-objects
git-cvsexportcommit
git-cvsimport
+git-cvsserver
git-daemon
git-diff
git-diff-files
@@ -40,6 +42,7 @@ git-grep
git-hash-object
git-http-fetch
git-http-push
+git-imap-send
git-index-pack
git-init-db
git-local-fetch
@@ -53,6 +56,7 @@ git-mailsplit
git-merge
git-merge-base
git-merge-index
+git-merge-tree
git-merge-octopus
git-merge-one-file
git-merge-ours
@@ -60,6 +64,7 @@ git-merge-recursive
git-merge-resolve
git-merge-stupid
git-mktag
+git-mktree
git-name-rev
git-mv
git-pack-redundant
@@ -84,6 +89,7 @@ git-resolve
git-rev-list
git-rev-parse
git-revert
+git-rm
git-send-email
git-send-pack
git-sh-setup
@@ -116,12 +122,13 @@ git-write-tree
git-core-*/?*
test-date
test-delta
+common-cmds.h
*.tar.gz
*.dsc
*.deb
git-core.spec
*.exe
-libgit.a
-*.o
+*.[ao]
*.py[co]
config.mak
+git-blame
diff --git a/Documentation/Makefile b/Documentation/Makefile
index a3bca86..f4cbf7e 100644
--- a/Documentation/Makefile
+++ b/Documentation/Makefile
@@ -1,4 +1,7 @@
-MAN1_TXT=$(wildcard git-*.txt) gitk.txt
+MAN1_TXT= \
+ $(filter-out $(addsuffix .txt, $(ARTICLES) $(SP_ARTICLES)), \
+ $(wildcard git-*.txt)) \
+ gitk.txt
MAN7_TXT=git.txt
DOC_HTML=$(patsubst %.txt,%.html,$(MAN1_TXT) $(MAN7_TXT))
@@ -11,6 +14,7 @@ ARTICLES += howto-index
ARTICLES += repository-layout
ARTICLES += hooks
ARTICLES += everyday
+ARTICLES += git-tools
# with their own formatting rules.
SP_ARTICLES = glossary howto/revert-branch-rebase
diff --git a/Documentation/SubmittingPatches b/Documentation/SubmittingPatches
index 9ccb8f7..318b04f 100644
--- a/Documentation/SubmittingPatches
+++ b/Documentation/SubmittingPatches
@@ -4,8 +4,8 @@ it for the core GIT to make sure people understand what they are
doing when they write "Signed-off-by" line.
But the patch submission requirements are a lot more relaxed
-here, because the core GIT is thousand times smaller ;-). So
-here is only the relevant bits.
+here on the technical/contents front, because the core GIT is
+thousand times smaller ;-). So here is only the relevant bits.
(1) Make separate commits for logically separate changes.
@@ -18,13 +18,19 @@ repository. It is a good discipline.
Describe the technical detail of the change(s).
-If your description starts to get long, that's a sign that you
+If your description starts to get too long, that's a sign that you
probably need to split up your commit to finer grained pieces.
+Oh, another thing. I am picky about whitespaces. Make sure your
+changes do not trigger errors with the sample pre-commit hook shipped
+in templates/hooks--pre-commit.
-(2) Generate your patch using git/cogito out of your commits.
-git diff tools generate unidiff which is the preferred format.
+(2) Generate your patch using git tools out of your commits.
+
+git based diff tools (git, Cogito, and StGIT included) generate
+unidiff which is the preferred format.
+
You do not have to be afraid to use -M option to "git diff" or
"git format-patch", if your patch involves file renames. The
receiving end can handle them just fine.
@@ -33,20 +39,22 @@ Please make sure your patch does not include any extra files
which do not belong in a patch submission. Make sure to review
your patch after generating it, to ensure accuracy. Before
sending out, please make sure it cleanly applies to the "master"
-branch head.
+branch head. If you are preparing a work based on "next" branch,
+that is fine, but please mark it as such.
(3) Sending your patches.
-People on the git mailing list needs to be able to read and
+People on the git mailing list need to be able to read and
comment on the changes you are submitting. It is important for
a developer to be able to "quote" your changes, using standard
e-mail tools, so that they may comment on specific portions of
-your code. For this reason, all patches should be submitting
-e-mail "inline". WARNING: Be wary of your MUAs word-wrap
-corrupting your patch. Do not cut-n-paste your patch.
+your code. For this reason, all patches should be submitted
+"inline". WARNING: Be wary of your MUAs word-wrap
+corrupting your patch. Do not cut-n-paste your patch; you can
+lose tabs that way if you are not careful.
-It is common convention to prefix your subject line with
+It is a common convention to prefix your subject line with
[PATCH]. This lets people easily distinguish patches from other
e-mail discussions.
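
As a sketch of the workflow described above (the output directory and
mailing-list address are examples; git-send-email options may vary):

------------
# one patch file per commit on the current branch that is not in "master",
# with rename detection enabled
$ git format-patch -M -o outgoing/ master

# review each file, then mail the series inline rather than as attachments
$ git send-email --to git@vger.kernel.org outgoing/*.patch
------------
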
diff --git a/Documentation/asciidoc.conf b/Documentation/asciidoc.conf
index fa0877d..7ce7151 100644
--- a/Documentation/asciidoc.conf
+++ b/Documentation/asciidoc.conf
@@ -18,6 +18,16 @@ ifdef::backend-docbook[]
{0#</citerefentry>}
endif::backend-docbook[]
+ifdef::backend-docbook[]
+# "unbreak" docbook-xsl v1.68 for manpages. v1.69 works with or without this.
+[listingblock]
+<example><title>{title}</title>
+<literallayout>
+|
+</literallayout>
+{title#}</example>
+endif::backend-docbook[]
+
ifdef::backend-xhtml11[]
[gitlink-inlinemacro]
<a href="{target}.html">{target}{0?({0})}</a>
diff --git a/Documentation/git-add.txt b/Documentation/git-add.txt
index 7e29383..ae24547 100644
--- a/Documentation/git-add.txt
+++ b/Documentation/git-add.txt
@@ -3,7 +3,7 @@ git-add(1)
NAME
----
-git-add - Add files to the index file.
+git-add - Add files to the index file
SYNOPSIS
--------
@@ -65,6 +65,9 @@ git-add git-*.sh::
(i.e. you are listing the files explicitly), it does not
add `subdir/git-foo.sh` to the index.
+See Also
+--------
+gitlink:git-rm[1]
Author
------
diff --git a/Documentation/git-am.txt b/Documentation/git-am.txt
index 02cabc9..910457d 100644
--- a/Documentation/git-am.txt
+++ b/Documentation/git-am.txt
@@ -9,7 +9,8 @@ git-am - Apply a series of patches in a mailbox
SYNOPSIS
--------
[verse]
-'git-am' [--signoff] [--dotest=<dir>] [--utf8] [--binary] [--3way] <mbox>...
+'git-am' [--signoff] [--dotest=<dir>] [--utf8] [--binary] [--3way]
+ [--interactive] [--whitespace=<option>] <mbox>...
'git-am' [--skip | --resolved]
DESCRIPTION
@@ -46,6 +47,10 @@ OPTIONS
Skip the current patch. This is only meaningful when
restarting an aborted patch.
+--whitespace=<option>::
+ This flag is passed to the `git-apply` program that applies
+ the patch.
+
--interactive::
Run interactively, just like git-applymbox.
@@ -80,7 +85,7 @@ names.
SEE ALSO
--------
-gitlink:git-applymbox[1], gitlink:git-applypatch[1].
+gitlink:git-applymbox[1], gitlink:git-applypatch[1], gitlink:git-apply[1].
Author
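
For example, a whitespace policy can be passed straight through to git-apply
while applying a series (a sketch; the mailbox name is made up):

------------
# apply the series with three-way fallback, stripping trailing whitespace
$ git am --3way --whitespace=strip patches.mbox
------------
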
diff --git a/Documentation/git-apply.txt b/Documentation/git-apply.txt
index 75076b6..1c64a1a 100644
--- a/Documentation/git-apply.txt
+++ b/Documentation/git-apply.txt
@@ -11,6 +11,7 @@ SYNOPSIS
[verse]
'git-apply' [--stat] [--numstat] [--summary] [--check] [--index] [--apply]
[--no-add] [--index-info] [--allow-binary-replacement] [-z] [-pNUM]
+ [--whitespace=<nowarn|warn|error|error-all|strip>]
[<patch>...]
DESCRIPTION
@@ -97,6 +98,35 @@ OPTIONS
result. This allows binary files to be patched in a
very limited way.
+--whitespace=<option>::
+ When applying a patch, detect a new or modified line
+ that ends with trailing whitespaces (this includes a
+ line that solely consists of whitespaces). By default,
+ the command outputs warning messages and applies the
+ patch.
+ When `git-apply` is used for statistics and not applying a
+ patch, it defaults to `nowarn`.
+ You can use different `<option>` to control this
+ behaviour:
++
+* `nowarn` turns off the trailing whitespace warning.
+* `warn` outputs warnings for a few such errors, but applies the
+ patch (default).
+* `error` outputs warnings for a few such errors, and refuses
+ to apply the patch.
+* `error-all` is similar to `error` but shows all errors.
+* `strip` outputs warnings for a few such errors, strips out the
+ trailing whitespaces and applies the patch.
+
+
+Configuration
+-------------
+
+apply.whitespace::
+ When no `--whitespace` flag is given from the command
+ line, this configuration item is used as the default.
+
+
Author
------
Written by Linus Torvalds <torvalds@osdl.org>
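
A brief sketch of the two ways to pick a whitespace policy described above
(the patch file name is hypothetical):

------------
# per invocation: refuse to apply a patch that adds trailing whitespace
$ git apply --index --whitespace=error fix.patch

# or record a repository-wide default; --whitespace still overrides it
$ git repo-config apply.whitespace strip
------------
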
diff --git a/Documentation/git-applypatch.txt b/Documentation/git-applypatch.txt
index 5b9037d..2b1ff14 100644
--- a/Documentation/git-applypatch.txt
+++ b/Documentation/git-applypatch.txt
@@ -3,7 +3,7 @@ git-applypatch(1)
NAME
----
-git-applypatch - Apply one patch extracted from an e-mail.
+git-applypatch - Apply one patch extracted from an e-mail
SYNOPSIS
diff --git a/Documentation/git-archimport.txt b/Documentation/git-archimport.txt
index 023d3ae..5a13187 100644
--- a/Documentation/git-archimport.txt
+++ b/Documentation/git-archimport.txt
@@ -9,7 +9,7 @@ git-archimport - Import an Arch repository into git
SYNOPSIS
--------
[verse]
-`git-archimport` [-h] [-v] [-o] [-a] [-f] [-T] [-D depth] [-t tempdir]
+'git-archimport' [-h] [-v] [-o] [-a] [-f] [-T] [-D depth] [-t tempdir]
<archive/branch> [ <archive/branch> ]
DESCRIPTION
diff --git a/Documentation/git-branch.txt b/Documentation/git-branch.txt
index b1bc827..71ecd85 100644
--- a/Documentation/git-branch.txt
+++ b/Documentation/git-branch.txt
@@ -3,20 +3,24 @@ git-branch(1)
NAME
----
-git-branch - Create a new branch, or remove an old one.
+git-branch - Create a new branch, or remove an old one
SYNOPSIS
--------
-'git-branch' [(-d | -D) <branchname>] | [[-f] <branchname> [<start-point>]]
+[verse]
+'git-branch' [[-f] <branchname> [<start-point>]]
+'git-branch' (-d | -D) <branchname>
DESCRIPTION
-----------
If no argument is provided, show available branches and mark current
branch with star. Otherwise, create a new branch of name <branchname>.
-
If a starting point is also specified, that will be where the branch is
created, otherwise it will be created at the current HEAD.
+With a `-d` or `-D` option, `<branchname>` will be deleted.
+
+
OPTIONS
-------
-d::
@@ -39,7 +43,7 @@ OPTIONS
Examples
~~~~~~~~
-Start development off of a know tag::
+Start development off of a known tag::
+
------------
$ git clone git://git.kernel.org/pub/scm/.../linux-2.6 my2.6
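
A minimal sketch of the two forms shown in the new synopsis (branch and
start-point names are examples):

------------
# create "topic" starting at the tag v2.6.14, then list branches
$ git branch topic v2.6.14
$ git branch

# delete it again; -D instead of -d would force the deletion
$ git branch -d topic
------------
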
diff --git a/Documentation/git-check-ref-format.txt b/Documentation/git-check-ref-format.txt
index f7f84c6..7dc1bdb 100644
--- a/Documentation/git-check-ref-format.txt
+++ b/Documentation/git-check-ref-format.txt
@@ -3,7 +3,7 @@ git-check-ref-format(1)
NAME
----
-git-check-ref-format - Make sure ref name is well formed.
+git-check-ref-format - Make sure ref name is well formed
SYNOPSIS
--------
diff --git a/Documentation/git-checkout-index.txt b/Documentation/git-checkout-index.txt
index 2a1e526..09bd6a5 100644
--- a/Documentation/git-checkout-index.txt
+++ b/Documentation/git-checkout-index.txt
@@ -10,7 +10,10 @@ SYNOPSIS
--------
[verse]
'git-checkout-index' [-u] [-q] [-a] [-f] [-n] [--prefix=<string>]
- [--stage=<number>] [--] <file>...
+ [--stage=<number>|all]
+ [--temp]
+ [-z] [--stdin]
+ [--] [<file>]\*
DESCRIPTION
-----------
@@ -41,9 +44,24 @@ OPTIONS
When creating files, prepend <string> (usually a directory
including a trailing /)
---stage=<number>::
+--stage=<number>|all::
Instead of checking out unmerged entries, copy out the
files from named stage. <number> must be between 1 and 3.
+ Note: --stage=all automatically implies --temp.
+
+--temp::
+ Instead of copying the files to the working directory
+ write the content to temporary files. The temporary name
+ associations will be written to stdout.
+
+--stdin::
+ Instead of taking list of paths from the command line,
+ read list of paths from the standard input. Paths are
+ separated by LF (i.e. one path per line) by default.
+
+-z::
+ Only meaningful with `--stdin`; paths are separated with
+ NUL character instead of LF.
--::
Do not interpret any more arguments as options.
@@ -64,13 +82,58 @@ $ find . -name '*.h' -print0 | xargs -0 git-checkout-index -f --
which will force all existing `*.h` files to be replaced with their
cached copies. If an empty command line implied "all", then this would
-force-refresh everything in the index, which was not the point.
+force-refresh everything in the index, which was not the point. But
+since git-checkout-index accepts --stdin it would be faster to use:
+
+----------------
+$ find . -name '*.h' -print0 | git-checkout-index -f -z --stdin
+----------------
The `--` is just a good idea when you know the rest will be filenames;
it will prevent problems with a filename of, for example, `-a`.
Using `--` is probably a good policy in scripts.
+Using --temp or --stage=all
+---------------------------
+When `--temp` is used (or implied by `--stage=all`)
+`git-checkout-index` will create a temporary file for each index
+entry being checked out. The index will not be updated with stat
+information. These options can be useful if the caller needs all
+stages of all unmerged entries so that the unmerged files can be
+processed by an external merge tool.
+
+A listing will be written to stdout providing the association of
+temporary file names to tracked path names. The listing format
+has two variations:
+
+ . tempname TAB path RS
++
+The first format is what gets used when `--stage` is omitted or
+is not `--stage=all`. The field tempname is the temporary file
+name holding the file content and path is the tracked path name in
+the index. Only the requested entries are output.
+
+ . stage1temp SP stage2temp SP stage3tmp TAB path RS
++
+The second format is what gets used when `--stage=all`. The three
+stage temporary fields (stage1temp, stage2temp, stage3temp) list the
+name of the temporary file if there is a stage entry in the index
+or `.` if there is no stage entry. Paths which only have a stage 0
+entry will always be omitted from the output.
+
+In both formats RS (the record separator) is newline by default
+but will be the null byte if -z was passed on the command line.
+The temporary file names are always safe strings; they will never
+contain directory separators or whitespace characters. The path
+field is always relative to the current directory and the temporary
+file names are always relative to the top level directory.
+
+If the object being copied out to a temporary file is a symbolic
+link the content of the link will be written to a normal file. It is
+up to the end-user or the Porcelain to make use of this information.
+
+
EXAMPLES
--------
To update and refresh only the files already checked out::
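
To illustrate the --stage=all listing described above, a sketch (the path is
an example and the temporary file names shown are invented):

------------
# write every stage of an unmerged path to temporary files
$ git checkout-index --stage=all -- hello.c
.merge_file_a .merge_file_b .merge_file_c	hello.c

# the three fields name the stage 1/2/3 temporaries ("." if a stage is absent)
------------
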
diff --git a/Documentation/git-checkout.txt b/Documentation/git-checkout.txt
index df9a618..985bb2f 100644
--- a/Documentation/git-checkout.txt
+++ b/Documentation/git-checkout.txt
@@ -3,19 +3,22 @@ git-checkout(1)
NAME
----
-git-checkout - Checkout and switch to a branch.
+git-checkout - Checkout and switch to a branch
SYNOPSIS
--------
-'git-checkout' [-f] [-b <new_branch>] [-m] [<branch>] [<paths>...]
+[verse]
+'git-checkout' [-f] [-b <new_branch>] [-m] [<branch>]
+'git-checkout' [-m] [<branch>] <paths>...
DESCRIPTION
-----------
-When <paths> are not given, this command switches branches, by
+When <paths> are not given, this command switches branches by
updating the index and working tree to reflect the specified
branch, <branch>, and updating HEAD to be <branch> or, if
-specified, <new_branch>.
+specified, <new_branch>. Using -b will cause <new_branch> to
+be created.
When <paths> are given, this command does *not* switch
branches. It updates the named paths in the working tree from
@@ -29,17 +32,17 @@ given paths before updating the working tree.
OPTIONS
-------
-f::
- Force an re-read of everything.
+ Force a re-read of everything.
-b::
Create a new branch and start it at <branch>.
-m::
- If you have local modifications to a file that is
- different between the current branch and the branch you
- are switching to, the command refuses to switch
- branches, to preserve your modifications in context.
- With this option, a three-way merge between the current
+ If you have local modifications to one or more files that
+ are different between the current branch and the branch to
+ which you are switching, the command refuses to switch
+ branches in order to preserve your modifications in context.
+ However, with this option, a three-way merge between the current
branch, your working tree contents, and the new branch
is done, and you will be on the new branch.
+
@@ -82,7 +85,7 @@ $ git checkout -- hello.c
------------
. After working in a wrong branch, switching to the correct
-branch you would want to is done with:
+branch would be done using:
+
------------
$ git checkout mytopic
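
A sketch of the -m behaviour described above, assuming local edits that would
otherwise block the switch:

------------
# carry the local modifications over to "mytopic" with a three-way merge
$ git checkout -m mytopic

# if the merge left conflicts, edit the files and mark them resolved
$ git update-index hello.c
------------
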
diff --git a/Documentation/git-cherry-pick.txt b/Documentation/git-cherry-pick.txt
index 4f323fa..bfa950c 100644
--- a/Documentation/git-cherry-pick.txt
+++ b/Documentation/git-cherry-pick.txt
@@ -3,7 +3,7 @@ git-cherry-pick(1)
NAME
----
-git-cherry-pick - Apply the change introduced by an existing commit.
+git-cherry-pick - Apply the change introduced by an existing commit
SYNOPSIS
--------
diff --git a/Documentation/git-cherry.txt b/Documentation/git-cherry.txt
index af87966..9a5e371 100644
--- a/Documentation/git-cherry.txt
+++ b/Documentation/git-cherry.txt
@@ -3,7 +3,7 @@ git-cherry(1)
NAME
----
-git-cherry - Find commits not merged upstream.
+git-cherry - Find commits not merged upstream
SYNOPSIS
--------
diff --git a/Documentation/git-clone-pack.txt b/Documentation/git-clone-pack.txt
index 39906fc..09f43ee 100644
--- a/Documentation/git-clone-pack.txt
+++ b/Documentation/git-clone-pack.txt
@@ -3,7 +3,7 @@ git-clone-pack(1)
NAME
----
-git-clone-pack - Clones a repository by receiving packed objects.
+git-clone-pack - Clones a repository by receiving packed objects
SYNOPSIS
diff --git a/Documentation/git-clone.txt b/Documentation/git-clone.txt
index 684e4bd..9ac54c2 100644
--- a/Documentation/git-clone.txt
+++ b/Documentation/git-clone.txt
@@ -3,7 +3,7 @@ git-clone(1)
NAME
----
-git-clone - Clones a repository.
+git-clone - Clones a repository
SYNOPSIS
diff --git a/Documentation/git-commit.txt b/Documentation/git-commit.txt
index 5b1b4d3..d04b342 100644
--- a/Documentation/git-commit.txt
+++ b/Documentation/git-commit.txt
@@ -18,6 +18,10 @@ Updates the index file for given paths, or all modified files if
VISUAL and EDITOR environment variables to edit the commit log
message.
+Several environment variable are used during commits. They are
+documented in gitlink:git-commit-tree[1].
+
+
This command can run `commit-msg`, `pre-commit`, and
`post-commit` hooks. See link:hooks.html[hooks] for more
information.
@@ -85,27 +89,12 @@ OPTIONS
<file>...::
Files to be committed. The meaning of these is
different between `--include` and `--only`. Without
- either, it defaults `--include` semantics.
+ either, it defaults `--only` semantics.
If you make a commit and then found a mistake immediately after
that, you can recover from it with gitlink:git-reset[1].
-WARNING
--------
-
-The 1.2.0 and its maintenance series 1.2.X will keep the
-traditional `--include` semantics as the default when neither
-`--only` nor `--include` is specified and `paths...` are given.
-This *will* change during the development towards 1.3.0 in the
-'master' branch of `git.git` repository. If you are using this
-command in your scripts, and you depend on the traditional
-`--include` semantics, please update them to explicitly ask for
-`--include` semantics. Also if you are used to making partial
-commit using `--include` semantics, please train your fingers to
-say `git commit --include paths...` (or `git commit -i paths...`).
-
-
Discussion
----------
@@ -121,7 +110,7 @@ even the command is invoked from a subdirectory.
That is, update the specified paths to the index and then commit
the whole tree.
-`git commit --only paths...` largely bypasses the index file and
+`git commit paths...` largely bypasses the index file and
commits only the changes made to the specified paths. It has
however several safety valves to prevent confusion.
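
A short sketch contrasting the two semantics discussed above (file names are
examples):

------------
# default with paths: commit only the changes made to these paths,
# largely bypassing whatever else is staged in the index
$ git commit hello.c Makefile

# -i/--include: first update the named paths in the index, then
# commit the whole tree
$ git commit --include hello.c Makefile
------------
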
diff --git a/Documentation/git-count-objects.txt b/Documentation/git-count-objects.txt
index 36888d9..47216f4 100644
--- a/Documentation/git-count-objects.txt
+++ b/Documentation/git-count-objects.txt
@@ -3,7 +3,7 @@ git-count-objects(1)
NAME
----
-git-count-objects - Reports on unpacked objects.
+git-count-objects - Reports on unpacked objects
SYNOPSIS
--------
diff --git a/Documentation/git-cvsimport.txt b/Documentation/git-cvsimport.txt
index dfe86ce..b0c6d7c 100644
--- a/Documentation/git-cvsimport.txt
+++ b/Documentation/git-cvsimport.txt
@@ -22,6 +22,12 @@ repository, or incrementally import into an existing one.
Splitting the CVS log into patch sets is done by 'cvsps'.
At least version 2.1 is required.
+You should *never* do any work of your own on the branches that are
+created by git-cvsimport. The initial import will create and populate a
+"master" branch from the CVS repository's main branch which you're free
+to work with; after that, you need to 'git merge' incremental imports, or
+any CVS branches, yourself.
+
OPTIONS
-------
-d <CVSROOT>::
@@ -93,21 +99,24 @@ If you need to pass multiple options, separate them with a comma.
CVS by default uses the unix username when writing its
commit logs. Using this option and an author-conv-file
in this format
-
++
+---------
exon=Andreas Ericsson <ae@op5.se>
spawn=Simon Pawn <spawn@frog-pond.org>
- git-cvsimport will make it appear as those authors had
- their GIT_AUTHOR_NAME and GIT_AUTHOR_EMAIL set properly
- all along.
-
- For convenience, this data is saved to $GIT_DIR/cvs-authors
- each time the -A option is provided and read from that same
- file each time git-cvsimport is run.
-
- It is not recommended to use this feature if you intend to
- export changes back to CVS again later with
- git-link[1]::git-cvsexportcommit.
+---------
++
+git-cvsimport will make it appear as those authors had
+their GIT_AUTHOR_NAME and GIT_AUTHOR_EMAIL set properly
+all along.
++
+For convenience, this data is saved to $GIT_DIR/cvs-authors
+each time the -A option is provided and read from that same
+file each time git-cvsimport is run.
++
+It is not recommended to use this feature if you intend to
+export changes back to CVS again later with
+git-link[1]::git-cvsexportcommit.
OUTPUT
------
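
As a sketch of the -A usage described above (the CVS root, module and file
name are examples):

------------
# map CVS user names to full author identities during the import
$ cat >cvs-authors.txt <<\EOF
exon=Andreas Ericsson <ae@op5.se>
spawn=Simon Pawn <spawn@frog-pond.org>
EOF
$ git cvsimport -v -d :pserver:cvs@example.com:/cvsroot -A cvs-authors.txt module
------------
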
diff --git a/Documentation/git-cvsserver.txt b/Documentation/git-cvsserver.txt
new file mode 100644
index 0000000..4dc13c3
--- /dev/null
+++ b/Documentation/git-cvsserver.txt
@@ -0,0 +1,148 @@
+git-cvsserver(1)
+================
+
+NAME
+----
+git-cvsserver - A CVS server emulator for git
+
+SYNOPSIS
+--------
+[verse]
+export CVS_SERVER=git-cvsserver
+'cvs' -d :ext:user@server/path/repo.git co <HEAD_name>
+
+DESCRIPTION
+-----------
+
+This application is a CVS emulation layer for git.
+
+It is highly functional. However, not all methods are implemented,
+and for those methods that are implemented,
+not all switches are implemented.
+
+Testing has been done using both the CLI CVS client, and the Eclipse CVS
+plugin. Most functionality works fine with both of these clients.
+
+LIMITATIONS
+-----------
+
+Currently cvsserver works over SSH connections for read/write clients, and
+over pserver for anonymous CVS access.
+
+CVS clients cannot tag, branch or perform GIT merges.
+
+INSTALLATION
+------------
+
+1. If you are going to offer anonymous CVS access via pserver, add a line in
+ /etc/inetd.conf like
+
+ cvspserver stream tcp nowait nobody git-cvsserver pserver
+
+ Note: In some cases, you need to pass the 'pserver' argument twice for
+ git-cvsserver to see it. So the line would look like
+
+ cvspserver stream tcp nowait nobody git-cvsserver pserver pserver
+
+ No special setup is needed for SSH access, other than having GIT tools
+ in the PATH. If you have clients that do not accept the CVS_SERVER
+ env variable, you can rename git-cvsserver to cvs.
+
+2. For each repo that you want accessible from CVS you need to edit config in
+ the repo and add the following section.
+
+ [gitcvs]
+ enabled=1
+ # optional for debugging
+ logfile=/path/to/logfile
+
+ Note: you need to ensure each user that is going to invoke git-cvsserver has
+ write access to the log file and to the git repository. When offering anon
+ access via pserver, this means that the nobody user should have write access
+ to at least the sqlite database at the root of the repository.
+
+3. On the client machine you need to set the following variables.
+ CVSROOT should be set as per normal, but the directory should point at the
+ appropriate git repo. For example:
+
+ For SSH access, CVS_SERVER should be set to git-cvsserver
+
+ Example:
+
+ export CVSROOT=:ext:user@server:/var/git/project.git
+ export CVS_SERVER=git-cvsserver
+
+4. For SSH clients that will make commits, make sure their .bashrc file
+ sets the GIT_AUTHOR and GIT_COMMITTER variables.
+
+5. Clients should now be able to check out the project. Use the CVS 'module'
+ name to indicate what GIT 'head' you want to check out. Example:
+
+ cvs co -d project-master master
+
+Eclipse CVS Client Notes
+------------------------
+
+To get a checkout with the Eclipse CVS client:
+
+1. Select "Create a new project -> From CVS checkout"
+2. Create a new location. See the notes below for details on how to choose the
+ right protocol.
+3. Browse the 'modules' available. It will give you a list of the heads in
+ the repository. You will not be able to browse the tree from there. Only
+ the heads.
+4. Pick 'HEAD' when it asks what branch/tag to check out. Untick the
+ "launch commit wizard" to avoid committing the .project file.
+
+Protocol notes: If you are using anonymous access via pserver, just select that.
+Those using SSH access should choose the 'ext' protocol, and configure 'ext'
+access on the Preferences->Team->CVS->ExtConnection pane. Set CVS_SERVER to
+'git-cvsserver'. Note that password support is not good when using 'ext';
+you will definitely want to have SSH keys setup.
+
+Alternatively, you can just use the non-standard extssh protocol that Eclipse
+offers. In that case CVS_SERVER is ignored, and you will have to replace
+the cvs utility on the server with git-cvsserver or manipulate your .bashrc
+so that calling 'cvs' effectively calls git-cvsserver.
+
+Clients known to work
+---------------------
+
+CVS 1.12.9 on Debian
+CVS 1.11.17 on MacOSX (from Fink package)
+Eclipse 3.0, 3.1.2 on MacOSX (see Eclipse CVS Client Notes)
+TortoiseCVS
+
+Operations supported
+--------------------
+
+All the operations required for normal use are supported, including
+checkout, diff, status, update, log, add, remove, commit.
+Legacy monitoring operations are not supported (edit, watch and related).
+Exports and tagging (tags and branches) are not supported at this stage.
+
+The server will set the -k mode to binary when relevant. In proper GIT
+tradition, the contents of the files are always respected.
+No keyword expansion or newline munging is supported.
+
+Dependencies
+------------
+
+git-cvsserver depends on DBD::SQLite.
+
+Copyright and Authors
+---------------------
+
+This program is copyright The Open University UK - 2006.
+
+Authors: Martyn Smith <martyn@catalyst.net.nz>
+ Martin Langhoff <martin@catalyst.net.nz>
+ with ideas and patches from participants of the git-list <git@vger.kernel.org>.
+
+Documentation
+--------------
+Documentation by Martyn Smith <martyn@catalyst.net.nz>, Martin Langhoff <martin@catalyst.net.nz>, and Matthias Urlichs <smurf@smurf.noris.de>.
+
+GIT
+---
+Part of the gitlink:git[7] suite
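
Putting the server and client steps above together, a minimal sketch (host,
path and head name are examples; git-repo-config simply writes the [gitcvs]
section shown above):

------------
# on the server, inside /var/git/project.git: enable the CVS emulation
$ git repo-config gitcvs.enabled 1

# on the client: point CVS at the repository over SSH and check out "master"
$ export CVSROOT=:ext:user@server:/var/git/project.git
$ export CVS_SERVER=git-cvsserver
$ cvs co -d project-master master
------------
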
diff --git a/Documentation/git-daemon.txt b/Documentation/git-daemon.txt
index 2cc6075..924a676 100644
--- a/Documentation/git-daemon.txt
+++ b/Documentation/git-daemon.txt
@@ -3,7 +3,7 @@ git-daemon(1)
NAME
----
-git-daemon - A really simple server for git repositories.
+git-daemon - A really simple server for git repositories
SYNOPSIS
--------
diff --git a/Documentation/git-describe.txt b/Documentation/git-describe.txt
index 0efe82a..7a253ea 100644
--- a/Documentation/git-describe.txt
+++ b/Documentation/git-describe.txt
@@ -3,7 +3,7 @@ git-describe(1)
NAME
----
-git-describe - Show the most recent tag that is reachable from a commit.
+git-describe - Show the most recent tag that is reachable from a commit
SYNOPSIS
diff --git a/Documentation/git-diff-stages.txt b/Documentation/git-diff-stages.txt
index 28c60fc..3273918 100644
--- a/Documentation/git-diff-stages.txt
+++ b/Documentation/git-diff-stages.txt
@@ -3,7 +3,7 @@ git-diff-stages(1)
NAME
----
-git-diff-stages - Compares content and mode of blobs between stages in an unmerged index file.
+git-diff-stages - Compares content and mode of blobs between stages in an unmerged index file
SYNOPSIS
diff --git a/Documentation/git-diff.txt b/Documentation/git-diff.txt
index ca41634..890931c 100644
--- a/Documentation/git-diff.txt
+++ b/Documentation/git-diff.txt
@@ -3,7 +3,7 @@ git-diff(1)
NAME
----
-git-diff - Show changes between commits, commit and working tree, etc.
+git-diff - Show changes between commits, commit and working tree, etc
SYNOPSIS
diff --git a/Documentation/git-fetch-pack.txt b/Documentation/git-fetch-pack.txt
index b507e9b..bff9aa6 100644
--- a/Documentation/git-fetch-pack.txt
+++ b/Documentation/git-fetch-pack.txt
@@ -3,12 +3,12 @@ git-fetch-pack(1)
NAME
----
-git-fetch-pack - Receive missing objects from another repository.
+git-fetch-pack - Receive missing objects from another repository
SYNOPSIS
--------
-git-fetch-pack [-q] [-k] [--exec=<git-upload-pack>] [<host>:]<directory> [<refs>...]
+'git-fetch-pack' [-q] [-k] [--exec=<git-upload-pack>] [<host>:]<directory> [<refs>...]
DESCRIPTION
-----------
diff --git a/Documentation/git-fetch.txt b/Documentation/git-fetch.txt
index a67dc34..a9e86fd 100644
--- a/Documentation/git-fetch.txt
+++ b/Documentation/git-fetch.txt
@@ -3,7 +3,7 @@ git-fetch(1)
NAME
----
-git-fetch - Download objects and a head from another repository.
+git-fetch - Download objects and a head from another repository
SYNOPSIS
diff --git a/Documentation/git-format-patch.txt b/Documentation/git-format-patch.txt
index 9ac0636..7cc7faf 100644
--- a/Documentation/git-format-patch.txt
+++ b/Documentation/git-format-patch.txt
@@ -3,13 +3,13 @@ git-format-patch(1)
NAME
----
-git-format-patch - Prepare patches for e-mail submission.
+git-format-patch - Prepare patches for e-mail submission
SYNOPSIS
--------
[verse]
-'git-format-patch' [-n | -k] [-o <dir> | --stdout] [-s] [-c]
+'git-format-patch' [-n | -k] [-o <dir> | --stdout] [--attach] [-s] [-c]
[--diff-options] <his> [<mine>]
DESCRIPTION
@@ -60,6 +60,18 @@ OPTIONS
standard output, instead of saving them into a file per
patch and implies --mbox.
+--attach::
+ Create attachments instead of inlining patches.
+
+
+CONFIGURATION
+-------------
+You can specify extra mail header lines to be added to each
+message in the repository configuration as follows:
+
+[format]
+ headers = "Organization: git-foo\n"
+
EXAMPLES
--------
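
For instance, the new option and configuration might be combined as follows
(the header value and output directory are illustrative):

------------
# add a custom header to every generated message
$ cat >>.git/config <<\EOF
[format]
	headers = "Organization: git-foo\n"
EOF

# emit the commits not in "origin" as one file per patch, as attachments
$ git format-patch --attach -o outgoing/ origin
------------
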
diff --git a/Documentation/git-fsck-objects.txt b/Documentation/git-fsck-objects.txt
index 387b435..93ce9dc 100644
--- a/Documentation/git-fsck-objects.txt
+++ b/Documentation/git-fsck-objects.txt
@@ -10,7 +10,7 @@ SYNOPSIS
--------
[verse]
'git-fsck-objects' [--tags] [--root] [--unreachable] [--cache]
- [--standalone | --full] [--strict] [<object>*]
+ [--full] [--strict] [<object>*]
DESCRIPTION
-----------
@@ -38,21 +38,14 @@ index file and all SHA1 references in .git/refs/* as heads.
Consider any object recorded in the index also as a head node for
an unreachability trace.
---standalone::
- Limit checks to the contents of GIT_OBJECT_DIRECTORY
- ($GIT_DIR/objects), making sure that it is consistent and
- complete without referring to objects found in alternate
- object pools listed in GIT_ALTERNATE_OBJECT_DIRECTORIES,
- nor packed git archives found in $GIT_DIR/objects/pack;
- cannot be used with --full.
-
--full::
Check not just objects in GIT_OBJECT_DIRECTORY
($GIT_DIR/objects), but also the ones found in alternate
- object pools listed in GIT_ALTERNATE_OBJECT_DIRECTORIES,
+ object pools listed in GIT_ALTERNATE_OBJECT_DIRECTORIES
+ or $GIT_DIR/objects/info/alternates,
and in packed git archives found in $GIT_DIR/objects/pack
and corresponding pack subdirectories in alternate
- object pools; cannot be used with --standalone.
+ object pools.
--strict::
Enable more strict checking, namely to catch a file mode
diff --git a/Documentation/git-get-tar-commit-id.txt b/Documentation/git-get-tar-commit-id.txt
index 30b1fbf..48805b6 100644
--- a/Documentation/git-get-tar-commit-id.txt
+++ b/Documentation/git-get-tar-commit-id.txt
@@ -3,7 +3,7 @@ git-get-tar-commit-id(1)
NAME
----
-git-get-tar-commit-id - Extract commit ID from an archive created using git-tar-tree.
+git-get-tar-commit-id - Extract commit ID from an archive created using git-tar-tree
SYNOPSIS
diff --git a/Documentation/git-grep.txt b/Documentation/git-grep.txt
index bf4b592..d55456a 100644
--- a/Documentation/git-grep.txt
+++ b/Documentation/git-grep.txt
@@ -3,7 +3,7 @@ git-grep(1)
NAME
----
-git-grep - print lines matching a pattern
+git-grep - Print lines matching a pattern
SYNOPSIS
@@ -24,13 +24,13 @@ OPTIONS
<option>...::
Either an option to pass to `grep` or `git-ls-files`.
-
- The following are the specific `git-ls-files` options
- that may be given: `-o`, `--cached`, `--deleted`, `--others`,
- `--killed`, `--ignored`, `--modified`, `--exclude=*`,
- `--exclude-from=*`, and `--exclude-per-directory=*`.
-
- All other options will be passed to `grep`.
++
+The following are the specific `git-ls-files` options
+that may be given: `-o`, `--cached`, `--deleted`, `--others`,
+`--killed`, `--ignored`, `--modified`, `--exclude=\*`,
+`--exclude-from=\*`, and `--exclude-per-directory=\*`.
++
+All other options will be passed to `grep`.
<pattern>::
The pattern to look for. The first non option is taken
diff --git a/Documentation/git-hash-object.txt b/Documentation/git-hash-object.txt
index 0924931..04e8d00 100644
--- a/Documentation/git-hash-object.txt
+++ b/Documentation/git-hash-object.txt
@@ -3,7 +3,7 @@ git-hash-object(1)
NAME
----
-git-hash-object - Computes object ID and optionally creates a blob from a file.
+git-hash-object - Computes object ID and optionally creates a blob from a file
SYNOPSIS
diff --git a/Documentation/git-http-push.txt b/Documentation/git-http-push.txt
index c7066d6..7e1f894 100644
--- a/Documentation/git-http-push.txt
+++ b/Documentation/git-http-push.txt
@@ -3,7 +3,7 @@ git-http-push(1)
NAME
----
-git-http-push - Push missing objects using HTTP/DAV.
+git-http-push - Push missing objects using HTTP/DAV
SYNOPSIS
diff --git a/Documentation/git-init-db.txt b/Documentation/git-init-db.txt
index ea4d849..aeb1115 100644
--- a/Documentation/git-init-db.txt
+++ b/Documentation/git-init-db.txt
@@ -14,7 +14,8 @@ SYNOPSIS
OPTIONS
-------
--template=<template_directory>::
- Provide the directory in from which templates will be used.
+ Provide the directory from which templates will be used.
+ The default template directory is `/usr/share/git-core/templates`.
--shared::
Specify that the git repository is to be shared amongst several users.
@@ -22,9 +23,17 @@ OPTIONS
DESCRIPTION
-----------
-This simply creates an empty git repository - basically a `.git` directory
-and `.git/object/??/`, `.git/refs/heads` and `.git/refs/tags` directories,
-and links `.git/HEAD` symbolically to `.git/refs/heads/master`.
+This command creates an empty git repository - basically a `.git` directory
+with subdirectories for `objects`, `refs/heads`, `refs/tags`, and
+templated files.
+An initial `HEAD` file that references the HEAD of the master branch
+is also created.
+
+If `--template=<template_directory>` is specified, `<template_directory>`
+is used as the source of the template files rather than the default.
+The template files include some directory structure, some suggested
+"exclude patterns", and copies of non-executing "hook" files. The
+suggested patterns and hook files are all modifiable and extensible.
If the `$GIT_DIR` environment variable is set then it specifies a path
to use instead of `./.git` for the base of the repository.
@@ -38,7 +47,6 @@ repository. When specifying `--shared` the config variable "core.sharedRepositor
is set to 'true' so that directories under `$GIT_DIR` are made group writable
(and g+sx, since the git group may be not the primary group of all users).
-
Running `git-init-db` in an existing repository is safe. It will not overwrite
things that are already there. The primary reason for rerunning `git-init-db`
is to pick up newly added templates.
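
A small sketch of the options described above (the paths are examples; the
template directory shown is the documented default):

------------
# create a repository shared by a group, from a custom template directory
$ mkdir project && cd project
$ git init-db --template=/usr/share/git-core/templates --shared
------------
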
diff --git a/Documentation/git-lost-found.txt b/Documentation/git-lost-found.txt
index 03156f2..f52a9d7 100644
--- a/Documentation/git-lost-found.txt
+++ b/Documentation/git-lost-found.txt
@@ -3,7 +3,7 @@ git-lost-found(1)
NAME
----
-git-lost-found - Recover lost refs that luckily have not yet been pruned.
+git-lost-found - Recover lost refs that luckily have not yet been pruned
SYNOPSIS
--------
diff --git a/Documentation/git-ls-files.txt b/Documentation/git-ls-files.txt
index fe53412..796d049 100644
--- a/Documentation/git-ls-files.txt
+++ b/Documentation/git-ls-files.txt
@@ -8,13 +8,15 @@ git-ls-files - Information about files in the index/working directory
SYNOPSIS
--------
-'git-ls-files' [-z] [-t]
+[verse]
+'git-ls-files' [-z] [-t] [-v]
(--[cached|deleted|others|ignored|stage|unmerged|killed|modified])\*
(-[c|d|o|i|s|u|k|m])\*
[-x <pattern>|--exclude=<pattern>]
[-X <file>|--exclude-from=<file>]
- [--exclude-per-directory=<file>]
- [--full-name] [--] [<file>]\*
+ [--exclude-per-directory=<file>]
+ [--error-unmatch]
+ [--full-name] [--abbrev] [--] [<file>]\*
DESCRIPTION
-----------
@@ -50,6 +52,9 @@ OPTIONS
If a whole directory is classified as "other", show just its
name (with a trailing slash) and not its whole contents.
+--no-empty-directory::
+ Do not list empty directories. Has no effect without --directory.
+
-u|--unmerged::
Show unmerged files in the output (forces --stage)
@@ -72,6 +77,10 @@ OPTIONS
read additional exclude patterns that apply only to the
directory and its subdirectories in <file>.
+--error-unmatch::
+ If any <file> does not appear in the index, treat this as an
+ error (return 1).
+
-t::
Identify the file status with the following tags (followed by
a space) at the start of each line:
@@ -82,12 +91,21 @@ OPTIONS
K:: to be killed
?:: other
+-v::
+ Similar to `-t`, but use lowercase letters for files
+ that are marked as 'always matching index'.
+
--full-name::
When run from a subdirectory, the command usually
outputs paths relative to the current directory. This
option forces paths to be output relative to the project
top directory.
+--abbrev[=<n>]::
+ Instead of showing the full 40-byte hexadecimal object
+ lines, show only a partial prefix of the hexdigits.
+ A non-default number of digits can be specified with --abbrev=<n>.
+
--::
Do not interpret any more arguments as options.
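
The new --error-unmatch flag makes git-ls-files usable as a "is this path
tracked?" test in scripts, for example (a sketch):

------------
# exits non-zero if hello.c is not in the index
$ git ls-files --error-unmatch -- hello.c >/dev/null || echo "not tracked"
------------
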
diff --git a/Documentation/git-ls-remote.txt b/Documentation/git-ls-remote.txt
index 66fe60f..ae4c1a2 100644
--- a/Documentation/git-ls-remote.txt
+++ b/Documentation/git-ls-remote.txt
@@ -3,7 +3,7 @@ git-ls-remote(1)
NAME
----
-git-ls-remote - Look at references other repository has.
+git-ls-remote - Look at references other repository has
SYNOPSIS
diff --git a/Documentation/git-ls-tree.txt b/Documentation/git-ls-tree.txt
index b92a8b2..018c401 100644
--- a/Documentation/git-ls-tree.txt
+++ b/Documentation/git-ls-tree.txt
@@ -3,12 +3,14 @@ git-ls-tree(1)
NAME
----
-git-ls-tree - Lists the contents of a tree object.
+git-ls-tree - Lists the contents of a tree object
SYNOPSIS
--------
-'git-ls-tree' [-d] [-r] [-t] [-z] [--name-only] [--name-status] <tree-ish> [paths...]
+'git-ls-tree' [-d] [-r] [-t] [-z]
+ [--name-only] [--name-status] [--full-name] [--abbrev=[<n>]]
+ <tree-ish> [paths...]
DESCRIPTION
-----------
@@ -40,6 +42,11 @@ OPTIONS
--name-status::
List only filenames (instead of the "long" output), one per line.
+--abbrev[=<n>]::
+ Instead of showing the full 40-byte hexadecimal object
+ lines, show only a partial prefix of the hexdigits.
+ A non-default number of digits can be specified with --abbrev=<n>.
+
paths::
When paths are given, show them (note that this isn't really raw
pathnames, but rather a list of patterns to match). Otherwise
diff --git a/Documentation/git-mailinfo.txt b/Documentation/git-mailinfo.txt
index 8890754..ea0a065 100644
--- a/Documentation/git-mailinfo.txt
+++ b/Documentation/git-mailinfo.txt
@@ -3,7 +3,7 @@ git-mailinfo(1)
NAME
----
-git-mailinfo - Extracts patch from a single e-mail message.
+git-mailinfo - Extracts patch from a single e-mail message
SYNOPSIS
diff --git a/Documentation/git-mailsplit.txt b/Documentation/git-mailsplit.txt
index e0703e9..209e36b 100644
--- a/Documentation/git-mailsplit.txt
+++ b/Documentation/git-mailsplit.txt
@@ -3,7 +3,7 @@ git-mailsplit(1)
NAME
----
-git-mailsplit - Totally braindamaged mbox splitter program.
+git-mailsplit - Totally braindamaged mbox splitter program
SYNOPSIS
--------
diff --git a/Documentation/git-mv.txt b/Documentation/git-mv.txt
index d242b39..207c43a 100644
--- a/Documentation/git-mv.txt
+++ b/Documentation/git-mv.txt
@@ -3,7 +3,7 @@ git-mv(1)
NAME
----
-git-mv - Script used to move or rename a file, directory or symlink.
+git-mv - Move or rename a file, directory or symlink
SYNOPSIS
diff --git a/Documentation/git-name-rev.txt b/Documentation/git-name-rev.txt
index e37b0b8..6870708 100644
--- a/Documentation/git-name-rev.txt
+++ b/Documentation/git-name-rev.txt
@@ -3,7 +3,7 @@ git-name-rev(1)
NAME
----
-git-name-rev - Find symbolic names for given revs.
+git-name-rev - Find symbolic names for given revs
SYNOPSIS
diff --git a/Documentation/git-pack-objects.txt b/Documentation/git-pack-objects.txt
index 4cb2e83..4991f88 100644
--- a/Documentation/git-pack-objects.txt
+++ b/Documentation/git-pack-objects.txt
@@ -3,7 +3,7 @@ git-pack-objects(1)
NAME
----
-git-pack-objects - Create a packed archive of objects.
+git-pack-objects - Create a packed archive of objects
SYNOPSIS
@@ -101,7 +101,7 @@ Documentation
-------------
Documentation by Junio C Hamano
-See-Also
+See Also
--------
gitlink:git-repack[1]
gitlink:git-prune-packed[1]
diff --git a/Documentation/git-pack-redundant.txt b/Documentation/git-pack-redundant.txt
index 9fe86ae..8fb0659 100644
--- a/Documentation/git-pack-redundant.txt
+++ b/Documentation/git-pack-redundant.txt
@@ -3,12 +3,12 @@ git-pack-redundant(1)
NAME
----
-git-pack-redundant - Program used to find redundant pack files.
+git-pack-redundant - Program used to find redundant pack files
SYNOPSIS
--------
-'git-pack-redundant [ --verbose ] [ --alt-odb ] < --all | .pack filename ... >'
+'git-pack-redundant' [ --verbose ] [ --alt-odb ] < --all | .pack filename ... >
DESCRIPTION
-----------
@@ -46,7 +46,7 @@ Documentation
--------------
Documentation by Lukas Sandström <lukass@etek.chalmers.se>
-See-Also
+See Also
--------
gitlink:git-pack-objects[1]
gitlink:git-repack[1]
diff --git a/Documentation/git-patch-id.txt b/Documentation/git-patch-id.txt
index c8bd197..723b8cc 100644
--- a/Documentation/git-patch-id.txt
+++ b/Documentation/git-patch-id.txt
@@ -3,7 +3,7 @@ git-patch-id(1)
NAME
----
-git-patch-id - Generate a patch ID.
+git-patch-id - Generate a patch ID
SYNOPSIS
--------
diff --git a/Documentation/git-peek-remote.txt b/Documentation/git-peek-remote.txt
index 915d3f8..a00060c 100644
--- a/Documentation/git-peek-remote.txt
+++ b/Documentation/git-peek-remote.txt
@@ -3,7 +3,7 @@ git-peek-remote(1)
NAME
----
-git-peek-remote - Lists the references in a remote repository.
+git-peek-remote - Lists the references in a remote repository
SYNOPSIS
diff --git a/Documentation/git-prune-packed.txt b/Documentation/git-prune-packed.txt
index 37c53a9..2348826 100644
--- a/Documentation/git-prune-packed.txt
+++ b/Documentation/git-prune-packed.txt
@@ -40,7 +40,7 @@ Documentation
--------------
Documentation by Ryan Anderson <ryan@michonline.com>
-See-Also
+See Also
--------
gitlink:git-pack-objects[1]
gitlink:git-repack[1]
diff --git a/Documentation/git-pull.txt b/Documentation/git-pull.txt
index 20175f4..51577fc 100644
--- a/Documentation/git-pull.txt
+++ b/Documentation/git-pull.txt
@@ -3,7 +3,7 @@ git-pull(1)
NAME
----
-git-pull - Pull and merge from another repository.
+git-pull - Pull and merge from another repository
SYNOPSIS
diff --git a/Documentation/git-push.txt b/Documentation/git-push.txt
index 6f4a48a..d5b5ca1 100644
--- a/Documentation/git-push.txt
+++ b/Documentation/git-push.txt
@@ -3,7 +3,7 @@ git-push(1)
NAME
----
-git-push - Update remote refs along with associated objects.
+git-push - Update remote refs along with associated objects
SYNOPSIS
diff --git a/Documentation/git-read-tree.txt b/Documentation/git-read-tree.txt
index 6fbd6d9..844cfda 100644
--- a/Documentation/git-read-tree.txt
+++ b/Documentation/git-read-tree.txt
@@ -8,7 +8,7 @@ git-read-tree - Reads tree information into the index
SYNOPSIS
--------
-'git-read-tree' (<tree-ish> | [[-m | --reset] [-u | -i]] <tree-ish1> [<tree-ish2> [<tree-ish3>]])
+'git-read-tree' (<tree-ish> | [[-m [--aggressive]| --reset] [-u | -i]] <tree-ish1> [<tree-ish2> [<tree-ish3>]])
DESCRIPTION
@@ -50,6 +50,19 @@ OPTIONS
trees that are not directly related to the current
working tree status into a temporary index file.
+--aggressive::
+ Usually a three-way merge by `git-read-tree` resolves
+ the merge for really trivial cases and leaves other
+ cases unresolved in the index, so that Porcelains can
+ implement different merge policies. This flag makes the
+ command resolve a few more cases internally:
++
+* when one side removes a path and the other side leaves the path
+ unmodified. The resolution is to remove that path.
+* when both sides remove a path. The resolution is to remove that path.
+* when both sides add a path identically. The resolution
+ is to add that path.
+
<tree-ish#>::
The id of the tree object(s) to be read/merged.
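
As a sketch of where --aggressive fits in a Porcelain-driven three-way merge
($base and $theirs are placeholders for the common ancestor and the other head):

------------
# resolve the trivial delete/delete, delete/unmodified and identical-add
# cases listed above; everything else is left unmerged in the index
$ git read-tree -u -m --aggressive $base HEAD $theirs
------------
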
diff --git a/Documentation/git-rebase.txt b/Documentation/git-rebase.txt
index 16c158f..4a7e67a 100644
--- a/Documentation/git-rebase.txt
+++ b/Documentation/git-rebase.txt
@@ -3,22 +3,74 @@ git-rebase(1)
NAME
----
-git-rebase - Rebase local commits to new upstream head.
+git-rebase - Rebase local commits to new upstream head
SYNOPSIS
--------
-'git-rebase' <upstream> [<head>]
+'git-rebase' [--onto <newbase>] <upstream> [<branch>]
DESCRIPTION
-----------
-Rebases local commits to the new head of the upstream tree.
+git-rebase applies to <upstream> (or optionally to <newbase>) commits
+from <branch> that do not appear in <upstream>. When <branch> is not
+specified it defaults to the current branch (HEAD).
+
+When git-rebase is complete, <branch> will be updated to point to the
+newly created line of commit objects, so the previous line will not be
+accessible unless there are other references to it already.
+
+Assume the following history exists and the current branch is "topic":
+
+          A---B---C topic
+         /
+    D---E---F---G master
+
+From this point, the result of either of the following commands:
+
+ git-rebase master
+ git-rebase master topic
+
+would be:
+
+                  A'--B'--C' topic
+                 /
+    D---E---F---G master
+
+While, starting from the same point, the result of either of the following
+commands:
+
+ git-rebase --onto master~1 master
+ git-rebase --onto master~1 master topic
+
+would be:
+
+              A'--B'--C' topic
+             /
+    D---E---F---G master
+
+In case of conflict, git-rebase will stop at the first problematic commit
+and leave conflict markers in the tree. After resolving the conflict manually
+and updating the index with the desired resolution, you can continue the
+rebasing process with
+
+ git am --resolved --3way
+
+Alternatively, you can undo the git-rebase with
+
+ git reset --hard ORIG_HEAD
+ rm -r .dotest
OPTIONS
-------
+<newbase>::
+ Starting point at which to create the new commits. If the
+ --onto option is not specified, the starting point is
+ <upstream>.
+
<upstream>::
Upstream branch to compare against.
-<head>::
+<branch>::
Working branch; defaults to HEAD.
Author
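
For instance, the --onto form can transplant a topic built on one branch onto
another (branch names are examples):

------------
# "topicB" was forked from "topicA" but should sit directly on "master";
# replay only topicB's own commits onto master
$ git rebase --onto master topicA topicB

# if a commit fails to apply: fix the conflicts, update the index, then
$ git am --resolved --3way
------------
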
diff --git a/Documentation/git-relink.txt b/Documentation/git-relink.txt
index 6240535..aca6012 100644
--- a/Documentation/git-relink.txt
+++ b/Documentation/git-relink.txt
@@ -3,7 +3,7 @@ git-relink(1)
NAME
----
-git-relink - Hardlink common objects in local repositories.
+git-relink - Hardlink common objects in local repositories
SYNOPSIS
--------
diff --git a/Documentation/git-repack.txt b/Documentation/git-repack.txt
index 6c0f792..d2f9a44 100644
--- a/Documentation/git-repack.txt
+++ b/Documentation/git-repack.txt
@@ -63,7 +63,7 @@ Documentation
--------------
Documentation by Ryan Anderson <ryan@michonline.com>
-See-Also
+See Also
--------
gitlink:git-pack-objects[1]
gitlink:git-prune-packed[1]
diff --git a/Documentation/git-repo-config.txt b/Documentation/git-repo-config.txt
index 33fcde4..26759a8 100644
--- a/Documentation/git-repo-config.txt
+++ b/Documentation/git-repo-config.txt
@@ -3,11 +3,12 @@ git-repo-config(1)
NAME
----
-git-repo-config - Get and set options in .git/config.
+git-repo-config - Get and set options in .git/config
SYNOPSIS
--------
+[verse]
'git-repo-config' [type] name [value [value_regex]]
'git-repo-config' [type] --replace-all name [value [value_regex]]
'git-repo-config' [type] --get name [value_regex]
diff --git a/Documentation/git-request-pull.txt b/Documentation/git-request-pull.txt
index 2463ec9..478a5fd 100644
--- a/Documentation/git-request-pull.txt
+++ b/Documentation/git-request-pull.txt
@@ -3,7 +3,7 @@ git-request-pull(1)
NAME
----
-git-request-pull - Generates a summary of pending changes.
+git-request-pull - Generates a summary of pending changes
SYNOPSIS
--------
diff --git a/Documentation/git-reset.txt b/Documentation/git-reset.txt
index b4e737e..b7b9798 100644
--- a/Documentation/git-reset.txt
+++ b/Documentation/git-reset.txt
@@ -3,7 +3,7 @@ git-reset(1)
NAME
----
-git-reset - Reset current HEAD to the specified state.
+git-reset - Reset current HEAD to the specified state
SYNOPSIS
--------
diff --git a/Documentation/git-rev-list.txt b/Documentation/git-rev-list.txt
index 1c6146c..8255ae1 100644
--- a/Documentation/git-rev-list.txt
+++ b/Documentation/git-rev-list.txt
@@ -16,9 +16,9 @@ SYNOPSIS
[ \--no-merges ]
[ \--remove-empty ]
[ \--all ]
- [ [ \--merge-order [ \--show-breaks ] ] | [ \--topo-order ] ]
+ [ \--topo-order ]
[ \--parents ]
- [ \--objects [ \--unpacked ] ]
+ [ [\--objects | \--objects-edge] [ \--unpacked ] ]
[ \--pretty | \--header ]
[ \--bisect ]
<commit>... [ \-- <paths>... ]
@@ -53,6 +53,14 @@ OPTIONS
which I need to download if I have the commit object 'bar', but
not 'foo'".
+--objects-edge::
+ Similar to `--objects`, but also print the IDs of
+ excluded commits prefixed with a `-` character. This is
+ used by `git-pack-objects` to build a 'thin' pack, which
+ records objects in deltified form based on objects
+ contained in these excluded commits to reduce network
+ traffic.
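+ For example, `git-rev-list --objects-edge HEAD ^origin` (a
+ hypothetical invocation) lists the objects a repository that
+ already has everything reachable from `origin` would need to
+ complete `HEAD`, with the edge commits prefixed with `-`.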
+
--unpacked::
Only useful with `--objects`; print the object IDs that
are not in packs.
@@ -94,57 +102,10 @@ OPTIONS
topological order (i.e. descendant commits are shown
before their parents).
---merge-order::
- When specified the commit history is decomposed into a unique
- sequence of minimal, non-linear epochs and maximal, linear epochs.
- Non-linear epochs are then linearised by sorting them into merge
- order, which is described below.
-+
-Maximal, linear epochs correspond to periods of sequential development.
-Minimal, non-linear epochs correspond to periods of divergent development
-followed by a converging merge. The theory of epochs is described in more
-detail at
-link:http://blackcubes.dyndns.org/epoch/[http://blackcubes.dyndns.org/epoch/].
-+
-The merge order for a non-linear epoch is defined as a linearisation for which
-the following invariants are true:
-+
- 1. if a commit P is reachable from commit N, commit P sorts after commit N
- in the linearised list.
- 2. if Pi and Pj are any two parents of a merge M (with i < j), then any
- commit N, such that N is reachable from Pj but not reachable from Pi,
- sorts before all commits reachable from Pi.
-+
-Invariant 1 states that later commits appear before earlier commits they are
-derived from.
-+
-Invariant 2 states that commits unique to "later" parents in a merge, appear
-before all commits from "earlier" parents of a merge.
-
---show-breaks::
- Each item of the list is output with a 2-character prefix consisting
- of one of: (|), (^), (=) followed by a space.
-+
-Commits marked with (=) represent the boundaries of minimal, non-linear epochs
-and correspond either to the start of a period of divergent development or to
-the end of such a period.
-+
-Commits marked with (|) are direct parents of commits immediately preceding
-the marked commit in the list.
-+
-Commits marked with (^) are not parents of the immediately preceding commit.
-These "breaks" represent necessary discontinuities implied by trying to
-represent an arbitrary DAG in a linear form.
-+
-`--show-breaks` is only valid if `--merge-order` is also specified.
-
-
Author
------
Written by Linus Torvalds <torvalds@osdl.org>
-Original *--merge-order* logic by Jon Seymour <jon.seymour@gmail.com>
-
Documentation
--------------
Documentation by David Greaves, Junio C Hamano and the git-list <git@vger.kernel.org>.
diff --git a/Documentation/git-rev-parse.txt b/Documentation/git-rev-parse.txt
index 1662e06..8b95df0 100644
--- a/Documentation/git-rev-parse.txt
+++ b/Documentation/git-rev-parse.txt
@@ -3,7 +3,7 @@ git-rev-parse(1)
NAME
----
-git-rev-parse - Pick out and massage parameters.
+git-rev-parse - Pick out and massage parameters
SYNOPSIS
@@ -80,7 +80,7 @@ OPTIONS
--git-dir::
Show `$GIT_DIR` if defined else show the path to the .git directory.
---short, short=number::
+--short, --short=number::
Instead of outputting the full SHA1 values of object names try to
abbreviate them to a shorter unique name. When no length is specified
7 is used. The minimum length is 4.
diff --git a/Documentation/git-revert.txt b/Documentation/git-revert.txt
index e27c680..71f7815 100644
--- a/Documentation/git-revert.txt
+++ b/Documentation/git-revert.txt
@@ -3,7 +3,7 @@ git-revert(1)
NAME
----
-git-revert - Revert an existing commit.
+git-revert - Revert an existing commit
SYNOPSIS
--------
diff --git a/Documentation/git-rm.txt b/Documentation/git-rm.txt
new file mode 100644
index 0000000..c9c3088
--- /dev/null
+++ b/Documentation/git-rm.txt
@@ -0,0 +1,92 @@
+git-rm(1)
+=========
+
+NAME
+----
+git-rm - Remove files from the working tree and from the index
+
+SYNOPSIS
+--------
+'git-rm' [-f] [-n] [-v] [--] <file>...
+
+DESCRIPTION
+-----------
+A convenience wrapper for git-update-index --remove. For those coming
+from cvs, git-rm provides an operation similar to "cvs rm" or "cvs
+remove".
+
+
+OPTIONS
+-------
+<file>...::
+ Files to remove from the index and optionally, from the
+ working tree as well.
+
+-f::
+ Remove files from the working tree as well as from the index.
+
+-n::
+ Don't actually remove the file(s), just show if they exist in
+ the index.
+
+-v::
+ Be verbose.
+
+--::
+ This option can be used to separate command-line options from
+ the list of files (useful when filenames might be mistaken
+ for command-line options).
+
+
+DISCUSSION
+----------
+
+The list of <file> given to the command is fed to the `git-ls-files`
+command to list files that are registered in the index and
+are not ignored/excluded by `$GIT_DIR/info/exclude` file or
+`.gitignore` file in each directory. This means two things:
+
+. You can put the name of a directory on the command line, and the
+ command will remove all files in it and its subdirectories (the
+ directories themselves are never removed from the working tree);
+
+. Giving the name of a file that is not in the index does not
+ remove that file.
+
+
+EXAMPLES
+--------
+git-rm Documentation/\\*.txt::
+
+ Removes all `\*.txt` files from the index that are under the
+ `Documentation` directory and any of its subdirectories. The
+ files are not removed from the working tree.
++
+Note that the asterisk `\*` is quoted from the shell in this
+example; this lets the command include the files from
+subdirectories of the `Documentation/` directory.
+
+git-rm -f git-*.sh::
+
+ Remove all git-*.sh scripts that are in the index. The files
+ are removed from the index, and (because of the -f option)
+ from the working tree as well. Because this example lets the
+ shell expand the asterisk (i.e. you are listing the files
+ explicitly), it does not remove `subdir/git-foo.sh`.
+
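+git-rm -n somedir::
+
+ A hypothetical dry run: because `somedir` names a directory,
+ this lists the files under it (and its subdirectories) that are
+ registered in the index; with `-n` nothing is actually removed.
+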
+See Also
+--------
+gitlink:git-add[1]
+
+Author
+------
+Written by Linus Torvalds <torvalds@osdl.org>
+
+Documentation
+--------------
+Documentation by Junio C Hamano and the git-list <git@vger.kernel.org>.
+
+GIT
+---
+Part of the gitlink:git[7] suite
+
diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt
index 00537d8..8c58685 100644
--- a/Documentation/git-send-email.txt
+++ b/Documentation/git-send-email.txt
@@ -24,6 +24,9 @@ OPTIONS
-------
The options available are:
+--cc::
+ Specify a starting "Cc:" value for each email.
+
--chain-reply-to, --no-chain-reply-to::
If this is set, each email will be sent as a reply to the previous
email sent. If disabled with "--no-chain-reply-to", all emails after
@@ -48,6 +51,9 @@ The options available are:
Only necessary if --compose is also set. If --compose
is not set, this will be prompted for.
+--no-signed-off-by-cc::
+ Do not add emails found in Signed-off-by: lines to the cc list.
+
--quiet::
Make git-send-email less verbose. One line per email should be
all that is output.
@@ -61,6 +67,10 @@ The options available are:
Only necessary if --compose is also set. If --compose
is not set, this will be prompted for.
+--suppress-from::
+ Do not add the From: address to the cc: list, if it shows up in a From:
+ line.
+
--to::
Specify the primary recipient of the emails generated.
Generally, this will be the upstream maintainer of the
diff --git a/Documentation/git-send-pack.txt b/Documentation/git-send-pack.txt
index 577f06a..08e0705 100644
--- a/Documentation/git-send-pack.txt
+++ b/Documentation/git-send-pack.txt
@@ -3,7 +3,7 @@ git-send-pack(1)
NAME
----
-git-send-pack - Push missing objects packed.
+git-send-pack - Push missing objects packed
SYNOPSIS
diff --git a/Documentation/git-sh-setup.txt b/Documentation/git-sh-setup.txt
index 6ef59ac..6742c9b 100644
--- a/Documentation/git-sh-setup.txt
+++ b/Documentation/git-sh-setup.txt
@@ -3,7 +3,7 @@ git-sh-setup(1)
NAME
----
-git-sh-setup - Common git shell script setup code.
+git-sh-setup - Common git shell script setup code
SYNOPSIS
--------
diff --git a/Documentation/git-shell.txt b/Documentation/git-shell.txt
index 3f4d804..cc4266d 100644
--- a/Documentation/git-shell.txt
+++ b/Documentation/git-shell.txt
@@ -8,7 +8,7 @@ git-shell - Restricted login shell for GIT over SSH only
SYNOPSIS
--------
-'git-shell -c <command> <argument>'
+'git-shell' -c <command> <argument>
DESCRIPTION
-----------
diff --git a/Documentation/git-shortlog.txt b/Documentation/git-shortlog.txt
index 65ca77f..54fb922 100644
--- a/Documentation/git-shortlog.txt
+++ b/Documentation/git-shortlog.txt
@@ -3,12 +3,12 @@ git-shortlog(1)
NAME
----
-git-shortlog - Summarize 'git log' output.
+git-shortlog - Summarize 'git log' output
SYNOPSIS
--------
-'git-log --pretty=short | git shortlog'
+git-log --pretty=short | 'git-shortlog'
DESCRIPTION
-----------
diff --git a/Documentation/git-show-branch.txt b/Documentation/git-show-branch.txt
index 7b1a9c9..f115b45 100644
--- a/Documentation/git-show-branch.txt
+++ b/Documentation/git-show-branch.txt
@@ -3,14 +3,14 @@ git-show-branch(1)
NAME
----
-git-show-branch - Show branches and their commits.
+git-show-branch - Show branches and their commits
SYNOPSIS
--------
[verse]
-git-show-branch [--all] [--heads] [--tags] [--topo-order] [--current]
- [--more=<n> | --list | --independent | --merge-base]
- [--no-name | --sha1-name] [<rev> | <glob>]...
+'git-show-branch' [--all] [--heads] [--tags] [--topo-order] [--current]
+ [--more=<n> | --list | --independent | --merge-base]
+ [--no-name | --sha1-name] [<rev> | <glob>]...
DESCRIPTION
-----------
@@ -141,7 +141,7 @@ it, having the following in the configuration file may help:
------------
-With this,`git show-branch` without extra parameters would show
+With this, `git show-branch` without extra parameters would show
only the primary branches. In addition, if you happen to be on
your topic branch, it is shown as well.
diff --git a/Documentation/git-show.txt b/Documentation/git-show.txt
index 9c359a4..2b4df3f 100644
--- a/Documentation/git-show.txt
+++ b/Documentation/git-show.txt
@@ -3,7 +3,7 @@ git-show(1)
NAME
----
-git-show - Show one commit with difference it introduces.
+git-show - Show one commit with difference it introduces
SYNOPSIS
diff --git a/Documentation/git-status.txt b/Documentation/git-status.txt
index 753fc08..e446f48 100644
--- a/Documentation/git-status.txt
+++ b/Documentation/git-status.txt
@@ -3,7 +3,7 @@ git-status(1)
NAME
----
-git-status - Show working tree status.
+git-status - Show working tree status
SYNOPSIS
diff --git a/Documentation/git-stripspace.txt b/Documentation/git-stripspace.txt
index 528a1b6..3a03dd0 100644
--- a/Documentation/git-stripspace.txt
+++ b/Documentation/git-stripspace.txt
@@ -3,7 +3,7 @@ git-stripspace(1)
NAME
----
-git-stripspace - Filter out empty lines.
+git-stripspace - Filter out empty lines
SYNOPSIS
diff --git a/Documentation/git-svnimport.txt b/Documentation/git-svnimport.txt
index 63e28b8..b1b87c2 100644
--- a/Documentation/git-svnimport.txt
+++ b/Documentation/git-svnimport.txt
@@ -9,11 +9,13 @@ git-svnimport - Import a SVN repository into git
SYNOPSIS
--------
+[verse]
'git-svnimport' [ -o <branch-for-HEAD> ] [ -h ] [ -v ] [ -d | -D ]
- [ -C <GIT_repository> ] [ -i ] [ -u ] [-l limit_rev]
- [ -b branch_subdir ] [ -T trunk_subdir ] [ -t tag_subdir ]
- [ -s start_chg ] [ -m ] [ -M regex ]
- <SVN_repository_URL> [ <path> ]
+ [ -C <GIT_repository> ] [ -i ] [ -u ] [-l limit_rev]
+ [ -b branch_subdir ] [ -T trunk_subdir ] [ -t tag_subdir ]
+ [ -s start_chg ] [ -m ] [ -r ] [ -M regex ]
+ [ -I <ignorefile_name> ] [ -A <author_file> ]
+ <SVN_repository_URL> [ <path> ]
DESCRIPTION
@@ -61,6 +63,34 @@ When importing incrementally, you might need to edit the .git/svn2git file.
the git repository. Use this option if you want to import into a
different branch.
+-r::
+ Prepend 'rX: ' to commit messages, where X is the imported
+ subversion revision.
+
+-I <ignorefile_name>::
+ Import the svn:ignore directory property to files with this
+ name in each directory. (The Subversion and GIT ignore
+ syntaxes are similar enough that using the Subversion patterns
+ directly with "-I .gitignore" will almost always just work.)
+
+-A <author_file>::
+ Read a file with lines of the form
++
+------
+ username = User's Full Name <email@addr.es>
+
+------
++
+and use "User's Full Name <email@addr.es>" as the GIT
+author and committer for Subversion commits made by
+"username". If encountering a commit made by a user not in the
+list, abort.
++
+For convenience, this data is saved to $GIT_DIR/svn-authors
+each time the -A option is provided, and read from that same
+file each time git-svnimport is run with an existing GIT
+repository without -A.
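++
+For example, a hypothetical first import using both `-A` and `-I`
+(the URL and file names are illustrative) might be invoked as
++
+------
+ git-svnimport -A authors.txt -I .gitignore http://svn.example.com/project
+------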
+
-m::
Attempt to detect merges based on the commit message. This option
will enable default regexes that try to capture the name source
diff --git a/Documentation/git-tag.txt b/Documentation/git-tag.txt
index e8892bb..45476c2 100644
--- a/Documentation/git-tag.txt
+++ b/Documentation/git-tag.txt
@@ -3,16 +3,18 @@ git-tag(1)
NAME
----
-git-tag - Create a tag object signed with GPG
+git-tag - Create a tag object signed with GPG
SYNOPSIS
--------
+[verse]
'git-tag' [-a | -s | -u <key-id>] [-f | -d] [-m <msg>] <name> [<head>]
+'git-tag' -l [<pattern>]
DESCRIPTION
-----------
-Adds a 'tag' reference in .git/refs/tags/
+Adds a 'tag' reference in `.git/refs/tags/`
Unless `-f` is given, the tag must not yet exist in
`.git/refs/tags/` directory.
@@ -32,6 +34,9 @@ GnuPG key for signing.
`-d <tag>` deletes the tag.
+`-l <pattern>` lists tags that match the given pattern (or all
+if no pattern is given).
+
OPTIONS
-------
-a::
@@ -49,6 +54,9 @@ OPTIONS
-d::
Delete an existing tag with the given name
+-l <pattern>::
+ List tags that match the given pattern (or all if no pattern is given).
+
-m <msg>::
Use the given tag message (instead of prompting)
diff --git a/Documentation/git-tools.txt b/Documentation/git-tools.txt
new file mode 100644
index 0000000..00e57a6
--- /dev/null
+++ b/Documentation/git-tools.txt
@@ -0,0 +1,97 @@
+A short git tools survey
+========================
+
+
+Introduction
+------------
+
+Apart from the git contrib/ area, there are some other third-party tools
+you may want to look at.
+
+This document presents a brief summary of each tool and the corresponding
+link.
+
+
+Alternative/Augmentative Porcelains
+-----------------------------------
+
+ - *Cogito* (http://www.kernel.org/pub/software/scm/cogito/)
+
+ Cogito is a version control system layered on top of the git tree history
+ storage system. It aims at a seamless user interface and ease of use,
+ providing a generally smoother user experience than the "raw" Core GIT
+ itself and indeed many other version control systems.
+
+
+ - *pg* (http://www.spearce.org/category/projects/scm/pg/)
+
+ pg is a shell script wrapper around GIT to help the user manage a set of
+ patches to files. pg is somewhat like quilt or StGIT, but it does have a
+ slightly different feature set.
+
+
+ - *StGit* (http://www.procode.org/stgit/)
+
+ Stacked GIT provides a quilt-like patch management functionality in the
+ GIT environment. You can easily manage your patches in the scope of GIT
+ until they get merged upstream.
+
+
+History Viewers
+---------------
+
+ - *gitk* (shipped with git-core)
+
+ gitk is a simple Tk GUI for easily browsing the history of GIT repositories.
+
+
+ - *gitview* (contrib/)
+
+ gitview is a GTK-based repository browser for git.
+
+
+ - *gitweb* (ftp://ftp.kernel.org/pub/software/scm/gitweb/)
+
+ GITweb provides a full-fledged web interface for GIT repositories.
+
+
+ - *qgit* (http://digilander.libero.it/mcostalba/)
+
+ QGit is a git/StGIT GUI viewer built on Qt/C++. QGit can be used
+ to browse the history and directory tree, view annotated files, and
+ commit changes, cherry-picking single files or applying patches.
+ Currently it is the fastest and most feature rich among the git
+ viewers and commit tools.
+
+
+
+Foreign SCM interface
+---------------------
+
+ - *git-svn* (contrib/)
+
+ git-svn is a simple conduit for changesets between a single Subversion
+ branch and git.
+
+
+ - *quilt2git / git2quilt* (http://home-tj.org/wiki/index.php/Misc)
+
+ These utilities convert between patch series in a quilt repository
+ and commit series in git.
+
+
+Others
+------
+
+ - *(h)gct* (http://www.cyd.liu.se/users/~freku045/gct/)
+
+ Commit Tool or (h)gct is a GUI enabled commit tool for git and
+ Mercurial (hg). It allows the user to view diffs, select which files
+ to commit (or ignore / revert), write commit messages, and
+ perform the commit itself.
+
+ - *git.el* (contrib/)
+
+ This is an Emacs interface for git. The user interface is modeled on
+ pcl-cvs. It has been developed on Emacs 21 and will probably need some
+ tweaking to work on XEmacs.
diff --git a/Documentation/git-unpack-objects.txt b/Documentation/git-unpack-objects.txt
index 31ea34d..1828062 100644
--- a/Documentation/git-unpack-objects.txt
+++ b/Documentation/git-unpack-objects.txt
@@ -3,7 +3,7 @@ git-unpack-objects(1)
NAME
----
-git-unpack-objects - Unpack objects from a packed archive.
+git-unpack-objects - Unpack objects from a packed archive
SYNOPSIS
diff --git a/Documentation/git-update-index.txt b/Documentation/git-update-index.txt
index c74311d..0a1b0ad 100644
--- a/Documentation/git-update-index.txt
+++ b/Documentation/git-update-index.txt
@@ -8,11 +8,14 @@ git-update-index - Modifies the index or directory cache
SYNOPSIS
--------
+[verse]
'git-update-index'
[--add] [--remove | --force-remove] [--replace]
[--refresh [-q] [--unmerged] [--ignore-missing]]
[--cacheinfo <mode> <object> <file>]\*
[--chmod=(+|-)x]
+ [--assume-unchanged | --no-assume-unchanged]
+ [--really-refresh]
[--info-only] [--index-info]
[-z] [--stdin]
[--verbose]
@@ -65,6 +68,18 @@ OPTIONS
--chmod=(+|-)x::
Set the execute permissions on the updated files.
+--assume-unchanged, --no-assume-unchanged::
+ When these flags are specified, the object names recorded
+ for the paths are not updated. Instead, these options
+ set and unset the "assume unchanged" bit for the
+ paths. When the "assume unchanged" bit is on, git stops
+ checking the working tree files for possible
+ modifications, so you need to manually unset the bit to
+ tell git when you change the working tree file. This is
+ sometimes helpful when working with a big project on a
+ filesystem that has a very slow lstat(2) system call
+ (e.g. cifs).
+
--info-only::
Do not create objects in the object database for all
<file> arguments that follow this flag; just insert
@@ -193,6 +208,37 @@ $ git ls-files -s
------------
+Using "assume unchanged" bit
+----------------------------
+
+Many operations in git depend on your filesystem to have an
+efficient `lstat(2)` implementation, so that `st_mtime`
+information for working tree files can be cheaply checked to see
+if the file contents have changed from the version recorded in
+the index file. Unfortunately, some filesystems have
+inefficient `lstat(2)`. If your filesystem is one of them, you
+can set "assume unchanged" bit to paths you have not changed to
+cause git not to do this check. Note that setting this bit on a
+path does not mean git will check the contents of the file to
+see if it has changed -- it makes git to omit any checking and
+assume it has *not* changed. When you make changes to working
+tree files, you have to explicitly tell git about it by dropping
+"assume unchanged" bit, either before or after you modify them.
+
+In order to set the "assume unchanged" bit, use the `--assume-unchanged`
+option. To unset it, use `--no-assume-unchanged`.
+
+The command looks at the `core.ignorestat` configuration variable. When
+this is true, paths updated with `git-update-index paths...` and
+paths updated with other git commands that update both index and
+working tree (e.g. `git-apply --index`, `git-checkout-index -u`,
+and `git-read-tree -u`) are automatically marked as "assume
+unchanged". Note that "assume unchanged" bit is *not* set if
+`git-update-index --refresh` finds the working tree file matches
+the index (use `git-update-index --really-refresh` if you want
+to mark them as "assume unchanged").
+
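+The variable itself can be set with, for example, the
+gitlink:git-repo-config[1] command (a minimal sketch):
+
+------------
+$ git repo-config core.ignorestat true
+------------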
+
Examples
--------
To update and refresh only the files already checked out:
@@ -201,6 +247,35 @@ To update and refresh only the files already checked out:
$ git-checkout-index -n -f -a && git-update-index --ignore-missing --refresh
----------------
+On an inefficient filesystem with `core.ignorestat` set:
+
+------------
+$ git update-index --really-refresh <1>
+$ git update-index --no-assume-unchanged foo.c <2>
+$ git diff --name-only <3>
+$ edit foo.c
+$ git diff --name-only <4>
+M foo.c
+$ git update-index foo.c <5>
+$ git diff --name-only <6>
+$ edit foo.c
+$ git diff --name-only <7>
+$ git update-index --no-assume-unchanged foo.c <8>
+$ git diff --name-only <9>
+M foo.c
+
+<1> forces lstat(2) to set "assume unchanged" bits for paths
+ that match the index.
+<2> mark the path to be edited.
+<3> this does lstat(2) and finds the index matches the path.
+<4> this does lstat(2) and finds the index does not match the path.
+<5> registering the new version to index sets "assume unchanged" bit.
+<6> and it is assumed unchanged.
+<7> even after you edit it.
+<8> you can tell git about the change after the fact.
+<9> now it checks with lstat(2) and finds it has been changed.
+------------
+
Configuration
-------------
@@ -213,6 +288,9 @@ in the index and the file mode on the filesystem if they differ only on
executable bit. On such an unfortunate filesystem, you may
need to use `git-update-index --chmod=`.
+The command looks at the `core.ignorestat` configuration variable. See
+'Using "assume unchanged" bit' section above.
+
See Also
--------
diff --git a/Documentation/git-update-ref.txt b/Documentation/git-update-ref.txt
index 69715aa..475237f 100644
--- a/Documentation/git-update-ref.txt
+++ b/Documentation/git-update-ref.txt
@@ -7,7 +7,7 @@ git-update-ref - update the object name stored in a ref safely
SYNOPSIS
--------
-`git-update-ref` <ref> <newvalue> [<oldvalue>]
+'git-update-ref' <ref> <newvalue> [<oldvalue>]
DESCRIPTION
-----------
diff --git a/Documentation/git-upload-pack.txt b/Documentation/git-upload-pack.txt
index 3d8f8ef..4795e98 100644
--- a/Documentation/git-upload-pack.txt
+++ b/Documentation/git-upload-pack.txt
@@ -3,7 +3,7 @@ git-upload-pack(1)
NAME
----
-git-upload-pack - Send missing objects packed.
+git-upload-pack - Send missing objects packed
SYNOPSIS
diff --git a/Documentation/git-var.txt b/Documentation/git-var.txt
index c22d34f..90cb157 100644
--- a/Documentation/git-var.txt
+++ b/Documentation/git-var.txt
@@ -8,7 +8,7 @@ git-var - Print the git users identity
SYNOPSIS
--------
-git-var [ -l | <variable> ]
+'git-var' [ -l | <variable> ]
DESCRIPTION
-----------
diff --git a/Documentation/git-verify-pack.txt b/Documentation/git-verify-pack.txt
index d032280..4962d69 100644
--- a/Documentation/git-verify-pack.txt
+++ b/Documentation/git-verify-pack.txt
@@ -3,7 +3,7 @@ git-verify-pack(1)
NAME
----
-git-verify-pack - Validate packed git archive files.
+git-verify-pack - Validate packed git archive files
SYNOPSIS
diff --git a/Documentation/git-verify-tag.txt b/Documentation/git-verify-tag.txt
index b8a73c4..0f9bdb5 100644
--- a/Documentation/git-verify-tag.txt
+++ b/Documentation/git-verify-tag.txt
@@ -3,7 +3,7 @@ git-verify-tag(1)
NAME
----
-git-verify-tag - Check the GPG signature of tag.
+git-verify-tag - Check the GPG signature of tag
SYNOPSIS
--------
diff --git a/Documentation/git-whatchanged.txt b/Documentation/git-whatchanged.txt
index 6c150b0..641cb7e 100644
--- a/Documentation/git-whatchanged.txt
+++ b/Documentation/git-whatchanged.txt
@@ -3,7 +3,7 @@ git-whatchanged(1)
NAME
----
-git-whatchanged - Show logs with difference each commit introduces.
+git-whatchanged - Show logs with difference each commit introduces
SYNOPSIS
@@ -47,9 +47,9 @@ OPTIONS
By default, differences for merge commits are not shown.
With this flag, show differences to that commit from all
of its parents.
-
- However, it is not very useful in general, although it
- *is* useful on a file-by-file basis.
++
+However, it is not very useful in general, although it
+*is* useful on a file-by-file basis.
Examples
--------
diff --git a/Documentation/git.txt b/Documentation/git.txt
index 2d0ca9d..06b2e53 100644
--- a/Documentation/git.txt
+++ b/Documentation/git.txt
@@ -12,77 +12,65 @@ SYNOPSIS
DESCRIPTION
-----------
-'git' is both a program and a directory content tracker system.
-The program 'git' is just a wrapper to reach the core git programs
-(or a potty if you like, as it's not exactly porcelain but still
-brings your stuff to the plumbing).
+Git is a fast, scalable, distributed revision control system with an
+unusually rich command set that provides both high-level operations
+and full access to internals.
+
+See this link:tutorial.html[tutorial] to get started, then see
+link:everyday.html[Everyday Git] for a useful minimum set of commands, and
+"man git-commandname" for documentation of each command. CVS users may
+also want to read link:cvs-migration.html[CVS migration].
OPTIONS
-------
--version::
- prints the git suite version that the 'git' program came from.
+ Prints the git suite version that the 'git' program came from.
--help::
- prints the synopsis and a list of available commands.
- If a git command is named this option will bring up the
- man-page for that command.
+ Prints the synopsis and a list of the most commonly used
+ commands. If a git command is named, this option will bring up
+ the man-page for that command. If the option '--all' or '-a' is
+ given then all available commands are printed.
--exec-path::
- path to wherever your core git programs are installed.
+ Path to wherever your core git programs are installed.
This can also be controlled by setting the GIT_EXEC_PATH
environment variable. If no path is given 'git' will print
the current setting and then exit.
-NOT LEARNING CORE GIT COMMANDS
-------------------------------
-
-This manual is intended to give complete background information
-and internal workings of git, which may be too much for most
-people. The <<Discussion>> section below contains much useful
-definition and clarification - read that first.
-
-If you are interested in using git to manage (version control)
-projects, use link:tutorial.html[The Tutorial] to get you started,
-and then link:everyday.html[Everyday GIT] as a guide to the
-minimum set of commands you need to know for day-to-day work.
-Most likely, that will get you started, and you can go a long
-way without knowing the low level details too much.
-
-The link:core-tutorial.html[Core tutorial] document covers how things
-internally work.
-
-If you are migrating from CVS, link:cvs-migration.html[cvs
-migration] document may be helpful after you finish the
-tutorial.
+FURTHER DOCUMENTATION
+---------------------
-After you get the general feel from the tutorial and this
-overview page, you may want to take a look at the
-link:howto-index.html[howto] documents.
+See the references above to get started using git. The following is
+probably more detail than necessary for a first-time user.
+The <<Discussion,Discussion>> section below and the
+link:core-tutorial.html[Core tutorial] both provide introductions to the
+underlying git architecture.
-CORE GIT COMMANDS
------------------
+See also the link:howto-index.html[howto] documents for some useful
+examples.
-If you are writing your own Porcelain, you need to be familiar
-with most of the low level commands --- I suggest starting from
-gitlink:git-update-index[1] and gitlink:git-read-tree[1].
+GIT COMMANDS
+------------
+We divide git into high level ("porcelain") commands and low level
+("plumbing") commands.
-Commands Overview
------------------
-The git commands can helpfully be split into those that manipulate
-the repository, the index and the files in the working tree, those that
-interrogate and compare them, and those that moves objects and
-references between repositories.
+Low-level commands (plumbing)
+-----------------------------
-In addition, git itself comes with a spartan set of porcelain
-commands. They are usable but are not meant to compete with real
-Porcelains.
+Although git includes its
+own porcelain layer, its low-level commands are sufficient to support
+development of alternative porcelains. Developers of such porcelains
+might start by reading about gitlink:git-update-index[1] and
+gitlink:git-read-tree[1].
-There are also some ancillary programs that can be viewed as useful
-aids for using the core commands but which are unlikely to be used by
-SCMs layered over git.
+We divide the low-level commands into commands that manipulate objects (in
+the repository, index, and working tree), commands that interrogate and
+compare objects, and commands that move objects and references between
+repositories.
Manipulation commands
~~~~~~~~~~~~~~~~~~~~~
@@ -247,8 +235,14 @@ gitlink:git-upload-pack[1]::
what are asked for.
-Porcelain-ish Commands
-----------------------
+High-level commands (porcelain)
+-------------------------------
+
+We separate the porcelain commands into the main commands and some
+ancillary user utilities.
+
+Main porcelain commands
+~~~~~~~~~~~~~~~~~~~~~~~
gitlink:git-add[1]::
Add paths to the index.
@@ -328,6 +322,9 @@ gitlink:git-revert[1]::
gitlink:git-shortlog[1]::
Summarizes 'git log' output.
+gitlink:git-show[1]::
+ Show one commit log and its diff.
+
gitlink:git-show-branch[1]::
Show branches and their commits.
@@ -342,7 +339,7 @@ gitlink:git-whatchanged[1]::
Ancillary Commands
-------------------
+~~~~~~~~~~~~~~~~~~
Manipulators:
gitlink:git-applypatch[1]::
@@ -517,16 +514,14 @@ HEAD::
a valid head 'name'
(i.e. the contents of `$GIT_DIR/refs/heads/<head>`).
-<snap>::
- a valid snapshot 'name'
- (i.e. the contents of `$GIT_DIR/refs/snap/<snap>`).
-
File/Directory Structure
------------------------
Please see link:repository-layout.html[repository layout] document.
+Read link:hooks.html[hooks] for more details about each hook.
+
Higher level SCMs may provide and manage additional information in the
`$GIT_DIR`.
diff --git a/Documentation/hooks.txt b/Documentation/hooks.txt
index 4ad1920..3824a95 100644
--- a/Documentation/hooks.txt
+++ b/Documentation/hooks.txt
@@ -97,16 +97,31 @@ send out a commit notification e-mail.
update
------
-This hook is invoked by `git-receive-pack`, which is invoked
-when a `git push` is done against the repository. It takes
-three parameters, name of the ref being updated, old object name
-stored in the ref, and the new objectname to be stored in the
-ref. Exiting with non-zero status from this hook prevents
-`git-receive-pack` from updating the ref.
-
-This can be used to prevent 'forced' update on certain refs by
+This hook is invoked by `git-receive-pack` on the remote repository,
+which happens when a `git push` is done on a local repository.
+Just before updating the ref on the remote repository, the update hook
+is invoked. Its exit status determines the success or failure of
+the ref update.
+
+The hook executes once for each ref to be updated, and takes
+three parameters:
+ - the name of the ref being updated,
+ - the old object name stored in the ref,
+ - and the new objectname to be stored in the ref.
+
+A zero exit from the update hook allows the ref to be updated.
+Exiting with a non-zero status prevents `git-receive-pack`
+from updating the ref.
+
+This hook can be used to prevent 'forced' update on certain refs by
making sure that the object name is a commit object that is a
descendant of the commit object named by the old object name.
+That is, it can enforce a "fast forward only" policy.
+
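+A minimal sketch of such an update hook (illustrative only, not the
+stock sample hook) might look like this:
+
+	#!/bin/sh
+	# update <refname> <old-sha1> <new-sha1>
+	refname="$1" oldrev="$2" newrev="$3"
+	# creating a new ref is always allowed
+	expr "$oldrev" : '0*$' >/dev/null && exit 0
+	# otherwise require the old revision to be an ancestor of the new one
+	if test "$(git-merge-base "$oldrev" "$newrev")" = "$oldrev"
+	then
+		exit 0
+	fi
+	echo "denying non-fast-forward update of $refname" >&2
+	exit 1
+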
+It could also be used to log the old..new status. However, it
+does not know the entire set of branches, so it would end up
+firing one e-mail per ref when used naively.
+
Another use suggested on the mailing list is to use this hook to
implement access control which is finer grained than the one
based on filesystem group.
@@ -115,20 +130,30 @@ The standard output of this hook is sent to /dev/null; if you
want to report something to the git-send-pack on the other end,
you can redirect your output to your stderr.
+
post-update
-----------
-This hook is invoked by `git-receive-pack`, which is invoked
-when a `git push` is done against the repository. It takes
-variable number of parameters; each of which is the name of ref
-that was actually updated.
+This hook is invoked by `git-receive-pack` on the remote repository,
+which happens when a `git push` is done on a local repository.
+It executes on the remote repository once after all the refs have
+been updated.
+
+It takes a variable number of parameters, each of which is the
+name of ref that was actually updated.
This hook is meant primarily for notification, and cannot affect
the outcome of `git-receive-pack`.
+The post-update hook can tell which heads were pushed, but it does
+not know what their original and updated values are, so it is a
+poor place to do log old..new.
+
The default post-update hook, when enabled, runs
`git-update-server-info` to keep the information used by dumb
-transport up-to-date.
+transports (e.g. http) up-to-date. If you are publishing
+a git repository that is accessible via http, you should
+probably enable this hook.
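+
+Since `git-init-db` installs a sample `post-update` hook that does
+exactly this (disabled by default), enabling it is usually just a
+matter of making the hook executable, for example:
+
+	$ chmod +x $GIT_DIR/hooks/post-update
+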
The standard output of this hook is sent to /dev/null; if you
want to report something to the git-send-pack on the other end,
diff --git a/Documentation/repository-layout.txt b/Documentation/repository-layout.txt
index 1f19bf8..98fbe7d 100644
--- a/Documentation/repository-layout.txt
+++ b/Documentation/repository-layout.txt
@@ -89,6 +89,8 @@ hooks::
commands. A handful of sample hooks are installed when
`git init-db` is run, but all of them are disabled by
default. To enable, they need to be made executable.
+ Read link:hooks.html[hooks] for more details about
+ each hook.
index::
The current index file for the repository. It is
diff --git a/Documentation/tutorial.txt b/Documentation/tutorial.txt
index 66680d7..fa79b01 100644
--- a/Documentation/tutorial.txt
+++ b/Documentation/tutorial.txt
@@ -309,7 +309,7 @@ git diff HEAD^^ HEAD^
-------------------------------------
shows the difference between that previous state and the state two
-commits ago. Also, HEAD~5 can be used as a shorthand for HEAD^^^^^,
+commits ago. Also, HEAD~5 can be used as a shorthand for HEAD{caret}{caret}{caret}{caret}{caret},
and more generally HEAD~n can refer to the nth previous commit.
Commits representing merges have more than one parent, and you can
specify which parent to follow in that case; see
diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN
index 1056b7c..d2200ac 100755
--- a/GIT-VERSION-GEN
+++ b/GIT-VERSION-GEN
@@ -1,14 +1,17 @@
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v1.2.GIT
+DEF_VER=v1.3-rc2.GIT
# First try git-describe, then see if there is a version file
# (included in release tarballs), then default
if VN=$(git-describe --abbrev=4 HEAD 2>/dev/null); then
VN=$(echo "$VN" | sed -e 's/-/./g');
-else
+elif test -f version
+then
VN=$(cat version) || VN="$DEF_VER"
+else
+ VN="$DEF_VER"
fi
VN=$(expr "$VN" : v*'\(.*\)')
diff --git a/INSTALL b/INSTALL
index 433449f..63af8ec 100644
--- a/INSTALL
+++ b/INSTALL
@@ -40,9 +40,7 @@ Issues of note:
If you don't have openssl, you can use one of the SHA1 libraries
that come with git (git includes the one from Mozilla, and has
- its own PowerPC-optimized one too - see the Makefile), and you
- can avoid the bignum support by excising git-rev-list support
- for "--merge-order" (by hand).
+ its own PowerPC and ARM optimized ones too - see the Makefile).
- "libcurl" and "curl" executable. git-http-fetch and
git-fetch use them. If you do not use http
diff --git a/Makefile b/Makefile
index 648469e..145099a 100644
--- a/Makefile
+++ b/Makefile
@@ -6,8 +6,8 @@ all:
# on non-x86 architectures (e.g. PowerPC), while the OpenSSL version (default
# choice) has very fast version optimized for i586.
#
-# Define NO_OPENSSL environment variable if you do not have OpenSSL. You will
-# miss out git-rev-list --merge-order. This also implies MOZILLA_SHA1.
+# Define NO_OPENSSL environment variable if you do not have OpenSSL.
+# This also implies MOZILLA_SHA1.
#
# Define NO_CURL if you do not have curl installed. git-http-pull and
# git-http-push are not built, and you cannot use http:// and https://
@@ -53,6 +53,13 @@ all:
# Define NO_SOCKADDR_STORAGE if your platform does not have struct
# sockaddr_storage.
#
+# Define NO_ICONV if your libc does not properly support iconv.
+#
+# Define NO_ACCURATE_DIFF if your diff program at least sometimes misses
+# a missing newline at the end of the file.
+#
+# Define NO_PYTHON if you want to lose all benefits of the recursive merge.
+#
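+# For example, a platform without a usable Python could build with
+# "make NO_PYTHON=YesPlease" (any non-empty value will do).
+#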
# Define COLLISION_CHECK below if you believe that SHA1's
# 1461501637330902918203684832716283019655932542976 hashes do not give you
# sufficient guarantee that no collisions between objects will ever happen.
@@ -70,6 +77,12 @@ GIT-VERSION-FILE: .FORCE-GIT-VERSION-FILE
@$(SHELL_PATH) ./GIT-VERSION-GEN
-include GIT-VERSION-FILE
+uname_S := $(shell sh -c 'uname -s 2>/dev/null || echo not')
+uname_M := $(shell sh -c 'uname -m 2>/dev/null || echo not')
+uname_O := $(shell sh -c 'uname -o 2>/dev/null || echo not')
+uname_R := $(shell sh -c 'uname -r 2>/dev/null || echo not')
+uname_P := $(shell sh -c 'uname -p 2>/dev/null || echo not')
+
# CFLAGS and LDFLAGS are for the users to override from the command line.
CFLAGS = -g -O2 -Wall
@@ -80,7 +93,7 @@ STRIP ?= strip
prefix = $(HOME)
bindir = $(prefix)/bin
-gitexecdir = $(prefix)/bin
+gitexecdir = $(bindir)
template_dir = $(prefix)/share/git-core/templates/
GIT_PYTHON_DIR = $(prefix)/share/git-core/python
# DESTDIR=
@@ -107,7 +120,7 @@ SCRIPT_SH = \
git-merge-one-file.sh git-parse-remote.sh \
git-prune.sh git-pull.sh git-push.sh git-rebase.sh \
git-repack.sh git-request-pull.sh git-reset.sh \
- git-resolve.sh git-revert.sh git-sh-setup.sh \
+ git-resolve.sh git-revert.sh git-rm.sh git-sh-setup.sh \
git-tag.sh git-verify-tag.sh git-whatchanged.sh \
git-applymbox.sh git-applypatch.sh git-am.sh \
git-merge.sh git-merge-stupid.sh git-merge-octopus.sh \
@@ -117,6 +130,7 @@ SCRIPT_SH = \
SCRIPT_PERL = \
git-archimport.perl git-cvsimport.perl git-relink.perl \
git-shortlog.perl git-fmt-merge-msg.perl git-rerere.perl \
+ git-annotate.perl git-cvsserver.perl \
git-svnimport.perl git-mv.perl git-cvsexportcommit.perl
SCRIPT_PYTHON = \
@@ -127,9 +141,9 @@ SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \
$(patsubst %.py,%,$(SCRIPT_PYTHON)) \
git-cherry-pick git-show git-status
-# The ones that do not have to link with lcrypto nor lz.
+# The ones that do not have to link with lcrypto, lz nor xdiff.
SIMPLE_PROGRAMS = \
- git-get-tar-commit-id$X git-mailinfo$X git-mailsplit$X \
+ git-get-tar-commit-id$X git-mailsplit$X \
git-stripspace$X git-daemon$X
# ... and all the rest that could be moved out of bindir to gitexecdir
@@ -139,9 +153,9 @@ PROGRAMS = \
git-convert-objects$X git-diff-files$X \
git-diff-index$X git-diff-stages$X \
git-diff-tree$X git-fetch-pack$X git-fsck-objects$X \
- git-hash-object$X git-index-pack$X git-init-db$X \
- git-local-fetch$X git-ls-files$X git-ls-tree$X git-merge-base$X \
- git-merge-index$X git-mktag$X git-pack-objects$X git-patch-id$X \
+ git-hash-object$X git-index-pack$X git-init-db$X git-local-fetch$X \
+ git-ls-files$X git-ls-tree$X git-mailinfo$X git-merge-base$X \
+ git-merge-index$X git-mktag$X git-mktree$X git-pack-objects$X git-patch-id$X \
git-peek-remote$X git-prune-packed$X git-read-tree$X \
git-receive-pack$X git-rev-list$X git-rev-parse$X \
git-send-pack$X git-show-branch$X git-shell$X \
@@ -151,7 +165,7 @@ PROGRAMS = \
git-upload-pack$X git-verify-pack$X git-write-tree$X \
git-update-ref$X git-symbolic-ref$X git-check-ref-format$X \
git-name-rev$X git-pack-redundant$X git-repo-config$X git-var$X \
- git-describe$X
+ git-describe$X git-merge-tree$X git-blame$X git-imap-send$X
# what 'all' will build and 'install' will install, in gitexecdir
ALL_PROGRAMS = $(PROGRAMS) $(SIMPLE_PROGRAMS) $(SCRIPTS)
@@ -174,34 +188,31 @@ PYMODULES = \
gitMergeCommon.py
LIB_FILE=libgit.a
+XDIFF_LIB=xdiff/lib.a
LIB_H = \
- blob.h cache.h commit.h count-delta.h csum-file.h delta.h \
- diff.h epoch.h object.h pack.h pkt-line.h quote.h refs.h \
- run-command.h strbuf.h tag.h tree.h git-compat-util.h
+ blob.h cache.h commit.h csum-file.h delta.h \
+ diff.h object.h pack.h pkt-line.h quote.h refs.h \
+ run-command.h strbuf.h tag.h tree.h git-compat-util.h revision.h \
+ tree-walk.h
DIFF_OBJS = \
diff.o diffcore-break.o diffcore-order.o diffcore-pathspec.o \
- diffcore-pickaxe.o diffcore-rename.o tree-diff.o combine-diff.o
+ diffcore-pickaxe.o diffcore-rename.o tree-diff.o combine-diff.o \
+ diffcore-delta.o
LIB_OBJS = \
- blob.o commit.o connect.o count-delta.o csum-file.o \
+ blob.o commit.o connect.o csum-file.o \
date.o diff-delta.o entry.o exec_cmd.o ident.o index.o \
object.o pack-check.o patch-delta.o path.o pkt-line.o \
quote.o read-cache.o refs.o run-command.o \
server-info.o setup.o sha1_file.o sha1_name.o strbuf.o \
tag.o tree.o usage.o config.o environment.o ctype.o copy.o \
- fetch-clone.o \
+ fetch-clone.o revision.o pager.o tree-walk.o \
$(DIFF_OBJS)
-LIBS = $(LIB_FILE)
-LIBS += -lz
-
-# Shell quote;
-# Result of this needs to be placed inside ''
-shq = $(subst ','\'',$(1))
-# This has surrounding ''
-shellquote = '$(call shq,$(1))'
+GITLIBS = $(LIB_FILE) $(XDIFF_LIB)
+LIBS = $(GITLIBS) -lz
#
# Platform specific tweaks
@@ -210,28 +221,28 @@ shellquote = '$(call shq,$(1))'
# We choose to avoid "if .. else if .. else .. endif endif"
# because maintaining the nesting to match is a pain. If
# we had "elif" things would have been much nicer...
-uname_S := $(shell sh -c 'uname -s 2>/dev/null || echo not')
-uname_M := $(shell sh -c 'uname -m 2>/dev/null || echo not')
-uname_O := $(shell sh -c 'uname -o 2>/dev/null || echo not')
-uname_R := $(shell sh -c 'uname -r 2>/dev/null || echo not')
ifeq ($(uname_S),Darwin)
NEEDS_SSL_WITH_CRYPTO = YesPlease
NEEDS_LIBICONV = YesPlease
## fink
- ALL_CFLAGS += -I/sw/include
- ALL_LDFLAGS += -L/sw/lib
+ ifeq ($(shell test -d /sw/lib && echo y),y)
+ ALL_CFLAGS += -I/sw/include
+ ALL_LDFLAGS += -L/sw/lib
+ endif
## darwinports
- ALL_CFLAGS += -I/opt/local/include
- ALL_LDFLAGS += -L/opt/local/lib
+ ifeq ($(shell test -d /opt/local/lib && echo y),y)
+ ALL_CFLAGS += -I/opt/local/include
+ ALL_LDFLAGS += -L/opt/local/lib
+ endif
endif
ifeq ($(uname_S),SunOS)
NEEDS_SOCKET = YesPlease
NEEDS_NSL = YesPlease
- NEEDS_LIBICONV = YesPlease
SHELL_PATH = /bin/bash
NO_STRCASESTR = YesPlease
ifeq ($(uname_R),5.8)
+ NEEDS_LIBICONV = YesPlease
NO_UNSETENV = YesPlease
NO_SETENV = YesPlease
endif
@@ -271,6 +282,16 @@ ifeq ($(uname_S),AIX)
NO_STRCASESTR=YesPlease
NEEDS_LIBICONV=YesPlease
endif
+ifeq ($(uname_S),IRIX64)
+ NO_IPV6=YesPlease
+ NO_SETENV=YesPlease
+ NO_STRCASESTR=YesPlease
+ NO_SOCKADDR_STORAGE=YesPlease
+ SHELL_PATH=/usr/gnu/bin/bash
+ ALL_CFLAGS += -DPATH_MAX=1024
+ # for now, build 32-bit version
+ ALL_LDFLAGS += -L/usr/lib32
+endif
ifneq (,$(findstring arm,$(uname_M)))
ARM_SHA1 = YesPlease
endif
@@ -280,8 +301,10 @@ endif
ifdef WITH_OWN_SUBPROCESS_PY
PYMODULES += compat/subprocess.py
else
- ifneq ($(shell $(PYTHON_PATH) -c 'import subprocess;print"OK"' 2>/dev/null),OK)
- PYMODULES += compat/subprocess.py
+ ifeq ($(NO_PYTHON),)
+ ifneq ($(shell $(PYTHON_PATH) -c 'import subprocess;print"OK"' 2>/dev/null),OK)
+ PYMODULES += compat/subprocess.py
+ endif
endif
endif
@@ -308,7 +331,6 @@ ifndef NO_CURL
endif
ifndef NO_OPENSSL
- LIB_OBJS += epoch.o
OPENSSL_LIBSSL = -lssl
ifdef OPENSSLDIR
# Again this may be problematic -- gcc does not always want -R.
@@ -380,6 +402,10 @@ else
endif
endif
+ifdef NO_ICONV
+ ALL_CFLAGS += -DNO_ICONV
+endif
+
ifdef PPC_SHA1
SHA1_HEADER = "ppc/sha1.h"
LIB_OBJS += ppc/sha1.o ppc/sha1ppc.o
@@ -397,8 +423,25 @@ else
endif
endif
endif
+ifdef NO_ACCURATE_DIFF
+ ALL_CFLAGS += -DNO_ACCURATE_DIFF
+endif
+
+# Shell quote (do not use $(call) to accommodate ancient setups);
+
+SHA1_HEADER_SQ = $(subst ','\'',$(SHA1_HEADER))
-ALL_CFLAGS += -DSHA1_HEADER=$(call shellquote,$(SHA1_HEADER)) $(COMPAT_CFLAGS)
+DESTDIR_SQ = $(subst ','\'',$(DESTDIR))
+bindir_SQ = $(subst ','\'',$(bindir))
+gitexecdir_SQ = $(subst ','\'',$(gitexecdir))
+template_dir_SQ = $(subst ','\'',$(template_dir))
+
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
+PYTHON_PATH_SQ = $(subst ','\'',$(PYTHON_PATH))
+GIT_PYTHON_DIR_SQ = $(subst ','\'',$(GIT_PYTHON_DIR))
+
+ALL_CFLAGS += -DSHA1_HEADER='$(SHA1_HEADER_SQ)' $(COMPAT_CFLAGS)
LIB_OBJS += $(COMPAT_OBJS)
export prefix TAR INSTALL DESTDIR SHELL_PATH template_dir
### Build rules
@@ -411,29 +454,34 @@ all:
strip: $(PROGRAMS) git$X
$(STRIP) $(STRIP_OPTS) $(PROGRAMS) git$X
-git$X: git.c $(LIB_FILE)
+git$X: git.c common-cmds.h $(GITLIBS)
$(CC) -DGIT_VERSION='"$(GIT_VERSION)"' \
- $(CFLAGS) $(COMPAT_CFLAGS) -o $@ $(filter %.c,$^) $(LIB_FILE)
+ $(ALL_CFLAGS) -o $@ $(filter %.c,$^) \
+ $(ALL_LDFLAGS) $(LIBS)
+
+common-cmds.h: Documentation/git-*.txt
+ ./generate-cmdlist.sh > $@
$(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh
rm -f $@
- sed -e '1s|#!.*/sh|#!$(call shq,$(SHELL_PATH))|' \
+ sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
-e 's/@@NO_CURL@@/$(NO_CURL)/g' \
+ -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \
$@.sh >$@
chmod +x $@
$(patsubst %.perl,%,$(SCRIPT_PERL)) : % : %.perl
rm -f $@
- sed -e '1s|#!.*perl|#!$(call shq,$(PERL_PATH))|' \
+ sed -e '1s|#!.*perl|#!$(PERL_PATH_SQ)|' \
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
$@.perl >$@
chmod +x $@
$(patsubst %.py,%,$(SCRIPT_PYTHON)) : % : %.py
rm -f $@
- sed -e '1s|#!.*python|#!$(call shq,$(PYTHON_PATH))|' \
- -e 's|@@GIT_PYTHON_PATH@@|$(call shq,$(GIT_PYTHON_DIR))|g' \
+ sed -e '1s|#!.*python|#!$(PYTHON_PATH_SQ)|' \
+ -e 's|@@GIT_PYTHON_PATH@@|$(GIT_PYTHON_DIR_SQ)|g' \
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
$@.py >$@
chmod +x $@
@@ -459,46 +507,80 @@ git$X git.spec \
%.o: %.S
$(CC) -o $*.o -c $(ALL_CFLAGS) $<
-exec_cmd.o: ALL_CFLAGS += -DGIT_EXEC_PATH=\"$(gitexecdir)\"
+exec_cmd.o: exec_cmd.c
+ $(CC) -o $*.o -c $(ALL_CFLAGS) '-DGIT_EXEC_PATH="$(gitexecdir_SQ)"' $<
-git-%$X: %.o $(LIB_FILE)
+http.o: http.c
+ $(CC) -o $*.o -c $(ALL_CFLAGS) -DGIT_USER_AGENT='"git/$(GIT_VERSION)"' $<
+
+git-%$X: %.o $(GITLIBS)
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS)
-git-mailinfo$X : SIMPLE_LIB += $(LIB_4_ICONV)
$(SIMPLE_PROGRAMS) : $(LIB_FILE)
$(SIMPLE_PROGRAMS) : git-%$X : %.o
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
$(LIB_FILE) $(SIMPLE_LIB)
-git-http-fetch$X: fetch.o http.o
-git-http-push$X: http.o
+git-mailinfo$X: mailinfo.o $(LIB_FILE)
+ $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
+ $(LIB_FILE) $(SIMPLE_LIB) $(LIB_4_ICONV)
+
git-local-fetch$X: fetch.o
git-ssh-fetch$X: rsh.o fetch.o
git-ssh-upload$X: rsh.o
git-ssh-pull$X: rsh.o fetch.o
git-ssh-push$X: rsh.o
-git-http-fetch$X: LIBS += $(CURL_LIBCURL)
-git-http-push$X: LIBS += $(CURL_LIBCURL) $(EXPAT_LIBEXPAT)
-git-rev-list$X: LIBS += $(OPENSSL_LIBSSL)
+git-imap-send$X: imap-send.o $(LIB_FILE)
+
+git-http-fetch$X: fetch.o http.o http-fetch.o $(LIB_FILE)
+ $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
+ $(LIBS) $(CURL_LIBCURL)
+
+git-http-push$X: revision.o http.o http-push.o $(LIB_FILE)
+ $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
+ $(LIBS) $(CURL_LIBCURL) $(EXPAT_LIBEXPAT)
+
+git-rev-list$X: rev-list.o $(LIB_FILE)
+ $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
+ $(LIBS) $(OPENSSL_LIBSSL)
init-db.o: init-db.c
$(CC) -c $(ALL_CFLAGS) \
- -DDEFAULT_GIT_TEMPLATE_DIR=$(call shellquote,"$(template_dir)") $*.c
+ -DDEFAULT_GIT_TEMPLATE_DIR='"$(template_dir_SQ)"' $*.c
$(LIB_OBJS): $(LIB_H)
-$(patsubst git-%$X,%.o,$(PROGRAMS)): $(LIB_H)
+$(patsubst git-%$X,%.o,$(PROGRAMS)): $(GITLIBS)
$(DIFF_OBJS): diffcore.h
$(LIB_FILE): $(LIB_OBJS)
$(AR) rcs $@ $(LIB_OBJS)
+XDIFF_OBJS=xdiff/xdiffi.o xdiff/xprepare.o xdiff/xutils.o xdiff/xemit.o
+
+$(XDIFF_LIB): $(XDIFF_OBJS)
+ $(AR) rcs $@ $(XDIFF_OBJS)
+
+
doc:
$(MAKE) -C Documentation all
+TAGS:
+ rm -f TAGS
+ find . -name '*.[hcS]' -print | xargs etags -a
+
+tags:
+ rm -f tags
+ find . -name '*.[hcS]' -print | xargs ctags -a
### Testing rules
+# GNU make supports exporting all variables by "export" without parameters.
+# However, the environment gets quite big, and some programs have problems
+# with that.
+
+export NO_PYTHON
+
test: all
$(MAKE) -C t/ all
@@ -506,7 +588,7 @@ test-date$X: test-date.c date.o ctype.o
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) test-date.c date.o ctype.o
test-delta$X: test-delta.c diff-delta.o patch-delta.o
- $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $^
+ $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $^ -lz
check:
for i in *.c; do sparse $(ALL_CFLAGS) $(SPARSE_FLAGS) $$i || exit; done
@@ -516,13 +598,13 @@ check:
### Installation rules
install: all
- $(INSTALL) -d -m755 $(call shellquote,$(DESTDIR)$(bindir))
- $(INSTALL) -d -m755 $(call shellquote,$(DESTDIR)$(gitexecdir))
- $(INSTALL) $(ALL_PROGRAMS) $(call shellquote,$(DESTDIR)$(gitexecdir))
- $(INSTALL) git$X gitk $(call shellquote,$(DESTDIR)$(bindir))
+ $(INSTALL) -d -m755 '$(DESTDIR_SQ)$(bindir_SQ)'
+ $(INSTALL) -d -m755 '$(DESTDIR_SQ)$(gitexecdir_SQ)'
+ $(INSTALL) $(ALL_PROGRAMS) '$(DESTDIR_SQ)$(gitexecdir_SQ)'
+ $(INSTALL) git$X gitk '$(DESTDIR_SQ)$(bindir_SQ)'
$(MAKE) -C templates install
- $(INSTALL) -d -m755 $(call shellquote,$(DESTDIR)$(GIT_PYTHON_DIR))
- $(INSTALL) $(PYMODULES) $(call shellquote,$(DESTDIR)$(GIT_PYTHON_DIR))
+ $(INSTALL) -d -m755 '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)'
+ $(INSTALL) $(PYMODULES) '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)'
install-doc:
$(MAKE) -C Documentation install
@@ -552,9 +634,10 @@ rpm: dist
### Cleaning rules
clean:
- rm -f *.o mozilla-sha1/*.o arm/*.o ppc/*.o compat/*.o $(LIB_FILE)
+ rm -f *.o mozilla-sha1/*.o arm/*.o ppc/*.o compat/*.o xdiff/*.o \
+ $(LIB_FILE) $(XDIFF_LIB)
rm -f $(ALL_PROGRAMS) git$X
- rm -f *.spec *.pyc *.pyo */*.pyc */*.pyo
+ rm -f *.spec *.pyc *.pyo */*.pyc */*.pyo common-cmds.h TAGS tags
rm -rf $(GIT_TARNAME)
rm -f $(GIT_TARNAME).tar.gz git-core_$(GIT_VERSION)-*.tar.gz
$(MAKE) -C Documentation/ clean
@@ -563,5 +646,5 @@ clean:
rm -f GIT-VERSION-FILE
.PHONY: all install clean strip
-.PHONY: .FORCE-GIT-VERSION-FILE
+.PHONY: .FORCE-GIT-VERSION-FILE TAGS tags
diff --git a/apply.c b/apply.c
index 453482a..33b4271 100644
--- a/apply.c
+++ b/apply.c
@@ -9,6 +9,7 @@
#include <fnmatch.h>
#include "cache.h"
#include "quote.h"
+#include "blob.h"
// --check turns on checking that the working tree matches the
// files that are being modified, but doesn't apply the patch
@@ -32,7 +33,7 @@ static int no_add = 0;
static int show_index_info = 0;
static int line_termination = '\n';
static const char apply_usage[] =
-"git-apply [--stat] [--numstat] [--summary] [--check] [--index] [--apply] [--no-add] [--index-info] [--allow-binary-replacement] [-z] [-pNUM] <patch>...";
+"git-apply [--stat] [--numstat] [--summary] [--check] [--index] [--apply] [--no-add] [--index-info] [--allow-binary-replacement] [-z] [-pNUM] [--whitespace=<nowarn|warn|error|error-all|strip>] <patch>...";
static enum whitespace_eol {
nowarn_whitespace,
@@ -651,7 +652,7 @@ static int parse_git_header(char *line, int len, unsigned int size, struct patch
len = linelen(line, size);
if (!len || line[len-1] != '\n')
break;
- for (i = 0; i < sizeof(optable) / sizeof(optable[0]); i++) {
+ for (i = 0; i < ARRAY_SIZE(optable); i++) {
const struct opentry *p = optable + i;
int oplen = strlen(p->str);
if (len < oplen || memcmp(p->str, line, oplen))
@@ -693,7 +694,7 @@ static int parse_range(const char *line, int len, int offset, const char *expect
line += digits;
len -= digits;
- *p2 = *p1;
+ *p2 = 1;
if (*line == ',') {
digits = parse_num(line+1, p2);
if (!digits)
@@ -834,7 +835,7 @@ static int parse_fragment(char *line, unsigned long size, struct patch *patch, s
patch->new_name = NULL;
}
- if (patch->is_new != !oldlines)
+ if (patch->is_new && oldlines)
return error("new file depends on old contents");
if (patch->is_delete != !newlines) {
if (newlines)
@@ -901,6 +902,8 @@ static int parse_fragment(char *line, unsigned long size, struct patch *patch, s
break;
}
}
+ if (oldlines || newlines)
+ return -1;
/* If a fragment ends with an incomplete line, we failed to include
* it in the above loop because we hit oldlines == newlines == 0
* before seeing it.
@@ -922,8 +925,7 @@ static int parse_single_patch(char *line, unsigned long size, struct patch *patc
struct fragment *fragment;
int len;
- fragment = xmalloc(sizeof(*fragment));
- memset(fragment, 0, sizeof(*fragment));
+ fragment = xcalloc(1, sizeof(*fragment));
len = parse_fragment(line, size, patch, fragment);
if (len <= 0)
die("corrupt patch at line %d", linenr);
@@ -1232,6 +1234,14 @@ static int apply_one_fragment(struct buffer_desc *desc, struct fragment *frag)
size -= len;
}
+#ifdef NO_ACCURATE_DIFF
+ if (oldsize > 0 && old[oldsize - 1] == '\n' &&
+ newsize > 0 && new[newsize - 1] == '\n') {
+ oldsize--;
+ newsize--;
+ }
+#endif
+
offset = find_offset(buf, desc->size, old, oldsize, frag->newpos);
if (offset >= 0) {
int diff = newsize - oldsize;
@@ -1286,7 +1296,7 @@ static int apply_fragments(struct buffer_desc *desc, struct patch *patch)
* applies to.
*/
write_sha1_file_prepare(desc->buffer, desc->size,
- "blob", sha1, hdr, &hdrlen);
+ blob_type, sha1, hdr, &hdrlen);
if (strcmp(sha1_to_hex(sha1), patch->old_sha1_prefix))
return error("the patch applies to '%s' (%s), "
"which does not match the "
@@ -1394,12 +1404,13 @@ static int check_patch(struct patch *patch)
costate.not_new = 0;
costate.refresh_cache = 1;
if (checkout_entry(active_cache[pos],
- &costate) ||
+ &costate,
+ NULL) ||
lstat(old_name, &st))
return -1;
}
- changed = ce_match_stat(active_cache[pos], &st);
+ changed = ce_match_stat(active_cache[pos], &st, 1);
if (changed)
return error("%s: does not match index",
old_name);
@@ -1640,15 +1651,14 @@ static void add_index_file(const char *path, unsigned mode, void *buf, unsigned
if (!write_index)
return;
- ce = xmalloc(ce_size);
- memset(ce, 0, ce_size);
+ ce = xcalloc(1, ce_size);
memcpy(ce->name, path, namelen);
ce->ce_mode = create_ce_mode(mode);
ce->ce_flags = htons(namelen);
if (lstat(path, &st) < 0)
die("unable to stat newly created file %s", path);
fill_stat_cache_info(ce, &st);
- if (write_sha1_file(buf, size, "blob", ce->sha1) < 0)
+ if (write_sha1_file(buf, size, blob_type, ce->sha1) < 0)
die("unable to create backing store for newly created file %s", path);
if (add_cache_entry(ce, ADD_CACHE_OK_TO_ADD) < 0)
die("unable to add cache entry for %s", path);
@@ -1797,8 +1807,7 @@ static int apply_patch(int fd, const char *filename)
struct patch *patch;
int nr;
- patch = xmalloc(sizeof(*patch));
- memset(patch, 0, sizeof(*patch));
+ patch = xcalloc(1, sizeof(*patch));
nr = parse_chunk(buffer + offset, size, patch);
if (nr < 0)
break;
diff --git a/blame.c b/blame.c
new file mode 100644
index 0000000..98f9992
--- /dev/null
+++ b/blame.c
@@ -0,0 +1,892 @@
+/*
+ * Copyright (C) 2006, Fredrik Kuivinen <freku045@student.liu.se>
+ */
+
+#include <assert.h>
+#include <time.h>
+#include <sys/time.h>
+#include <math.h>
+
+#include "cache.h"
+#include "refs.h"
+#include "tag.h"
+#include "commit.h"
+#include "tree.h"
+#include "blob.h"
+#include "diff.h"
+#include "diffcore.h"
+#include "revision.h"
+
+#define DEBUG 0
+
+static const char blame_usage[] = "[-c] [-l] [--] file [commit]\n"
+ " -c, --compability Use the same output mode as git-annotate (Default: off)\n"
+ " -l, --long Show long commit SHA1 (Default: off)\n"
+ " -h, --help This message";
+
+static struct commit **blame_lines;
+static int num_blame_lines;
+static char* blame_contents;
+static int blame_len;
+
+struct util_info {
+ int *line_map;
+ unsigned char sha1[20]; /* blob sha, not commit! */
+ char *buf;
+ unsigned long size;
+ int num_lines;
+ const char* pathname;
+
+ void* topo_data;
+};
+
+struct chunk {
+ int off1, len1; // ---
+ int off2, len2; // +++
+};
+
+struct patch {
+ struct chunk *chunks;
+ int num;
+};
+
+static void get_blob(struct commit *commit);
+
+/* Only used for statistics */
+static int num_get_patch = 0;
+static int num_commits = 0;
+static int patch_time = 0;
+
+#define TEMPFILE_PATH_LEN 60
+static struct patch *get_patch(struct commit *commit, struct commit *other)
+{
+ struct patch *ret;
+ struct util_info *info_c = (struct util_info *)commit->object.util;
+ struct util_info *info_o = (struct util_info *)other->object.util;
+ char tmp_path1[TEMPFILE_PATH_LEN], tmp_path2[TEMPFILE_PATH_LEN];
+ char diff_cmd[TEMPFILE_PATH_LEN*2 + 20];
+ struct timeval tv_start, tv_end;
+ int fd;
+ FILE *fin;
+ char buf[1024];
+
+ ret = xmalloc(sizeof(struct patch));
+ ret->chunks = NULL;
+ ret->num = 0;
+
+ get_blob(commit);
+ get_blob(other);
+
+ gettimeofday(&tv_start, NULL);
+
+ fd = git_mkstemp(tmp_path1, TEMPFILE_PATH_LEN, "git-blame-XXXXXX");
+ if (fd < 0)
+ die("unable to create temp-file: %s", strerror(errno));
+
+ if (xwrite(fd, info_c->buf, info_c->size) != info_c->size)
+ die("write failed: %s", strerror(errno));
+ close(fd);
+
+ fd = git_mkstemp(tmp_path2, TEMPFILE_PATH_LEN, "git-blame-XXXXXX");
+ if (fd < 0)
+ die("unable to create temp-file: %s", strerror(errno));
+
+ if (xwrite(fd, info_o->buf, info_o->size) != info_o->size)
+ die("write failed: %s", strerror(errno));
+ close(fd);
+
+ sprintf(diff_cmd, "diff -U 0 %s %s", tmp_path1, tmp_path2);
+ fin = popen(diff_cmd, "r");
+ if (!fin)
+ die("popen failed: %s", strerror(errno));
+
+ while (fgets(buf, sizeof(buf), fin)) {
+ struct chunk *chunk;
+ char *start, *sp;
+
+ if (buf[0] != '@' || buf[1] != '@')
+ continue;
+
+ if (DEBUG)
+ printf("chunk line: %s", buf);
+ ret->num++;
+ ret->chunks = xrealloc(ret->chunks,
+ sizeof(struct chunk) * ret->num);
+ chunk = &ret->chunks[ret->num - 1];
+
+ assert(!strncmp(buf, "@@ -", 4));
+
+ start = buf + 4;
+ sp = index(start, ' ');
+ *sp = '\0';
+ if (index(start, ',')) {
+ int ret =
+ sscanf(start, "%d,%d", &chunk->off1, &chunk->len1);
+ assert(ret == 2);
+ } else {
+ int ret = sscanf(start, "%d", &chunk->off1);
+ assert(ret == 1);
+ chunk->len1 = 1;
+ }
+ *sp = ' ';
+
+ start = sp + 1;
+ sp = index(start, ' ');
+ *sp = '\0';
+ if (index(start, ',')) {
+ int ret =
+ sscanf(start, "%d,%d", &chunk->off2, &chunk->len2);
+ assert(ret == 2);
+ } else {
+ int ret = sscanf(start, "%d", &chunk->off2);
+ assert(ret == 1);
+ chunk->len2 = 1;
+ }
+ *sp = ' ';
+
+ if (chunk->len1 == 0)
+ chunk->off1++;
+ if (chunk->len2 == 0)
+ chunk->off2++;
+
+ if (chunk->off1 > 0)
+ chunk->off1--;
+ if (chunk->off2 > 0)
+ chunk->off2--;
+
+ assert(chunk->off1 >= 0);
+ assert(chunk->off2 >= 0);
+ }
+ pclose(fin);
+ unlink(tmp_path1);
+ unlink(tmp_path2);
+
+ gettimeofday(&tv_end, NULL);
+ patch_time += 1000000 * (tv_end.tv_sec - tv_start.tv_sec) +
+ tv_end.tv_usec - tv_start.tv_usec;
+
+ num_get_patch++;
+ return ret;
+}
+
+static void free_patch(struct patch *p)
+{
+ free(p->chunks);
+ free(p);
+}
+
+static int get_blob_sha1_internal(unsigned char *sha1, const char *base,
+ int baselen, const char *pathname,
+ unsigned mode, int stage);
+
+static unsigned char blob_sha1[20];
+static const char* blame_file;
+static int get_blob_sha1(struct tree *t, const char *pathname,
+ unsigned char *sha1)
+{
+ int i;
+ const char *pathspec[2];
+ blame_file = pathname;
+ pathspec[0] = pathname;
+ pathspec[1] = NULL;
+ memset(blob_sha1, 0, sizeof(blob_sha1));
+ read_tree_recursive(t, "", 0, 0, pathspec, get_blob_sha1_internal);
+
+ for (i = 0; i < 20; i++) {
+ if (blob_sha1[i] != 0)
+ break;
+ }
+
+ if (i == 20)
+ return -1;
+
+ memcpy(sha1, blob_sha1, 20);
+ return 0;
+}
+
+static int get_blob_sha1_internal(unsigned char *sha1, const char *base,
+ int baselen, const char *pathname,
+ unsigned mode, int stage)
+{
+ if (S_ISDIR(mode))
+ return READ_TREE_RECURSIVE;
+
+ if (strncmp(blame_file, base, baselen) ||
+ strcmp(blame_file + baselen, pathname))
+ return -1;
+
+ memcpy(blob_sha1, sha1, 20);
+ return -1;
+}
+
+static void get_blob(struct commit *commit)
+{
+ struct util_info *info = commit->object.util;
+ char type[20];
+
+ if (info->buf)
+ return;
+
+ info->buf = read_sha1_file(info->sha1, type, &info->size);
+
+ assert(!strcmp(type, blob_type));
+}
+
+/* For debugging only */
+static void print_patch(struct patch *p)
+{
+ int i;
+ printf("Num chunks: %d\n", p->num);
+ for (i = 0; i < p->num; i++) {
+ printf("%d,%d %d,%d\n", p->chunks[i].off1, p->chunks[i].len1,
+ p->chunks[i].off2, p->chunks[i].len2);
+ }
+}
+
+#if DEBUG
+/* For debugging only */
+static void print_map(struct commit *cmit, struct commit *other)
+{
+ struct util_info *util = cmit->object.util;
+ struct util_info *util2 = other->object.util;
+
+ int i;
+ int max =
+ util->num_lines >
+ util2->num_lines ? util->num_lines : util2->num_lines;
+ int num;
+
+ for (i = 0; i < max; i++) {
+ printf("i: %d ", i);
+ num = -1;
+
+ if (i < util->num_lines) {
+ num = util->line_map[i];
+ printf("%d\t", num);
+ } else
+ printf("\t");
+
+ if (i < util2->num_lines) {
+ int num2 = util2->line_map[i];
+ printf("%d\t", num2);
+ if (num != -1 && num2 != num)
+ printf("---");
+ } else
+ printf("\t");
+
+ printf("\n");
+ }
+}
+#endif
+
+// p is a patch from commit to other.
+static void fill_line_map(struct commit *commit, struct commit *other,
+ struct patch *p)
+{
+ struct util_info *util = commit->object.util;
+ struct util_info *util2 = other->object.util;
+ int *map = util->line_map;
+ int *map2 = util2->line_map;
+ int cur_chunk = 0;
+ int i1, i2;
+
+ if (p->num && DEBUG)
+ print_patch(p);
+
+ if (DEBUG)
+ printf("num lines 1: %d num lines 2: %d\n", util->num_lines,
+ util2->num_lines);
+
+ for (i1 = 0, i2 = 0; i1 < util->num_lines; i1++, i2++) {
+ struct chunk *chunk = NULL;
+ if (cur_chunk < p->num)
+ chunk = &p->chunks[cur_chunk];
+
+ if (chunk && chunk->off1 == i1) {
+ if (DEBUG && i2 != chunk->off2)
+ printf("i2: %d off2: %d\n", i2, chunk->off2);
+
+ assert(i2 == chunk->off2);
+
+ i1--;
+ i2--;
+ if (chunk->len1 > 0)
+ i1 += chunk->len1;
+
+ if (chunk->len2 > 0)
+ i2 += chunk->len2;
+
+ cur_chunk++;
+ } else {
+ if (i2 >= util2->num_lines)
+ break;
+
+ if (map[i1] != map2[i2] && map[i1] != -1) {
+ if (DEBUG)
+ printf("map: i1: %d %d %p i2: %d %d %p\n",
+ i1, map[i1],
+ i1 != -1 ? blame_lines[map[i1]] : NULL,
+ i2, map2[i2],
+ i2 != -1 ? blame_lines[map2[i2]] : NULL);
+ if (map2[i2] != -1 &&
+ blame_lines[map[i1]] &&
+ !blame_lines[map2[i2]])
+ map[i1] = map2[i2];
+ }
+
+ if (map[i1] == -1 && map2[i2] != -1)
+ map[i1] = map2[i2];
+ }
+
+ if (DEBUG > 1)
+ printf("l1: %d l2: %d i1: %d i2: %d\n",
+ map[i1], map2[i2], i1, i2);
+ }
+}
+
+static int map_line(struct commit *commit, int line)
+{
+ struct util_info *info = commit->object.util;
+ assert(line >= 0 && line < info->num_lines);
+ return info->line_map[line];
+}
+
+static struct util_info* get_util(struct commit *commit)
+{
+ struct util_info *util = commit->object.util;
+
+ if (util)
+ return util;
+
+ util = xmalloc(sizeof(struct util_info));
+ util->buf = NULL;
+ util->size = 0;
+ util->line_map = NULL;
+ util->num_lines = -1;
+ util->pathname = NULL;
+ commit->object.util = util;
+ return util;
+}
+
+static int fill_util_info(struct commit *commit)
+{
+ struct util_info *util = commit->object.util;
+
+ assert(util);
+ assert(util->pathname);
+
+ if (get_blob_sha1(commit->tree, util->pathname, util->sha1))
+ return 1;
+ else
+ return 0;
+}
+
+static void alloc_line_map(struct commit *commit)
+{
+ struct util_info *util = commit->object.util;
+ int i;
+
+ if (util->line_map)
+ return;
+
+ get_blob(commit);
+
+ util->num_lines = 0;
+ for (i = 0; i < util->size; i++) {
+ if (util->buf[i] == '\n')
+ util->num_lines++;
+ }
+ if(util->buf[util->size - 1] != '\n')
+ util->num_lines++;
+
+ util->line_map = xmalloc(sizeof(int) * util->num_lines);
+
+ for (i = 0; i < util->num_lines; i++)
+ util->line_map[i] = -1;
+}
+
+static void init_first_commit(struct commit* commit, const char* filename)
+{
+ struct util_info* util = commit->object.util;
+ int i;
+
+ util->pathname = filename;
+ if (fill_util_info(commit))
+ die("fill_util_info failed");
+
+ alloc_line_map(commit);
+
+ util = commit->object.util;
+
+ for (i = 0; i < util->num_lines; i++)
+ util->line_map[i] = i;
+}
+
+
+static void process_commits(struct rev_info *rev, const char *path,
+ struct commit** initial)
+{
+ int i;
+ struct util_info* util;
+ int lines_left;
+ int *blame_p;
+ int *new_lines;
+ int new_lines_len;
+
+ struct commit* commit = get_revision(rev);
+ assert(commit);
+ init_first_commit(commit, path);
+
+ util = commit->object.util;
+ num_blame_lines = util->num_lines;
+ blame_lines = xmalloc(sizeof(struct commit *) * num_blame_lines);
+ blame_contents = util->buf;
+ blame_len = util->size;
+
+ for (i = 0; i < num_blame_lines; i++)
+ blame_lines[i] = NULL;
+
+ lines_left = num_blame_lines;
+ blame_p = xmalloc(sizeof(int) * num_blame_lines);
+ new_lines = xmalloc(sizeof(int) * num_blame_lines);
+ do {
+ struct commit_list *parents;
+ int num_parents;
+ struct util_info *util;
+
+ if (DEBUG)
+ printf("\nProcessing commit: %d %s\n", num_commits,
+ sha1_to_hex(commit->object.sha1));
+
+ if (lines_left == 0)
+ return;
+
+ num_commits++;
+ memset(blame_p, 0, sizeof(int) * num_blame_lines);
+ new_lines_len = 0;
+ num_parents = 0;
+ for (parents = commit->parents;
+ parents != NULL; parents = parents->next)
+ num_parents++;
+
+ if(num_parents == 0)
+ *initial = commit;
+
+ if (fill_util_info(commit))
+ continue;
+
+ alloc_line_map(commit);
+ util = commit->object.util;
+
+ for (parents = commit->parents;
+ parents != NULL; parents = parents->next) {
+ struct commit *parent = parents->item;
+ struct patch *patch;
+
+ if (parse_commit(parent) < 0)
+ die("parse_commit error");
+
+ if (DEBUG)
+ printf("parent: %s\n",
+ sha1_to_hex(parent->object.sha1));
+
+ if (fill_util_info(parent)) {
+ num_parents--;
+ continue;
+ }
+
+ patch = get_patch(parent, commit);
+ alloc_line_map(parent);
+ fill_line_map(parent, commit, patch);
+
+ for (i = 0; i < patch->num; i++) {
+ int l;
+ for (l = 0; l < patch->chunks[i].len2; l++) {
+ int mapped_line =
+ map_line(commit, patch->chunks[i].off2 + l);
+ if (mapped_line != -1) {
+ blame_p[mapped_line]++;
+ if (blame_p[mapped_line] == num_parents)
+ new_lines[new_lines_len++] = mapped_line;
+ }
+ }
+ }
+ free_patch(patch);
+ }
+
+ if (DEBUG)
+ printf("parents: %d\n", num_parents);
+
+ for (i = 0; i < new_lines_len; i++) {
+ int mapped_line = new_lines[i];
+ if (blame_lines[mapped_line] == NULL) {
+ blame_lines[mapped_line] = commit;
+ lines_left--;
+ if (DEBUG)
+ printf("blame: mapped: %d i: %d\n",
+ mapped_line, i);
+ }
+ }
+ } while ((commit = get_revision(rev)) != NULL);
+}
+
+
+static int compare_tree_path(struct rev_info* revs,
+ struct commit* c1, struct commit* c2)
+{
+ const char* paths[2];
+ struct util_info* util = c2->object.util;
+ paths[0] = util->pathname;
+ paths[1] = NULL;
+
+ diff_tree_setup_paths(get_pathspec(revs->prefix, paths));
+ return rev_compare_tree(c1->tree, c2->tree);
+}
+
+
+static int same_tree_as_empty_path(struct rev_info *revs, struct tree* t1,
+ const char* path)
+{
+ const char* paths[2];
+ paths[0] = path;
+ paths[1] = NULL;
+
+ diff_tree_setup_paths(get_pathspec(revs->prefix, paths));
+ return rev_same_tree_as_empty(t1);
+}
+
+static const char* find_rename(struct commit* commit, struct commit* parent)
+{
+ struct util_info* cutil = commit->object.util;
+ struct diff_options diff_opts;
+ const char *paths[1];
+ int i;
+
+ if (DEBUG) {
+ printf("find_rename commit: %s ",
+ sha1_to_hex(commit->object.sha1));
+ puts(sha1_to_hex(parent->object.sha1));
+ }
+
+ diff_setup(&diff_opts);
+ diff_opts.recursive = 1;
+ diff_opts.detect_rename = DIFF_DETECT_RENAME;
+ paths[0] = NULL;
+ diff_tree_setup_paths(paths);
+ if (diff_setup_done(&diff_opts) < 0)
+ die("diff_setup_done failed");
+
+ diff_tree_sha1(commit->tree->object.sha1, parent->tree->object.sha1,
+ "", &diff_opts);
+ diffcore_std(&diff_opts);
+
+ for (i = 0; i < diff_queued_diff.nr; i++) {
+ struct diff_filepair *p = diff_queued_diff.queue[i];
+
+ if (p->status == 'R' && !strcmp(p->one->path, cutil->pathname)) {
+ if (DEBUG)
+ printf("rename %s -> %s\n", p->one->path, p->two->path);
+ return p->two->path;
+ }
+ }
+
+ return 0;
+}
+
+static void simplify_commit(struct rev_info *revs, struct commit *commit)
+{
+ struct commit_list **pp, *parent;
+
+ if (!commit->tree)
+ return;
+
+ if (!commit->parents) {
+ struct util_info* util = commit->object.util;
+ if (!same_tree_as_empty_path(revs, commit->tree,
+ util->pathname))
+ commit->object.flags |= TREECHANGE;
+ return;
+ }
+
+ pp = &commit->parents;
+ while ((parent = *pp) != NULL) {
+ struct commit *p = parent->item;
+
+ if (p->object.flags & UNINTERESTING) {
+ pp = &parent->next;
+ continue;
+ }
+
+ parse_commit(p);
+ switch (compare_tree_path(revs, p, commit)) {
+ case REV_TREE_SAME:
+ parent->next = NULL;
+ commit->parents = parent;
+ get_util(p)->pathname = get_util(commit)->pathname;
+ return;
+
+ case REV_TREE_NEW:
+ {
+
+ struct util_info* util = commit->object.util;
+ if (revs->remove_empty_trees &&
+ same_tree_as_empty_path(revs, p->tree,
+ util->pathname)) {
+ const char* new_name = find_rename(commit, p);
+ if (new_name) {
+ struct util_info* putil = get_util(p);
+ if (!putil->pathname)
+ putil->pathname = strdup(new_name);
+ } else {
+ *pp = parent->next;
+ continue;
+ }
+ }
+ }
+
+ /* fallthrough */
+ case REV_TREE_DIFFERENT:
+ pp = &parent->next;
+ if (!get_util(p)->pathname)
+ get_util(p)->pathname =
+ get_util(commit)->pathname;
+ continue;
+ }
+ die("bad tree compare for commit %s",
+ sha1_to_hex(commit->object.sha1));
+ }
+ commit->object.flags |= TREECHANGE;
+}
+
+
+struct commit_info
+{
+ char* author;
+ char* author_mail;
+ unsigned long author_time;
+ char* author_tz;
+};
+
+static void get_commit_info(struct commit* commit, struct commit_info* ret)
+{
+ int len;
+ char* tmp;
+ static char author_buf[1024];
+
+ tmp = strstr(commit->buffer, "\nauthor ") + 8;
+ len = index(tmp, '\n') - tmp;
+ ret->author = author_buf;
+ memcpy(ret->author, tmp, len);
+
+ tmp = ret->author;
+ tmp += len;
+ *tmp = 0;
+ while(*tmp != ' ')
+ tmp--;
+ ret->author_tz = tmp+1;
+
+ *tmp = 0;
+ while(*tmp != ' ')
+ tmp--;
+ ret->author_time = strtoul(tmp, NULL, 10);
+
+ *tmp = 0;
+ while(*tmp != ' ')
+ tmp--;
+ ret->author_mail = tmp + 1;
+
+ *tmp = 0;
+}
+
+static const char* format_time(unsigned long time, const char* tz_str)
+{
+ static char time_buf[128];
+ time_t t = time;
+ int minutes, tz;
+ struct tm *tm;
+
+ tz = atoi(tz_str);
+ minutes = tz < 0 ? -tz : tz;
+ minutes = (minutes / 100)*60 + (minutes % 100);
+ minutes = tz < 0 ? -minutes : minutes;
+ t = time + minutes * 60;
+ tm = gmtime(&t);
+
+ strftime(time_buf, sizeof(time_buf), "%Y-%m-%d %H:%M:%S ", tm);
+ strcat(time_buf, tz_str);
+ return time_buf;
+}
+
+static void topo_setter(struct commit* c, void* data)
+{
+ struct util_info* util = c->object.util;
+ util->topo_data = data;
+}
+
+static void* topo_getter(struct commit* c)
+{
+ struct util_info* util = c->object.util;
+ return util->topo_data;
+}
+
+int main(int argc, const char **argv)
+{
+ int i;
+ struct commit *initial = NULL;
+ unsigned char sha1[20];
+
+ const char *filename = NULL, *commit = NULL;
+ char filename_buf[256];
+ int sha1_len = 8;
+ int compability = 0;
+ int options = 1;
+ struct commit* start_commit;
+
+ const char* args[10];
+ struct rev_info rev;
+
+ struct commit_info ci;
+ const char *buf;
+ int max_digits;
+ int longest_file, longest_author;
+ int found_rename;
+
+ const char* prefix = setup_git_directory();
+ git_config(git_default_config);
+
+ for(i = 1; i < argc; i++) {
+ if(options) {
+ if(!strcmp(argv[i], "-h") ||
+ !strcmp(argv[i], "--help"))
+ usage(blame_usage);
+ else if(!strcmp(argv[i], "-l") ||
+ !strcmp(argv[i], "--long")) {
+ sha1_len = 40;
+ continue;
+ } else if(!strcmp(argv[i], "-c") ||
+ !strcmp(argv[i], "--compability")) {
+ compability = 1;
+ continue;
+ } else if(!strcmp(argv[i], "--")) {
+ options = 0;
+ continue;
+ } else if(argv[i][0] == '-')
+ usage(blame_usage);
+ else
+ options = 0;
+ }
+
+ if(!options) {
+ if(!filename)
+ filename = argv[i];
+ else if(!commit)
+ commit = argv[i];
+ else
+ usage(blame_usage);
+ }
+ }
+
+ if(!filename)
+ usage(blame_usage);
+ if(!commit)
+ commit = "HEAD";
+
+ if(prefix)
+ sprintf(filename_buf, "%s%s", prefix, filename);
+ else
+ strcpy(filename_buf, filename);
+ filename = filename_buf;
+
+ if (get_sha1(commit, sha1))
+ die("get_sha1 failed, commit '%s' not found", commit);
+ start_commit = lookup_commit_reference(sha1);
+ get_util(start_commit)->pathname = filename;
+ if (fill_util_info(start_commit)) {
+ printf("%s not found in %s\n", filename, commit);
+ return 1;
+ }
+
+
+ init_revisions(&rev);
+ rev.remove_empty_trees = 1;
+ rev.topo_order = 1;
+ rev.prune_fn = simplify_commit;
+ rev.topo_setter = topo_setter;
+ rev.topo_getter = topo_getter;
+ rev.limited = 1;
+
+ commit_list_insert(start_commit, &rev.commits);
+
+ args[0] = filename;
+ args[1] = NULL;
+ diff_tree_setup_paths(args);
+ prepare_revision_walk(&rev);
+ process_commits(&rev, filename, &initial);
+
+ buf = blame_contents;
+ for (max_digits = 1, i = 10; i <= num_blame_lines + 1; max_digits++)
+ i *= 10;
+
+ longest_file = 0;
+ longest_author = 0;
+ found_rename = 0;
+ for (i = 0; i < num_blame_lines; i++) {
+ struct commit *c = blame_lines[i];
+ struct util_info* u;
+ if (!c)
+ c = initial;
+ u = c->object.util;
+
+ if (!found_rename && strcmp(filename, u->pathname))
+ found_rename = 1;
+ if (longest_file < strlen(u->pathname))
+ longest_file = strlen(u->pathname);
+ get_commit_info(c, &ci);
+ if (longest_author < strlen(ci.author))
+ longest_author = strlen(ci.author);
+ }
+
+ for (i = 0; i < num_blame_lines; i++) {
+ struct commit *c = blame_lines[i];
+ struct util_info* u;
+
+ if (!c)
+ c = initial;
+
+ u = c->object.util;
+ get_commit_info(c, &ci);
+ fwrite(sha1_to_hex(c->object.sha1), sha1_len, 1, stdout);
+ if(compability) {
+ printf("\t(%10s\t%10s\t%d)", ci.author,
+ format_time(ci.author_time, ci.author_tz), i+1);
+ } else {
+ if (found_rename)
+ printf(" %-*.*s", longest_file, longest_file,
+ u->pathname);
+ printf(" (%-*.*s %10s %*d) ",
+ longest_author, longest_author, ci.author,
+ format_time(ci.author_time, ci.author_tz),
+ max_digits, i+1);
+ }
+
+ if(i == num_blame_lines - 1) {
+ fwrite(buf, blame_len - (buf - blame_contents),
+ 1, stdout);
+ if(blame_contents[blame_len-1] != '\n')
+ putc('\n', stdout);
+ } else {
+ char* next_buf = index(buf, '\n') + 1;
+ fwrite(buf, next_buf - buf, 1, stdout);
+ buf = next_buf;
+ }
+ }
+
+ if (DEBUG) {
+ printf("num get patch: %d\n", num_get_patch);
+ printf("num commits: %d\n", num_commits);
+ printf("patch time: %f\n", patch_time / 1000000.0);
+ printf("initial: %s\n", sha1_to_hex(initial->object.sha1));
+ }
+
+ return 0;
+}
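
The new blame.c above shells out to plain "diff -U 0" between the two blobs and parses the "@@ -a,b +c,d @@" hunk headers itself. The off/len fix-ups that follow the two sscanf() calls convert diff's convention (1-based line numbers, where a zero-length range names the line before the gap) into the 0-based offsets the rest of the file works with. Below is a standalone restatement of just that normalization for illustration; it is not part of the patch, and the main() harness and its sample numbers are made up.

    #include <stdio.h>

    /*
     * Same fix-up as in get_patch() above: a zero-length range in a
     * "diff -U 0" header names the line *before* the gap, so shift it
     * forward by one, then convert the 1-based line number into a
     * 0-based offset.
     */
    static void normalize(int *off, int *len)
    {
        if (*len == 0)
            (*off)++;
        if (*off > 0)
            (*off)--;
    }

    int main(void)
    {
        /* "@@ -3,2 +5,0 @@": two lines removed starting at old line 3,
         * nothing inserted after new line 5.
         */
        int off1 = 3, len1 = 2, off2 = 5, len2 = 0;

        normalize(&off1, &len1);
        normalize(&off2, &len2);
        printf("old: off=%d len=%d  new: off=%d len=%d\n",
               off1, len1, off2, len2); /* old: off=2 len=2  new: off=5 len=0 */
        return 0;
    }
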
diff --git a/blob.c b/blob.c
index 84ec121..c1fdd86 100644
--- a/blob.c
+++ b/blob.c
@@ -8,8 +8,7 @@ struct blob *lookup_blob(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
if (!obj) {
- struct blob *ret = xmalloc(sizeof(struct blob));
- memset(ret, 0, sizeof(struct blob));
+ struct blob *ret = xcalloc(1, sizeof(struct blob));
created_object(sha1, &ret->object);
ret->object.type = blob_type;
return ret;
diff --git a/cache.h b/cache.h
index f686e72..69801b0 100644
--- a/cache.h
+++ b/cache.h
@@ -10,7 +10,7 @@
#define deflateBound(c,s) ((s) + (((s) + 7) >> 3) + (((s) + 63) >> 6) + 11)
#endif
-#if defined(DT_UNKNOWN) && !NO_D_TYPE_IN_DIRENT
+#if defined(DT_UNKNOWN) && !defined(NO_D_TYPE_IN_DIRENT)
#define DTYPE(de) ((de)->d_type)
#else
#undef DT_UNKNOWN
@@ -91,6 +91,7 @@ struct cache_entry {
#define CE_NAMEMASK (0x0fff)
#define CE_STAGEMASK (0x3000)
#define CE_UPDATE (0x4000)
+#define CE_VALID (0x8000)
#define CE_STAGESHIFT 12
#define create_ce_flags(len, stage) htons((len) | ((stage) << CE_STAGESHIFT))
@@ -105,6 +106,9 @@ static inline unsigned int create_ce_mode(unsigned int mode)
return htonl(S_IFLNK);
return htonl(S_IFREG | ce_permissions(mode));
}
+#define canon_mode(mode) \
+ (S_ISREG(mode) ? (S_IFREG | ce_permissions(mode)) : \
+ S_ISLNK(mode) ? S_IFLNK : S_IFDIR)
#define cache_entry_size(len) ((offsetof(struct cache_entry,name) + (len) + 8) & ~7)
@@ -144,8 +148,8 @@ extern int add_cache_entry(struct cache_entry *ce, int option);
extern int remove_cache_entry_at(int pos);
extern int remove_file_from_cache(const char *path);
extern int ce_same_name(struct cache_entry *a, struct cache_entry *b);
-extern int ce_match_stat(struct cache_entry *ce, struct stat *st);
-extern int ce_modified(struct cache_entry *ce, struct stat *st);
+extern int ce_match_stat(struct cache_entry *ce, struct stat *st, int);
+extern int ce_modified(struct cache_entry *ce, struct stat *st, int);
extern int ce_path_match(const struct cache_entry *ce, const char **pathspec);
extern int index_fd(unsigned char *sha1, int fd, struct stat *st, int write_object, const char *type);
extern int index_pipe(unsigned char *sha1, int fd, const char *type, int write_object);
@@ -162,7 +166,9 @@ extern void rollback_index_file(struct cache_file *);
/* Environment bits from configuration mechanism */
extern int trust_executable_bit;
+extern int assume_unchanged;
extern int only_use_symrefs;
+extern int warn_ambiguous_refs;
extern int diff_rename_limit_default;
extern int shared_repository;
extern const char *apply_default_whitespace;
@@ -260,7 +266,7 @@ struct checkout {
refresh_cache:1;
};
-extern int checkout_entry(struct cache_entry *ce, struct checkout *state);
+extern int checkout_entry(struct cache_entry *ce, struct checkout *state, char *topath);
extern struct alternate_object_database {
struct alternate_object_database *next;
@@ -324,7 +330,7 @@ extern int num_packed_objects(const struct packed_git *p);
extern int nth_packed_object_sha1(const struct packed_git *, int, unsigned char*);
extern int find_pack_entry_one(const unsigned char *, struct pack_entry *, struct packed_git *);
extern void *unpack_entry_gently(struct pack_entry *, char *, unsigned long *);
-extern void packed_object_info_detail(struct pack_entry *, char *, unsigned long *, unsigned long *, int *, unsigned char *);
+extern void packed_object_info_detail(struct pack_entry *, char *, unsigned long *, unsigned long *, unsigned int *, unsigned char *);
/* Dumb servers support */
extern int update_server_info(int);
@@ -352,4 +358,7 @@ extern int copy_fd(int ifd, int ofd);
extern int receive_unpack_pack(int fd[2], const char *me, int quiet);
extern int receive_keep_pack(int fd[2], const char *me, int quiet);
+/* pager.c */
+extern void setup_pager(void);
+
#endif /* CACHE_H */
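
Two things are worth calling out in the cache.h hunks above: canon_mode(mode) collapses an on-disk st_mode into the handful of modes git actually records (regular file with 0644/0755 permissions, symlink, or directory), and ce_match_stat()/ce_modified() grow an extra flag so callers can honour the new CE_VALID "assume unchanged" bit. A minimal sketch of what the canonicalization does follows; the macro bodies are reproduced from cache.h so the snippet stands alone, and the main() harness with its "Makefile" path is illustrative only.

    #include <stdio.h>
    #include <sys/stat.h>

    /* Reproduced from cache.h for illustration. */
    #define ce_permissions(mode) (((mode) & 0100) ? 0755 : 0644)
    #define canon_mode(mode) \
        (S_ISREG(mode) ? (S_IFREG | ce_permissions(mode)) : \
        S_ISLNK(mode) ? S_IFLNK : S_IFDIR)

    int main(void)
    {
        struct stat st;

        if (lstat("Makefile", &st))
            return 1;
        /* Group/other permission bits are dropped; only the owner
         * execute bit decides between 0644 and 0755.
         */
        printf("on disk: %o  canonical: %o\n",
               (unsigned) st.st_mode, (unsigned) canon_mode(st.st_mode));
        return 0;
    }
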
diff --git a/cat-file.c b/cat-file.c
index 96d66b4..628f6ca 100644
--- a/cat-file.c
+++ b/cat-file.c
@@ -4,6 +4,94 @@
* Copyright (C) Linus Torvalds, 2005
*/
#include "cache.h"
+#include "exec_cmd.h"
+#include "tag.h"
+#include "tree.h"
+
+static void flush_buffer(const char *buf, unsigned long size)
+{
+ while (size > 0) {
+ long ret = xwrite(1, buf, size);
+ if (ret < 0) {
+ /* Ignore epipe */
+ if (errno == EPIPE)
+ break;
+ die("git-cat-file: %s", strerror(errno));
+ } else if (!ret) {
+ die("git-cat-file: disk full?");
+ }
+ size -= ret;
+ buf += ret;
+ }
+}
+
+static int pprint_tag(const unsigned char *sha1, const char *buf, unsigned long size)
+{
+ /* the parser in tag.c is useless here. */
+ const char *endp = buf + size;
+ const char *cp = buf;
+
+ while (cp < endp) {
+ char c = *cp++;
+ if (c != '\n')
+ continue;
+ if (7 <= endp - cp && !memcmp("tagger ", cp, 7)) {
+ const char *tagger = cp;
+
+ /* Found the tagger line. Copy out the contents
+ * of the buffer so far.
+ */
+ flush_buffer(buf, cp - buf);
+
+ /*
+ * Do something intelligent, like pretty-printing
+ * the date.
+ */
+ while (cp < endp) {
+ if (*cp++ == '\n') {
+ /* tagger to cp is a line
+ * that has ident and time.
+ */
+ const char *sp = tagger;
+ char *ep;
+ unsigned long date;
+ long tz;
+ while (sp < cp && *sp != '>')
+ sp++;
+ if (sp == cp) {
+ /* give up */
+ flush_buffer(tagger,
+ cp - tagger);
+ break;
+ }
+ while (sp < cp &&
+ !('0' <= *sp && *sp <= '9'))
+ sp++;
+ flush_buffer(tagger, sp - tagger);
+ date = strtoul(sp, &ep, 10);
+ tz = strtol(ep, NULL, 10);
+ sp = show_date(date, tz);
+ flush_buffer(sp, strlen(sp));
+ xwrite(1, "\n", 1);
+ break;
+ }
+ }
+ break;
+ }
+ if (cp < endp && *cp == '\n')
+ /* end of header */
+ break;
+ }
+ /* At this point, we have copied out the header up to the end of
+ * the tagger line and cp points at one past \n. It could be the
+ * next header line after the tagger line, or it could be another
+ * \n that marks the end of the headers. We need to copy out the
+ * remainder as is.
+ */
+ if (cp < endp)
+ flush_buffer(cp, endp - cp);
+ return 0;
+}
int main(int argc, char **argv)
{
@@ -14,8 +102,9 @@ int main(int argc, char **argv)
int opt;
setup_git_directory();
+ git_config(git_default_config);
if (argc != 3 || get_sha1(argv[2], sha1))
- usage("git-cat-file [-t|-s|-e|<type>] <sha1>");
+ usage("git-cat-file [-t|-s|-e|-p|<type>] <sha1>");
opt = 0;
if ( argv[1][0] == '-' ) {
@@ -43,6 +132,23 @@ int main(int argc, char **argv)
case 'e':
return !has_sha1_file(sha1);
+ case 'p':
+ if (get_sha1(argv[2], sha1) ||
+ sha1_object_info(sha1, type, NULL))
+ die("Not a valid object name %s", argv[2]);
+
+ /* custom pretty-print here */
+ if (!strcmp(type, tree_type))
+ return execl_git_cmd("ls-tree", argv[2], NULL);
+
+ buf = read_sha1_file(sha1, type, &size);
+ if (!buf)
+ die("Cannot read object %s", argv[2]);
+ if (!strcmp(type, tag_type))
+ return pprint_tag(sha1, buf, size);
+
+ /* otherwise just spit out the data */
+ break;
case 0:
buf = read_object_with_reference(sha1, argv[1], &size, NULL);
break;
@@ -54,18 +160,6 @@ int main(int argc, char **argv)
if (!buf)
die("git-cat-file %s: bad file", argv[2]);
- while (size > 0) {
- long ret = xwrite(1, buf, size);
- if (ret < 0) {
- /* Ignore epipe */
- if (errno == EPIPE)
- break;
- die("git-cat-file: %s", strerror(errno));
- } else if (!ret) {
- die("git-cat-file: disk full?");
- }
- size -= ret;
- buf += ret;
- }
+ flush_buffer(buf, size);
return 0;
}
diff --git a/checkout-index.c b/checkout-index.c
index 53dd8cb..dd6a2d8 100644
--- a/checkout-index.c
+++ b/checkout-index.c
@@ -22,6 +22,10 @@
*
* find . -name '*.h' -print0 | xargs -0 git-checkout-index -f --
*
+ * or:
+ *
+ * find . -name '*.h' -print0 | git-checkout-index -f -z --stdin
+ *
* which will force all existing *.h files to be replaced with
* their cached copies. If an empty command line implied "all",
* then this would force-refresh everything in the cache, which
@@ -33,10 +37,16 @@
* but get used to it in scripting!).
*/
#include "cache.h"
+#include "strbuf.h"
+#include "quote.h"
+#define CHECKOUT_ALL 4
static const char *prefix;
static int prefix_length;
+static int line_termination = '\n';
static int checkout_stage; /* default to checkout stage0 */
+static int to_tempfile;
+static char topath[4][MAXPATHLEN+1];
static struct checkout state = {
.base_dir = "",
@@ -47,11 +57,39 @@ static struct checkout state = {
.refresh_cache = 0,
};
+static void write_tempfile_record (const char *name)
+{
+ int i;
+
+ if (CHECKOUT_ALL == checkout_stage) {
+ for (i = 1; i < 4; i++) {
+ if (i > 1)
+ putchar(' ');
+ if (topath[i][0])
+ fputs(topath[i], stdout);
+ else
+ putchar('.');
+ }
+ } else
+ fputs(topath[checkout_stage], stdout);
+
+ putchar('\t');
+ write_name_quoted("", 0, name + prefix_length,
+ line_termination, stdout);
+ putchar(line_termination);
+
+ for (i = 0; i < 4; i++) {
+ topath[i][0] = 0;
+ }
+}
+
static int checkout_file(const char *name)
{
int namelen = strlen(name);
int pos = cache_name_pos(name, namelen);
int has_same_name = 0;
+ int did_checkout = 0;
+ int errs = 0;
if (pos < 0)
pos = -pos - 1;
@@ -62,9 +100,20 @@ static int checkout_file(const char *name)
memcmp(ce->name, name, namelen))
break;
has_same_name = 1;
- if (checkout_stage == ce_stage(ce))
- return checkout_entry(ce, &state);
pos++;
+ if (ce_stage(ce) != checkout_stage
+ && (CHECKOUT_ALL != checkout_stage || !ce_stage(ce)))
+ continue;
+ did_checkout = 1;
+ if (checkout_entry(ce, &state,
+ to_tempfile ? topath[ce_stage(ce)] : NULL) < 0)
+ errs++;
+ }
+
+ if (did_checkout) {
+ if (to_tempfile)
+ write_tempfile_record(name);
+ return errs > 0 ? -1 : 0;
}
if (!state.quiet) {
@@ -84,18 +133,29 @@ static int checkout_file(const char *name)
static int checkout_all(void)
{
int i, errs = 0;
+ struct cache_entry* last_ce = NULL;
for (i = 0; i < active_nr ; i++) {
struct cache_entry *ce = active_cache[i];
- if (ce_stage(ce) != checkout_stage)
+ if (ce_stage(ce) != checkout_stage
+ && (CHECKOUT_ALL != checkout_stage || !ce_stage(ce)))
continue;
if (prefix && *prefix &&
(ce_namelen(ce) <= prefix_length ||
memcmp(prefix, ce->name, prefix_length)))
continue;
- if (checkout_entry(ce, &state) < 0)
+ if (last_ce && to_tempfile) {
+ if (ce_namelen(last_ce) != ce_namelen(ce)
+ || memcmp(last_ce->name, ce->name, ce_namelen(ce)))
+ write_tempfile_record(last_ce->name);
+ }
+ if (checkout_entry(ce, &state,
+ to_tempfile ? topath[ce_stage(ce)] : NULL) < 0)
errs++;
+ last_ce = ce;
}
+ if (last_ce && to_tempfile)
+ write_tempfile_record(last_ce->name);
if (errs)
/* we have already done our error reporting.
* exit with the same code as die().
@@ -105,7 +165,7 @@ static int checkout_all(void)
}
static const char checkout_cache_usage[] =
-"git-checkout-index [-u] [-q] [-a] [-f] [-n] [--stage=[123]] [--prefix=<string>] [--] <file>...";
+"git-checkout-index [-u] [-q] [-a] [-f] [-n] [--stage=[123]|all] [--prefix=<string>] [--temp] [--] <file>...";
static struct cache_file cache_file;
@@ -114,8 +174,10 @@ int main(int argc, char **argv)
int i;
int newfd = -1;
int all = 0;
+ int read_from_stdin = 0;
prefix = setup_git_directory();
+ git_config(git_default_config);
prefix_length = prefix ? strlen(prefix) : 0;
if (read_cache() < 0) {
@@ -155,17 +217,37 @@ int main(int argc, char **argv)
die("cannot open index.lock file.");
continue;
}
+ if (!strcmp(arg, "-z")) {
+ line_termination = 0;
+ continue;
+ }
+ if (!strcmp(arg, "--stdin")) {
+ if (i != argc - 1)
+ die("--stdin must be at the end");
+ read_from_stdin = 1;
+ i++; /* do not consider arg as a file name */
+ break;
+ }
+ if (!strcmp(arg, "--temp")) {
+ to_tempfile = 1;
+ continue;
+ }
if (!strncmp(arg, "--prefix=", 9)) {
state.base_dir = arg+9;
state.base_dir_len = strlen(state.base_dir);
continue;
}
if (!strncmp(arg, "--stage=", 8)) {
- int ch = arg[8];
- if ('1' <= ch && ch <= '3')
- checkout_stage = arg[8] - '0';
- else
- die("stage should be between 1 and 3");
+ if (!strcmp(arg + 8, "all")) {
+ to_tempfile = 1;
+ checkout_stage = CHECKOUT_ALL;
+ } else {
+ int ch = arg[8];
+ if ('1' <= ch && ch <= '3')
+ checkout_stage = arg[8] - '0';
+ else
+ die("stage should be between 1 and 3 or all");
+ }
continue;
}
if (arg[0] == '-')
@@ -173,7 +255,7 @@ int main(int argc, char **argv)
break;
}
- if (state.base_dir_len) {
+ if (state.base_dir_len || to_tempfile) {
/* when --prefix is specified we do not
* want to update cache.
*/
@@ -190,9 +272,31 @@ int main(int argc, char **argv)
if (all)
die("git-checkout-index: don't mix '--all' and explicit filenames");
+ if (read_from_stdin)
+ die("git-checkout-index: don't mix '--stdin' and explicit filenames");
checkout_file(prefix_path(prefix, prefix_length, arg));
}
+ if (read_from_stdin) {
+ struct strbuf buf;
+ if (all)
+ die("git-checkout-index: don't mix '--all' and '--stdin'");
+ strbuf_init(&buf);
+ while (1) {
+ char *path_name;
+ read_line(&buf, stdin, line_termination);
+ if (buf.eof)
+ break;
+ if (line_termination && buf.buf[0] == '"')
+ path_name = unquote_c_style(buf.buf, NULL);
+ else
+ path_name = buf.buf;
+ checkout_file(prefix_path(prefix, prefix_length, path_name));
+ if (path_name != buf.buf)
+ free(path_name);
+ }
+ }
+
if (all)
checkout_all();
diff --git a/combine-diff.c b/combine-diff.c
index a23894d..7693884 100644
--- a/combine-diff.c
+++ b/combine-diff.c
@@ -1,5 +1,6 @@
#include "cache.h"
#include "commit.h"
+#include "blob.h"
#include "diff.h"
#include "diffcore.h"
#include "quote.h"
@@ -104,7 +105,7 @@ static char *grab_blob(const unsigned char *sha1, unsigned long *size)
return xcalloc(1, 1);
}
blob = read_sha1_file(sha1, type, size);
- if (strcmp(type, "blob"))
+ if (strcmp(type, blob_type))
die("object '%s' is not a blob!", sha1_to_hex(sha1));
return blob;
}
@@ -649,7 +650,7 @@ static int show_patch_diff(struct combine_diff_path *elem, int num_parent,
int len = st.st_size;
int cnt = 0;
- elem->mode = DIFF_FILE_CANON_MODE(st.st_mode);
+ elem->mode = canon_mode(st.st_mode);
size = len;
result = xmalloc(len + 1);
while (cnt < len) {
diff --git a/commit-tree.c b/commit-tree.c
index 88871b0..2d86518 100644
--- a/commit-tree.c
+++ b/commit-tree.c
@@ -4,6 +4,8 @@
* Copyright (C) Linus Torvalds, 2005
*/
#include "cache.h"
+#include "commit.h"
+#include "tree.h"
#define BLOCKING (1ul << 14)
@@ -93,13 +95,13 @@ int main(int argc, char **argv)
if (argc < 2 || get_sha1_hex(argv[1], tree_sha1) < 0)
usage(commit_tree_usage);
- check_valid(tree_sha1, "tree");
+ check_valid(tree_sha1, tree_type);
for (i = 2; i < argc; i += 2) {
char *a, *b;
a = argv[i]; b = argv[i+1];
if (!b || strcmp(a, "-p") || get_sha1(b, parent_sha1[parents]))
usage(commit_tree_usage);
- check_valid(parent_sha1[parents], "commit");
+ check_valid(parent_sha1[parents], commit_type);
if (new_parent(parents))
parents++;
}
@@ -125,7 +127,10 @@ int main(int argc, char **argv)
while (fgets(comment, sizeof(comment), stdin) != NULL)
add_buffer(&buffer, &size, "%s", comment);
- write_sha1_file(buffer, size, "commit", commit_sha1);
- printf("%s\n", sha1_to_hex(commit_sha1));
- return 0;
+ if (!write_sha1_file(buffer, size, commit_type, commit_sha1)) {
+ printf("%s\n", sha1_to_hex(commit_sha1));
+ return 0;
+ }
+ else
+ return 1;
}
diff --git a/commit.c b/commit.c
index 512b5d7..d4976fb 100644
--- a/commit.c
+++ b/commit.c
@@ -73,8 +73,7 @@ struct commit *lookup_commit(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
if (!obj) {
- struct commit *ret = xmalloc(sizeof(struct commit));
- memset(ret, 0, sizeof(struct commit));
+ struct commit *ret = xcalloc(1, sizeof(struct commit));
created_object(sha1, &ret->object);
ret->object.type = commit_type;
return ret;
@@ -569,10 +568,28 @@ int count_parents(struct commit * commit)
return count;
}
+void topo_sort_default_setter(struct commit *c, void *data)
+{
+ c->object.util = data;
+}
+
+void *topo_sort_default_getter(struct commit *c)
+{
+ return c->object.util;
+}
+
/*
* Performs an in-place topological sort on the list supplied.
*/
-void sort_in_topological_order(struct commit_list ** list)
+void sort_in_topological_order(struct commit_list ** list, int lifo)
+{
+ sort_in_topological_order_fn(list, lifo, topo_sort_default_setter,
+ topo_sort_default_getter);
+}
+
+void sort_in_topological_order_fn(struct commit_list ** list, int lifo,
+ topo_sort_set_fn_t setter,
+ topo_sort_get_fn_t getter)
{
struct commit_list * next = *list;
struct commit_list * work = NULL, **insert;
@@ -596,7 +613,7 @@ void sort_in_topological_order(struct commit_list ** list)
next=*list;
while (next) {
next_nodes->list_item = next;
- next->item->object.util = next_nodes;
+ setter(next->item, next_nodes);
next_nodes++;
next = next->next;
}
@@ -606,8 +623,8 @@ void sort_in_topological_order(struct commit_list ** list)
struct commit_list * parents = next->item->parents;
while (parents) {
struct commit * parent=parents->item;
- struct sort_node * pn = (struct sort_node *)parent->object.util;
-
+ struct sort_node * pn = (struct sort_node *) getter(parent);
+
if (pn)
pn->indegree++;
parents=parents->next;
@@ -624,32 +641,39 @@ void sort_in_topological_order(struct commit_list ** list)
next=*list;
insert = &work;
while (next) {
- struct sort_node * node = (struct sort_node *)next->item->object.util;
+ struct sort_node * node = (struct sort_node *) getter(next->item);
if (node->indegree == 0) {
insert = &commit_list_insert(next->item, insert)->next;
}
next=next->next;
}
+
/* process the list in topological order */
+ if (!lifo)
+ sort_by_date(&work);
while (work) {
struct commit * work_item = pop_commit(&work);
- struct sort_node * work_node = (struct sort_node *)work_item->object.util;
+ struct sort_node * work_node = (struct sort_node *) getter(work_item);
struct commit_list * parents = work_item->parents;
while (parents) {
struct commit * parent=parents->item;
- struct sort_node * pn = (struct sort_node *)parent->object.util;
-
+ struct sort_node * pn = (struct sort_node *) getter(parent);
+
if (pn) {
- /*
+ /*
* parents are only enqueued for emission
* when all their children have been emitted thereby
* guaranteeing topological order.
*/
pn->indegree--;
- if (!pn->indegree)
- commit_list_insert(parent, &work);
+ if (!pn->indegree) {
+ if (!lifo)
+ insert_by_date(parent, &work);
+ else
+ commit_list_insert(parent, &work);
+ }
}
parents=parents->next;
}
@@ -660,7 +684,7 @@ void sort_in_topological_order(struct commit_list ** list)
*pptr = work_node->list_item;
pptr = &(*pptr)->next;
*pptr = NULL;
- work_item->object.util = NULL;
+ setter(work_item, NULL);
}
free(nodes);
}
diff --git a/commit.h b/commit.h
index 986b22d..98682b2 100644
--- a/commit.h
+++ b/commit.h
@@ -65,13 +65,29 @@ int count_parents(struct commit * commit);
/*
* Performs an in-place topological sort of list supplied.
*
- * Pre-conditions:
+ * Pre-conditions for sort_in_topological_order:
* all commits in input list and all parents of those
* commits must have object.util == NULL
- *
- * Post-conditions:
+ *
+ * Pre-conditions for sort_in_topological_order_fn:
+ * all commits in input list and all parents of those
+ * commits must have getter(commit) == NULL
+ *
+ * Post-conditions:
* invariant of resulting list is:
* a reachable from b => ord(b) < ord(a)
+ * in addition, when lifo == 0, commits on parallel tracks are
+ * sorted in the dates order.
*/
-void sort_in_topological_order(struct commit_list ** list);
+
+typedef void (*topo_sort_set_fn_t)(struct commit*, void *data);
+typedef void* (*topo_sort_get_fn_t)(struct commit*);
+
+void topo_sort_default_setter(struct commit *c, void *data);
+void *topo_sort_default_getter(struct commit *c);
+
+void sort_in_topological_order(struct commit_list ** list, int lifo);
+void sort_in_topological_order_fn(struct commit_list ** list, int lifo,
+ topo_sort_set_fn_t setter,
+ topo_sort_get_fn_t getter);
#endif /* COMMIT_H */
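
The reason for the setter/getter variant above is that the plain sorter keeps its bookkeeping in commit->object.util, which a caller such as the new blame.c already uses for its own per-commit state. Such a caller passes a pair of hooks that redirect the sorter's scratch pointer into a field it controls (blame.c does exactly this with topo_setter/topo_getter). A minimal sketch of the pattern follows, assuming it is built inside the git tree; struct my_info and the surrounding names are hypothetical, not part of the patch.

    #include "cache.h"
    #include "commit.h"

    /* Hypothetical per-commit data that a caller keeps in object.util. */
    struct my_info {
        void *topo_data;    /* scratch slot lent to the sorter */
        int payload;
    };

    static void my_topo_setter(struct commit *c, void *data)
    {
        struct my_info *info = c->object.util;
        info->topo_data = data;
    }

    static void *my_topo_getter(struct commit *c)
    {
        struct my_info *info = c->object.util;
        return info->topo_data;
    }

    static void sort_my_commits(struct commit_list **list)
    {
        /* Pre-condition from above: every commit in *list and all of
         * their parents already carry a struct my_info whose topo_data
         * starts out NULL, so getter(commit) == NULL holds.
         */
        sort_in_topological_order_fn(list, 1 /* lifo */,
                                     my_topo_setter, my_topo_getter);
    }
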
diff --git a/config.c b/config.c
index 8355224..95ec349 100644
--- a/config.c
+++ b/config.c
@@ -222,11 +222,21 @@ int git_default_config(const char *var, const char *value)
return 0;
}
+ if (!strcmp(var, "core.ignorestat")) {
+ assume_unchanged = git_config_bool(var, value);
+ return 0;
+ }
+
if (!strcmp(var, "core.symrefsonly")) {
only_use_symrefs = git_config_bool(var, value);
return 0;
}
+ if (!strcmp(var, "core.warnambiguousrefs")) {
+ warn_ambiguous_refs = git_config_bool(var, value);
+ return 0;
+ }
+
if (!strcmp(var, "user.name")) {
strncpy(git_default_name, value, sizeof(git_default_name));
return 0;
diff --git a/contrib/README b/contrib/README
new file mode 100644
index 0000000..e1c0a01
--- /dev/null
+++ b/contrib/README
@@ -0,0 +1,44 @@
+Contributed Software
+
+Although these pieces are available as part of the official git
+source tree, they have a somewhat different status. The
+intention is to keep interesting tools around git here, maybe
+even experimental ones, to give users easier access to them,
+and to give tools wider exposure, so that they can be improved
+faster.
+
+I am not expecting to touch these myself that much. As far as
+my day-to-day operation is concerned, these subdirectories are
+owned by their respective primary authors. I am willing to help
+if users of these components and the contrib/ subtree "owners"
+have technical/design issues to resolve, but the initiative to
+fix and/or enhance things _must_ be on the side of the subtree
+owners. IOW, I won't be actively looking for bugs and room for
+enhancement in them as the git maintainer -- I may only do so
+just as one of the users when I want to scratch my own itch. If
+you have patches to things in the contrib/ area, the patch should be
+first sent to the primary author, and then the primary author
+should ack and forward it to me (git pull request is nicer).
+This is the same way I have been treating gitk, and to a
+lesser degree various foreign SCM interfaces, so you know the
+drill.
+
+I expect things that start their life in the contrib/ area
+to graduate out of contrib/ once they mature, either by becoming
+projects on their own, or moving to the toplevel directory. On
+the other hand, I expect I'll be proposing removal of disused
+and inactive ones from time to time.
+
+If you have something new to add to this area, please first propose
+it on the git mailing list, and after a list discussion proves
+there is some general interest (it does not have to be a
+list-wide consensus for a tool targeted to a relatively narrow
+audience -- for example I do not work with projects whose
+upstream is svn, so I have no use for git-svn myself, but it is
+of general interest for people who need to interoperate with SVN
+repositories in a way git-svn works better than git-svnimport),
+submit a patch to create a subdirectory of contrib/ and put your
+stuff there.
+
+-jc
+
diff --git a/contrib/emacs/.gitignore b/contrib/emacs/.gitignore
new file mode 100644
index 0000000..c531d98
--- /dev/null
+++ b/contrib/emacs/.gitignore
@@ -0,0 +1 @@
+*.elc
diff --git a/contrib/emacs/Makefile b/contrib/emacs/Makefile
new file mode 100644
index 0000000..d3619db
--- /dev/null
+++ b/contrib/emacs/Makefile
@@ -0,0 +1,20 @@
+## Build and install stuff
+
+EMACS = emacs
+
+ELC = git.elc vc-git.elc
+INSTALL = install
+INSTALL_ELC = $(INSTALL) -m 644
+prefix = $(HOME)
+emacsdir = $(prefix)/share/emacs/site-lisp
+
+all: $(ELC)
+
+install: all
+ $(INSTALL) -d $(emacsdir)
+ $(INSTALL_ELC) $(ELC) $(emacsdir)
+
+%.elc: %.el
+ $(EMACS) --batch --eval '(byte-compile-file "$<")'
+
+clean:; rm -f $(ELC)
diff --git a/contrib/emacs/git.el b/contrib/emacs/git.el
new file mode 100644
index 0000000..ebd00ef
--- /dev/null
+++ b/contrib/emacs/git.el
@@ -0,0 +1,1012 @@
+;;; git.el --- A user interface for git
+
+;; Copyright (C) 2005, 2006 Alexandre Julliard <julliard@winehq.org>
+
+;; Version: 1.0
+
+;; This program is free software; you can redistribute it and/or
+;; modify it under the terms of the GNU General Public License as
+;; published by the Free Software Foundation; either version 2 of
+;; the License, or (at your option) any later version.
+;;
+;; This program is distributed in the hope that it will be
+;; useful, but WITHOUT ANY WARRANTY; without even the implied
+;; warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+;; PURPOSE. See the GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public
+;; License along with this program; if not, write to the Free
+;; Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
+;; MA 02111-1307 USA
+
+;;; Commentary:
+
+;; This file contains an interface for the git version control
+;; system. It provides easy access to the most frequently used git
+;; commands. The user interface is as far as possible identical to
+;; that of the PCL-CVS mode.
+;;
+;; To install: put this file on the load-path and place the following
+;; in your .emacs file:
+;;
+;; (require 'git)
+;;
+;; To start: `M-x git-status'
+;;
+;; TODO
+;; - portability to XEmacs
+;; - better handling of subprocess errors
+;; - hook into file save (after-save-hook)
+;; - diff against other branch
+;; - renaming files from the status buffer
+;; - creating tags
+;; - fetch/pull
+;; - switching branches
+;; - revlist browser
+;; - git-show-branch browser
+;; - menus
+;;
+
+(eval-when-compile (require 'cl))
+(require 'ewoc)
+
+
+;;;; Customizations
+;;;; ------------------------------------------------------------
+
+(defgroup git nil
+ "Git user interface")
+
+(defcustom git-committer-name nil
+ "User name to use for commits.
+The default is to fall back to the repository config, then to `add-log-full-name' and then to `user-full-name'."
+ :group 'git
+ :type '(choice (const :tag "Default" nil)
+ (string :tag "Name")))
+
+(defcustom git-committer-email nil
+ "Email address to use for commits.
+The default is to fall back to the git repository config, then to `add-log-mailing-address' and then to `user-mail-address'."
+ :group 'git
+ :type '(choice (const :tag "Default" nil)
+ (string :tag "Email")))
+
+(defcustom git-commits-coding-system 'utf-8
+ "Default coding system for the log message of git commits."
+ :group 'git
+ :type 'coding-system)
+
+(defcustom git-append-signed-off-by nil
+ "Whether to append a Signed-off-by line to the commit message before editing."
+ :group 'git
+ :type 'boolean)
+
+(defcustom git-per-dir-ignore-file ".gitignore"
+ "Name of the per-directory ignore file."
+ :group 'git
+ :type 'string)
+
+(defface git-status-face
+ '((((class color) (background light)) (:foreground "purple")))
+ "Git mode face used to highlight added and modified files."
+ :group 'git)
+
+(defface git-unmerged-face
+ '((((class color) (background light)) (:foreground "red" :bold t)))
+ "Git mode face used to highlight unmerged files."
+ :group 'git)
+
+(defface git-unknown-face
+ '((((class color) (background light)) (:foreground "goldenrod" :bold t)))
+ "Git mode face used to highlight unknown files."
+ :group 'git)
+
+(defface git-uptodate-face
+ '((((class color) (background light)) (:foreground "grey60")))
+ "Git mode face used to highlight up-to-date files."
+ :group 'git)
+
+(defface git-ignored-face
+ '((((class color) (background light)) (:foreground "grey60")))
+ "Git mode face used to highlight ignored files."
+ :group 'git)
+
+(defface git-mark-face
+ '((((class color) (background light)) (:foreground "red" :bold t)))
+ "Git mode face used for the file marks."
+ :group 'git)
+
+(defface git-header-face
+ '((((class color) (background light)) (:foreground "blue")))
+ "Git mode face used for commit headers."
+ :group 'git)
+
+(defface git-separator-face
+ '((((class color) (background light)) (:foreground "brown")))
+ "Git mode face used for commit separator."
+ :group 'git)
+
+(defface git-permission-face
+ '((((class color) (background light)) (:foreground "green" :bold t)))
+ "Git mode face used for permission changes."
+ :group 'git)
+
+
+;;;; Utilities
+;;;; ------------------------------------------------------------
+
+(defconst git-log-msg-separator "--- log message follows this line ---")
+
+(defun git-get-env-strings (env)
+  "Build a list of NAME=VALUE strings from an alist of environment variables."
+ (mapcar (lambda (entry) (concat (car entry) "=" (cdr entry))) env))
+
+(defun git-call-process-env (buffer env &rest args)
+ "Wrapper for call-process that sets environment strings."
+ (if env
+ (apply #'call-process "env" nil buffer nil
+ (append (git-get-env-strings env) (list "git") args))
+ (apply #'call-process "git" nil buffer nil args)))
+
+(defun git-call-process-env-string (env &rest args)
+ "Wrapper for call-process that sets environment strings, and returns the process output as a string."
+ (with-temp-buffer
+ (and (eq 0 (apply #' git-call-process-env t env args))
+ (buffer-string))))
+
+(defun git-run-process-region (buffer start end program args)
+ "Run a git process with a buffer region as input."
+ (let ((output-buffer (current-buffer))
+ (dir default-directory))
+ (with-current-buffer buffer
+ (cd dir)
+ (apply #'call-process-region start end program
+ nil (list output-buffer nil) nil args))))
+
+(defun git-run-command-buffer (buffer-name &rest args)
+ "Run a git command, sending the output to a buffer named BUFFER-NAME."
+ (let ((dir default-directory)
+ (buffer (get-buffer-create buffer-name)))
+ (message "Running git %s..." (car args))
+ (with-current-buffer buffer
+ (let ((default-directory dir)
+ (buffer-read-only nil))
+ (erase-buffer)
+ (apply #'git-call-process-env buffer nil args)))
+ (message "Running git %s...done" (car args))
+ buffer))
+
+(defun git-run-command (buffer env &rest args)
+ (message "Running git %s..." (car args))
+ (apply #'git-call-process-env buffer env args)
+ (message "Running git %s...done" (car args)))
+
+(defun git-run-command-region (buffer start end env &rest args)
+ "Run a git command with specified buffer region as input."
+ (message "Running git %s..." (car args))
+ (unless (eq 0 (if env
+ (git-run-process-region
+ buffer start end "env"
+ (append (git-get-env-strings env) (list "git") args))
+ (git-run-process-region
+ buffer start end "git" args)))
+ (error "Failed to run \"git %s\":\n%s" (mapconcat (lambda (x) x) args " ") (buffer-string)))
+ (message "Running git %s...done" (car args)))
+
+(defun git-get-string-sha1 (string)
+ "Read a SHA1 from the specified string."
+ (and string
+ (string-match "[0-9a-f]\\{40\\}" string)
+ (match-string 0 string)))
+
+(defun git-get-committer-name ()
+ "Return the name to use as GIT_COMMITTER_NAME."
+ ; copied from log-edit
+ (or git-committer-name
+ (git-repo-config "user.name")
+ (and (boundp 'add-log-full-name) add-log-full-name)
+ (and (fboundp 'user-full-name) (user-full-name))
+ (and (boundp 'user-full-name) user-full-name)))
+
+(defun git-get-committer-email ()
+ "Return the email address to use as GIT_COMMITTER_EMAIL."
+ ; copied from log-edit
+ (or git-committer-email
+ (git-repo-config "user.email")
+ (and (boundp 'add-log-mailing-address) add-log-mailing-address)
+ (and (fboundp 'user-mail-address) (user-mail-address))
+ (and (boundp 'user-mail-address) user-mail-address)))
+
+(defun git-escape-file-name (name)
+ "Escape a file name if necessary."
+ (if (string-match "[\n\t\"\\]" name)
+ (concat "\""
+ (mapconcat (lambda (c)
+ (case c
+ (?\n "\\n")
+ (?\t "\\t")
+ (?\\ "\\\\")
+ (?\" "\\\"")
+ (t (char-to-string c))))
+ name "")
+ "\"")
+ name))
+
+(defun git-get-top-dir (dir)
+ "Retrieve the top-level directory of a git tree."
+ (let ((cdup (with-output-to-string
+ (with-current-buffer standard-output
+ (cd dir)
+ (unless (eq 0 (call-process "git" nil t nil "rev-parse" "--show-cdup"))
+ (error "cannot find top-level git tree for %s." dir))))))
+ (expand-file-name (concat (file-name-as-directory dir)
+ (car (split-string cdup "\n"))))))
+
+;stolen from pcl-cvs
+(defun git-append-to-ignore (file)
+ "Add a file name to the ignore file in its directory."
+ (let* ((fullname (expand-file-name file))
+ (dir (file-name-directory fullname))
+ (name (file-name-nondirectory fullname))
+ (ignore-name (expand-file-name git-per-dir-ignore-file dir))
+ (created (not (file-exists-p ignore-name))))
+ (save-window-excursion
+ (set-buffer (find-file-noselect ignore-name))
+ (goto-char (point-max))
+ (unless (zerop (current-column)) (insert "\n"))
+ (insert name "\n")
+ (sort-lines nil (point-min) (point-max))
+ (save-buffer))
+ (when created
+ (git-run-command nil nil "update-index" "--info-only" "--add" "--" (file-relative-name ignore-name)))
+ (git-add-status-file (if created 'added 'modified) (file-relative-name ignore-name))))
+
+
+;;;; Wrappers for basic git commands
+;;;; ------------------------------------------------------------
+
+(defun git-rev-parse (rev)
+ "Parse a revision name and return its SHA1."
+ (git-get-string-sha1
+ (git-call-process-env-string nil "rev-parse" rev)))
+
+(defun git-repo-config (key)
+ "Retrieve the value associated to KEY in the git repository config file."
+ (let ((str (git-call-process-env-string nil "repo-config" key)))
+ (and str (car (split-string str "\n")))))
+
+(defun git-symbolic-ref (ref)
+ "Wrapper for the git-symbolic-ref command."
+ (let ((str (git-call-process-env-string nil "symbolic-ref" ref)))
+ (and str (car (split-string str "\n")))))
+
+(defun git-update-ref (ref val &optional oldval)
+ "Update a reference by calling git-update-ref."
+ (apply #'git-call-process-env nil nil "update-ref" ref val (if oldval (list oldval))))
+
+(defun git-read-tree (tree &optional index-file)
+ "Read a tree into the index file."
+ (apply #'git-call-process-env nil
+ (if index-file `(("GIT_INDEX_FILE" . ,index-file)) nil)
+ "read-tree" (if tree (list tree))))
+
+(defun git-write-tree (&optional index-file)
+ "Call git-write-tree and return the resulting tree SHA1 as a string."
+ (git-get-string-sha1
+ (git-call-process-env-string (and index-file `(("GIT_INDEX_FILE" . ,index-file))) "write-tree")))
+
+(defun git-commit-tree (buffer tree head)
+ "Call git-commit-tree with buffer as input and return the resulting commit SHA1."
+ (let ((author-name (git-get-committer-name))
+ (author-email (git-get-committer-email))
+ author-date log-start log-end args)
+ (when head
+ (push "-p" args)
+ (push head args))
+ (with-current-buffer buffer
+ (goto-char (point-min))
+ (if
+ (setq log-start (re-search-forward (concat "^" (regexp-quote git-log-msg-separator) "\n") nil t))
+ (save-restriction
+ (narrow-to-region (point-min) log-start)
+ (goto-char (point-min))
+ (when (re-search-forward "^Author: +\\(.*?\\) *<\\(.*\\)> *$" nil t)
+ (setq author-name (match-string 1)
+ author-email (match-string 2)))
+ (goto-char (point-min))
+ (when (re-search-forward "^Date: +\\(.*\\)$" nil t)
+ (setq author-date (match-string 1)))
+ (goto-char (point-min))
+ (while (re-search-forward "^Parent: +\\([0-9a-f]+\\)" nil t)
+ (unless (string-equal head (match-string 1))
+ (push "-p" args)
+ (push (match-string 1) args))))
+ (setq log-start (point-min)))
+ (setq log-end (point-max)))
+ (git-get-string-sha1
+ (with-output-to-string
+ (with-current-buffer standard-output
+ (let ((coding-system-for-write git-commits-coding-system)
+ (env `(("GIT_AUTHOR_NAME" . ,author-name)
+ ("GIT_AUTHOR_EMAIL" . ,author-email)
+ ("GIT_COMMITTER_NAME" . ,(git-get-committer-name))
+ ("GIT_COMMITTER_EMAIL" . ,(git-get-committer-email)))))
+ (when author-date (push `("GIT_AUTHOR_DATE" . ,author-date) env))
+ (apply #'git-run-command-region
+ buffer log-start log-end env
+ "commit-tree" tree (nreverse args))))))))
+
+(defun git-empty-db-p ()
+ "Check if the git db is empty (no commit done yet)."
+ (not (eq 0 (call-process "git" nil nil nil "rev-parse" "--verify" "HEAD"))))
+
+(defun git-get-merge-heads ()
+ "Retrieve the merge heads from the MERGE_HEAD file if present."
+ (let (heads)
+ (when (file-readable-p ".git/MERGE_HEAD")
+ (with-temp-buffer
+ (insert-file-contents ".git/MERGE_HEAD" nil nil nil t)
+ (goto-char (point-min))
+ (while (re-search-forward "[0-9a-f]\\{40\\}" nil t)
+ (push (match-string 0) heads))))
+ (nreverse heads)))
+
+;;;; File info structure
+;;;; ------------------------------------------------------------
+
+; fileinfo structure stolen from pcl-cvs
+(defstruct (git-fileinfo
+ (:copier nil)
+ (:constructor git-create-fileinfo (state name &optional old-perm new-perm rename-state orig-name marked))
+ (:conc-name git-fileinfo->))
+ marked ;; t/nil
+ state ;; current state
+ name ;; file name
+ old-perm new-perm ;; permission flags
+ rename-state ;; rename or copy state
+ orig-name ;; original name for renames or copies
+ needs-refresh) ;; whether file needs to be refreshed
+
+(defvar git-status nil)
+
+(defun git-clear-status (status)
+ "Remove everything from the status list."
+ (ewoc-filter status (lambda (info) nil)))
+
+(defun git-set-files-state (files state)
+ "Set the state of a list of files."
+ (dolist (info files)
+ (unless (eq (git-fileinfo->state info) state)
+ (setf (git-fileinfo->state info) state)
+ (setf (git-fileinfo->rename-state info) nil)
+ (setf (git-fileinfo->orig-name info) nil)
+ (setf (git-fileinfo->needs-refresh info) t))))
+
+(defun git-state-code (code)
+  "Convert from a string to an added/deleted/modified state."
+ (case (string-to-char code)
+ (?M 'modified)
+ (?? 'unknown)
+ (?A 'added)
+ (?D 'deleted)
+ (?U 'unmerged)
+ (t nil)))
+
+(defun git-status-code-as-string (code)
+ "Format a git status code as string."
+ (case code
+ ('modified (propertize "Modified" 'face 'git-status-face))
+ ('unknown (propertize "Unknown " 'face 'git-unknown-face))
+ ('added (propertize "Added " 'face 'git-status-face))
+ ('deleted (propertize "Deleted " 'face 'git-status-face))
+ ('unmerged (propertize "Unmerged" 'face 'git-unmerged-face))
+ ('uptodate (propertize "Uptodate" 'face 'git-uptodate-face))
+ ('ignored (propertize "Ignored " 'face 'git-ignored-face))
+ (t "? ")))
+
+(defun git-rename-as-string (info)
+ "Return a string describing the copy or rename associated with INFO, or an empty string if none."
+ (let ((state (git-fileinfo->rename-state info)))
+ (if state
+ (propertize
+ (concat " ("
+ (if (eq state 'copy) "copied from "
+ (if (eq (git-fileinfo->state info) 'added) "renamed to "
+ "renamed from "))
+ (git-escape-file-name (git-fileinfo->orig-name info))
+ ")") 'face 'git-status-face)
+ "")))
+
+(defun git-permissions-as-string (old-perm new-perm)
+ "Format a permission change as string."
+ (propertize
+ (if (or (not old-perm)
+ (not new-perm)
+ (eq 0 (logand ?\111 (logxor old-perm new-perm))))
+ " "
+ (if (eq 0 (logand ?\111 old-perm)) "+x" "-x"))
+ 'face 'git-permission-face))
+
+(defun git-fileinfo-prettyprint (info)
+ "Pretty-printer for the git-fileinfo structure."
+ (insert (format " %s %s %s %s%s"
+ (if (git-fileinfo->marked info) (propertize "*" 'face 'git-mark-face) " ")
+ (git-status-code-as-string (git-fileinfo->state info))
+ (git-permissions-as-string (git-fileinfo->old-perm info) (git-fileinfo->new-perm info))
+ (git-escape-file-name (git-fileinfo->name info))
+ (git-rename-as-string info))))
+
+(defun git-parse-status (status)
+ "Parse the output of git-diff-index in the current buffer."
+ (goto-char (point-min))
+ (while (re-search-forward
+ ":\\([0-7]\\{6\\}\\) \\([0-7]\\{6\\}\\) [0-9a-f]\\{40\\} [0-9a-f]\\{40\\} \\(\\([ADMU]\\)\0\\([^\0]+\\)\\|\\([CR]\\)[0-9]*\0\\([^\0]+\\)\0\\([^\0]+\\)\\)\0"
+ nil t 1)
+ (let ((old-perm (string-to-number (match-string 1) 8))
+ (new-perm (string-to-number (match-string 2) 8))
+ (state (or (match-string 4) (match-string 6)))
+ (name (or (match-string 5) (match-string 7)))
+ (new-name (match-string 8)))
+ (if new-name ; copy or rename
+ (if (eq ?C (string-to-char state))
+ (ewoc-enter-last status (git-create-fileinfo 'added new-name old-perm new-perm 'copy name))
+ (ewoc-enter-last status (git-create-fileinfo 'deleted name 0 0 'rename new-name))
+ (ewoc-enter-last status (git-create-fileinfo 'added new-name old-perm new-perm 'rename name)))
+ (ewoc-enter-last status (git-create-fileinfo (git-state-code state) name old-perm new-perm))))))
+
+(defun git-find-status-file (status file)
+ "Find a given file in the status ewoc and return its node."
+ (let ((node (ewoc-nth status 0)))
+ (while (and node (not (string= file (git-fileinfo->name (ewoc-data node)))))
+ (setq node (ewoc-next status node)))
+ node))
+
+(defun git-parse-ls-files (status default-state &optional skip-existing)
+ "Parse the output of git-ls-files in the current buffer."
+ (goto-char (point-min))
+ (let (infolist)
+ (while (re-search-forward "\\([HMRCK?]\\) \\([^\0]*\\)\0" nil t 1)
+ (let ((state (match-string 1))
+ (name (match-string 2)))
+ (unless (and skip-existing (git-find-status-file status name))
+ (push (git-create-fileinfo (or (git-state-code state) default-state) name) infolist))))
+ (dolist (info (nreverse infolist))
+ (ewoc-enter-last status info))))
+
+(defun git-parse-ls-unmerged (status)
+ "Parse the output of git-ls-files -u in the current buffer."
+ (goto-char (point-min))
+ (let (files)
+ (while (re-search-forward "[0-7]\\{6\\} [0-9a-f]\\{40\\} [123]\t\\([^\0]+\\)\0" nil t)
+ (let ((node (git-find-status-file status (match-string 1))))
+ (when node (push (ewoc-data node) files))))
+ (git-set-files-state files 'unmerged)))
+
+(defun git-add-status-file (state name)
+ "Add a new file to the status list (if not existing already) and return its node."
+ (unless git-status (error "Not in git-status buffer."))
+ (or (git-find-status-file git-status name)
+ (ewoc-enter-last git-status (git-create-fileinfo state name))))
+
+(defun git-marked-files ()
+ "Return a list of all marked files, or if none a list containing just the file at cursor position."
+ (unless git-status (error "Not in git-status buffer."))
+ (or (ewoc-collect git-status (lambda (info) (git-fileinfo->marked info)))
+ (list (ewoc-data (ewoc-locate git-status)))))
+
+(defun git-marked-files-state (&rest states)
+ "Return marked files that are in the specified states."
+ (let ((files (git-marked-files))
+ result)
+ (dolist (info files)
+ (when (memq (git-fileinfo->state info) states)
+ (push info result)))
+ result))
+
+(defun git-refresh-files ()
+ "Refresh all files that need it and clear the needs-refresh flag."
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map
+ (lambda (info)
+ (let ((refresh (git-fileinfo->needs-refresh info)))
+ (setf (git-fileinfo->needs-refresh info) nil)
+ refresh))
+ git-status)
+ ; move back to goal column
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-refresh-ewoc-hf (status)
+ "Refresh the ewoc header and footer."
+ (let ((branch (git-symbolic-ref "HEAD"))
+ (head (if (git-empty-db-p) "Nothing committed yet"
+ (substring (git-rev-parse "HEAD") 0 10)))
+ (merge-heads (git-get-merge-heads)))
+ (ewoc-set-hf status
+ (format "Directory: %s\nBranch: %s\nHead: %s%s\n"
+ default-directory
+ (if (string-match "^refs/heads/" branch)
+ (substring branch (match-end 0))
+ branch)
+ head
+ (if merge-heads
+ (concat "\nMerging: "
+ (mapconcat (lambda (str) (substring str 0 10)) merge-heads " "))
+ ""))
+ (if (ewoc-nth status 0) "" " No changes."))))
+
+(defun git-get-filenames (files)
+ (mapcar (lambda (info) (git-fileinfo->name info)) files))
+
+(defun git-update-index (index-file files)
+ "Run git-update-index on a list of files."
+ (let ((env (and index-file `(("GIT_INDEX_FILE" . ,index-file))))
+ added deleted modified)
+ (dolist (info files)
+ (case (git-fileinfo->state info)
+ ('added (push info added))
+ ('deleted (push info deleted))
+ ('modified (push info modified))))
+ (when added
+ (apply #'git-run-command nil env "update-index" "--add" "--" (git-get-filenames added)))
+ (when deleted
+ (apply #'git-run-command nil env "update-index" "--remove" "--" (git-get-filenames deleted)))
+ (when modified
+ (apply #'git-run-command nil env "update-index" "--" (git-get-filenames modified)))))
+
+(defun git-do-commit ()
+ "Perform the actual commit using the current buffer as log message."
+ (interactive)
+ (let ((buffer (current-buffer))
+ (index-file (make-temp-file "gitidx")))
+ (with-current-buffer log-edit-parent-buffer
+ (if (git-marked-files-state 'unmerged)
+ (message "You cannot commit unmerged files, resolve them first.")
+ (unwind-protect
+ (let ((files (git-marked-files-state 'added 'deleted 'modified))
+ head head-tree)
+ (unless (git-empty-db-p)
+ (setq head (git-rev-parse "HEAD")
+ head-tree (git-rev-parse "HEAD^{tree}")))
+ (if files
+ (progn
+ (git-read-tree head-tree index-file)
+ (git-update-index nil files) ;update both the default index
+ (git-update-index index-file files) ;and the temporary one
+ (let ((tree (git-write-tree index-file)))
+ (if (or (not (string-equal tree head-tree))
+ (yes-or-no-p "The tree was not modified, do you really want to perform an empty commit? "))
+ (let ((commit (git-commit-tree buffer tree head)))
+ (git-update-ref "HEAD" commit head)
+ (condition-case nil (delete-file ".git/MERGE_HEAD") (error nil))
+ (with-current-buffer buffer (erase-buffer))
+ (git-set-files-state files 'uptodate)
+ (git-refresh-files)
+ (git-refresh-ewoc-hf git-status)
+ (message "Committed %s." commit))
+ (message "Commit aborted."))))
+ (message "No files to commit.")))
+ (delete-file index-file))))))
+
+
+;;;; Interactive functions
+;;;; ------------------------------------------------------------
+
+(defun git-mark-file ()
+ "Mark the file that the cursor is on and move to the next one."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((pos (ewoc-locate git-status))
+ (info (ewoc-data pos)))
+ (setf (git-fileinfo->marked info) t)
+ (ewoc-invalidate git-status pos)
+ (ewoc-goto-next git-status 1)))
+
+(defun git-unmark-file ()
+ "Unmark the file that the cursor is on and move to the next one."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((pos (ewoc-locate git-status))
+ (info (ewoc-data pos)))
+ (setf (git-fileinfo->marked info) nil)
+ (ewoc-invalidate git-status pos)
+ (ewoc-goto-next git-status 1)))
+
+(defun git-unmark-file-up ()
+ "Unmark the file that the cursor is on and move to the previous one."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let* ((pos (ewoc-locate git-status))
+ (info (ewoc-data pos)))
+ (setf (git-fileinfo->marked info) nil)
+ (ewoc-invalidate git-status pos)
+ (ewoc-goto-prev git-status 1)))
+
+(defun git-mark-all ()
+ "Mark all files."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map (lambda (info) (setf (git-fileinfo->marked info) t) t) git-status)
+ ; move back to goal column after invalidate
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-unmark-all ()
+ "Unmark all files."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map (lambda (info) (setf (git-fileinfo->marked info) nil) t) git-status)
+ ; move back to goal column after invalidate
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-toggle-all-marks ()
+ "Toggle all file marks."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-map (lambda (info) (setf (git-fileinfo->marked info) (not (git-fileinfo->marked info))) t) git-status)
+ ; move back to goal column after invalidate
+ (when goal-column (move-to-column goal-column)))
+
+(defun git-next-file (&optional n)
+ "Move the selection down N files."
+ (interactive "p")
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-goto-next git-status n))
+
+(defun git-prev-file (&optional n)
+ "Move the selection up N files."
+ (interactive "p")
+ (unless git-status (error "Not in git-status buffer."))
+ (ewoc-goto-prev git-status n))
+
+(defun git-add-file ()
+ "Add marked file(s) to the index cache."
+ (interactive)
+ (let ((files (git-marked-files-state 'unknown)))
+ (unless files
+ (push (ewoc-data
+ (git-add-status-file 'added (file-relative-name
+ (read-file-name "File to add: " nil nil t))))
+ files))
+ (apply #'git-run-command nil nil "update-index" "--info-only" "--add" "--" (git-get-filenames files))
+ (git-set-files-state files 'added)
+ (git-refresh-files)))
+
+(defun git-ignore-file ()
+ "Add marked file(s) to the ignore list."
+ (interactive)
+ (let ((files (git-marked-files-state 'unknown)))
+ (unless files
+ (push (ewoc-data
+ (git-add-status-file 'unknown (file-relative-name
+ (read-file-name "File to ignore: " nil nil t))))
+ files))
+ (dolist (info files) (git-append-to-ignore (git-fileinfo->name info)))
+ (git-set-files-state files 'ignored)
+ (git-refresh-files)))
+
+(defun git-remove-file ()
+ "Remove the marked file(s)."
+ (interactive)
+ (let ((files (git-marked-files-state 'added 'modified 'unknown 'uptodate)))
+ (unless files
+ (push (ewoc-data
+ (git-add-status-file 'unknown (file-relative-name
+ (read-file-name "File to remove: " nil nil t))))
+ files))
+ (if (yes-or-no-p
+ (format "Remove %d file%s? " (length files) (if (> (length files) 1) "s" "")))
+ (progn
+ (dolist (info files)
+ (let ((name (git-fileinfo->name info)))
+ (when (file-exists-p name) (delete-file name))))
+ (apply #'git-run-command nil nil "update-index" "--info-only" "--remove" "--" (git-get-filenames files))
+ ; remove unknown files from the list, set the others to deleted
+ (ewoc-filter git-status
+ (lambda (info files)
+ (not (and (memq info files) (eq (git-fileinfo->state info) 'unknown))))
+ files)
+ (git-set-files-state files 'deleted)
+ (git-refresh-files)
+ (unless (ewoc-nth git-status 0) ; refresh header if list is empty
+ (git-refresh-ewoc-hf git-status)))
+ (message "Aborting"))))
+
+(defun git-revert-file ()
+ "Revert changes to the marked file(s)."
+ (interactive)
+ (let ((files (git-marked-files))
+ added modified)
+ (when (and files
+ (yes-or-no-p
+ (format "Revert %d file%s? " (length files) (if (> (length files) 1) "s" ""))))
+ (dolist (info files)
+ (case (git-fileinfo->state info)
+ ('added (push info added))
+ ('deleted (push info modified))
+ ('unmerged (push info modified))
+ ('modified (push info modified))))
+ (when added
+ (apply #'git-run-command nil nil "update-index" "--force-remove" "--" (git-get-filenames added))
+ (git-set-files-state added 'unknown))
+ (when modified
+ (apply #'git-run-command nil nil "checkout" "HEAD" (git-get-filenames modified))
+ (git-set-files-state modified 'uptodate))
+ (git-refresh-files))))
+
+(defun git-resolve-file ()
+ "Resolve conflicts in marked file(s)."
+ (interactive)
+ (let ((files (git-marked-files-state 'unmerged)))
+ (when files
+ (apply #'git-run-command nil nil "update-index" "--info-only" "--" (git-get-filenames files))
+ (git-set-files-state files 'modified)
+ (git-refresh-files))))
+
+(defun git-remove-handled ()
+ "Remove handled files from the status list."
+ (interactive)
+ (ewoc-filter git-status
+ (lambda (info)
+ (not (or (eq (git-fileinfo->state info) 'ignored)
+ (eq (git-fileinfo->state info) 'uptodate)))))
+ (unless (ewoc-nth git-status 0) ; refresh header if list is empty
+ (git-refresh-ewoc-hf git-status)))
+
+(defun git-setup-diff-buffer (buffer)
+ "Setup a buffer for displaying a diff."
+ (with-current-buffer buffer
+ (diff-mode)
+ (goto-char (point-min))
+ (setq buffer-read-only t))
+ (display-buffer buffer)
+ (shrink-window-if-larger-than-buffer))
+
+(defun git-diff-file ()
+ "Diff the marked file(s) against HEAD."
+ (interactive)
+ (let ((files (git-marked-files)))
+ (git-setup-diff-buffer
+ (apply #'git-run-command-buffer "*git-diff*" "diff-index" "-p" "-M" "HEAD" "--" (git-get-filenames files)))))
+
+(defun git-diff-file-merge-head (arg)
+ "Diff the marked file(s) against the first merge head (or the nth one with a numeric prefix)."
+ (interactive "p")
+ (let ((files (git-marked-files))
+ (merge-heads (git-get-merge-heads)))
+ (unless merge-heads (error "No merge in progress"))
+ (git-setup-diff-buffer
+ (apply #'git-run-command-buffer "*git-diff*" "diff-index" "-p" "-M"
+ (or (nth (1- arg) merge-heads) "HEAD") "--" (git-get-filenames files)))))
+
+(defun git-diff-unmerged-file (stage)
+ "Diff the marked unmerged file(s) against the specified stage."
+ (let ((files (git-marked-files)))
+ (git-setup-diff-buffer
+ (apply #'git-run-command-buffer "*git-diff*" "diff-files" "-p" stage "--" (git-get-filenames files)))))
+
+(defun git-diff-file-base ()
+ "Diff the marked unmerged file(s) against the common base file."
+ (interactive)
+ (git-diff-unmerged-file "-1"))
+
+(defun git-diff-file-mine ()
+ "Diff the marked unmerged file(s) against my pre-merge version."
+ (interactive)
+ (git-diff-unmerged-file "-2"))
+
+(defun git-diff-file-other ()
+ "Diff the marked unmerged file(s) against the other's pre-merge version."
+ (interactive)
+ (git-diff-unmerged-file "-3"))
+
+(defun git-diff-file-combined ()
+ "Do a combined diff of the marked unmerged file(s)."
+ (interactive)
+ (git-diff-unmerged-file "-c"))
+
+(defun git-diff-file-idiff ()
+ "Perform an interactive diff on the current file."
+ (interactive)
+ (error "Interactive diffs not implemented yet."))
+
+(defun git-log-file ()
+ "Display a log of changes to the marked file(s)."
+ (interactive)
+ (let* ((files (git-marked-files))
+ (coding-system-for-read git-commits-coding-system)
+ (buffer (apply #'git-run-command-buffer "*git-log*" "rev-list" "--pretty" "HEAD" "--" (git-get-filenames files))))
+ (with-current-buffer buffer
+ ; (git-log-mode) FIXME: implement log mode
+ (goto-char (point-min))
+ (setq buffer-read-only t))
+ (display-buffer buffer)))
+
+(defun git-log-edit-files ()
+ "Return a list of marked files for use in the log-edit buffer."
+ (with-current-buffer log-edit-parent-buffer
+ (git-get-filenames (git-marked-files-state 'added 'deleted 'modified))))
+
+(defun git-commit-file ()
+ "Commit the marked file(s), asking for a commit message."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((buffer (get-buffer-create "*git-commit*"))
+ (merge-heads (git-get-merge-heads))
+ (dir default-directory)
+ (sign-off git-append-signed-off-by))
+ (with-current-buffer buffer
+ (when (eq 0 (buffer-size))
+ (cd dir)
+ (erase-buffer)
+ (insert
+ (propertize
+ (format "Author: %s <%s>\n%s"
+ (git-get-committer-name) (git-get-committer-email)
+ (if merge-heads
+ (format "Parent: %s\n%s\n"
+ (git-rev-parse "HEAD")
+ (mapconcat (lambda (str) (concat "Parent: " str)) merge-heads "\n"))
+ ""))
+ 'face 'git-header-face)
+ (propertize git-log-msg-separator 'face 'git-separator-face)
+ "\n")
+ (cond ((and merge-heads (file-readable-p ".git/MERGE_MSG"))
+ (insert-file-contents ".git/MERGE_MSG"))
+ (sign-off
+ (insert (format "\n\nSigned-off-by: %s <%s>\n"
+ (git-get-committer-name) (git-get-committer-email)))))))
+ (let ((log-edit-font-lock-keywords
+ `(("^\\(Author:\\|Date:\\|Parent:\\|Signed-off-by:\\)\\(.*\\)"
+ (1 font-lock-keyword-face)
+ (2 font-lock-function-name-face))
+ (,(concat "^\\(" (regexp-quote git-log-msg-separator) "\\)$")
+ (1 font-lock-comment-face)))))
+ (log-edit #'git-do-commit nil #'git-log-edit-files buffer))))
+
+(defun git-find-file ()
+ "Visit the current file in its own buffer."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((info (ewoc-data (ewoc-locate git-status))))
+ (find-file (git-fileinfo->name info))
+ (when (eq 'unmerged (git-fileinfo->state info))
+ (smerge-mode))))
+
+(defun git-find-file-imerge ()
+ "Visit the current file in interactive merge mode."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((info (ewoc-data (ewoc-locate git-status))))
+ (find-file (git-fileinfo->name info))
+ (smerge-ediff)))
+
+(defun git-view-file ()
+ "View the current file in its own buffer."
+ (interactive)
+ (unless git-status (error "Not in git-status buffer."))
+ (let ((info (ewoc-data (ewoc-locate git-status))))
+ (view-file (git-fileinfo->name info))))
+
+(defun git-refresh-status ()
+ "Refresh the git status buffer."
+ (interactive)
+ (let* ((status git-status)
+ (pos (ewoc-locate status))
+ (cur-name (and pos (git-fileinfo->name (ewoc-data pos)))))
+ (unless status (error "Not in git-status buffer."))
+ (git-clear-status status)
+ (git-run-command nil nil "update-index" "--info-only" "--refresh")
+ (if (git-empty-db-p)
+ ; we need some special handling for an empty db
+ (with-temp-buffer
+ (git-run-command t nil "ls-files" "-z" "-t" "-c")
+ (git-parse-ls-files status 'added))
+ (with-temp-buffer
+ (git-run-command t nil "diff-index" "-z" "-M" "HEAD")
+ (git-parse-status status)))
+ (with-temp-buffer
+ (git-run-command t nil "ls-files" "-z" "-u")
+ (git-parse-ls-unmerged status))
+ (when (file-readable-p ".git/info/exclude")
+ (with-temp-buffer
+ (git-run-command t nil "ls-files" "-z" "-t" "-o"
+ "--exclude-from=.git/info/exclude"
+ (concat "--exclude-per-directory=" git-per-dir-ignore-file))
+ (git-parse-ls-files status 'unknown)))
+ (git-refresh-files)
+ (git-refresh-ewoc-hf status)
+ ; move point to the current file name if any
+ (let ((node (and cur-name (git-find-status-file status cur-name))))
+ (when node (ewoc-goto-node status node)))))
+
+(defun git-status-quit ()
+ "Quit git-status mode."
+ (interactive)
+ (bury-buffer))
+
+;;;; Major Mode
+;;;; ------------------------------------------------------------
+
+(defvar git-status-mode-hook nil
+ "Run after `git-status-mode' is setup.")
+
+(defvar git-status-mode-map nil
+ "Keymap for git major mode.")
+
+(defvar git-status nil
+ "List of all files managed by the git-status mode.")
+
+(unless git-status-mode-map
+ (let ((map (make-keymap))
+ (diff-map (make-sparse-keymap)))
+ (suppress-keymap map)
+ (define-key map " " 'git-next-file)
+ (define-key map "a" 'git-add-file)
+ (define-key map "c" 'git-commit-file)
+ (define-key map "d" diff-map)
+ (define-key map "=" 'git-diff-file)
+ (define-key map "f" 'git-find-file)
+ (define-key map "\r" 'git-find-file)
+ (define-key map "g" 'git-refresh-status)
+ (define-key map "i" 'git-ignore-file)
+ (define-key map "l" 'git-log-file)
+ (define-key map "m" 'git-mark-file)
+ (define-key map "M" 'git-mark-all)
+ (define-key map "n" 'git-next-file)
+ (define-key map "p" 'git-prev-file)
+ (define-key map "q" 'git-status-quit)
+ (define-key map "r" 'git-remove-file)
+ (define-key map "R" 'git-resolve-file)
+ (define-key map "T" 'git-toggle-all-marks)
+ (define-key map "u" 'git-unmark-file)
+ (define-key map "U" 'git-revert-file)
+ (define-key map "v" 'git-view-file)
+ (define-key map "x" 'git-remove-handled)
+ (define-key map "\C-?" 'git-unmark-file-up)
+ (define-key map "\M-\C-?" 'git-unmark-all)
+ ; the diff submap
+ (define-key diff-map "b" 'git-diff-file-base)
+ (define-key diff-map "c" 'git-diff-file-combined)
+ (define-key diff-map "=" 'git-diff-file)
+ (define-key diff-map "e" 'git-diff-file-idiff)
+ (define-key diff-map "E" 'git-find-file-imerge)
+ (define-key diff-map "h" 'git-diff-file-merge-head)
+ (define-key diff-map "m" 'git-diff-file-mine)
+ (define-key diff-map "o" 'git-diff-file-other)
+ (setq git-status-mode-map map)))
+
+;; git mode should only run in the *git status* buffer
+(put 'git-status-mode 'mode-class 'special)
+
+(defun git-status-mode ()
+ "Major mode for interacting with Git.
+Commands:
+\\{git-status-mode-map}"
+ (kill-all-local-variables)
+ (buffer-disable-undo)
+ (setq mode-name "git status"
+ major-mode 'git-status-mode
+ goal-column 17
+ buffer-read-only t)
+ (use-local-map git-status-mode-map)
+ (let ((buffer-read-only nil))
+ (erase-buffer)
+ (let ((status (ewoc-create 'git-fileinfo-prettyprint "" "")))
+ (set (make-local-variable 'git-status) status))
+ (set (make-local-variable 'list-buffers-directory) default-directory)
+ (run-hooks 'git-status-mode-hook)))
+
+(defun git-status (dir)
+ "Entry point into git-status mode."
+ (interactive "DSelect directory: ")
+ (setq dir (git-get-top-dir dir))
+ (if (file-directory-p (concat (file-name-as-directory dir) ".git"))
+ (let ((buffer (create-file-buffer (expand-file-name "*git-status*" dir))))
+ (switch-to-buffer buffer)
+ (cd dir)
+ (git-status-mode)
+ (git-refresh-status)
+ (goto-char (point-min)))
+ (message "%s is not a git working tree." dir)))
+
+(provide 'git)
+;;; git.el ends here
diff --git a/contrib/emacs/vc-git.el b/contrib/emacs/vc-git.el
new file mode 100644
index 0000000..2453cdc
--- /dev/null
+++ b/contrib/emacs/vc-git.el
@@ -0,0 +1,135 @@
+;;; vc-git.el --- VC backend for the git version control system
+
+;; Copyright (C) 2006 Alexandre Julliard
+
+;; This program is free software; you can redistribute it and/or
+;; modify it under the terms of the GNU General Public License as
+;; published by the Free Software Foundation; either version 2 of
+;; the License, or (at your option) any later version.
+;;
+;; This program is distributed in the hope that it will be
+;; useful, but WITHOUT ANY WARRANTY; without even the implied
+;; warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+;; PURPOSE. See the GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public
+;; License along with this program; if not, write to the Free
+;; Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
+;; MA 02111-1307 USA
+
+;;; Commentary:
+
+;; This file contains a VC backend for the git version control
+;; system.
+;;
+;; To install: put this file on the load-path and add GIT to the list
+;; of supported backends in `vc-handled-backends'.
+;;
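+;; A minimal init-file sketch (illustrative; the path is an assumption,
+;; adjust it to wherever this file actually lives):
+;;
+;;   (add-to-list 'load-path (expand-file-name "~/elisp"))
+;;   (add-to-list 'vc-handled-backends 'GIT)
+;;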
+;; TODO
+;; - changelog generation
+;; - working with revisions other than HEAD
+;;
+
+(defvar git-commits-coding-system 'utf-8
+ "Default coding system for git commits.")
+
+(defun vc-git--run-command-string (file &rest args)
+ "Run a git command on FILE and return its output as string."
+ (let* ((ok t)
+ (str (with-output-to-string
+ (with-current-buffer standard-output
+ (unless (eq 0 (apply #'call-process "git" nil '(t nil) nil
+ (append args (list (file-relative-name file)))))
+ (setq ok nil))))))
+ (and ok str)))
+
+(defun vc-git--run-command (file &rest args)
+ "Run a git command on FILE, discarding any output."
+ (let ((name (file-relative-name file)))
+ (eq 0 (apply #'call-process "git" nil (get-buffer "*Messages*") nil (append args (list name))))))
+
+(defun vc-git-registered (file)
+ "Check whether FILE is registered with git."
+ (with-temp-buffer
+ (let* ((dir (file-name-directory file))
+ (name (file-relative-name file dir)))
+ (when dir (cd dir))
+ (and (eq 0 (call-process "git" nil '(t nil) nil "ls-files" "-c" "-z" "--" name))
+ (let ((str (buffer-string)))
+ (and (> (length str) (length name))
+ (string= (substring str 0 (1+ (length name))) (concat name "\0"))))))))
+
+(defun vc-git-state (file)
+ "git-specific version of `vc-state'."
+ (let ((diff (vc-git--run-command-string file "diff-index" "-z" "HEAD" "--")))
+ (if (and diff (string-match ":[0-7]\\{6\\} [0-7]\\{6\\} [0-9a-f]\\{40\\} [0-9a-f]\\{40\\} [ADMU]\0[^\0]+\0" diff))
+ 'edited
+ 'up-to-date)))
+
+(defun vc-git-workfile-version (file)
+ "git-specific version of `vc-workfile-version'."
+ (let ((str (with-output-to-string
+ (with-current-buffer standard-output
+ (call-process "git" nil '(t nil) nil "symbolic-ref" "HEAD")))))
+ (if (string-match "^\\(refs/heads/\\)?\\(.+\\)$" str)
+ (match-string 2 str)
+ str)))
+
+(defun vc-git-revert (file &optional contents-done)
+ "Revert FILE to the version stored in the git repository."
+ (if contents-done
+ (vc-git--run-command file "update-index" "--")
+ (vc-git--run-command file "checkout" "HEAD")))
+
+(defun vc-git-checkout-model (file)
+ 'implicit)
+
+(defun vc-git-workfile-unchanged-p (file)
+ (let ((sha1 (vc-git--run-command-string file "hash-object" "--"))
+ (head (vc-git--run-command-string file "ls-tree" "-z" "HEAD" "--")))
+ (and head
+ (string-match "[0-7]\\{6\\} blob \\([0-9a-f]\\{40\\}\\)\t[^\0]+\0" head)
+ (string= (car (split-string sha1 "\n")) (match-string 1 head)))))
+
+(defun vc-git-register (file &optional rev comment)
+ "Register FILE into the git version-control system."
+ (vc-git--run-command file "update-index" "--add" "--"))
+
+(defun vc-git-print-log (file)
+ (let ((name (file-relative-name file))
+ (coding-system-for-read git-commits-coding-system))
+ (vc-do-command nil 'async "git" name "rev-list" "--pretty" "HEAD" "--")))
+
+(defun vc-git-diff (file &optional rev1 rev2)
+ (let ((name (file-relative-name file)))
+ (if (and rev1 rev2)
+ (vc-do-command "*vc-diff*" 0 "git" name "diff-tree" "-p" rev1 rev2 "--")
+ (vc-do-command "*vc-diff*" 0 "git" name "diff-index" "-p" (or rev1 "HEAD") "--"))
+ ; git-diff-index doesn't set exit status like diff does
+ (if (vc-git-workfile-unchanged-p file) 0 1)))
+
+(defun vc-git-checkin (file rev comment)
+ (let ((coding-system-for-write git-commits-coding-system))
+ (vc-git--run-command file "commit" "-m" comment "--only" "--")))
+
+(defun vc-git-checkout (file &optional editable rev destfile)
+ (vc-git--run-command file "checkout" (or rev "HEAD")))
+
+(defun vc-git-annotate-command (file buf &optional rev)
+ ; FIXME: rev is ignored
+ (let ((name (file-relative-name file)))
+ (call-process "git" nil buf nil "annotate" name)))
+
+(defun vc-git-annotate-time ()
+ (and (re-search-forward "[0-9a-f]+\t(.*\t\\([0-9]+\\)-\\([0-9]+\\)-\\([0-9]+\\) \\([0-9]+\\):\\([0-9]+\\):\\([0-9]+\\) \\([-+0-9]+\\)\t[0-9]+)" nil t)
+ (vc-annotate-convert-time
+ (apply #'encode-time (mapcar (lambda (match) (string-to-number (match-string match))) '(6 5 4 3 2 1 7))))))
+
+;; Not really useful since we can't do anything with the revision yet
+;;(defun vc-annotate-extract-revision-at-line ()
+;; (save-excursion
+;; (move-beginning-of-line 1)
+;; (and (looking-at "[0-9a-f]+")
+;; (buffer-substring (match-beginning 0) (match-end 0)))))
+
+(provide 'vc-git)
diff --git a/contrib/git-svn/.gitignore b/contrib/git-svn/.gitignore
new file mode 100644
index 0000000..d8d87e3
--- /dev/null
+++ b/contrib/git-svn/.gitignore
@@ -0,0 +1,4 @@
+git-svn
+git-svn.xml
+git-svn.html
+git-svn.1
diff --git a/contrib/git-svn/Makefile b/contrib/git-svn/Makefile
new file mode 100644
index 0000000..acedf73
--- /dev/null
+++ b/contrib/git-svn/Makefile
@@ -0,0 +1,35 @@
+all: git-svn
+
+prefix?=$(HOME)
+bindir=$(prefix)/bin
+mandir=$(prefix)/man
+man1=$(mandir)/man1
+INSTALL?=install
+doc_conf=../../Documentation/asciidoc.conf
+-include ../../config.mak
+
+git-svn: git-svn.perl
+ cp $< $@
+ chmod +x $@
+
+install: all
+ $(INSTALL) -d -m755 $(DESTDIR)$(bindir)
+ $(INSTALL) git-svn $(DESTDIR)$(bindir)
+
+install-doc: doc
+ $(INSTALL) git-svn.1 $(DESTDIR)$(man1)
+
+doc: git-svn.1
+git-svn.1 : git-svn.xml
+ xmlto man git-svn.xml
+git-svn.xml : git-svn.txt
+ asciidoc -b docbook -d manpage \
+ -f ../../Documentation/asciidoc.conf $<
+git-svn.html : git-svn.txt
+ asciidoc -b xhtml11 -d manpage \
+ -f ../../Documentation/asciidoc.conf $<
+test: git-svn
+ cd t && $(SHELL) ./t0000-contrib-git-svn.sh
+
+clean:
+ rm -f git-svn *.xml *.html *.1
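+
+# Rough usage sketch: "make" builds git-svn, "make install" copies it into
+# $(bindir) (default $(HOME)/bin), "make test" runs t/t0000-contrib-git-svn.sh,
+# and "make install-doc" builds and installs the manpage.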
diff --git a/contrib/git-svn/git-svn.perl b/contrib/git-svn/git-svn.perl
new file mode 100755
index 0000000..7c44450
--- /dev/null
+++ b/contrib/git-svn/git-svn.perl
@@ -0,0 +1,1180 @@
+#!/usr/bin/env perl
+# Copyright (C) 2006, Eric Wong <normalperson@yhbt.net>
+# License: GPL v2 or later
+use warnings;
+use strict;
+use vars qw/ $AUTHOR $VERSION
+ $SVN_URL $SVN_INFO $SVN_WC $SVN_UUID
+ $GIT_SVN_INDEX $GIT_SVN
+ $GIT_DIR $REV_DIR/;
+$AUTHOR = 'Eric Wong <normalperson@yhbt.net>';
+$VERSION = '0.11.0';
+
+use Cwd qw/abs_path/;
+$GIT_DIR = abs_path($ENV{GIT_DIR} || '.git');
+$ENV{GIT_DIR} = $GIT_DIR;
+
+# make sure the svn binary gives consistent output between locales and TZs:
+$ENV{TZ} = 'UTC';
+$ENV{LC_ALL} = 'C';
+
+# If SVN:: library support is added, please make the dependencies
+# optional and preserve the capability to use the command-line client.
+# use eval { require SVN::... } to make it lazy load
+# We don't use any modules not in the standard Perl distribution:
+use Carp qw/croak/;
+use IO::File qw//;
+use File::Basename qw/dirname basename/;
+use File::Path qw/mkpath/;
+use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/;
+use File::Spec qw//;
+use POSIX qw/strftime/;
+my $sha1 = qr/[a-f\d]{40}/;
+my $sha1_short = qr/[a-f\d]{4,40}/;
+my ($_revision,$_stdin,$_no_ignore_ext,$_no_stop_copy,$_help,$_rmdir,$_edit,
+ $_find_copies_harder, $_l, $_version, $_upgrade, $_authors);
+my (@_branch_from, %tree_map, %users);
+my $_svn_co_url_revs;
+
+my %fc_opts = ( 'no-ignore-externals' => \$_no_ignore_ext,
+ 'branch|b=s' => \@_branch_from,
+ 'authors-file|A=s' => \$_authors );
+my %cmd = (
+ fetch => [ \&fetch, "Download new revisions from SVN",
+ { 'revision|r=s' => \$_revision, %fc_opts } ],
+ init => [ \&init, "Initialize and fetch (import)", { } ],
+ commit => [ \&commit, "Commit git revisions to SVN",
+ { 'stdin|' => \$_stdin,
+ 'edit|e' => \$_edit,
+ 'rmdir' => \$_rmdir,
+ 'find-copies-harder' => \$_find_copies_harder,
+ 'l=i' => \$_l,
+ %fc_opts,
+ } ],
+ 'show-ignore' => [ \&show_ignore, "Show svn:ignore listings", { } ],
+ rebuild => [ \&rebuild, "Rebuild git-svn metadata (after git clone)",
+ { 'no-ignore-externals' => \$_no_ignore_ext,
+ 'upgrade' => \$_upgrade } ],
+);
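+
+# Rough usage illustration for the commands defined above; the URL is only a
+# placeholder:
+#   git-svn init http://svn.example.org/project/trunk
+#   git-svn fetch
+#   git-svn commit <committish>
+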
+my $cmd;
+for (my $i = 0; $i < @ARGV; $i++) {
+ if (defined $cmd{$ARGV[$i]}) {
+ $cmd = $ARGV[$i];
+ splice @ARGV, $i, 1;
+ last;
+ }
+};
+
+my %opts = %{$cmd{$cmd}->[2]} if (defined $cmd);
+
+# convert GetOpt::Long specs for use by git-repo-config
+foreach my $o (keys %opts) {
+ my $v = $opts{$o};
+ my ($key) = ($o =~ /^([a-z\-]+)/);
+ $key =~ s/-//g;
+ my $arg = 'git-repo-config';
+ $arg .= ' --int' if ($o =~ /=i$/);
+ $arg .= ' --bool' if ($o !~ /=[sfi]$/);
+ if (ref $v eq 'ARRAY') {
+ chomp(my @tmp = `$arg --get-all svn.$key`);
+ @$v = @tmp if @tmp;
+ } else {
+ chomp(my $tmp = `$arg --get svn.$key`);
+ if ($tmp && !($arg =~ / --bool / && $tmp eq 'false')) {
+ $$v = $tmp;
+ }
+ }
+}
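+# For example (illustrative), "git-repo-config svn.authorsfile ~/authors.txt"
+# would make --authors-file default to that file for this repository.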
+
+GetOptions(%opts, 'help|H|h' => \$_help,
+ 'version|V' => \$_version,
+ 'id|i=s' => \$GIT_SVN) or exit 1;
+
+$GIT_SVN ||= $ENV{GIT_SVN_ID} || 'git-svn';
+$GIT_SVN_INDEX = "$GIT_DIR/$GIT_SVN/index";
+$SVN_URL = undef;
+$REV_DIR = "$GIT_DIR/$GIT_SVN/revs";
+$SVN_WC = "$GIT_DIR/$GIT_SVN/tree";
+
+usage(0) if $_help;
+version() if $_version;
+usage(1) unless defined $cmd;
+load_authors() if $_authors;
+svn_compat_check();
+$cmd{$cmd}->[0]->(@ARGV);
+exit 0;
+
+####################### primary functions ######################
+sub usage {
+ my $exit = shift || 0;
+ my $fd = $exit ? \*STDERR : \*STDOUT;
+ print $fd <<"";
+git-svn - bidirectional operations between a single Subversion tree and git
+Usage: $0 <command> [options] [arguments]\n
+
+ print $fd "Available commands:\n" unless $cmd;
+
+ foreach (sort keys %cmd) {
+ next if $cmd && $cmd ne $_;
+ print $fd ' ',pack('A13',$_),$cmd{$_}->[1],"\n";
+ foreach (keys %{$cmd{$_}->[2]}) {
+ # prints out arguments as they should be passed:
+ my $x = s#=s$## ? '<arg>' : s#=i$## ? '<num>' : '';
+ print $fd ' ' x 17, join(', ', map { length $_ > 1 ?
+ "--$_" : "-$_" }
+ split /\|/,$_)," $x\n";
+ }
+ }
+ print $fd <<"";
+\nGIT_SVN_ID may be set in the environment or via the --id/-i switch to an
+arbitrary identifier if you're tracking multiple SVN branches/repositories in
+one git repository and want to keep them separate. See git-svn(1) for more
+information.
+
+ exit $exit;
+}
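+
+# For instance (illustrative), the identifier mentioned above can be set
+# either way; both of these are equivalent:
+#   GIT_SVN_ID=stable git-svn fetch
+#   git-svn fetch --id=stable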
+
+sub version {
+ print "git-svn version $VERSION\n";
+ exit 0;
+}
+
+sub rebuild {
+ $SVN_URL = shift or undef;
+ my $newest_rev = 0;
+ if ($_upgrade) {
+ sys('git-update-ref',"refs/remotes/$GIT_SVN","$GIT_SVN-HEAD");
+ } else {
+ check_upgrade_needed();
+ }
+
+ my $pid = open(my $rev_list,'-|');
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ exec("git-rev-list","refs/remotes/$GIT_SVN") or croak $!;
+ }
+ my $latest;
+ while (<$rev_list>) {
+ chomp;
+ my $c = $_;
+ croak "Non-SHA1: $c\n" unless $c =~ /^$sha1$/o;
+ my @commit = grep(/^git-svn-id: /,`git-cat-file commit $c`);
+ next if (!@commit); # skip merges
+ my $id = $commit[$#commit];
+ my ($url, $rev, $uuid) = ($id =~ /^git-svn-id:\s(\S+?)\@(\d+)
+ \s([a-f\d\-]+)$/x);
+ if (!$rev || !$uuid || !$url) {
+ # some of the original repositories I made had
+ # identifiers like this:
+ ($rev, $uuid) = ($id =~/^git-svn-id:\s(\d+)
+ \@([a-f\d\-]+)/x);
+ if (!$rev || !$uuid) {
+ croak "Unable to extract revision or UUID from ",
+ "$c, $id\n";
+ }
+ }
+
+ # if we merged or otherwise started elsewhere, this is
+ # how we break out of it
+ next if (defined $SVN_UUID && ($uuid ne $SVN_UUID));
+ next if (defined $SVN_URL && defined $url && ($url ne $SVN_URL));
+
+ print "r$rev = $c\n";
+ unless (defined $latest) {
+ if (!$SVN_URL && !$url) {
+ croak "SVN repository location required: $url\n";
+ }
+ $SVN_URL ||= $url;
+ $SVN_UUID ||= $uuid;
+ setup_git_svn();
+ $latest = $rev;
+ }
+ assert_revision_eq_or_unknown($rev, $c);
+ sys('git-update-ref',"$GIT_SVN/revs/$rev",$c);
+ $newest_rev = $rev if ($rev > $newest_rev);
+ }
+ close $rev_list or croak $?;
+ if (!chdir $SVN_WC) {
+ svn_cmd_checkout($SVN_URL, $latest, $SVN_WC);
+ chdir $SVN_WC or croak $!;
+ }
+
+ $pid = fork;
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ my @svn_up = qw(svn up);
+ push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
+ sys(@svn_up,"-r$newest_rev");
+ $ENV{GIT_INDEX_FILE} = $GIT_SVN_INDEX;
+ git_addremove();
+ exec('git-write-tree');
+ }
+ waitpid $pid, 0;
+
+ if ($_upgrade) {
+ print STDERR <<"";
+Keeping deprecated refs/heads/$GIT_SVN-HEAD for now. Please remove it
+when you have upgraded your tools and habits to use refs/remotes/$GIT_SVN
+
+ }
+}
+
+sub init {
+ $SVN_URL = shift or croak "SVN repository location required\n";
+ unless (-d $GIT_DIR) {
+ sys('git-init-db');
+ }
+ setup_git_svn();
+}
+
+sub fetch {
+ my (@parents) = @_;
+ check_upgrade_needed();
+ $SVN_URL ||= file_to_s("$GIT_DIR/$GIT_SVN/info/url");
+ my @log_args = -d $SVN_WC ? ($SVN_WC) : ($SVN_URL);
+ unless ($_revision) {
+ $_revision = -d $SVN_WC ? 'BASE:HEAD' : '0:HEAD';
+ }
+ push @log_args, "-r$_revision";
+ push @log_args, '--stop-on-copy' unless $_no_stop_copy;
+
+ my $svn_log = svn_log_raw(@log_args);
+
+ my $base = next_log_entry($svn_log) or croak "No base revision!\n";
+ my $last_commit = undef;
+ unless (-d $SVN_WC) {
+ svn_cmd_checkout($SVN_URL,$base->{revision},$SVN_WC);
+ chdir $SVN_WC or croak $!;
+ read_uuid();
+ $last_commit = git_commit($base, @parents);
+ assert_svn_wc_clean($base->{revision}, $last_commit);
+ } else {
+ chdir $SVN_WC or croak $!;
+ read_uuid();
+ $last_commit = file_to_s("$REV_DIR/$base->{revision}");
+ }
+ my @svn_up = qw(svn up);
+ push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
+ my $last = $base;
+ while (my $log_msg = next_log_entry($svn_log)) {
+ assert_svn_wc_clean($last->{revision}, $last_commit);
+ if ($last->{revision} >= $log_msg->{revision}) {
+ croak "Out of order: last >= current: ",
+ "$last->{revision} >= $log_msg->{revision}\n";
+ }
+ sys(@svn_up,"-r$log_msg->{revision}");
+ $last_commit = git_commit($log_msg, $last_commit, @parents);
+ $last = $log_msg;
+ }
+ assert_svn_wc_clean($last->{revision}, $last_commit);
+ unless (-e "$GIT_DIR/refs/heads/master") {
+ sys(qw(git-update-ref refs/heads/master),$last_commit);
+ }
+ return $last;
+}
+
+sub commit {
+ my (@commits) = @_;
+ check_upgrade_needed();
+ if ($_stdin || !@commits) {
+ print "Reading from stdin...\n";
+ @commits = ();
+ while (<STDIN>) {
+ if (/\b($sha1_short)\b/o) {
+ unshift @commits, $1;
+ }
+ }
+ }
+ my @revs;
+ foreach my $c (@commits) {
+ chomp(my @tmp = safe_qx('git-rev-parse',$c));
+ if (scalar @tmp == 1) {
+ push @revs, $tmp[0];
+ } elsif (scalar @tmp > 1) {
+ push @revs, reverse (safe_qx('git-rev-list',@tmp));
+ } else {
+ die "Failed to rev-parse $c\n";
+ }
+ }
+ chomp @revs;
+
+ fetch();
+ chdir $SVN_WC or croak $!;
+ my $info = svn_info('.');
+ read_uuid($info);
+ my $svn_current_rev = $info->{'Last Changed Rev'};
+ foreach my $c (@revs) {
+ my $mods = svn_checkout_tree($svn_current_rev, $c);
+ if (scalar @$mods == 0) {
+ print "Skipping, no changes detected\n";
+ next;
+ }
+ $svn_current_rev = svn_commit_tree($svn_current_rev, $c);
+ }
+ print "Done committing ",scalar @revs," revisions to SVN\n";
+
+}
+
+sub show_ignore {
+ require File::Find or die $!;
+ my $exclude_file = "$GIT_DIR/info/exclude";
+ open my $fh, '<', $exclude_file or croak $!;
+ chomp(my @excludes = (<$fh>));
+ close $fh or croak $!;
+
+ $SVN_URL ||= file_to_s("$GIT_DIR/$GIT_SVN/info/url");
+ chdir $SVN_WC or croak $!;
+ my %ign;
+ File::Find::find({wanted=>sub{if(lstat $_ && -d _ && -d "$_/.svn"){
+ s#^\./##;
+ @{$ign{$_}} = safe_qx(qw(svn propget svn:ignore),$_);
+ }}, no_chdir=>1},'.');
+
+ print "\n# /\n";
+ foreach (@{$ign{'.'}}) { print '/',$_ if /\S/ }
+ delete $ign{'.'};
+ foreach my $i (sort keys %ign) {
+ print "\n# ",$i,"\n";
+ foreach (@{$ign{$i}}) { print '/',$i,'/',$_ if /\S/ }
+ }
+}
+
+########################### utility functions #########################
+
+sub read_uuid {
+ return if $SVN_UUID;
+ my $info = shift || svn_info('.');
+ $SVN_UUID = $info->{'Repository UUID'} or
+ croak "Repository UUID unreadable\n";
+ s_to_file($SVN_UUID,"$GIT_DIR/$GIT_SVN/info/uuid");
+}
+
+sub setup_git_svn {
+ defined $SVN_URL or croak "SVN repository location required\n";
+ unless (-d $GIT_DIR) {
+ croak "GIT_DIR=$GIT_DIR does not exist!\n";
+ }
+ mkpath(["$GIT_DIR/$GIT_SVN"]);
+ mkpath(["$GIT_DIR/$GIT_SVN/info"]);
+ mkpath([$REV_DIR]);
+ s_to_file($SVN_URL,"$GIT_DIR/$GIT_SVN/info/url");
+
+ open my $fd, '>>', "$GIT_DIR/$GIT_SVN/info/exclude" or croak $!;
+ print $fd '.svn',"\n";
+ close $fd or croak $!;
+}
+
+sub assert_svn_wc_clean {
+ my ($svn_rev, $treeish) = @_;
+ croak "$svn_rev is not an integer!\n" unless ($svn_rev =~ /^\d+$/);
+ croak "$treeish is not a sha1!\n" unless ($treeish =~ /^$sha1$/o);
+ my $lcr = svn_info('.')->{'Last Changed Rev'};
+ if ($svn_rev != $lcr) {
+ print STDERR "Checking for copy-tree ... ";
+ # use svn diff to make sure nothing actually changed between the two revisions
+ my @diff = grep(/^Index: /,(safe_qx(qw(svn diff),
+ "-r$lcr:$svn_rev")));
+ if (@diff) {
+ croak "Nope! Expected r$svn_rev, got r$lcr\n";
+ } else {
+ print STDERR "OK!\n";
+ }
+ }
+ my @status = grep(!/^Performing status on external/,(`svn status`));
+ @status = grep(!/^\s*$/,@status);
+ if (scalar @status) {
+ print STDERR "Tree ($SVN_WC) is not clean:\n";
+ print STDERR $_ foreach @status;
+ croak;
+ }
+ assert_tree($treeish);
+}
+
+sub assert_tree {
+ my ($treeish) = @_;
+ croak "Not a sha1: $treeish\n" unless $treeish =~ /^$sha1$/o;
+ chomp(my $type = `git-cat-file -t $treeish`);
+ my $expected;
+ while ($type eq 'tag') {
+ chomp(($treeish, $type) = `git-cat-file tag $treeish`);
+ }
+ if ($type eq 'commit') {
+ $expected = (grep /^tree /,`git-cat-file commit $treeish`)[0];
+ ($expected) = ($expected =~ /^tree ($sha1)$/);
+ die "Unable to get tree from $treeish\n" unless $expected;
+ } elsif ($type eq 'tree') {
+ $expected = $treeish;
+ } else {
+ die "$treeish is a $type, expected tree, tag or commit\n";
+ }
+
+ my $old_index = $ENV{GIT_INDEX_FILE};
+ my $tmpindex = $GIT_SVN_INDEX.'.assert-tmp';
+ if (-e $tmpindex) {
+ unlink $tmpindex or croak $!;
+ }
+ $ENV{GIT_INDEX_FILE} = $tmpindex;
+ git_addremove();
+ chomp(my $tree = `git-write-tree`);
+ if ($old_index) {
+ $ENV{GIT_INDEX_FILE} = $old_index;
+ } else {
+ delete $ENV{GIT_INDEX_FILE};
+ }
+ if ($tree ne $expected) {
+ croak "Tree mismatch, Got: $tree, Expected: $expected\n";
+ }
+}
+
+sub parse_diff_tree {
+ my $diff_fh = shift;
+ local $/ = "\0";
+ my $state = 'meta';
+ my @mods;
+ while (<$diff_fh>) {
+ chomp $_; # this gets rid of the trailing "\0"
+ if ($state eq 'meta' && /^:(\d{6})\s(\d{6})\s
+ $sha1\s($sha1)\s([MTCRAD])\d*$/xo) {
+ push @mods, { mode_a => $1, mode_b => $2,
+ sha1_b => $3, chg => $4 };
+ if ($4 =~ /^(?:C|R)$/) {
+ $state = 'file_a';
+ } else {
+ $state = 'file_b';
+ }
+ } elsif ($state eq 'file_a') {
+ my $x = $mods[$#mods] or croak "Empty array\n";
+ if ($x->{chg} !~ /^(?:C|R)$/) {
+ croak "Error parsing $_, $x->{chg}\n";
+ }
+ $x->{file_a} = $_;
+ $state = 'file_b';
+ } elsif ($state eq 'file_b') {
+ my $x = $mods[$#mods] or croak "Empty array\n";
+ if (exists $x->{file_a} && $x->{chg} !~ /^(?:C|R)$/) {
+ croak "Error parsing $_, $x->{chg}\n";
+ }
+ if (!exists $x->{file_a} && $x->{chg} =~ /^(?:C|R)$/) {
+ croak "Error parsing $_, $x->{chg}\n";
+ }
+ $x->{file_b} = $_;
+ $state = 'meta';
+ } else {
+ croak "Error parsing $_\n";
+ }
+ }
+ close $diff_fh or croak $!;
+
+ return \@mods;
+}
+
+sub svn_check_prop_executable {
+ my $m = shift;
+ return if -l $m->{file_b};
+ if ($m->{mode_b} =~ /755$/) {
+ chmod((0755 &~ umask),$m->{file_b}) or croak $!;
+ if ($m->{mode_a} !~ /755$/) {
+ sys(qw(svn propset svn:executable 1), $m->{file_b});
+ }
+ -x $m->{file_b} or croak "$m->{file_b} is not executable!\n";
+ } elsif ($m->{mode_b} !~ /755$/ && $m->{mode_a} =~ /755$/) {
+ sys(qw(svn propdel svn:executable), $m->{file_b});
+ chmod((0644 &~ umask),$m->{file_b}) or croak $!;
+ -x $m->{file_b} and croak "$m->{file_b} is executable!\n";
+ }
+}
+
+sub svn_ensure_parent_path {
+ my $dir_b = dirname(shift);
+ svn_ensure_parent_path($dir_b) if ($dir_b ne File::Spec->curdir);
+ mkpath([$dir_b]) unless (-d $dir_b);
+ sys(qw(svn add -N), $dir_b) unless (-d "$dir_b/.svn");
+}
+
+sub precommit_check {
+ my $mods = shift;
+ my (%rm_file, %rmdir_check, %added_check);
+
+ my %o = ( D => 0, R => 1, C => 2, A => 3, M => 3, T => 3 );
+ foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
+ if ($m->{chg} eq 'R') {
+ if (-d $m->{file_b}) {
+ err_dir_to_file("$m->{file_a} => $m->{file_b}");
+ }
+ # dir/$file => dir/file/$file
+ my $dirname = dirname($m->{file_b});
+ while ($dirname ne File::Spec->curdir) {
+ if ($dirname ne $m->{file_a}) {
+ $dirname = dirname($dirname);
+ next;
+ }
+ err_file_to_dir("$m->{file_a} => $m->{file_b}");
+ }
+ # baz/zzz => baz (baz is a file)
+ $dirname = dirname($m->{file_a});
+ while ($dirname ne File::Spec->curdir) {
+ if ($dirname ne $m->{file_b}) {
+ $dirname = dirname($dirname);
+ next;
+ }
+ err_dir_to_file("$m->{file_a} => $m->{file_b}");
+ }
+ }
+ if ($m->{chg} =~ /^(D|R)$/) {
+ my $t = $1 eq 'D' ? 'file_b' : 'file_a';
+ $rm_file{ $m->{$t} } = 1;
+ my $dirname = dirname( $m->{$t} );
+ my $basename = basename( $m->{$t} );
+ $rmdir_check{$dirname}->{$basename} = 1;
+ } elsif ($m->{chg} =~ /^(?:A|C)$/) {
+ if (-d $m->{file_b}) {
+ err_dir_to_file($m->{file_b});
+ }
+ my $dirname = dirname( $m->{file_b} );
+ my $basename = basename( $m->{file_b} );
+ $added_check{$dirname}->{$basename} = 1;
+ while ($dirname ne File::Spec->curdir) {
+ if ($rm_file{$dirname}) {
+ err_file_to_dir($m->{file_b});
+ }
+ $dirname = dirname $dirname;
+ }
+ }
+ }
+ return (\%rmdir_check, \%added_check);
+
+ sub err_dir_to_file {
+ my $file = shift;
+ print STDERR "Node change from directory to file ",
+ "is not supported by Subversion: ",$file,"\n";
+ exit 1;
+ }
+ sub err_file_to_dir {
+ my $file = shift;
+ print STDERR "Node change from file to directory ",
+ "is not supported by Subversion: ",$file,"\n";
+ exit 1;
+ }
+}
+
+sub svn_checkout_tree {
+ my ($svn_rev, $treeish) = @_;
+ my $from = file_to_s("$REV_DIR/$svn_rev");
+ assert_svn_wc_clean($svn_rev,$from);
+ print "diff-tree $from $treeish\n";
+ my $pid = open my $diff_fh, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ my @diff_tree = qw(git-diff-tree -z -r -C);
+ push @diff_tree, '--find-copies-harder' if $_find_copies_harder;
+ push @diff_tree, "-l$_l" if defined $_l;
+ exec(@diff_tree, $from, $treeish) or croak $!;
+ }
+ my $mods = parse_diff_tree($diff_fh);
+ unless (@$mods) {
+ # git can do empty commits, but SVN doesn't allow it...
+ return $mods;
+ }
+ my ($rm, $add) = precommit_check($mods);
+
+ my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 );
+ foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
+ if ($m->{chg} eq 'C') {
+ svn_ensure_parent_path( $m->{file_b} );
+ sys(qw(svn cp), $m->{file_a}, $m->{file_b});
+ apply_mod_line_blob($m);
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'D') {
+ sys(qw(svn rm --force), $m->{file_b});
+ } elsif ($m->{chg} eq 'R') {
+ svn_ensure_parent_path( $m->{file_b} );
+ sys(qw(svn mv --force), $m->{file_a}, $m->{file_b});
+ apply_mod_line_blob($m);
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'M') {
+ apply_mod_line_blob($m);
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'T') {
+ sys(qw(svn rm --force),$m->{file_b});
+ apply_mod_line_blob($m);
+ sys(qw(svn add --force), $m->{file_b});
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'A') {
+ svn_ensure_parent_path( $m->{file_b} );
+ apply_mod_line_blob($m);
+ sys(qw(svn add --force), $m->{file_b});
+ svn_check_prop_executable($m);
+ } else {
+ croak "Invalid chg: $m->{chg}\n";
+ }
+ }
+
+ assert_tree($treeish);
+ if ($_rmdir) { # remove empty directories
+ handle_rmdir($rm, $add);
+ }
+ assert_tree($treeish);
+ return $mods;
+}
+
+# svn ls doesn't work with respect to the current working tree, but what's
+# in the repository. There's not even an option for it... *sigh*
+# (added files don't show up and removed files remain in the ls listing)
+sub svn_ls_current {
+ my ($dir, $rm, $add) = @_;
+ chomp(my @ls = safe_qx('svn','ls',$dir));
+ my @ret = ();
+ foreach (@ls) {
+ s#/$##; # trailing slashes are evil
+ push @ret, $_ unless $rm->{$dir}->{$_};
+ }
+ if (exists $add->{$dir}) {
+ push @ret, keys %{$add->{$dir}};
+ }
+ return \@ret;
+}
+
+sub handle_rmdir {
+ my ($rm, $add) = @_;
+
+ foreach my $dir (sort {length $b <=> length $a} keys %$rm) {
+ my $ls = svn_ls_current($dir, $rm, $add);
+ next if (scalar @$ls);
+ sys(qw(svn rm --force),$dir);
+
+ my $dn = dirname $dir;
+ $rm->{ $dn }->{ basename $dir } = 1;
+ $ls = svn_ls_current($dn, $rm, $add);
+ while (scalar @$ls == 0 && $dn ne File::Spec->curdir) {
+ sys(qw(svn rm --force),$dn);
+ $dir = basename $dn;
+ $dn = dirname $dn;
+ $rm->{ $dn }->{ $dir } = 1;
+ $ls = svn_ls_current($dn, $rm, $add);
+ }
+ }
+}
+
+sub svn_commit_tree {
+ my ($svn_rev, $commit) = @_;
+ my $commit_msg = "$GIT_DIR/$GIT_SVN/.svn-commit.tmp.$$";
+ my %log_msg = ( msg => '' );
+ open my $msg, '>', $commit_msg or croak $!;
+
+ chomp(my $type = `git-cat-file -t $commit`);
+ if ($type eq 'commit') {
+ my $pid = open my $msg_fh, '-|';
+ defined $pid or croak $!;
+
+ if ($pid == 0) {
+ exec(qw(git-cat-file commit), $commit) or croak $!;
+ }
+ my $in_msg = 0;
+ while (<$msg_fh>) {
+ if (!$in_msg) {
+ $in_msg = 1 if (/^\s*$/);
+ } elsif (/^git-svn-id: /) {
+ # skip this, we regenerate the correct one
+ # on re-fetch anyways
+ } else {
+ print $msg $_ or croak $!;
+ }
+ }
+ close $msg_fh or croak $!;
+ }
+ close $msg or croak $!;
+
+ if ($_edit || ($type eq 'tree')) {
+ my $editor = $ENV{VISUAL} || $ENV{EDITOR} || 'vi';
+ system($editor, $commit_msg);
+ }
+
+ # file_to_s removes all trailing newlines, so just use chomp() here:
+ open $msg, '<', $commit_msg or croak $!;
+ { local $/; chomp($log_msg{msg} = <$msg>); }
+ close $msg or croak $!;
+
+ my ($oneline) = ($log_msg{msg} =~ /([^\n\r]+)/);
+ print "Committing $commit: $oneline\n";
+
+ my @ci_output = safe_qx(qw(svn commit -F),$commit_msg);
+ my ($committed) = grep(/^Committed revision \d+\./,@ci_output);
+ unlink $commit_msg;
+ defined $committed or croak
+ "Commit output failed to parse committed revision!\n",
+ join("\n",@ci_output),"\n";
+ my ($rev_committed) = ($committed =~ /^Committed revision (\d+)\./);
+
+ my @svn_up = qw(svn up);
+ push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
+ if ($rev_committed == ($svn_rev + 1)) {
+ push @svn_up, "-r$rev_committed";
+ sys(@svn_up);
+ my $info = svn_info('.');
+ my $date = $info->{'Last Changed Date'} or die "Missing date\n";
+ if ($info->{'Last Changed Rev'} != $rev_committed) {
+ croak "$info->{'Last Changed Rev'} != $rev_committed\n"
+ }
+ my ($Y,$m,$d,$H,$M,$S,$tz) = ($date =~
+ /(\d{4})\-(\d\d)\-(\d\d)\s
+ (\d\d)\:(\d\d)\:(\d\d)\s([\-\+]\d+)/x)
+ or croak "Failed to parse date: $date\n";
+ $log_msg{date} = "$tz $Y-$m-$d $H:$M:$S";
+ $log_msg{author} = $info->{'Last Changed Author'};
+ $log_msg{revision} = $rev_committed;
+ $log_msg{msg} .= "\n";
+ my $parent = file_to_s("$REV_DIR/$svn_rev");
+ git_commit(\%log_msg, $parent, $commit);
+ return $rev_committed;
+ }
+ # resync immediately
+ push @svn_up, "-r$svn_rev";
+ sys(@svn_up);
+ return fetch("$rev_committed=$commit")->{revision};
+}
+
+# read the entire log into a temporary file (which is removed ASAP)
+# and store the file handle + parser state
+sub svn_log_raw {
+ my (@log_args) = @_;
+ my $log_fh = IO::File->new_tmpfile or croak $!;
+ my $pid = fork;
+ defined $pid or croak $!;
+ if (!$pid) {
+ open STDOUT, '>&', $log_fh or croak $!;
+ exec (qw(svn log), @log_args) or croak $!
+ }
+ waitpid $pid, 0;
+ croak if $?;
+ seek $log_fh, 0, 0 or croak $!;
+ return { state => 'sep', fh => $log_fh };
+}
+
+sub next_log_entry {
+ my $log = shift; # retval of svn_log_raw()
+ my $ret = undef;
+ my $fh = $log->{fh};
+
+ while (<$fh>) {
+ chomp;
+ if (/^\-{72}$/) {
+ if ($log->{state} eq 'msg') {
+ if ($ret->{lines}) {
+ $ret->{msg} .= $_."\n";
+ unless(--$ret->{lines}) {
+ $log->{state} = 'sep';
+ }
+ } else {
+ croak "Log parse error at: $_\n",
+ $ret->{revision},
+ "\n";
+ }
+ next;
+ }
+ if ($log->{state} ne 'sep') {
+ croak "Log parse error at: $_\n",
+ "state: $log->{state}\n",
+ $ret->{revision},
+ "\n";
+ }
+ $log->{state} = 'rev';
+
+ # if we have an empty log message, put something there:
+ if ($ret) {
+ $ret->{msg} ||= "\n";
+ delete $ret->{lines};
+ return $ret;
+ }
+ next;
+ }
+ if ($log->{state} eq 'rev' && s/^r(\d+)\s*\|\s*//) {
+ my $rev = $1;
+ my ($author, $date, $lines) = split(/\s*\|\s*/, $_, 3);
+ ($lines) = ($lines =~ /(\d+)/);
+ my ($Y,$m,$d,$H,$M,$S,$tz) = ($date =~
+ /(\d{4})\-(\d\d)\-(\d\d)\s
+ (\d\d)\:(\d\d)\:(\d\d)\s([\-\+]\d+)/x)
+ or croak "Failed to parse date: $date\n";
+ $ret = { revision => $rev,
+ date => "$tz $Y-$m-$d $H:$M:$S",
+ author => $author,
+ lines => $lines,
+ msg => '' };
+ if (defined $_authors && ! defined $users{$author}) {
+ die "Author: $author not defined in ",
+ "$_authors file\n";
+ }
+ $log->{state} = 'msg_start';
+ next;
+ }
+ # skip the first blank line of the message:
+ if ($log->{state} eq 'msg_start' && /^$/) {
+ $log->{state} = 'msg';
+ } elsif ($log->{state} eq 'msg') {
+ if ($ret->{lines}) {
+ $ret->{msg} .= $_."\n";
+ unless (--$ret->{lines}) {
+ $log->{state} = 'sep';
+ }
+ } else {
+ croak "Log parse error at: $_\n",
+ $ret->{revision},"\n";
+ }
+ }
+ }
+ return $ret;
+}
+
+sub svn_info {
+ my $url = shift || $SVN_URL;
+
+ my $pid = open my $info_fh, '-|';
+ defined $pid or croak $!;
+
+ if ($pid == 0) {
+ exec(qw(svn info),$url) or croak $!;
+ }
+
+ my $ret = {};
+ # only single-line values seem to exist in svn info output
+ while (<$info_fh>) {
+ chomp $_;
+ if (m#^([^:]+)\s*:\s*(\S.*)$#) {
+ $ret->{$1} = $2;
+ push @{$ret->{-order}}, $1;
+ }
+ }
+ close $info_fh or croak $!;
+ return $ret;
+}
+
+sub sys { system(@_) == 0 or croak $? }
+
+sub git_addremove {
+ system( "git-diff-files --name-only -z ".
+ " | git-update-index --remove -z --stdin && ".
+ "git-ls-files -z --others ".
+ "'--exclude-from=$GIT_DIR/$GIT_SVN/info/exclude'".
+ " | git-update-index --add -z --stdin"
+ ) == 0 or croak $?
+}
+
+sub s_to_file {
+ my ($str, $file, $mode) = @_;
+ open my $fd,'>',$file or croak $!;
+ print $fd $str,"\n" or croak $!;
+ close $fd or croak $!;
+ chmod ($mode &~ umask, $file) if (defined $mode);
+}
+
+sub file_to_s {
+ my $file = shift;
+ open my $fd,'<',$file or croak "$!: file: $file\n";
+ local $/;
+ my $ret = <$fd>;
+ close $fd or croak $!;
+ $ret =~ s/\s*$//s;
+ return $ret;
+}
+
+sub assert_revision_unknown {
+ my $revno = shift;
+ if (-f "$REV_DIR/$revno") {
+ croak "$REV_DIR/$revno already exists! ",
+ "Why are we refetching it?";
+ }
+}
+
+sub trees_eq {
+ my ($x, $y) = @_;
+ my @x = safe_qx('git-cat-file','commit',$x);
+ my @y = safe_qx('git-cat-file','commit',$y);
+ if (($y[0] ne $x[0]) || $x[0] !~ /^tree $sha1\n$/
+ || $y[0] !~ /^tree $sha1\n$/) {
+ print STDERR "Trees not equal: $y[0] != $x[0]\n";
+ return 0
+ }
+ return 1;
+}
+
+sub assert_revision_eq_or_unknown {
+ my ($revno, $commit) = @_;
+ if (-f "$REV_DIR/$revno") {
+ my $current = file_to_s("$REV_DIR/$revno");
+ if (($commit ne $current) && !trees_eq($commit, $current)) {
+ croak "$REV_DIR/$revno already exists!\n",
+ "current: $current\nexpected: $commit\n";
+ }
+ return;
+ }
+}
+
+sub git_commit {
+ my ($log_msg, @parents) = @_;
+ assert_revision_unknown($log_msg->{revision});
+ my $out_fh = IO::File->new_tmpfile or croak $!;
+
+ map_tree_joins() if (@_branch_from && !%tree_map);
+
+ # commit parents can be conditionally bound to a particular
+ # svn revision via: "svn_revno=commit_sha1", filter them out here:
+ my @exec_parents;
+ foreach my $p (@parents) {
+ next unless defined $p;
+ if ($p =~ /^(\d+)=($sha1_short)$/o) {
+ if ($1 == $log_msg->{revision}) {
+ push @exec_parents, $2;
+ }
+ } else {
+ push @exec_parents, $p if $p =~ /$sha1_short/o;
+ }
+ }
+
+ my $pid = fork;
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ $ENV{GIT_INDEX_FILE} = $GIT_SVN_INDEX;
+ git_addremove();
+ chomp(my $tree = `git-write-tree`);
+ croak if $?;
+ if (exists $tree_map{$tree}) {
+ my %seen_parent = map { $_ => 1 } @exec_parents;
+ foreach (@{$tree_map{$tree}}) {
+ # MAXPARENT is defined to 16 in commit-tree.c:
+ if ($seen_parent{$_} || @exec_parents > 16) {
+ next;
+ }
+ push @exec_parents, $_;
+ $seen_parent{$_} = 1;
+ }
+ }
+ my $msg_fh = IO::File->new_tmpfile or croak $!;
+ print $msg_fh $log_msg->{msg}, "\ngit-svn-id: ",
+ "$SVN_URL\@$log_msg->{revision}",
+ " $SVN_UUID\n" or croak $!;
+ $msg_fh->flush == 0 or croak $!;
+ seek $msg_fh, 0, 0 or croak $!;
+
+ set_commit_env($log_msg);
+
+ my @exec = ('git-commit-tree',$tree);
+ push @exec, '-p', $_ foreach @exec_parents;
+ open STDIN, '<&', $msg_fh or croak $!;
+ open STDOUT, '>&', $out_fh or croak $!;
+ exec @exec or croak $!;
+ }
+ waitpid($pid,0);
+ croak if $?;
+
+ $out_fh->flush == 0 or croak $!;
+ seek $out_fh, 0, 0 or croak $!;
+ chomp(my $commit = do { local $/; <$out_fh> });
+ if ($commit !~ /^$sha1$/o) {
+ croak "Failed to commit, invalid sha1: $commit\n";
+ }
+ my @update_ref = ('git-update-ref',"refs/remotes/$GIT_SVN",$commit);
+ if (my $primary_parent = shift @exec_parents) {
+ $pid = fork;
+ defined $pid or croak $!;
+ if (!$pid) {
+ close STDERR;
+ close STDOUT;
+ exec 'git-rev-parse','--verify',
+ "refs/remotes/$GIT_SVN^0";
+ }
+ waitpid $pid, 0;
+ push @update_ref, $primary_parent unless $?;
+ }
+ sys(@update_ref);
+ sys('git-update-ref',"$GIT_SVN/revs/$log_msg->{revision}",$commit);
+ print "r$log_msg->{revision} = $commit\n";
+ return $commit;
+}
+
+sub set_commit_env {
+ my ($log_msg) = @_;
+ my $author = $log_msg->{author};
+ my ($name,$email) = defined $users{$author} ? @{$users{$author}}
+ : ($author,"$author\@$SVN_UUID");
+ $ENV{GIT_AUTHOR_NAME} = $ENV{GIT_COMMITTER_NAME} = $name;
+ $ENV{GIT_AUTHOR_EMAIL} = $ENV{GIT_COMMITTER_EMAIL} = $email;
+ $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_msg->{date};
+}
+
+sub apply_mod_line_blob {
+ my $m = shift;
+ if ($m->{mode_b} =~ /^120/) {
+ blob_to_symlink($m->{sha1_b}, $m->{file_b});
+ } else {
+ blob_to_file($m->{sha1_b}, $m->{file_b});
+ }
+}
+
+sub blob_to_symlink {
+ my ($blob, $link) = @_;
+ defined $link or croak "\$link not defined!\n";
+ croak "Not a sha1: $blob\n" unless $blob =~ /^$sha1$/o;
+ if (-l $link || -f _) {
+ unlink $link or croak $!;
+ }
+
+ my $dest = `git-cat-file blob $blob`; # no newline, so no chomp
+ symlink $dest, $link or croak $!;
+}
+
+sub blob_to_file {
+ my ($blob, $file) = @_;
+ defined $file or croak "\$file not defined!\n";
+ croak "Not a sha1: $blob\n" unless $blob =~ /^$sha1$/o;
+ if (-l $file || -f _) {
+ unlink $file or croak $!;
+ }
+
+ open my $blob_fh, '>', $file or croak "$!: $file\n";
+ my $pid = fork;
+ defined $pid or croak $!;
+
+ if ($pid == 0) {
+ open STDOUT, '>&', $blob_fh or croak $!;
+ exec('git-cat-file','blob',$blob);
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+
+ close $blob_fh or croak $!;
+}
+
+sub safe_qx {
+ my $pid = open my $child, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ exec(@_) or croak $?;
+ }
+ my @ret = (<$child>);
+ close $child or croak $?;
+ die $? if $?; # just in case close didn't error out
+ return wantarray ? @ret : join('',@ret);
+}
+
+sub svn_compat_check {
+ my @co_help = safe_qx(qw(svn co -h));
+ unless (grep /ignore-externals/,@co_help) {
+ print STDERR "W: Installed svn version does not support ",
+ "--ignore-externals\n";
+ $_no_ignore_ext = 1;
+ }
+ if (grep /usage: checkout URL\[\@REV\]/,@co_help) {
+ $_svn_co_url_revs = 1;
+ }
+
+ # I really, really hope nobody hits this...
+ unless (grep /stop-on-copy/, (safe_qx(qw(svn log -h)))) {
+ print STDERR <<'';
+W: The installed svn version does not support the --stop-on-copy flag in
+ the log command.
+ Let's hope the directory you're tracking is not a branch or tag
+ and was never moved within the repository...
+
+ $_no_stop_copy = 1;
+ }
+}
+
+# *sigh*, new versions of svn won't honor -r<rev> without URL@<rev>,
+# (and they won't honor URL@<rev> without -r<rev>, either!)
+sub svn_cmd_checkout {
+ my ($url, $rev, $dir) = @_;
+ my @cmd = ('svn','co', "-r$rev");
+ push @cmd, '--ignore-externals' unless $_no_ignore_ext;
+ $url .= "\@$rev" if $_svn_co_url_revs;
+ sys(@cmd, $url, $dir);
+}
+
+sub check_upgrade_needed {
+ my $old = eval {
+ my $pid = open my $child, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ close STDERR;
+ exec('git-rev-parse',"$GIT_SVN-HEAD") or croak $?;
+ }
+ my @ret = (<$child>);
+ close $child or croak $?;
+ die $? if $?; # just in case close didn't error out
+ return wantarray ? @ret : join('',@ret);
+ };
+ return unless $old;
+ my $head = eval { safe_qx('git-rev-parse',"refs/remotes/$GIT_SVN") };
+ if ($@ || !$head) {
+ print STDERR "Please run: $0 rebuild --upgrade\n";
+ exit 1;
+ }
+}
+
+# fills %tree_map with a reverse mapping of trees to commits. Useful
+# for finding parents to commit on.
+sub map_tree_joins {
+ foreach my $br (@_branch_from) {
+ my $pid = open my $pipe, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ exec(qw(git-rev-list --pretty=raw), $br) or croak $?;
+ }
+ while (<$pipe>) {
+ if (/^commit ($sha1)$/o) {
+ my $commit = $1;
+ my ($tree) = (<$pipe> =~ /^tree ($sha1)$/o);
+ unless (defined $tree) {
+ die "Failed to parse commit $commit\n";
+ }
+ push @{$tree_map{$tree}}, $commit;
+ }
+ }
+ close $pipe or croak $?;
+ }
+}
+
+# '<svn username> = real-name <email address>' mapping based on git-svnimport:
+sub load_authors {
+ open my $authors, '<', $_authors or die "Can't open $_authors $!\n";
+ while (<$authors>) {
+ chomp;
+ next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
+ my ($user, $name, $email) = ($1, $2, $3);
+ $users{$user} = [$name, $email];
+ }
+ close $authors or croak $!;
+}
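+
+# For illustration only: an authors file parsed by the loop above could
+# contain entries such as the following (both identities are made up):
+#
+#   jrandom = J. Random Hacker <jrandom@example.com>
+#   builder = Build Daemon <builder@example.com>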
+
+__END__
+
+Data structures:
+
+$svn_log hashref (as returned by svn_log_raw)
+{
+ fh => file handle of the log file,
+ state => state of the log file parser (sep/msg/rev/msg_start...)
+}
+
+$log_msg hashref as returned by next_log_entry($svn_log)
+{
+ msg => 'whitespace-formatted log entry
+', # trailing newline is preserved
+ revision => '8', # integer
+ date => '2004-02-24T17:01:44.108345Z', # commit date
+ author => 'committer name'
+};
+
+
+@mods = array of diff-index line hashes, each element represents one line
+ of diff-index output
+
+diff-index line ($m hash)
+{
+ mode_a => first column of diff-index output, no leading ':',
+ mode_b => second column of diff-index output,
+ sha1_b => sha1sum of the final blob,
+ chg => change type [MCRADT],
+ file_a => original file name of a file (iff chg is 'C' or 'R')
+ file_b => new/current file name of a file (any chg)
+}
+;
diff --git a/contrib/git-svn/git-svn.txt b/contrib/git-svn/git-svn.txt
new file mode 100644
index 0000000..e18fcaf
--- /dev/null
+++ b/contrib/git-svn/git-svn.txt
@@ -0,0 +1,296 @@
+git-svn(1)
+==========
+
+NAME
+----
+git-svn - bidirectional operation between a single Subversion branch and git
+
+SYNOPSIS
+--------
+'git-svn' <command> [options] [arguments]
+
+DESCRIPTION
+-----------
+git-svn is a simple conduit for changesets between a single Subversion
+branch and git.
+
+git-svn is not to be confused with git-svnimport. They were designed
+with very different goals in mind.
+
+git-svn is designed for an individual developer who wants a
+bidirectional flow of changesets between a single branch in Subversion
+and an arbitrary number of branches in git. git-svnimport is designed
+for read-only operation on repositories that match a particular layout
+(albeit the one recommended by the SVN developers).
+
+For importing svn, git-svnimport is potentially more powerful when
+operating on repositories organized under the recommended
+trunk/branch/tags structure, and should be faster, too.
+
+git-svn mostly ignores the very limited view of branching that
+Subversion has. This allows git-svn to be much easier to use,
+especially on repositories that are not organized in a manner that
+git-svnimport is designed for.
+
+COMMANDS
+--------
+init::
+ Creates an empty git repository with additional metadata
+ directories for git-svn. The SVN_URL must be specified
+ at this point.
+
+fetch::
+ Fetch unfetched revisions from the SVN_URL we are tracking.
+ refs/remotes/git-svn will be updated to the latest revision.
+
+ Note: You should never attempt to modify the remotes/git-svn branch
+ outside of git-svn. Instead, create a branch from remotes/git-svn
+ and work on that branch. Use the 'commit' command (see below)
+ to write git commits back to remotes/git-svn.
+
+commit::
+ Commit specified commit or tree objects to SVN. This relies on
+ your imported fetch data being up-to-date. This makes
+ absolutely no attempt to do patching when committing to SVN; it
+ simply overwrites files with those specified in the tree or
+ commit. All merging is assumed to have taken place
+ independently of git-svn functions.
+
+rebuild::
+ Not a part of daily usage, but this is a useful command if
+ you've just cloned a repository (using git-clone) that was
+ tracked with git-svn. Unfortunately, git-clone does not clone
+ git-svn metadata and the svn working tree that git-svn uses for
+ its operations. This rebuilds the metadata so git-svn can
+ resume fetch operations. SVN_URL may be optionally specified if
+ the directory/repository you're tracking has moved or changed
+ protocols.
+
+show-ignore::
+ Recursively finds and lists the svn:ignore property on
+ directories. The output is suitable for appending to
+ the $GIT_DIR/info/exclude file.
+
+OPTIONS
+-------
+-r <ARG>::
+--revision <ARG>::
+ Only used with the 'fetch' command.
+
+ Takes any valid -r<argument> svn would accept and passes it
+ directly to svn. -r<ARG1>:<ARG2> ranges and "{" DATE "}" syntax
+ are also supported; see the svn documentation for more details.
+
+ This can allow you to make partial mirrors when running fetch.
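+
+ For example (the range and date below are placeholders, not taken
+ from any real repository):
+
+ git-svn fetch -r1000:2000
+ git-svn fetch -r'{2006-02-01}:HEAD'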
+
+-::
+--stdin::
+ Only used with the 'commit' command.
+
+ Read a list of commits from stdin and commit them in reverse
+ order. Only the leading sha1 is read from each line, so
+ git-rev-list --pretty=oneline output can be used.
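+
+ A minimal sketch of such an invocation ('my-branch' is only an
+ example branch name):
+
+ git-rev-list --pretty=oneline remotes/git-svn..my-branch | git-svn commit --stdin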
+
+--rmdir::
+ Only used with the 'commit' command.
+
+ Remove directories from the SVN tree if there are no files left
+ behind. SVN can version empty directories, and they are not
+ removed by default if there are no files left in them. git
+ cannot version empty directories. Enabling this flag will make
+ the commit to SVN act like git.
+
+ repo-config key: svn.rmdir
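+
+ As a sketch, one way to set this key permanently for a repository
+ (assuming the stock git-repo-config tool is used):
+
+ git repo-config svn.rmdir true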
+
+-e::
+--edit::
+ Only used with the 'commit' command.
+
+ Edit the commit message before committing to SVN. This is off by
+ default for objects that are commits, and forced on when committing
+ tree objects.
+
+ repo-config key: svn.edit
+
+-l<num>::
+--find-copies-harder::
+ Both of these are only used with the 'commit' command.
+
+ They are both passed directly to git-diff-tree; see
+ git-diff-tree(1) for more information.
+
+ repo-config key: svn.l
+ repo-config key: svn.findcopiesharder
+
+ADVANCED OPTIONS
+----------------
+-b<refname>::
+--branch <refname>::
+ Used with 'fetch' or 'commit'.
+
+ This can be used to join arbitrary git branches to remotes/git-svn
+ on new commits where the tree object is equivalent.
+
+ When used with different GIT_SVN_ID values, tags and branches in
+ SVN can be tracked this way, as can some merges where the heads
+ end up having completely equivalent content. This can even be
+ used to track branches across multiple SVN _repositories_.
+
+ This option may be specified multiple times, once for each
+ branch.
+
+ repo-config key: svn.branch
+
+-i<GIT_SVN_ID>::
+--id <GIT_SVN_ID>::
+ This sets GIT_SVN_ID (instead of using the environment). See
+ the section on "Tracking Multiple Repositories or Branches" for
+ more information on using GIT_SVN_ID.
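+
+ For instance, the following two invocations should be equivalent
+ (this is only a sketch; 'alt' is an arbitrary example ID):
+
+ GIT_SVN_ID=alt git-svn fetch
+ git-svn fetch -i alt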
+
+COMPATIBILITY OPTIONS
+---------------------
+--upgrade::
+ Only used with the 'rebuild' command.
+
+ Run this if you used an old version of git-svn that used
+ "git-svn-HEAD" instead of "remotes/git-svn" as the branch
+ for tracking the remote.
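+
+ In practice the upgrade amounts to a single invocation:
+
+ git-svn rebuild --upgrade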
+
+--no-ignore-externals::
+ Only used with the 'fetch' and 'rebuild' commands.
+
+ By default, git-svn passes --ignore-externals to svn to avoid
+ fetching svn:external trees into git. Pass this flag to enable
+ externals tracking directly via git.
+
+ Versions of svn that do not support --ignore-externals are
+ automatically detected and this flag will be automatically
+ enabled for them.
+
+ Otherwise, do not enable this flag unless you know what you're
+ doing.
+
+ repo-config key: svn.noignoreexternals
+
+Basic Examples
+~~~~~~~~~~~~~~
+
+Tracking and contributing to a Subversion-managed project:
+
+------------------------------------------------------------------------
+# Initialize a tree (like git init-db):
+ git-svn init http://svn.foo.org/project/trunk
+# Fetch remote revisions:
+ git-svn fetch
+# Create your own branch to hack on:
+ git checkout -b my-branch remotes/git-svn
+# Commit only the git commits you want to SVN:
+ git-svn commit <tree-ish> [<tree-ish_2> ...]
+# Commit all the git commits from my-branch that don't exist in SVN:
+ git-svn commit remotes/git-svn..my-branch
+# Something is committed to SVN, pull the latest into your branch:
+ git-svn fetch && git pull . remotes/git-svn
+# Append svn:ignore settings to the default git exclude file:
+ git-svn show-ignore >> .git/info/exclude
+------------------------------------------------------------------------
+
+DESIGN PHILOSOPHY
+-----------------
+Merge tracking in Subversion is lacking and doing branched development
+with Subversion is cumbersome as a result. git-svn completely forgoes
+any automated merge/branch tracking on the Subversion side and leaves it
+entirely up to the user on the git side. It's simply not worth it to do
+a useful translation when the original signal is weak.
+
+TRACKING MULTIPLE REPOSITORIES OR BRANCHES
+------------------------------------------
+This is for advanced users, most users should ignore this section.
+
+Because git-svn does not care about relationships between different
+branches or directories in a Subversion repository, git-svn has a simple
+hack to allow it to track an arbitrary number of related _or_ unrelated
+SVN repositories via one git repository. Simply set the GIT_SVN_ID
+environment variable to a name other than "git-svn" (the default)
+and git-svn will ignore the contents of the $GIT_DIR/git-svn directory
+and instead do all of its work in $GIT_DIR/$GIT_SVN_ID for that
+invocation. The interface branch will be remotes/$GIT_SVN_ID, instead of
+remotes/git-svn. Any remotes/$GIT_SVN_ID branch should never be modified
+by the user outside of git-svn commands.
+
+ADDITIONAL FETCH ARGUMENTS
+--------------------------
+This is for advanced users, most users should ignore this section.
+
+Unfetched SVN revisions may be imported as children of existing commits
+by specifying additional arguments to 'fetch'. Additional parents may
+optionally be specified in the form of sha1 hex sums at the
+command-line. Unfetched SVN revisions may also be tied to particular
+git commits with the following syntax:
+
+ svn_revision_number=git_commit_sha1
+
+This allows you to tie unfetched SVN revision 375 to your current HEAD::
+
+ `git-svn fetch 375=$(git-rev-parse HEAD)`
+
+Advanced Example: Tracking a Reorganized Repository
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+If you're tracking a directory that has moved, or otherwise been
+branched or tagged off of another directory in the repository and you
+care about the full history of the project, then you can read this
+section.
+
+This is how Yann Dirson tracked the trunk of the ufoai directory when
+the /trunk directory of his repository was moved to /ufoai/trunk and
+he needed to continue tracking /ufoai/trunk where /trunk left off.
+
+------------------------------------------------------------------------
+ # This log message shows when the repository was reorganized:
+ r166 | ydirson | 2006-03-02 01:36:55 +0100 (Thu, 02 Mar 2006) | 1 line
+ Changed paths:
+ D /trunk
+ A /ufoai/trunk (from /trunk:165)
+
+ # First we start tracking the old revisions:
+ GIT_SVN_ID=git-oldsvn git-svn init \
+ https://svn.sourceforge.net/svnroot/ufoai/trunk
+ GIT_SVN_ID=git-oldsvn git-svn fetch -r1:165
+
+ # And now, we continue tracking the new revisions:
+ GIT_SVN_ID=git-newsvn git-svn init \
+ https://svn.sourceforge.net/svnroot/ufoai/ufoai/trunk
+ GIT_SVN_ID=git-newsvn git-svn fetch \
+ 166=`git-rev-parse refs/remotes/git-oldsvn`
+------------------------------------------------------------------------
+
+BUGS
+----
+If somebody commits a conflicting changeset to SVN at a bad moment
+(right before you commit) causing a conflict and your commit to fail,
+your svn working tree ($GIT_DIR/git-svn/tree) may be dirtied. The
+easiest thing to do is probably just to rm -rf $GIT_DIR/git-svn/tree and
+run 'rebuild'.
+
+We ignore all SVN properties except svn:executable. They are too
+difficult to map since we rely heavily on git write-tree being _exactly_
+the same on both the SVN and git working trees, and I prefer not to
+clutter working trees with metadata files.
+
+svn:keywords can't be ignored in Subversion (at least I don't know of
+a way to ignore them).
+
+Renamed and copied directories are not detected by git and hence not
+tracked when committing to SVN. I do not plan on adding support for
+this as it's quite difficult and time-consuming to get working for all
+the possible corner cases (git doesn't do it, either). Renamed and
+copied files are fully supported if they're similar enough for git to
+detect them.
+
+Author
+------
+Written by Eric Wong <normalperson@yhbt.net>.
+
+Documentation
+-------------
+Written by Eric Wong <normalperson@yhbt.net>.
diff --git a/contrib/git-svn/t/t0000-contrib-git-svn.sh b/contrib/git-svn/t/t0000-contrib-git-svn.sh
new file mode 100644
index 0000000..80ad357
--- /dev/null
+++ b/contrib/git-svn/t/t0000-contrib-git-svn.sh
@@ -0,0 +1,216 @@
+#!/bin/sh
+#
+# Copyright (c) 2006 Eric Wong
+#
+
+
+PATH=$PWD/../:$PATH
+test_description='git-svn tests'
+if test -d ../../../t
+then
+ cd ../../../t
+else
+ echo "Must be run in contrib/git-svn/t" >&2
+ exit 1
+fi
+
+. ./test-lib.sh
+
+GIT_DIR=$PWD/.git
+GIT_SVN_DIR=$GIT_DIR/git-svn
+SVN_TREE=$GIT_SVN_DIR/tree
+
+svnadmin >/dev/null 2>&1
+if test $? != 1
+then
+ test_expect_success 'skipping contrib/git-svn test' :
+ test_done
+ exit
+fi
+
+svn >/dev/null 2>&1
+if test $? != 1
+then
+ test_expect_success 'skipping contrib/git-svn test' :
+ test_done
+ exit
+fi
+
+svnrepo=$PWD/svnrepo
+
+set -e
+
+svnadmin create $svnrepo
+svnrepo="file://$svnrepo/test-git-svn"
+
+mkdir import
+
+cd import
+
+echo foo > foo
+ln -s foo foo.link
+mkdir -p dir/a/b/c/d/e
+echo 'deep dir' > dir/a/b/c/d/e/file
+mkdir -p bar
+echo 'zzz' > bar/zzz
+echo '#!/bin/sh' > exec.sh
+chmod +x exec.sh
+svn import -m 'import for git-svn' . $svnrepo >/dev/null
+
+cd ..
+
+rm -rf import
+
+test_expect_success \
+ 'initialize git-svn' \
+ "git-svn init $svnrepo"
+
+test_expect_success \
+ 'import an SVN revision into git' \
+ 'git-svn fetch'
+
+
+name='try a deep --rmdir with a commit'
+git checkout -b mybranch remotes/git-svn
+mv dir/a/b/c/d/e/file dir/file
+cp dir/file file
+git update-index --add --remove dir/a/b/c/d/e/file dir/file file
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch &&
+ test -d $SVN_TREE/dir && test ! -d $SVN_TREE/dir/a"
+
+
+name='detect node change from file to directory #1'
+mkdir dir/new_file
+mv dir/file dir/new_file/file
+mv dir/new_file dir/file
+git update-index --remove dir/file
+git update-index --add dir/file/file
+git commit -m "$name"
+
+test_expect_code 1 "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch' \
+ || true
+
+
+name='detect node change from directory to file #1'
+rm -rf dir $GIT_DIR/index
+git checkout -b mybranch2 remotes/git-svn
+mv bar/zzz zzz
+rm -rf bar
+mv zzz bar
+git update-index --remove -- bar/zzz
+git update-index --add -- bar
+git commit -m "$name"
+
+test_expect_code 1 "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch2' \
+ || true
+
+
+name='detect node change from file to directory #2'
+rm -f $GIT_DIR/index
+git checkout -b mybranch3 remotes/git-svn
+rm bar/zzz
+git-update-index --remove bar/zzz
+mkdir bar/zzz
+echo yyy > bar/zzz/yyy
+git-update-index --add bar/zzz/yyy
+git commit -m "$name"
+
+test_expect_code 1 "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch3' \
+ || true
+
+
+name='detect node change from directory to file #2'
+rm -f $GIT_DIR/index
+git checkout -b mybranch4 remotes/git-svn
+rm -rf dir
+git update-index --remove -- dir/file
+touch dir
+echo asdf > dir
+git update-index --add -- dir
+git commit -m "$name"
+
+test_expect_code 1 "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch4' \
+ || true
+
+
+name='remove executable bit from a file'
+rm -f $GIT_DIR/index
+git checkout -b mybranch5 remotes/git-svn
+chmod -x exec.sh
+git update-index exec.sh
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ test ! -x $SVN_TREE/exec.sh"
+
+
+name='add executable bit back to file'
+chmod +x exec.sh
+git update-index exec.sh
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ test -x $SVN_TREE/exec.sh"
+
+
+
+name='executable file becomes a symlink to bar/zzz (file)'
+rm exec.sh
+ln -s bar/zzz exec.sh
+git update-index exec.sh
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ test -L $SVN_TREE/exec.sh"
+
+
+
+name='new symlink is added to a file that was also just made executable'
+chmod +x bar/zzz
+ln -s bar/zzz exec-2.sh
+git update-index --add bar/zzz exec-2.sh
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ test -x $SVN_TREE/bar/zzz &&
+ test -L $SVN_TREE/exec-2.sh"
+
+
+
+name='modify a symlink to become a file'
+git help > help || true
+rm exec-2.sh
+cp help exec-2.sh
+git update-index exec-2.sh
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ test -f $SVN_TREE/exec-2.sh &&
+ test ! -L $SVN_TREE/exec-2.sh &&
+ diff -u help $SVN_TREE/exec-2.sh"
+
+
+
+name='test fetch functionality (svn => git) with alternate GIT_SVN_ID'
+GIT_SVN_ID=alt
+export GIT_SVN_ID
+test_expect_success "$name" \
+ "git-svn init $svnrepo && git-svn fetch &&
+ git-rev-list --pretty=raw remotes/git-svn | grep ^tree | uniq > a &&
+ git-rev-list --pretty=raw remotes/alt | grep ^tree | uniq > b &&
+ diff -u a b"
+
+test_done
+
diff --git a/contrib/gitview/gitview b/contrib/gitview/gitview
new file mode 100755
index 0000000..781badb
--- /dev/null
+++ b/contrib/gitview/gitview
@@ -0,0 +1,1000 @@
+#! /usr/bin/env python
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+""" gitview
+GUI browser for git repository
+This program is based on bzrk by Scott James Remnant <scott@ubuntu.com>
+"""
+__copyright__ = "Copyright (C) 2006 Hewlett-Packard Development Company, L.P."
+__author__ = "Aneesh Kumar K.V <aneesh.kumar@hp.com>"
+
+
+import sys
+import os
+import gtk
+import pygtk
+import pango
+import re
+import time
+import gobject
+import cairo
+import math
+import string
+
+try:
+ import gtksourceview
+ have_gtksourceview = True
+except ImportError:
+ have_gtksourceview = False
+ print "Running without gtksourceview module"
+
+re_ident = re.compile('(author|committer) (?P<ident>.*) (?P<epoch>\d+) (?P<tz>[+-]\d{4})')
+
+def list_to_string(args, skip):
+ count = len(args)
+ i = skip
+ str_arg=" "
+ while (i < count ):
+ str_arg = str_arg + args[i]
+ str_arg = str_arg + " "
+ i = i+1
+
+ return str_arg
+
+def show_date(epoch, tz):
+ secs = float(epoch)
+ tzsecs = float(tz[1:3]) * 3600
+ tzsecs += float(tz[3:5]) * 60
+ if (tz[0] == "+"):
+ secs += tzsecs
+ else:
+ secs -= tzsecs
+
+ return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(secs))
+
+
+class CellRendererGraph(gtk.GenericCellRenderer):
+ """Cell renderer for directed graph.
+
+ This module contains the implementation of a custom GtkCellRenderer that
+ draws part of the directed graph based on the lines suggested by the code
+ in graph.py.
+
+ Because we're shiny, we use Cairo to do this, and because we're naughty
+ we cheat and draw over the bits of the TreeViewColumn that are supposed to
+ just be for the background.
+
+ Properties:
+ node (column, colour, [ names ]) tuple to draw revision node,
+ in_lines (start, end, colour) tuple list to draw inward lines,
+ out_lines (start, end, colour) tuple list to draw outward lines.
+ """
+
+ __gproperties__ = {
+ "node": ( gobject.TYPE_PYOBJECT, "node",
+ "revision node instruction",
+ gobject.PARAM_WRITABLE
+ ),
+ "in-lines": ( gobject.TYPE_PYOBJECT, "in-lines",
+ "instructions to draw lines into the cell",
+ gobject.PARAM_WRITABLE
+ ),
+ "out-lines": ( gobject.TYPE_PYOBJECT, "out-lines",
+ "instructions to draw lines out of the cell",
+ gobject.PARAM_WRITABLE
+ ),
+ }
+
+ def do_set_property(self, property, value):
+ """Set properties from GObject properties."""
+ if property.name == "node":
+ self.node = value
+ elif property.name == "in-lines":
+ self.in_lines = value
+ elif property.name == "out-lines":
+ self.out_lines = value
+ else:
+ raise AttributeError, "no such property: '%s'" % property.name
+
+ def box_size(self, widget):
+ """Calculate box size based on widget's font.
+
+ Cache this as it's probably expensive to get. It ensures that we
+ draw the graph at least as large as the text.
+ """
+ try:
+ return self._box_size
+ except AttributeError:
+ pango_ctx = widget.get_pango_context()
+ font_desc = widget.get_style().font_desc
+ metrics = pango_ctx.get_metrics(font_desc)
+
+ ascent = pango.PIXELS(metrics.get_ascent())
+ descent = pango.PIXELS(metrics.get_descent())
+
+ self._box_size = ascent + descent + 6
+ return self._box_size
+
+ def set_colour(self, ctx, colour, bg, fg):
+ """Set the context source colour.
+
+ Picks a distinct colour based on an internal wheel; the bg
+ parameter provides the value that should be assigned to the 'zero'
+ colours and the fg parameter provides the multiplier that should be
+ applied to the foreground colours.
+ """
+ colours = [
+ ( 1.0, 0.0, 0.0 ),
+ ( 1.0, 1.0, 0.0 ),
+ ( 0.0, 1.0, 0.0 ),
+ ( 0.0, 1.0, 1.0 ),
+ ( 0.0, 0.0, 1.0 ),
+ ( 1.0, 0.0, 1.0 ),
+ ]
+
+ colour %= len(colours)
+ red = (colours[colour][0] * fg) or bg
+ green = (colours[colour][1] * fg) or bg
+ blue = (colours[colour][2] * fg) or bg
+
+ ctx.set_source_rgb(red, green, blue)
+
+ def on_get_size(self, widget, cell_area):
+ """Return the size we need for this cell.
+
+ Each cell is drawn individually and is only as wide as it needs
+ to be; we let the TreeViewColumn take care of making them all
+ line up.
+ """
+ box_size = self.box_size(widget)
+
+ cols = self.node[0]
+ for start, end, colour in self.in_lines + self.out_lines:
+ cols = int(max(cols, start, end))
+
+ (column, colour, names) = self.node
+ names_len = 0
+ if (len(names) != 0):
+ for item in names:
+ names_len += len(item)
+
+ width = box_size * (cols + 1 ) + names_len
+ height = box_size
+
+ # FIXME I have no idea how to use cell_area properly
+ return (0, 0, width, height)
+
+ def on_render(self, window, widget, bg_area, cell_area, exp_area, flags):
+ """Render an individual cell.
+
+ Draws the cell contents using cairo, taking care to clip what we
+ do to within the background area so we don't draw over other cells.
+ Note that we're a bit naughty there and should really be drawing
+ in the cell_area (or even the exposed area), but we explicitly don't
+ want any gutter.
+
+ We try and be a little clever, if the line we need to draw is going
+ to cross other columns we actually draw it as in the .---' style
+ instead of a pure diagonal ... this reduces confusion by an
+ incredible amount.
+ """
+ ctx = window.cairo_create()
+ ctx.rectangle(bg_area.x, bg_area.y, bg_area.width, bg_area.height)
+ ctx.clip()
+
+ box_size = self.box_size(widget)
+
+ ctx.set_line_width(box_size / 8)
+ ctx.set_line_cap(cairo.LINE_CAP_SQUARE)
+
+ # Draw lines into the cell
+ for start, end, colour in self.in_lines:
+ ctx.move_to(cell_area.x + box_size * start + box_size / 2,
+ bg_area.y - bg_area.height / 2)
+
+ if start - end > 1:
+ ctx.line_to(cell_area.x + box_size * start, bg_area.y)
+ ctx.line_to(cell_area.x + box_size * end + box_size, bg_area.y)
+ elif start - end < -1:
+ ctx.line_to(cell_area.x + box_size * start + box_size,
+ bg_area.y)
+ ctx.line_to(cell_area.x + box_size * end, bg_area.y)
+
+ ctx.line_to(cell_area.x + box_size * end + box_size / 2,
+ bg_area.y + bg_area.height / 2)
+
+ self.set_colour(ctx, colour, 0.0, 0.65)
+ ctx.stroke()
+
+ # Draw lines out of the cell
+ for start, end, colour in self.out_lines:
+ ctx.move_to(cell_area.x + box_size * start + box_size / 2,
+ bg_area.y + bg_area.height / 2)
+
+ if start - end > 1:
+ ctx.line_to(cell_area.x + box_size * start,
+ bg_area.y + bg_area.height)
+ ctx.line_to(cell_area.x + box_size * end + box_size,
+ bg_area.y + bg_area.height)
+ elif start - end < -1:
+ ctx.line_to(cell_area.x + box_size * start + box_size,
+ bg_area.y + bg_area.height)
+ ctx.line_to(cell_area.x + box_size * end,
+ bg_area.y + bg_area.height)
+
+ ctx.line_to(cell_area.x + box_size * end + box_size / 2,
+ bg_area.y + bg_area.height / 2 + bg_area.height)
+
+ self.set_colour(ctx, colour, 0.0, 0.65)
+ ctx.stroke()
+
+ # Draw the revision node in the right column
+ (column, colour, names) = self.node
+ ctx.arc(cell_area.x + box_size * column + box_size / 2,
+ cell_area.y + cell_area.height / 2,
+ box_size / 4, 0, 2 * math.pi)
+
+
+ self.set_colour(ctx, colour, 0.0, 0.5)
+ ctx.stroke_preserve()
+
+ self.set_colour(ctx, colour, 0.5, 1.0)
+ ctx.fill_preserve()
+
+ if (len(names) != 0):
+ name = " "
+ for item in names:
+ name = name + item + " "
+
+ ctx.set_font_size(13)
+ if (flags & 1):
+ self.set_colour(ctx, colour, 0.5, 1.0)
+ else:
+ self.set_colour(ctx, colour, 0.0, 0.5)
+ ctx.show_text(name)
+
+class Commit:
+ """ This represent a commit object obtained after parsing the git-rev-list
+ output """
+
+ children_sha1 = {}
+
+ def __init__(self, commit_lines):
+ self.message = ""
+ self.author = ""
+ self.date = ""
+ self.committer = ""
+ self.commit_date = ""
+ self.commit_sha1 = ""
+ self.parent_sha1 = [ ]
+ self.parse_commit(commit_lines)
+
+
+ def parse_commit(self, commit_lines):
+
+ # The first line contains the commit sha1 followed by its parent sha1s
+ line = string.strip(commit_lines[0])
+ sha1 = re.split(" ", line)
+ self.commit_sha1 = sha1[0]
+ self.parent_sha1 = sha1[1:]
+
+ #build the child list
+ for parent_id in self.parent_sha1:
+ try:
+ Commit.children_sha1[parent_id].append(self.commit_sha1)
+ except KeyError:
+ Commit.children_sha1[parent_id] = [self.commit_sha1]
+
+ # If we don't have a parent
+ if (len(self.parent_sha1) == 0):
+ self.parent_sha1 = [0]
+
+ for line in commit_lines[1:]:
+ m = re.match("^ ", line)
+ if (m != None):
+ # First line of the commit message used for short log
+ if self.message == "":
+ self.message = string.strip(line)
+ continue
+
+ m = re.match("tree", line)
+ if (m != None):
+ continue
+
+ m = re.match("parent", line)
+ if (m != None):
+ continue
+
+ m = re_ident.match(line)
+ if (m != None):
+ date = show_date(m.group('epoch'), m.group('tz'))
+ if m.group(1) == "author":
+ self.author = m.group('ident')
+ self.date = date
+ elif m.group(1) == "committer":
+ self.committer = m.group('ident')
+ self.commit_date = date
+
+ continue
+
+ def get_message(self, with_diff=0):
+ if (with_diff == 1):
+ message = self.diff_tree()
+ else:
+ fp = os.popen("git cat-file commit " + self.commit_sha1)
+ message = fp.read()
+ fp.close()
+
+ return message
+
+ def diff_tree(self):
+ fp = os.popen("git diff-tree --pretty --cc -v -p --always " + self.commit_sha1)
+ diff = fp.read()
+ fp.close()
+ return diff
+
+class DiffWindow:
+ """Diff window.
+ This object represents and manages a single window containing the
+ differences between two revisions on a branch.
+ """
+
+ def __init__(self):
+ self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+ self.window.set_border_width(0)
+ self.window.set_title("Git repository browser diff window")
+
+ # Use two thirds of the screen by default
+ screen = self.window.get_screen()
+ monitor = screen.get_monitor_geometry(0)
+ width = int(monitor.width * 0.66)
+ height = int(monitor.height * 0.66)
+ self.window.set_default_size(width, height)
+
+ self.construct()
+
+ def construct(self):
+ """Construct the window contents."""
+ vbox = gtk.VBox()
+ self.window.add(vbox)
+ vbox.show()
+
+ menu_bar = gtk.MenuBar()
+ save_menu = gtk.ImageMenuItem(gtk.STOCK_SAVE)
+ save_menu.connect("activate", self.save_menu_response, "save")
+ save_menu.show()
+ menu_bar.append(save_menu)
+ vbox.pack_start(menu_bar, expand=False, fill=True)
+ menu_bar.show()
+
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ vbox.pack_start(scrollwin, expand=True, fill=True)
+ scrollwin.show()
+
+ if have_gtksourceview:
+ self.buffer = gtksourceview.SourceBuffer()
+ slm = gtksourceview.SourceLanguagesManager()
+ gsl = slm.get_language_from_mime_type("text/x-patch")
+ self.buffer.set_highlight(True)
+ self.buffer.set_language(gsl)
+ sourceview = gtksourceview.SourceView(self.buffer)
+ else:
+ self.buffer = gtk.TextBuffer()
+ sourceview = gtk.TextView(self.buffer)
+
+ sourceview.set_editable(False)
+ sourceview.modify_font(pango.FontDescription("Monospace"))
+ scrollwin.add(sourceview)
+ sourceview.show()
+
+
+ def set_diff(self, commit_sha1, parent_sha1, encoding):
+ """Set the differences showed by this window.
+ Compares the two trees and populates the window with the
+ differences.
+ """
+ # Diff with the first commit or the last commit shows nothing
+ if (commit_sha1 == 0 or parent_sha1 == 0 ):
+ return
+
+ fp = os.popen("git diff-tree -p " + parent_sha1 + " " + commit_sha1)
+ self.buffer.set_text(unicode(fp.read(), encoding).encode('utf-8'))
+ fp.close()
+ self.window.show()
+
+ def save_menu_response(self, widget, string):
+ dialog = gtk.FileChooserDialog("Save..", None, gtk.FILE_CHOOSER_ACTION_SAVE,
+ (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
+ gtk.STOCK_SAVE, gtk.RESPONSE_OK))
+ dialog.set_default_response(gtk.RESPONSE_OK)
+ response = dialog.run()
+ if response == gtk.RESPONSE_OK:
+ patch_buffer = self.buffer.get_text(self.buffer.get_start_iter(),
+ self.buffer.get_end_iter())
+ fp = open(dialog.get_filename(), "w")
+ fp.write(patch_buffer)
+ fp.close()
+ dialog.destroy()
+
+class GitView:
+ """ This is the main class
+ """
+ version = "0.7"
+
+ def __init__(self, with_diff=0):
+ self.with_diff = with_diff
+ self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+ self.window.set_border_width(0)
+ self.window.set_title("Git repository browser")
+
+ self.get_encoding()
+ self.get_bt_sha1()
+
+ # Use three-quarters of the screen by default
+ screen = self.window.get_screen()
+ monitor = screen.get_monitor_geometry(0)
+ width = int(monitor.width * 0.75)
+ height = int(monitor.height * 0.75)
+ self.window.set_default_size(width, height)
+
+ # FIXME AndyFitz!
+ icon = self.window.render_icon(gtk.STOCK_INDEX, gtk.ICON_SIZE_BUTTON)
+ self.window.set_icon(icon)
+
+ self.accel_group = gtk.AccelGroup()
+ self.window.add_accel_group(self.accel_group)
+
+ self.construct()
+
+ def get_bt_sha1(self):
+ """ Update the bt_sha1 dictionary with the
+ respective sha1 details """
+
+ self.bt_sha1 = { }
+ ls_remote = re.compile('^(.{40})\trefs/([^^]+)(?:\\^(..))?$');
+ fp = os.popen('git ls-remote "${GIT_DIR-.git}"')
+ while 1:
+ line = string.strip(fp.readline())
+ if line == '':
+ break
+ m = ls_remote.match(line)
+ if not m:
+ continue
+ (sha1, name) = (m.group(1), m.group(2))
+ if not self.bt_sha1.has_key(sha1):
+ self.bt_sha1[sha1] = []
+ self.bt_sha1[sha1].append(name)
+ fp.close()
+
+ def get_encoding(self):
+ fp = os.popen("git repo-config --get i18n.commitencoding")
+ self.encoding=string.strip(fp.readline())
+ fp.close()
+ if (self.encoding == ""):
+ self.encoding = "utf-8"
+
+
+ def construct(self):
+ """Construct the window contents."""
+ vbox = gtk.VBox()
+ paned = gtk.VPaned()
+ paned.pack1(self.construct_top(), resize=False, shrink=True)
+ paned.pack2(self.construct_bottom(), resize=False, shrink=True)
+ menu_bar = gtk.MenuBar()
+ menu_bar.set_pack_direction(gtk.PACK_DIRECTION_RTL)
+ help_menu = gtk.MenuItem("Help")
+ menu = gtk.Menu()
+ about_menu = gtk.MenuItem("About")
+ menu.append(about_menu)
+ about_menu.connect("activate", self.about_menu_response, "about")
+ about_menu.show()
+ help_menu.set_submenu(menu)
+ help_menu.show()
+ menu_bar.append(help_menu)
+ menu_bar.show()
+ vbox.pack_start(menu_bar, expand=False, fill=True)
+ vbox.pack_start(paned, expand=True, fill=True)
+ self.window.add(vbox)
+ paned.show()
+ vbox.show()
+
+
+ def construct_top(self):
+ """Construct the top-half of the window."""
+ vbox = gtk.VBox(spacing=6)
+ vbox.set_border_width(12)
+ vbox.show()
+
+
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ vbox.pack_start(scrollwin, expand=True, fill=True)
+ scrollwin.show()
+
+ self.treeview = gtk.TreeView()
+ self.treeview.set_rules_hint(True)
+ self.treeview.set_search_column(4)
+ self.treeview.connect("cursor-changed", self._treeview_cursor_cb)
+ scrollwin.add(self.treeview)
+ self.treeview.show()
+
+ cell = CellRendererGraph()
+ column = gtk.TreeViewColumn()
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "node", 1)
+ column.add_attribute(cell, "in-lines", 2)
+ column.add_attribute(cell, "out-lines", 3)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 65)
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Message")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 4)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("width-chars", 40)
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Author")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 5)
+ self.treeview.append_column(column)
+
+ cell = gtk.CellRendererText()
+ cell.set_property("ellipsize", pango.ELLIPSIZE_END)
+ column = gtk.TreeViewColumn("Date")
+ column.set_resizable(True)
+ column.pack_start(cell, expand=True)
+ column.add_attribute(cell, "text", 6)
+ self.treeview.append_column(column)
+
+ return vbox
+
+ def about_menu_response(self, widget, string):
+ dialog = gtk.AboutDialog()
+ dialog.set_name("Gitview")
+ dialog.set_version(GitView.version)
+ dialog.set_authors(["Aneesh Kumar K.V <aneesh.kumar@hp.com>"])
+ dialog.set_website("http://www.kernel.org/pub/software/scm/git/")
+ dialog.set_copyright("Use and distribute under the terms of the GNU General Public License")
+ dialog.set_wrap_license(True)
+ dialog.run()
+ dialog.destroy()
+
+
+ def construct_bottom(self):
+ """Construct the bottom half of the window."""
+ vbox = gtk.VBox(False, spacing=6)
+ vbox.set_border_width(12)
+ (width, height) = self.window.get_size()
+ vbox.set_size_request(width, int(height / 2.5))
+ vbox.show()
+
+ self.table = gtk.Table(rows=4, columns=4)
+ self.table.set_row_spacings(6)
+ self.table.set_col_spacings(6)
+ vbox.pack_start(self.table, expand=False, fill=True)
+ self.table.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Revision:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 0, 1, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ self.revid_label = gtk.Label()
+ self.revid_label.set_selectable(True)
+ align.add(self.revid_label)
+ self.table.attach(align, 1, 2, 0, 1, gtk.EXPAND | gtk.FILL, gtk.FILL)
+ self.revid_label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Committer:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 1, 2, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ self.committer_label = gtk.Label()
+ self.committer_label.set_selectable(True)
+ align.add(self.committer_label)
+ self.table.attach(align, 1, 2, 1, 2, gtk.EXPAND | gtk.FILL, gtk.FILL)
+ self.committer_label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Timestamp:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 2, 3, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ self.timestamp_label = gtk.Label()
+ self.timestamp_label.set_selectable(True)
+ align.add(self.timestamp_label)
+ self.table.attach(align, 1, 2, 2, 3, gtk.EXPAND | gtk.FILL, gtk.FILL)
+ self.timestamp_label.show()
+ align.show()
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Parents:</b>")
+ align.add(label)
+ self.table.attach(align, 0, 1, 3, 4, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+ self.parents_widgets = []
+
+ align = gtk.Alignment(0.0, 0.5)
+ label = gtk.Label()
+ label.set_markup("<b>Children:</b>")
+ align.add(label)
+ self.table.attach(align, 2, 3, 3, 4, gtk.FILL, gtk.FILL)
+ label.show()
+ align.show()
+ self.children_widgets = []
+
+ scrollwin = gtk.ScrolledWindow()
+ scrollwin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ scrollwin.set_shadow_type(gtk.SHADOW_IN)
+ vbox.pack_start(scrollwin, expand=True, fill=True)
+ scrollwin.show()
+
+ if have_gtksourceview:
+ self.message_buffer = gtksourceview.SourceBuffer()
+ slm = gtksourceview.SourceLanguagesManager()
+ gsl = slm.get_language_from_mime_type("text/x-patch")
+ self.message_buffer.set_highlight(True)
+ self.message_buffer.set_language(gsl)
+ sourceview = gtksourceview.SourceView(self.message_buffer)
+ else:
+ self.message_buffer = gtk.TextBuffer()
+ sourceview = gtk.TextView(self.message_buffer)
+
+ sourceview.set_editable(False)
+ sourceview.modify_font(pango.FontDescription("Monospace"))
+ scrollwin.add(sourceview)
+ sourceview.show()
+
+ return vbox
+
+ def _treeview_cursor_cb(self, *args):
+ """Callback for when the treeview cursor changes."""
+ (path, col) = self.treeview.get_cursor()
+ commit = self.model[path][0]
+
+ if commit.committer is not None:
+ committer = commit.committer
+ timestamp = commit.commit_date
+ message = commit.get_message(self.with_diff)
+ revid_label = commit.commit_sha1
+ else:
+ committer = ""
+ timestamp = ""
+ message = ""
+ revid_label = ""
+
+ self.revid_label.set_text(revid_label)
+ self.committer_label.set_text(committer)
+ self.timestamp_label.set_text(timestamp)
+ self.message_buffer.set_text(unicode(message, self.encoding).encode('utf-8'))
+
+ for widget in self.parents_widgets:
+ self.table.remove(widget)
+
+ self.parents_widgets = []
+ self.table.resize(4 + len(commit.parent_sha1) - 1, 4)
+ for idx, parent_id in enumerate(commit.parent_sha1):
+ self.table.set_row_spacing(idx + 3, 0)
+
+ align = gtk.Alignment(0.0, 0.0)
+ self.parents_widgets.append(align)
+ self.table.attach(align, 1, 2, idx + 3, idx + 4,
+ gtk.EXPAND | gtk.FILL, gtk.FILL)
+ align.show()
+
+ hbox = gtk.HBox(False, 0)
+ align.add(hbox)
+ hbox.show()
+
+ label = gtk.Label(parent_id)
+ label.set_selectable(True)
+ hbox.pack_start(label, expand=False, fill=True)
+ label.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_JUMP_TO, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.connect("clicked", self._go_clicked_cb, parent_id)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_FIND, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.set_sensitive(True)
+ button.connect("clicked", self._show_clicked_cb,
+ commit.commit_sha1, parent_id, self.encoding)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ # Populate with child details
+ for widget in self.children_widgets:
+ self.table.remove(widget)
+
+ self.children_widgets = []
+ try:
+ child_sha1 = Commit.children_sha1[commit.commit_sha1]
+ except KeyError:
+ # We don't have any children
+ child_sha1 = [ 0 ]
+
+ if ( len(child_sha1) > len(commit.parent_sha1)):
+ self.table.resize(4 + len(child_sha1) - 1, 4)
+
+ for idx, child_id in enumerate(child_sha1):
+ self.table.set_row_spacing(idx + 3, 0)
+
+ align = gtk.Alignment(0.0, 0.0)
+ self.children_widgets.append(align)
+ self.table.attach(align, 3, 4, idx + 3, idx + 4,
+ gtk.EXPAND | gtk.FILL, gtk.FILL)
+ align.show()
+
+ hbox = gtk.HBox(False, 0)
+ align.add(hbox)
+ hbox.show()
+
+ label = gtk.Label(child_id)
+ label.set_selectable(True)
+ hbox.pack_start(label, expand=False, fill=True)
+ label.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_JUMP_TO, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.connect("clicked", self._go_clicked_cb, child_id)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ image = gtk.Image()
+ image.set_from_stock(gtk.STOCK_FIND, gtk.ICON_SIZE_MENU)
+ image.show()
+
+ button = gtk.Button()
+ button.add(image)
+ button.set_relief(gtk.RELIEF_NONE)
+ button.set_sensitive(True)
+ button.connect("clicked", self._show_clicked_cb,
+ child_id, commit.commit_sha1, self.encoding)
+ hbox.pack_start(button, expand=False, fill=True)
+ button.show()
+
+ def _destroy_cb(self, widget):
+ """Callback for when a window we manage is destroyed."""
+ self.quit()
+
+
+ def quit(self):
+ """Stop the GTK+ main loop."""
+ gtk.main_quit()
+
+ def run(self, args):
+ self.set_branch(args)
+ self.window.connect("destroy", self._destroy_cb)
+ self.window.show()
+ gtk.main()
+
+ def set_branch(self, args):
+ """Fill in different windows with info from the reposiroty"""
+ fp = os.popen("git rev-parse --sq --default HEAD " + list_to_string(args, 1))
+ git_rev_list_cmd = fp.read()
+ fp.close()
+ fp = os.popen("git rev-list --header --topo-order --parents " + git_rev_list_cmd)
+ self.update_window(fp)
+
+ def update_window(self, fp):
+ commit_lines = []
+
+ self.model = gtk.ListStore(gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT,
+ gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT, str, str, str)
+
+ # used for cursor positioning
+ self.index = {}
+
+ self.colours = {}
+ self.nodepos = {}
+ self.incomplete_line = {}
+ self.commits = []
+
+ index = 0
+ last_colour = 0
+ last_nodepos = -1
+ out_line = []
+ input_line = fp.readline()
+ while (input_line != ""):
+ # The commit header ends with '\0'
+ # This NULL is immediately followed by the sha1 of the
+ # next commit
+ if (input_line[0] != '\0'):
+ commit_lines.append(input_line)
+ input_line = fp.readline()
+ continue;
+
+ commit = Commit(commit_lines)
+ if (commit != None ):
+ self.commits.append(commit)
+
+ # Skip the '\0'
+ commit_lines = []
+ commit_lines.append(input_line[1:])
+ input_line = fp.readline()
+
+ fp.close()
+
+ for commit in self.commits:
+ (out_line, last_colour, last_nodepos) = self.draw_graph(commit,
+ index, out_line,
+ last_colour,
+ last_nodepos)
+ self.index[commit.commit_sha1] = index
+ index += 1
+
+ self.treeview.set_model(self.model)
+ self.treeview.show()
+
+ def draw_graph(self, commit, index, out_line, last_colour, last_nodepos):
+ in_line=[]
+
+ # | -> outline
+ # X
+ # |\ <- inline
+
+ # Reset the node position
+ if (last_nodepos > 5):
+ last_nodepos = -1
+
+ # Add the incomplete lines of the last cell in this
+ try:
+ colour = self.colours[commit.commit_sha1]
+ except KeyError:
+ self.colours[commit.commit_sha1] = last_colour+1
+ last_colour = self.colours[commit.commit_sha1]
+ colour = self.colours[commit.commit_sha1]
+
+ try:
+ node_pos = self.nodepos[commit.commit_sha1]
+ except KeyError:
+ self.nodepos[commit.commit_sha1] = last_nodepos+1
+ last_nodepos = self.nodepos[commit.commit_sha1]
+ node_pos = self.nodepos[commit.commit_sha1]
+
+ # The first parent always continues on the same line
+ try:
+ # check whether we already have the value
+ tmp_node_pos = self.nodepos[commit.parent_sha1[0]]
+ except KeyError:
+ self.colours[commit.parent_sha1[0]] = colour
+ self.nodepos[commit.parent_sha1[0]] = node_pos
+
+ for sha1 in self.incomplete_line.keys():
+ if (sha1 != commit.commit_sha1):
+ self.draw_incomplete_line(sha1, node_pos,
+ out_line, in_line, index)
+ else:
+ del self.incomplete_line[sha1]
+
+
+ for parent_id in commit.parent_sha1:
+ try:
+ tmp_node_pos = self.nodepos[parent_id]
+ except KeyError:
+ self.colours[parent_id] = last_colour+1
+ last_colour = self.colours[parent_id]
+ self.nodepos[parent_id] = last_nodepos+1
+ last_nodepos = self.nodepos[parent_id]
+
+ in_line.append((node_pos, self.nodepos[parent_id],
+ self.colours[parent_id]))
+ self.add_incomplete_line(parent_id)
+
+ try:
+ branch_tag = self.bt_sha1[commit.commit_sha1]
+ except KeyError:
+ branch_tag = [ ]
+
+
+ node = (node_pos, colour, branch_tag)
+
+ self.model.append([commit, node, out_line, in_line,
+ commit.message, commit.author, commit.date])
+
+ return (in_line, last_colour, last_nodepos)
+
+ def add_incomplete_line(self, sha1):
+ try:
+ self.incomplete_line[sha1].append(self.nodepos[sha1])
+ except KeyError:
+ self.incomplete_line[sha1] = [self.nodepos[sha1]]
+
+ def draw_incomplete_line(self, sha1, node_pos, out_line, in_line, index):
+ for idx, pos in enumerate(self.incomplete_line[sha1]):
+ if(pos == node_pos):
+ #remove the straight line and add a slash
+ if ((pos, pos, self.colours[sha1]) in out_line):
+ out_line.remove((pos, pos, self.colours[sha1]))
+ out_line.append((pos, pos+0.5, self.colours[sha1]))
+ self.incomplete_line[sha1][idx] = pos = pos+0.5
+ try:
+ next_commit = self.commits[index+1]
+ if (next_commit.commit_sha1 == sha1 and pos != int(pos)):
+ # join the line back to the node point
+ # This needs to be done only if we modified it
+ in_line.append((pos, pos-0.5, self.colours[sha1]))
+ continue;
+ except IndexError:
+ pass
+ in_line.append((pos, pos, self.colours[sha1]))
+
+
+ def _go_clicked_cb(self, widget, revid):
+ """Callback for when the go button for a parent is clicked."""
+ try:
+ self.treeview.set_cursor(self.index[revid])
+ except KeyError:
+ print "Revision %s not present in the list" % revid
+ # revid == 0 is the parent of the first commit
+ if (revid != 0 ):
+ print "Try running gitview without any options"
+
+ self.treeview.grab_focus()
+
+ def _show_clicked_cb(self, widget, commit_sha1, parent_sha1, encoding):
+ """Callback for when the show button for a parent is clicked."""
+ window = DiffWindow()
+ window.set_diff(commit_sha1, parent_sha1, encoding)
+ self.treeview.grab_focus()
+
+if __name__ == "__main__":
+ without_diff = 0
+
+ if (len(sys.argv) > 1 ):
+ if (sys.argv[1] == "--without-diff"):
+ without_diff = 1
+
+ view = GitView( without_diff != 1)
+ view.run(sys.argv[without_diff:])
+
+
diff --git a/contrib/gitview/gitview.txt b/contrib/gitview/gitview.txt
new file mode 100644
index 0000000..fcf759c
--- /dev/null
+++ b/contrib/gitview/gitview.txt
@@ -0,0 +1,38 @@
+gitview(1)
+==========
+
+NAME
+----
+gitview - A GTK based repository browser for git
+
+SYNOPSIS
+--------
+'gitview' [options] [args]
+
+DESCRIPTION
+-----------
+
+gitview is a GTK based repository browser for git, written in Python.
+It is based on bzrk by Scott James Remnant.
+
+Dependencies:
+
+* Python 2.4
+* PyGTK 2.8 or later
+* PyCairo 1.0 or later
+
+OPTIONS
+-------
+ --without-diff
+ Do not show the commit diffs in the main window. This may speed up browsing of the repository.
+
+ <args>
+ All the valid options for git-rev-list(1)
+
+EXAMPLES
+--------
+ gitview v2.6.12.. include/scsi drivers/scsi
+ Show the changes since version v2.6.12 that changed any file in the include/scsi
+ or drivers/scsi subdirectories
+
+ gitview --since=2.weeks.ago
+ Show the changes during the last two weeks
+
+
diff --git a/convert-objects.c b/convert-objects.c
index b49bce2..12aacef 100644
--- a/convert-objects.c
+++ b/convert-objects.c
@@ -2,6 +2,9 @@
#define _XOPEN_SOURCE_EXTENDED 1 /* AIX 5.3L needs this */
#include <time.h>
#include "cache.h"
+#include "blob.h"
+#include "commit.h"
+#include "tree.h"
struct entry {
unsigned char old_sha1[20];
@@ -18,8 +21,7 @@ static struct entry * convert_entry(unsigned char *sha1);
static struct entry *insert_new(unsigned char *sha1, int pos)
{
- struct entry *new = xmalloc(sizeof(struct entry));
- memset(new, 0, sizeof(*new));
+ struct entry *new = xcalloc(1, sizeof(struct entry));
memcpy(new->old_sha1, sha1, 20);
memmove(convert + pos + 1, convert + pos, (nr_convert - pos) * sizeof(struct entry *));
convert[pos] = new;
@@ -122,7 +124,7 @@ static int write_subdirectory(void *buffer, unsigned long size, const char *base
buffer += len;
}
- write_sha1_file(new, newlen, "tree", result_sha1);
+ write_sha1_file(new, newlen, tree_type, result_sha1);
free(new);
return used;
}
@@ -262,8 +264,8 @@ static void convert_date(void *buffer, unsigned long size, unsigned char *result
memcpy(new + newlen, buffer, size);
newlen += size;
- write_sha1_file(new, newlen, "commit", result_sha1);
- free(new);
+ write_sha1_file(new, newlen, commit_type, result_sha1);
+ free(new);
}
static void convert_commit(void *buffer, unsigned long size, unsigned char *result_sha1)
@@ -297,12 +299,12 @@ static struct entry * convert_entry(unsigned char *sha1)
buffer = xmalloc(size);
memcpy(buffer, data, size);
-
- if (!strcmp(type, "blob")) {
- write_sha1_file(buffer, size, "blob", entry->new_sha1);
- } else if (!strcmp(type, "tree"))
+
+ if (!strcmp(type, blob_type)) {
+ write_sha1_file(buffer, size, blob_type, entry->new_sha1);
+ } else if (!strcmp(type, tree_type))
convert_tree(buffer, size, entry->new_sha1);
- else if (!strcmp(type, "commit"))
+ else if (!strcmp(type, commit_type))
convert_commit(buffer, size, entry->new_sha1);
else
die("unknown object type '%s' in %s", type, sha1_to_hex(sha1));
diff --git a/count-delta.c b/count-delta.c
deleted file mode 100644
index 058a2aa..0000000
--- a/count-delta.c
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (C) 2005 Junio C Hamano
- * The delta-parsing part is almost straight copy of patch-delta.c
- * which is (C) 2005 Nicolas Pitre <nico@cam.org>.
- */
-#include <stdlib.h>
-#include <string.h>
-#include <limits.h>
-#include "delta.h"
-#include "count-delta.h"
-
-/*
- * NOTE. We do not _interpret_ delta fully. As an approximation, we
- * just count the number of bytes that are copied from the source, and
- * the number of literal data bytes that are inserted.
- *
- * Number of bytes that are _not_ copied from the source is deletion,
- * and number of inserted literal bytes are addition, so sum of them
- * is the extent of damage.
- */
-int count_delta(void *delta_buf, unsigned long delta_size,
- unsigned long *src_copied, unsigned long *literal_added)
-{
- unsigned long copied_from_source, added_literal;
- const unsigned char *data, *top;
- unsigned char cmd;
- unsigned long src_size, dst_size, out;
-
- if (delta_size < DELTA_SIZE_MIN)
- return -1;
-
- data = delta_buf;
- top = delta_buf + delta_size;
-
- src_size = get_delta_hdr_size(&data);
- dst_size = get_delta_hdr_size(&data);
-
- added_literal = copied_from_source = out = 0;
- while (data < top) {
- cmd = *data++;
- if (cmd & 0x80) {
- unsigned long cp_off = 0, cp_size = 0;
- if (cmd & 0x01) cp_off = *data++;
- if (cmd & 0x02) cp_off |= (*data++ << 8);
- if (cmd & 0x04) cp_off |= (*data++ << 16);
- if (cmd & 0x08) cp_off |= (*data++ << 24);
- if (cmd & 0x10) cp_size = *data++;
- if (cmd & 0x20) cp_size |= (*data++ << 8);
- if (cmd & 0x40) cp_size |= (*data++ << 16);
- if (cp_size == 0) cp_size = 0x10000;
-
- copied_from_source += cp_size;
- out += cp_size;
- } else {
- /* write literal into dst */
- added_literal += cmd;
- out += cmd;
- data += cmd;
- }
- }
-
- /* sanity check */
- if (data != top || out != dst_size)
- return -1;
-
- /* delete size is what was _not_ copied from source.
- * edit size is that and literal additions.
- */
- *src_copied = copied_from_source;
- *literal_added = added_literal;
- return 0;
-}
diff --git a/count-delta.h b/count-delta.h
deleted file mode 100644
index 7359629..0000000
--- a/count-delta.h
+++ /dev/null
@@ -1,10 +0,0 @@
-/*
- * Copyright (C) 2005 Junio C Hamano
- */
-#ifndef COUNT_DELTA_H
-#define COUNT_DELTA_H
-
-int count_delta(void *, unsigned long,
- unsigned long *src_copied, unsigned long *literal_added);
-
-#endif
diff --git a/date.c b/date.c
index 18a0710..376d25d 100644
--- a/date.c
+++ b/date.c
@@ -123,8 +123,6 @@ static const struct {
{ "IDLE", +12, 0, }, /* International Date Line East */
};
-#define NR_TZ (sizeof(timezone_names) / sizeof(timezone_names[0]))
-
static int match_string(const char *date, const char *str)
{
int i = 0;
@@ -173,7 +171,7 @@ static int match_alpha(const char *date, struct tm *tm, int *offset)
}
}
- for (i = 0; i < NR_TZ; i++) {
+ for (i = 0; i < ARRAY_SIZE(timezone_names); i++) {
int match = match_string(date, timezone_names[i].name);
if (match >= 3) {
int off = timezone_names[i].offset;
diff --git a/diff-delta.c b/diff-delta.c
index c2f656a..1188b31 100644
--- a/diff-delta.c
+++ b/diff-delta.c
@@ -19,8 +19,9 @@
*/
#include <stdlib.h>
+#include <string.h>
+#include <zlib.h>
#include "delta.h"
-#include "zlib.h"
/* block size: min = 16, max = 64k, power of 2 */
@@ -29,149 +30,131 @@
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define GR_PRIME 0x9e370001
-#define HASH(v, b) (((unsigned int)(v) * GR_PRIME) >> (32 - (b)))
-
-static unsigned int hashbits(unsigned int size)
-{
- unsigned int val = 1, bits = 0;
- while (val < size && bits < 32) {
- val <<= 1;
- bits++;
- }
- return bits ? bits: 1;
-}
-
-typedef struct s_chanode {
- struct s_chanode *next;
- int icurr;
-} chanode_t;
-
-typedef struct s_chastore {
- int isize, nsize;
- chanode_t *ancur;
-} chastore_t;
+#define HASH(v, shift) (((unsigned int)(v) * GR_PRIME) >> (shift))
-static void cha_init(chastore_t *cha, int isize, int icount)
-{
- cha->isize = isize;
- cha->nsize = icount * isize;
- cha->ancur = NULL;
-}
-
-static void *cha_alloc(chastore_t *cha)
-{
- chanode_t *ancur;
- void *data;
-
- ancur = cha->ancur;
- if (!ancur || ancur->icurr == cha->nsize) {
- ancur = malloc(sizeof(chanode_t) + cha->nsize);
- if (!ancur)
- return NULL;
- ancur->icurr = 0;
- ancur->next = cha->ancur;
- cha->ancur = ancur;
- }
-
- data = (void *)ancur + sizeof(chanode_t) + ancur->icurr;
- ancur->icurr += cha->isize;
- return data;
-}
-
-static void cha_free(chastore_t *cha)
+struct index {
+ const unsigned char *ptr;
+ unsigned int val;
+ struct index *next;
+};
+
+static struct index ** delta_index(const unsigned char *buf,
+ unsigned long bufsize,
+ unsigned long trg_bufsize,
+ unsigned int *hash_shift)
{
- chanode_t *cur = cha->ancur;
- while (cur) {
- chanode_t *tmp = cur;
- cur = cur->next;
- free(tmp);
+ unsigned int i, hsize, hshift, hlimit, entries, *hash_count;
+ const unsigned char *data;
+ struct index *entry, **hash;
+ void *mem;
+
+ /* determine index hash size */
+ entries = bufsize / BLK_SIZE;
+ hsize = entries / 4;
+ for (i = 4; (1 << i) < hsize && i < 31; i++);
+ hsize = 1 << i;
+ hshift = 32 - i;
+ *hash_shift = hshift;
+
+ /* allocate lookup index */
+ mem = malloc(hsize * sizeof(*hash) + entries * sizeof(*entry));
+ if (!mem)
+ return NULL;
+ hash = mem;
+ entry = mem + hsize * sizeof(*hash);
+ memset(hash, 0, hsize * sizeof(*hash));
+
+ /* allocate an array to count hash entries */
+ hash_count = calloc(hsize, sizeof(*hash_count));
+ if (!hash_count) {
+ free(hash);
+ return NULL;
}
-}
-
-typedef struct s_bdrecord {
- struct s_bdrecord *next;
- unsigned int fp;
- const unsigned char *ptr;
-} bdrecord_t;
-typedef struct s_bdfile {
- chastore_t cha;
- unsigned int fphbits;
- bdrecord_t **fphash;
-} bdfile_t;
-
-static int delta_prepare(const unsigned char *buf, int bufsize, bdfile_t *bdf)
-{
- unsigned int fphbits;
- int i, hsize;
- const unsigned char *data, *top;
- bdrecord_t *brec;
- bdrecord_t **fphash;
-
- fphbits = hashbits(bufsize / BLK_SIZE + 1);
- hsize = 1 << fphbits;
- fphash = malloc(hsize * sizeof(bdrecord_t *));
- if (!fphash)
- return -1;
- for (i = 0; i < hsize; i++)
- fphash[i] = NULL;
- cha_init(&bdf->cha, sizeof(bdrecord_t), hsize / 4 + 1);
-
- top = buf + bufsize;
- data = buf + (bufsize / BLK_SIZE) * BLK_SIZE;
- if (data == top)
+ /* then populate the index */
+ data = buf + entries * BLK_SIZE - BLK_SIZE;
+ while (data >= buf) {
+ unsigned int val = adler32(0, data, BLK_SIZE);
+ i = HASH(val, hshift);
+ entry->ptr = data;
+ entry->val = val;
+ entry->next = hash[i];
+ hash[i] = entry++;
+ hash_count[i]++;
data -= BLK_SIZE;
-
- for ( ; data >= buf; data -= BLK_SIZE) {
- brec = cha_alloc(&bdf->cha);
- if (!brec) {
- cha_free(&bdf->cha);
- free(fphash);
- return -1;
- }
- brec->fp = adler32(0, data, MIN(BLK_SIZE, top - data));
- brec->ptr = data;
- i = HASH(brec->fp, fphbits);
- brec->next = fphash[i];
- fphash[i] = brec;
+ }
+
+ /*
+ * Determine a limit on the number of entries in the same hash
+ * bucket. This guards us against pathological data sets causing
+ * really bad hash distribution with most entries in the same hash
+ * bucket that would bring us to O(m*n) computing costs (m and n
+ * corresponding to reference and target buffer sizes).
+ *
+ * The larger the target buffer, the more important it is to
+ * have short entry lists for each hash bucket. With such a limit
+ * the cost is bounded to something more like O(m+n).
+ */
+ hlimit = (1 << 26) / trg_bufsize;
+ if (hlimit < 4*BLK_SIZE)
+ hlimit = 4*BLK_SIZE;
+
+ /*
+ * Now make sure none of the hash buckets has more entries than
+ * we're willing to test. Otherwise we cull the entry list
+ * uniformly so as to preserve a good distribution across
+ * the reference buffer.
+ */
+ for (i = 0; i < hsize; i++) {
+ if (hash_count[i] < hlimit)
+ continue;
+ entry = hash[i];
+ do {
+ struct index *keep = entry;
+ int skip = hash_count[i] / hlimit / 2;
+ do {
+ entry = entry->next;
+ } while(--skip && entry);
+ keep->next = entry;
+ } while(entry);
}
+ free(hash_count);
- bdf->fphbits = fphbits;
- bdf->fphash = fphash;
-
- return 0;
-}
-
-static void delta_cleanup(bdfile_t *bdf)
-{
- free(bdf->fphash);
- cha_free(&bdf->cha);
+ return hash;
}
+/* provide the size of the copy opcode given the block offset and size */
#define COPYOP_SIZE(o, s) \
(!!(o & 0xff) + !!(o & 0xff00) + !!(o & 0xff0000) + !!(o & 0xff000000) + \
!!(s & 0xff) + !!(s & 0xff00) + 1)
+/* the maximum size for any opcode */
+#define MAX_OP_SIZE COPYOP_SIZE(0xffffffff, 0xffffffff)
+
void *diff_delta(void *from_buf, unsigned long from_size,
void *to_buf, unsigned long to_size,
unsigned long *delta_size,
unsigned long max_size)
{
- int i, outpos, outsize, inscnt, csize, msize, moff;
- unsigned int fp;
- const unsigned char *ref_data, *ref_top, *data, *top, *ptr1, *ptr2;
- unsigned char *out, *orig;
- bdrecord_t *brec;
- bdfile_t bdf;
-
- if (!from_size || !to_size || delta_prepare(from_buf, from_size, &bdf))
+ unsigned int i, outpos, outsize, hash_shift;
+ int inscnt;
+ const unsigned char *ref_data, *ref_top, *data, *top;
+ unsigned char *out;
+ struct index *entry, **hash;
+
+ if (!from_size || !to_size)
return NULL;
-
+ hash = delta_index(from_buf, from_size, to_size, &hash_shift);
+ if (!hash)
+ return NULL;
+
outpos = 0;
outsize = 8192;
+ if (max_size && outsize >= max_size)
+ outsize = max_size + MAX_OP_SIZE + 1;
out = malloc(outsize);
if (!out) {
- delta_cleanup(&bdf);
+ free(hash);
return NULL;
}
@@ -199,28 +182,30 @@ void *diff_delta(void *from_buf, unsigned long from_size,
}
inscnt = 0;
- moff = 0;
+
while (data < top) {
- msize = 0;
- fp = adler32(0, data, MIN(top - data, BLK_SIZE));
- i = HASH(fp, bdf.fphbits);
- for (brec = bdf.fphash[i]; brec; brec = brec->next) {
- if (brec->fp == fp) {
- csize = ref_top - brec->ptr;
- if (csize > top - data)
- csize = top - data;
- for (ptr1 = brec->ptr, ptr2 = data;
- csize && *ptr1 == *ptr2;
- csize--, ptr1++, ptr2++);
-
- csize = ptr1 - brec->ptr;
- if (csize > msize) {
- moff = brec->ptr - ref_data;
- msize = csize;
- if (msize >= 0x10000) {
- msize = 0x10000;
- break;
- }
+ unsigned int moff = 0, msize = 0;
+ if (data + BLK_SIZE <= top) {
+ unsigned int val = adler32(0, data, BLK_SIZE);
+ i = HASH(val, hash_shift);
+ for (entry = hash[i]; entry; entry = entry->next) {
+ const unsigned char *ref = entry->ptr;
+ const unsigned char *src = data;
+ unsigned int ref_size = ref_top - ref;
+ if (entry->val != val)
+ continue;
+ if (ref_size > top - src)
+ ref_size = top - src;
+ if (ref_size > 0x10000)
+ ref_size = 0x10000;
+ if (ref_size <= msize)
+ break;
+ while (ref_size-- && *src++ == *ref)
+ ref++;
+ if (msize < ref - entry->ptr) {
+ /* this is our best match so far */
+ msize = ref - entry->ptr;
+ moff = entry->ptr - ref_data;
}
}
}
@@ -235,13 +220,29 @@ void *diff_delta(void *from_buf, unsigned long from_size,
inscnt = 0;
}
} else {
+ unsigned char *op;
+
if (inscnt) {
+ while (moff && ref_data[moff-1] == data[-1]) {
+ if (msize == 0x10000)
+ break;
+ /* we can match one byte back */
+ msize++;
+ moff--;
+ data--;
+ outpos--;
+ if (--inscnt)
+ continue;
+ outpos--; /* remove count slot */
+ inscnt--; /* make it -1 */
+ break;
+ }
out[outpos - inscnt - 1] = inscnt;
inscnt = 0;
}
data += msize;
- orig = out + outpos++;
+ op = out + outpos++;
i = 0x80;
if (moff & 0xff) { out[outpos++] = moff; i |= 0x01; }
@@ -256,23 +257,21 @@ void *diff_delta(void *from_buf, unsigned long from_size,
msize >>= 8;
if (msize & 0xff) { out[outpos++] = msize; i |= 0x20; }
- *orig = i;
- }
-
- if (max_size && outpos > max_size) {
- free(out);
- delta_cleanup(&bdf);
- return NULL;
+ *op = i;
}
- /* next time around the largest possible output is 1 + 4 + 3 */
- if (outpos > outsize - 8) {
+ if (outpos >= outsize - MAX_OP_SIZE) {
void *tmp = out;
outsize = outsize * 3 / 2;
- out = realloc(out, outsize);
+ if (max_size && outsize >= max_size)
+ outsize = max_size + MAX_OP_SIZE + 1;
+ if (max_size && outpos > max_size)
+ out = NULL;
+ else
+ out = realloc(out, outsize);
if (!out) {
free(tmp);
- delta_cleanup(&bdf);
+ free(hash);
return NULL;
}
}
@@ -281,7 +280,7 @@ void *diff_delta(void *from_buf, unsigned long from_size,
if (inscnt)
out[outpos - inscnt - 1] = inscnt;
- delta_cleanup(&bdf);
+ free(hash);
*delta_size = outpos;
return out;
}
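The rewritten diff-delta.c replaces the chanode allocator and fingerprint hash with a single flat index: every BLK_SIZE block of the reference buffer is fingerprinted with adler32() and chained into a power-of-two hash table, and overly long chains are culled so the match loop stays close to O(m+n). A minimal standalone sketch of that indexing idea, with illustrative names rather than the patch's own, might look like this (the caller supplies a pool with at least len / BLK_SIZE entries):

	#include <stdlib.h>
	#include <zlib.h>

	#define BLK_SIZE 16
	#define GR_PRIME 0x9e370001
	#define HASH(v, shift) (((unsigned int)(v) * GR_PRIME) >> (shift))

	struct blk { const unsigned char *ptr; unsigned int val; struct blk *next; };

	/* Fingerprint each BLK_SIZE block of 'src' and chain it into a
	 * hash table of 1 << hsize_log2 buckets.  Returns NULL on OOM.
	 */
	static struct blk **index_blocks(const unsigned char *src, unsigned long len,
					 unsigned int hsize_log2, struct blk *pool)
	{
		unsigned int shift = 32 - hsize_log2;
		struct blk **hash = calloc(1UL << hsize_log2, sizeof(*hash));
		unsigned long n = len / BLK_SIZE, j;
		if (!hash)
			return NULL;
		for (j = 0; j < n; j++) {
			const unsigned char *p = src + j * BLK_SIZE;
			unsigned int val = adler32(0, p, BLK_SIZE);
			unsigned int i = HASH(val, shift);
			pool[j].ptr = p;
			pool[j].val = val;
			pool[j].next = hash[i];
			hash[i] = &pool[j];
		}
		return hash;
	}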
diff --git a/diff-files.c b/diff-files.c
index 0c3f800..3e7f5f1 100644
--- a/diff-files.c
+++ b/diff-files.c
@@ -149,7 +149,7 @@ int main(int argc, const char **argv)
memcpy(combine.p.parent[stage-2].sha1,
nce->sha1, 20);
combine.p.parent[stage-2].mode =
- DIFF_FILE_CANON_MODE(mode);
+ canon_mode(mode);
combine.p.parent[stage-2].status =
DIFF_STATUS_MODIFIED;
}
@@ -193,12 +193,12 @@ int main(int argc, const char **argv)
show_file('-', ce);
continue;
}
- changed = ce_match_stat(ce, &st);
+ changed = ce_match_stat(ce, &st, 0);
if (!changed && !diff_options.find_copies_harder)
continue;
oldmode = ntohl(ce->ce_mode);
- newmode = DIFF_FILE_CANON_MODE(st.st_mode);
+ newmode = canon_mode(st.st_mode);
if (!trust_executable_bit &&
S_ISREG(newmode) && S_ISREG(oldmode) &&
((newmode ^ oldmode) == 0111))
diff --git a/diff-index.c b/diff-index.c
index f8a102e..e376d65 100644
--- a/diff-index.c
+++ b/diff-index.c
@@ -33,12 +33,10 @@ static int get_stat_data(struct cache_entry *ce,
}
return -1;
}
- changed = ce_match_stat(ce, &st);
+ changed = ce_match_stat(ce, &st, 0);
if (changed) {
mode = create_ce_mode(st.st_mode);
- if (!trust_executable_bit &&
- S_ISREG(mode) && S_ISREG(ce->ce_mode) &&
- ((mode ^ ce->ce_mode) == 0111))
+ if (!trust_executable_bit && S_ISREG(st.st_mode))
mode = ce->ce_mode;
sha1 = no_sha1;
}
diff --git a/diff-tree.c b/diff-tree.c
index f55a35a..d1265d7 100644
--- a/diff-tree.c
+++ b/diff-tree.c
@@ -52,7 +52,7 @@ static int diff_root_tree(const unsigned char *new, const char *base)
void *tree;
struct tree_desc empty, real;
- tree = read_object_with_reference(new, "tree", &real.size, NULL);
+ tree = read_object_with_reference(new, tree_type, &real.size, NULL);
if (!tree)
die("unable to read root tree (%s)", sha1_to_hex(new));
real.buf = tree;
diff --git a/diff.c b/diff.c
index fa1613c..fe4664c 100644
--- a/diff.c
+++ b/diff.c
@@ -8,8 +8,7 @@
#include "quote.h"
#include "diff.h"
#include "diffcore.h"
-
-static const char *diff_opts = "-pu";
+#include "xdiff/xdiff.h"
static int use_size_cache;
@@ -69,25 +68,10 @@ static const char *external_diff(void)
{
static const char *external_diff_cmd = NULL;
static int done_preparing = 0;
- const char *env_diff_opts;
if (done_preparing)
return external_diff_cmd;
-
- /*
- * Default values above are meant to match the
- * Linux kernel development style. Examples of
- * alternative styles you can specify via environment
- * variables are:
- *
- * GIT_DIFF_OPTS="-c";
- */
external_diff_cmd = getenv("GIT_EXTERNAL_DIFF");
-
- /* In case external diff fails... */
- env_diff_opts = getenv("GIT_DIFF_OPTS");
- if (env_diff_opts) diff_opts = env_diff_opts;
-
done_preparing = 1;
return external_diff_cmd;
}
@@ -101,13 +85,12 @@ static struct diff_tempfile {
char tmp_path[TEMPFILE_PATH_LEN];
} diff_temp[2];
-static int count_lines(const char *filename)
+static int count_lines(const char *data, int size)
{
- FILE *in;
int count, ch, completely_empty = 1, nl_just_seen = 0;
- in = fopen(filename, "r");
count = 0;
- while ((ch = fgetc(in)) != EOF)
+ while (0 < size--) {
+ ch = *data++;
if (ch == '\n') {
count++;
nl_just_seen = 1;
@@ -117,7 +100,7 @@ static int count_lines(const char *filename)
nl_just_seen = 0;
completely_empty = 0;
}
- fclose(in);
+ }
if (completely_empty)
return 0;
if (!nl_just_seen)
@@ -140,12 +123,11 @@ static void print_line_count(int count)
}
}
-static void copy_file(int prefix, const char *filename)
+static void copy_file(int prefix, const char *data, int size)
{
- FILE *in;
int ch, nl_just_seen = 1;
- in = fopen(filename, "r");
- while ((ch = fgetc(in)) != EOF) {
+ while (0 < size--) {
+ ch = *data++;
if (nl_just_seen)
putchar(prefix);
putchar(ch);
@@ -154,107 +136,153 @@ static void copy_file(int prefix, const char *filename)
else
nl_just_seen = 0;
}
- fclose(in);
if (!nl_just_seen)
printf("\n\\ No newline at end of file\n");
}
static void emit_rewrite_diff(const char *name_a,
const char *name_b,
- struct diff_tempfile *temp)
+ struct diff_filespec *one,
+ struct diff_filespec *two)
{
/* Use temp[i].name as input, name_a and name_b as labels */
int lc_a, lc_b;
- lc_a = count_lines(temp[0].name);
- lc_b = count_lines(temp[1].name);
+ lc_a = count_lines(one->data, one->size);
+ lc_b = count_lines(two->data, two->size);
printf("--- %s\n+++ %s\n@@ -", name_a, name_b);
print_line_count(lc_a);
printf(" +");
print_line_count(lc_b);
printf(" @@\n");
if (lc_a)
- copy_file('-', temp[0].name);
+ copy_file('-', one->data, one->size);
if (lc_b)
- copy_file('+', temp[1].name);
+ copy_file('+', two->data, two->size);
+}
+
+static int fill_mmfile(mmfile_t *mf, struct diff_filespec *one)
+{
+ if (!DIFF_FILE_VALID(one)) {
+ mf->ptr = ""; /* does not matter */
+ mf->size = 0;
+ return 0;
+ }
+ else if (diff_populate_filespec(one, 0))
+ return -1;
+ mf->ptr = one->data;
+ mf->size = one->size;
+ return 0;
+}
+
+struct emit_callback {
+ const char **label_path;
+};
+
+static int fn_out(void *priv, mmbuffer_t *mb, int nbuf)
+{
+ int i;
+ struct emit_callback *ecbdata = priv;
+
+ if (ecbdata->label_path[0]) {
+ printf("--- %s\n", ecbdata->label_path[0]);
+ printf("+++ %s\n", ecbdata->label_path[1]);
+ ecbdata->label_path[0] = ecbdata->label_path[1] = NULL;
+ }
+ for (i = 0; i < nbuf; i++)
+ if (!fwrite(mb[i].ptr, mb[i].size, 1, stdout))
+ return -1;
+ return 0;
+}
+
+#define FIRST_FEW_BYTES 8000
+static int mmfile_is_binary(mmfile_t *mf)
+{
+ long sz = mf->size;
+ if (FIRST_FEW_BYTES < sz)
+ sz = FIRST_FEW_BYTES;
+ if (memchr(mf->ptr, 0, sz))
+ return 1;
+ return 0;
}
static void builtin_diff(const char *name_a,
const char *name_b,
- struct diff_tempfile *temp,
+ struct diff_filespec *one,
+ struct diff_filespec *two,
const char *xfrm_msg,
int complete_rewrite)
{
- int i, next_at, cmd_size;
- const char *const diff_cmd = "diff -L%s -L%s";
- const char *const diff_arg = "-- %s %s||:"; /* "||:" is to return 0 */
- const char *input_name_sq[2];
- const char *label_path[2];
- char *cmd;
-
- /* diff_cmd and diff_arg have 4 %s in total which makes
- * the sum of these strings 8 bytes larger than required.
- * we use 2 spaces around diff-opts, and we need to count
- * terminating NUL; we used to subtract 5 here, but we do not
- * care about small leaks in this subprocess that is about
- * to exec "diff" anymore.
- */
- cmd_size = (strlen(diff_cmd) + strlen(diff_opts) + strlen(diff_arg)
- + 128);
-
- for (i = 0; i < 2; i++) {
- input_name_sq[i] = sq_quote(temp[i].name);
- if (!strcmp(temp[i].name, "/dev/null"))
- label_path[i] = "/dev/null";
- else if (!i)
- label_path[i] = sq_quote(quote_two("a/", name_a));
- else
- label_path[i] = sq_quote(quote_two("b/", name_b));
- cmd_size += (strlen(label_path[i]) + strlen(input_name_sq[i]));
- }
-
- cmd = xmalloc(cmd_size);
-
- next_at = 0;
- next_at += snprintf(cmd+next_at, cmd_size-next_at,
- diff_cmd, label_path[0], label_path[1]);
- next_at += snprintf(cmd+next_at, cmd_size-next_at,
- " %s ", diff_opts);
- next_at += snprintf(cmd+next_at, cmd_size-next_at,
- diff_arg, input_name_sq[0], input_name_sq[1]);
-
- printf("diff --git %s %s\n",
- quote_two("a/", name_a), quote_two("b/", name_b));
- if (label_path[0][0] == '/') {
- /* dev/null */
- printf("new file mode %s\n", temp[1].mode);
+ mmfile_t mf1, mf2;
+ const char *lbl[2];
+ char *a_one, *b_two;
+
+ a_one = quote_two("a/", name_a);
+ b_two = quote_two("b/", name_b);
+ lbl[0] = DIFF_FILE_VALID(one) ? a_one : "/dev/null";
+ lbl[1] = DIFF_FILE_VALID(two) ? b_two : "/dev/null";
+ printf("diff --git %s %s\n", a_one, b_two);
+ if (lbl[0][0] == '/') {
+ /* /dev/null */
+ printf("new file mode %06o\n", two->mode);
if (xfrm_msg && xfrm_msg[0])
puts(xfrm_msg);
}
- else if (label_path[1][0] == '/') {
- printf("deleted file mode %s\n", temp[0].mode);
+ else if (lbl[1][0] == '/') {
+ printf("deleted file mode %06o\n", one->mode);
if (xfrm_msg && xfrm_msg[0])
puts(xfrm_msg);
}
else {
- if (strcmp(temp[0].mode, temp[1].mode)) {
- printf("old mode %s\n", temp[0].mode);
- printf("new mode %s\n", temp[1].mode);
+ if (one->mode != two->mode) {
+ printf("old mode %06o\n", one->mode);
+ printf("new mode %06o\n", two->mode);
}
if (xfrm_msg && xfrm_msg[0])
puts(xfrm_msg);
- if (strncmp(temp[0].mode, temp[1].mode, 3))
- /* we do not run diff between different kind
- * of objects.
- */
- exit(0);
+ /*
+ * we do not run diff between different kind
+ * of objects.
+ */
+ if ((one->mode ^ two->mode) & S_IFMT)
+ goto free_ab_and_return;
if (complete_rewrite) {
- fflush(NULL);
- emit_rewrite_diff(name_a, name_b, temp);
- exit(0);
+ emit_rewrite_diff(name_a, name_b, one, two);
+ goto free_ab_and_return;
}
}
- fflush(NULL);
- execlp("/bin/sh","sh", "-c", cmd, NULL);
+
+ if (fill_mmfile(&mf1, one) < 0 || fill_mmfile(&mf2, two) < 0)
+ die("unable to read files to diff");
+
+ if (mmfile_is_binary(&mf1) || mmfile_is_binary(&mf2))
+ printf("Binary files %s and %s differ\n", lbl[0], lbl[1]);
+ else {
+ /* Crazy xdl interfaces.. */
+ const char *diffopts = getenv("GIT_DIFF_OPTS");
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
+ xdemitcb_t ecb;
+ struct emit_callback ecbdata;
+
+ ecbdata.label_path = lbl;
+ xpp.flags = XDF_NEED_MINIMAL;
+ xecfg.ctxlen = 3;
+ xecfg.flags = XDL_EMIT_FUNCNAMES;
+ if (!diffopts)
+ ;
+ else if (!strncmp(diffopts, "--unified=", 10))
+ xecfg.ctxlen = strtoul(diffopts + 10, NULL, 10);
+ else if (!strncmp(diffopts, "-u", 2))
+ xecfg.ctxlen = strtoul(diffopts + 2, NULL, 10);
+ ecb.outf = fn_out;
+ ecb.priv = &ecbdata;
+ xdl_diff(&mf1, &mf2, &xpp, &xecfg, &ecb);
+ }
+
+ free_ab_and_return:
+ free(a_one);
+ free(b_two);
+ return;
}
struct diff_filespec *alloc_filespec(const char *path)
@@ -272,7 +300,7 @@ void fill_filespec(struct diff_filespec *spec, const unsigned char *sha1,
unsigned short mode)
{
if (mode) {
- spec->mode = DIFF_FILE_CANON_MODE(mode);
+ spec->mode = canon_mode(mode);
memcpy(spec->sha1, sha1, 20);
spec->sha1_valid = !!memcmp(sha1, null_sha1, 20);
}
@@ -311,7 +339,7 @@ static int work_tree_matches(const char *name, const unsigned char *sha1)
ce = active_cache[pos];
if ((lstat(name, &st) < 0) ||
!S_ISREG(st.st_mode) || /* careful! */
- ce_match_stat(ce, &st) ||
+ ce_match_stat(ce, &st, 0) ||
memcmp(sha1, ce->sha1, 20))
return 0;
/* we return 1 only when we can stat, it is a regular file,
@@ -457,6 +485,8 @@ void diff_free_filespec_data(struct diff_filespec *s)
munmap(s->data, s->size);
s->should_free = s->should_munmap = 0;
s->data = NULL;
+ free(s->cnt_data);
+ s->cnt_data = NULL;
}
static void prep_temp_blob(struct diff_tempfile *temp,
@@ -559,6 +589,40 @@ static void remove_tempfile_on_signal(int signo)
raise(signo);
}
+static int spawn_prog(const char *pgm, const char **arg)
+{
+ pid_t pid;
+ int status;
+
+ fflush(NULL);
+ pid = fork();
+ if (pid < 0)
+ die("unable to fork");
+ if (!pid) {
+ execvp(pgm, (char *const*) arg);
+ exit(255);
+ }
+
+ while (waitpid(pid, &status, 0) < 0) {
+ if (errno == EINTR)
+ continue;
+ return -1;
+ }
+
+ /* Earlier we did not check the exit status because
+ * diff exits non-zero if files are different, and
+ * we are not interested in knowing that. It was a
+ * mistake which made it harder to quit a diff-*
+ * session that uses the git-apply-patch-script as
+ * the GIT_EXTERNAL_DIFF. A custom GIT_EXTERNAL_DIFF
+ * should also exit non-zero only when it wants to
+ * abort the entire diff-* session.
+ */
+ if (WIFEXITED(status) && !WEXITSTATUS(status))
+ return 0;
+ return -1;
+}
+
/* An external diff command takes:
*
* diff-cmd name infile1 infile1-sha1 infile1-mode \
@@ -573,11 +637,12 @@ static void run_external_diff(const char *pgm,
const char *xfrm_msg,
int complete_rewrite)
{
+ const char *spawn_arg[10];
struct diff_tempfile *temp = diff_temp;
- pid_t pid;
- int status;
+ int retval;
static int atexit_asked = 0;
const char *othername;
+ const char **arg = &spawn_arg[0];
othername = (other? other : name);
if (one && two) {
@@ -592,59 +657,50 @@ static void run_external_diff(const char *pgm,
signal(SIGINT, remove_tempfile_on_signal);
}
- fflush(NULL);
- pid = fork();
- if (pid < 0)
- die("unable to fork");
- if (!pid) {
- if (pgm) {
- if (one && two) {
- const char *exec_arg[10];
- const char **arg = &exec_arg[0];
- *arg++ = pgm;
- *arg++ = name;
- *arg++ = temp[0].name;
- *arg++ = temp[0].hex;
- *arg++ = temp[0].mode;
- *arg++ = temp[1].name;
- *arg++ = temp[1].hex;
- *arg++ = temp[1].mode;
- if (other) {
- *arg++ = other;
- *arg++ = xfrm_msg;
- }
- *arg = NULL;
- execvp(pgm, (char *const*) exec_arg);
- }
- else
- execlp(pgm, pgm, name, NULL);
+ if (one && two) {
+ *arg++ = pgm;
+ *arg++ = name;
+ *arg++ = temp[0].name;
+ *arg++ = temp[0].hex;
+ *arg++ = temp[0].mode;
+ *arg++ = temp[1].name;
+ *arg++ = temp[1].hex;
+ *arg++ = temp[1].mode;
+ if (other) {
+ *arg++ = other;
+ *arg++ = xfrm_msg;
}
- /*
- * otherwise we use the built-in one.
- */
- if (one && two)
- builtin_diff(name, othername, temp, xfrm_msg,
- complete_rewrite);
- else
- printf("* Unmerged path %s\n", name);
- exit(0);
+ } else {
+ *arg++ = pgm;
+ *arg++ = name;
}
- if (waitpid(pid, &status, 0) < 0 ||
- !WIFEXITED(status) || WEXITSTATUS(status)) {
- /* Earlier we did not check the exit status because
- * diff exits non-zero if files are different, and
- * we are not interested in knowing that. It was a
- * mistake which made it harder to quit a diff-*
- * session that uses the git-apply-patch-script as
- * the GIT_EXTERNAL_DIFF. A custom GIT_EXTERNAL_DIFF
- * should also exit non-zero only when it wants to
- * abort the entire diff-* session.
- */
- remove_tempfile();
+ *arg = NULL;
+ retval = spawn_prog(pgm, spawn_arg);
+ remove_tempfile();
+ if (retval) {
fprintf(stderr, "external diff died, stopping at %s.\n", name);
exit(1);
}
- remove_tempfile();
+}
+
+static void run_diff_cmd(const char *pgm,
+ const char *name,
+ const char *other,
+ struct diff_filespec *one,
+ struct diff_filespec *two,
+ const char *xfrm_msg,
+ int complete_rewrite)
+{
+ if (pgm) {
+ run_external_diff(pgm, name, other, one, two, xfrm_msg,
+ complete_rewrite);
+ return;
+ }
+ if (one && two)
+ builtin_diff(name, other ? other : name,
+ one, two, xfrm_msg, complete_rewrite);
+ else
+ printf("* Unmerged path %s\n", name);
}
static void diff_fill_sha1_info(struct diff_filespec *one)
@@ -676,8 +732,7 @@ static void run_diff(struct diff_filepair *p, struct diff_options *o)
if (DIFF_PAIR_UNMERGED(p)) {
/* unmerged */
- run_external_diff(pgm, p->one->path, NULL, NULL, NULL, NULL,
- 0);
+ run_diff_cmd(pgm, p->one->path, NULL, NULL, NULL, NULL, 0);
return;
}
@@ -749,15 +804,15 @@ static void run_diff(struct diff_filepair *p, struct diff_options *o)
* needs to be split into deletion and creation.
*/
struct diff_filespec *null = alloc_filespec(two->path);
- run_external_diff(NULL, name, other, one, null, xfrm_msg, 0);
+ run_diff_cmd(NULL, name, other, one, null, xfrm_msg, 0);
free(null);
null = alloc_filespec(one->path);
- run_external_diff(NULL, name, other, null, two, xfrm_msg, 0);
+ run_diff_cmd(NULL, name, other, null, two, xfrm_msg, 0);
free(null);
}
else
- run_external_diff(pgm, name, other, one, two, xfrm_msg,
- complete_rewrite);
+ run_diff_cmd(pgm, name, other, one, two, xfrm_msg,
+ complete_rewrite);
free(name_munged);
free(other_munged);
diff --git a/diff.h b/diff.h
index 8fac465..a268d16 100644
--- a/diff.h
+++ b/diff.h
@@ -4,17 +4,7 @@
#ifndef DIFF_H
#define DIFF_H
-#define DIFF_FILE_CANON_MODE(mode) \
- (S_ISREG(mode) ? (S_IFREG | ce_permissions(mode)) : \
- S_ISLNK(mode) ? S_IFLNK : S_IFDIR)
-
-struct tree_desc {
- void *buf;
- unsigned long size;
-};
-
-extern void update_tree_entry(struct tree_desc *);
-extern const unsigned char *tree_entry_extract(struct tree_desc *, const char **, unsigned int *);
+#include "tree-walk.h"
struct diff_options;
diff --git a/diffcore-break.c b/diffcore-break.c
index 95b5eb4..ed0e14c 100644
--- a/diffcore-break.c
+++ b/diffcore-break.c
@@ -4,8 +4,6 @@
#include "cache.h"
#include "diff.h"
#include "diffcore.h"
-#include "delta.h"
-#include "count-delta.h"
static int should_break(struct diff_filespec *src,
struct diff_filespec *dst,
@@ -47,9 +45,8 @@ static int should_break(struct diff_filespec *src,
* The value we return is 1 if we want the pair to be broken,
* or 0 if we do not.
*/
- void *delta;
- unsigned long delta_size, base_size, src_copied, literal_added;
- int to_break = 0;
+ unsigned long delta_size, base_size, src_copied, literal_added,
+ src_removed;
*merge_score_p = 0; /* assume no deletion --- "do not break"
* is the default.
@@ -69,47 +66,47 @@ static int should_break(struct diff_filespec *src,
if (base_size < MINIMUM_BREAK_SIZE)
return 0; /* we do not break too small filepair */
- delta = diff_delta(src->data, src->size,
- dst->data, dst->size,
- &delta_size, 0);
- if (!delta)
- return 0; /* error but caught downstream */
+ if (diffcore_count_changes(src->data, src->size,
+ dst->data, dst->size,
+ NULL, NULL,
+ 0,
+ &src_copied, &literal_added))
+ return 0;
- /* Estimate the edit size by interpreting delta. */
- if (count_delta(delta, delta_size,
- &src_copied, &literal_added)) {
- free(delta);
- return 0; /* we cannot tell */
+ /* sanity */
+ if (src->size < src_copied)
+ src_copied = src->size;
+ if (dst->size < literal_added + src_copied) {
+ if (src_copied < dst->size)
+ literal_added = dst->size - src_copied;
+ else
+ literal_added = 0;
}
- free(delta);
+ src_removed = src->size - src_copied;
/* Compute merge-score, which is "how much is removed
* from the source material". The clean-up stage will
* merge the surviving pair together if the score is
* less than the minimum, after rename/copy runs.
*/
- if (src->size <= src_copied)
- ; /* all copied, nothing removed */
- else {
- delta_size = src->size - src_copied;
- *merge_score_p = delta_size * MAX_SCORE / src->size;
- }
-
+ *merge_score_p = src_removed * MAX_SCORE / src->size;
+
/* Extent of damage, which counts both inserts and
* deletes.
*/
- if (src->size + literal_added <= src_copied)
- delta_size = 0; /* avoid wrapping around */
- else
- delta_size = (src->size - src_copied) + literal_added;
-
- /* We break if the edit exceeds the minimum.
- * i.e. (break_score / MAX_SCORE < delta_size / base_size)
+ delta_size = src_removed + literal_added;
+ if (delta_size * MAX_SCORE / base_size < break_score)
+ return 0;
+
+ /* If you removed a lot without adding new material, that is
+ * not really a rewrite.
*/
- if (break_score * base_size < delta_size * MAX_SCORE)
- to_break = 1;
+ if ((src->size * break_score < src_removed * MAX_SCORE) &&
+ (literal_added * 20 < src_removed) &&
+ (literal_added * 20 < src_copied))
+ return 0;
- return to_break;
+ return 1;
}
void diffcore_break(int break_score)
diff --git a/diffcore-delta.c b/diffcore-delta.c
new file mode 100644
index 0000000..7338a40
--- /dev/null
+++ b/diffcore-delta.c
@@ -0,0 +1,213 @@
+#include "cache.h"
+#include "diff.h"
+#include "diffcore.h"
+
+/*
+ * Idea here is very simple.
+ *
+ * We have a total of (sz-N+1) N-byte overlapping sequences in buf whose
+ * size is sz. If the same N-byte sequence appears in both source and
+ * destination, we say the byte that starts that sequence is shared
+ * between them (i.e. copied from source to destination).
+ *
+ * For each possible N-byte sequence, if the source buffer has more
+ * instances of it than the destination buffer, that means the
+ * difference is the number of bytes not copied from source to
+ * destination. If the counts are the same, everything was copied
+ * from source to destination. If the destination has more,
+ * everything was copied, and destination added more.
+ *
+ * We are doing an approximation so we do not really have to waste
+ * memory by actually storing the sequence. We just hash them into
+ * somewhere around 2^16 hash buckets and count the occurrences.
+ *
+ * The length of the sequence is arbitrarily set to 8 for now.
+ */
+
+/* Wild guess at the initial hash size */
+#define INITIAL_HASH_SIZE 9
+
+/* We leave more room in a smaller hash table but do not let it
+ * grow to leave too much unused space.
+ */
+#define INITIAL_FREE(sz_log2) ((1<<(sz_log2))*(sz_log2-3)/(sz_log2))
+
+/* A prime rather carefully chosen between 2^16..2^17, so that
+ * HASHBASE < INITIAL_FREE(17). We want to keep the maximum hashtable
+ * size under the current 2<<17 maximum, which can hold this many
+ * different values before overflowing to a hash table of size 2<<18.
+ */
+#define HASHBASE 107927
+
+struct spanhash {
+ unsigned int hashval;
+ unsigned int cnt;
+};
+struct spanhash_top {
+ int alloc_log2;
+ int free;
+ struct spanhash data[FLEX_ARRAY];
+};
+
+static struct spanhash *spanhash_find(struct spanhash_top *top,
+ unsigned int hashval)
+{
+ int sz = 1 << top->alloc_log2;
+ int bucket = hashval & (sz - 1);
+ while (1) {
+ struct spanhash *h = &(top->data[bucket++]);
+ if (!h->cnt)
+ return NULL;
+ if (h->hashval == hashval)
+ return h;
+ if (sz <= bucket)
+ bucket = 0;
+ }
+}
+
+static struct spanhash_top *spanhash_rehash(struct spanhash_top *orig)
+{
+ struct spanhash_top *new;
+ int i;
+ int osz = 1 << orig->alloc_log2;
+ int sz = osz << 1;
+
+ new = xmalloc(sizeof(*orig) + sizeof(struct spanhash) * sz);
+ new->alloc_log2 = orig->alloc_log2 + 1;
+ new->free = INITIAL_FREE(new->alloc_log2);
+ memset(new->data, 0, sizeof(struct spanhash) * sz);
+ for (i = 0; i < osz; i++) {
+ struct spanhash *o = &(orig->data[i]);
+ int bucket;
+ if (!o->cnt)
+ continue;
+ bucket = o->hashval & (sz - 1);
+ while (1) {
+ struct spanhash *h = &(new->data[bucket++]);
+ if (!h->cnt) {
+ h->hashval = o->hashval;
+ h->cnt = o->cnt;
+ new->free--;
+ break;
+ }
+ if (sz <= bucket)
+ bucket = 0;
+ }
+ }
+ free(orig);
+ return new;
+}
+
+static struct spanhash_top *add_spanhash(struct spanhash_top *top,
+ unsigned int hashval, int cnt)
+{
+ int bucket, lim;
+ struct spanhash *h;
+
+ lim = (1 << top->alloc_log2);
+ bucket = hashval & (lim - 1);
+ while (1) {
+ h = &(top->data[bucket++]);
+ if (!h->cnt) {
+ h->hashval = hashval;
+ h->cnt = cnt;
+ top->free--;
+ if (top->free < 0)
+ return spanhash_rehash(top);
+ return top;
+ }
+ if (h->hashval == hashval) {
+ h->cnt += cnt;
+ return top;
+ }
+ if (lim <= bucket)
+ bucket = 0;
+ }
+}
+
+static struct spanhash_top *hash_chars(unsigned char *buf, unsigned int sz)
+{
+ int i, n;
+ unsigned int accum1, accum2, hashval;
+ struct spanhash_top *hash;
+
+ i = INITIAL_HASH_SIZE;
+ hash = xmalloc(sizeof(*hash) + sizeof(struct spanhash) * (1<<i));
+ hash->alloc_log2 = i;
+ hash->free = INITIAL_FREE(i);
+ memset(hash->data, 0, sizeof(struct spanhash) * (1<<i));
+
+ n = 0;
+ accum1 = accum2 = 0;
+ while (sz) {
+ unsigned int c = *buf++;
+ unsigned int old_1 = accum1;
+ sz--;
+ accum1 = (accum1 << 7) ^ (accum2 >> 25);
+ accum2 = (accum2 << 7) ^ (old_1 >> 25);
+ accum1 += c;
+ if (++n < 64 && c != '\n')
+ continue;
+ hashval = (accum1 + accum2 * 0x61) % HASHBASE;
+ hash = add_spanhash(hash, hashval, n);
+ n = 0;
+ accum1 = accum2 = 0;
+ }
+ return hash;
+}
+
+int diffcore_count_changes(void *src, unsigned long src_size,
+ void *dst, unsigned long dst_size,
+ void **src_count_p,
+ void **dst_count_p,
+ unsigned long delta_limit,
+ unsigned long *src_copied,
+ unsigned long *literal_added)
+{
+ int i, ssz;
+ struct spanhash_top *src_count, *dst_count;
+ unsigned long sc, la;
+
+ src_count = dst_count = NULL;
+ if (src_count_p)
+ src_count = *src_count_p;
+ if (!src_count) {
+ src_count = hash_chars(src, src_size);
+ if (src_count_p)
+ *src_count_p = src_count;
+ }
+ if (dst_count_p)
+ dst_count = *dst_count_p;
+ if (!dst_count) {
+ dst_count = hash_chars(dst, dst_size);
+ if (dst_count_p)
+ *dst_count_p = dst_count;
+ }
+ sc = la = 0;
+
+ ssz = 1 << src_count->alloc_log2;
+ for (i = 0; i < ssz; i++) {
+ struct spanhash *s = &(src_count->data[i]);
+ struct spanhash *d;
+ unsigned dst_cnt, src_cnt;
+ if (!s->cnt)
+ continue;
+ src_cnt = s->cnt;
+ d = spanhash_find(dst_count, s->hashval);
+ dst_cnt = d ? d->cnt : 0;
+ if (src_cnt < dst_cnt) {
+ la += dst_cnt - src_cnt;
+ sc += src_cnt;
+ }
+ else
+ sc += dst_cnt;
+ }
+
+ if (!src_count_p)
+ free(src_count);
+ if (!dst_count_p)
+ free(dst_count);
+ *src_copied = sc;
+ *literal_added = la;
+ return 0;
+}
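The new diffcore_count_changes() trades the exact delta-based damage estimate for an approximation: each buffer is chopped into runs of at most 64 bytes (or up to a newline), each run is hashed into one of HASHBASE buckets along with its length, and the per-bucket byte counts of source and destination are compared. With hypothetical numbers, a bucket holding 120 source bytes but only 80 destination bytes contributes 80 copied bytes; a bucket with 10 source bytes and 40 destination bytes contributes 10 copied and 30 literally added bytes. The per-bucket rule, extracted here purely for illustration, is:

	/* Illustrative only: the per-bucket comparison rule applied by
	 * diffcore_count_changes() when accumulating src_copied and
	 * literal_added.
	 */
	static void count_bucket(unsigned src_cnt, unsigned dst_cnt,
				 unsigned long *copied, unsigned long *added)
	{
		if (src_cnt < dst_cnt) {
			*copied += src_cnt;		/* every source byte found a match */
			*added  += dst_cnt - src_cnt;	/* the surplus is new material */
		} else {
			*copied += dst_cnt;		/* only as much as dst can absorb */
		}
	}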
diff --git a/diffcore-rename.c b/diffcore-rename.c
index ffd126a..e992698 100644
--- a/diffcore-rename.c
+++ b/diffcore-rename.c
@@ -4,8 +4,6 @@
#include "cache.h"
#include "diff.h"
#include "diffcore.h"
-#include "delta.h"
-#include "count-delta.h"
/* Table of rename/copy destinations */
@@ -135,8 +133,7 @@ static int estimate_similarity(struct diff_filespec *src,
* match than anything else; the destination does not even
* call into this function in that case.
*/
- void *delta;
- unsigned long delta_size, base_size, src_copied, literal_added;
+ unsigned long max_size, delta_size, base_size, src_copied, literal_added;
unsigned long delta_limit;
int score;
@@ -147,9 +144,9 @@ static int estimate_similarity(struct diff_filespec *src,
if (!S_ISREG(src->mode) || !S_ISREG(dst->mode))
return 0;
- delta_size = ((src->size < dst->size) ?
- (dst->size - src->size) : (src->size - dst->size));
+ max_size = ((src->size > dst->size) ? src->size : dst->size);
base_size = ((src->size < dst->size) ? src->size : dst->size);
+ delta_size = max_size - base_size;
/* We would not consider edits that change the file size so
* drastically. delta_size must be smaller than
@@ -165,42 +162,22 @@ static int estimate_similarity(struct diff_filespec *src,
if (diff_populate_filespec(src, 0) || diff_populate_filespec(dst, 0))
return 0; /* error but caught downstream */
+
delta_limit = base_size * (MAX_SCORE-minimum_score) / MAX_SCORE;
- delta = diff_delta(src->data, src->size,
- dst->data, dst->size,
- &delta_size, delta_limit);
- if (!delta)
- /* If delta_limit is exceeded, we have too much differences */
+ if (diffcore_count_changes(src->data, src->size,
+ dst->data, dst->size,
+ &src->cnt_data, &dst->cnt_data,
+ delta_limit,
+ &src_copied, &literal_added))
return 0;
- /* A delta that has a lot of literal additions would have
- * big delta_size no matter what else it does.
+ /* How similar are they?
+ * what percentage of material in dst is from the source?
*/
- if (base_size * (MAX_SCORE-minimum_score) < delta_size * MAX_SCORE) {
- free(delta);
- return 0;
- }
-
- /* Estimate the edit size by interpreting delta. */
- if (count_delta(delta, delta_size, &src_copied, &literal_added)) {
- free(delta);
- return 0;
- }
- free(delta);
-
- /* Extent of damage */
- if (src->size + literal_added < src_copied)
- delta_size = 0;
+ if (!dst->size)
+ score = 0; /* should not happen */
else
- delta_size = (src->size - src_copied) + literal_added;
-
- /*
- * Now we will give some score to it. 100% edit gets 0 points
- * and 0% edit gets MAX_SCORE points.
- */
- score = MAX_SCORE - (MAX_SCORE * delta_size / base_size);
- if (score < 0) return 0;
- if (MAX_SCORE < score) return MAX_SCORE;
+ score = src_copied * MAX_SCORE / max_size;
return score;
}
@@ -328,6 +305,8 @@ void diffcore_rename(struct diff_options *options)
m->score = estimate_similarity(one, two,
minimum_score);
}
+ /* We do not need the text anymore */
+ diff_free_filespec_data(two);
dst_cnt++;
}
/* cost matrix sorted by most to least similar pair */
diff --git a/diffcore.h b/diffcore.h
index 12cd816..73c7842 100644
--- a/diffcore.h
+++ b/diffcore.h
@@ -17,8 +17,8 @@
*/
#define MAX_SCORE 60000.0
#define DEFAULT_RENAME_SCORE 30000 /* rename/copy similarity minimum (50%) */
-#define DEFAULT_BREAK_SCORE 30000 /* minimum for break to happen (50%)*/
-#define DEFAULT_MERGE_SCORE 48000 /* maximum for break-merge to happen (80%)*/
+#define DEFAULT_BREAK_SCORE 30000 /* minimum for break to happen (50%) */
+#define DEFAULT_MERGE_SCORE 36000 /* maximum for break-merge to happen (60%) */
#define MINIMUM_BREAK_SIZE 400 /* do not break a file smaller than this */
@@ -26,6 +26,7 @@ struct diff_filespec {
unsigned char sha1[20];
char *path;
void *data;
+ void *cnt_data;
unsigned long size;
int xfrm_flags; /* for use by the xfrm */
unsigned short mode; /* file mode */
@@ -101,4 +102,12 @@ void diff_debug_queue(const char *, struct diff_queue_struct *);
#define diff_debug_queue(a,b) do {} while(0)
#endif
+extern int diffcore_count_changes(void *src, unsigned long src_size,
+ void *dst, unsigned long dst_size,
+ void **src_count_p,
+ void **dst_count_p,
+ unsigned long delta_limit,
+ unsigned long *src_copied,
+ unsigned long *literal_added);
+
#endif
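All of these scores live on the 0..MAX_SCORE (60000) scale, so DEFAULT_BREAK_SCORE 30000 means 50% and the new DEFAULT_MERGE_SCORE 36000 means 60%. A worked example with made-up sizes, following the logic in diffcore-break.c above: with src->size = 1000, src_copied = 550 and dst->size = 800, we get src_removed = 450, literal_added = 250, base_size = 800 and delta_size = 700; the damage ratio is 700 * 60000 / 800 = 52500, which is at least 30000, so the pair is eligible for breaking, while the merge score 450 * 60000 / 1000 = 27000 (45%) stays below 36000, so the broken halves would be merged back together after rename/copy detection.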
diff --git a/entry.c b/entry.c
index 6c47c3a..793724f 100644
--- a/entry.c
+++ b/entry.c
@@ -1,6 +1,7 @@
#include <sys/types.h>
#include <dirent.h>
#include "cache.h"
+#include "blob.h"
static void create_directories(const char *path, struct checkout *state)
{
@@ -63,7 +64,7 @@ static int create_file(const char *path, unsigned int mode)
return open(path, O_WRONLY | O_CREAT | O_EXCL, mode);
}
-static int write_entry(struct cache_entry *ce, const char *path, struct checkout *state)
+static int write_entry(struct cache_entry *ce, char *path, struct checkout *state, int to_tempfile)
{
int fd;
void *new;
@@ -72,7 +73,7 @@ static int write_entry(struct cache_entry *ce, const char *path, struct checkout
char type[20];
new = read_sha1_file(ce->sha1, type, &size);
- if (!new || strcmp(type, "blob")) {
+ if (!new || strcmp(type, blob_type)) {
if (new)
free(new);
return error("git-checkout-index: unable to read sha1 file of %s (%s)",
@@ -80,7 +81,11 @@ static int write_entry(struct cache_entry *ce, const char *path, struct checkout
}
switch (ntohl(ce->ce_mode) & S_IFMT) {
case S_IFREG:
- fd = create_file(path, ntohl(ce->ce_mode));
+ if (to_tempfile) {
+ strcpy(path, ".merge_file_XXXXXX");
+ fd = mkstemp(path);
+ } else
+ fd = create_file(path, ntohl(ce->ce_mode));
if (fd < 0) {
free(new);
return error("git-checkout-index: unable to create file %s (%s)",
@@ -93,12 +98,27 @@ static int write_entry(struct cache_entry *ce, const char *path, struct checkout
return error("git-checkout-index: unable to write file %s", path);
break;
case S_IFLNK:
- if (symlink(new, path)) {
+ if (to_tempfile) {
+ strcpy(path, ".merge_link_XXXXXX");
+ fd = mkstemp(path);
+ if (fd < 0) {
+ free(new);
+ return error("git-checkout-index: unable to create "
+ "file %s (%s)", path, strerror(errno));
+ }
+ wrote = write(fd, new, size);
+ close(fd);
+ free(new);
+ if (wrote != size)
+ return error("git-checkout-index: unable to write file %s",
+ path);
+ } else {
+ wrote = symlink(new, path);
free(new);
- return error("git-checkout-index: unable to create "
- "symlink %s (%s)", path, strerror(errno));
+ if (wrote)
+ return error("git-checkout-index: unable to create "
+ "symlink %s (%s)", path, strerror(errno));
}
- free(new);
break;
default:
free(new);
@@ -113,17 +133,20 @@ static int write_entry(struct cache_entry *ce, const char *path, struct checkout
return 0;
}
-int checkout_entry(struct cache_entry *ce, struct checkout *state)
+int checkout_entry(struct cache_entry *ce, struct checkout *state, char *topath)
{
- struct stat st;
static char path[MAXPATHLEN+1];
+ struct stat st;
int len = state->base_dir_len;
+ if (topath)
+ return write_entry(ce, topath, state, 1);
+
memcpy(path, state->base_dir, len);
strcpy(path + len, ce->name);
if (!lstat(path, &st)) {
- unsigned changed = ce_match_stat(ce, &st);
+ unsigned changed = ce_match_stat(ce, &st, 1);
if (!changed)
return 0;
if (!state->force) {
@@ -144,10 +167,10 @@ int checkout_entry(struct cache_entry *ce, struct checkout *state)
return error("%s is a directory", path);
remove_subtree(path);
}
- } else if (state->not_new)
+ } else if (state->not_new)
return 0;
create_directories(path, state);
- return write_entry(ce, path, state);
+ return write_entry(ce, path, state, 0);
}
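checkout_entry() grows a third parameter: when topath is non-NULL, write_entry() ignores the path recorded in the index and instead writes the blob to a mkstemp() temporary file, storing the generated name back into the caller's buffer. A hedged caller-side sketch (the buffer size, 'ce' and the error handling are illustrative, not taken from the patch):

	/* Illustrative only: check a cache entry out to a temporary file
	 * instead of its work tree path; mkstemp() fills in the XXXXXX part.
	 */
	char tmpfile[64];	/* must hold ".merge_file_XXXXXX" plus NUL */
	struct checkout state;
	memset(&state, 0, sizeof(state));
	if (checkout_entry(ce, &state, tmpfile) < 0)
		return error("could not write temporary file");
	/* tmpfile now names something like ".merge_file_Ab12Cd" */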
diff --git a/environment.c b/environment.c
index 73e4b1c..6df6478 100644
--- a/environment.c
+++ b/environment.c
@@ -12,7 +12,9 @@
char git_default_email[MAX_GITNAME];
char git_default_name[MAX_GITNAME];
int trust_executable_bit = 1;
+int assume_unchanged = 0;
int only_use_symrefs = 0;
+int warn_ambiguous_refs = 1;
int repository_format_version = 0;
char git_commit_encoding[MAX_ENCODING_LENGTH] = "utf-8";
int shared_repository = 0;
diff --git a/epoch.c b/epoch.c
deleted file mode 100644
index 3a76748..0000000
--- a/epoch.c
+++ /dev/null
@@ -1,639 +0,0 @@
-/*
- * Copyright (c) 2005, Jon Seymour
- *
- * For more information about epoch theory on which this module is based,
- * refer to http://blackcubes.dyndns.org/epoch/. That web page defines
- * terms such as "epoch" and "minimal, non-linear epoch" and provides rationales
- * for some of the algorithms used here.
- *
- */
-#include <stdlib.h>
-
-/* Provides arbitrary precision integers required to accurately represent
- * fractional mass: */
-#include <openssl/bn.h>
-
-#include "cache.h"
-#include "commit.h"
-#include "epoch.h"
-
-struct fraction {
- BIGNUM numerator;
- BIGNUM denominator;
-};
-
-#define HAS_EXACTLY_ONE_PARENT(n) ((n)->parents && !(n)->parents->next)
-
-static BN_CTX *context = NULL;
-static struct fraction *one = NULL;
-static struct fraction *zero = NULL;
-
-static BN_CTX *get_BN_CTX(void)
-{
- if (!context) {
- context = BN_CTX_new();
- }
- return context;
-}
-
-static struct fraction *new_zero(void)
-{
- struct fraction *result = xmalloc(sizeof(*result));
- BN_init(&result->numerator);
- BN_init(&result->denominator);
- BN_zero(&result->numerator);
- BN_one(&result->denominator);
- return result;
-}
-
-static void clear_fraction(struct fraction *fraction)
-{
- BN_clear(&fraction->numerator);
- BN_clear(&fraction->denominator);
-}
-
-static struct fraction *divide(struct fraction *result, struct fraction *fraction, int divisor)
-{
- BIGNUM bn_divisor;
-
- BN_init(&bn_divisor);
- BN_set_word(&bn_divisor, divisor);
-
- BN_copy(&result->numerator, &fraction->numerator);
- BN_mul(&result->denominator, &fraction->denominator, &bn_divisor, get_BN_CTX());
-
- BN_clear(&bn_divisor);
- return result;
-}
-
-static struct fraction *init_fraction(struct fraction *fraction)
-{
- BN_init(&fraction->numerator);
- BN_init(&fraction->denominator);
- BN_zero(&fraction->numerator);
- BN_one(&fraction->denominator);
- return fraction;
-}
-
-static struct fraction *get_one(void)
-{
- if (!one) {
- one = new_zero();
- BN_one(&one->numerator);
- }
- return one;
-}
-
-static struct fraction *get_zero(void)
-{
- if (!zero) {
- zero = new_zero();
- }
- return zero;
-}
-
-static struct fraction *copy(struct fraction *to, struct fraction *from)
-{
- BN_copy(&to->numerator, &from->numerator);
- BN_copy(&to->denominator, &from->denominator);
- return to;
-}
-
-static struct fraction *add(struct fraction *result, struct fraction *left, struct fraction *right)
-{
- BIGNUM a, b, gcd;
-
- BN_init(&a);
- BN_init(&b);
- BN_init(&gcd);
-
- BN_mul(&a, &left->numerator, &right->denominator, get_BN_CTX());
- BN_mul(&b, &left->denominator, &right->numerator, get_BN_CTX());
- BN_mul(&result->denominator, &left->denominator, &right->denominator, get_BN_CTX());
- BN_add(&result->numerator, &a, &b);
-
- BN_gcd(&gcd, &result->denominator, &result->numerator, get_BN_CTX());
- BN_div(&result->denominator, NULL, &result->denominator, &gcd, get_BN_CTX());
- BN_div(&result->numerator, NULL, &result->numerator, &gcd, get_BN_CTX());
-
- BN_clear(&a);
- BN_clear(&b);
- BN_clear(&gcd);
-
- return result;
-}
-
-static int compare(struct fraction *left, struct fraction *right)
-{
- BIGNUM a, b;
- int result;
-
- BN_init(&a);
- BN_init(&b);
-
- BN_mul(&a, &left->numerator, &right->denominator, get_BN_CTX());
- BN_mul(&b, &left->denominator, &right->numerator, get_BN_CTX());
-
- result = BN_cmp(&a, &b);
-
- BN_clear(&a);
- BN_clear(&b);
-
- return result;
-}
-
-struct mass_counter {
- struct fraction seen;
- struct fraction pending;
-};
-
-static struct mass_counter *new_mass_counter(struct commit *commit, struct fraction *pending)
-{
- struct mass_counter *mass_counter = xmalloc(sizeof(*mass_counter));
- memset(mass_counter, 0, sizeof(*mass_counter));
-
- init_fraction(&mass_counter->seen);
- init_fraction(&mass_counter->pending);
-
- copy(&mass_counter->pending, pending);
- copy(&mass_counter->seen, get_zero());
-
- if (commit->object.util) {
- die("multiple attempts to initialize mass counter for %s",
- sha1_to_hex(commit->object.sha1));
- }
-
- commit->object.util = mass_counter;
-
- return mass_counter;
-}
-
-static void free_mass_counter(struct mass_counter *counter)
-{
- clear_fraction(&counter->seen);
- clear_fraction(&counter->pending);
- free(counter);
-}
-
-/*
- * Finds the base commit of a list of commits.
- *
- * One property of the commit being searched for is that every commit reachable
- * from the base commit is reachable from the commits in the starting list only
- * via paths that include the base commit.
- *
- * This algorithm uses a conservation of mass approach to find the base commit.
- *
- * We start by injecting one unit of mass into the graph at each
- * of the commits in the starting list. Injecting mass into a commit
- * is achieved by adding to its pending mass counter and, if it is not already
- * enqueued, enqueuing the commit in a list of pending commits, in latest
- * commit date first order.
- *
- * The algorithm then proceeds to visit each commit in the pending queue.
- * Upon each visit, the pending mass is added to the mass already seen for that
- * commit and then divided into N equal portions, where N is the number of
- * parents of the commit being visited. The divided portions are then injected
- * into each of the parents.
- *
- * The algorithm continues until we discover a commit which has seen all the
- * mass originally injected or until we run out of things to do.
- *
- * If we find a commit that has seen all the original mass, we have found
- * the common base of all the commits in the starting list.
- *
- * The algorithm does _not_ depend on accurate timestamps for correct operation.
- * However, reasonably sane (e.g. non-random) timestamps are required in order
- * to prevent an exponential performance characteristic. The occasional
- * timestamp inaccuracy will not dramatically affect performance but may
- * result in more nodes being processed than strictly necessary.
- *
- * This procedure sets *boundary to the address of the base commit. It returns
- * non-zero if, and only if, there was a problem parsing one of the
- * commits discovered during the traversal.
- */
-static int find_base_for_list(struct commit_list *list, struct commit **boundary)
-{
- int ret = 0;
- struct commit_list *cleaner = NULL;
- struct commit_list *pending = NULL;
- struct fraction injected;
- init_fraction(&injected);
- *boundary = NULL;
-
- for (; list; list = list->next) {
- struct commit *item = list->item;
-
- if (!item->object.util) {
- new_mass_counter(list->item, get_one());
- add(&injected, &injected, get_one());
-
- commit_list_insert(list->item, &cleaner);
- commit_list_insert(list->item, &pending);
- }
- }
-
- while (!*boundary && pending && !ret) {
- struct commit *latest = pop_commit(&pending);
- struct mass_counter *latest_node = (struct mass_counter *) latest->object.util;
- int num_parents;
-
- if ((ret = parse_commit(latest)))
- continue;
- add(&latest_node->seen, &latest_node->seen, &latest_node->pending);
-
- num_parents = count_parents(latest);
- if (num_parents) {
- struct fraction distribution;
- struct commit_list *parents;
-
- divide(init_fraction(&distribution), &latest_node->pending, num_parents);
-
- for (parents = latest->parents; parents; parents = parents->next) {
- struct commit *parent = parents->item;
- struct mass_counter *parent_node = (struct mass_counter *) parent->object.util;
-
- if (!parent_node) {
- parent_node = new_mass_counter(parent, &distribution);
- insert_by_date(parent, &pending);
- commit_list_insert(parent, &cleaner);
- } else {
- if (!compare(&parent_node->pending, get_zero()))
- insert_by_date(parent, &pending);
- add(&parent_node->pending, &parent_node->pending, &distribution);
- }
- }
-
- clear_fraction(&distribution);
- }
-
- if (!compare(&latest_node->seen, &injected))
- *boundary = latest;
- copy(&latest_node->pending, get_zero());
- }
-
- while (cleaner) {
- struct commit *next = pop_commit(&cleaner);
- free_mass_counter((struct mass_counter *) next->object.util);
- next->object.util = NULL;
- }
-
- if (pending)
- free_commit_list(pending);
-
- clear_fraction(&injected);
- return ret;
-}
-
-
-/*
- * Finds the base of an minimal, non-linear epoch, headed at head, by
- * applying the find_base_for_list to a list consisting of the parents
- */
-static int find_base(struct commit *head, struct commit **boundary)
-{
- int ret = 0;
- struct commit_list *pending = NULL;
- struct commit_list *next;
-
- for (next = head->parents; next; next = next->next) {
- commit_list_insert(next->item, &pending);
- }
- ret = find_base_for_list(pending, boundary);
- free_commit_list(pending);
-
- return ret;
-}
-
-/*
- * This procedure traverses to the boundary of the first epoch in the epoch
- * sequence of the epoch headed at head_of_epoch. This is either the end of
- * the maximal linear epoch or the base of a minimal non-linear epoch.
- *
- * The queue of pending nodes is sorted in reverse date order and each node
- * is currently in the queue at most once.
- */
-static int find_next_epoch_boundary(struct commit *head_of_epoch, struct commit **boundary)
-{
- int ret;
- struct commit *item = head_of_epoch;
-
- ret = parse_commit(item);
- if (ret)
- return ret;
-
- if (HAS_EXACTLY_ONE_PARENT(item)) {
- /*
- * We are at the start of a maximimal linear epoch.
- * Traverse to the end.
- */
- while (HAS_EXACTLY_ONE_PARENT(item) && !ret) {
- item = item->parents->item;
- ret = parse_commit(item);
- }
- *boundary = item;
-
- } else {
- /*
- * Otherwise, we are at the start of a minimal, non-linear
- * epoch - find the common base of all parents.
- */
- ret = find_base(item, boundary);
- }
-
- return ret;
-}
-
-/*
- * Returns non-zero if parent is known to be a parent of child.
- */
-static int is_parent_of(struct commit *parent, struct commit *child)
-{
- struct commit_list *parents;
- for (parents = child->parents; parents; parents = parents->next) {
- if (!memcmp(parent->object.sha1, parents->item->object.sha1,
- sizeof(parents->item->object.sha1)))
- return 1;
- }
- return 0;
-}
-
-/*
- * Pushes an item onto the merge order stack. If the top of the stack is
- * marked as being a possible "break", we check to see whether it actually
- * is a break.
- */
-static void push_onto_merge_order_stack(struct commit_list **stack, struct commit *item)
-{
- struct commit_list *top = *stack;
- if (top && (top->item->object.flags & DISCONTINUITY)) {
- if (is_parent_of(top->item, item)) {
- top->item->object.flags &= ~DISCONTINUITY;
- }
- }
- commit_list_insert(item, stack);
-}
-
-/*
- * Marks all interesting, visited commits reachable from this commit
- * as uninteresting. We stop recursing when we reach the epoch boundary,
- * an unvisited node or a node that has already been marking uninteresting.
- *
- * This doesn't actually mark all ancestors between the start node and the
- * epoch boundary uninteresting, but does ensure that they will eventually
- * be marked uninteresting when the main sort_first_epoch() traversal
- * eventually reaches them.
- */
-static void mark_ancestors_uninteresting(struct commit *commit)
-{
- unsigned int flags = commit->object.flags;
- int visited = flags & VISITED;
- int boundary = flags & BOUNDARY;
- int uninteresting = flags & UNINTERESTING;
- struct commit_list *next;
-
- commit->object.flags |= UNINTERESTING;
-
- /*
- * We only need to recurse if
- * we are not on the boundary and
- * we have not already been marked uninteresting and
- * we have already been visited.
- *
- * The main sort_first_epoch traverse will mark unreachable
- * all uninteresting, unvisited parents as they are visited
- * so there is no need to duplicate that traversal here.
- *
- * Similarly, if we are already marked uninteresting
- * then either all ancestors have already been marked
- * uninteresting or will be once the sort_first_epoch
- * traverse reaches them.
- */
-
- if (uninteresting || boundary || !visited)
- return;
-
- for (next = commit->parents; next; next = next->next)
- mark_ancestors_uninteresting(next->item);
-}
-
-/*
- * Sorts the nodes of the first epoch of the epoch sequence of the epoch headed at head
- * into merge order.
- */
-static void sort_first_epoch(struct commit *head, struct commit_list **stack)
-{
- struct commit_list *parents;
-
- head->object.flags |= VISITED;
-
- /*
- * TODO: By sorting the parents in a different order, we can alter the
- * merge order to show contemporaneous changes in parallel branches
- * occurring after "local" changes. This is useful for a developer
- * when a developer wants to see all changes that were incorporated
- * into the same merge as her own changes occur after her own
- * changes.
- */
-
- for (parents = head->parents; parents; parents = parents->next) {
- struct commit *parent = parents->item;
-
- if (head->object.flags & UNINTERESTING) {
- /*
- * Propagates the uninteresting bit to all parents.
- * if we have already visited this parent, then
- * the uninteresting bit will be propagated to each
- * reachable commit that is still not marked
- * uninteresting and won't otherwise be reached.
- */
- mark_ancestors_uninteresting(parent);
- }
-
- if (!(parent->object.flags & VISITED)) {
- if (parent->object.flags & BOUNDARY) {
- if (*stack) {
- die("something else is on the stack - %s",
- sha1_to_hex((*stack)->item->object.sha1));
- }
- push_onto_merge_order_stack(stack, parent);
- parent->object.flags |= VISITED;
-
- } else {
- sort_first_epoch(parent, stack);
- if (parents) {
- /*
- * This indicates a possible
- * discontinuity it may not be be
- * actual discontinuity if the head
- * of parent N happens to be the tail
- * of parent N+1.
- *
- * The next push onto the stack will
- * resolve the question.
- */
- (*stack)->item->object.flags |= DISCONTINUITY;
- }
- }
- }
- }
-
- push_onto_merge_order_stack(stack, head);
-}
-
-/*
- * Emit the contents of the stack.
- *
- * The stack is freed and replaced by NULL.
- *
- * Sets the return value to STOP if no further output should be generated.
- */
-static int emit_stack(struct commit_list **stack, emitter_func emitter, int include_last)
-{
- unsigned int seen = 0;
- int action = CONTINUE;
-
- while (*stack && (action != STOP)) {
- struct commit *next = pop_commit(stack);
- seen |= next->object.flags;
- if (*stack || include_last) {
- if (!*stack)
- next->object.flags |= BOUNDARY;
- action = emitter(next);
- }
- }
-
- if (*stack) {
- free_commit_list(*stack);
- *stack = NULL;
- }
-
- return (action == STOP || (seen & UNINTERESTING)) ? STOP : CONTINUE;
-}
-
-/*
- * Sorts an arbitrary epoch into merge order by sorting each epoch
- * of its epoch sequence into order.
- *
- * Note: this algorithm currently leaves traces of its execution in the
- * object flags of nodes it discovers. This should probably be fixed.
- */
-static int sort_in_merge_order(struct commit *head_of_epoch, emitter_func emitter)
-{
- struct commit *next = head_of_epoch;
- int ret = 0;
- int action = CONTINUE;
-
- ret = parse_commit(head_of_epoch);
-
- next->object.flags |= BOUNDARY;
-
- while (next && next->parents && !ret && (action != STOP)) {
- struct commit *base = NULL;
-
- ret = find_next_epoch_boundary(next, &base);
- if (ret)
- return ret;
- next->object.flags |= BOUNDARY;
- if (base)
- base->object.flags |= BOUNDARY;
-
- if (HAS_EXACTLY_ONE_PARENT(next)) {
- while (HAS_EXACTLY_ONE_PARENT(next)
- && (action != STOP)
- && !ret) {
- if (next->object.flags & UNINTERESTING) {
- action = STOP;
- } else {
- action = emitter(next);
- }
- if (action != STOP) {
- next = next->parents->item;
- ret = parse_commit(next);
- }
- }
-
- } else {
- struct commit_list *stack = NULL;
- sort_first_epoch(next, &stack);
- action = emit_stack(&stack, emitter, (base == NULL));
- next = base;
- }
- }
-
- if (next && (action != STOP) && !ret) {
- emitter(next);
- }
-
- return ret;
-}
-
-/*
- * Sorts the nodes reachable from a starting list in merge order, we
- * first find the base for the starting list and then sort all nodes
- * in this subgraph using the sort_first_epoch algorithm. Once we have
- * reached the base we can continue sorting using sort_in_merge_order.
- */
-int sort_list_in_merge_order(struct commit_list *list, emitter_func emitter)
-{
- struct commit_list *stack = NULL;
- struct commit *base;
- int ret = 0;
- int action = CONTINUE;
- struct commit_list *reversed = NULL;
-
- for (; list; list = list->next)
- commit_list_insert(list->item, &reversed);
-
- if (!reversed)
- return ret;
- else if (!reversed->next) {
- /*
- * If there is only one element in the list, we can sort it
- * using sort_in_merge_order.
- */
- base = reversed->item;
- } else {
- /*
- * Otherwise, we search for the base of the list.
- */
- ret = find_base_for_list(reversed, &base);
- if (ret)
- return ret;
- if (base)
- base->object.flags |= BOUNDARY;
-
- while (reversed) {
- struct commit * next = pop_commit(&reversed);
-
- if (!(next->object.flags & VISITED) && next!=base) {
- sort_first_epoch(next, &stack);
- if (reversed) {
- /*
- * If we have more commits
- * to push, then the first
- * push for the next parent may
-				 * (or may not) represent a
- * discontinuity with respect
- * to the parent currently on
- * the top of the stack.
- *
- * Mark it for checking here,
- * and check it with the next
- * push. See sort_first_epoch()
- * for more details.
- */
- stack->item->object.flags |= DISCONTINUITY;
- }
- }
- }
-
- action = emit_stack(&stack, emitter, (base==NULL));
- }
-
- if (base && (action != STOP)) {
- ret = sort_in_merge_order(base, emitter);
- }
-
- return ret;
-}
diff --git a/epoch.h b/epoch.h
deleted file mode 100644
index 7493d5a..0000000
--- a/epoch.h
+++ /dev/null
@@ -1,21 +0,0 @@
-#ifndef EPOCH_H
-#define EPOCH_H
-
-
-// return codes for emitter_func
-#define STOP 0
-#define CONTINUE 1
-#define DO 2
-typedef int (*emitter_func) (struct commit *);
-
-int sort_list_in_merge_order(struct commit_list *list, emitter_func emitter);
-
-/* Low bits are used by rev-list */
-#define UNINTERESTING (1u<<10)
-#define BOUNDARY (1u<<11)
-#define VISITED (1u<<12)
-#define DISCONTINUITY (1u<<13)
-#define LAST_EPOCH_FLAG (1u<<14)
-
-
-#endif /* EPOCH_H */
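
The epoch API removed above is driven by a caller-supplied callback: sort_list_in_merge_order() walks the commit graph and hands each commit to an emitter_func, whose CONTINUE/STOP return value decides whether output keeps flowing. A minimal sketch of such a callback, assuming the struct commit and sha1_to_hex() declarations used elsewhere in this patch (the list variable is hypothetical):

	static int show_commit(struct commit *commit)
	{
		/* Print the commit, and stop once an uninteresting one is seen. */
		printf("%s\n", sha1_to_hex(commit->object.sha1));
		return (commit->object.flags & UNINTERESTING) ? STOP : CONTINUE;
	}

	/* ... given a struct commit_list *list of starting points ... */
	sort_list_in_merge_order(list, show_commit);
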
diff --git a/exec_cmd.c b/exec_cmd.c
index 55af33b..590e738 100644
--- a/exec_cmd.c
+++ b/exec_cmd.c
@@ -13,7 +13,7 @@ void git_set_exec_path(const char *exec_path)
/* Returns the highest-priority, location to look for git programs. */
-const char *git_exec_path()
+const char *git_exec_path(void)
{
const char *env;
@@ -29,17 +29,18 @@ const char *git_exec_path()
}
-int execv_git_cmd(char **argv)
+int execv_git_cmd(const char **argv)
{
char git_command[PATH_MAX + 1];
- char *tmp;
int len, err, i;
const char *paths[] = { current_exec_path,
getenv("GIT_EXEC_PATH"),
builtin_exec_path };
- for (i = 0; i < sizeof(paths)/sizeof(paths[0]); ++i) {
+ for (i = 0; i < ARRAY_SIZE(paths); ++i) {
const char *exec_dir = paths[i];
+ const char *tmp;
+
if (!exec_dir) continue;
if (*exec_dir != '/') {
@@ -82,7 +83,7 @@ int execv_git_cmd(char **argv)
argv[0] = git_command;
/* execve() can only ever return if it fails */
- execve(git_command, argv, environ);
+ execve(git_command, (char **)argv, environ);
err = errno;
@@ -93,11 +94,11 @@ int execv_git_cmd(char **argv)
}
-int execl_git_cmd(char *cmd,...)
+int execl_git_cmd(const char *cmd,...)
{
int argc;
- char *argv[MAX_ARGS + 1];
- char *arg;
+ const char *argv[MAX_ARGS + 1];
+ const char *arg;
va_list param;
va_start(param, cmd);
diff --git a/exec_cmd.h b/exec_cmd.h
index 5150ee2..989621f 100644
--- a/exec_cmd.h
+++ b/exec_cmd.h
@@ -3,8 +3,8 @@
extern void git_set_exec_path(const char *exec_path);
extern const char* git_exec_path(void);
-extern int execv_git_cmd(char **argv); /* NULL terminated */
-extern int execl_git_cmd(char *cmd, ...);
+extern int execv_git_cmd(const char **argv); /* NULL terminated */
+extern int execl_git_cmd(const char *cmd, ...);
#endif /* __GIT_EXEC_CMD_H_ */
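
With the constness change above, a caller can pass string literals in a NULL-terminated array without casts. A small sketch (the subcommand and arguments are only illustrative):

	const char *argv[] = { "log", "--pretty=oneline", NULL };

	execv_git_cmd(argv);	/* returns only if the exec itself failed */

The variadic form takes the same arguments inline, again ending with NULL:

	execl_git_cmd("log", "--pretty=oneline", NULL);
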
diff --git a/fetch-pack.c b/fetch-pack.c
index aa6f42a..a3bcad0 100644
--- a/fetch-pack.c
+++ b/fetch-pack.c
@@ -7,8 +7,9 @@
static int keep_pack;
static int quiet;
static int verbose;
+static int fetch_all;
static const char fetch_pack_usage[] =
-"git-fetch-pack [-q] [-v] [-k] [--exec=upload-pack] [host:]directory <refs>...";
+"git-fetch-pack [--all] [-q] [-v] [-k] [--thin] [--exec=upload-pack] [host:]directory <refs>...";
static const char *exec = "git-upload-pack";
#define COMPLETE (1U << 0)
@@ -18,7 +19,7 @@ static const char *exec = "git-upload-pack";
#define POPPED (1U << 4)
static struct commit_list *rev_list = NULL;
-static int non_common_revs = 0, multi_ack = 0;
+static int non_common_revs = 0, multi_ack = 0, use_thin_pack = 0;
static void rev_list_push(struct commit *commit, int mark)
{
@@ -82,7 +83,7 @@ static void mark_common(struct commit *commit,
Get the next rev to send, ignoring the common.
*/
-static const unsigned char* get_rev()
+static const unsigned char* get_rev(void)
{
struct commit *commit = NULL;
@@ -156,8 +157,9 @@ static int find_common(int fd[2], unsigned char *result_sha1,
continue;
}
- packet_write(fd[1], "want %s%s\n", sha1_to_hex(remote),
- multi_ack ? " multi_ack" : "");
+ packet_write(fd[1], "want %s%s%s\n", sha1_to_hex(remote),
+ (multi_ack ? " multi_ack" : ""),
+ (use_thin_pack ? " thin-pack" : ""));
fetching++;
}
packet_flush(fd[1]);
@@ -265,8 +267,9 @@ static void filter_refs(struct ref **refs, int nr_match, char **match)
for (prev = NULL, current = *refs; current; current = next) {
next = current->next;
if ((!memcmp(current->name, "refs/", 5) &&
- check_ref_format(current->name + 5)) ||
- !path_match(current->name, nr_match, match)) {
+ check_ref_format(current->name + 5)) ||
+ (!fetch_all &&
+ !path_match(current->name, nr_match, match))) {
if (prev == NULL)
*refs = next;
else
@@ -375,7 +378,11 @@ static int fetch_pack(int fd[2], int nr_match, char **match)
goto all_done;
}
if (find_common(fd, sha1, ref) < 0)
- fprintf(stderr, "warning: no common commits\n");
+ if (!keep_pack)
+ /* When cloning, it is not unusual to have
+ * no common commit.
+ */
+ fprintf(stderr, "warning: no common commits\n");
if (keep_pack)
status = receive_keep_pack(fd, "git-fetch-pack", quiet);
@@ -421,6 +428,14 @@ int main(int argc, char **argv)
keep_pack = 1;
continue;
}
+ if (!strcmp("--thin", arg)) {
+ use_thin_pack = 1;
+ continue;
+ }
+ if (!strcmp("--all", arg)) {
+ fetch_all = 1;
+ continue;
+ }
if (!strcmp("-v", arg)) {
verbose = 1;
continue;
@@ -434,6 +449,8 @@ int main(int argc, char **argv)
}
if (!dest)
usage(fetch_pack_usage);
+ if (keep_pack)
+ use_thin_pack = 0;
pid = git_connect(fd, dest, exec);
if (pid < 0)
return 1;
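
The new flags appear in the updated usage string above; as hypothetical invocations (host and path are placeholders):

	# Fetch everything the remote advertises and keep the received pack:
	git-fetch-pack --all -k host.example.com:/path/to/repo.git

	# Ask the remote for a thin pack when fetching a specific ref; note that
	# -k wins, since keep_pack resets use_thin_pack to 0 before connecting:
	git-fetch-pack --thin host.example.com:/path/to/repo.git refs/heads/master
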
diff --git a/fsck-objects.c b/fsck-objects.c
index 6439d55..59b2590 100644
--- a/fsck-objects.c
+++ b/fsck-objects.c
@@ -14,13 +14,12 @@
static int show_root = 0;
static int show_tags = 0;
static int show_unreachable = 0;
-static int standalone = 0;
static int check_full = 0;
static int check_strict = 0;
-static int keep_cache_objects = 0;
+static int keep_cache_objects = 0;
static unsigned char head_sha1[20];
-#if NO_D_INO_IN_DIRENT
+#ifdef NO_D_INO_IN_DIRENT
#define SORT_DIRENT 0
#define DIRENT_SORT_HINT(de) 0
#else
@@ -68,7 +67,7 @@ static void check_connectivity(void)
continue;
if (!obj->parsed) {
- if (!standalone && has_sha1_file(obj->sha1))
+ if (has_sha1_file(obj->sha1))
; /* it is in pack */
else
printf("missing %s %s\n",
@@ -82,7 +81,7 @@ static void check_connectivity(void)
for (j = 0; j < refs->count; j++) {
struct object *ref = refs->ref[j];
if (ref->parsed ||
- (!standalone && has_sha1_file(ref->sha1)))
+ (has_sha1_file(ref->sha1)))
continue;
printf("broken link from %7s %s\n",
obj->type, sha1_to_hex(obj->sha1));
@@ -390,7 +389,7 @@ static int fsck_handle_ref(const char *refname, const unsigned char *sha1)
obj = lookup_object(sha1);
if (!obj) {
- if (!standalone && has_sha1_file(sha1)) {
+ if (has_sha1_file(sha1)) {
default_refs++;
return 0; /* it is in a pack */
}
@@ -464,10 +463,6 @@ int main(int argc, char **argv)
keep_cache_objects = 1;
continue;
}
- if (!strcmp(arg, "--standalone")) {
- standalone = 1;
- continue;
- }
if (!strcmp(arg, "--full")) {
check_full = 1;
continue;
@@ -477,14 +472,9 @@ int main(int argc, char **argv)
continue;
}
if (*arg == '-')
- usage("git-fsck-objects [--tags] [--root] [[--unreachable] [--cache] [--standalone | --full] [--strict] <head-sha1>*]");
+ usage("git-fsck-objects [--tags] [--root] [[--unreachable] [--cache] [--full] [--strict] <head-sha1>*]");
}
- if (standalone && check_full)
- die("Only one of --standalone or --full can be used.");
- if (standalone)
- putenv("GIT_ALTERNATE_OBJECT_DIRECTORIES=");
-
fsck_head_link();
fsck_object_dir(get_object_directory());
if (check_full) {
diff --git a/generate-cmdlist.sh b/generate-cmdlist.sh
new file mode 100755
index 0000000..6c59dbd
--- /dev/null
+++ b/generate-cmdlist.sh
@@ -0,0 +1,52 @@
+#!/bin/sh
+
+echo "/* Automatically generated by $0 */
+struct cmdname_help
+{
+ char name[16];
+ char help[64];
+};
+
+struct cmdname_help common_cmds[] = {"
+
+sort <<\EOF |
+add
+apply
+bisect
+branch
+checkout
+cherry-pick
+clone
+commit
+diff
+fetch
+grep
+init-db
+log
+merge
+mv
+prune
+pull
+push
+rebase
+reset
+revert
+rm
+show
+show-branch
+status
+tag
+verify-tag
+whatchanged
+EOF
+while read cmd
+do
+ sed -n '
+ /NAME/,/git-'"$cmd"'/H
+ ${
+ x
+ s/.*git-'"$cmd"' - \(.*\)/ {"'"$cmd"'", "\1"},/
+ p
+ }' "Documentation/git-$cmd.txt"
+done
+echo "};"
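
For each command in the list, the sed script pulls the one-line summary out of the NAME section of Documentation/git-<cmd>.txt, so the generated header is expected to look roughly like this (the descriptions are placeholders, not the real summaries):

	/* Automatically generated by generate-cmdlist.sh */
	struct cmdname_help
	{
		char name[16];
		char help[64];
	};

	struct cmdname_help common_cmds[] = {
		{"add", "<summary line from Documentation/git-add.txt>"},
		{"apply", "<summary line from Documentation/git-apply.txt>"},
		/* ... one entry per command listed above ... */
	};
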
diff --git a/git-add.sh b/git-add.sh
index 611f152..d6a4bc7 100755
--- a/git-add.sh
+++ b/git-add.sh
@@ -28,6 +28,17 @@ while : ; do
shift
done
+# Check misspelled pathspec
+case "$#" in
+0) ;;
+*)
+ git-ls-files --error-unmatch --others --cached -- "$@" >/dev/null || {
+ echo >&2 "Maybe you misspelled it?"
+ exit 1
+ }
+ ;;
+esac
+
if test -f "$GIT_DIR/info/exclude"
then
git-ls-files -z \
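
With the check above, an unknown pathspec now aborts the add instead of being silently ignored; a hypothetical session (git-ls-files prints its own complaint about the unmatched path first):

	$ git add Documentaiton/git-add.txt
	Maybe you misspelled it?
	$ echo $?
	1
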
diff --git a/git-am.sh b/git-am.sh
index ab133fb..eab4aa8 100755
--- a/git-am.sh
+++ b/git-am.sh
@@ -2,7 +2,8 @@
#
# Copyright (c) 2005, 2006 Junio C Hamano
-USAGE='[--signoff] [--dotest=<dir>] [--utf8] [--binary] [--3way] <mbox>
+USAGE='[--signoff] [--dotest=<dir>] [--utf8] [--binary] [--3way]
+ [--interactive] [--whitespace=<option>] <mbox>...
or, when resuming [--skip | --resolved]'
. git-sh-setup
diff --git a/git-annotate.perl b/git-annotate.perl
new file mode 100755
index 0000000..9df72a1
--- /dev/null
+++ b/git-annotate.perl
@@ -0,0 +1,508 @@
+#!/usr/bin/perl
+# Copyright 2006, Ryan Anderson <ryan@michonline.com>
+#
+# GPL v2 (See COPYING)
+#
+# This file is licensed under the GPL v2, or a later version
+# at the discretion of Linus Torvalds.
+
+use warnings;
+use strict;
+use Getopt::Long;
+use POSIX qw(strftime gmtime);
+
+sub usage() {
+	print STDERR 'Usage: ${\basename $0} [-l] [-t] [-r] [-S revs-file] file [ revision ]
+ -l, --long
+ Show long rev (Defaults off)
+ -t, --time
+ Show raw timestamp (Defaults off)
+ -r, --rename
+ Follow renames (Defaults on).
+ -S, --rev-file revs-file
+ Use revs from revs-file instead of calling git-rev-list
+ -h, --help
+ This message.
+';
+
+ exit(1);
+}
+
+our ($help, $longrev, $rename, $rawtime, $starting_rev, $rev_file) = (0, 0, 1);
+
+my $rc = GetOptions( "long|l" => \$longrev,
+ "time|t" => \$rawtime,
+ "help|h" => \$help,
+ "rename|r" => \$rename,
+ "rev-file|S=s" => \$rev_file);
+if (!$rc or $help) {
+ usage();
+}
+
+my $filename = shift @ARGV;
+if (@ARGV) {
+ $starting_rev = shift @ARGV;
+}
+
+my @stack = (
+ {
+ 'rev' => defined $starting_rev ? $starting_rev : "HEAD",
+ 'filename' => $filename,
+ },
+);
+
+our @filelines = ();
+
+if (defined $starting_rev) {
+ @filelines = git_cat_file($starting_rev, $filename);
+} else {
+ open(F,"<",$filename)
+ or die "Failed to open filename: $!";
+
+ while(<F>) {
+ chomp;
+ push @filelines, $_;
+ }
+ close(F);
+
+}
+
+our %revs;
+our @revqueue;
+our $head;
+
+my $revsprocessed = 0;
+while (my $bound = pop @stack) {
+ my @revisions = git_rev_list($bound->{'rev'}, $bound->{'filename'});
+ foreach my $revinst (@revisions) {
+ my ($rev, @parents) = @$revinst;
+ $head ||= $rev;
+
+ if (!defined($rev)) {
+ $rev = "";
+ }
+ $revs{$rev}{'filename'} = $bound->{'filename'};
+ if (scalar @parents > 0) {
+ $revs{$rev}{'parents'} = \@parents;
+ next;
+ }
+
+ if (!$rename) {
+ next;
+ }
+
+ my $newbound = find_parent_renames($rev, $bound->{'filename'});
+ if ( exists $newbound->{'filename'} && $newbound->{'filename'} ne $bound->{'filename'}) {
+ push @stack, $newbound;
+ $revs{$rev}{'parents'} = [$newbound->{'rev'}];
+ }
+ }
+}
+push @revqueue, $head;
+init_claim( defined $starting_rev ? $head : 'dirty');
+unless (defined $starting_rev) {
+ my $diff = open_pipe("git","diff","-R", "HEAD", "--",$filename)
+ or die "Failed to call git diff to check for dirty state: $!";
+
+ _git_diff_parse($diff, $head, "dirty", (
+ 'author' => gitvar_name("GIT_AUTHOR_IDENT"),
+ 'author_date' => sprintf("%s +0000",time()),
+ )
+ );
+ close($diff);
+}
+handle_rev();
+
+
+my $i = 0;
+foreach my $l (@filelines) {
+ my ($output, $rev, $committer, $date);
+ if (ref $l eq 'ARRAY') {
+ ($output, $rev, $committer, $date) = @$l;
+ if (!$longrev && length($rev) > 8) {
+ $rev = substr($rev,0,8);
+ }
+ } else {
+ $output = $l;
+ ($rev, $committer, $date) = ('unknown', 'unknown', 'unknown');
+ }
+
+ printf("%s\t(%10s\t%10s\t%d)%s\n", $rev, $committer,
+ format_date($date), ++$i, $output);
+}
+
+sub init_claim {
+ my ($rev) = @_;
+ for (my $i = 0; $i < @filelines; $i++) {
+ $filelines[$i] = [ $filelines[$i], '', '', '', 1];
+ # line,
+ # rev,
+ # author,
+ # date,
+ # 1 <-- belongs to the original file.
+ }
+ $revs{$rev}{'lines'} = \@filelines;
+}
+
+
+sub handle_rev {
+ my $i = 0;
+ my %seen;
+ while (my $rev = shift @revqueue) {
+ next if $seen{$rev}++;
+
+ my %revinfo = git_commit_info($rev);
+
+ foreach my $p (@{$revs{$rev}{'parents'}}) {
+
+ git_diff_parse($p, $rev, %revinfo);
+ push @revqueue, $p;
+ }
+
+
+ if (scalar @{$revs{$rev}{parents}} == 0) {
+ # We must be at the initial rev here, so claim everything that is left.
+ for (my $i = 0; $i < @{$revs{$rev}{lines}}; $i++) {
+ if (ref ${$revs{$rev}{lines}}[$i] eq '' || ${$revs{$rev}{lines}}[$i][1] eq '') {
+ claim_line($i, $rev, $revs{$rev}{lines}, %revinfo);
+ }
+ }
+ }
+ }
+}
+
+
+sub git_rev_list {
+ my ($rev, $file) = @_;
+
+ my $revlist;
+ if ($rev_file) {
+ open($revlist, '<' . $rev_file)
+ or die "Failed to open $rev_file : $!";
+ } else {
+ $revlist = open_pipe("git-rev-list","--parents","--remove-empty",$rev,"--",$file)
+ or die "Failed to exec git-rev-list: $!";
+ }
+
+ my @revs;
+ while(my $line = <$revlist>) {
+ chomp $line;
+ my ($rev, @parents) = split /\s+/, $line;
+ push @revs, [ $rev, @parents ];
+ }
+ close($revlist);
+
+ printf("0 revs found for rev %s (%s)\n", $rev, $file) if (@revs == 0);
+ return @revs;
+}
+
+sub find_parent_renames {
+ my ($rev, $file) = @_;
+
+ my $patch = open_pipe("git-diff-tree", "-M50", "-r","--name-status", "-z","$rev")
+ or die "Failed to exec git-diff: $!";
+
+ local $/ = "\0";
+ my %bound;
+ my $junk = <$patch>;
+ while (my $change = <$patch>) {
+ chomp $change;
+ my $filename = <$patch>;
+ chomp $filename;
+
+ if ($change =~ m/^[AMD]$/ ) {
+ next;
+ } elsif ($change =~ m/^R/ ) {
+ my $oldfilename = $filename;
+ $filename = <$patch>;
+ chomp $filename;
+ if ( $file eq $filename ) {
+ my $parent = git_find_parent($rev, $oldfilename);
+ @bound{'rev','filename'} = ($parent, $oldfilename);
+ last;
+ }
+ }
+ }
+ close($patch);
+
+ return \%bound;
+}
+
+
+sub git_find_parent {
+ my ($rev, $filename) = @_;
+
+ my $revparent = open_pipe("git-rev-list","--remove-empty", "--parents","--max-count=1","$rev","--",$filename)
+ or die "Failed to open git-rev-list to find a single parent: $!";
+
+ my $parentline = <$revparent>;
+ chomp $parentline;
+ my ($revfound,$parent) = split m/\s+/, $parentline;
+
+ close($revparent);
+
+ return $parent;
+}
+
+
+# Get a diff between the current revision and a parent.
+# Record the commit information that results.
+sub git_diff_parse {
+ my ($parent, $rev, %revinfo) = @_;
+
+ my $diff = open_pipe("git-diff-tree","-M","-p",$rev,$parent,"--",
+ $revs{$rev}{'filename'}, $revs{$parent}{'filename'})
+ or die "Failed to call git-diff for annotation: $!";
+
+ _git_diff_parse($diff, $parent, $rev, %revinfo);
+
+ close($diff);
+}
+
+sub _git_diff_parse {
+ my ($diff, $parent, $rev, %revinfo) = @_;
+
+ my ($ri, $pi) = (0,0);
+ my $slines = $revs{$rev}{'lines'};
+ my @plines;
+
+ my $gotheader = 0;
+ my ($remstart);
+ my ($hunk_start, $hunk_index);
+ while(<$diff>) {
+ chomp;
+ if (m/^@@ -(\d+),(\d+) \+(\d+),(\d+)/) {
+ $remstart = $1;
+ # Adjust for 0-based arrays
+ $remstart--;
+ # Reinit hunk tracking.
+ $hunk_start = $remstart;
+ $hunk_index = 0;
+ $gotheader = 1;
+
+ for (my $i = $ri; $i < $remstart; $i++) {
+ $plines[$pi++] = $slines->[$i];
+ $ri++;
+ }
+ next;
+ } elsif (!$gotheader) {
+ next;
+ }
+
+ if (m/^\+(.*)$/) {
+ my $line = $1;
+ $plines[$pi++] = [ $line, '', '', '', 0 ];
+ next;
+
+ } elsif (m/^-(.*)$/) {
+ my $line = $1;
+ if (get_line($slines, $ri) eq $line) {
+ # Found a match, claim
+ claim_line($ri, $rev, $slines, %revinfo);
+ } else {
+ die sprintf("Sync error: %d/%d\n|%s\n|%s\n%s => %s\n",
+ $ri, $hunk_start + $hunk_index,
+ $line,
+ get_line($slines, $ri),
+ $rev, $parent);
+ }
+ $ri++;
+
+ } elsif (m/^\\/) {
+ ;
+ # Skip \No newline at end of file.
+ # But this can be internationalized, so only look
+ # for an initial \
+
+ } else {
+ if (substr($_,1) ne get_line($slines,$ri) ) {
+ die sprintf("Line %d (%d) does not match:\n|%s\n|%s\n%s => %s\n",
+ $hunk_start + $hunk_index, $ri,
+ substr($_,1),
+ get_line($slines,$ri),
+ $rev, $parent);
+ }
+ $plines[$pi++] = $slines->[$ri++];
+ }
+ $hunk_index++;
+ }
+ for (my $i = $ri; $i < @{$slines} ; $i++) {
+ push @plines, $slines->[$ri++];
+ }
+
+ $revs{$parent}{lines} = \@plines;
+ return;
+}
+
+sub get_line {
+ my ($lines, $index) = @_;
+
+ return ref $lines->[$index] ne '' ? $lines->[$index][0] : $lines->[$index];
+}
+
+sub git_cat_file {
+ my ($rev, $filename) = @_;
+ return () unless defined $rev && defined $filename;
+
+ my $blob = git_ls_tree($rev, $filename);
+ die "Failed to find a blob for $filename in rev $rev\n" if !defined $blob;
+
+ my $catfile = open_pipe("git","cat-file", "blob", $blob)
+ or die "Failed to git-cat-file blob $blob (rev $rev, file $filename): " . $!;
+
+ my @lines;
+ while(<$catfile>) {
+ chomp;
+ push @lines, $_;
+ }
+ close($catfile);
+
+ return @lines;
+}
+
+sub git_ls_tree {
+ my ($rev, $filename) = @_;
+
+ my $lstree = open_pipe("git","ls-tree",$rev,$filename)
+ or die "Failed to call git ls-tree: $!";
+
+ my ($mode, $type, $blob, $tfilename);
+ while(<$lstree>) {
+ chomp;
+ ($mode, $type, $blob, $tfilename) = split(/\s+/, $_, 4);
+ last if ($tfilename eq $filename);
+ }
+ close($lstree);
+
+ return $blob if ($tfilename eq $filename);
+ die "git-ls-tree failed to find blob for $filename";
+
+}
+
+
+
+sub claim_line {
+ my ($floffset, $rev, $lines, %revinfo) = @_;
+ my $oline = get_line($lines, $floffset);
+ @{$lines->[$floffset]} = ( $oline, $rev,
+ $revinfo{'author'}, $revinfo{'author_date'} );
+ #printf("Claiming line %d with rev %s: '%s'\n",
+ # $floffset, $rev, $oline) if 1;
+}
+
+sub git_commit_info {
+ my ($rev) = @_;
+ my $commit = open_pipe("git-cat-file", "commit", $rev)
+ or die "Failed to call git-cat-file: $!";
+
+ my %info;
+ while(<$commit>) {
+ chomp;
+ last if (length $_ == 0);
+
+ if (m/^author (.*) <(.*)> (.*)$/) {
+ $info{'author'} = $1;
+ $info{'author_email'} = $2;
+ $info{'author_date'} = $3;
+ } elsif (m/^committer (.*) <(.*)> (.*)$/) {
+ $info{'committer'} = $1;
+ $info{'committer_email'} = $2;
+ $info{'committer_date'} = $3;
+ }
+ }
+ close($commit);
+
+ return %info;
+}
+
+sub format_date {
+ if ($rawtime) {
+ return $_[0];
+ }
+ my ($timestamp, $timezone) = split(' ', $_[0]);
+ my $minutes = abs($timezone);
+ $minutes = int($minutes / 100) * 60 + ($minutes % 100);
+ if ($timezone < 0) {
+ $minutes = -$minutes;
+ }
+ my $t = $timestamp + $minutes * 60;
+ return strftime("%Y-%m-%d %H:%M:%S " . $timezone, gmtime($t));
+}
+
+# Copied from git-send-email.perl - We need a Git.pm module..
+sub gitvar {
+ my ($var) = @_;
+ my $fh;
+ my $pid = open($fh, '-|');
+ die "$!" unless defined $pid;
+ if (!$pid) {
+ exec('git-var', $var) or die "$!";
+ }
+ my ($val) = <$fh>;
+ close $fh or die "$!";
+ chomp($val);
+ return $val;
+}
+
+sub gitvar_name {
+ my ($name) = @_;
+ my $val = gitvar($name);
+ my @field = split(/\s+/, $val);
+ return join(' ', @field[0...(@field-4)]);
+}
+
+sub open_pipe {
+ if ($^O eq '##INSERT_ACTIVESTATE_STRING_HERE##') {
+ return open_pipe_activestate(@_);
+ } else {
+ return open_pipe_normal(@_);
+ }
+}
+
+sub open_pipe_activestate {
+ tie *fh, "Git::ActiveStatePipe", @_;
+ return *fh;
+}
+
+sub open_pipe_normal {
+ my (@execlist) = @_;
+
+ my $pid = open my $kid, "-|";
+ defined $pid or die "Cannot fork: $!";
+
+ unless ($pid) {
+ exec @execlist;
+ die "Cannot exec @execlist: $!";
+ }
+
+ return $kid;
+}
+
+package Git::ActiveStatePipe;
+use strict;
+
+sub TIEHANDLE {
+ my ($class, @params) = @_;
+ my $cmdline = join " ", @params;
+ my @data = qx{$cmdline};
+ bless { i => 0, data => \@data }, $class;
+}
+
+sub READLINE {
+ my $self = shift;
+ if ($self->{i} >= scalar @{$self->{data}}) {
+ return undef;
+ }
+ return $self->{'data'}->[ $self->{i}++ ];
+}
+
+sub CLOSE {
+ my $self = shift;
+ delete $self->{data};
+ delete $self->{i};
+}
+
+sub EOF {
+ my $self = shift;
+ return ($self->{i} >= scalar @{$self->{data}});
+}
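
Following the usage text near the top of the script, a few hypothetical invocations (file names and the revision are placeholders):

	# Annotate a file as of HEAD, following renames (the default):
	perl git-annotate.perl Documentation/git-add.txt

	# Annotate as of a given revision, with full SHA-1s and raw timestamps:
	perl git-annotate.perl -l -t Documentation/git-add.txt v1.0

	# Reuse a pre-computed rev list instead of letting the script run git-rev-list:
	git-rev-list --parents --remove-empty HEAD -- Documentation/git-add.txt >revs &&
	perl git-annotate.perl -S revs Documentation/git-add.txt
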
diff --git a/git-archimport.perl b/git-archimport.perl
index 6792624..740bc1f 100755
--- a/git-archimport.perl
+++ b/git-archimport.perl
@@ -928,7 +928,7 @@ sub find_parents {
# now walk up to the mergepoint collecting what patches we have
my $branchtip = git_rev_parse($ps->{branch});
- my @ancestors = `git-rev-list --merge-order $branchtip ^$mergebase`;
+ my @ancestors = `git-rev-list --topo-order $branchtip ^$mergebase`;
my %have; # collected merges this branch has
foreach my $merge (@{$ps->{merges}}) {
$have{$merge} = 1;
@@ -951,7 +951,7 @@ sub find_parents {
# see what the remote branch has - these are the merges we
# will want to have in a consecutive series from the mergebase
my $otherbranchtip = git_rev_parse($branch);
- my @needraw = `git-rev-list --merge-order $otherbranchtip ^$mergebase`;
+ my @needraw = `git-rev-list --topo-order $otherbranchtip ^$mergebase`;
my @need;
foreach my $needps (@needraw) { # get the psets
$needps = commitid2pset($needps);
diff --git a/git-bisect.sh b/git-bisect.sh
index 0750253..03df143 100755
--- a/git-bisect.sh
+++ b/git-bisect.sh
@@ -49,9 +49,16 @@ bisect_start() {
die "Bad HEAD - I need a symbolic ref"
case "$head" in
refs/heads/bisect*)
- git checkout master || exit
+ if [ -s "$GIT_DIR/head-name" ]; then
+ branch=`cat "$GIT_DIR/head-name"`
+ else
+ branch=master
+ fi
+ git checkout $branch || exit
;;
refs/heads/*)
+ [ -s "$GIT_DIR/head-name" ] && die "won't bisect on seeked tree"
+ echo "$head" | sed 's#^refs/heads/##' >"$GIT_DIR/head-name"
;;
*)
die "Bad HEAD - strange symbolic ref"
@@ -159,7 +166,11 @@ bisect_visualize() {
bisect_reset() {
case "$#" in
- 0) branch=master ;;
+ 0) if [ -s "$GIT_DIR/head-name" ]; then
+ branch=`cat "$GIT_DIR/head-name"`
+ else
+ branch=master
+ fi ;;
1) test -f "$GIT_DIR/refs/heads/$1" || {
echo >&2 "$1 does not seem to be a valid branch"
exit 1
@@ -170,7 +181,7 @@ bisect_reset() {
esac
git checkout "$branch" &&
rm -fr "$GIT_DIR/refs/bisect"
- rm -f "$GIT_DIR/refs/heads/bisect"
+ rm -f "$GIT_DIR/refs/heads/bisect" "$GIT_DIR/head-name"
rm -f "$GIT_DIR/BISECT_LOG"
rm -f "$GIT_DIR/BISECT_NAMES"
}
diff --git a/git-branch.sh b/git-branch.sh
index 6ac961e..663a3a3 100755
--- a/git-branch.sh
+++ b/git-branch.sh
@@ -48,6 +48,12 @@ If you are sure you want to delete it, run 'git branch -D $branch_name'."
exit 0
}
+ls_remote_branches () {
+ git-rev-parse --symbolic --all |
+ sed -ne 's|^refs/\(remotes/\)|\1|p' |
+ sort
+}
+
force=
while case "$#,$1" in 0,*) break ;; *,-*) ;; *) break ;; esac
do
@@ -56,6 +62,10 @@ do
delete_branch "$@"
exit
;;
+ -r)
+ ls_remote_branches
+ exit
+ ;;
-f)
force="$1"
;;
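
ls_remote_branches strips only the leading "refs/" and keeps the "remotes/" part, so the new -r listing would look something like this (branch names are illustrative):

	$ git branch -r
	remotes/origin/master
	remotes/origin/next
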
diff --git a/git-clone.sh b/git-clone.sh
index 89c15d7..823c74b 100755
--- a/git-clone.sh
+++ b/git-clone.sh
@@ -9,7 +9,7 @@
unset CDPATH
usage() {
- echo >&2 "Usage: $0 [--bare] [-l [-s]] [-q] [-u <upload-pack>] [-o <name>] [-n] <repo> [<dir>]"
+ echo >&2 "Usage: $0 [--use-separate-remote] [--reference <reference-repo>] [--bare] [-l [-s]] [-q] [-u <upload-pack>] [--origin <name>] [-n] <repo> [<dir>]"
exit 1
}
@@ -40,22 +40,74 @@ Perhaps git-update-server-info needs to be run there?"
do
name=`expr "$refname" : 'refs/\(.*\)'` &&
case "$name" in
- *^*) ;;
- *)
- git-http-fetch -v -a -w "$name" "$name" "$1/" || exit 1
+ *^*) continue;;
esac
+ if test -n "$use_separate_remote" &&
+ branch_name=`expr "$name" : 'heads/\(.*\)'`
+ then
+ tname="remotes/$origin/$branch_name"
+ else
+ tname=$name
+ fi
+ git-http-fetch -v -a -w "$tname" "$name" "$1/" || exit 1
done <"$clone_tmp/refs"
rm -fr "$clone_tmp"
+ http_fetch "$1/HEAD" "$GIT_DIR/REMOTE_HEAD"
+}
+
+# Read git-fetch-pack -k output and store the remote branches.
+copy_refs='
+use File::Path qw(mkpath);
+use File::Basename qw(dirname);
+my $git_dir = $ARGV[0];
+my $use_separate_remote = $ARGV[1];
+my $origin = $ARGV[2];
+
+my $branch_top = ($use_separate_remote ? "remotes/$origin" : "heads");
+my $tag_top = "tags";
+
+sub store {
+ my ($sha1, $name, $top) = @_;
+ $name = "$git_dir/refs/$top/$name";
+ mkpath(dirname($name));
+ open O, ">", "$name";
+ print O "$sha1\n";
+ close O;
}
+open FH, "<", "$git_dir/CLONE_HEAD";
+while (<FH>) {
+ my ($sha1, $name) = /^([0-9a-f]{40})\s(.*)$/;
+ next if ($name =~ /\^\173/);
+ if ($name eq "HEAD") {
+ open O, ">", "$git_dir/REMOTE_HEAD";
+ print O "$sha1\n";
+ close O;
+ next;
+ }
+ if ($name =~ s/^refs\/heads\///) {
+ store($sha1, $name, $branch_top);
+ next;
+ }
+ if ($name =~ s/^refs\/tags\///) {
+ store($sha1, $name, $tag_top);
+ next;
+ }
+}
+close FH;
+'
+
quiet=
+local=no
use_local=no
local_shared=no
no_checkout=
upload_pack=
bare=
-origin=origin
+reference=
+origin=
origin_override=
+use_separate_remote=
while
case "$#,$1" in
0,*) break ;;
@@ -68,14 +120,27 @@ while
*,-s|*,--s|*,--sh|*,--sha|*,--shar|*,--share|*,--shared)
local_shared=yes; use_local=yes ;;
*,-q|*,--quiet) quiet=-q ;;
- 1,-o) usage;;
- *,-o)
- git-check-ref-format "$2" || {
+ *,--use-separate-remote)
+ use_separate_remote=t ;;
+ 1,--reference) usage ;;
+ *,--reference)
+ shift; reference="$1" ;;
+ *,--reference=*)
+ reference=`expr "$1" : '--reference=\(.*\)'` ;;
+ *,-o|*,--or|*,--ori|*,--orig|*,--origi|*,--origin)
+ case "$2" in
+ '')
+ usage ;;
+ */*)
+ echo >&2 "'$2' is not suitable for an origin name"
+ exit 1
+ esac
+ git-check-ref-format "heads/$2" || {
echo >&2 "'$2' is not suitable for a branch name"
exit 1
}
test -z "$origin_override" || {
- echo >&2 "Do not give more than one -o options."
+ echo >&2 "Do not give more than one --origin options."
exit 1
}
origin_override=yes
@@ -92,21 +157,36 @@ do
shift
done
+repo="$1"
+if test -z "$repo"
+then
+ echo >&2 'you must specify a repository to clone.'
+ exit 1
+fi
+
# --bare implies --no-checkout
if test yes = "$bare"
then
if test yes = "$origin_override"
then
- echo >&2 '--bare and -o $origin options are incompatible.'
+ echo >&2 '--bare and --origin $origin options are incompatible.'
+ exit 1
+ fi
+ if test t = "$use_separate_remote"
+ then
+ echo >&2 '--bare and --use-separate-remote options are incompatible.'
exit 1
fi
no_checkout=yes
fi
+if test -z "$origin"
+then
+ origin=origin
+fi
+
# Turn the source into an absolute path if
# it is local
-repo="$1"
-local=no
if base=$(get_repo_base "$repo"); then
repo="$base"
local=yes
@@ -130,6 +210,28 @@ yes)
GIT_DIR="$D/.git" ;;
esac
+if test -n "$reference"
+then
+ if test -d "$reference"
+ then
+ if test -d "$reference/.git/objects"
+ then
+ reference="$reference/.git"
+ fi
+ reference=$(cd "$reference" && pwd)
+ echo "$reference/objects" >"$GIT_DIR/objects/info/alternates"
+ (cd "$reference" && tar cf - refs) |
+ (cd "$GIT_DIR/refs" &&
+ mkdir reference-tmp &&
+ cd reference-tmp &&
+ tar xf -)
+ else
+ echo >&2 "$reference: not a local directory." && usage
+ fi
+fi
+
+rm -f "$GIT_DIR/CLONE_HEAD"
+
# We do local magic only when the user tells us to.
case "$local,$use_local" in
yes,yes)
@@ -154,7 +256,7 @@ yes,yes)
fi &&
rm -f "$GIT_DIR/objects/sample" &&
cd "$repo" &&
- find objects -depth -print | cpio -puamd$l "$GIT_DIR/" || exit 1
+ find objects -depth -print | cpio -pumd$l "$GIT_DIR/" || exit 1
;;
yes)
mkdir -p "$GIT_DIR/objects/info"
@@ -165,24 +267,14 @@ yes,yes)
} >"$GIT_DIR/objects/info/alternates"
;;
esac
-
- # Make a duplicate of refs and HEAD pointer
- HEAD=
- if test -f "$repo/HEAD"
- then
- HEAD=HEAD
- fi
- (cd "$repo" && tar cf - refs $HEAD) |
- (cd "$GIT_DIR" && tar xf -) || exit 1
+ git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD"
;;
*)
case "$repo" in
rsync://*)
rsync $quiet -av --ignore-existing \
- --exclude info "$repo/objects/" "$GIT_DIR/objects/" &&
- rsync $quiet -av --ignore-existing \
- --exclude info "$repo/refs/" "$GIT_DIR/refs/" || exit
-
+ --exclude info "$repo/objects/" "$GIT_DIR/objects/" ||
+ exit
# Look at objects/info/alternates for rsync -- http will
# support it natively and git native ones will do it on the
# remote end. Not having that file is not a crime.
@@ -205,6 +297,7 @@ yes,yes)
done
rm -f "$GIT_DIR/TMP_ALT"
fi
+ git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD"
;;
http://*)
if test -z "@@NO_CURL@@"
@@ -217,38 +310,89 @@ yes,yes)
;;
*)
cd "$D" && case "$upload_pack" in
- '') git-clone-pack $quiet "$repo" ;;
- *) git-clone-pack $quiet "$upload_pack" "$repo" ;;
- esac || {
- echo >&2 "clone-pack from '$repo' failed."
+ '') git-fetch-pack --all -k $quiet "$repo" ;;
+ *) git-fetch-pack --all -k $quiet "$upload_pack" "$repo" ;;
+ esac >"$GIT_DIR/CLONE_HEAD" || {
+ echo >&2 "fetch-pack from '$repo' failed."
exit 1
}
;;
esac
;;
esac
+test -d "$GIT_DIR/refs/reference-tmp" && rm -fr "$GIT_DIR/refs/reference-tmp"
+
+if test -f "$GIT_DIR/CLONE_HEAD"
+then
+ # Figure out where the remote HEAD points at.
+ perl -e "$copy_refs" "$GIT_DIR" "$use_separate_remote" "$origin"
+fi
cd "$D" || exit
-if test -f "$GIT_DIR/HEAD" && test -z "$bare"
+if test -z "$bare" && test -f "$GIT_DIR/REMOTE_HEAD"
then
- head_points_at=`git-symbolic-ref HEAD`
+ head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
+ # Figure out which remote branch HEAD points at.
+ case "$use_separate_remote" in
+ '') remote_top=refs/heads ;;
+ *) remote_top="refs/remotes/$origin" ;;
+ esac
+
+ # What to use to track the remote primary branch
+ if test -n "$use_separate_remote"
+ then
+ origin_tracking="remotes/$origin/master"
+ else
+ origin_tracking="heads/$origin"
+ fi
+
+ # The name under $remote_top the remote HEAD seems to point at
+ head_points_at=$(
+ (
+ echo "master"
+ cd "$GIT_DIR/$remote_top" &&
+ find . -type f -print | sed -e 's/^\.\///'
+ ) | (
+ done=f
+ while read name
+ do
+ test t = $done && continue
+ branch_tip=`cat "$GIT_DIR/$remote_top/$name"`
+ if test "$head_sha1" = "$branch_tip"
+ then
+ echo "$name"
+ done=t
+ fi
+ done
+ )
+ )
+
+ # Write out remotes/$origin file.
case "$head_points_at" in
- refs/heads/*)
- head_points_at=`expr "$head_points_at" : 'refs/heads/\(.*\)'`
+ ?*)
mkdir -p "$GIT_DIR/remotes" &&
- echo >"$GIT_DIR/remotes/origin" \
+ echo >"$GIT_DIR/remotes/$origin" \
"URL: $repo
-Pull: $head_points_at:$origin" &&
- git-update-ref "refs/heads/$origin" $(git-rev-parse HEAD) &&
- (cd "$GIT_DIR" && find "refs/heads" -type f -print) |
- while read ref
+Pull: refs/heads/$head_points_at:refs/$origin_tracking" &&
+ case "$use_separate_remote" in
+ t) git-update-ref HEAD "$head_sha1" ;;
+ *) git-update-ref "refs/heads/$origin" $(git-rev-parse HEAD) ;;
+ esac &&
+ (cd "$GIT_DIR/$remote_top" && find . -type f -print) |
+ while read dotslref
do
- head=`expr "$ref" : 'refs/heads/\(.*\)'` &&
- test "$head_points_at" = "$head" ||
- test "$origin" = "$head" ||
- echo "Pull: ${head}:${head}"
- done >>"$GIT_DIR/remotes/origin"
+ name=`expr "$dotslref" : './\(.*\)'` &&
+ test "$head_points_at" = "$name" ||
+ test "$origin" = "$name" ||
+ echo "Pull: refs/heads/${name}:$remote_top/${name}"
+ done >>"$GIT_DIR/remotes/$origin" &&
+ case "$use_separate_remote" in
+ t)
+ rm -f "refs/remotes/$origin/HEAD"
+ git-symbolic-ref "refs/remotes/$origin/HEAD" \
+ "refs/remotes/$origin/$head_points_at"
+ esac
esac
case "$no_checkout" in
@@ -256,6 +400,7 @@ Pull: $head_points_at:$origin" &&
git-read-tree -m -u -v HEAD HEAD
esac
fi
+rm -f "$GIT_DIR/CLONE_HEAD" "$GIT_DIR/REMOTE_HEAD"
trap - exit
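
Hypothetical invocations of the reworked clone, following its updated usage string (URLs and paths are placeholders):

	# Track the remote's branches under refs/remotes/origin/* instead of
	# copying them into refs/heads/*:
	git clone --use-separate-remote git://host.example.com/project.git

	# Borrow objects from a nearby repository via objects/info/alternates:
	git clone --reference /srv/mirrors/project.git \
		git://host.example.com/project.git project

	# Use a remote shorthand other than "origin":
	git clone --origin upstream git://host.example.com/project.git
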
diff --git a/git-commit.sh b/git-commit.sh
index 59551d9..1e7c09e 100755
--- a/git-commit.sh
+++ b/git-commit.sh
@@ -3,7 +3,7 @@
# Copyright (c) 2005 Linus Torvalds
# Copyright (c) 2006 Junio C Hamano
-USAGE='[-a] [-i] [-s] [-v] [--no-verify] [-m <message> | -F <logfile> | (-C|-c) <commit>] [-e] [--author <author>] [<path>...]'
+USAGE='[-a] [-s] [-v] [--no-verify] [-m <message> | -F <logfile> | (-C|-c) <commit>] [--amend] [-e] [--author <author>] [[-i | -o] <path>...]'
SUBDIRECTORY_OK=Yes
. git-sh-setup
@@ -64,6 +64,22 @@ run_status () {
# We always show status for the whole tree.
cd "$TOP"
+ IS_INITIAL="$initial_commit"
+ REFERENCE=HEAD
+ case "$amend" in
+ t)
+ # If we are amending the initial commit, there
+ # is no HEAD^1.
+ if git-rev-parse --verify "HEAD^1" >/dev/null 2>&1
+ then
+ REFERENCE="HEAD^1"
+ IS_INITIAL=
+ else
+ IS_INITIAL=t
+ fi
+ ;;
+ esac
+
# If TMP_INDEX is defined, that means we are doing
# "--only" partial commit, and that index file is used
# to build the tree for the commit. Otherwise, if
@@ -85,10 +101,10 @@ run_status () {
*) echo "# On branch $branch" ;;
esac
- if test -z "$initial_commit"
+ if test -z "$IS_INITIAL"
then
git-diff-index -M --cached --name-status \
- --diff-filter=MDTCRA HEAD |
+ --diff-filter=MDTCRA $REFERENCE |
sed -e '
s/\\/\\\\/g
s/ /\\ /g
@@ -145,9 +161,9 @@ run_status () {
}
'
- if test -n "$verbose"
+ if test -n "$verbose" -a -z "$IS_INITIAL"
then
- git-diff-index --cached -M -p --diff-filter=MDTCRA HEAD
+ git-diff-index --cached -M -p --diff-filter=MDTCRA $REFERENCE
fi
case "$committable" in
0)
@@ -173,6 +189,7 @@ also=
only=
logfile=
use_commit=
+amend=
no_edit=
log_given=
log_message=
@@ -180,6 +197,7 @@ verify=t
verbose=
signoff=
force_author=
+only_include_assumed=
while case "$#" in 0) break;; esac
do
case "$1" in
@@ -253,6 +271,12 @@ do
verify=
shift
;;
+ --a|--am|--ame|--amen|--amend)
+ amend=t
+ log_given=t$log_given
+ use_commit=HEAD
+ shift
+ ;;
-c)
case "$#" in 1) usage ;; esac
shift
@@ -327,6 +351,15 @@ done
################################################################
# Sanity check options
+case "$amend,$initial_commit" in
+t,t)
+ die "You do not have anything to amend." ;;
+t,)
+ if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
+ die "You are in the middle of a merge -- cannot amend."
+ fi ;;
+esac
+
case "$log_given" in
tt*)
die "Only one of -c/-C/-F/-m can be used." ;;
@@ -340,15 +373,8 @@ case "$#,$also$only" in
0,)
;;
*,)
- echo >&2 "assuming --include paths..."
- also=t
- # Later when switch the defaults, we will replace them with these:
- # echo >&2 "assuming --only paths..."
- # also=
-
- # If we are going to launch an editor, the message won't be
- # shown without this...
- test -z "$log_given$status_only" && sleep 1
+ only_include_assumed="# Explicit paths specified without -i nor -o; assuming --only paths..."
+ also=
;;
esac
unset only
@@ -383,6 +409,8 @@ t,)
;;
,t)
save_index &&
+ git-ls-files --error-unmatch -- "$@" >/dev/null || exit
+
git-diff-files --name-only -z -- "$@" |
(
cd "$TOP"
@@ -411,7 +439,7 @@ t,)
refuse_partial "Different in index and the last commit:
$dirty_in_index"
fi
- commit_only=`git-ls-files -- "$@"`
+ commit_only=`git-ls-files --error-unmatch -- "$@"` || exit
# Build the temporary index and update the real index
# the same way.
@@ -563,17 +591,25 @@ if test -z "$initial_commit"
then
if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
PARENTS="-p HEAD "`sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD"`
+ elif test -n "$amend"; then
+ PARENTS=$(git-cat-file commit HEAD |
+ sed -n -e '/^$/q' -e 's/^parent /-p /p')
fi
+ current=$(git-rev-parse --verify HEAD)
else
if [ -z "$(git-ls-files)" ]; then
echo >&2 Nothing to commit
exit 1
fi
PARENTS=""
+ current=
fi
-run_status >>"$GIT_DIR"/COMMIT_EDITMSG
-if [ "$?" != "0" -a ! -f "$GIT_DIR/MERGE_HEAD" ]
+{
+ test -z "$only_include_assumed" || echo "$only_include_assumed"
+ run_status
+} >>"$GIT_DIR"/COMMIT_EDITMSG
+if [ "$?" != "0" -a ! -f "$GIT_DIR/MERGE_HEAD" -a -z "$amend" ]
then
rm -f "$GIT_DIR/COMMIT_EDITMSG"
run_status
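
A short sketch of the new commit options in use (paths and messages are placeholders):

	# Replace the tip commit, starting from its existing log message:
	git commit --amend

	# Commit only the named paths -- what the script now assumes when paths
	# are given without -i or -o:
	git commit -o Documentation/git-commit.txt -m "update docs"

	# Include the named paths on top of whatever is already staged:
	git commit -i Documentation/git-commit.txt -m "update docs"
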
diff --git a/git-compat-util.h b/git-compat-util.h
index f982b8e..5d543d2 100644
--- a/git-compat-util.h
+++ b/git-compat-util.h
@@ -9,6 +9,8 @@
#endif
#endif
+#define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0]))
+
#include <unistd.h>
#include <stdio.h>
#include <sys/stat.h>
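
ARRAY_SIZE only works on true arrays, not on pointers; the exec_cmd.c hunk earlier in this patch is the pattern it replaces. A trivial sketch:

	static const char *paths[] = { "/usr/local/bin", "/usr/bin", NULL };
	int i;

	for (i = 0; i < ARRAY_SIZE(paths); i++) {
		if (!paths[i])
			continue;
		/* ... use paths[i] ... */
	}
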
diff --git a/git-cvsimport.perl b/git-cvsimport.perl
index 24f9834..c0ae00b 100755
--- a/git-cvsimport.perl
+++ b/git-cvsimport.perl
@@ -452,8 +452,8 @@ chdir($git_tree);
my $last_branch = "";
my $orig_branch = "";
-my $forward_master = 0;
my %branch_date;
+my $tip_at_start = undef;
my $git_dir = $ENV{"GIT_DIR"} || ".git";
$git_dir = getwd()."/".$git_dir unless $git_dir =~ m#^/#;
@@ -488,21 +488,7 @@ unless(-d $git_dir) {
$last_branch = "master";
}
$orig_branch = $last_branch;
- if (-f "$git_dir/CVS2GIT_HEAD") {
- die <<EOM;
-CVS2GIT_HEAD exists.
-Make sure your working directory corresponds to HEAD and remove CVS2GIT_HEAD.
-You may need to run
-
- git read-tree -m -u CVS2GIT_HEAD HEAD
-EOM
- }
- system('cp', "$git_dir/HEAD", "$git_dir/CVS2GIT_HEAD");
-
- $forward_master =
- $opt_o ne 'master' && -f "$git_dir/refs/heads/master" &&
- system('cmp', '-s', "$git_dir/refs/heads/master",
- "$git_dir/refs/heads/$opt_o") == 0;
+ $tip_at_start = `git-rev-parse --verify HEAD`;
# populate index
system('git-read-tree', $last_branch);
@@ -691,11 +677,7 @@ my $commit = sub {
waitpid($pid,0);
die "Error running git-commit-tree: $?\n" if $?;
- open(C,">$git_dir/refs/heads/$branch")
- or die "Cannot open branch $branch for update: $!\n";
- print C "$cid\n"
- or die "Cannot write branch $branch for update: $!\n";
- close(C)
+ system("git-update-ref refs/heads/$branch $cid") == 0
or die "Cannot write branch $branch for update: $!\n";
if($tag) {
@@ -846,8 +828,12 @@ while(<CVS>) {
print "Drop $fn\n" if $opt_v;
} else {
print "".($init ? "New" : "Update")." $fn: $size bytes\n" if $opt_v;
- open my $F, '-|', "git-hash-object -w $tmpname"
+ my $pid = open(my $F, '-|');
+ die $! unless defined $pid;
+ if (!$pid) {
+ exec("git-hash-object", "-w", $tmpname)
or die "Cannot create object: $!\n";
+ }
my $sha = <$F>;
chomp $sha;
close $F;
@@ -885,17 +871,26 @@ if (defined $orig_git_index) {
# Now switch back to the branch we were in before all of this happened
if($orig_branch) {
- print "DONE\n" if $opt_v;
- system("cp","$git_dir/refs/heads/$opt_o","$git_dir/refs/heads/master")
- if $forward_master;
- unless ($opt_i) {
- system('git-read-tree', '-m', '-u', 'CVS2GIT_HEAD', 'HEAD');
- die "read-tree failed: $?\n" if $?;
+ print "DONE.\n" if $opt_v;
+ if ($opt_i) {
+ exit 0;
+ }
+ my $tip_at_end = `git-rev-parse --verify HEAD`;
+ if ($tip_at_start ne $tip_at_end) {
+ for ($tip_at_start, $tip_at_end) { chomp; }
+ print "Fetched into the current branch.\n" if $opt_v;
+ system(qw(git-read-tree -u -m),
+ $tip_at_start, $tip_at_end);
+ die "Fast-forward update failed: $?\n" if $?;
+ }
+ else {
+ system(qw(git-merge cvsimport HEAD), "refs/heads/$opt_o");
+ die "Could not merge $opt_o into the current branch.\n" if $?;
}
} else {
$orig_branch = "master";
print "DONE; creating $orig_branch branch\n" if $opt_v;
- system("cp","$git_dir/refs/heads/$opt_o","$git_dir/refs/heads/master")
+ system("git-update-ref", "refs/heads/master", "refs/heads/$opt_o")
unless -f "$git_dir/refs/heads/master";
system('git-update-ref', 'HEAD', "$orig_branch");
unless ($opt_i) {
@@ -903,4 +898,3 @@ if($orig_branch) {
die "checkout failed: $?\n" if $?;
}
}
-unlink("$git_dir/CVS2GIT_HEAD");
diff --git a/git-cvsserver.perl b/git-cvsserver.perl
new file mode 100755
index 0000000..7d3f78e
--- /dev/null
+++ b/git-cvsserver.perl
@@ -0,0 +1,2616 @@
+#!/usr/bin/perl
+
+####
+#### This application is a CVS emulation layer for git.
+#### It is intended for clients to connect over SSH.
+#### See the documentation for more details.
+####
+#### Copyright The Open University UK - 2006.
+####
+#### Authors: Martyn Smith <martyn@catalyst.net.nz>
+#### Martin Langhoff <martin@catalyst.net.nz>
+####
+####
+#### Released under the GNU Public License, version 2.
+####
+####
+
+use strict;
+use warnings;
+
+use Fcntl;
+use File::Temp qw/tempdir tempfile/;
+use File::Basename;
+
+my $log = GITCVS::log->new();
+my $cfg;
+
+my $DATE_LIST = {
+ Jan => "01",
+ Feb => "02",
+ Mar => "03",
+ Apr => "04",
+ May => "05",
+ Jun => "06",
+ Jul => "07",
+ Aug => "08",
+ Sep => "09",
+ Oct => "10",
+ Nov => "11",
+ Dec => "12",
+};
+
+# Enable autoflush for STDOUT (otherwise the whole thing falls apart)
+$| = 1;
+
+#### Definition and mappings of functions ####
+
+my $methods = {
+ 'Root' => \&req_Root,
+ 'Valid-responses' => \&req_Validresponses,
+ 'valid-requests' => \&req_validrequests,
+ 'Directory' => \&req_Directory,
+ 'Entry' => \&req_Entry,
+ 'Modified' => \&req_Modified,
+ 'Unchanged' => \&req_Unchanged,
+ 'Questionable' => \&req_Questionable,
+ 'Argument' => \&req_Argument,
+ 'Argumentx' => \&req_Argument,
+ 'expand-modules' => \&req_expandmodules,
+ 'add' => \&req_add,
+ 'remove' => \&req_remove,
+ 'co' => \&req_co,
+ 'update' => \&req_update,
+ 'ci' => \&req_ci,
+ 'diff' => \&req_diff,
+ 'log' => \&req_log,
+ 'rlog' => \&req_log,
+ 'tag' => \&req_CATCHALL,
+ 'status' => \&req_status,
+ 'admin' => \&req_CATCHALL,
+ 'history' => \&req_CATCHALL,
+ 'watchers' => \&req_CATCHALL,
+ 'editors' => \&req_CATCHALL,
+ 'annotate' => \&req_annotate,
+ 'Global_option' => \&req_Globaloption,
+ #'annotate' => \&req_CATCHALL,
+};
+
+##############################################
+
+
+# $state holds all the bits of information the clients sends us that could
+# potentially be useful when it comes to actually _doing_ something.
+my $state = {};
+$log->info("--------------- STARTING -----------------");
+
+my $TEMP_DIR = tempdir( CLEANUP => 1 );
+$log->debug("Temporary directory is '$TEMP_DIR'");
+
+# if we are called with a pserver argument,
+# deal with the authentication chat before entering the
+# main loop
+if (@ARGV && $ARGV[0] eq 'pserver') {
+ my $line = <STDIN>; chomp $line;
+ unless( $line eq 'BEGIN AUTH REQUEST') {
+ die "E Do not understand $line - expecting BEGIN AUTH REQUEST\n";
+ }
+ $line = <STDIN>; chomp $line;
+ req_Root('root', $line) # reuse Root
+ or die "E Invalid root $line \n";
+ $line = <STDIN>; chomp $line;
+ unless ($line eq 'anonymous') {
+ print "E Only anonymous user allowed via pserver\n";
+ print "I HATE YOU\n";
+ }
+ $line = <STDIN>; chomp $line; # validate the password?
+ $line = <STDIN>; chomp $line;
+ unless ($line eq 'END AUTH REQUEST') {
+ die "E Do not understand $line -- expecting END AUTH REQUEST\n";
+ }
+ print "I LOVE YOU\n";
+ # and now back to our regular programme...
+}
+
+# Keep going until the client closes the connection
+while (<STDIN>)
+{
+ chomp;
+
+    # Check to see if we've seen this method, and call the appropriate function.
+ if ( /^([\w-]+)(?:\s+(.*))?$/ and defined($methods->{$1}) )
+ {
+ # use the $methods hash to call the appropriate sub for this command
+ #$log->info("Method : $1");
+ &{$methods->{$1}}($1,$2);
+ } else {
+ # log fatal because we don't understand this function. If this happens
+ # we're fairly screwed because we don't know if the client is expecting
+ # a response. If it is, the client will hang, we'll hang, and the whole
+ # thing will be custard.
+ $log->fatal("Don't understand command $_\n");
+ die("Unknown command $_");
+ }
+}
+
+$log->debug("Processing time : user=" . (times)[0] . " system=" . (times)[1]);
+$log->info("--------------- FINISH -----------------");
+
+# Magic catchall method.
+# This is the method that will handle all commands we haven't yet
+# implemented. It simply sends a warning to the log file indicating a
+# command that hasn't been implemented has been invoked.
+sub req_CATCHALL
+{
+ my ( $cmd, $data ) = @_;
+ $log->warn("Unhandled command : req_$cmd : $data");
+}
+
+
+# Root pathname \n
+# Response expected: no. Tell the server which CVSROOT to use. Note that
+# pathname is a local directory and not a fully qualified CVSROOT variable.
+# pathname must already exist; if creating a new root, use the init
+# request, not Root. pathname does not include the hostname of the server,
+# how to access the server, etc.; by the time the CVS protocol is in use,
+# connection, authentication, etc., are already taken care of. The Root
+# request must be sent only once, and it must be sent before any requests
+# other than Valid-responses, valid-requests, UseUnchanged, Set or init.
+sub req_Root
+{
+ my ( $cmd, $data ) = @_;
+ $log->debug("req_Root : $data");
+
+ $state->{CVSROOT} = $data;
+
+ $ENV{GIT_DIR} = $state->{CVSROOT} . "/";
+ unless (-d $ENV{GIT_DIR} && -e $ENV{GIT_DIR}.'HEAD') {
+ print "E $ENV{GIT_DIR} does not seem to be a valid GIT repository\n";
+ print "E \n";
+ print "error 1 $ENV{GIT_DIR} is not a valid repository\n";
+ return 0;
+ }
+
+ my @gitvars = `git-var -l`;
+ if ($?) {
+        print "E problems executing git-var on the server -- this is not a git repository or the PATH is not set correctly.\n";
+ print "E \n";
+ print "error 1 - problem executing git-var\n";
+ return 0;
+ }
+ foreach my $line ( @gitvars )
+ {
+ next unless ( $line =~ /^(.*?)\.(.*?)=(.*)$/ );
+ $cfg->{$1}{$2} = $3;
+ }
+
+ unless ( defined ( $cfg->{gitcvs}{enabled} ) and $cfg->{gitcvs}{enabled} =~ /^\s*(1|true|yes)\s*$/i )
+ {
+ print "E GITCVS emulation needs to be enabled on this repo\n";
+ print "E the repo config file needs a [gitcvs] section added, and the parameter 'enabled' set to 1\n";
+ print "E \n";
+ print "error 1 GITCVS emulation disabled\n";
+ return 0;
+ }
+
+ if ( defined ( $cfg->{gitcvs}{logfile} ) )
+ {
+ $log->setfile($cfg->{gitcvs}{logfile});
+ } else {
+ $log->nofile();
+ }
+
+ return 1;
+}
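+
+# req_Root refuses to serve a repository unless the emulation is switched on
+# in its config; per the error text above, the repository needs something like
+# the following (the logfile path is only an example, and logfile is optional):
+#
+#	[gitcvs]
+#		enabled = 1
+#		logfile = /var/log/gitcvs.log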
+
+# Global_option option \n
+# Response expected: no. Transmit one of the global options `-q', `-Q',
+# `-l', `-t', `-r', or `-n'. option must be one of those strings, no
+# variations (such as combining of options) are allowed. For graceful
+# handling of valid-requests, it is probably better to make new global
+# options separate requests, rather than trying to add them to this
+# request.
+sub req_Globaloption
+{
+ my ( $cmd, $data ) = @_;
+ $log->debug("req_Globaloption : $data");
+
+ # TODO : is this data useful ???
+}
+
+# Valid-responses request-list \n
+# Response expected: no. Tell the server what responses the client will
+# accept. request-list is a space separated list of tokens.
+sub req_Validresponses
+{
+ my ( $cmd, $data ) = @_;
+    $log->debug("req_Validresponses : $data");
+
+ # TODO : re-enable this, currently it's not particularly useful
+ #$state->{validresponses} = [ split /\s+/, $data ];
+}
+
+# valid-requests \n
+# Response expected: yes. Ask the server to send back a Valid-requests
+# response.
+sub req_validrequests
+{
+ my ( $cmd, $data ) = @_;
+
+ $log->debug("req_validrequests");
+
+ $log->debug("SEND : Valid-requests " . join(" ",keys %$methods));
+ $log->debug("SEND : ok");
+
+ print "Valid-requests " . join(" ",keys %$methods) . "\n";
+ print "ok\n";
+}
+
+# Directory local-directory \n
+# Additional data: repository \n. Response expected: no. Tell the server
+# what directory to use. The repository should be a directory name from a
+# previous server response. Note that this both gives a default for Entry
+# and Modified and also for ci and the other commands; normal usage is to
+# send Directory for each directory in which there will be an Entry or
+# Modified, and then a final Directory for the original directory, then the
+# command. The local-directory is relative to the top level at which the
+# command is occurring (i.e. the last Directory which is sent before the
+# command); to indicate that top level, `.' should be sent for
+# local-directory.
+sub req_Directory
+{
+ my ( $cmd, $data ) = @_;
+
+ my $repository = <STDIN>;
+ chomp $repository;
+
+
+ $state->{localdir} = $data;
+ $state->{repository} = $repository;
+ $state->{directory} = $repository;
+ $state->{directory} =~ s/^$state->{CVSROOT}\///;
+ $state->{module} = $1 if ($state->{directory} =~ s/^(.*?)(\/|$)//);
+ $state->{directory} .= "/" if ( $state->{directory} =~ /\S/ );
+
+ $log->debug("req_Directory : localdir=$data repository=$repository directory=$state->{directory} module=$state->{module}");
+}
+
+# Entry entry-line \n
+# Response expected: no. Tell the server what version of a file is on the
+# local machine. The name in entry-line is a name relative to the directory
+# most recently specified with Directory. If the user is operating on only
+# some files in a directory, Entry requests for only those files need be
+# included. If an Entry request is sent without Modified, Is-modified, or
+# Unchanged, it means the file is lost (does not exist in the working
+# directory). If both Entry and one of Modified, Is-modified, or Unchanged
+# are sent for the same file, Entry must be sent first. For a given file,
+# one can send Modified, Is-modified, or Unchanged, but not more than one
+# of these three.
+sub req_Entry
+{
+ my ( $cmd, $data ) = @_;
+
+ $log->debug("req_Entry : $data");
+
+ my @data = split(/\//, $data);
+
+ $state->{entries}{$state->{directory}.$data[1]} = {
+ revision => $data[2],
+ conflict => $data[3],
+ options => $data[4],
+ tag_or_date => $data[5],
+ };
+}
+
+# add \n
+# Response expected: yes. Add a file or directory. This uses any previous
+# Argument, Directory, Entry, or Modified requests, if they have been sent.
+# The last Directory sent specifies the working directory at the time of
+# the operation. To add a directory, send the directory to be added using
+# Directory and Argument requests.
+sub req_add
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("add");
+
+ my $addcount = 0;
+
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ unless ( defined ( $state->{entries}{$filename}{modified_filename} ) )
+ {
+ print "E cvs add: nothing known about `$filename'\n";
+ next;
+ }
+ # TODO : check we're not squashing an already existing file
+ if ( defined ( $state->{entries}{$filename}{revision} ) )
+ {
+ print "E cvs add: `$filename' has already been entered\n";
+ next;
+ }
+
+
+ my ( $filepart, $dirpart ) = filenamesplit($filename);
+
+ print "E cvs add: scheduling file `$filename' for addition\n";
+
+ print "Checked-in $dirpart\n";
+ print "$filename\n";
+ print "/$filepart/0///\n";
+
+ $addcount++;
+ }
+
+ if ( $addcount == 1 )
+ {
+ print "E cvs add: use `cvs commit' to add this file permanently\n";
+ }
+ elsif ( $addcount > 1 )
+ {
+ print "E cvs add: use `cvs commit' to add these files permanently\n";
+ }
+
+ print "ok\n";
+}
+
+# remove \n
+# Response expected: yes. Remove a file. This uses any previous Argument,
+# Directory, Entry, or Modified requests, if they have been sent. The last
+# Directory sent specifies the working directory at the time of the
+# operation. Note that this request does not actually do anything to the
+# repository; the only effect of a successful remove request is to supply
+# the client with a new entries line containing `-' to indicate a removed
+# file. In fact, the client probably could perform this operation without
+# contacting the server, although using remove may cause the server to
+# perform a few more checks. The client sends a subsequent ci request to
+# actually record the removal in the repository.
+sub req_remove
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("remove");
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+ $updater->update();
+
+ #$log->debug("add state : " . Dumper($state));
+
+ my $rmcount = 0;
+
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ if ( defined ( $state->{entries}{$filename}{unchanged} ) or defined ( $state->{entries}{$filename}{modified_filename} ) )
+ {
+ print "E cvs remove: file `$filename' still in working directory\n";
+ next;
+ }
+
+ my $meta = $updater->getmeta($filename);
+ my $wrev = revparse($filename);
+
+ unless ( defined ( $wrev ) )
+ {
+ print "E cvs remove: nothing known about `$filename'\n";
+ next;
+ }
+
+ if ( defined($wrev) and $wrev < 0 )
+ {
+ print "E cvs remove: file `$filename' already scheduled for removal\n";
+ next;
+ }
+
+ unless ( $wrev == $meta->{revision} )
+ {
+ # TODO : not sure if the format of this message is quite correct.
+ print "E cvs remove: Up to date check failed for `$filename'\n";
+ next;
+ }
+
+
+ my ( $filepart, $dirpart ) = filenamesplit($filename);
+
+ print "E cvs remove: scheduling `$filename' for removal\n";
+
+ print "Checked-in $dirpart\n";
+ print "$filename\n";
+ print "/$filepart/-1.$wrev///\n";
+
+ $rmcount++;
+ }
+
+ if ( $rmcount == 1 )
+ {
+ print "E cvs remove: use `cvs commit' to remove this file permanently\n";
+ }
+ elsif ( $rmcount > 1 )
+ {
+ print "E cvs remove: use `cvs commit' to remove these files permanently\n";
+ }
+
+ print "ok\n";
+}
+
+# Modified filename \n
+# Response expected: no. Additional data: mode, \n, file transmission. Send
+# the server a copy of one locally modified file. filename is a file within
+# the most recent directory sent with Directory; it must not contain `/'.
+# If the user is operating on only some files in a directory, only those
+# files need to be included. This can also be sent without Entry, if there
+# is no entry for the file.
+sub req_Modified
+{
+ my ( $cmd, $data ) = @_;
+
+ my $mode = <STDIN>;
+ chomp $mode;
+ my $size = <STDIN>;
+ chomp $size;
+
+ # Grab config information
+ my $blocksize = 8192;
+ my $bytesleft = $size;
+ my $tmp;
+
+ # Get a filehandle/name to write it to
+ my ( $fh, $filename ) = tempfile( DIR => $TEMP_DIR );
+
+ # Loop over file data writing out to temporary file.
+ while ( $bytesleft )
+ {
+ $blocksize = $bytesleft if ( $bytesleft < $blocksize );
+ read STDIN, $tmp, $blocksize;
+ print $fh $tmp;
+ $bytesleft -= $blocksize;
+ }
+
+ close $fh;
+
+ # Ensure we have something sensible for the file mode
+ if ( $mode =~ /u=(\w+)/ )
+ {
+ $mode = $1;
+ } else {
+ $mode = "rw";
+ }
+
+ # Save the file data in $state
+ $state->{entries}{$state->{directory}.$data}{modified_filename} = $filename;
+ $state->{entries}{$state->{directory}.$data}{modified_mode} = $mode;
+ $state->{entries}{$state->{directory}.$data}{modified_hash} = `git-hash-object $filename`;
+ $state->{entries}{$state->{directory}.$data}{modified_hash} =~ s/\s.*$//s;
+
+ #$log->debug("req_Modified : file=$data mode=$mode size=$size");
+}
+
+# Unchanged filename \n
+# Response expected: no. Tell the server that filename has not been
+# modified in the checked out directory. The filename is a file within the
+# most recent directory sent with Directory; it must not contain `/'.
+sub req_Unchanged
+{
+ my ( $cmd, $data ) = @_;
+
+ $state->{entries}{$state->{directory}.$data}{unchanged} = 1;
+
+ #$log->debug("req_Unchanged : $data");
+}
+
+# Questionable filename \n
+# Response expected: no. Additional data: no.
+# Tell the server to check whether filename should be ignored,
+# and if not, next time the server sends responses, send (in
+# a M response) `?' followed by the directory and filename.
+# filename must not contain `/'; it needs to be a file in the
+# directory named by the most recent Directory request.
+sub req_Questionable
+{
+ my ( $cmd, $data ) = @_;
+
+ $state->{entries}{$state->{directory}.$data}{questionable} = 1;
+
+ #$log->debug("req_Questionable : $data");
+}
+
+# Argument text \n
+# Response expected: no. Save argument for use in a subsequent command.
+# Arguments accumulate until an argument-using command is given, at which
+# point they are forgotten.
+# Argumentx text \n
+# Response expected: no. Append \n followed by text to the current argument
+# being saved.
+sub req_Argument
+{
+ my ( $cmd, $data ) = @_;
+
+ # TODO : Not quite sure how Argument and Argumentx differ, but I assume
+ # it's for multi-line arguments ... somehow ...
+
+ $log->debug("$cmd : $data");
+
+ push @{$state->{arguments}}, $data;
+}
+
+# expand-modules \n
+# Response expected: yes. Expand the modules which are specified in the
+# arguments. Returns the data in Module-expansion responses. Note that the
+# server can assume that this is checkout or export, not rtag or rdiff; the
+# latter do not access the working directory and thus have no need to
+# expand modules on the client side. Expand may not be the best word for
+# what this request does. It does not necessarily tell you all the files
+# contained in a module, for example. Basically it is a way of telling you
+# which working directories the server needs to know about in order to
+# handle a checkout of the specified modules. For example, suppose that the
+# server has a module defined by
+# aliasmodule -a 1dir
+# That is, one can check out aliasmodule and it will take 1dir in the
+# repository and check it out to 1dir in the working directory. Now suppose
+# the client already has this module checked out and is planning on using
+# the co request to update it. Without using expand-modules, the client
+# would have two bad choices: it could either send information about all
+# working directories under the current directory, which could be
+# unnecessarily slow, or it could be ignorant of the fact that aliasmodule
+# stands for 1dir, and neglect to send information for 1dir, which would
+# lead to incorrect operation. With expand-modules, the client would first
+# ask for the module to be expanded:
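+#
+# The exchange below is an illustrative sketch only (the module name
+# "mymodule" is invented, and this implementation simply echoes each
+# requested module name back rather than expanding aliases):
+#
+#   C: Argument mymodule
+#   C: expand-modules
+#   S: Module-expansion mymodule
+#   S: ok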
+sub req_expandmodules
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit();
+
+ $log->debug("req_expandmodules : " . ( defined($data) ? $data : "[NULL]" ) );
+
+ unless ( ref $state->{arguments} eq "ARRAY" )
+ {
+ print "ok\n";
+ return;
+ }
+
+ foreach my $module ( @{$state->{arguments}} )
+ {
+ $log->debug("SEND : Module-expansion $module");
+ print "Module-expansion $module\n";
+ }
+
+ print "ok\n";
+ statecleanup();
+}
+
+# co \n
+# Response expected: yes. Get files from the repository. This uses any
+# previous Argument, Directory, Entry, or Modified requests, if they have
+# been sent. Arguments to this command are module names; the client cannot
+# know what directories they correspond to except by (1) just sending the
+# co request, and then seeing what directory names the server sends back in
+# its responses, and (2) the expand-modules request.
+sub req_co
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("co");
+
+ my $module = $state->{args}[0];
+ my $checkout_path = $module;
+
+ # use the user specified directory if we're given it
+ $checkout_path = $state->{opt}{d} if ( exists ( $state->{opt}{d} ) );
+
+ $log->debug("req_co : " . ( defined($data) ? $data : "[NULL]" ) );
+
+ $log->info("Checking out module '$module' ($state->{CVSROOT}) to '$checkout_path'");
+
+ $ENV{GIT_DIR} = $state->{CVSROOT} . "/";
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $module, $log);
+ $updater->update();
+
+ $checkout_path =~ s|/$||; # get rid of trailing slashes
+
+ # Eclipse seems to need the Clear-sticky command
+ # to prepare the 'Entries' file for the new directory.
+ print "Clear-sticky $checkout_path/\n";
+ print $state->{CVSROOT} . "/$module/\n";
+ print "Clear-static-directory $checkout_path/\n";
+ print $state->{CVSROOT} . "/$module/\n";
+ print "Clear-sticky $checkout_path/\n"; # yes, twice
+ print $state->{CVSROOT} . "/$module/\n";
+ print "Template $checkout_path/\n";
+ print $state->{CVSROOT} . "/$module/\n";
+ print "0\n";
+
+ # instruct the client that we're checking out to $checkout_path
+ print "E cvs checkout: Updating $checkout_path\n";
+
+ my %seendirs = ();
+ my $lastdir ='';
+
+ # recursive
+ sub prepdir {
+ my ($dir, $repodir, $remotedir, $seendirs) = @_;
+ my $parent = dirname($dir);
+ $dir =~ s|/+$||;
+ $repodir =~ s|/+$||;
+ $remotedir =~ s|/+$||;
+ $parent =~ s|/+$||;
+ $log->debug("announcedir $dir, $repodir, $remotedir" );
+
+ if ($parent eq '.' || $parent eq './') {
+ $parent = '';
+ }
+ # recurse to announce unseen parents first
+ if (length($parent) && !exists($seendirs->{$parent})) {
+ prepdir($parent, $repodir, $remotedir, $seendirs);
+ }
+ # Announce that we are going to modify at the parent level
+ if ($parent) {
+ print "E cvs checkout: Updating $remotedir/$parent\n";
+ } else {
+ print "E cvs checkout: Updating $remotedir\n";
+ }
+ print "Clear-sticky $remotedir/$parent/\n";
+ print "$repodir/$parent/\n";
+
+ print "Clear-static-directory $remotedir/$dir/\n";
+ print "$repodir/$dir/\n";
+ print "Clear-sticky $remotedir/$parent/\n"; # yes, twice
+ print "$repodir/$parent/\n";
+ print "Template $remotedir/$dir/\n";
+ print "$repodir/$dir/\n";
+ print "0\n";
+
+ $seendirs->{$dir} = 1;
+ }
+
+ foreach my $git ( @{$updater->gethead} )
+ {
+ # Don't want to check out deleted files
+ next if ( $git->{filehash} eq "deleted" );
+
+ ( $git->{name}, $git->{dir} ) = filenamesplit($git->{name});
+
+ if (length($git->{dir}) && $git->{dir} ne './'
+ && $git->{dir} ne $lastdir ) {
+ unless (exists($seendirs{$git->{dir}})) {
+ prepdir($git->{dir}, $state->{CVSROOT} . "/$module/",
+ $checkout_path, \%seendirs);
+ $lastdir = $git->{dir};
+ $seendirs{$git->{dir}} = 1;
+ }
+ print "E cvs checkout: Updating /$checkout_path/$git->{dir}\n";
+ }
+
+ # modification time of this file
+ print "Mod-time $git->{modified}\n";
+
+ # print some information to the client
+ if ( defined ( $git->{dir} ) and $git->{dir} ne "./" )
+ {
+ print "M U $checkout_path/$git->{dir}$git->{name}\n";
+ } else {
+ print "M U $checkout_path/$git->{name}\n";
+ }
+
+ # instruct client we're sending a file to put in this path
+ print "Created $checkout_path/" . ( defined ( $git->{dir} ) and $git->{dir} ne "./" ? $git->{dir} . "/" : "" ) . "\n";
+
+ print $state->{CVSROOT} . "/$module/" . ( defined ( $git->{dir} ) and $git->{dir} ne "./" ? $git->{dir} . "/" : "" ) . "$git->{name}\n";
+
+ # this is an "entries" line
+ print "/$git->{name}/1.$git->{revision}///\n";
+ # permissions
+ print "u=$git->{mode},g=$git->{mode},o=$git->{mode}\n";
+
+ # transmit file
+ transmitfile($git->{filehash});
+ }
+
+ print "ok\n";
+
+ statecleanup();
+}
+
+# update \n
+# Response expected: yes. Actually do a cvs update command. This uses any
+# previous Argument, Directory, Entry, or Modified requests, if they have
+# been sent. The last Directory sent specifies the working directory at the
+# time of the operation. The -I option is not used--files which the client
+# can decide whether to ignore are not mentioned and the client sends the
+# Questionable request for others.
+sub req_update
+{
+ my ( $cmd, $data ) = @_;
+
+ $log->debug("req_update : " . ( defined($data) ? $data : "[NULL]" ));
+
+ argsplit("update");
+
+ #
+ # It may just be a client exploring the available heads/modules;
+ # in that case, list them as top-level directories and leave it
+ # at that. Eclipse uses this technique to offer you a list of
+ # projects (heads in this case) to checkout.
+ #
+ if ($state->{module} eq '') {
+ print "E cvs update: Updating .\n";
+ opendir HEADS, $state->{CVSROOT} . '/refs/heads';
+ while (my $head = readdir(HEADS)) {
+ if (-f $state->{CVSROOT} . '/refs/heads/' . $head) {
+ print "E cvs update: New directory `$head'\n";
+ }
+ }
+ closedir HEADS;
+ print "ok\n";
+ return 1;
+ }
+
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+
+ $updater->update();
+
+ # if no files were specified, we need to work out what files we should be providing status on ...
+ argsfromdir($updater) if ( scalar ( @{$state->{args}} ) == 0 );
+
+ #$log->debug("update state : " . Dumper($state));
+
+ # foreach file specified on the commandline ...
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ # if we have a -C we should pretend we never saw modified stuff
+ if ( exists ( $state->{opt}{C} ) )
+ {
+ delete $state->{entries}{$filename}{modified_hash};
+ delete $state->{entries}{$filename}{modified_filename};
+ $state->{entries}{$filename}{unchanged} = 1;
+ }
+
+ my $meta;
+ if ( defined($state->{opt}{r}) and $state->{opt}{r} =~ /^1\.(\d+)/ )
+ {
+ $meta = $updater->getmeta($filename, $1);
+ } else {
+ $meta = $updater->getmeta($filename);
+ }
+
+ next unless ( $meta->{revision} );
+
+ my $oldmeta = $meta;
+
+ my $wrev = revparse($filename);
+
+ # If the working copy is an old revision, let's get that version too for comparison.
+ if ( defined($wrev) and $wrev != $meta->{revision} )
+ {
+ $oldmeta = $updater->getmeta($filename, $wrev);
+ }
+
+ #$log->debug("Target revision is $meta->{revision}, current working revision is $wrev");
+
+ # Files are up to date if the working copy and repo copy have the same revision,
+ # and the working copy is unmodified _and_ the user hasn't specified -C
+ next if ( defined ( $wrev )
+ and defined($meta->{revision})
+ and $wrev == $meta->{revision}
+ and $state->{entries}{$filename}{unchanged}
+ and not exists ( $state->{opt}{C} ) );
+
+ # If the working copy and repo copy have the same revision,
+ # but the working copy is modified, tell the client it's modified
+ if ( defined ( $wrev )
+ and defined($meta->{revision})
+ and $wrev == $meta->{revision}
+ and not exists ( $state->{opt}{C} ) )
+ {
+ $log->info("Tell the client the file is modified");
+ print "MT text U\n";
+ print "MT fname $filename\n";
+ print "MT newline\n";
+ next;
+ }
+
+ if ( $meta->{filehash} eq "deleted" )
+ {
+ my ( $filepart, $dirpart ) = filenamesplit($filename);
+
+ $log->info("Removing '$filename' from working copy (no longer in the repo)");
+
+ print "E cvs update: `$filename' is no longer in the repository\n";
+ print "Removed $dirpart\n";
+ print "$filepart\n";
+ }
+ elsif ( not defined ( $state->{entries}{$filename}{modified_hash} )
+ or $state->{entries}{$filename}{modified_hash} eq $oldmeta->{filehash} )
+ {
+ $log->info("Updating '$filename'");
+ # normal update, just send the new revision (either U=Update, or A=Add, or R=Remove)
+ print "MT +updated\n";
+ print "MT text U\n";
+ print "MT fname $filename\n";
+ print "MT newline\n";
+ print "MT -updated\n";
+
+ my ( $filepart, $dirpart ) = filenamesplit($filename);
+ $dirpart =~ s/^$state->{directory}//;
+
+ if ( defined ( $wrev ) )
+ {
+ # instruct client we're sending a file to put in this path as a replacement
+ print "Update-existing $dirpart\n";
+ $log->debug("Updating existing file 'Update-existing $dirpart'");
+ } else {
+ # instruct client we're sending a file to put in this path as a new file
+ print "Created $dirpart\n";
+ $log->debug("Creating new file 'Created $dirpart'");
+ }
+ print $state->{CVSROOT} . "/$state->{module}/$filename\n";
+
+ # this is an "entries" line
+ $log->debug("/$filepart/1.$meta->{revision}///");
+ print "/$filepart/1.$meta->{revision}///\n";
+
+ # permissions
+ $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}");
+ print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n";
+
+ # transmit file
+ transmitfile($meta->{filehash});
+ } else {
+ $log->info("Updating '$filename'");
+ my ( $filepart, $dirpart ) = filenamesplit($meta->{name});
+
+ my $dir = tempdir( DIR => $TEMP_DIR, CLEANUP => 1 ) . "/";
+
+ chdir $dir;
+ my $file_local = $filepart . ".mine";
+ system("ln","-s",$state->{entries}{$filename}{modified_filename}, $file_local);
+ my $file_old = $filepart . "." . $oldmeta->{revision};
+ transmitfile($oldmeta->{filehash}, $file_old);
+ my $file_new = $filepart . "." . $meta->{revision};
+ transmitfile($meta->{filehash}, $file_new);
+
+ # we need to merge with the local changes ( M=successful merge, C=conflict merge )
+ $log->info("Merging $file_local, $file_old, $file_new");
+
+ $log->debug("Temporary directory for merge is $dir");
+
+ my $return = system("merge", $file_local, $file_old, $file_new);
+ $return >>= 8;
+
+ if ( $return == 0 )
+ {
+ $log->info("Merged successfully");
+ print "M M $filename\n";
+ $log->debug("Update-existing $dirpart");
+ print "Update-existing $dirpart\n";
+ $log->debug($state->{CVSROOT} . "/$state->{module}/$filename");
+ print $state->{CVSROOT} . "/$state->{module}/$filename\n";
+ $log->debug("/$filepart/1.$meta->{revision}///");
+ print "/$filepart/1.$meta->{revision}///\n";
+ }
+ elsif ( $return == 1 )
+ {
+ $log->info("Merged with conflicts");
+ print "M C $filename\n";
+ print "Update-existing $dirpart\n";
+ print $state->{CVSROOT} . "/$state->{module}/$filename\n";
+ print "/$filepart/1.$meta->{revision}/+//\n";
+ }
+ else
+ {
+ $log->warn("Merge failed");
+ next;
+ }
+
+ # permissions
+ $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}");
+ print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n";
+
+ # transmit file, format is single integer on a line by itself (file
+ # size) followed by the file contents
+ # TODO : we should copy files in blocks
+ my $data = `cat $file_local`;
+ $log->debug("File size : " . length($data));
+ print length($data) . "\n";
+ print $data;
+
+ chdir "/";
+ }
+
+ }
+
+ print "ok\n";
+}
+
+sub req_ci
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("ci");
+
+ #$log->debug("State : " . Dumper($state));
+
+ $log->info("req_ci : " . ( defined($data) ? $data : "[NULL]" ));
+
+ if ( @ARGV && $ARGV[0] eq 'pserver')
+ {
+ print "error 1 pserver access cannot commit\n";
+ exit;
+ }
+
+ if ( -e $state->{CVSROOT} . "/index" )
+ {
+ print "error 1 Index already exists in git repo\n";
+ exit;
+ }
+
+ my $lockfile = "$state->{CVSROOT}/refs/heads/$state->{module}.lock";
+ unless ( sysopen(LOCKFILE,$lockfile,O_EXCL|O_CREAT|O_WRONLY) )
+ {
+ print "error 1 Lock file '$lockfile' already exists, please try again\n";
+ exit;
+ }
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+ $updater->update();
+
+ my $tmpdir = tempdir ( DIR => $TEMP_DIR );
+ my ( undef, $file_index ) = tempfile ( DIR => $TEMP_DIR, OPEN => 0 );
+ $log->info("Lock successful, basing commit on '$tmpdir', index file is '$file_index'");
+
+ $ENV{GIT_DIR} = $state->{CVSROOT} . "/";
+ $ENV{GIT_INDEX_FILE} = $file_index;
+
+ chdir $tmpdir;
+
+ # populate the temporary index from the head of the module we are committing to
+ system("git-read-tree", $state->{module});
+ unless ($? == 0)
+ {
+ die "Error running git-read-tree $state->{module} $file_index $!";
+ }
+ $log->info("Created index '$file_index' with for head $state->{module} - exit status $?");
+
+
+ my @committedfiles = ();
+
+ # foreach file specified on the commandline ...
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ next unless ( exists $state->{entries}{$filename}{modified_filename} or not $state->{entries}{$filename}{unchanged} );
+
+ my $meta = $updater->getmeta($filename);
+
+ my $wrev = revparse($filename);
+
+ my ( $filepart, $dirpart ) = filenamesplit($filename);
+
+ # do a checkout of the file if it is part of this tree
+ if ($wrev) {
+ system('git-checkout-index', '-f', '-u', $filename);
+ unless ($? == 0) {
+ die "Error running git-checkout-index -f -u $filename : $!";
+ }
+ }
+
+ my $addflag = 0;
+ my $rmflag = 0;
+ $rmflag = 1 if ( defined($wrev) and $wrev < 0 );
+ $addflag = 1 unless ( -e $filename );
+
+ # Do up to date checking
+ unless ( $addflag or $wrev == $meta->{revision} or ( $rmflag and -$wrev == $meta->{revision} ) )
+ {
+ # fail everything if an up to date check fails
+ print "error 1 Up to date check failed for $filename\n";
+ close LOCKFILE;
+ unlink($lockfile);
+ chdir "/";
+ exit;
+ }
+
+ push @committedfiles, $filename;
+ $log->info("Committing $filename");
+
+ system("mkdir","-p",$dirpart) unless ( -d $dirpart );
+
+ unless ( $rmflag )
+ {
+ $log->debug("rename $state->{entries}{$filename}{modified_filename} $filename");
+ rename $state->{entries}{$filename}{modified_filename},$filename;
+
+ # Calculate modes to remove
+ my $invmode = "";
+ foreach ( qw (r w x) ) { $invmode .= $_ unless ( $state->{entries}{$filename}{modified_mode} =~ /$_/ ); }
+
+ $log->debug("chmod u+" . $state->{entries}{$filename}{modified_mode} . "-" . $invmode . " $filename");
+ system("chmod","u+" . $state->{entries}{$filename}{modified_mode} . "-" . $invmode, $filename);
+ }
+
+ if ( $rmflag )
+ {
+ $log->info("Removing file '$filename'");
+ unlink($filename);
+ system("git-update-index", "--remove", $filename);
+ }
+ elsif ( $addflag )
+ {
+ $log->info("Adding file '$filename'");
+ system("git-update-index", "--add", $filename);
+ } else {
+ $log->info("Updating file '$filename'");
+ system("git-update-index", $filename);
+ }
+ }
+
+ unless ( scalar(@committedfiles) > 0 )
+ {
+ print "E No files to commit\n";
+ print "ok\n";
+ close LOCKFILE;
+ unlink($lockfile);
+ chdir "/";
+ return;
+ }
+
+ my $treehash = `git-write-tree`;
+ my $parenthash = `cat $ENV{GIT_DIR}refs/heads/$state->{module}`;
+ chomp $treehash;
+ chomp $parenthash;
+
+ $log->debug("Treehash : $treehash, Parenthash : $parenthash");
+
+ # write our commit message out if we have one ...
+ my ( $msg_fh, $msg_filename ) = tempfile( DIR => $TEMP_DIR );
+ print $msg_fh $state->{opt}{m};# if ( exists ( $state->{opt}{m} ) );
+ print $msg_fh "\n\nvia git-CVS emulator\n";
+ close $msg_fh;
+
+ my $commithash = `git-commit-tree $treehash -p $parenthash < $msg_filename`;
+ $log->info("Commit hash : $commithash");
+
+ unless ( $commithash =~ /[a-zA-Z0-9]{40}/ )
+ {
+ $log->warn("Commit failed (Invalid commit hash)");
+ print "error 1 Commit failed (unknown reason)\n";
+ close LOCKFILE;
+ unlink($lockfile);
+ chdir "/";
+ exit;
+ }
+
+ open FILE, ">", "$ENV{GIT_DIR}refs/heads/$state->{module}";
+ print FILE $commithash;
+ close FILE;
+
+ $updater->update();
+
+ # foreach file specified on the commandline ...
+ foreach my $filename ( @committedfiles )
+ {
+ $filename = filecleanup($filename);
+
+ my $meta = $updater->getmeta($filename);
+
+ my ( $filepart, $dirpart ) = filenamesplit($filename);
+
+ $log->debug("Checked-in $dirpart : $filename");
+
+ if ( $meta->{filehash} eq "deleted" )
+ {
+ print "Remove-entry $dirpart\n";
+ print "$filename\n";
+ } else {
+ print "Checked-in $dirpart\n";
+ print "$filename\n";
+ print "/$filepart/1.$meta->{revision}///\n";
+ }
+ }
+
+ close LOCKFILE;
+ unlink($lockfile);
+ chdir "/";
+
+ print "ok\n";
+}
+
+sub req_status
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("status");
+
+ $log->info("req_status : " . ( defined($data) ? $data : "[NULL]" ));
+ #$log->debug("status state : " . Dumper($state));
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+ $updater->update();
+
+ # if no files were specified, we need to work out what files we should be providing status on ...
+ argsfromdir($updater) if ( scalar ( @{$state->{args}} ) == 0 );
+
+ # foreach file specified on the commandline ...
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ my $meta = $updater->getmeta($filename);
+ my $oldmeta = $meta;
+
+ my $wrev = revparse($filename);
+
+ # If the working copy is an old revision, let's get that version too for comparison.
+ if ( defined($wrev) and $wrev != $meta->{revision} )
+ {
+ $oldmeta = $updater->getmeta($filename, $wrev);
+ }
+
+ # TODO : Not all possible statuses are implemented yet
+ my $status;
+ # Files are up to date if the working copy and repo copy have the same revision, and the working copy is unmodified
+ $status = "Up-to-date" if ( defined ( $wrev ) and defined($meta->{revision}) and $wrev == $meta->{revision}
+ and
+ ( ( $state->{entries}{$filename}{unchanged} and ( not defined ( $state->{entries}{$filename}{conflict} ) or $state->{entries}{$filename}{conflict} !~ /^\+=/ ) )
+ or ( defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $meta->{filehash} ) )
+ );
+
+ # Need checkout if the working copy has an older revision than the repo copy, and the working copy is unmodified
+ $status ||= "Needs Checkout" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and $meta->{revision} > $wrev
+ and
+ ( $state->{entries}{$filename}{unchanged}
+ or ( defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $oldmeta->{filehash} ) )
+ );
+
+ # Need checkout if it exists in the repo but doesn't have a working copy
+ $status ||= "Needs Checkout" if ( not defined ( $wrev ) and defined ( $meta->{revision} ) );
+
+ # Locally modified if working copy and repo copy have the same revision but there are local changes
+ $status ||= "Locally Modified" if ( defined ( $wrev ) and defined($meta->{revision}) and $wrev == $meta->{revision} and $state->{entries}{$filename}{modified_filename} );
+
+ # Needs Merge if working copy revision is less than repo copy and there are local changes
+ $status ||= "Needs Merge" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and $meta->{revision} > $wrev and $state->{entries}{$filename}{modified_filename} );
+
+ $status ||= "Locally Added" if ( defined ( $state->{entries}{$filename}{revision} ) and not defined ( $meta->{revision} ) );
+ $status ||= "Locally Removed" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and -$wrev == $meta->{revision} );
+ $status ||= "Unresolved Conflict" if ( defined ( $state->{entries}{$filename}{conflict} ) and $state->{entries}{$filename}{conflict} =~ /^\+=/ );
+ $status ||= "File had conflicts on merge" if ( 0 );
+
+ $status ||= "Unknown";
+
+ print "M ===================================================================\n";
+ print "M File: $filename\tStatus: $status\n";
+ if ( defined($state->{entries}{$filename}{revision}) )
+ {
+ print "M Working revision:\t" . $state->{entries}{$filename}{revision} . "\n";
+ } else {
+ print "M Working revision:\tNo entry for $filename\n";
+ }
+ if ( defined($meta->{revision}) )
+ {
+ print "M Repository revision:\t1." . $meta->{revision} . "\t$state->{repository}/$filename,v\n";
+ print "M Sticky Tag:\t\t(none)\n";
+ print "M Sticky Date:\t\t(none)\n";
+ print "M Sticky Options:\t\t(none)\n";
+ } else {
+ print "M Repository revision:\tNo revision control file\n";
+ }
+ print "M\n";
+ }
+
+ print "ok\n";
+}
+
+sub req_diff
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("diff");
+
+ $log->debug("req_diff : " . ( defined($data) ? $data : "[NULL]" ));
+ #$log->debug("status state : " . Dumper($state));
+
+ my ($revision1, $revision2);
+ if ( defined ( $state->{opt}{r} ) and ref $state->{opt}{r} eq "ARRAY" )
+ {
+ $revision1 = $state->{opt}{r}[0];
+ $revision2 = $state->{opt}{r}[1];
+ } else {
+ $revision1 = $state->{opt}{r};
+ }
+
+ $revision1 =~ s/^1\.// if ( defined ( $revision1 ) );
+ $revision2 =~ s/^1\.// if ( defined ( $revision2 ) );
+
+ $log->debug("Diffing revisions " . ( defined($revision1) ? $revision1 : "[NULL]" ) . " and " . ( defined($revision2) ? $revision2 : "[NULL]" ) );
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+ $updater->update();
+
+ # if no files were specified, we need to work out what files we should be providing status on ...
+ argsfromdir($updater) if ( scalar ( @{$state->{args}} ) == 0 );
+
+ # foreach file specified on the commandline ...
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ my ( $fh, $file1, $file2, $meta1, $meta2, $filediff );
+
+ my $wrev = revparse($filename);
+
+ # We need _something_ to diff against
+ next unless ( defined ( $wrev ) );
+
+ # if we have a -r switch, use it
+ if ( defined ( $revision1 ) )
+ {
+ ( undef, $file1 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 );
+ $meta1 = $updater->getmeta($filename, $revision1);
+ unless ( defined ( $meta1 ) and $meta1->{filehash} ne "deleted" )
+ {
+ print "E File $filename at revision 1.$revision1 doesn't exist\n";
+ next;
+ }
+ transmitfile($meta1->{filehash}, $file1);
+ }
+ # otherwise we just use the working copy revision
+ else
+ {
+ ( undef, $file1 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 );
+ $meta1 = $updater->getmeta($filename, $wrev);
+ transmitfile($meta1->{filehash}, $file1);
+ }
+
+ # if we have a second -r switch, use it too
+ if ( defined ( $revision2 ) )
+ {
+ ( undef, $file2 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 );
+ $meta2 = $updater->getmeta($filename, $revision2);
+
+ unless ( defined ( $meta2 ) and $meta2->{filehash} ne "deleted" )
+ {
+ print "E File $filename at revision 1.$revision2 doesn't exist\n";
+ next;
+ }
+
+ transmitfile($meta2->{filehash}, $file2);
+ }
+ # otherwise we just use the working copy
+ else
+ {
+ $file2 = $state->{entries}{$filename}{modified_filename};
+ }
+
+ # if we have been given -r, and we don't have a $file2 yet, let's get one
+ if ( defined ( $revision1 ) and not defined ( $file2 ) )
+ {
+ ( undef, $file2 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 );
+ $meta2 = $updater->getmeta($filename, $wrev);
+ transmitfile($meta2->{filehash}, $file2);
+ }
+
+ # We need to have retrieved something useful
+ next unless ( defined ( $meta1 ) );
+
+ # Files are up to date if the working copy and repo copy have the same revision, and the working copy is unmodified
+ next if ( not defined ( $meta2 ) and $wrev == $meta1->{revision}
+ and
+ ( ( $state->{entries}{$filename}{unchanged} and ( not defined ( $state->{entries}{$filename}{conflict} ) or $state->{entries}{$filename}{conflict} !~ /^\+=/ ) )
+ or ( defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $meta1->{filehash} ) )
+ );
+
+ # Apparently we only show diffs for locally modified files
+ next unless ( defined($meta2) or defined ( $state->{entries}{$filename}{modified_filename} ) );
+
+ print "M Index: $filename\n";
+ print "M ===================================================================\n";
+ print "M RCS file: $state->{CVSROOT}/$state->{module}/$filename,v\n";
+ print "M retrieving revision 1.$meta1->{revision}\n" if ( defined ( $meta1 ) );
+ print "M retrieving revision 1.$meta2->{revision}\n" if ( defined ( $meta2 ) );
+ print "M diff ";
+ foreach my $opt ( keys %{$state->{opt}} )
+ {
+ if ( ref $state->{opt}{$opt} eq "ARRAY" )
+ {
+ foreach my $value ( @{$state->{opt}{$opt}} )
+ {
+ print "-$opt $value ";
+ }
+ } else {
+ print "-$opt ";
+ print "$state->{opt}{$opt} " if ( defined ( $state->{opt}{$opt} ) );
+ }
+ }
+ print "$filename\n";
+
+ $log->info("Diffing $filename -r $meta1->{revision} -r " . ( $meta2->{revision} or "workingcopy" ));
+
+ ( $fh, $filediff ) = tempfile ( DIR => $TEMP_DIR );
+
+ if ( exists $state->{opt}{u} )
+ {
+ system("diff -u -L '$filename revision 1.$meta1->{revision}' -L '$filename " . ( defined($meta2->{revision}) ? "revision 1.$meta2->{revision}" : "working copy" ) . "' $file1 $file2 > $filediff");
+ } else {
+ system("diff $file1 $file2 > $filediff");
+ }
+
+ while ( <$fh> )
+ {
+ print "M $_";
+ }
+ close $fh;
+ }
+
+ print "ok\n";
+}
+
+sub req_log
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("log");
+
+ $log->debug("req_log : " . ( defined($data) ? $data : "[NULL]" ));
+ #$log->debug("log state : " . Dumper($state));
+
+ my ( $minrev, $maxrev );
+ if ( defined ( $state->{opt}{r} ) and $state->{opt}{r} =~ /([\d.]+)?(::?)([\d.]+)?/ )
+ {
+ my $control = $2;
+ $minrev = $1;
+ $maxrev = $3;
+ $minrev =~ s/^1\.// if ( defined ( $minrev ) );
+ $maxrev =~ s/^1\.// if ( defined ( $maxrev ) );
+ $minrev++ if ( defined($minrev) and $control eq "::" );
+ }
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+ $updater->update();
+
+ # if no files were specified, we need to work out what files we should be providing status on ...
+ argsfromdir($updater) if ( scalar ( @{$state->{args}} ) == 0 );
+
+ # foreach file specified on the commandline ...
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ my $headmeta = $updater->getmeta($filename);
+
+ my $revisions = $updater->getlog($filename);
+ my $totalrevisions = scalar(@$revisions);
+
+ if ( defined ( $minrev ) )
+ {
+ $log->debug("Removing revisions less than $minrev");
+ while ( scalar(@$revisions) > 0 and $revisions->[-1]{revision} < $minrev )
+ {
+ pop @$revisions;
+ }
+ }
+ if ( defined ( $maxrev ) )
+ {
+ $log->debug("Removing revisions greater than $maxrev");
+ while ( scalar(@$revisions) > 0 and $revisions->[0]{revision} > $maxrev )
+ {
+ shift @$revisions;
+ }
+ }
+
+ next unless ( scalar(@$revisions) );
+
+ print "M \n";
+ print "M RCS file: $state->{CVSROOT}/$state->{module}/$filename,v\n";
+ print "M Working file: $filename\n";
+ print "M head: 1.$headmeta->{revision}\n";
+ print "M branch:\n";
+ print "M locks: strict\n";
+ print "M access list:\n";
+ print "M symbolic names:\n";
+ print "M keyword substitution: kv\n";
+ print "M total revisions: $totalrevisions;\tselected revisions: " . scalar(@$revisions) . "\n";
+ print "M description:\n";
+
+ foreach my $revision ( @$revisions )
+ {
+ print "M ----------------------------\n";
+ print "M revision 1.$revision->{revision}\n";
+ # reformat the date for log output
+ $revision->{modified} = sprintf('%04d/%02d/%02d %s', $3, $DATE_LIST->{$2}, $1, $4 ) if ( $revision->{modified} =~ /(\d+)\s+(\w+)\s+(\d+)\s+(\S+)/ and defined($DATE_LIST->{$2}) );
+ $revision->{author} =~ s/\s+.*//;
+ $revision->{author} =~ s/^(.{8}).*/$1/;
+ print "M date: $revision->{modified}; author: $revision->{author}; state: " . ( $revision->{filehash} eq "deleted" ? "dead" : "Exp" ) . "; lines: +2 -3\n";
+ my $commitmessage = $updater->commitmessage($revision->{commithash});
+ $commitmessage =~ s/^/M /mg;
+ print $commitmessage . "\n";
+ }
+ print "M =============================================================================\n";
+ }
+
+ print "ok\n";
+}
+
+sub req_annotate
+{
+ my ( $cmd, $data ) = @_;
+
+ argsplit("annotate");
+
+ $log->info("req_annotate : " . ( defined($data) ? $data : "[NULL]" ));
+ #$log->debug("status state : " . Dumper($state));
+
+ # Grab a handle to the SQLite db and do any necessary updates
+ my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+ $updater->update();
+
+ # if no files were specified, we need to work out what files we should be providing annotate on ...
+ argsfromdir($updater) if ( scalar ( @{$state->{args}} ) == 0 );
+
+ # we'll need a temporary checkout dir
+ my $tmpdir = tempdir ( DIR => $TEMP_DIR );
+ my ( undef, $file_index ) = tempfile ( DIR => $TEMP_DIR, OPEN => 0 );
+ $log->info("Temp checkoutdir creation successful, basing annotate session work on '$tmpdir', index file is '$file_index'");
+
+ $ENV{GIT_DIR} = $state->{CVSROOT} . "/";
+ $ENV{GIT_INDEX_FILE} = $file_index;
+
+ chdir $tmpdir;
+
+ # foreach file specified on the commandline ...
+ foreach my $filename ( @{$state->{args}} )
+ {
+ $filename = filecleanup($filename);
+
+ my $meta = $updater->getmeta($filename);
+
+ next unless ( $meta->{revision} );
+
+ # get all the commits that this file was in
+ # in dense format -- aka skip dead revisions
+ my $revisions = $updater->gethistorydense($filename);
+ my $lastseenin = $revisions->[0][2];
+
+ # populate the temporary index based on the latest commit where we saw
+ # the file -- but do it cheaply without checking out any files
+ # TODO: if we got a revision from the client, use that instead
+ # to look up the commithash in sqlite (still good to default to
+ # the current head as we do now)
+ system("git-read-tree", $lastseenin);
+ unless ($? == 0)
+ {
+ die "Error running git-read-tree $lastseenin $file_index $!";
+ }
+ $log->info("Created index '$file_index' with commit $lastseenin - exit status $?");
+
+ # do a checkout of the file
+ system('git-checkout-index', '-f', '-u', $filename);
+ unless ($? == 0) {
+ die "Error running git-checkout-index -f -u $filename : $!";
+ }
+
+ $log->info("Annotate $filename");
+
+ # Prepare a file with the commits from the linearized
+ # history that annotate should know about. This prevents
+ # git-annotate from telling us about commits we are hiding
+ # from the client.
+
+ open(ANNOTATEHINTS, ">$tmpdir/.annotate_hints") or die "Error opening > $tmpdir/.annotate_hints $!";
+ for (my $i=0; $i < @$revisions; $i++)
+ {
+ print ANNOTATEHINTS $revisions->[$i][2];
+ if ($i+1 < @$revisions) { # have we got a parent?
+ print ANNOTATEHINTS ' ' . $revisions->[$i+1][2];
+ }
+ print ANNOTATEHINTS "\n";
+ }
+
+ print ANNOTATEHINTS "\n";
+ close ANNOTATEHINTS;
+
+ my $annotatecmd = 'git-annotate';
+ open(ANNOTATE, "-|", $annotatecmd, '-l', '-S', "$tmpdir/.annotate_hints", $filename)
+ or die "Error invoking $annotatecmd -l -S $tmpdir/.annotate_hints $filename : $!";
+ my $metadata = {};
+ print "E Annotations for $filename\n";
+ print "E ***************\n";
+ while ( <ANNOTATE> )
+ {
+ if (m/^([a-zA-Z0-9]{40})\t\([^\)]*\)(.*)$/i)
+ {
+ my $commithash = $1;
+ my $data = $2;
+ unless ( defined ( $metadata->{$commithash} ) )
+ {
+ $metadata->{$commithash} = $updater->getmeta($filename, $commithash);
+ $metadata->{$commithash}{author} =~ s/\s+.*//;
+ $metadata->{$commithash}{author} =~ s/^(.{8}).*/$1/;
+ $metadata->{$commithash}{modified} = sprintf("%02d-%s-%02d", $1, $2, $3) if ( $metadata->{$commithash}{modified} =~ /^(\d+)\s(\w+)\s\d\d(\d\d)/ );
+ }
+ printf("M 1.%-5d (%-8s %10s): %s\n",
+ $metadata->{$commithash}{revision},
+ $metadata->{$commithash}{author},
+ $metadata->{$commithash}{modified},
+ $data
+ );
+ } else {
+ $log->warn("Error in annotate output! LINE: $_");
+ print "E Annotate error \n";
+ next;
+ }
+ }
+ close ANNOTATE;
+ }
+
+ # done; get out of the tempdir
+ chdir "/";
+
+ print "ok\n";
+
+}
+
+# This method takes the state->{arguments} array and produces two new arrays.
+# The first is $state->{args} which is everything before the '--' argument, and
+# the second is $state->{files} which is everything after it.
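+#
+# When a command type is passed in (see the per-command option tables below),
+# recognised switches are parsed into $state->{opt} instead of $state->{args}.
+# A rough sketch of the two modes, using invented argument values:
+#
+#   argsplit("co") with arguments ('-d', 'mydir', 'mymodule') leaves
+#       $state->{opt}{d} = 'mydir' and $state->{args} = [ 'mymodule' ];
+#   argsplit() with arguments ('foo', '--', 'bar') leaves
+#       $state->{args} = [ 'foo' ] and $state->{files} = [ 'bar' ].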
+sub argsplit
+{
+ return unless( defined($state->{arguments}) and ref $state->{arguments} eq "ARRAY" );
+
+ my $type = shift;
+
+ $state->{args} = [];
+ $state->{files} = [];
+ $state->{opt} = {};
+
+ if ( defined($type) )
+ {
+ my $opt = {};
+ $opt = { A => 0, N => 0, P => 0, R => 0, c => 0, f => 0, l => 0, n => 0, p => 0, s => 0, r => 1, D => 1, d => 1, k => 1, j => 1, } if ( $type eq "co" );
+ $opt = { v => 0, l => 0, R => 0 } if ( $type eq "status" );
+ $opt = { A => 0, P => 0, C => 0, d => 0, f => 0, l => 0, R => 0, p => 0, k => 1, r => 1, D => 1, j => 1, I => 1, W => 1 } if ( $type eq "update" );
+ $opt = { l => 0, R => 0, k => 1, D => 1, r => 2 } if ( $type eq "diff" );
+ $opt = { c => 0, R => 0, l => 0, f => 0, F => 1, m => 1, r => 1 } if ( $type eq "ci" );
+ $opt = { k => 1, m => 1 } if ( $type eq "add" );
+ $opt = { f => 0, l => 0, R => 0 } if ( $type eq "remove" );
+ $opt = { l => 0, b => 0, h => 0, R => 0, t => 0, N => 0, S => 0, r => 1, d => 1, s => 1, w => 1 } if ( $type eq "log" );
+
+
+ while ( scalar ( @{$state->{arguments}} ) > 0 )
+ {
+ my $arg = shift @{$state->{arguments}};
+
+ next if ( $arg eq "--" );
+ next unless ( $arg =~ /\S/ );
+
+ # if the argument looks like a switch
+ if ( $arg =~ /^-(\w)(.*)/ )
+ {
+ # if it's a switch that takes an argument
+ if ( $opt->{$1} )
+ {
+ # If this switch has already been provided
+ if ( $opt->{$1} > 1 and exists ( $state->{opt}{$1} ) )
+ {
+ $state->{opt}{$1} = [ $state->{opt}{$1} ];
+ if ( length($2) > 0 )
+ {
+ push @{$state->{opt}{$1}},$2;
+ } else {
+ push @{$state->{opt}{$1}}, shift @{$state->{arguments}};
+ }
+ } else {
+ # if there's extra data in the arg, use that as the argument for the switch
+ if ( length($2) > 0 )
+ {
+ $state->{opt}{$1} = $2;
+ } else {
+ $state->{opt}{$1} = shift @{$state->{arguments}};
+ }
+ }
+ } else {
+ $state->{opt}{$1} = undef;
+ }
+ }
+ else
+ {
+ push @{$state->{args}}, $arg;
+ }
+ }
+ }
+ else
+ {
+ my $mode = 0;
+
+ foreach my $value ( @{$state->{arguments}} )
+ {
+ if ( $value eq "--" )
+ {
+ $mode++;
+ next;
+ }
+ push @{$state->{args}}, $value if ( $mode == 0 );
+ push @{$state->{files}}, $value if ( $mode == 1 );
+ }
+ }
+}
+
+# This method uses $state->{directory} to populate $state->{args} with a list of filenames
+sub argsfromdir
+{
+ my $updater = shift;
+
+ $state->{args} = [];
+
+ foreach my $file ( @{$updater->gethead} )
+ {
+ next if ( $file->{filehash} eq "deleted" and not defined ( $state->{entries}{$file->{name}} ) );
+ next unless ( $file->{name} =~ s/^$state->{directory}// );
+ push @{$state->{args}}, $file->{name};
+ }
+}
+
+# This method cleans up the $state variable after a command that uses arguments has run
+sub statecleanup
+{
+ $state->{files} = [];
+ $state->{args} = [];
+ $state->{arguments} = [];
+ $state->{entries} = {};
+}
+
+sub revparse
+{
+ my $filename = shift;
+
+ return undef unless ( defined ( $state->{entries}{$filename}{revision} ) );
+
+ return $1 if ( $state->{entries}{$filename}{revision} =~ /^1\.(\d+)/ );
+ return -$1 if ( $state->{entries}{$filename}{revision} =~ /^-1\.(\d+)/ );
+
+ return undef;
+}
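+
+# For example (illustrative values): an Entries revision of "1.5" yields 5,
+# while "-1.5" yields -5; callers such as req_ci treat a negative return value
+# as a file scheduled for removal.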
+
+# This method takes a file hash and does a CVS "file transfer" which transmits the
+# size of the file, and then the file contents.
+# If a second argument $targetfile is given, the file is instead written out to
+# a file by the name of $targetfile
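+# As a sketch of the wire format (the sizes shown are invented): for a
+# 12-byte blob, the client receives the line "12" followed immediately by
+# the 12 bytes of file content.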
+sub transmitfile
+{
+ my $filehash = shift;
+ my $targetfile = shift;
+
+ if ( defined ( $filehash ) and $filehash eq "deleted" )
+ {
+ $log->warn("filehash is 'deleted'");
+ return;
+ }
+
+ die "Need filehash" unless ( defined ( $filehash ) and $filehash =~ /^[a-zA-Z0-9]{40}$/ );
+
+ my $type = `git-cat-file -t $filehash`;
+ chomp $type;
+
+ die ( "Invalid type '$type' (expected 'blob')" ) unless ( defined ( $type ) and $type eq "blob" );
+
+ my $size = `git-cat-file -s $filehash`;
+ chomp $size;
+
+ $log->debug("transmitfile($filehash) size=$size, type=$type");
+
+ if ( open my $fh, '-|', "git-cat-file", "blob", $filehash )
+ {
+ if ( defined ( $targetfile ) )
+ {
+ open NEWFILE, ">", $targetfile or die("Couldn't open '$targetfile' for writing : $!");
+ print NEWFILE $_ while ( <$fh> );
+ close NEWFILE;
+ } else {
+ print "$size\n";
+ print while ( <$fh> );
+ }
+ close $fh or die ("Couldn't close filehandle for transmitfile()");
+ } else {
+ die("Couldn't execute git-cat-file");
+ }
+}
+
+# This method takes a file name, and returns ( $filepart, $dirpart ) which
+# refers to the file portion and the directory portion of the filename
+# respectively
+sub filenamesplit
+{
+ my $filename = shift;
+
+ my ( $filepart, $dirpart ) = ( $filename, "." );
+ ( $filepart, $dirpart ) = ( $2, $1 ) if ( $filename =~ /(.*)\/(.*)/ );
+ $dirpart .= "/";
+
+ return ( $filepart, $dirpart );
+}
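+
+# For example (invented path): filenamesplit("src/main.c") returns
+# ( "main.c", "src/" ), while a bare "main.c" returns ( "main.c", "./" ).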
+
+sub filecleanup
+{
+ my $filename = shift;
+
+ return undef unless(defined($filename));
+ if ( $filename =~ /^\// )
+ {
+ print "E absolute filenames '$filename' not supported by server\n";
+ return undef;
+ }
+
+ $filename =~ s/^\.\///g;
+ $filename = $state->{directory} . $filename;
+
+ return $filename;
+}
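+
+# For example (invented names): with $state->{directory} set to "mymodule/",
+# filecleanup("./src/main.c") returns "mymodule/src/main.c", while an
+# absolute path such as "/etc/passwd" is rejected and undef is returned.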
+
+package GITCVS::log;
+
+####
+#### Copyright The Open University UK - 2006.
+####
+#### Authors: Martyn Smith <martyn@catalyst.net.nz>
+#### Martin Langhoff <martin@catalyst.net.nz>
+####
+####
+
+use strict;
+use warnings;
+
+=head1 NAME
+
+GITCVS::log
+
+=head1 DESCRIPTION
+
+This module provides very crude logging with a similar interface to
+Log::Log4perl
+
+=head1 METHODS
+
+=cut
+
+=head2 new
+
+Creates a new log object; optionally you can specify a filename here to
+indicate the file to log to. If no log file is specified, you can specify one
+later with method setfile, or indicate you no longer want logging with method
+nofile.
+
+Until one of these methods is called, all log calls will buffer messages ready
+to write out.
+
+=cut
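+#
+# A minimal usage sketch (the log file name is invented):
+#
+#   my $log = GITCVS::log->new();       # messages buffer until a file is set
+#   $log->info("starting up");          # buffered
+#   $log->setfile('/tmp/gitcvs.log');   # opens the file and writes the buffer
+#   $log->debug("now logging to file");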
+sub new
+{
+ my $class = shift;
+ my $filename = shift;
+
+ my $self = {};
+
+ bless $self, $class;
+
+ if ( defined ( $filename ) )
+ {
+ open $self->{fh}, ">>", $filename or die("Couldn't open '$filename' for writing : $!");
+ }
+
+ return $self;
+}
+
+=head2 setfile
+
+This method takes a filename, and attempts to open that file as the log file.
+If successful, all buffered data is written out to the file, and any further
+logging is written directly to the file.
+
+=cut
+sub setfile
+{
+ my $self = shift;
+ my $filename = shift;
+
+ if ( defined ( $filename ) )
+ {
+ open $self->{fh}, ">>", $filename or die("Couldn't open '$filename' for writing : $!");
+ }
+
+ return unless ( defined ( $self->{buffer} ) and ref $self->{buffer} eq "ARRAY" );
+
+ while ( my $line = shift @{$self->{buffer}} )
+ {
+ print {$self->{fh}} $line;
+ }
+}
+
+=head2 nofile
+
+This method indicates no logging is going to be used. It discards any entries
+in the internal buffer, and sets a flag to ensure no further data is put there.
+
+=cut
+sub nofile
+{
+ my $self = shift;
+
+ $self->{nolog} = 1;
+
+ return unless ( defined ( $self->{buffer} ) and ref $self->{buffer} eq "ARRAY" );
+
+ $self->{buffer} = [];
+}
+
+=head2 _logopen
+
+Internal method. Returns true if the log file is open, false otherwise.
+
+=cut
+sub _logopen
+{
+ my $self = shift;
+
+ return 1 if ( defined ( $self->{fh} ) and ref $self->{fh} eq "GLOB" );
+ return 0;
+}
+
+=head2 debug info warn fatal
+
+These four methods are wrappers to _log. They provide the actual interface for
+logging data.
+
+=cut
+sub debug { my $self = shift; $self->_log("debug", @_); }
+sub info { my $self = shift; $self->_log("info" , @_); }
+sub warn { my $self = shift; $self->_log("warn" , @_); }
+sub fatal { my $self = shift; $self->_log("fatal", @_); }
+
+=head2 _log
+
+This is an internal method called by the logging functions. It generates a
+timestamp and pushes the logged line either to file, or internal buffer.
+
+=cut
+sub _log
+{
+ my $self = shift;
+ my $level = shift;
+
+ return if ( $self->{nolog} );
+
+ my @time = localtime;
+ my $timestring = sprintf("%4d-%02d-%02d %02d:%02d:%02d : %-5s",
+ $time[5] + 1900,
+ $time[4] + 1,
+ $time[3],
+ $time[2],
+ $time[1],
+ $time[0],
+ uc $level,
+ );
+
+ if ( $self->_logopen )
+ {
+ print {$self->{fh}} $timestring . " - " . join(" ",@_) . "\n";
+ } else {
+ push @{$self->{buffer}}, $timestring . " - " . join(" ",@_) . "\n";
+ }
+}
+
+=head2 DESTROY
+
+This method simply closes the file handle if one is open
+
+=cut
+sub DESTROY
+{
+ my $self = shift;
+
+ if ( $self->_logopen )
+ {
+ close $self->{fh};
+ }
+}
+
+package GITCVS::updater;
+
+####
+#### Copyright The Open University UK - 2006.
+####
+#### Authors: Martyn Smith <martyn@catalyst.net.nz>
+#### Martin Langhoff <martin@catalyst.net.nz>
+####
+####
+
+use strict;
+use warnings;
+use DBI;
+
+=head1 METHODS
+
+=cut
+
+=head2 new
+
+=cut
+sub new
+{
+ my $class = shift;
+ my $config = shift;
+ my $module = shift;
+ my $log = shift;
+
+ die "Need to specify a git repository" unless ( defined($config) and -d $config );
+ die "Need to specify a module" unless ( defined($module) );
+
+ $class = ref($class) || $class;
+
+ my $self = {};
+
+ bless $self, $class;
+
+ $self->{dbdir} = $config . "/";
+ die "Database dir '$self->{dbdir}' isn't a directory" unless ( defined($self->{dbdir}) and -d $self->{dbdir} );
+
+ $self->{module} = $module;
+ $self->{file} = $self->{dbdir} . "/gitcvs.$module.sqlite";
+
+ $self->{git_path} = $config . "/";
+
+ $self->{log} = $log;
+
+ die "Git repo '$self->{git_path}' doesn't exist" unless ( -d $self->{git_path} );
+
+ $self->{dbh} = DBI->connect("dbi:SQLite:dbname=" . $self->{file},"","");
+
+ $self->{tables} = {};
+ foreach my $table ( $self->{dbh}->tables )
+ {
+ $table =~ s/^"//;
+ $table =~ s/"$//;
+ $self->{tables}{$table} = 1;
+ }
+
+ # Construct the revision table if required
+ unless ( $self->{tables}{revision} )
+ {
+ $self->{dbh}->do("
+ CREATE TABLE revision (
+ name TEXT NOT NULL,
+ revision INTEGER NOT NULL,
+ filehash TEXT NOT NULL,
+ commithash TEXT NOT NULL,
+ author TEXT NOT NULL,
+ modified TEXT NOT NULL,
+ mode TEXT NOT NULL
+ )
+ ");
+ }
+
+ # Construct the head table if required
+ unless ( $self->{tables}{head} )
+ {
+ $self->{dbh}->do("
+ CREATE TABLE head (
+ name TEXT NOT NULL,
+ revision INTEGER NOT NULL,
+ filehash TEXT NOT NULL,
+ commithash TEXT NOT NULL,
+ author TEXT NOT NULL,
+ modified TEXT NOT NULL,
+ mode TEXT NOT NULL
+ )
+ ");
+ }
+
+ # Construct the properties table if required
+ unless ( $self->{tables}{properties} )
+ {
+ $self->{dbh}->do("
+ CREATE TABLE properties (
+ key TEXT NOT NULL PRIMARY KEY,
+ value TEXT
+ )
+ ");
+ }
+
+ # Construct the commitmsgs table if required
+ unless ( $self->{tables}{commitmsgs} )
+ {
+ $self->{dbh}->do("
+ CREATE TABLE commitmsgs (
+ key TEXT NOT NULL PRIMARY KEY,
+ value TEXT
+ )
+ ");
+ }
+
+ return $self;
+}
+
+=head2 update
+
+=cut
+sub update
+{
+ my $self = shift;
+
+ # first lets get the commit list
+ $ENV{GIT_DIR} = $self->{git_path};
+
+ # prepare database queries
+ my $db_insert_rev = $self->{dbh}->prepare_cached("INSERT INTO revision (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1);
+ my $db_insert_mergelog = $self->{dbh}->prepare_cached("INSERT INTO commitmsgs (key, value) VALUES (?,?)",{},1);
+ my $db_delete_head = $self->{dbh}->prepare_cached("DELETE FROM head",{},1);
+ my $db_insert_head = $self->{dbh}->prepare_cached("INSERT INTO head (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1);
+
+ my $commitinfo = `git-cat-file commit $self->{module} 2>&1`;
+ unless ( $commitinfo =~ /tree\s+[a-zA-Z0-9]{40}/ )
+ {
+ die("Invalid module '$self->{module}'");
+ }
+
+
+ my $git_log;
+ my $lastcommit = $self->_get_prop("last_commit");
+
+ # Start exclusive lock here...
+ $self->{dbh}->begin_work() or die "Cannot lock database for BEGIN";
+
+ # TODO: log processing is memory bound
+ # if we can parse into a 2nd file that is in reverse order
+ # we can probably do something really efficient
+ my @git_log_params = ('--parents', '--topo-order');
+
+ if (defined $lastcommit) {
+ push @git_log_params, "$lastcommit..$self->{module}";
+ } else {
+ push @git_log_params, $self->{module};
+ }
+ open(GITLOG, '-|', 'git-log', @git_log_params) or die "Cannot call git-log: $!";
+
+ my @commits;
+
+ my %commit = ();
+
+ while ( <GITLOG> )
+ {
+ chomp;
+ if (m/^commit\s+(.*)$/) {
+ # on ^commit lines put the just seen commit in the stack
+ # and prime things for the next one
+ if (keys %commit) {
+ my %copy = %commit;
+ unshift @commits, \%copy;
+ %commit = ();
+ }
+ my @parents = split(m/\s+/, $1);
+ $commit{hash} = shift @parents;
+ $commit{parents} = \@parents;
+ } elsif (m/^(\w+?):\s+(.*)$/ && !exists($commit{message})) {
+ # on rfc822-like lines seen before we see any message,
+ # lowercase the entry and put it in the hash as key-value
+ $commit{lc($1)} = $2;
+ } else {
+ # message lines - skip initial empty line
+ # and trim whitespace
+ if (!exists($commit{message}) && m/^\s*$/) {
+ # define it to mark the end of headers
+ $commit{message} = '';
+ next;
+ }
+ s/^\s+//; s/\s+$//; # trim ws
+ $commit{message} .= $_ . "\n";
+ }
+ }
+ close GITLOG;
+
+ unshift @commits, \%commit if ( keys %commit );
+
+ # Now all the commits are in the @commits bucket
+ # ordered by time DESC. For each commit that needs processing,
+ # determine whether it's following the last head we've seen or if
+ # it's on its own branch, grab a file list, and add whatever's changed
+ # NOTE: $lastcommit refers to the last commit from previous run
+ # $lastpicked is the last commit we picked in this run
+ my $lastpicked;
+ my $head = {};
+ if (defined $lastcommit) {
+ $lastpicked = $lastcommit;
+ }
+
+ my $committotal = scalar(@commits);
+ my $commitcount = 0;
+
+ # Load the head table into $head (for cached lookups during the update process)
+ foreach my $file ( @{$self->gethead()} )
+ {
+ $head->{$file->{name}} = $file;
+ }
+
+ foreach my $commit ( @commits )
+ {
+ $self->{log}->debug("GITCVS::updater - Processing commit $commit->{hash} (" . (++$commitcount) . " of $committotal)");
+ if (defined $lastpicked)
+ {
+ if (!in_array($lastpicked, @{$commit->{parents}}))
+ {
+ # skip, we'll see this delta
+ # as part of a merge later
+ # warn "skipping off-track $commit->{hash}\n";
+ next;
+ } elsif (@{$commit->{parents}} > 1) {
+ # it is a merge commit, for each parent that is
+ # not $lastpicked, see if we can get a log
+ # from the merge-base to that parent to put it
+ # in the message as a merge summary.
+ my @parents = @{$commit->{parents}};
+ foreach my $parent (@parents) {
+ # git-merge-base can potentially (but rarely) return
+ # several candidate merge bases. Let's assume
+ # that the first one is the best one.
+ if ($parent eq $lastpicked) {
+ next;
+ }
+ open my $p, 'git-merge-base '. $lastpicked . ' '
+ . $parent . '|';
+ my @output = (<$p>);
+ close $p;
+ my $base = join('', @output);
+ chomp $base;
+ if ($base) {
+ my @merged;
+ # print "want to log between $base $parent \n";
+ open(GITLOG, '-|', 'git-log', "$base..$parent")
+ or die "Cannot call git-log: $!";
+ my $mergedhash;
+ while (<GITLOG>) {
+ chomp;
+ if (!defined $mergedhash) {
+ if (m/^commit\s+(.+)$/) {
+ $mergedhash = $1;
+ } else {
+ next;
+ }
+ } else {
+ # grab the first line that looks non-rfc822
+ # aka has content after leading space
+ if (m/^\s+(\S.*)$/) {
+ my $title = $1;
+ $title = substr($title,0,100); # truncate
+ unshift @merged, "$mergedhash $title";
+ undef $mergedhash;
+ }
+ }
+ }
+ close GITLOG;
+ if (@merged) {
+ $commit->{mergemsg} = $commit->{message};
+ $commit->{mergemsg} .= "\nSummary of merged commits:\n\n";
+ foreach my $summary (@merged) {
+ $commit->{mergemsg} .= "\t$summary\n";
+ }
+ $commit->{mergemsg} .= "\n\n";
+ # print "Message for $commit->{hash} \n$commit->{mergemsg}";
+ }
+ }
+ }
+ }
+ }
+
+ # convert the date to CVS-happy format
+ $commit->{date} = "$2 $1 $4 $3 $5" if ( $commit->{date} =~ /^\w+\s+(\w+)\s+(\d+)\s+(\d+:\d+:\d+)\s+(\d+)\s+([+-]\d+)$/ );
+
+ if ( defined ( $lastpicked ) )
+ {
+ my $filepipe = open(FILELIST, '-|', 'git-diff-tree', '-r', $lastpicked, $commit->{hash}) or die("Cannot call git-diff-tree : $!");
+ while ( <FILELIST> )
+ {
+ unless ( /^:\d{6}\s+\d{3}(\d)\d{2}\s+[a-zA-Z0-9]{40}\s+([a-zA-Z0-9]{40})\s+(\w)\s+(.*)$/o )
+ {
+ die("Couldn't process git-diff-tree line : $_");
+ }
+
+ # $log->debug("File mode=$1, hash=$2, change=$3, name=$4");
+
+ my $git_perms = "";
+ $git_perms .= "r" if ( $1 & 4 );
+ $git_perms .= "w" if ( $1 & 2 );
+ $git_perms .= "x" if ( $1 & 1 );
+ $git_perms = "rw" if ( $git_perms eq "" );
+
+ if ( $3 eq "D" )
+ {
+ #$log->debug("DELETE $4");
+ $head->{$4} = {
+ name => $4,
+ revision => $head->{$4}{revision} + 1,
+ filehash => "deleted",
+ commithash => $commit->{hash},
+ modified => $commit->{date},
+ author => $commit->{author},
+ mode => $git_perms,
+ };
+ $db_insert_rev->execute($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ }
+ elsif ( $3 eq "M" )
+ {
+ #$log->debug("MODIFIED $4");
+ $head->{$4} = {
+ name => $4,
+ revision => $head->{$4}{revision} + 1,
+ filehash => $2,
+ commithash => $commit->{hash},
+ modified => $commit->{date},
+ author => $commit->{author},
+ mode => $git_perms,
+ };
+ $db_insert_rev->execute($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ }
+ elsif ( $3 eq "A" )
+ {
+ #$log->debug("ADDED $4");
+ $head->{$4} = {
+ name => $4,
+ revision => 1,
+ filehash => $2,
+ commithash => $commit->{hash},
+ modified => $commit->{date},
+ author => $commit->{author},
+ mode => $git_perms,
+ };
+ $db_insert_rev->execute($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ }
+ else
+ {
+ $log->warn("UNKNOWN FILE CHANGE mode=$1, hash=$2, change=$3, name=$4");
+ die;
+ }
+ }
+ close FILELIST;
+ } else {
+ # this is used to detect files removed from the repo
+ my $seen_files = {};
+
+ my $filepipe = open(FILELIST, '-|', 'git-ls-tree', '-r', $commit->{hash}) or die("Cannot call git-ls-tree : $!");
+ while ( <FILELIST> )
+ {
+ unless ( /^(\d+)\s+(\w+)\s+([a-zA-Z0-9]+)\s+(.*)$/o )
+ {
+ die("Couldn't process git-ls-tree line : $_");
+ }
+
+ my ( $git_perms, $git_type, $git_hash, $git_filename ) = ( $1, $2, $3, $4 );
+
+ $seen_files->{$git_filename} = 1;
+
+ my ( $oldhash, $oldrevision, $oldmode ) = (
+ $head->{$git_filename}{filehash},
+ $head->{$git_filename}{revision},
+ $head->{$git_filename}{mode}
+ );
+
+ if ( $git_perms =~ /^\d\d\d(\d)\d\d/o )
+ {
+ $git_perms = "";
+ $git_perms .= "r" if ( $1 & 4 );
+ $git_perms .= "w" if ( $1 & 2 );
+ $git_perms .= "x" if ( $1 & 1 );
+ } else {
+ $git_perms = "rw";
+ }
+
+ # unless the file exists with the same hash, we need to update it ...
+ unless ( defined($oldhash) and $oldhash eq $git_hash and defined($oldmode) and $oldmode eq $git_perms )
+ {
+ my $newrevision = ( $oldrevision or 0 ) + 1;
+
+ $head->{$git_filename} = {
+ name => $git_filename,
+ revision => $newrevision,
+ filehash => $git_hash,
+ commithash => $commit->{hash},
+ modified => $commit->{date},
+ author => $commit->{author},
+ mode => $git_perms,
+ };
+
+
+ $db_insert_rev->execute($git_filename, $newrevision, $git_hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ }
+ }
+ close FILELIST;
+
+ # Detect deleted files
+ foreach my $file ( keys %$head )
+ {
+ unless ( exists $seen_files->{$file} or $head->{$file}{filehash} eq "deleted" )
+ {
+ $head->{$file}{revision}++;
+ $head->{$file}{filehash} = "deleted";
+ $head->{$file}{commithash} = $commit->{hash};
+ $head->{$file}{modified} = $commit->{date};
+ $head->{$file}{author} = $commit->{author};
+
+ $db_insert_rev->execute($file, $head->{$file}{revision}, $head->{$file}{filehash}, $commit->{hash}, $commit->{date}, $commit->{author}, $head->{$file}{mode});
+ }
+ }
+ # END : "Detect deleted files"
+ }
+
+
+ if (exists $commit->{mergemsg})
+ {
+ $db_insert_mergelog->execute($commit->{hash}, $commit->{mergemsg});
+ }
+
+ $lastpicked = $commit->{hash};
+
+ $self->_set_prop("last_commit", $commit->{hash});
+ }
+
+ $db_delete_head->execute();
+ foreach my $file ( keys %$head )
+ {
+ $db_insert_head->execute(
+ $file,
+ $head->{$file}{revision},
+ $head->{$file}{filehash},
+ $head->{$file}{commithash},
+ $head->{$file}{modified},
+ $head->{$file}{author},
+ $head->{$file}{mode},
+ );
+ }
+ # invalidate the gethead cache
+ $self->{gethead_cache} = undef;
+
+
+ # Ending exclusive lock here
+ $self->{dbh}->commit() or die "Failed to commit changes to SQLite";
+}
+
+sub _headrev
+{
+ my $self = shift;
+ my $filename = shift;
+
+ my $db_query = $self->{dbh}->prepare_cached("SELECT filehash, revision, mode FROM head WHERE name=?",{},1);
+ $db_query->execute($filename);
+ my ( $hash, $revision, $mode ) = $db_query->fetchrow_array;
+
+ return ( $hash, $revision, $mode );
+}
+
+sub _get_prop
+{
+ my $self = shift;
+ my $key = shift;
+
+ my $db_query = $self->{dbh}->prepare_cached("SELECT value FROM properties WHERE key=?",{},1);
+ $db_query->execute($key);
+ my ( $value ) = $db_query->fetchrow_array;
+
+ return $value;
+}
+
+sub _set_prop
+{
+ my $self = shift;
+ my $key = shift;
+ my $value = shift;
+
+ my $db_query = $self->{dbh}->prepare_cached("UPDATE properties SET value=? WHERE key=?",{},1);
+ $db_query->execute($value, $key);
+
+ unless ( $db_query->rows )
+ {
+ $db_query = $self->{dbh}->prepare_cached("INSERT INTO properties (key, value) VALUES (?,?)",{},1);
+ $db_query->execute($key, $value);
+ }
+
+ return $value;
+}
+
+=head2 gethead
+
+=cut
+
+sub gethead
+{
+ my $self = shift;
+
+ return $self->{gethead_cache} if ( defined ( $self->{gethead_cache} ) );
+
+ my $db_query = $self->{dbh}->prepare_cached("SELECT name, filehash, mode, revision, modified, commithash, author FROM head ORDER BY name ASC",{},1);
+ $db_query->execute();
+
+ my $tree = [];
+ while ( my $file = $db_query->fetchrow_hashref )
+ {
+ push @$tree, $file;
+ }
+
+ $self->{gethead_cache} = $tree;
+
+ return $tree;
+}
+
+=head2 getlog
+
+=cut
+
+sub getlog
+{
+ my $self = shift;
+ my $filename = shift;
+
+ my $db_query = $self->{dbh}->prepare_cached("SELECT name, filehash, author, mode, revision, modified, commithash FROM revision WHERE name=? ORDER BY revision DESC",{},1);
+ $db_query->execute($filename);
+
+ my $tree = [];
+ while ( my $file = $db_query->fetchrow_hashref )
+ {
+ push @$tree, $file;
+ }
+
+ return $tree;
+}
+
+=head2 getmeta
+
+This function takes a filename (with path) argument and returns a hashref of
+metadata for that file.
+
+=cut
+
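+# A usage sketch (names and values invented); the optional second argument
+# selects how the lookup is done, mirroring the three branches below:
+#
+#   $updater->getmeta("src/main.c");        # data for the current head
+#   $updater->getmeta("src/main.c", 3);     # data for CVS revision 1.3
+#   $updater->getmeta("src/main.c", $hash); # data for a specific commit hash
+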
+sub getmeta
+{
+ my $self = shift;
+ my $filename = shift;
+ my $revision = shift;
+
+ my $db_query;
+ if ( defined($revision) and $revision =~ /^\d+$/ )
+ {
+ $db_query = $self->{dbh}->prepare_cached("SELECT * FROM revision WHERE name=? AND revision=?",{},1);
+ $db_query->execute($filename, $revision);
+ }
+ elsif ( defined($revision) and $revision =~ /^[a-zA-Z0-9]{40}$/ )
+ {
+ $db_query = $self->{dbh}->prepare_cached("SELECT * FROM revision WHERE name=? AND commithash=?",{},1);
+ $db_query->execute($filename, $revision);
+ } else {
+ $db_query = $self->{dbh}->prepare_cached("SELECT * FROM head WHERE name=?",{},1);
+ $db_query->execute($filename);
+ }
+
+ return $db_query->fetchrow_hashref;
+}
+
+=head2 commitmessage
+
+This function takes a commithash and returns the commit message for that commit.
+
+=cut
+sub commitmessage
+{
+ my $self = shift;
+ my $commithash = shift;
+
+ die("Need commithash") unless ( defined($commithash) and $commithash =~ /^[a-zA-Z0-9]{40}$/ );
+
+ my $db_query;
+ $db_query = $self->{dbh}->prepare_cached("SELECT value FROM commitmsgs WHERE key=?",{},1);
+ $db_query->execute($commithash);
+
+ my ( $message ) = $db_query->fetchrow_array;
+
+ if ( defined ( $message ) )
+ {
+ $message .= " " if ( $message =~ /\n$/ );
+ return $message;
+ }
+
+ my @lines = safe_pipe_capture("git-cat-file", "commit", $commithash);
+ shift @lines while ( $lines[0] =~ /\S/ );
+ $message = join("",@lines);
+ $message .= " " if ( $message =~ /\n$/ );
+ return $message;
+}
+
+=head2 gethistory
+
+This function takes a filename (with path) argument and returns an array of
+arrays containing revision, filehash, and commithash, ordered by revision
+descending.
+
+=cut
+sub gethistory
+{
+ my $self = shift;
+ my $filename = shift;
+
+ my $db_query;
+ $db_query = $self->{dbh}->prepare_cached("SELECT revision, filehash, commithash FROM revision WHERE name=? ORDER BY revision DESC",{},1);
+ $db_query->execute($filename);
+
+ return $db_query->fetchall_arrayref;
+}
+
+=head2 gethistorydense
+
+This function takes a filename (with path) argument and returns an array of
+arrays containing revision, filehash, and commithash, ordered by revision descending.
+
+This version of gethistory skips deleted entries -- so it is useful for annotate.
+The 'dense' part is a reference to a '--dense' option available for git-rev-list
+and other git tools that depend on it.
+
+=cut
+sub gethistorydense
+{
+ my $self = shift;
+ my $filename = shift;
+
+ my $db_query;
+ $db_query = $self->{dbh}->prepare_cached("SELECT revision, filehash, commithash FROM revision WHERE name=? AND filehash!='deleted' ORDER BY revision DESC",{},1);
+ $db_query->execute($filename);
+
+ return $db_query->fetchall_arrayref;
+}
+
+=head2 in_array()
+
+from Array::PAT - mimics the in_array() function
+found in PHP. Yuck, but it works for small arrays.
+
+=cut
+sub in_array
+{
+ my ($check, @array) = @_;
+ my $retval = 0;
+ foreach my $test (@array){
+ if($check eq $test){
+ $retval = 1;
+ }
+ }
+ return $retval;
+}
+
+=head2 safe_pipe_capture
+
+an alternative to `command` that allows the command and its arguments to be passed
+as an array, working around shell quoting problems with weird characters in the arguments.
+
+=cut
+sub safe_pipe_capture {
+
+ my @output;
+
+ if (my $pid = open my $child, '-|') {
+ @output = (<$child>);
+ close $child or die join(' ',@_).": $! $?";
+ } else {
+ exec(@_) or die "$! $?"; # exec() can fail if the executable can't be found
+ }
+ return wantarray ? @output : join('',@output);
+}
+
+
+1;
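
The safe_pipe_capture helper added above relies on Perl's open(HANDLE, '-|')
fork-and-read idiom rather than backticks, so the command list never passes
through a shell. As a rough standalone sketch of that same pattern (not part of
the patch; the sub name and the example invocation are purely illustrative):

use strict;
use warnings;

# Fork a child whose stdout we read; exec() the command list directly,
# so arguments containing spaces or shell metacharacters survive untouched.
sub capture_cmd {
    my @cmd = @_;
    my @output;
    if (my $pid = open(my $child, '-|')) {      # parent: read the pipe
        @output = <$child>;
        close($child) or die join(' ', @cmd) . ": $! $?";
    } else {                                    # child: exec() never returns on success
        exec(@cmd) or die "exec failed: $!";
    }
    return wantarray ? @output : join('', @output);
}

# Example (hypothetical): grab a raw commit object, much as commitmessage() does.
my $raw = capture_cmd('git-cat-file', 'commit', 'HEAD');
print length($raw), " bytes captured\n";
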
diff --git a/git-diff.sh b/git-diff.sh
index dc4d1b3..dc0dd31 100755
--- a/git-diff.sh
+++ b/git-diff.sh
@@ -38,9 +38,9 @@ case " $flags " in
flags="$flags'$cc_or_p' " ;;
esac
-# If we do not have -B nor -C, default to -M.
+# If we do not have -B, -C, -r, nor -p, default to -M.
case " $flags " in
-*" '-"[BCM]* | *" '--find-copies-harder' "*)
+*" '-"[BCMrp]* | *" '--find-copies-harder' "*)
;; # something like -M50.
*)
flags="$flags'-M' " ;;
diff --git a/git-fetch.sh b/git-fetch.sh
index fcc24f8..954901d 100755
--- a/git-fetch.sh
+++ b/git-fetch.sh
@@ -94,6 +94,9 @@ append_fetch_head () {
# remote-nick is the URL given on the command line (or a shorthand)
# remote-name is the $GIT_DIR relative refs/ path we computed
# for this refspec.
+
+ # the $note_ variable will be fed to git-fmt-merge-msg for further
+ # processing.
case "$remote_name_" in
HEAD)
note_= ;;
@@ -103,6 +106,9 @@ append_fetch_head () {
refs/tags/*)
note_="$(expr "$remote_name_" : 'refs/tags/\(.*\)')"
note_="tag '$note_' of " ;;
+ refs/remotes/*)
+ note_="$(expr "$remote_name_" : 'refs/remotes/\(.*\)')"
+ note_="remote branch '$note_' of " ;;
*)
note_="$remote_name of " ;;
esac
@@ -147,10 +153,10 @@ fast_forward_local () {
else
echo >&2 "* $1: storing $3"
fi
- git-update-ref "$1" "$2"
+ git-update-ref "$1" "$2"
;;
- refs/heads/*)
+ refs/heads/* | refs/remotes/*)
# $1 is the ref being updated.
# $2 is the new value for the ref.
local=$(git-rev-parse --verify "$1^0" 2>/dev/null)
@@ -164,6 +170,7 @@ fast_forward_local () {
;;
*,$local)
echo >&2 "* $1: fast forward to $3"
+ echo >&2 " from $local to $2"
git-update-ref "$1" "$2" "$local"
;;
*)
@@ -178,6 +185,7 @@ fast_forward_local () {
;;
*)
echo >&2 " not updating."
+ exit 1
;;
esac
}
@@ -320,7 +328,7 @@ fetch_main () {
( : subshell because we muck with IFS
IFS=" $LF"
(
- git-fetch-pack $exec $keep "$remote" $rref || echo failed "$remote"
+ git-fetch-pack $exec $keep --thin "$remote" $rref || echo failed "$remote"
) |
while read sha1 remote_name
do
diff --git a/git-fmt-merge-msg.perl b/git-fmt-merge-msg.perl
index 0467a38..5986e54 100755
--- a/git-fmt-merge-msg.perl
+++ b/git-fmt-merge-msg.perl
@@ -28,37 +28,30 @@ sub andjoin {
}
sub repoconfig {
- my $fh;
- my $val;
- eval {
- open $fh, '-|', 'git-repo-config', '--get', 'merge.summary'
- or die "$!";
- ($val) = <$fh>;
- close $fh;
- };
+ my ($val) = qx{git-repo-config --get merge.summary};
return $val;
}
-sub mergebase {
- my ($other) = @_;
- my $fh;
- open $fh, '-|', 'git-merge-base', '--all', 'HEAD', $other or die "$!";
- my (@mb) = map { chomp; $_ } <$fh>;
- close $fh or die "$!";
- return @mb;
+sub current_branch {
+ my ($bra) = qx{git-symbolic-ref HEAD};
+ chomp($bra);
+ $bra =~ s|^refs/heads/||;
+ if ($bra ne 'master') {
+ $bra = " into $bra";
+ } else {
+ $bra = "";
+ }
+ return $bra;
}
sub shortlog {
- my ($tip, $limit, @base) = @_;
- my ($fh, @result);
- open $fh, '-|', ('git-log', "--max-count=$limit", '--topo-order',
- '--pretty=oneline', $tip, map { "^$_" } @base)
- or die "$!";
- while (<$fh>) {
+ my ($tip) = @_;
+ my @result;
+ foreach ( qx{git-log --no-merges --topo-order --pretty=oneline $tip ^HEAD} ) {
s/^[0-9a-f]{40}\s+//;
push @result, $_;
}
- close $fh or die "$!";
+ die "git-log failed\n" if $?;
return @result;
}
@@ -82,6 +75,7 @@ while (<>) {
$src{$src} = {
BRANCH => [],
TAG => [],
+ R_BRANCH => [],
GENERIC => [],
# &1 == has HEAD.
# &2 == has others.
@@ -98,6 +92,11 @@ while (<>) {
push @{$src{$src}{TAG}}, $1;
$src{$src}{HEAD_STATUS} |= 2;
}
+ elsif (/^remote branch (.*)$/) {
+ $origin = $1;
+ push @{$src{$src}{R_BRANCH}}, $1;
+ $src{$src}{HEAD_STATUS} |= 2;
+ }
elsif (/^HEAD$/) {
$origin = $src;
$src{$src}{HEAD_STATUS} |= 1;
@@ -130,6 +129,8 @@ for my $src (@src) {
}
push @this, andjoin("branch ", "branches ",
$src{$src}{BRANCH});
+ push @this, andjoin("remote branch ", "remote branches ",
+ $src{$src}{R_BRANCH});
push @this, andjoin("tag ", "tags ",
$src{$src}{TAG});
push @this, andjoin("commit ", "commits ",
@@ -140,7 +141,10 @@ for my $src (@src) {
}
push @msg, $this;
}
-print "Merge ", join("; ", @msg), "\n";
+
+my $into = current_branch();
+
+print "Merge ", join("; ", @msg), $into, "\n";
if (!repoconfig) {
exit(0);
@@ -151,8 +155,7 @@ my $limit = 20;
for (@origin) {
my ($sha1, $name) = @$_;
- my @mb = mergebase($sha1);
- my @log = shortlog($sha1, $limit + 1, @mb);
+ my @log = shortlog($sha1);
if ($limit + 1 <= @log) {
print "\n* $name: (" . scalar(@log) . " commits)\n";
}
diff --git a/git-format-patch.sh b/git-format-patch.sh
index eb75de4..2ebf7e8 100755
--- a/git-format-patch.sh
+++ b/git-format-patch.sh
@@ -3,7 +3,7 @@
# Copyright (c) 2005 Junio C Hamano
#
-USAGE='[-n | -k] [-o <dir> | --stdout] [--signoff] [--check] [--diff-options] <his> [<mine>]'
+USAGE='[-n | -k] [-o <dir> | --stdout] [--signoff] [--check] [--diff-options] [--attach] <his> [<mine>]'
LONG_USAGE='Prepare each commit with its patch since <mine> head forked from
<his> head, one file per patch formatted to resemble UNIX mailbox
format, for e-mail submission or use with git-am.
@@ -18,7 +18,9 @@ is ignored if --stdout is specified.
When -n is specified, instead of "[PATCH] Subject", the first
line is formatted as "[PATCH N/M] Subject", unless you have only
-one patch.'
+one patch.
+
+When --attach is specified, patches are attached, not inlined.'
. git-sh-setup
@@ -40,6 +42,8 @@ do
-d|--d|--da|--dat|--date|\
-m|--m|--mb|--mbo|--mbox) # now noop
;;
+ --at|--att|--atta|--attac|--attach)
+ attach=t ;;
-k|--k|--ke|--kee|--keep|--keep-|--keep-s|--keep-su|--keep-sub|\
--keep-subj|--keep-subje|--keep-subjec|--keep-subject)
keep_subject=t ;;
@@ -149,6 +153,12 @@ do
done >$series
me=`git-var GIT_AUTHOR_IDENT | sed -e 's/>.*/>/'`
+headers=`git-repo-config --get format.headers`
+case "$attach" in
+"") ;;
+*)
+ mimemagic="050802040500080604070107"
+esac
case "$outdir" in
*/) ;;
@@ -173,8 +183,8 @@ titleScript='
process_one () {
perl -w -e '
-my ($keep_subject, $num, $signoff, $commsg) = @ARGV;
-my ($signoff_pattern, $done_header, $done_subject, $signoff_seen,
+my ($keep_subject, $num, $signoff, $headers, $mimemagic, $commsg) = @ARGV;
+my ($signoff_pattern, $done_header, $done_subject, $done_separator, $signoff_seen,
$last_was_signoff);
if ($signoff) {
@@ -224,10 +234,28 @@ while (<FH>) {
s/^\[PATCH[^]]*\]\s*//;
s/^/[PATCH$num] /;
}
+ if ($headers) {
+ print "$headers\n";
+ }
print "Subject: $_";
+ if ($mimemagic) {
+ print "MIME-Version: 1.0\n";
+ print "Content-Type: multipart/mixed;\n";
+ print " boundary=\"------------$mimemagic\"\n";
+ print "\n";
+ print "This is a multi-part message in MIME format.\n";
+ print "--------------$mimemagic\n";
+ print "Content-Type: text/plain; charset=UTF-8; format=fixed\n";
+ print "Content-Transfer-Encoding: 8bit\n";
+ }
$done_subject = 1;
next;
}
+ unless ($done_separator) {
+ print "\n";
+ $done_separator = 1;
+ next if (/^$/);
+ }
$last_was_signoff = 0;
if (/Signed-off-by:/i) {
@@ -245,14 +273,33 @@ if (!$signoff_seen && $signoff ne "") {
}
print "\n---\n\n";
close FH or die "close $commsg pipe";
-' "$keep_subject" "$num" "$signoff" $commsg
+' "$keep_subject" "$num" "$signoff" "$headers" "$mimemagic" $commsg
git-diff-tree -p $diff_opts "$commit" | git-apply --stat --summary
echo
+ case "$mimemagic" in
+ '');;
+ *)
+ echo "--------------$mimemagic"
+ echo "Content-Type: text/x-patch;"
+ echo " name=\"$commit.diff\""
+ echo "Content-Transfer-Encoding: 8bit"
+ echo "Content-Disposition: inline;"
+ echo " filename=\"$commit.diff\""
+ echo
+ esac
git-diff-tree -p $diff_opts "$commit"
- echo "-- "
- echo "@@GIT_VERSION@@"
-
+ case "$mimemagic" in
+ '')
+ echo "-- "
+ echo "@@GIT_VERSION@@"
+ ;;
+ *)
+ echo
+ echo "--------------$mimemagic--"
+ echo
+ ;;
+ esac
echo
}
diff --git a/git-ls-remote.sh b/git-ls-remote.sh
index 2c9a588..b6882a9 100755
--- a/git-ls-remote.sh
+++ b/git-ls-remote.sh
@@ -53,7 +53,7 @@ http://* | https://* )
if [ -n "$GIT_SSL_NO_VERIFY" ]; then
curl_extra_args="-k"
fi
- curl -nsf $curl_extra_args "$peek_repo/info/refs" ||
+ curl -nsf $curl_extra_args --header "Pragma: no-cache" "$peek_repo/info/refs" ||
echo "failed slurping"
;;
diff --git a/git-merge.sh b/git-merge.sh
index c258ea7..78ab422 100755
--- a/git-merge.sh
+++ b/git-merge.sh
@@ -11,9 +11,17 @@ LF='
'
all_strategies='recursive octopus resolve stupid ours'
-default_strategies='recursive'
+default_twohead_strategies='recursive'
+default_octopus_strategies='octopus'
+no_trivial_merge_strategies='ours'
use_strategies=
+index_merge=t
+if test "@@NO_PYTHON@@"; then
+ all_strategies='resolve octopus stupid ours'
+ default_twohead_strategies='resolve'
+fi
+
dropsave() {
rm -f -- "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/MERGE_MSG" \
"$GIT_DIR/MERGE_SAVE" || exit 1
@@ -86,8 +94,6 @@ do
shift
done
-test "$#" -le 2 && usage ;# we need at least two heads.
-
merge_msg="$1"
shift
head_arg="$1"
@@ -95,6 +101,8 @@ head=$(git-rev-parse --verify "$1"^0) || usage
shift
# All the rest are remote heads
+test "$#" = 0 && usage ;# we need at least one remote head.
+
remoteheads=
for remote
do
@@ -104,6 +112,27 @@ do
done
set x $remoteheads ; shift
+case "$use_strategies" in
+'')
+ case "$#" in
+ 1)
+ use_strategies="$default_twohead_strategies" ;;
+ *)
+ use_strategies="$default_octopus_strategies" ;;
+ esac
+ ;;
+esac
+
+for s in $use_strategies
+do
+ case " $s " in
+ *" $no_trivial_merge_strategies "*)
+ index_merge=f
+ break
+ ;;
+ esac
+done
+
case "$#" in
1)
common=$(git-merge-base --all $head "$@")
@@ -114,20 +143,23 @@ case "$#" in
esac
echo "$head" >"$GIT_DIR/ORIG_HEAD"
-case "$#,$common,$no_commit" in
-*,'',*)
+case "$index_merge,$#,$common,$no_commit" in
+f,*)
+ # We've been told not to try anything clever. Skip to real merge.
+ ;;
+?,*,'',*)
# No common ancestors found. We need a real merge.
;;
-1,"$1",*)
+?,1,"$1",*)
# If head can reach all the merge then we are up to date.
- # but first the most common case of merging one remote
+ # but first the most common case of merging one remote.
echo "Already up-to-date."
dropsave
exit 0
;;
-1,"$head",*)
+?,1,"$head",*)
# Again the most common case of merging one remote.
- echo "Updating from $head to $1."
+ echo "Updating from $head to $1"
git-update-index --refresh 2>/dev/null
new_head=$(git-rev-parse --verify "$1^0") &&
git-read-tree -u -v -m $head "$new_head" &&
@@ -135,11 +167,11 @@ case "$#,$common,$no_commit" in
dropsave
exit 0
;;
-1,?*"$LF"?*,*)
+?,1,?*"$LF"?*,*)
# We are not doing octopus and not fast forward. Need a
# real merge.
;;
-1,*,)
+?,1,*,)
# We are not doing octopus, not fast forward, and have only
# one common. See if it is really trivial.
git var GIT_COMMITTER_IDENT >/dev/null || exit
@@ -184,17 +216,6 @@ esac
# We are going to make a new commit.
git var GIT_COMMITTER_IDENT >/dev/null || exit
-case "$use_strategies" in
-'')
- case "$#" in
- 1)
- use_strategies="$default_strategies" ;;
- *)
- use_strategies=octopus ;;
- esac
- ;;
-esac
-
# At this point, we need a real merge. No matter what strategy
# we use, it would operate on the index, possibly affecting the
# working tree, and when resolved cleanly, have the desired tree
@@ -266,11 +287,7 @@ done
# auto resolved the merge cleanly.
if test '' != "$result_tree"
then
- parents="-p $head"
- for remote
- do
- parents="$parents -p $remote"
- done
+ parents=$(git-show-branch --independent "$head" "$@" | sed -e 's/^/-p /')
result_commit=$(echo "$merge_msg" | git-commit-tree $result_tree $parents) || exit
finish "$result_commit" "Merge $result_commit, made by $wt_strategy."
dropsave
diff --git a/git-mv.perl b/git-mv.perl
index f3e859a..75aa8fe 100755
--- a/git-mv.perl
+++ b/git-mv.perl
@@ -62,9 +62,17 @@ else {
$dstDir = "";
}
+my $subdir_prefix = `git rev-parse --show-prefix`;
+chomp($subdir_prefix);
+
+# run in git base directory, so that git-ls-files lists all revisioned files
+chdir "$GIT_DIR/..";
+
# normalize paths, needed to compare against versioned files and update-index
# also, this is nicer to end-users by doing ".//a/./b/.//./c" ==> "a/b/c"
for (@srcArgs, @dstArgs) {
+ # prepend git prefix as we run from base directory
+ $_ = $subdir_prefix.$_;
s|^\./||;
s|/\./|/| while (m|/\./|);
s|//+|/|g;
@@ -90,6 +98,15 @@ while(scalar @srcArgs > 0) {
$dst = shift @dstArgs;
$bad = "";
+ for ($src, $dst) {
+ # Be nicer to end-users by doing ".//a/./b/.//./c" ==> "a/b/c"
+ s|^\./||;
+ s|/\./|/| while (m|/\./|);
+ s|//+|/|g;
+ # Also "a/b/../c" ==> "a/c"
+ 1 while (s,(^|/)[^/]+/\.\./,$1,);
+ }
+
if ($opt_v) {
print "Checking rename of '$src' to '$dst'\n";
}
diff --git a/git-parse-remote.sh b/git-parse-remote.sh
index 5f158c6..63f2281 100755
--- a/git-parse-remote.sh
+++ b/git-parse-remote.sh
@@ -86,14 +86,14 @@ canon_refs_list_for_fetch () {
local=$(expr "$ref" : '[^:]*:\(.*\)')
case "$remote" in
'') remote=HEAD ;;
- refs/heads/* | refs/tags/*) ;;
- heads/* | tags/* ) remote="refs/$remote" ;;
+ refs/heads/* | refs/tags/* | refs/remotes/*) ;;
+ heads/* | tags/* | remotes/* ) remote="refs/$remote" ;;
*) remote="refs/heads/$remote" ;;
esac
case "$local" in
'') local= ;;
- refs/heads/* | refs/tags/*) ;;
- heads/* | tags/* ) local="refs/$local" ;;
+ refs/heads/* | refs/tags/* | refs/remotes/*) ;;
+ heads/* | tags/* | remotes/* ) local="refs/$local" ;;
*) local="refs/heads/$local" ;;
esac
diff --git a/git-pull.sh b/git-pull.sh
index 6caf1aa..4611ae6 100755
--- a/git-pull.sh
+++ b/git-pull.sh
@@ -55,9 +55,17 @@ then
# First update the working tree to match $curr_head.
echo >&2 "Warning: fetch updated the current branch head."
- echo >&2 "Warning: fast forwarding your working tree."
+ echo >&2 "Warning: fast forwarding your working tree from"
+ echo >&2 "Warning: $orig_head commit."
+ git-update-index --refresh 2>/dev/null
git-read-tree -u -m "$orig_head" "$curr_head" ||
- die "You need to first update your working tree."
+ die 'Cannot fast-forward your working tree.
+After making sure that you saved anything precious from
+$ git diff '$orig_head'
+output, run
+$ git reset --hard
+to recover.'
+
fi
merge_head=$(sed -e '/ not-for-merge /d' \
@@ -70,20 +78,16 @@ case "$merge_head" in
exit 0
;;
?*' '?*)
- var=`git repo-config --get pull.octopus`
- if test '' = "$var"
+ var=`git-repo-config --get pull.octopus`
+ if test -n "$var"
then
- strategy_default_args='-s octopus'
- else
strategy_default_args="-s $var"
fi
;;
*)
- var=`git repo-config --get pull.twohead`
- if test '' = "$var"
- then
- strategy_default_args='-s recursive'
- else
+ var=`git-repo-config --get pull.twohead`
+ if test -n "$var"
+ then
strategy_default_args="-s $var"
fi
;;
diff --git a/git-push.sh b/git-push.sh
index 706db99..f10cadb 100755
--- a/git-push.sh
+++ b/git-push.sh
@@ -8,6 +8,7 @@ USAGE='[--all] [--tags] [--force] <repository> [<refspec>...]'
has_all=
has_force=
has_exec=
+has_thin=--thin
remote=
do_tags=
@@ -22,6 +23,10 @@ do
has_force=--force ;;
--exec=*)
has_exec="$1" ;;
+ --thin)
+ ;; # noop
+ --no-thin)
+ has_thin= ;;
-*)
usage ;;
*)
@@ -72,6 +77,7 @@ set x "$remote" "$@"; shift
test "$has_all" && set x "$has_all" "$@" && shift
test "$has_force" && set x "$has_force" "$@" && shift
test "$has_exec" && set x "$has_exec" "$@" && shift
+test "$has_thin" && set x "$has_thin" "$@" && shift
case "$remote" in
http://* | https://*)
diff --git a/git-rebase.sh b/git-rebase.sh
index 16d4359..5956f06 100755
--- a/git-rebase.sh
+++ b/git-rebase.sh
@@ -3,10 +3,52 @@
# Copyright (c) 2005 Junio C Hamano.
#
-USAGE='<upstream> [<head>]'
+USAGE='[--onto <newbase>] <upstream> [<branch>]'
+LONG_USAGE='git-rebase applies the commits from <branch> that do not appear in
+<upstream> on top of <upstream> (or optionally on top of <newbase>).
+When <branch> is not specified it defaults to the current branch (HEAD).
+
+When git-rebase is complete, <branch> will be updated to point to the
+newly created line of commit objects, so the previous line will not be
+accessible unless there are other references to it already.
+
+Assuming the following history:
+
+ A---B---C topic
+ /
+ D---E---F---G master
+
+The result of the following command:
+
+ git-rebase --onto master~1 master topic
+
+ would be:
+
+ A'\''--B'\''--C'\'' topic
+ /
+ D---E---F---G master
+'
+
. git-sh-setup
-case $# in 1|2) ;; *) usage ;; esac
+unset newbase
+while case "$#" in 0) break ;; esac
+do
+ case "$1" in
+ --onto)
+ test 2 -le "$#" || usage
+ newbase="$2"
+ shift
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+done
# Make sure we do not have .dotest
if mkdir .dotest
@@ -30,37 +72,61 @@ case "$diff" in
;;
esac
-# The other head is given. Make sure it is valid.
-other=$(git-rev-parse --verify "$1^0") || usage
+# The upstream head must be given. Make sure it is valid.
+upstream_name="$1"
+upstream=`git rev-parse --verify "${upstream_name}^0"` ||
+ die "invalid upstream $upstream_name"
-# Make sure the branch to rebase is valid.
-head=$(git-rev-parse --verify "${2-HEAD}^0") || exit
+# If a hook exists, give it a chance to interrupt
+if test -x "$GIT_DIR/hooks/pre-rebase"
+then
+ "$GIT_DIR/hooks/pre-rebase" ${1+"$@"} || {
+ echo >&2 "The pre-rebase hook refused to rebase."
+ exit 1
+ }
+fi
# If the branch to rebase is given, first switch to it.
case "$#" in
2)
+ branch_name="$2"
git-checkout "$2" || usage
+ ;;
+*)
+ branch_name=`git symbolic-ref HEAD` || die "No current branch"
+ branch_name=`expr "$branch_name" : 'refs/heads/\(.*\)'`
+ ;;
esac
+branch=$(git-rev-parse --verify "${branch_name}^0") || exit
+
+# Make sure the branch to rebase onto is valid.
+onto_name=${newbase-"$upstream_name"}
+onto=$(git-rev-parse --verify "${onto_name}^0") || exit
-mb=$(git-merge-base "$other" "$head")
+# Now we are rebasing commits $upstream..$branch on top of $onto
-# Check if we are already based on $other.
-if test "$mb" = "$other"
+# Check if we are already based on $onto, but this should be
+# done only when upstream and onto are the same.
+if test "$upstream" = "onto"
then
- echo >&2 "Current branch `git-symbolic-ref HEAD` is up to date."
- exit 0
+ mb=$(git-merge-base "$onto" "$branch")
+ if test "$mb" = "$onto"
+ then
+ echo >&2 "Current branch $branch_name is up to date."
+ exit 0
+ fi
fi
-# Rewind the head to "$other"
-git-reset --hard "$other"
+# Rewind the head to "$onto"; this saves our current head in ORIG_HEAD.
+git-reset --hard "$onto"
-# If the $other is a proper descendant of the tip of the branch, then
+# If the $onto is a proper descendant of the tip of the branch, then
# we just fast forwarded.
-if test "$mb" = "$head"
+if test "$mb" = "$onto"
then
- echo >&2 "Fast-forwarded $head to $other."
+ echo >&2 "Fast-forwarded $branch to $newbase."
exit 0
fi
-git-format-patch -k --stdout --full-index "$other" ORIG_HEAD |
+git-format-patch -k --stdout --full-index "$upstream" ORIG_HEAD |
git am --binary -3 -k
diff --git a/git-repack.sh b/git-repack.sh
index 3d6fec1..a5d349f 100755
--- a/git-repack.sh
+++ b/git-repack.sh
@@ -29,12 +29,10 @@ PACKDIR="$GIT_OBJECT_DIRECTORY/pack"
case ",$all_into_one," in
,,)
rev_list='--unpacked'
- rev_parse='--all'
pack_objects='--incremental'
;;
,t,)
rev_list=
- rev_parse='--all'
pack_objects=
# Redundancy check in all-into-one case is trivial.
@@ -43,7 +41,7 @@ case ",$all_into_one," in
;;
esac
pack_objects="$pack_objects $local $quiet $no_reuse_delta"
-name=$(git-rev-list --objects $rev_list $(git-rev-parse $rev_parse) 2>&1 |
+name=$(git-rev-list --objects --all $rev_list 2>&1 |
git-pack-objects --non-empty $pack_objects .tmp-pack) ||
exit 1
if [ -z "$name" ]; then
@@ -75,6 +73,7 @@ then
done
)
fi
+ git-prune-packed
fi
case "$no_update_info" in
diff --git a/git-rerere.perl b/git-rerere.perl
index df11951..d3664ff 100755
--- a/git-rerere.perl
+++ b/git-rerere.perl
@@ -131,7 +131,11 @@ sub record_preimage {
sub find_conflict {
my $in;
local $/ = "\0";
- open $in, '-|', qw(git ls-files -z -u) or die "$!: ls-files";
+ my $pid = open($in, '-|');
+ die "$!" unless defined $pid;
+ if (!$pid) {
+ exec(qw(git ls-files -z -u)) or die "$!: ls-files";
+ }
my %path = ();
my @path = ();
while (<$in>) {
diff --git a/git-resolve.sh b/git-resolve.sh
index b53ede8..1c7aaef 100755
--- a/git-resolve.sh
+++ b/git-resolve.sh
@@ -41,7 +41,7 @@ case "$common" in
exit 0
;;
"$head")
- echo "Updating from $head to $merge."
+ echo "Updating from $head to $merge"
git-read-tree -u -m $head $merge || exit 1
git-update-ref HEAD "$merge" "$head"
git-diff-tree -p $head $merge | git-apply --stat
diff --git a/git-revert.sh b/git-revert.sh
index 2c58706..c19d3a6 100755
--- a/git-revert.sh
+++ b/git-revert.sh
@@ -141,8 +141,9 @@ git-read-tree -m -u $base $head $next &&
result=$(git-write-tree 2>/dev/null) || {
echo >&2 "Simple $me fails; trying Automatic $me."
git-merge-index -o git-merge-one-file -a || {
- echo >&2 "Automatic $me failed. After fixing it up,"
- echo >&2 "you can use \"git commit -F .msg\""
+ echo >&2 "Automatic $me failed. After resolving the conflicts,"
+ echo >&2 "mark the corrected paths with 'git-update-index <paths>'"
+ echo >&2 "and commit with 'git commit -F .msg'"
case "$me" in
cherry-pick)
echo >&2 "You may choose to use the following when making"
diff --git a/git-rm.sh b/git-rm.sh
new file mode 100755
index 0000000..fda4541
--- /dev/null
+++ b/git-rm.sh
@@ -0,0 +1,70 @@
+#!/bin/sh
+
+USAGE='[-f] [-n] [-v] [--] <file>...'
+SUBDIRECTORY_OK='Yes'
+. git-sh-setup
+
+remove_files=
+show_only=
+verbose=
+while : ; do
+ case "$1" in
+ -f)
+ remove_files=true
+ ;;
+ -n)
+ show_only=true
+ ;;
+ -v)
+ verbose=--verbose
+ ;;
+ --)
+ shift; break
+ ;;
+ -*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+done
+
+# This is typo-proofing. If some paths match and some do not, we want
+# to do nothing.
+case "$#" in
+0) ;;
+*)
+ git-ls-files --error-unmatch -- "$@" >/dev/null || {
+ echo >&2 "Maybe you misspelled it?"
+ exit 1
+ }
+ ;;
+esac
+
+if test -f "$GIT_DIR/info/exclude"
+then
+ git-ls-files -z \
+ --exclude-from="$GIT_DIR/info/exclude" \
+ --exclude-per-directory=.gitignore -- "$@"
+else
+ git-ls-files -z \
+ --exclude-per-directory=.gitignore -- "$@"
+fi |
+case "$show_only,$remove_files" in
+true,*)
+ xargs -0 echo
+ ;;
+*,true)
+ xargs -0 sh -c "
+ while [ \$# -gt 0 ]; do
+ file=\$1; shift
+ rm -- \"\$file\" && git-update-index --remove $verbose \"\$file\"
+ done
+ " inline
+ ;;
+*)
+ git-update-index --force-remove $verbose -z --stdin
+ ;;
+esac
diff --git a/git-send-email.perl b/git-send-email.perl
index 3f1b3ca..ecfa347 100755
--- a/git-send-email.perl
+++ b/git-send-email.perl
@@ -19,10 +19,16 @@
use strict;
use warnings;
use Term::ReadLine;
-use Mail::Sendmail qw(sendmail %mailcfg);
use Getopt::Long;
use Data::Dumper;
-use Email::Valid;
+use Net::SMTP;
+
+# most mail servers generate the Date: header, but not all...
+$ENV{LC_ALL} = 'C';
+use POSIX qw/strftime/;
+
+my $have_email_valid = eval { require Email::Valid; 1 };
+my $smtp;
sub unique_email_list(@);
sub cleanup_compose_files();
@@ -31,10 +37,10 @@ sub cleanup_compose_files();
my $compose_filename = ".msg.$$";
# Variables we fill in automatically, or via prompting:
-my (@to,@cc,$initial_reply_to,$initial_subject,@files,$from,$compose);
+my (@to,@cc,@initial_cc,$initial_reply_to,$initial_subject,@files,$from,$compose,$time);
# Behavior modification variables
-my ($chain_reply_to, $smtp_server, $quiet) = (1, "localhost", 0);
+my ($chain_reply_to, $smtp_server, $quiet, $suppress_from, $no_signed_off_cc) = (1, "localhost", 0, 0, 0);
# Example reply to:
#$initial_reply_to = ''; #<20050203173208.GA23964@foobar.com>';
@@ -48,32 +54,40 @@ my $rc = GetOptions("from=s" => \$from,
"in-reply-to=s" => \$initial_reply_to,
"subject=s" => \$initial_subject,
"to=s" => \@to,
+ "cc=s" => \@initial_cc,
"chain-reply-to!" => \$chain_reply_to,
"smtp-server=s" => \$smtp_server,
"compose" => \$compose,
"quiet" => \$quiet,
+ "suppress-from" => \$suppress_from,
+ "no-signed-off-cc|no-signed-off-by-cc" => \$no_signed_off_cc,
);
# Now, let's fill any that aren't set in with defaults:
-open(GITVAR,"-|","git-var","-l")
- or die "Failed to open pipe from git-var: $!";
-
-my ($author,$committer);
-while(<GITVAR>) {
- chomp;
- my ($var,$data) = split /=/,$_,2;
- my @fields = split /\s+/, $data;
-
- my $ident = join(" ", @fields[0...(@fields-3)]);
+sub gitvar {
+ my ($var) = @_;
+ my $fh;
+ my $pid = open($fh, '-|');
+ die "$!" unless defined $pid;
+ if (!$pid) {
+ exec('git-var', $var) or die "$!";
+ }
+ my ($val) = <$fh>;
+ close $fh or die "$!";
+ chomp($val);
+ return $val;
+}
- if ($var eq 'GIT_AUTHOR_IDENT') {
- $author = $ident;
- } elsif ($var eq 'GIT_COMMITTER_IDENT') {
- $committer = $ident;
- }
+sub gitvar_ident {
+ my ($name) = @_;
+ my $val = gitvar($name);
+ my @field = split(/\s+/, $val);
+ return join(' ', @field[0...(@field-3)]);
}
-close(GITVAR);
+
+my ($author) = gitvar_ident('GIT_AUTHOR_IDENT');
+my ($committer) = gitvar_ident('GIT_COMMITTER_IDENT');
my $prompting = 0;
if (!defined $from) {
@@ -197,6 +211,9 @@ Options:
--to Specify the primary "To:" line of the email.
+ --cc Specify an initial "Cc:" list for the entire series
+ of emails.
+
--compose Use \$EDITOR to edit an introductory message for the
patch series.
@@ -212,13 +229,19 @@ Options:
email sent, rather than to the first email sent.
Defaults to on.
+ --no-signed-off-cc Suppress the automatic addition of email addresses
+ that appear in a Signed-off-by: line, to the cc: list.
+ Note: Using this option is not recommended.
+
--smtp-server If set, specifies the outgoing SMTP server to use.
Defaults to localhost.
+ --suppress-from Suppress sending emails to yourself if your address
+ appears in a From: line.
+
--quiet Make git-send-email less verbose. One line per email should be
all that is output.
-
Error: Please specify a file or a directory on the command line.
EOT
exit(1);
@@ -227,6 +250,16 @@ EOT
# Variables we set as part of the loop over files
our ($message_id, $cc, %mail, $subject, $reply_to, $message);
+sub extract_valid_address {
+ my $address = shift;
+ if ($have_email_valid) {
+ return Email::Valid->address($address);
+ } else {
+ # less robust/correct than the monster regexp in Email::Valid,
+ # but still does a 99% job, and one less dependency
+ return ($address =~ /([^\"<>\s]+@[^<>\s]+)/);
+ }
+}
# Usually don't need to change anything below here.
@@ -236,13 +269,12 @@ our ($message_id, $cc, %mail, $subject, $reply_to, $message);
# 1 second since the last time we were called.
# We'll setup a template for the message id, using the "from" address:
-my $message_id_from = Email::Valid->address($from);
+my $message_id_from = extract_valid_address($from);
my $message_id_template = "<%s-git-send-email-$message_id_from>";
sub make_message_id
{
- my $date = `date "+\%s"`;
- chomp($date);
+ my $date = time;
my $pseudo_rand = int (rand(4200));
$message_id = sprintf $message_id_template, "$date$pseudo_rand";
#print "new message id = $message_id\n"; # Was useful for debugging
@@ -251,38 +283,49 @@ sub make_message_id
$cc = "";
+$time = time - scalar $#files;
sub send_message
{
- my $to = join (", ", unique_email_list(@to));
-
- %mail = ( To => $to,
- From => $from,
- CC => $cc,
- Subject => $subject,
- Message => $message,
- 'Reply-to' => $from,
- 'In-Reply-To' => $reply_to,
- 'Message-ID' => $message_id,
- 'X-Mailer' => "git-send-email",
- );
-
- $mail{smtp} = $smtp_server;
- $mailcfg{mime} = 0;
-
- #print Data::Dumper->Dump([\%mail],[qw(*mail)]);
-
- sendmail(%mail) or die $Mail::Sendmail::error;
+ my @recipients = unique_email_list(@to);
+ my $to = join (",\n\t", @recipients);
+ @recipients = unique_email_list(@recipients,@cc);
+ my $date = strftime('%a, %d %b %Y %H:%M:%S %z', localtime($time++));
+
+ my $header = "From: $from
+To: $to
+Cc: $cc
+Subject: $subject
+Reply-To: $from
+Date: $date
+Message-Id: $message_id
+X-Mailer: git-send-email @@GIT_VERSION@@
+";
+ $header .= "In-Reply-To: $reply_to\n" if $reply_to;
+
+ $smtp ||= Net::SMTP->new( $smtp_server );
+ $smtp->mail( $from ) or die $smtp->message;
+ $smtp->to( @recipients ) or die $smtp->message;
+ $smtp->data or die $smtp->message;
+ $smtp->datasend("$header\n$message") or die $smtp->message;
+ $smtp->dataend() or die $smtp->message;
+ $smtp->ok or die "Failed to send $subject\n".$smtp->message;
if ($quiet) {
printf "Sent %s\n", $subject;
} else {
- print "OK. Log says:\n", $Mail::Sendmail::log;
- print "\n\n"
+ print "OK. Log says:
+Date: $date
+Server: $smtp_server Port: 25
+From: $from
+Subject: $subject
+Cc: $cc
+To: $to
+
+Result: ", $smtp->code, ' ', ($smtp->message =~ /\n([^\n]+\n)$/s), "\n";
}
}
-
$reply_to = $initial_reply_to;
make_message_id();
$subject = $initial_subject;
@@ -290,7 +333,8 @@ $subject = $initial_subject;
foreach my $t (@files) {
open(F,"<",$t) or die "can't open file $t";
- @cc = ();
+ my $author_not_sender = undef;
+ @cc = @initial_cc;
my $found_mbox = 0;
my $header_done = 0;
$message = "";
@@ -304,6 +348,12 @@ foreach my $t (@files) {
$subject = $1;
} elsif (/^(Cc|From):\s+(.*)$/) {
+ if ($2 eq $from) {
+ next if ($suppress_from);
+ }
+ else {
+ $author_not_sender = $2;
+ }
printf("(mbox) Adding cc: %s from line '%s'\n",
$2, $_) unless $quiet;
push @cc, $2;
@@ -332,7 +382,7 @@ foreach my $t (@files) {
}
} else {
$message .= $_;
- if (/^Signed-off-by: (.*)$/i) {
+ if (/^Signed-off-by: (.*)$/i && !$no_signed_off_cc) {
my $c = $1;
chomp $c;
push @cc, $c;
@@ -342,6 +392,9 @@ foreach my $t (@files) {
}
}
close F;
+ if (defined $author_not_sender) {
+ $message = "From: $author_not_sender\n\n$message";
+ }
$cc = join(", ", unique_email_list(@cc));
@@ -363,14 +416,14 @@ sub cleanup_compose_files() {
}
-
+$smtp->quit if $smtp;
sub unique_email_list(@) {
my %seen;
my @emails;
foreach my $entry (@_) {
- my $clean = Email::Valid->address($entry);
+ my $clean = extract_valid_address($entry);
next if $seen{$clean}++;
push @emails, $entry;
}
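
The send_message() rewrite above drops the Mail::Sendmail dependency and speaks
to the mail server directly through Net::SMTP. A minimal sketch of that exchange
follows (server, addresses, and message body are placeholders, and error handling
is reduced to dying on the first failed step):

use strict;
use warnings;
use Net::SMTP;

my $smtp_server = 'localhost';                  # hypothetical; the script takes --smtp-server
my $from        = 'author@example.org';
my @recipients  = ('list@example.org');
my $header      = "From: $from\nTo: $recipients[0]\nSubject: [PATCH] example\n";
my $body        = "patch body goes here\n";

my $smtp = Net::SMTP->new($smtp_server) or die "cannot connect to $smtp_server";
$smtp->mail($from)     or die $smtp->message;   # MAIL FROM:
$smtp->to(@recipients) or die $smtp->message;   # RCPT TO: (one per recipient)
$smtp->data()          or die $smtp->message;   # DATA
$smtp->datasend("$header\n$body") or die $smtp->message;
$smtp->dataend()       or die $smtp->message;   # terminating "."
$smtp->quit;
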
diff --git a/git-svnimport.perl b/git-svnimport.perl
index f17d5a2..114784f 100755
--- a/git-svnimport.perl
+++ b/git-svnimport.perl
@@ -10,10 +10,10 @@
# The head revision is on branch "origin" by default.
# You can change that with the '-o' option.
-require 5.008; # for shell-safe open("-|",LIST)
use strict;
use warnings;
use Getopt::Std;
+use File::Copy;
use File::Spec;
use File::Temp qw(tempfile);
use File::Path qw(mkpath);
@@ -30,19 +30,21 @@ die "Need SVN:Core 1.2.1 or better" if $SVN::Core::VERSION lt "1.2.1";
$SIG{'PIPE'}="IGNORE";
$ENV{'TZ'}="UTC";
-our($opt_h,$opt_o,$opt_v,$opt_u,$opt_C,$opt_i,$opt_m,$opt_M,$opt_t,$opt_T,$opt_b,$opt_s,$opt_l,$opt_d,$opt_D);
+our($opt_h,$opt_o,$opt_v,$opt_u,$opt_C,$opt_i,$opt_m,$opt_M,$opt_t,$opt_T,
+ $opt_b,$opt_r,$opt_I,$opt_A,$opt_s,$opt_l,$opt_d,$opt_D);
sub usage() {
print STDERR <<END;
Usage: ${\basename $0} # fetch/update GIT from SVN
[-o branch-for-HEAD] [-h] [-v] [-l max_rev]
[-C GIT_repository] [-t tagname] [-T trunkname] [-b branchname]
- [-d|-D] [-i] [-u] [-s start_chg] [-m] [-M regex] [SVN_URL]
+ [-d|-D] [-i] [-u] [-r] [-I ignorefilename] [-s start_chg]
+ [-m] [-M regex] [-A author_file] [SVN_URL]
END
exit(1);
}
-getopts("b:C:dDhil:mM:o:s:t:T:uv") or usage();
+getopts("A:b:C:dDhiI:l:mM:o:rs:t:T:uv") or usage();
usage if $opt_h;
my $tag_name = $opt_t || "tags";
@@ -67,6 +69,25 @@ if ($opt_M) {
push (@mergerx, qr/$opt_M/);
}
+# Absolutize filename now, since we will have chdir'ed by the time we
+# get around to opening it.
+$opt_A = File::Spec->rel2abs($opt_A) if $opt_A;
+
+our %users = ();
+our $users_file = undef;
+sub read_users($) {
+ $users_file = File::Spec->rel2abs(@_);
+ die "Cannot open $users_file\n" unless -f $users_file;
+ open(my $authors,$users_file);
+ while(<$authors>) {
+ chomp;
+ next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
+ (my $user,my $name,my $email) = ($1,$2,$3);
+ $users{$user} = [$name,$email];
+ }
+ close($authors);
+}
+
select(STDERR); $|=1; select(STDOUT);
@@ -113,16 +134,40 @@ sub file {
DIR => File::Spec->tmpdir(), UNLINK => 1);
print "... $rev $path ...\n" if $opt_v;
- my $pool = SVN::Pool->new();
- eval { $self->{'svn'}->get_file($path,$rev,$fh,$pool); };
- $pool->clear;
+ my (undef, $properties);
+ eval { (undef, $properties)
+ = $self->{'svn'}->get_file($path,$rev,$fh); };
if($@) {
return undef if $@ =~ /Attempted to get checksum/;
die $@;
}
+ my $mode;
+ if (exists $properties->{'svn:executable'}) {
+ $mode = '0755';
+ } else {
+ $mode = '0644';
+ }
close ($fh);
- return $name;
+ return ($name, $mode);
+}
+
+sub ignore {
+ my($self,$path,$rev) = @_;
+
+ print "... $rev $path ...\n" if $opt_v;
+ my (undef,undef,$properties)
+ = $self->{'svn'}->get_dir($path,$rev,undef);
+ if (exists $properties->{'svn:ignore'}) {
+ my ($fh, $name) = tempfile('gitsvn.XXXXXX',
+ DIR => File::Spec->tmpdir(),
+ UNLINK => 1);
+ print $fh $properties->{'svn:ignore'};
+ close($fh);
+ return $name;
+ } else {
+ return undef;
+ }
}
package main;
@@ -264,6 +309,14 @@ EOM
-d $git_dir
or die "Could not create git subdir ($git_dir).\n";
+my $default_authors = "$git_dir/svn-authors";
+if ($opt_A) {
+ read_users($opt_A);
+ copy($opt_A,$default_authors) or die "Copy failed: $!";
+} else {
+ read_users($default_authors) if -f $default_authors;
+}
+
open BRANCHES,">>", "$git_dir/svn2git";
sub node_kind($$$) {
@@ -297,7 +350,7 @@ sub get_file($$$) {
my $svnpath = revert_split_path($branch,$path);
# now get it
- my $name;
+ my ($name,$mode);
if($opt_d) {
my($req,$res);
@@ -317,21 +370,53 @@ sub get_file($$$) {
return undef if $res->code == 301; # directory?
die $res->status_line." at $url\n";
}
+ $mode = '0644'; # can't obtain mode via direct http request?
} else {
- $name = $svn->file("$svnpath",$rev);
+ ($name,$mode) = $svn->file("$svnpath",$rev);
return undef unless defined $name;
}
- open my $F, '-|', "git-hash-object", "-w", $name
+ my $pid = open(my $F, '-|');
+ die $! unless defined $pid;
+ if (!$pid) {
+ exec("git-hash-object", "-w", $name)
or die "Cannot create object: $!\n";
+ }
my $sha = <$F>;
chomp $sha;
close $F;
unlink $name;
- my $mode = "0644"; # SV does not seem to store any file modes
return [$mode, $sha, $path];
}
+sub get_ignore($$$$$) {
+ my($new,$old,$rev,$branch,$path) = @_;
+
+ return unless $opt_I;
+ my $svnpath = revert_split_path($branch,$path);
+ my $name = $svn->ignore("$svnpath",$rev);
+ if ($path eq '/') {
+ $path = $opt_I;
+ } else {
+ $path = File::Spec->catfile($path,$opt_I);
+ }
+ if (defined $name) {
+ my $pid = open(my $F, '-|');
+ die $! unless defined $pid;
+ if (!$pid) {
+ exec("git-hash-object", "-w", $name)
+ or die "Cannot create object: $!\n";
+ }
+ my $sha = <$F>;
+ chomp $sha;
+ close $F;
+ unlink $name;
+ push(@$new,['0644',$sha,$path]);
+ } else {
+ push(@$old,$path);
+ }
+}
+
sub split_path($$) {
my($rev,$path) = @_;
my $branch;
@@ -398,7 +483,12 @@ sub copy_path($$$$$$$$) {
$srcpath =~ s#/*$#/#;
}
- open my $f,"-|","git-ls-tree","-r","-z",$gitrev,$srcpath;
+ my $pid = open my $f,'-|';
+ die $! unless defined $pid;
+ if (!$pid) {
+ exec("git-ls-tree","-r","-z",$gitrev,$srcpath)
+ or die $!;
+ }
local $/ = "\0";
while(<$f>) {
chomp;
@@ -423,6 +513,10 @@ sub commit {
if (not defined $author) {
$author_name = $author_email = "unknown";
+ } elsif (defined $users_file) {
+ die "User $author is not listed in $users_file\n"
+ unless exists $users{$author};
+ ($author_name,$author_email) = @{$users{$author}};
} elsif ($author =~ /^(.*?)\s+<(.*)>$/) {
($author_name, $author_email) = ($1, $2);
} else {
@@ -532,6 +626,9 @@ sub commit {
my $opath = $action->[3];
print STDERR "$revision: $branch: could not fetch '$opath'\n";
}
+ } elsif ($node_kind eq $SVN::Node::dir) {
+ get_ignore(\@new, \@old, $revision,
+ $branch,$path);
}
} elsif ($action->[0] eq "D") {
push(@old,$path);
@@ -540,6 +637,9 @@ sub commit {
if ($node_kind eq $SVN::Node::file) {
my $f = get_file($revision,$branch,$path);
push(@new,$f) if $f;
+ } elsif ($node_kind eq $SVN::Node::dir) {
+ get_ignore(\@new, \@old, $revision,
+ $branch,$path);
}
} else {
die "$revision: unknown action '".$action->[0]."' for $path\n";
@@ -554,7 +654,11 @@ sub commit {
@o1 = @old;
@old = ();
}
- open my $F, "-|", "git-ls-files", "-z", @o1 or die $!;
+ my $pid = open my $F, "-|";
+ die "$!" unless defined $pid;
+ if (!$pid) {
+ exec("git-ls-files", "-z", @o1) or die $!;
+ }
@o1 = ();
local $/ = "\0";
while(<$F>) {
@@ -650,6 +754,7 @@ sub commit {
$pr->reader();
$message =~ s/[\s\n]+\z//;
+ $message = "r$revision: $message" if $opt_r;
print $pw "$message\n"
or die "Error writing to git-commit-tree: $!\n";
@@ -746,7 +851,7 @@ sub commit_all {
$opt_l = $svn->{'maxrev'} if not defined $opt_l or $opt_l > $svn->{'maxrev'};
-if ($svn->{'maxrev'} < $current_rev) {
+if ($opt_l < $current_rev) {
print "Up to date: no new revisions to fetch!\n" if $opt_v;
unlink("$git_dir/SVN2GIT_HEAD");
exit;
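
The new -A option to git-svnimport reads an authors file mapping Subversion
usernames to Git identities; read_users() accepts lines of the form
"svnuser = Full Name <email>". A small sketch of the same parsing, with a
hypothetical file name and sample line:

use strict;
use warnings;

my %users;
my $users_file = 'svn-authors.example';   # illustrative path, not from the patch

open(my $authors, '<', $users_file) or die "Cannot open $users_file: $!";
while (<$authors>) {
    chomp;
    # the same pattern the patch uses: "svnuser = Full Name <email>"
    next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
    my ($user, $name, $email) = ($1, $2, $3);
    $users{$user} = [$name, $email];
}
close($authors);

# A line "jdoe = Jane Doe <jane@example.org>" yields:
#   $users{jdoe} = ['Jane Doe', 'jane@example.org']
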
diff --git a/git-tag.sh b/git-tag.sh
index c74e1b4..76e51ed 100755
--- a/git-tag.sh
+++ b/git-tag.sh
@@ -1,7 +1,7 @@
#!/bin/sh
# Copyright (c) 2005 Linus Torvalds
-USAGE='[-a | -s | -u <key-id>] [-f | -d] [-m <msg>] <tagname> [<head>]'
+USAGE='-l [<pattern>] | [-a | -s | -u <key-id>] [-f | -d] [-m <msg>] <tagname> [<head>]'
SUBDIRECTORY_OK='Yes'
. git-sh-setup
@@ -10,6 +10,7 @@ signed=
force=
message=
username=
+list=
while case "$#" in 0) break ;; esac
do
case "$1" in
@@ -23,6 +24,17 @@ do
-f)
force=1
;;
+ -l)
+ cd "$GIT_DIR/refs" &&
+ case "$#" in
+ 1)
+ find tags -type f -print ;;
+ *)
+ shift
+ find tags -type f -print | grep "$@" ;;
+ esac
+ exit $?
+ ;;
-m)
annotate=1
shift
diff --git a/git-verify-tag.sh b/git-verify-tag.sh
index 726b1e7..36f171b 100755
--- a/git-verify-tag.sh
+++ b/git-verify-tag.sh
@@ -4,9 +4,21 @@ USAGE='<tag>'
SUBDIRECTORY_OK='Yes'
. git-sh-setup
+verbose=
+while case $# in 0) break;; esac
+do
+ case "$1" in
+ -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose)
+ verbose=t ;;
+ *)
+ break ;;
+ esac
+ shift
+done
+
if [ "$#" != "1" ]
then
- usage
+ usage
fi
type="$(git-cat-file -t "$1" 2>/dev/null)" ||
@@ -15,6 +27,13 @@ type="$(git-cat-file -t "$1" 2>/dev/null)" ||
test "$type" = tag ||
die "$1: cannot verify a non-tag object of type $type."
+case "$verbose" in
+t)
+ git-cat-file -p "$1" |
+ sed -n -e '/^-----BEGIN PGP SIGNATURE-----/q' -e p
+ ;;
+esac
+
git-cat-file tag "$1" >"$GIT_DIR/.tmp-vtag" || exit 1
cat "$GIT_DIR/.tmp-vtag" |
sed '/-----BEGIN PGP/Q' |
diff --git a/git.c b/git.c
index 4616df6..b4a1ef2 100644
--- a/git.c
+++ b/git.c
@@ -11,6 +11,11 @@
#include <sys/ioctl.h>
#include "git-compat-util.h"
#include "exec_cmd.h"
+#include "common-cmds.h"
+
+#include "cache.h"
+#include "commit.h"
+#include "revision.h"
#ifndef PATH_MAX
# define PATH_MAX 4096
@@ -167,11 +172,29 @@ static void list_commands(const char *exec_path, const char *pattern)
putchar('\n');
}
+static void list_common_cmds_help(void)
+{
+ int i, longest = 0;
+
+ for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
+ if (longest < strlen(common_cmds[i].name))
+ longest = strlen(common_cmds[i].name);
+ }
+
+ puts("The most commonly used git commands are:");
+ for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
+ printf(" %s", common_cmds[i].name);
+ mput_char(' ', longest - strlen(common_cmds[i].name) + 4);
+ puts(common_cmds[i].help);
+ }
+ puts("(use 'git help -a' to get a list of all installed git commands)");
+}
+
#ifdef __GNUC__
-static void cmd_usage(const char *exec_path, const char *fmt, ...)
- __attribute__((__format__(__printf__, 2, 3), __noreturn__));
+static void cmd_usage(int show_all, const char *exec_path, const char *fmt, ...)
+ __attribute__((__format__(__printf__, 3, 4), __noreturn__));
#endif
-static void cmd_usage(const char *exec_path, const char *fmt, ...)
+static void cmd_usage(int show_all, const char *exec_path, const char *fmt, ...)
{
if (fmt) {
va_list ap;
@@ -185,10 +208,13 @@ static void cmd_usage(const char *exec_path, const char *fmt, ...)
else
puts(git_usage);
- putchar('\n');
-
- if(exec_path)
- list_commands(exec_path, "git-*");
+ if (exec_path) {
+ putchar('\n');
+ if (show_all)
+ list_commands(exec_path, "git-*");
+ else
+ list_common_cmds_help();
+ }
exit(1);
}
@@ -212,80 +238,231 @@ static void prepend_to_path(const char *dir, int len)
setenv("PATH", path, 1);
}
-static void show_man_page(char *git_cmd)
+static void show_man_page(const char *git_cmd)
{
- char *page;
+ const char *page;
if (!strncmp(git_cmd, "git", 3))
page = git_cmd;
else {
int page_len = strlen(git_cmd) + 4;
-
- page = malloc(page_len + 1);
- strcpy(page, "git-");
- strcpy(page + 4, git_cmd);
- page[page_len] = 0;
+ char *p = malloc(page_len + 1);
+ strcpy(p, "git-");
+ strcpy(p + 4, git_cmd);
+ p[page_len] = 0;
+ page = p;
}
execlp("man", "man", page, NULL);
}
-int main(int argc, char **argv, char **envp)
+static int cmd_version(int argc, const char **argv, char **envp)
{
- char git_command[PATH_MAX + 1];
- char wd[PATH_MAX + 1];
- int i, show_help = 0;
- const char *exec_path;
+ printf("git version %s\n", GIT_VERSION);
+ return 0;
+}
- getcwd(wd, PATH_MAX);
+static int cmd_help(int argc, const char **argv, char **envp)
+{
+ const char *help_cmd = argv[1];
+ if (!help_cmd)
+ cmd_usage(0, git_exec_path(), NULL);
+ else if (!strcmp(help_cmd, "--all") || !strcmp(help_cmd, "-a"))
+ cmd_usage(1, git_exec_path(), NULL);
+ else
+ show_man_page(help_cmd);
+ return 0;
+}
- for (i = 1; i < argc; i++) {
- char *arg = argv[i];
+#define LOGSIZE (65536)
- if (!strcmp(arg, "help")) {
- show_help = 1;
- continue;
+static int cmd_log(int argc, const char **argv, char **envp)
+{
+ struct rev_info rev;
+ struct commit *commit;
+ char *buf = xmalloc(LOGSIZE);
+ static enum cmit_fmt commit_format = CMIT_FMT_DEFAULT;
+ int abbrev = DEFAULT_ABBREV;
+ const char *commit_prefix = "commit ";
+
+ argc = setup_revisions(argc, argv, &rev, "HEAD");
+ while (1 < argc) {
+ const char *arg = argv[1];
+ if (!strncmp(arg, "--pretty", 8)) {
+ commit_format = get_commit_format(arg + 8);
+ if (commit_format == CMIT_FMT_ONELINE)
+ commit_prefix = "";
}
+ else if (!strcmp(arg, "--no-abbrev")) {
+ abbrev = 0;
+ }
+ else if (!strncmp(arg, "--abbrev=", 9)) {
+ abbrev = strtoul(arg + 9, NULL, 10);
+ if (abbrev && abbrev < MINIMUM_ABBREV)
+ abbrev = MINIMUM_ABBREV;
+ else if (40 < abbrev)
+ abbrev = 40;
+ }
+ else
+ die("unrecognized argument: %s", arg);
+ argc--; argv++;
+ }
- if (strncmp(arg, "--", 2))
- break;
-
- arg += 2;
-
- if (!strncmp(arg, "exec-path", 9)) {
- arg += 9;
- if (*arg == '=') {
- exec_path = arg + 1;
- git_set_exec_path(exec_path);
- } else {
- puts(git_exec_path());
- exit(0);
+ prepare_revision_walk(&rev);
+ setup_pager();
+ while ((commit = get_revision(&rev)) != NULL) {
+ printf("%s%s", commit_prefix,
+ sha1_to_hex(commit->object.sha1));
+ if (rev.parents) {
+ struct commit_list *parents = commit->parents;
+ while (parents) {
+ struct object *o = &(parents->item->object);
+ parents = parents->next;
+ if (o->flags & TMP_MARK)
+ continue;
+ printf(" %s", sha1_to_hex(o->sha1));
+ o->flags |= TMP_MARK;
}
+ /* TMP_MARK is a general purpose flag that can
+ * be used locally, but the user should clean
+ * things up after it is done with them.
+ */
+ for (parents = commit->parents;
+ parents;
+ parents = parents->next)
+ parents->item->object.flags &= ~TMP_MARK;
}
- else if (!strcmp(arg, "version")) {
- printf("git version %s\n", GIT_VERSION);
- exit(0);
- }
- else if (!strcmp(arg, "help"))
- show_help = 1;
- else if (!show_help)
- cmd_usage(NULL, NULL);
+ if (commit_format == CMIT_FMT_ONELINE)
+ putchar(' ');
+ else
+ putchar('\n');
+ pretty_print_commit(commit_format, commit, ~0, buf,
+ LOGSIZE, abbrev);
+ printf("%s\n", buf);
+ }
+ free(buf);
+ return 0;
+}
+
+static void handle_internal_command(int argc, const char **argv, char **envp)
+{
+ const char *cmd = argv[0];
+ static struct cmd_struct {
+ const char *cmd;
+ int (*fn)(int, const char **, char **);
+ } commands[] = {
+ { "version", cmd_version },
+ { "help", cmd_help },
+ { "log", cmd_log },
+ };
+ int i;
+
+ for (i = 0; i < ARRAY_SIZE(commands); i++) {
+ struct cmd_struct *p = commands+i;
+ if (strcmp(p->cmd, cmd))
+ continue;
+ exit(p->fn(argc, argv, envp));
}
+}
- if (i >= argc || show_help) {
- if (i >= argc)
- cmd_usage(git_exec_path(), NULL);
+int main(int argc, const char **argv, char **envp)
+{
+ const char *cmd = argv[0];
+ char *slash = strrchr(cmd, '/');
+ char git_command[PATH_MAX + 1];
+ const char *exec_path = NULL;
+
+ /*
+ * Take the basename of argv[0] as the command
+ * name, and the dirname as the default exec_path
+ * if it's an absolute path and we don't have
+ * anything better.
+ */
+ if (slash) {
+ *slash++ = 0;
+ if (*cmd == '/')
+ exec_path = cmd;
+ cmd = slash;
+ }
- show_man_page(argv[i]);
+ /*
+ * "git-xxxx" is the same as "git xxxx", but we obviously:
+ *
+ * - cannot take flags in between the "git" and the "xxxx".
+ * - cannot execute it externally (since it would just do
+ * the same thing over again)
+ *
+ * So we just directly call the internal command handler, and
+ * die if that one cannot handle it.
+ */
+ if (!strncmp(cmd, "git-", 4)) {
+ cmd += 4;
+ argv[0] = cmd;
+ handle_internal_command(argc, argv, envp);
+ die("cannot handle %s internally", cmd);
}
+ /* Default command: "help" */
+ cmd = "help";
+
+ /* Look for flags.. */
+ while (argc > 1) {
+ cmd = *++argv;
+ argc--;
+
+ if (strncmp(cmd, "--", 2))
+ break;
+
+ cmd += 2;
+
+ /*
+ * For legacy reasons, the "version" and "help"
+ * commands can be written with "--" prepended
+ * to make them look like flags.
+ */
+ if (!strcmp(cmd, "help"))
+ break;
+ if (!strcmp(cmd, "version"))
+ break;
+
+ /*
+ * Check remaining flags (which by now must be
+ * "--exec-path", but maybe we will accept
+ * other arguments some day)
+ */
+ if (!strncmp(cmd, "exec-path", 9)) {
+ cmd += 9;
+ if (*cmd == '=') {
+ git_set_exec_path(cmd + 1);
+ continue;
+ }
+ puts(git_exec_path());
+ exit(0);
+ }
+ cmd_usage(0, NULL, NULL);
+ }
+ argv[0] = cmd;
+
+ /*
+ * We search for git commands in the following order:
+ * - git_exec_path()
+ * - the path of the "git" command if we could find it
+ * in $0
+ * - the regular PATH.
+ */
+ if (exec_path)
+ prepend_to_path(exec_path, strlen(exec_path));
exec_path = git_exec_path();
prepend_to_path(exec_path, strlen(exec_path));
- execv_git_cmd(argv + i);
+ /* See if it's an internal command */
+ handle_internal_command(argc, argv, envp);
+
+ /* .. then try the external ones */
+ execv_git_cmd(argv);
if (errno == ENOENT)
- cmd_usage(exec_path, "'%s' is not a git-command", argv[i]);
+ cmd_usage(0, exec_path, "'%s' is not a git-command", cmd);
fprintf(stderr, "Failed to run command '%s': %s\n",
git_command, strerror(errno));
diff --git a/gitk b/gitk
index e482140..fa1e83c 100755
--- a/gitk
+++ b/gitk
@@ -34,14 +34,19 @@ proc parse_args {rargs} {
proc start_rev_list {rlargs} {
global startmsecs nextupdate ncmupdate
- global commfd leftover tclencoding
+ global commfd leftover tclencoding datemode
set startmsecs [clock clicks -milliseconds]
set nextupdate [expr {$startmsecs + 100}]
set ncmupdate 1
+ initlayout
+ set order "--topo-order"
+ if {$datemode} {
+ set order "--date-order"
+ }
if {[catch {
- set commfd [open [concat | git-rev-list --header --topo-order \
- --parents $rlargs] r]
+ set commfd [open [concat | git-rev-list --header $order \
+ --parents --boundary $rlargs] r]
} err]} {
puts stderr "Error executing git-rev-list: $err"
exit 1
@@ -57,16 +62,8 @@ proc start_rev_list {rlargs} {
}
proc getcommits {rargs} {
- global oldcommits commits phase canv mainfont env
+ global phase canv mainfont
- # check that we can find a .git directory somewhere...
- set gitdir [gitdir]
- if {![file isdirectory $gitdir]} {
- error_popup "Cannot find the git directory \"$gitdir\"."
- exit 1
- }
- set oldcommits {}
- set commits {}
set phase getcommits
start_rev_list [parse_args $rargs]
$canv delete all
@@ -75,10 +72,9 @@ proc getcommits {rargs} {
}
proc getcommitlines {commfd} {
- global oldcommits commits parents cdate children nchildren
- global commitlisted phase nextupdate
- global stopped redisplaying leftover
- global canv
+ global commitlisted nextupdate
+ global leftover
+ global displayorder commitidx commitrow commitdata
set stuff [read $commfd]
if {$stuff == {}} {
@@ -101,25 +97,33 @@ proc getcommitlines {commfd} {
exit 1
}
set start 0
+ set gotsome 0
while 1 {
set i [string first "\0" $stuff $start]
if {$i < 0} {
append leftover [string range $stuff $start end]
- return
+ break
}
- set cmit [string range $stuff $start [expr {$i - 1}]]
if {$start == 0} {
- set cmit "$leftover$cmit"
+ set cmit $leftover
+ append cmit [string range $stuff 0 [expr {$i - 1}]]
set leftover {}
+ } else {
+ set cmit [string range $stuff $start [expr {$i - 1}]]
}
set start [expr {$i + 1}]
set j [string first "\n" $cmit]
set ok 0
+ set listed 1
if {$j >= 0} {
set ids [string range $cmit 0 [expr {$j - 1}]]
+ if {[string range $ids 0 0] == "-"} {
+ set listed 0
+ set ids [string range $ids 1 end]
+ }
set ok 1
foreach id $ids {
- if {![regexp {^[0-9a-f]{40}$} $id]} {
+ if {[string length $id] != 40} {
set ok 0
break
}
@@ -134,29 +138,24 @@ proc getcommitlines {commfd} {
exit 1
}
set id [lindex $ids 0]
- set olds [lrange $ids 1 end]
- set cmit [string range $cmit [expr {$j + 1}] end]
- lappend commits $id
- set commitlisted($id) 1
- parsecommit $id $cmit 1 [lrange $ids 1 end]
- drawcommit $id 1
- if {[clock clicks -milliseconds] >= $nextupdate} {
- doupdate 1
- }
- while {$redisplaying} {
- set redisplaying 0
- if {$stopped == 1} {
- set stopped 0
- set phase "getcommits"
- foreach id $commits {
- drawcommit $id 1
- if {$stopped} break
- if {[clock clicks -milliseconds] >= $nextupdate} {
- doupdate 1
- }
- }
- }
+ if {$listed} {
+ set olds [lrange $ids 1 end]
+ set commitlisted($id) 1
+ } else {
+ set olds {}
}
+ updatechildren $id $olds
+ set commitdata($id) [string range $cmit [expr {$j + 1}] end]
+ set commitrow($id) $commitidx
+ incr commitidx
+ lappend displayorder $id
+ set gotsome 1
+ }
+ if {$gotsome} {
+ layoutmore
+ }
+ if {[clock clicks -milliseconds] >= $nextupdate} {
+ doupdate 1
}
}
@@ -182,106 +181,31 @@ proc doupdate {reading} {
proc readcommit {id} {
if {[catch {set contents [exec git-cat-file commit $id]}]} return
- parsecommit $id $contents 0 {}
+ updatechildren $id {}
+ parsecommit $id $contents 0
}
proc updatecommits {rargs} {
- global commitlisted commfd phase
- global startmsecs nextupdate ncmupdate
- global idtags idheads idotherrefs
- global leftover
- global parsed_args
- global canv mainfont
- global oldcommits commits
- global parents nchildren children ncleft
-
- set old_args $parsed_args
- parse_args $rargs
-
- if {$phase == "getcommits" || $phase == "incrdraw"} {
- # havent read all the old commits, just start again from scratch
- stopfindproc
- set oldcommits {}
- set commits {}
- foreach v {children nchildren parents commitlisted commitinfo
- selectedline matchinglines treediffs
- mergefilelist currentid rowtextx} {
- global $v
- catch {unset $v}
- }
- readrefs
- if {$phase == "incrdraw"} {
- allcanvs delete all
- $canv create text 3 3 -anchor nw -text "Reading commits..." \
- -font $mainfont -tags textitems
- set phase getcommits
- }
- start_rev_list $parsed_args
- return
- }
-
- foreach id $old_args {
- if {![regexp {^[0-9a-f]{40}$} $id]} continue
- if {[info exists oldref($id)]} continue
- set oldref($id) $id
- lappend ignoreold "^$id"
- }
- foreach id $parsed_args {
- if {![regexp {^[0-9a-f]{40}$} $id]} continue
- if {[info exists ref($id)]} continue
- set ref($id) $id
- lappend ignorenew "^$id"
- }
-
- foreach a $old_args {
- if {![info exists ref($a)]} {
- lappend ignorenew $a
- }
- }
-
- set phase updatecommits
- set oldcommits $commits
- set commits {}
- set removed_commits [split [eval exec git-rev-list $ignorenew] "\n" ]
- if {[llength $removed_commits] > 0} {
- allcanvs delete all
- foreach c $removed_commits {
- set i [lsearch -exact $oldcommits $c]
- if {$i >= 0} {
- set oldcommits [lreplace $oldcommits $i $i]
- unset commitlisted($c)
- foreach p $parents($c) {
- if {[info exists nchildren($p)]} {
- set j [lsearch -exact $children($p) $c]
- if {$j >= 0} {
- set children($p) [lreplace $children($p) $j $j]
- incr nchildren($p) -1
- }
- }
- }
- }
- }
- set phase removecommits
- }
-
- set args {}
- foreach a $parsed_args {
- if {![info exists oldref($a)]} {
- lappend args $a
- }
+ stopfindproc
+ foreach v {children nchildren parents nparents commitlisted
+ colormap selectedline matchinglines treediffs
+ mergefilelist currentid rowtextx commitrow
+ rowidlist rowoffsets idrowranges idrangedrawn iddrawn
+ linesegends crossings cornercrossings} {
+ global $v
+ catch {unset $v}
}
-
+ allcanvs delete all
readrefs
- start_rev_list [concat $ignoreold $args]
+ getcommits $rargs
}
proc updatechildren {id olds} {
- global children nchildren parents nparents ncleft
+ global children nchildren parents nparents
if {![info exists nchildren($id)]} {
set children($id) {}
set nchildren($id) 0
- set ncleft($id) 0
}
set parents($id) $olds
set nparents($id) [llength $olds]
@@ -289,16 +213,14 @@ proc updatechildren {id olds} {
if {![info exists nchildren($p)]} {
set children($p) [list $id]
set nchildren($p) 1
- set ncleft($p) 1
} elseif {[lsearch -exact $children($p) $id] < 0} {
lappend children($p) $id
incr nchildren($p)
- incr ncleft($p)
}
}
}
-proc parsecommit {id contents listed olds} {
+proc parsecommit {id contents listed} {
global commitinfo cdate
set inhdr 1
@@ -308,7 +230,6 @@ proc parsecommit {id contents listed olds} {
set audate {}
set comname {}
set comdate {}
- updatechildren $id $olds
set hdrend [string first "\n\n" $contents]
if {$hdrend < 0} {
# should never happen...
@@ -352,6 +273,21 @@ proc parsecommit {id contents listed olds} {
$comname $comdate $comment]
}
+proc getcommit {id} {
+ global commitdata commitinfo nparents
+
+ if {[info exists commitdata($id)]} {
+ parsecommit $id $commitdata($id) 1
+ } else {
+ readcommit $id
+ if {![info exists commitinfo($id)]} {
+ set commitinfo($id) {"No commit information available"}
+ set nparents($id) 0
+ }
+ }
+ return 1
+}
+
proc readrefs {} {
global tagids idtags headids idheads tagcontents
global otherrefids idotherrefs
@@ -405,6 +341,7 @@ proc error_popup msg {
button $w.ok -text OK -command "destroy $w"
pack $w.ok -side bottom -fill x
bind $w <Visibility> "grab $w; focus $w"
+ bind $w <Key-Return> "destroy $w"
tkwait window $w
}
@@ -457,7 +394,7 @@ proc makewindow {rargs} {
set canv .ctop.top.clist.canv
canvas $canv -height $geometry(canvh) -width $geometry(canv1) \
-bg white -bd 0 \
- -yscrollincr $linespc -yscrollcommand "$cscroll set"
+ -yscrollincr $linespc -yscrollcommand "scrollcanv $cscroll"
.ctop.top.clist add $canv
set canv2 .ctop.top.clist.canv2
canvas $canv2 -height $geometry(canvh) -width $geometry(canv2) \
@@ -575,8 +512,8 @@ proc makewindow {rargs} {
#bindall <B1-Motion> {selcanvline %W %x %y}
bindall <ButtonRelease-4> "allcanvs yview scroll -5 units"
bindall <ButtonRelease-5> "allcanvs yview scroll 5 units"
- bindall <2> "allcanvs scan mark 0 %y"
- bindall <B2-Motion> "allcanvs scan dragto 0 %y"
+ bindall <2> "canvscan mark %W %x %y"
+ bindall <B2-Motion> "canvscan dragto %W %x %y"
bind . <Key-Up> "selnextline -1"
bind . <Key-Down> "selnextline 1"
bind . <Key-Right> "goforw"
@@ -631,6 +568,24 @@ proc makewindow {rargs} {
$rowctxmenu add command -label "Write commit to file" -command writecommit
}
+# mouse-2 makes all windows scan vertically, but only the one
+# the cursor is in scans horizontally
+proc canvscan {op w x y} {
+ global canv canv2 canv3
+ foreach c [list $canv $canv2 $canv3] {
+ if {$c == $w} {
+ $c scan $op $x $y
+ } else {
+ $c scan $op 0 $y
+ }
+ }
+}
+
+proc scrollcanv {cscroll f0 f1} {
+ $cscroll set $f0 $f1
+ drawfrac $f0 $f1
+}
+
# when we make a key binding for the toplevel, make sure
# it doesn't get triggered when that key is pressed in the
# find string entry widget.
@@ -763,9 +718,9 @@ proc about {} {
toplevel $w
wm title $w "About gitk"
message $w.m -text {
-Gitk version 1.2
+Gitk - a commit viewer for git
-Copyright © 2005 Paul Mackerras
+Copyright © 2005-2006 Paul Mackerras
Use and redistribute under the terms of the GNU General Public License} \
-justify center -aspect 400
@@ -774,14 +729,794 @@ Use and redistribute under the terms of the GNU General Public License} \
pack $w.ok -side bottom
}
+proc shortids {ids} {
+ set res {}
+ foreach id $ids {
+ if {[llength $id] > 1} {
+ lappend res [shortids $id]
+ } elseif {[regexp {^[0-9a-f]{40}$} $id]} {
+ lappend res [string range $id 0 7]
+ } else {
+ lappend res $id
+ }
+ }
+ return $res
+}
+
+proc incrange {l x o} {
+ set n [llength $l]
+ while {$x < $n} {
+ set e [lindex $l $x]
+ if {$e ne {}} {
+ lset l $x [expr {$e + $o}]
+ }
+ incr x
+ }
+ return $l
+}
+
+proc ntimes {n o} {
+ set ret {}
+ for {} {$n > 0} {incr n -1} {
+ lappend ret $o
+ }
+ return $ret
+}
+
+proc usedinrange {id l1 l2} {
+ global children commitrow
+
+ if {[info exists commitrow($id)]} {
+ set r $commitrow($id)
+ if {$l1 <= $r && $r <= $l2} {
+ return [expr {$r - $l1 + 1}]
+ }
+ }
+ foreach c $children($id) {
+ if {[info exists commitrow($c)]} {
+ set r $commitrow($c)
+ if {$l1 <= $r && $r <= $l2} {
+ return [expr {$r - $l1 + 1}]
+ }
+ }
+ }
+ return 0
+}
+
+proc sanity {row {full 0}} {
+ global rowidlist rowoffsets
+
+ set col -1
+ set ids [lindex $rowidlist $row]
+ foreach id $ids {
+ incr col
+ if {$id eq {}} continue
+ if {$col < [llength $ids] - 1 &&
+ [lsearch -exact -start [expr {$col+1}] $ids $id] >= 0} {
+ puts "oops: [shortids $id] repeated in row $row col $col: {[shortids [lindex $rowidlist $row]]}"
+ }
+ set o [lindex $rowoffsets $row $col]
+ set y $row
+ set x $col
+ while {$o ne {}} {
+ incr y -1
+ incr x $o
+ if {[lindex $rowidlist $y $x] != $id} {
+ puts "oops: rowoffsets wrong at row [expr {$y+1}] col [expr {$x-$o}]"
+ puts " id=[shortids $id] check started at row $row"
+ for {set i $row} {$i >= $y} {incr i -1} {
+ puts " row $i ids={[shortids [lindex $rowidlist $i]]} offs={[lindex $rowoffsets $i]}"
+ }
+ break
+ }
+ if {!$full} break
+ set o [lindex $rowoffsets $y $x]
+ }
+ }
+}
+
+proc makeuparrow {oid x y z} {
+ global rowidlist rowoffsets uparrowlen idrowranges
+
+ for {set i 1} {$i < $uparrowlen && $y > 1} {incr i} {
+ incr y -1
+ incr x $z
+ set off0 [lindex $rowoffsets $y]
+ for {set x0 $x} {1} {incr x0} {
+ if {$x0 >= [llength $off0]} {
+ set x0 [llength [lindex $rowoffsets [expr {$y-1}]]]
+ break
+ }
+ set z [lindex $off0 $x0]
+ if {$z ne {}} {
+ incr x0 $z
+ break
+ }
+ }
+ set z [expr {$x0 - $x}]
+ lset rowidlist $y [linsert [lindex $rowidlist $y] $x $oid]
+ lset rowoffsets $y [linsert [lindex $rowoffsets $y] $x $z]
+ }
+ set tmp [lreplace [lindex $rowoffsets $y] $x $x {}]
+ lset rowoffsets $y [incrange $tmp [expr {$x+1}] -1]
+ lappend idrowranges($oid) $y
+}
+
+proc initlayout {} {
+ global rowidlist rowoffsets displayorder
+ global rowlaidout rowoptim
+ global idinlist rowchk
+ global commitidx numcommits canvxmax canv
+ global nextcolor
+
+ set commitidx 0
+ set numcommits 0
+ set displayorder {}
+ set nextcolor 0
+ set rowidlist {{}}
+ set rowoffsets {{}}
+ catch {unset idinlist}
+ catch {unset rowchk}
+ set rowlaidout 0
+ set rowoptim 0
+ set canvxmax [$canv cget -width]
+}
+
+proc setcanvscroll {} {
+ global canv canv2 canv3 numcommits linespc canvxmax canvy0
+
+ set ymax [expr {$canvy0 + ($numcommits - 0.5) * $linespc + 2}]
+ $canv conf -scrollregion [list 0 0 $canvxmax $ymax]
+ $canv2 conf -scrollregion [list 0 0 0 $ymax]
+ $canv3 conf -scrollregion [list 0 0 0 $ymax]
+}
+
+proc visiblerows {} {
+ global canv numcommits linespc
+
+ set ymax [lindex [$canv cget -scrollregion] 3]
+ if {$ymax eq {} || $ymax == 0} return
+ set f [$canv yview]
+ set y0 [expr {int([lindex $f 0] * $ymax)}]
+ set r0 [expr {int(($y0 - 3) / $linespc) - 1}]
+ if {$r0 < 0} {
+ set r0 0
+ }
+ set y1 [expr {int([lindex $f 1] * $ymax)}]
+ set r1 [expr {int(($y1 - 3) / $linespc) + 1}]
+ if {$r1 >= $numcommits} {
+ set r1 [expr {$numcommits - 1}]
+ }
+ return [list $r0 $r1]
+}
+
+proc layoutmore {} {
+ global rowlaidout rowoptim commitidx numcommits optim_delay
+ global uparrowlen
+
+ set row $rowlaidout
+ set rowlaidout [layoutrows $row $commitidx 0]
+ set orow [expr {$rowlaidout - $uparrowlen - 1}]
+ if {$orow > $rowoptim} {
+ checkcrossings $rowoptim $orow
+ optimize_rows $rowoptim 0 $orow
+ set rowoptim $orow
+ }
+ set canshow [expr {$rowoptim - $optim_delay}]
+ if {$canshow > $numcommits} {
+ showstuff $canshow
+ }
+}
+
+proc showstuff {canshow} {
+ global numcommits
+ global linesegends idrowranges idrangedrawn
+
+ if {$numcommits == 0} {
+ global phase
+ set phase "incrdraw"
+ allcanvs delete all
+ }
+ set row $numcommits
+ set numcommits $canshow
+ setcanvscroll
+ set rows [visiblerows]
+ set r0 [lindex $rows 0]
+ set r1 [lindex $rows 1]
+ for {set r $row} {$r < $canshow} {incr r} {
+ if {[info exists linesegends($r)]} {
+ foreach id $linesegends($r) {
+ set i -1
+ foreach {s e} $idrowranges($id) {
+ incr i
+ if {$e ne {} && $e < $numcommits && $s <= $r1 && $e >= $r0
+ && ![info exists idrangedrawn($id,$i)]} {
+ drawlineseg $id $i
+ set idrangedrawn($id,$i) 1
+ }
+ }
+ }
+ }
+ }
+ if {$canshow > $r1} {
+ set canshow $r1
+ }
+ while {$row < $canshow} {
+ drawcmitrow $row
+ incr row
+ }
+}
+
+proc layoutrows {row endrow last} {
+ global rowidlist rowoffsets displayorder
+ global uparrowlen downarrowlen maxwidth mingaplen
+ global nchildren parents nparents
+ global idrowranges linesegends
+ global commitidx
+ global idinlist rowchk
+
+ set idlist [lindex $rowidlist $row]
+ set offs [lindex $rowoffsets $row]
+ while {$row < $endrow} {
+ set id [lindex $displayorder $row]
+ set oldolds {}
+ set newolds {}
+ foreach p $parents($id) {
+ if {![info exists idinlist($p)]} {
+ lappend newolds $p
+ } elseif {!$idinlist($p)} {
+ lappend oldolds $p
+ }
+ }
+ set nev [expr {[llength $idlist] + [llength $newolds]
+ + [llength $oldolds] - $maxwidth + 1}]
+ if {$nev > 0} {
+ if {!$last && $row + $uparrowlen + $mingaplen >= $commitidx} break
+ for {set x [llength $idlist]} {[incr x -1] >= 0} {} {
+ set i [lindex $idlist $x]
+ if {![info exists rowchk($i)] || $row >= $rowchk($i)} {
+ set r [usedinrange $i [expr {$row - $downarrowlen}] \
+ [expr {$row + $uparrowlen + $mingaplen}]]
+ if {$r == 0} {
+ set idlist [lreplace $idlist $x $x]
+ set offs [lreplace $offs $x $x]
+ set offs [incrange $offs $x 1]
+ set idinlist($i) 0
+ set rm1 [expr {$row - 1}]
+ lappend linesegends($rm1) $i
+ lappend idrowranges($i) $rm1
+ if {[incr nev -1] <= 0} break
+ continue
+ }
+ set rowchk($id) [expr {$row + $r}]
+ }
+ }
+ lset rowidlist $row $idlist
+ lset rowoffsets $row $offs
+ }
+ set col [lsearch -exact $idlist $id]
+ if {$col < 0} {
+ set col [llength $idlist]
+ lappend idlist $id
+ lset rowidlist $row $idlist
+ set z {}
+ if {$nchildren($id) > 0} {
+ set z [expr {[llength [lindex $rowidlist [expr {$row-1}]]] - $col}]
+ unset idinlist($id)
+ }
+ lappend offs $z
+ lset rowoffsets $row $offs
+ if {$z ne {}} {
+ makeuparrow $id $col $row $z
+ }
+ } else {
+ unset idinlist($id)
+ }
+ if {[info exists idrowranges($id)]} {
+ lappend idrowranges($id) $row
+ }
+ incr row
+ set offs [ntimes [llength $idlist] 0]
+ set l [llength $newolds]
+ set idlist [eval lreplace \$idlist $col $col $newolds]
+ set o 0
+ if {$l != 1} {
+ set offs [lrange $offs 0 [expr {$col - 1}]]
+ foreach x $newolds {
+ lappend offs {}
+ incr o -1
+ }
+ incr o
+ set tmp [expr {[llength $idlist] - [llength $offs]}]
+ if {$tmp > 0} {
+ set offs [concat $offs [ntimes $tmp $o]]
+ }
+ } else {
+ lset offs $col {}
+ }
+ foreach i $newolds {
+ set idinlist($i) 1
+ set idrowranges($i) $row
+ }
+ incr col $l
+ foreach oid $oldolds {
+ set idinlist($oid) 1
+ set idlist [linsert $idlist $col $oid]
+ set offs [linsert $offs $col $o]
+ makeuparrow $oid $col $row $o
+ incr col
+ }
+ lappend rowidlist $idlist
+ lappend rowoffsets $offs
+ }
+ return $row
+}
+
+proc addextraid {id row} {
+ global displayorder commitrow commitinfo nparents
+ global commitidx
+
+ incr commitidx
+ lappend displayorder $id
+ set commitrow($id) $row
+ readcommit $id
+ if {![info exists commitinfo($id)]} {
+ set commitinfo($id) {"No commit information available"}
+ set nparents($id) 0
+ }
+}
+
+proc layouttail {} {
+ global rowidlist rowoffsets idinlist commitidx
+ global idrowranges
+
+ set row $commitidx
+ set idlist [lindex $rowidlist $row]
+ while {$idlist ne {}} {
+ set col [expr {[llength $idlist] - 1}]
+ set id [lindex $idlist $col]
+ addextraid $id $row
+ unset idinlist($id)
+ lappend idrowranges($id) $row
+ incr row
+ set offs [ntimes $col 0]
+ set idlist [lreplace $idlist $col $col]
+ lappend rowidlist $idlist
+ lappend rowoffsets $offs
+ }
+
+ foreach id [array names idinlist] {
+ addextraid $id $row
+ lset rowidlist $row [list $id]
+ lset rowoffsets $row 0
+ makeuparrow $id 0 $row 0
+ lappend idrowranges($id) $row
+ incr row
+ lappend rowidlist {}
+ lappend rowoffsets {}
+ }
+}
+
+proc insert_pad {row col npad} {
+ global rowidlist rowoffsets
+
+ set pad [ntimes $npad {}]
+ lset rowidlist $row [eval linsert [list [lindex $rowidlist $row]] $col $pad]
+ set tmp [eval linsert [list [lindex $rowoffsets $row]] $col $pad]
+ lset rowoffsets $row [incrange $tmp [expr {$col + $npad}] [expr {-$npad}]]
+}
+
+proc optimize_rows {row col endrow} {
+ global rowidlist rowoffsets idrowranges linesegends displayorder
+
+ for {} {$row < $endrow} {incr row} {
+ set idlist [lindex $rowidlist $row]
+ set offs [lindex $rowoffsets $row]
+ set haspad 0
+ for {} {$col < [llength $offs]} {incr col} {
+ if {[lindex $idlist $col] eq {}} {
+ set haspad 1
+ continue
+ }
+ set z [lindex $offs $col]
+ if {$z eq {}} continue
+ set isarrow 0
+ set x0 [expr {$col + $z}]
+ set y0 [expr {$row - 1}]
+ set z0 [lindex $rowoffsets $y0 $x0]
+ if {$z0 eq {}} {
+ set id [lindex $idlist $col]
+ if {[info exists idrowranges($id)] &&
+ $y0 > [lindex $idrowranges($id) 0]} {
+ set isarrow 1
+ }
+ }
+ if {$z < -1 || ($z < 0 && $isarrow)} {
+ set npad [expr {-1 - $z + $isarrow}]
+ set offs [incrange $offs $col $npad]
+ insert_pad $y0 $x0 $npad
+ if {$y0 > 0} {
+ optimize_rows $y0 $x0 $row
+ }
+ set z [lindex $offs $col]
+ set x0 [expr {$col + $z}]
+ set z0 [lindex $rowoffsets $y0 $x0]
+ } elseif {$z > 1 || ($z > 0 && $isarrow)} {
+ set npad [expr {$z - 1 + $isarrow}]
+ set y1 [expr {$row + 1}]
+ set offs2 [lindex $rowoffsets $y1]
+ set x1 -1
+ foreach z $offs2 {
+ incr x1
+ if {$z eq {} || $x1 + $z < $col} continue
+ if {$x1 + $z > $col} {
+ incr npad
+ }
+ lset rowoffsets $y1 [incrange $offs2 $x1 $npad]
+ break
+ }
+ set pad [ntimes $npad {}]
+ set idlist [eval linsert \$idlist $col $pad]
+ set tmp [eval linsert \$offs $col $pad]
+ incr col $npad
+ set offs [incrange $tmp $col [expr {-$npad}]]
+ set z [lindex $offs $col]
+ set haspad 1
+ }
+ if {$z0 eq {} && !$isarrow} {
+ # this line links to its first child on row $row-2
+ set rm2 [expr {$row - 2}]
+ set id [lindex $displayorder $rm2]
+ set xc [lsearch -exact [lindex $rowidlist $rm2] $id]
+ if {$xc >= 0} {
+ set z0 [expr {$xc - $x0}]
+ }
+ }
+ if {$z0 ne {} && $z < 0 && $z0 > 0} {
+ insert_pad $y0 $x0 1
+ set offs [incrange $offs $col 1]
+ optimize_rows $y0 [expr {$x0 + 1}] $row
+ }
+ }
+ if {!$haspad} {
+ set o {}
+ for {set col [llength $idlist]} {[incr col -1] >= 0} {} {
+ set o [lindex $offs $col]
+ if {$o eq {}} {
+ # check if this is the link to the first child
+ set id [lindex $idlist $col]
+ if {[info exists idrowranges($id)] &&
+ $row == [lindex $idrowranges($id) 0]} {
+ # it is, work out offset to child
+ set y0 [expr {$row - 1}]
+ set id [lindex $displayorder $y0]
+ set x0 [lsearch -exact [lindex $rowidlist $y0] $id]
+ if {$x0 >= 0} {
+ set o [expr {$x0 - $col}]
+ }
+ }
+ }
+ if {$o eq {} || $o <= 0} break
+ }
+ if {$o ne {} && [incr col] < [llength $idlist]} {
+ set y1 [expr {$row + 1}]
+ set offs2 [lindex $rowoffsets $y1]
+ set x1 -1
+ foreach z $offs2 {
+ incr x1
+ if {$z eq {} || $x1 + $z < $col} continue
+ lset rowoffsets $y1 [incrange $offs2 $x1 1]
+ break
+ }
+ set idlist [linsert $idlist $col {}]
+ set tmp [linsert $offs $col {}]
+ incr col
+ set offs [incrange $tmp $col -1]
+ }
+ }
+ lset rowidlist $row $idlist
+ lset rowoffsets $row $offs
+ set col 0
+ }
+}
+
+proc xc {row col} {
+ global canvx0 linespc
+ return [expr {$canvx0 + $col * $linespc}]
+}
+
+proc yc {row} {
+ global canvy0 linespc
+ return [expr {$canvy0 + $row * $linespc}]
+}
+
+proc linewidth {id} {
+ global thickerline lthickness
+
+ set wid $lthickness
+ if {[info exists thickerline] && $id eq $thickerline} {
+ set wid [expr {2 * $lthickness}]
+ }
+ return $wid
+}
+
+proc drawlineseg {id i} {
+ global rowoffsets rowidlist idrowranges
+ global displayorder
+ global canv colormap linespc
+
+ set startrow [lindex $idrowranges($id) [expr {2 * $i}]]
+ set row [lindex $idrowranges($id) [expr {2 * $i + 1}]]
+ if {$startrow == $row} return
+ assigncolor $id
+ set coords {}
+ set col [lsearch -exact [lindex $rowidlist $row] $id]
+ if {$col < 0} {
+ puts "oops: drawline: id $id not on row $row"
+ return
+ }
+ set lasto {}
+ set ns 0
+ while {1} {
+ set o [lindex $rowoffsets $row $col]
+ if {$o eq {}} break
+ if {$o ne $lasto} {
+ # changing direction
+ set x [xc $row $col]
+ set y [yc $row]
+ lappend coords $x $y
+ set lasto $o
+ }
+ incr col $o
+ incr row -1
+ }
+ set x [xc $row $col]
+ set y [yc $row]
+ lappend coords $x $y
+ if {$i == 0} {
+ # draw the link to the first child as part of this line
+ incr row -1
+ set child [lindex $displayorder $row]
+ set ccol [lsearch -exact [lindex $rowidlist $row] $child]
+ if {$ccol >= 0} {
+ set x [xc $row $ccol]
+ set y [yc $row]
+ if {$ccol < $col - 1} {
+ lappend coords [xc $row [expr {$col - 1}]] [yc $row]
+ } elseif {$ccol > $col + 1} {
+ lappend coords [xc $row [expr {$col + 1}]] [yc $row]
+ }
+ lappend coords $x $y
+ }
+ }
+ if {[llength $coords] < 4} return
+ set last [expr {[llength $idrowranges($id)] / 2 - 1}]
+ if {$i < $last} {
+ # This line has an arrow at the lower end: check if the arrow is
+ # on a diagonal segment, and if so, work around the Tk 8.4
+ # refusal to draw arrows on diagonal lines.
+ set x0 [lindex $coords 0]
+ set x1 [lindex $coords 2]
+ if {$x0 != $x1} {
+ set y0 [lindex $coords 1]
+ set y1 [lindex $coords 3]
+ if {$y0 - $y1 <= 2 * $linespc && $x1 == [lindex $coords 4]} {
+ # we have a nearby vertical segment, just trim off the diag bit
+ set coords [lrange $coords 2 end]
+ } else {
+ set slope [expr {($x0 - $x1) / ($y0 - $y1)}]
+ set xi [expr {$x0 - $slope * $linespc / 2}]
+ set yi [expr {$y0 - $linespc / 2}]
+ set coords [lreplace $coords 0 1 $xi $y0 $xi $yi]
+ }
+ }
+ }
+ set arrow [expr {2 * ($i > 0) + ($i < $last)}]
+ set arrow [lindex {none first last both} $arrow]
+ set t [$canv create line $coords -width [linewidth $id] \
+ -fill $colormap($id) -tags lines.$id -arrow $arrow]
+ $canv lower $t
+ bindline $t $id
+}
+
+proc drawparentlinks {id row col olds} {
+ global rowidlist canv colormap idrowranges
+
+ set row2 [expr {$row + 1}]
+ set x [xc $row $col]
+ set y [yc $row]
+ set y2 [yc $row2]
+ set ids [lindex $rowidlist $row2]
+ # rmx = right-most X coord used
+ set rmx 0
+ foreach p $olds {
+ set i [lsearch -exact $ids $p]
+ if {$i < 0} {
+ puts "oops, parent $p of $id not in list"
+ continue
+ }
+ set x2 [xc $row2 $i]
+ if {$x2 > $rmx} {
+ set rmx $x2
+ }
+ if {[info exists idrowranges($p)] &&
+ $row2 == [lindex $idrowranges($p) 0] &&
+ $row2 < [lindex $idrowranges($p) 1]} {
+ # drawlineseg will do this one for us
+ continue
+ }
+ assigncolor $p
+ # should handle duplicated parents here...
+ set coords [list $x $y]
+ if {$i < $col - 1} {
+ lappend coords [xc $row [expr {$i + 1}]] $y
+ } elseif {$i > $col + 1} {
+ lappend coords [xc $row [expr {$i - 1}]] $y
+ }
+ lappend coords $x2 $y2
+ set t [$canv create line $coords -width [linewidth $p] \
+ -fill $colormap($p) -tags lines.$p]
+ $canv lower $t
+ bindline $t $p
+ }
+ return $rmx
+}
+
+proc drawlines {id} {
+ global colormap canv
+ global idrowranges idrangedrawn
+ global children iddrawn commitrow rowidlist
+
+ $canv delete lines.$id
+ set nr [expr {[llength $idrowranges($id)] / 2}]
+ for {set i 0} {$i < $nr} {incr i} {
+ if {[info exists idrangedrawn($id,$i)]} {
+ drawlineseg $id $i
+ }
+ }
+ if {[info exists children($id)]} {
+ foreach child $children($id) {
+ if {[info exists iddrawn($child)]} {
+ set row $commitrow($child)
+ set col [lsearch -exact [lindex $rowidlist $row] $child]
+ if {$col >= 0} {
+ drawparentlinks $child $row $col [list $id]
+ }
+ }
+ }
+ }
+}
+
+proc drawcmittext {id row col rmx} {
+ global linespc canv canv2 canv3 canvy0
+ global commitlisted commitinfo rowidlist
+ global rowtextx idpos idtags idheads idotherrefs
+ global linehtag linentag linedtag
+ global mainfont namefont canvxmax
+
+ set ofill [expr {[info exists commitlisted($id)]? "blue": "white"}]
+ set x [xc $row $col]
+ set y [yc $row]
+ set orad [expr {$linespc / 3}]
+ set t [$canv create oval [expr {$x - $orad}] [expr {$y - $orad}] \
+ [expr {$x + $orad - 1}] [expr {$y + $orad - 1}] \
+ -fill $ofill -outline black -width 1]
+ $canv raise $t
+ $canv bind $t <1> {selcanvline {} %x %y}
+ set xt [xc $row [llength [lindex $rowidlist $row]]]
+ if {$xt < $rmx} {
+ set xt $rmx
+ }
+ set rowtextx($row) $xt
+ set idpos($id) [list $x $xt $y]
+ if {[info exists idtags($id)] || [info exists idheads($id)]
+ || [info exists idotherrefs($id)]} {
+ set xt [drawtags $id $x $xt $y]
+ }
+ set headline [lindex $commitinfo($id) 0]
+ set name [lindex $commitinfo($id) 1]
+ set date [lindex $commitinfo($id) 2]
+ set date [formatdate $date]
+ set linehtag($row) [$canv create text $xt $y -anchor w \
+ -text $headline -font $mainfont ]
+ $canv bind $linehtag($row) <Button-3> "rowmenu %X %Y $id"
+ set linentag($row) [$canv2 create text 3 $y -anchor w \
+ -text $name -font $namefont]
+ set linedtag($row) [$canv3 create text 3 $y -anchor w \
+ -text $date -font $mainfont]
+ set xr [expr {$xt + [font measure $mainfont $headline]}]
+ if {$xr > $canvxmax} {
+ set canvxmax $xr
+ setcanvscroll
+ }
+}
+
+proc drawcmitrow {row} {
+ global displayorder rowidlist
+ global idrowranges idrangedrawn iddrawn
+ global commitinfo commitlisted parents numcommits
+
+ if {$row >= $numcommits} return
+ foreach id [lindex $rowidlist $row] {
+ if {![info exists idrowranges($id)]} continue
+ set i -1
+ foreach {s e} $idrowranges($id) {
+ incr i
+ if {$row < $s} continue
+ if {$e eq {}} break
+ if {$row <= $e} {
+ if {$e < $numcommits && ![info exists idrangedrawn($id,$i)]} {
+ drawlineseg $id $i
+ set idrangedrawn($id,$i) 1
+ }
+ break
+ }
+ }
+ }
+
+ set id [lindex $displayorder $row]
+ if {[info exists iddrawn($id)]} return
+ set col [lsearch -exact [lindex $rowidlist $row] $id]
+ if {$col < 0} {
+ puts "oops, row $row id $id not in list"
+ return
+ }
+ if {![info exists commitinfo($id)]} {
+ getcommit $id
+ }
+ assigncolor $id
+ if {[info exists commitlisted($id)] && [info exists parents($id)]
+ && $parents($id) ne {}} {
+ set rmx [drawparentlinks $id $row $col $parents($id)]
+ } else {
+ set rmx 0
+ }
+ drawcmittext $id $row $col $rmx
+ set iddrawn($id) 1
+}
+
+proc drawfrac {f0 f1} {
+ global numcommits canv
+ global linespc
+
+ set ymax [lindex [$canv cget -scrollregion] 3]
+ if {$ymax eq {} || $ymax == 0} return
+ set y0 [expr {int($f0 * $ymax)}]
+ set row [expr {int(($y0 - 3) / $linespc) - 1}]
+ if {$row < 0} {
+ set row 0
+ }
+ set y1 [expr {int($f1 * $ymax)}]
+ set endrow [expr {int(($y1 - 3) / $linespc) + 1}]
+ if {$endrow >= $numcommits} {
+ set endrow [expr {$numcommits - 1}]
+ }
+ for {} {$row <= $endrow} {incr row} {
+ drawcmitrow $row
+ }
+}
+
+proc drawvisible {} {
+ global canv
+ eval drawfrac [$canv yview]
+}
+
+proc clear_display {} {
+ global iddrawn idrangedrawn
+
+ allcanvs delete all
+ catch {unset iddrawn}
+ catch {unset idrangedrawn}
+}
+
proc assigncolor {id} {
- global colormap commcolors colors nextcolor
+ global colormap colors nextcolor
global parents nparents children nchildren
global cornercrossings crossings
if {[info exists colormap($id)]} return
set ncolors [llength $colors]
- if {$nparents($id) <= 1 && $nchildren($id) == 1} {
+ if {$nchildren($id) == 1} {
set child [lindex $children($id) 0]
if {[info exists colormap($child)]
&& $nparents($child) == 1} {
@@ -845,28 +1580,6 @@ proc assigncolor {id} {
set colormap($id) $c
}
-proc initgraph {} {
- global canvy canvy0 lineno numcommits nextcolor linespc
- global nchildren ncleft
- global displist nhyperspace
-
- allcanvs delete all
- set nextcolor 0
- set canvy $canvy0
- set lineno -1
- set numcommits 0
- foreach v {mainline mainlinearrow sidelines colormap cornercrossings
- crossings idline lineid} {
- global $v
- catch {unset $v}
- }
- foreach id [array names nchildren] {
- set ncleft($id) $nchildren($id)
- }
- set displist {}
- set nhyperspace 0
-}
-
proc bindline {t id} {
global canv
@@ -876,121 +1589,10 @@ proc bindline {t id} {
$canv bind $t <Button-1> "lineclick %x %y $id 1"
}
-proc drawlines {id xtra delold} {
- global mainline mainlinearrow sidelines lthickness colormap canv
-
- if {$delold} {
- $canv delete lines.$id
- }
- if {[info exists mainline($id)]} {
- set t [$canv create line $mainline($id) \
- -width [expr {($xtra + 1) * $lthickness}] \
- -fill $colormap($id) -tags lines.$id \
- -arrow $mainlinearrow($id)]
- $canv lower $t
- bindline $t $id
- }
- if {[info exists sidelines($id)]} {
- foreach ls $sidelines($id) {
- set coords [lindex $ls 0]
- set thick [lindex $ls 1]
- set arrow [lindex $ls 2]
- set t [$canv create line $coords -fill $colormap($id) \
- -width [expr {($thick + $xtra) * $lthickness}] \
- -arrow $arrow -tags lines.$id]
- $canv lower $t
- bindline $t $id
- }
- }
-}
-
-# level here is an index in displist
-proc drawcommitline {level} {
- global parents children nparents displist
- global canv canv2 canv3 mainfont namefont canvy linespc
- global lineid linehtag linentag linedtag commitinfo
- global colormap numcommits currentparents dupparents
- global idtags idline idheads idotherrefs
- global lineno lthickness mainline mainlinearrow sidelines
- global commitlisted rowtextx idpos lastuse displist
- global oldnlines olddlevel olddisplist
-
- incr numcommits
- incr lineno
- set id [lindex $displist $level]
- set lastuse($id) $lineno
- set lineid($lineno) $id
- set idline($id) $lineno
- set ofill [expr {[info exists commitlisted($id)]? "blue": "white"}]
- if {![info exists commitinfo($id)]} {
- readcommit $id
- if {![info exists commitinfo($id)]} {
- set commitinfo($id) {"No commit information available"}
- set nparents($id) 0
- }
- }
- assigncolor $id
- set currentparents {}
- set dupparents {}
- if {[info exists commitlisted($id)] && [info exists parents($id)]} {
- foreach p $parents($id) {
- if {[lsearch -exact $currentparents $p] < 0} {
- lappend currentparents $p
- } else {
- # remember that this parent was listed twice
- lappend dupparents $p
- }
- }
- }
- set x [xcoord $level $level $lineno]
- set y1 $canvy
- set canvy [expr {$canvy + $linespc}]
- allcanvs conf -scrollregion \
- [list 0 0 0 [expr {$y1 + 0.5 * $linespc + 2}]]
- if {[info exists mainline($id)]} {
- lappend mainline($id) $x $y1
- if {$mainlinearrow($id) ne "none"} {
- set mainline($id) [trimdiagstart $mainline($id)]
- }
- }
- drawlines $id 0 0
- set orad [expr {$linespc / 3}]
- set t [$canv create oval [expr {$x - $orad}] [expr {$y1 - $orad}] \
- [expr {$x + $orad - 1}] [expr {$y1 + $orad - 1}] \
- -fill $ofill -outline black -width 1]
- $canv raise $t
- $canv bind $t <1> {selcanvline {} %x %y}
- set xt [xcoord [llength $displist] $level $lineno]
- if {[llength $currentparents] > 2} {
- set xt [expr {$xt + ([llength $currentparents] - 2) * $linespc}]
- }
- set rowtextx($lineno) $xt
- set idpos($id) [list $x $xt $y1]
- if {[info exists idtags($id)] || [info exists idheads($id)]
- || [info exists idotherrefs($id)]} {
- set xt [drawtags $id $x $xt $y1]
- }
- set headline [lindex $commitinfo($id) 0]
- set name [lindex $commitinfo($id) 1]
- set date [lindex $commitinfo($id) 2]
- set date [formatdate $date]
- set linehtag($lineno) [$canv create text $xt $y1 -anchor w \
- -text $headline -font $mainfont ]
- $canv bind $linehtag($lineno) <Button-3> "rowmenu %X %Y $id"
- set linentag($lineno) [$canv2 create text 3 $y1 -anchor w \
- -text $name -font $namefont]
- set linedtag($lineno) [$canv3 create text 3 $y1 -anchor w \
- -text $date -font $mainfont]
-
- set olddlevel $level
- set olddisplist $displist
- set oldnlines [llength $displist]
-}
-
proc drawtags {id x xt y1} {
global idtags idheads idotherrefs
global linespc lthickness
- global canv mainfont idline rowtextx
+ global canv mainfont commitrow rowtextx
set marks {}
set ntags 0
@@ -1033,7 +1635,7 @@ proc drawtags {id x xt y1} {
$xr $yt $xr $yb $xl $yb $x [expr {$yb - $delta}] \
-width 1 -outline black -fill yellow -tags tag.$id]
$canv bind $t <1> [list showtag $tag 1]
- set rowtextx($idline($id)) [expr {$xr + $linespc}]
+ set rowtextx($commitrow($id)) [expr {$xr + $linespc}]
} else {
# draw a head or other ref
if {[incr nheads -1] >= 0} {
@@ -1054,11 +1656,32 @@ proc drawtags {id x xt y1} {
return $xt
}
-proc notecrossings {id lo hi corner} {
- global olddisplist crossings cornercrossings
+proc checkcrossings {row endrow} {
+ global displayorder parents rowidlist
+
+ for {} {$row < $endrow} {incr row} {
+ set id [lindex $displayorder $row]
+ set i [lsearch -exact [lindex $rowidlist $row] $id]
+ if {$i < 0} continue
+ set idlist [lindex $rowidlist [expr {$row+1}]]
+ foreach p $parents($id) {
+ set j [lsearch -exact $idlist $p]
+ if {$j > 0} {
+ if {$j < $i - 1} {
+ notecrossings $row $p $j $i [expr {$j+1}]
+ } elseif {$j > $i + 1} {
+ notecrossings $row $p $i $j [expr {$j-1}]
+ }
+ }
+ }
+ }
+}
+
+proc notecrossings {row id lo hi corner} {
+ global rowidlist crossings cornercrossings
for {set i $lo} {[incr i] < $hi} {} {
- set p [lindex $olddisplist $i]
+ set p [lindex [lindex $rowidlist $row] $i]
if {$p == {}} continue
if {$i == $corner} {
if {![info exists cornercrossings($id)]
@@ -1094,512 +1717,23 @@ proc xcoord {i level ln} {
return $x
}
-# it seems Tk can't draw arrows on the end of diagonal line segments...
-proc trimdiagend {line} {
- while {[llength $line] > 4} {
- set x1 [lindex $line end-3]
- set y1 [lindex $line end-2]
- set x2 [lindex $line end-1]
- set y2 [lindex $line end]
- if {($x1 == $x2) != ($y1 == $y2)} break
- set line [lreplace $line end-1 end]
- }
- return $line
-}
-
-proc trimdiagstart {line} {
- while {[llength $line] > 4} {
- set x1 [lindex $line 0]
- set y1 [lindex $line 1]
- set x2 [lindex $line 2]
- set y2 [lindex $line 3]
- if {($x1 == $x2) != ($y1 == $y2)} break
- set line [lreplace $line 0 1]
- }
- return $line
-}
-
-proc drawslants {id needonscreen nohs} {
- global canv mainline mainlinearrow sidelines
- global canvx0 canvy xspc1 xspc2 lthickness
- global currentparents dupparents
- global lthickness linespc canvy colormap lineno geometry
- global maxgraphpct maxwidth
- global displist onscreen lastuse
- global parents commitlisted
- global oldnlines olddlevel olddisplist
- global nhyperspace numcommits nnewparents
-
- if {$lineno < 0} {
- lappend displist $id
- set onscreen($id) 1
- return 0
- }
-
- set y1 [expr {$canvy - $linespc}]
- set y2 $canvy
-
- # work out what we need to get back on screen
- set reins {}
- if {$onscreen($id) < 0} {
- # next to do isn't displayed, better get it on screen...
- lappend reins [list $id 0]
- }
- # make sure all the previous commits's parents are on the screen
- foreach p $currentparents {
- if {$onscreen($p) < 0} {
- lappend reins [list $p 0]
- }
- }
- # bring back anything requested by caller
- if {$needonscreen ne {}} {
- lappend reins $needonscreen
- }
-
- # try the shortcut
- if {$currentparents == $id && $onscreen($id) == 0 && $reins eq {}} {
- set dlevel $olddlevel
- set x [xcoord $dlevel $dlevel $lineno]
- set mainline($id) [list $x $y1]
- set mainlinearrow($id) none
- set lastuse($id) $lineno
- set displist [lreplace $displist $dlevel $dlevel $id]
- set onscreen($id) 1
- set xspc1([expr {$lineno + 1}]) $xspc1($lineno)
- return $dlevel
- }
-
- # update displist
- set displist [lreplace $displist $olddlevel $olddlevel]
- set j $olddlevel
- foreach p $currentparents {
- set lastuse($p) $lineno
- if {$onscreen($p) == 0} {
- set displist [linsert $displist $j $p]
- set onscreen($p) 1
- incr j
- }
- }
- if {$onscreen($id) == 0} {
- lappend displist $id
- set onscreen($id) 1
- }
-
- # remove the null entry if present
- set nullentry [lsearch -exact $displist {}]
- if {$nullentry >= 0} {
- set displist [lreplace $displist $nullentry $nullentry]
- }
-
- # bring back the ones we need now (if we did it earlier
- # it would change displist and invalidate olddlevel)
- foreach pi $reins {
- # test again in case of duplicates in reins
- set p [lindex $pi 0]
- if {$onscreen($p) < 0} {
- set onscreen($p) 1
- set lastuse($p) $lineno
- set displist [linsert $displist [lindex $pi 1] $p]
- incr nhyperspace -1
- }
- }
-
- set lastuse($id) $lineno
-
- # see if we need to make any lines jump off into hyperspace
- set displ [llength $displist]
- if {$displ > $maxwidth} {
- set ages {}
- foreach x $displist {
- lappend ages [list $lastuse($x) $x]
- }
- set ages [lsort -integer -index 0 $ages]
- set k 0
- while {$displ > $maxwidth} {
- set use [lindex $ages $k 0]
- set victim [lindex $ages $k 1]
- if {$use >= $lineno - 5} break
- incr k
- if {[lsearch -exact $nohs $victim] >= 0} continue
- set i [lsearch -exact $displist $victim]
- set displist [lreplace $displist $i $i]
- set onscreen($victim) -1
- incr nhyperspace
- incr displ -1
- if {$i < $nullentry} {
- incr nullentry -1
- }
- set x [lindex $mainline($victim) end-1]
- lappend mainline($victim) $x $y1
- set line [trimdiagend $mainline($victim)]
- set arrow "last"
- if {$mainlinearrow($victim) ne "none"} {
- set line [trimdiagstart $line]
- set arrow "both"
- }
- lappend sidelines($victim) [list $line 1 $arrow]
- unset mainline($victim)
- }
- }
-
- set dlevel [lsearch -exact $displist $id]
-
- # If we are reducing, put in a null entry
- if {$displ < $oldnlines} {
- # does the next line look like a merge?
- # i.e. does it have > 1 new parent?
- if {$nnewparents($id) > 1} {
- set i [expr {$dlevel + 1}]
- } elseif {$nnewparents([lindex $olddisplist $olddlevel]) == 0} {
- set i $olddlevel
- if {$nullentry >= 0 && $nullentry < $i} {
- incr i -1
- }
- } elseif {$nullentry >= 0} {
- set i $nullentry
- while {$i < $displ
- && [lindex $olddisplist $i] == [lindex $displist $i]} {
- incr i
- }
- } else {
- set i $olddlevel
- if {$dlevel >= $i} {
- incr i
- }
- }
- if {$i < $displ} {
- set displist [linsert $displist $i {}]
- incr displ
- if {$dlevel >= $i} {
- incr dlevel
- }
- }
- }
-
- # decide on the line spacing for the next line
- set lj [expr {$lineno + 1}]
- set maxw [expr {$maxgraphpct * $geometry(canv1) / 100}]
- if {$displ <= 1 || $canvx0 + $displ * $xspc2 <= $maxw} {
- set xspc1($lj) $xspc2
- } else {
- set xspc1($lj) [expr {($maxw - $canvx0 - $xspc2) / ($displ - 1)}]
- if {$xspc1($lj) < $lthickness} {
- set xspc1($lj) $lthickness
- }
- }
-
- foreach idi $reins {
- set id [lindex $idi 0]
- set j [lsearch -exact $displist $id]
- set xj [xcoord $j $dlevel $lj]
- set mainline($id) [list $xj $y2]
- set mainlinearrow($id) first
- }
-
- set i -1
- foreach id $olddisplist {
- incr i
- if {$id == {}} continue
- if {$onscreen($id) <= 0} continue
- set xi [xcoord $i $olddlevel $lineno]
- if {$i == $olddlevel} {
- foreach p $currentparents {
- set j [lsearch -exact $displist $p]
- set coords [list $xi $y1]
- set xj [xcoord $j $dlevel $lj]
- if {$xj < $xi - $linespc} {
- lappend coords [expr {$xj + $linespc}] $y1
- notecrossings $p $j $i [expr {$j + 1}]
- } elseif {$xj > $xi + $linespc} {
- lappend coords [expr {$xj - $linespc}] $y1
- notecrossings $p $i $j [expr {$j - 1}]
- }
- if {[lsearch -exact $dupparents $p] >= 0} {
- # draw a double-width line to indicate the doubled parent
- lappend coords $xj $y2
- lappend sidelines($p) [list $coords 2 none]
- if {![info exists mainline($p)]} {
- set mainline($p) [list $xj $y2]
- set mainlinearrow($p) none
- }
- } else {
- # normal case, no parent duplicated
- set yb $y2
- set dx [expr {abs($xi - $xj)}]
- if {0 && $dx < $linespc} {
- set yb [expr {$y1 + $dx}]
- }
- if {![info exists mainline($p)]} {
- if {$xi != $xj} {
- lappend coords $xj $yb
- }
- set mainline($p) $coords
- set mainlinearrow($p) none
- } else {
- lappend coords $xj $yb
- if {$yb < $y2} {
- lappend coords $xj $y2
- }
- lappend sidelines($p) [list $coords 1 none]
- }
- }
- }
- } else {
- set j $i
- if {[lindex $displist $i] != $id} {
- set j [lsearch -exact $displist $id]
- }
- if {$j != $i || $xspc1($lineno) != $xspc1($lj)
- || ($olddlevel < $i && $i < $dlevel)
- || ($dlevel < $i && $i < $olddlevel)} {
- set xj [xcoord $j $dlevel $lj]
- lappend mainline($id) $xi $y1 $xj $y2
- }
- }
- }
- return $dlevel
-}
-
-# search for x in a list of lists
-proc llsearch {llist x} {
- set i 0
- foreach l $llist {
- if {$l == $x || [lsearch -exact $l $x] >= 0} {
- return $i
- }
- incr i
- }
- return -1
-}
-
-proc drawmore {reading} {
- global displayorder numcommits ncmupdate nextupdate
- global stopped nhyperspace parents commitlisted
- global maxwidth onscreen displist currentparents olddlevel
-
- set n [llength $displayorder]
- while {$numcommits < $n} {
- set id [lindex $displayorder $numcommits]
- set ctxend [expr {$numcommits + 10}]
- if {!$reading && $ctxend > $n} {
- set ctxend $n
- }
- set dlist {}
- if {$numcommits > 0} {
- set dlist [lreplace $displist $olddlevel $olddlevel]
- set i $olddlevel
- foreach p $currentparents {
- if {$onscreen($p) == 0} {
- set dlist [linsert $dlist $i $p]
- incr i
- }
- }
- }
- set nohs {}
- set reins {}
- set isfat [expr {[llength $dlist] > $maxwidth}]
- if {$nhyperspace > 0 || $isfat} {
- if {$ctxend > $n} break
- # work out what to bring back and
- # what we want to don't want to send into hyperspace
- set room 1
- for {set k $numcommits} {$k < $ctxend} {incr k} {
- set x [lindex $displayorder $k]
- set i [llsearch $dlist $x]
- if {$i < 0} {
- set i [llength $dlist]
- lappend dlist $x
- }
- if {[lsearch -exact $nohs $x] < 0} {
- lappend nohs $x
- }
- if {$reins eq {} && $onscreen($x) < 0 && $room} {
- set reins [list $x $i]
- }
- set newp {}
- if {[info exists commitlisted($x)]} {
- set right 0
- foreach p $parents($x) {
- if {[llsearch $dlist $p] < 0} {
- lappend newp $p
- if {[lsearch -exact $nohs $p] < 0} {
- lappend nohs $p
- }
- if {$reins eq {} && $onscreen($p) < 0 && $room} {
- set reins [list $p [expr {$i + $right}]]
- }
- }
- set right 1
- }
- }
- set l [lindex $dlist $i]
- if {[llength $l] == 1} {
- set l $newp
- } else {
- set j [lsearch -exact $l $x]
- set l [concat [lreplace $l $j $j] $newp]
- }
- set dlist [lreplace $dlist $i $i $l]
- if {$room && $isfat && [llength $newp] <= 1} {
- set room 0
- }
- }
- }
-
- set dlevel [drawslants $id $reins $nohs]
- drawcommitline $dlevel
- if {[clock clicks -milliseconds] >= $nextupdate
- && $numcommits >= $ncmupdate} {
- doupdate $reading
- if {$stopped} break
- }
- }
-}
-
-# level here is an index in todo
-proc updatetodo {level noshortcut} {
- global ncleft todo nnewparents
- global commitlisted parents onscreen
-
- set id [lindex $todo $level]
- set olds {}
- if {[info exists commitlisted($id)]} {
- foreach p $parents($id) {
- if {[lsearch -exact $olds $p] < 0} {
- lappend olds $p
- }
- }
- }
- if {!$noshortcut && [llength $olds] == 1} {
- set p [lindex $olds 0]
- if {$ncleft($p) == 1 && [lsearch -exact $todo $p] < 0} {
- set ncleft($p) 0
- set todo [lreplace $todo $level $level $p]
- set onscreen($p) 0
- set nnewparents($id) 1
- return 0
- }
- }
-
- set todo [lreplace $todo $level $level]
- set i $level
- set n 0
- foreach p $olds {
- incr ncleft($p) -1
- set k [lsearch -exact $todo $p]
- if {$k < 0} {
- set todo [linsert $todo $i $p]
- set onscreen($p) 0
- incr i
- incr n
- }
- }
- set nnewparents($id) $n
-
- return 1
-}
-
-proc decidenext {{noread 0}} {
- global ncleft todo
- global datemode cdate
- global commitinfo
-
- # choose which one to do next time around
- set todol [llength $todo]
- set level -1
- set latest {}
- for {set k $todol} {[incr k -1] >= 0} {} {
- set p [lindex $todo $k]
- if {$ncleft($p) == 0} {
- if {$datemode} {
- if {![info exists commitinfo($p)]} {
- if {$noread} {
- return {}
- }
- readcommit $p
- }
- if {$latest == {} || $cdate($p) > $latest} {
- set level $k
- set latest $cdate($p)
- }
- } else {
- set level $k
- break
- }
- }
- }
-
- return $level
-}
-
-proc drawcommit {id reading} {
- global phase todo nchildren datemode nextupdate revlistorder ncleft
- global numcommits ncmupdate displayorder todo onscreen parents
- global commitlisted commitordered
-
- if {$phase != "incrdraw"} {
- set phase incrdraw
- set displayorder {}
- set todo {}
- initgraph
- catch {unset commitordered}
- }
- set commitordered($id) 1
- if {$nchildren($id) == 0} {
- lappend todo $id
- set onscreen($id) 0
- }
- if {$revlistorder} {
- set level [lsearch -exact $todo $id]
- if {$level < 0} {
- error_popup "oops, $id isn't in todo"
- return
- }
- lappend displayorder $id
- updatetodo $level 0
- } else {
- set level [decidenext 1]
- if {$level == {} || $level < 0} return
- while 1 {
- set id [lindex $todo $level]
- if {![info exists commitordered($id)]} {
- break
- }
- lappend displayorder [lindex $todo $level]
- if {[updatetodo $level $datemode]} {
- set level [decidenext 1]
- if {$level == {} || $level < 0} break
- }
- }
- }
- drawmore $reading
-}
-
proc finishcommits {} {
- global phase oldcommits commits
+ global commitidx phase
global canv mainfont ctext maincursor textcursor
- global parents displayorder todo
+ global findinprogress
- if {$phase == "incrdraw" || $phase == "removecommits"} {
- foreach id $oldcommits {
- lappend commits $id
- drawcommit $id 0
- }
- set oldcommits {}
+ if {$commitidx > 0} {
drawrest
- } elseif {$phase == "updatecommits"} {
- # there were no new commits, in fact
- set commits $oldcommits
- set oldcommits {}
- set phase {}
} else {
$canv delete all
$canv create text 3 3 -anchor nw -text "No commits selected" \
-font $mainfont -tags textitems
- set phase {}
}
- . config -cursor $maincursor
- settextcursor $textcursor
+ if {![info exists findinprogress]} {
+ . config -cursor $maincursor
+ settextcursor $textcursor
+ }
+ set phase {}
}
# Don't change the text pane cursor if it is currently the hand cursor,
@@ -1613,61 +1747,20 @@ proc settextcursor {c} {
set curtextcursor $c
}
-proc drawgraph {} {
- global nextupdate startmsecs ncmupdate
- global displayorder onscreen
-
- if {$displayorder == {}} return
- set startmsecs [clock clicks -milliseconds]
- set nextupdate [expr {$startmsecs + 100}]
- set ncmupdate 1
- initgraph
- foreach id $displayorder {
- set onscreen($id) 0
- }
- drawmore 0
-}
-
proc drawrest {} {
- global phase stopped redisplaying selectedline
- global datemode todo displayorder ncleft
- global numcommits ncmupdate
- global nextupdate startmsecs revlistorder
+ global numcommits
+ global startmsecs
+ global canvy0 numcommits linespc
+ global rowlaidout commitidx
- set level [decidenext]
- if {$level >= 0} {
- set phase drawgraph
- while 1 {
- lappend displayorder [lindex $todo $level]
- set hard [updatetodo $level $datemode]
- if {$hard} {
- set level [decidenext]
- if {$level < 0} break
- }
- }
- }
- if {$todo != {}} {
- puts "ERROR: none of the pending commits can be done yet:"
- foreach p $todo {
- puts " $p ($ncleft($p))"
- }
- }
+ set row $rowlaidout
+ layoutrows $rowlaidout $commitidx 1
+ layouttail
+ optimize_rows $row 0 $commitidx
+ showstuff $commitidx
- drawmore 0
- set phase {}
set drawmsecs [expr {[clock clicks -milliseconds] - $startmsecs}]
#puts "overall $drawmsecs ms for $numcommits commits"
- if {$redisplaying} {
- if {$stopped == 0 && [info exists selectedline]} {
- selectline $selectedline 0
- }
- if {$stopped == 1} {
- set stopped 0
- after idle drawgraph
- } else {
- set redisplaying 0
- }
- }
}
proc findmatches {f} {
@@ -1692,9 +1785,10 @@ proc findmatches {f} {
proc dofind {} {
global findtype findloc findstring markedmatches commitinfo
- global numcommits lineid linehtag linentag linedtag
+ global numcommits displayorder linehtag linentag linedtag
global mainfont namefont canv canv2 canv3 selectedline
- global matchinglines foundstring foundstrlen
+ global matchinglines foundstring foundstrlen matchstring
+ global commitdata
stopfindproc
unmarkmatches
@@ -1711,6 +1805,8 @@ proc dofind {} {
}
set foundstrlen [string length $findstring]
if {$foundstrlen == 0} return
+ regsub -all {[*?\[\\]} $foundstring {\\&} matchstring
+ set matchstring "*$matchstring*"
if {$findloc == "Files"} {
findfiles
return
@@ -1722,8 +1818,21 @@ proc dofind {} {
}
set didsel 0
set fldtypes {Headline Author Date Committer CDate Comment}
- for {set l 0} {$l < $numcommits} {incr l} {
- set id $lineid($l)
+ set l -1
+ foreach id $displayorder {
+ set d $commitdata($id)
+ incr l
+ if {$findtype == "Regexp"} {
+ set doesmatch [regexp $foundstring $d]
+ } elseif {$findtype == "IgnCase"} {
+ set doesmatch [string match -nocase $matchstring $d]
+ } else {
+ set doesmatch [string match $matchstring $d]
+ }
+ if {!$doesmatch} continue
+ if {![info exists commitinfo($id)]} {
+ getcommit $id
+ }
set info $commitinfo($id)
set doesmatch 0
foreach f $info ty $fldtypes {
@@ -1734,10 +1843,13 @@ proc dofind {} {
if {$matches == {}} continue
set doesmatch 1
if {$ty == "Headline"} {
+ drawcmitrow $l
markmatches $canv $l $f $linehtag($l) $matches $mainfont
} elseif {$ty == "Author"} {
+ drawcmitrow $l
markmatches $canv2 $l $f $linentag($l) $matches $namefont
} elseif {$ty == "Date"} {
+ drawcmitrow $l
markmatches $canv3 $l $f $linedtag($l) $matches $mainfont
}
}
@@ -1845,7 +1957,7 @@ proc stopfindproc {{done 0}} {
proc findpatches {} {
global findstring selectedline numcommits
global findprocpid findprocfile
- global finddidsel ctext lineid findinprogress
+ global finddidsel ctext displayorder findinprogress
global findinsertpos
if {$numcommits == 0} return
@@ -1862,7 +1974,7 @@ proc findpatches {} {
if {[incr l] >= $numcommits} {
set l 0
}
- append inputids $lineid($l) "\n"
+ append inputids [lindex $displayorder $l] "\n"
}
if {[catch {
@@ -1886,7 +1998,7 @@ proc findpatches {} {
proc readfindproc {} {
global findprocfile finddidsel
- global idline matchinglines findinsertpos
+ global commitrow matchinglines findinsertpos
set n [gets $findprocfile line]
if {$n < 0} {
@@ -1903,11 +2015,11 @@ proc readfindproc {} {
stopfindproc
return
}
- if {![info exists idline($id)]} {
+ if {![info exists commitrow($id)]} {
puts stderr "spurious id: $id"
return
}
- set l $idline($id)
+ set l $commitrow($id)
insertmatch $l $id
}
@@ -1933,10 +2045,10 @@ proc insertmatch {l id} {
}
proc findfiles {} {
- global selectedline numcommits lineid ctext
+ global selectedline numcommits displayorder ctext
global ffileline finddidsel parents nparents
global findinprogress findstartline findinsertpos
- global treediffs fdiffids fdiffsneeded fdiffpos
+ global treediffs fdiffid fdiffsneeded fdiffpos
global findmergefiles
if {$numcommits == 0} return
@@ -1951,13 +2063,11 @@ proc findfiles {} {
set diffsneeded {}
set fdiffsneeded {}
while 1 {
- set id $lineid($l)
+ set id [lindex $displayorder $l]
if {$findmergefiles || $nparents($id) == 1} {
- foreach p $parents($id) {
- if {![info exists treediffs([list $id $p])]} {
- append diffsneeded "$id $p\n"
- lappend fdiffsneeded [list $id $p]
- }
+ if {![info exists treediffs($id)]} {
+ append diffsneeded "$id\n"
+ lappend fdiffsneeded $id
}
}
if {[incr l] >= $numcommits} {
@@ -1974,7 +2084,7 @@ proc findfiles {} {
error_popup "Error starting search process: $err"
return
}
- catch {unset fdiffids}
+ catch {unset fdiffid}
set fdiffpos 0
fconfigure $df -blocking 0
fileevent $df readable [list readfilediffs $df]
@@ -1982,17 +2092,16 @@ proc findfiles {} {
set finddidsel 0
set findinsertpos end
- set id $lineid($l)
- set p [lindex $parents($id) 0]
+ set id [lindex $displayorder $l]
. config -cursor watch
settextcursor watch
set findinprogress 1
- findcont [list $id $p]
+ findcont $id
update
}
proc readfilediffs {df} {
- global findids fdiffids fdiffs
+ global findid fdiffid fdiffs
set n [gets $df line]
if {$n < 0} {
@@ -2002,19 +2111,19 @@ proc readfilediffs {df} {
stopfindproc
bell
error_popup "Error in git-diff-tree: $err"
- } elseif {[info exists findids]} {
- set ids $findids
+ } elseif {[info exists findid]} {
+ set id $findid
stopfindproc
bell
- error_popup "Couldn't find diffs for {$ids}"
+ error_popup "Couldn't find diffs for $id"
}
}
return
}
- if {[regexp {^([0-9a-f]{40}) \(from ([0-9a-f]{40})\)} $line match id p]} {
+ if {[regexp {^([0-9a-f]{40})$} $line match id]} {
# start of a new string of diffs
donefilediff
- set fdiffids [list $id $p]
+ set fdiffid $id
set fdiffs {}
} elseif {[string match ":*" $line]} {
lappend fdiffs [lindex $line 5]
@@ -2022,53 +2131,50 @@ proc readfilediffs {df} {
}
proc donefilediff {} {
- global fdiffids fdiffs treediffs findids
+ global fdiffid fdiffs treediffs findid
global fdiffsneeded fdiffpos
- if {[info exists fdiffids]} {
- while {[lindex $fdiffsneeded $fdiffpos] ne $fdiffids
+ if {[info exists fdiffid]} {
+ while {[lindex $fdiffsneeded $fdiffpos] ne $fdiffid
&& $fdiffpos < [llength $fdiffsneeded]} {
# git-diff-tree doesn't output anything for a commit
# which doesn't change anything
- set nullids [lindex $fdiffsneeded $fdiffpos]
- set treediffs($nullids) {}
- if {[info exists findids] && $nullids eq $findids} {
- unset findids
- findcont $nullids
+ set nullid [lindex $fdiffsneeded $fdiffpos]
+ set treediffs($nullid) {}
+ if {[info exists findid] && $nullid eq $findid} {
+ unset findid
+ findcont $nullid
}
incr fdiffpos
}
incr fdiffpos
- if {![info exists treediffs($fdiffids)]} {
- set treediffs($fdiffids) $fdiffs
+ if {![info exists treediffs($fdiffid)]} {
+ set treediffs($fdiffid) $fdiffs
}
- if {[info exists findids] && $fdiffids eq $findids} {
- unset findids
- findcont $fdiffids
+ if {[info exists findid] && $fdiffid eq $findid} {
+ unset findid
+ findcont $fdiffid
}
}
}
-proc findcont {ids} {
- global findids treediffs parents nparents
+proc findcont {id} {
+ global findid treediffs parents nparents
global ffileline findstartline finddidsel
- global lineid numcommits matchinglines findinprogress
+ global displayorder numcommits matchinglines findinprogress
global findmergefiles
- set id [lindex $ids 0]
- set p [lindex $ids 1]
- set pi [lsearch -exact $parents($id) $p]
set l $ffileline
while 1 {
if {$findmergefiles || $nparents($id) == 1} {
- if {![info exists treediffs($ids)]} {
- set findids $ids
+ if {![info exists treediffs($id)]} {
+ set findid $id
set ffileline $l
return
}
set doesmatch 0
- foreach f $treediffs($ids) {
+ foreach f $treediffs($id) {
set x [findmatches $f]
if {$x != {}} {
set doesmatch 1
@@ -2077,21 +2183,13 @@ proc findcont {ids} {
}
if {$doesmatch} {
insertmatch $l $id
- set pi $nparents($id)
}
- } else {
- set pi $nparents($id)
}
- if {[incr pi] >= $nparents($id)} {
- set pi 0
- if {[incr l] >= $numcommits} {
- set l 0
- }
- if {$l == $findstartline} break
- set id $lineid($l)
+ if {[incr l] >= $numcommits} {
+ set l 0
}
- set p [lindex $parents($id) $pi]
- set ids [list $id $p]
+ if {$l == $findstartline} break
+ set id [lindex $displayorder $l]
}
stopfindproc
if {!$finddidsel} {
@@ -2102,8 +2200,9 @@ proc findcont {ids} {
# mark a commit as matching by putting a yellow background
# behind the headline
proc markheadline {l id} {
- global canv mainfont linehtag commitinfo
+ global canv mainfont linehtag
+ drawcmitrow $l
set bbox [$canv bbox $linehtag($l)]
set t [$canv create rect $bbox -outline {} -tags matches -fill yellow]
$canv lower $t
@@ -2137,7 +2236,7 @@ proc unmarkmatches {} {
proc selcanvline {w x y} {
global canv canvy0 ctext linespc
- global lineid linehtag linentag linedtag rowtextx
+ global rowtextx
set ymax [lindex [$canv cget -scrollregion] 3]
if {$ymax == {}} return
set yfrac [lindex [$canv yview] 0]
@@ -2165,7 +2264,7 @@ proc commit_descriptor {p} {
# append some text to the ctext widget, and make any SHA1 ID
# that we know about be a clickable link.
proc appendwithlinks {text} {
- global ctext idline linknum
+ global ctext commitrow linknum
set start [$ctext index "end - 1c"]
$ctext insert end $text
@@ -2175,11 +2274,11 @@ proc appendwithlinks {text} {
set s [lindex $l 0]
set e [lindex $l 1]
set linkid [string range $text $s $e]
- if {![info exists idline($linkid)]} continue
+ if {![info exists commitrow($linkid)]} continue
incr e
$ctext tag add link "$start + $s c" "$start + $e c"
$ctext tag add link$linknum "$start + $s c" "$start + $e c"
- $ctext tag bind link$linknum <1> [list selectline $idline($linkid) 1]
+ $ctext tag bind link$linknum <1> [list selectline $commitrow($linkid) 1]
incr linknum
}
$ctext tag conf link -foreground blue -underline 1
@@ -2189,27 +2288,15 @@ proc appendwithlinks {text} {
proc selectline {l isnew} {
global canv canv2 canv3 ctext commitinfo selectedline
- global lineid linehtag linentag linedtag
+ global displayorder linehtag linentag linedtag
global canvy0 linespc parents nparents children
global cflist currentid sha1entry
- global commentend idtags idline linknum
- global mergemax
+ global commentend idtags linknum
+ global mergemax numcommits
$canv delete hover
normalline
- if {![info exists lineid($l)] || ![info exists linehtag($l)]} return
- $canv delete secsel
- set t [eval $canv create rect [$canv bbox $linehtag($l)] -outline {{}} \
- -tags secsel -fill [$canv cget -selectbackground]]
- $canv lower $t
- $canv2 delete secsel
- set t [eval $canv2 create rect [$canv2 bbox $linentag($l)] -outline {{}} \
- -tags secsel -fill [$canv2 cget -selectbackground]]
- $canv2 lower $t
- $canv3 delete secsel
- set t [eval $canv3 create rect [$canv3 bbox $linedtag($l)] -outline {{}} \
- -tags secsel -fill [$canv3 cget -selectbackground]]
- $canv3 lower $t
+ if {$l < 0 || $l >= $numcommits} return
set y [expr {$canvy0 + $l * $linespc}]
set ymax [lindex [$canv cget -scrollregion] 3]
set ytop [expr {$y - $linespc - 1}]
@@ -2243,15 +2330,30 @@ proc selectline {l isnew} {
set newtop 0
}
allcanvs yview moveto [expr {$newtop * 1.0 / $ymax}]
+ drawvisible
}
+ if {![info exists linehtag($l)]} return
+ $canv delete secsel
+ set t [eval $canv create rect [$canv bbox $linehtag($l)] -outline {{}} \
+ -tags secsel -fill [$canv cget -selectbackground]]
+ $canv lower $t
+ $canv2 delete secsel
+ set t [eval $canv2 create rect [$canv2 bbox $linentag($l)] -outline {{}} \
+ -tags secsel -fill [$canv2 cget -selectbackground]]
+ $canv2 lower $t
+ $canv3 delete secsel
+ set t [eval $canv3 create rect [$canv3 bbox $linedtag($l)] -outline {{}} \
+ -tags secsel -fill [$canv3 cget -selectbackground]]
+ $canv3 lower $t
+
if {$isnew} {
addtohistory [list selectline $l 0]
}
set selectedline $l
- set id $lineid($l)
+ set id [lindex $displayorder $l]
set currentid $id
$sha1entry delete 0 end
$sha1entry insert 0 $id
@@ -2315,9 +2417,9 @@ proc selectline {l isnew} {
$cflist delete 0 end
$cflist insert end "Comments"
- if {$nparents($id) == 1} {
+ if {$nparents($id) <= 1} {
startdiff $id
- } elseif {$nparents($id) > 1} {
+ } else {
mergediff $id
}
}
@@ -2389,9 +2491,10 @@ proc goforw {} {
proc mergediff {id} {
global parents diffmergeid diffopts mdifffd
- global difffilestart
+ global difffilestart diffids
set diffmergeid $id
+ set diffids $id
catch {unset difffilestart}
# this doesn't seem to actually affect anything...
set env(GIT_DIFF_OPTS) $diffopts
@@ -2408,7 +2511,7 @@ proc mergediff {id} {
proc getmergediffline {mdf id} {
global diffmergeid ctext cflist nextupdate nparents mergemax
- global difffilestart
+ global difffilestart mdifffd
set n [gets $mdf line]
if {$n < 0} {
@@ -2417,7 +2520,8 @@ proc getmergediffline {mdf id} {
}
return
}
- if {![info exists diffmergeid] || $id != $diffmergeid} {
+ if {![info exists diffmergeid] || $id != $diffmergeid
+ || $mdf != $mdifffd($id)} {
return
}
$ctext conf -state normal
@@ -2527,13 +2631,11 @@ proc gettreediffline {gdtf ids} {
set treediffs($ids) $treediff
unset treepending
if {$ids != $diffids} {
- gettreediffs $diffids
- } else {
- if {[info exists diffmergeid]} {
- contmergediff $ids
- } else {
- addtocflist $ids
+ if {![info exists diffmergeid]} {
+ gettreediffs $diffids
}
+ } else {
+ addtocflist $ids
}
return
}
@@ -2609,7 +2711,9 @@ proc getblobdiffline {bdf ids} {
set pad [string range "----------------------------------------" 1 $l]
$ctext insert end "$pad $header $pad\n" filesep
set diffinhdr 1
- } elseif {[regexp {^(---|\+\+\+)} $line]} {
+ } elseif {$diffinhdr && [string compare -length 3 $line "---"] == 0} {
+ # do nothing
+ } elseif {$diffinhdr && [string compare -length 3 $line "+++"] == 0} {
set diffinhdr 0
} elseif {[regexp {^@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@(.*)} \
$line match f1l f1c f2l f2c rest]} {
@@ -2676,22 +2780,26 @@ proc setcoords {} {
set linespc [font metrics $mainfont -linespace]
set charspc [font measure $mainfont "m"]
- set canvy0 [expr {3 + 0.5 * $linespc}]
- set canvx0 [expr {3 + 0.5 * $linespc}]
+ set canvy0 [expr {int(3 + 0.5 * $linespc)}]
+ set canvx0 [expr {int(3 + 0.5 * $linespc)}]
set lthickness [expr {int($linespc / 9) + 1}]
set xspc1(0) $linespc
set xspc2 $linespc
}
proc redisplay {} {
- global stopped redisplaying phase
- if {$stopped > 1} return
- if {$phase == "getcommits"} return
- set redisplaying 1
- if {$phase == "drawgraph" || $phase == "incrdraw"} {
- set stopped 1
- } else {
- drawgraph
+ global canv
+ global selectedline
+
+ set ymax [lindex [$canv cget -scrollregion] 3]
+ if {$ymax eq {} || $ymax == 0} return
+ set span [$canv yview]
+ clear_display
+ setcanvscroll
+ allcanvs yview moveto [lindex $span 0]
+ drawvisible
+ if {[info exists selectedline]} {
+ selectline $selectedline 0
}
}
@@ -2738,8 +2846,8 @@ proc sha1change {n1 n2 op} {
}
proc gotocommit {} {
- global sha1string currentid idline tagids
- global lineid numcommits
+ global sha1string currentid commitrow tagids
+ global displayorder numcommits
if {$sha1string == {}
|| ([info exists currentid] && $sha1string == $currentid)} return
@@ -2749,9 +2857,9 @@ proc gotocommit {} {
set id [string tolower $sha1string]
if {[regexp {^[0-9a-f]{4,39}$} $id]} {
set matches {}
- for {set l 0} {$l < $numcommits} {incr l} {
- if {[string match $id* $lineid($l)]} {
- lappend matches $lineid($l)
+ foreach i $displayorder {
+ if {[string match $id* $i]} {
+ lappend matches $i
}
}
if {$matches ne {}} {
@@ -2763,8 +2871,8 @@ proc gotocommit {} {
}
}
}
- if {[info exists idline($id)]} {
- selectline $idline($id) 1
+ if {[info exists commitrow($id)]} {
+ selectline $commitrow($id) 1
return
}
if {[regexp {^[0-9a-fA-F]{4,}$} $sha1string]} {
@@ -2779,7 +2887,7 @@ proc lineenter {x y id} {
global hoverx hovery hoverid hovertimer
global commitinfo canv
- if {![info exists commitinfo($id)]} return
+ if {![info exists commitinfo($id)] && ![getcommit $id]} return
set hoverx $x
set hovery $y
set hoverid $id
@@ -2839,65 +2947,26 @@ proc linehover {} {
}
proc clickisonarrow {id y} {
- global mainline mainlinearrow sidelines lthickness
+ global lthickness idrowranges
set thresh [expr {2 * $lthickness + 6}]
- if {[info exists mainline($id)]} {
- if {$mainlinearrow($id) ne "none"} {
- if {abs([lindex $mainline($id) 1] - $y) < $thresh} {
- return "up"
- }
- }
- }
- if {[info exists sidelines($id)]} {
- foreach ls $sidelines($id) {
- set coords [lindex $ls 0]
- set arrow [lindex $ls 2]
- if {$arrow eq "first" || $arrow eq "both"} {
- if {abs([lindex $coords 1] - $y) < $thresh} {
- return "up"
- }
- }
- if {$arrow eq "last" || $arrow eq "both"} {
- if {abs([lindex $coords end] - $y) < $thresh} {
- return "down"
- }
- }
+ set n [expr {[llength $idrowranges($id)] - 1}]
+ for {set i 1} {$i < $n} {incr i} {
+ set row [lindex $idrowranges($id) $i]
+ if {abs([yc $row] - $y) < $thresh} {
+ return $i
}
}
return {}
}
-proc arrowjump {id dirn y} {
- global mainline sidelines canv canv2 canv3
+proc arrowjump {id n y} {
+ global idrowranges canv
- set yt {}
- if {$dirn eq "down"} {
- if {[info exists mainline($id)]} {
- set y1 [lindex $mainline($id) 1]
- if {$y1 > $y} {
- set yt $y1
- }
- }
- if {[info exists sidelines($id)]} {
- foreach ls $sidelines($id) {
- set y1 [lindex $ls 0 1]
- if {$y1 > $y && ($yt eq {} || $y1 < $yt)} {
- set yt $y1
- }
- }
- }
- } else {
- if {[info exists sidelines($id)]} {
- foreach ls $sidelines($id) {
- set y1 [lindex $ls 0 end]
- if {$y1 < $y && ($yt eq {} || $y1 > $yt)} {
- set yt $y1
- }
- }
- }
- }
- if {$yt eq {}} return
+ # 1 <-> 2, 3 <-> 4, etc...
+ set n [expr {(($n - 1) ^ 1) + 1}]
+ set row [lindex $idrowranges($id) $n]
+ set yt [yc $row]
set ymax [lindex [$canv cget -scrollregion] 3]
if {$ymax eq {} || $ymax <= 0} return
set view [$canv yview]
@@ -2906,21 +2975,20 @@ proc arrowjump {id dirn y} {
if {$yfrac < 0} {
set yfrac 0
}
- $canv yview moveto $yfrac
- $canv2 yview moveto $yfrac
- $canv3 yview moveto $yfrac
+ allcanvs yview moveto $yfrac
}
proc lineclick {x y id isnew} {
global ctext commitinfo children cflist canv thickerline
+ if {![info exists commitinfo($id)] && ![getcommit $id]} return
unmarkmatches
unselectline
normalline
$canv delete hover
# draw this line thicker than normal
- drawlines $id 1 1
set thickerline $id
+ drawlines $id
if {$isnew} {
set ymax [lindex [$canv cget -scrollregion] 3]
if {$ymax eq {}} return
@@ -2955,6 +3023,7 @@ proc lineclick {x y id isnew} {
set i 0
foreach child $children($id) {
incr i
+ if {![info exists commitinfo($child)] && ![getcommit $child]} continue
set info $commitinfo($child)
$ctext insert end "\n\t"
$ctext insert end $child [list link link$i]
@@ -2973,15 +3042,16 @@ proc lineclick {x y id isnew} {
proc normalline {} {
global thickerline
if {[info exists thickerline]} {
- drawlines $thickerline 0 1
+ set id $thickerline
unset thickerline
+ drawlines $id
}
}
proc selbyid {id} {
- global idline
- if {[info exists idline($id)]} {
- selectline $idline($id) 1
+ global commitrow
+ if {[info exists commitrow($id)]} {
+ selectline $commitrow($id) 1
}
}
@@ -2994,9 +3064,9 @@ proc mstime {} {
}
proc rowmenu {x y id} {
- global rowctxmenu idline selectedline rowmenuid
+ global rowctxmenu commitrow selectedline rowmenuid
- if {![info exists selectedline] || $idline($id) eq $selectedline} {
+ if {![info exists selectedline] || $commitrow($id) eq $selectedline} {
set state disabled
} else {
set state normal
@@ -3009,15 +3079,15 @@ proc rowmenu {x y id} {
}
proc diffvssel {dirn} {
- global rowmenuid selectedline lineid
+ global rowmenuid selectedline displayorder
if {![info exists selectedline]} return
if {$dirn} {
- set oldid $lineid($selectedline)
+ set oldid [lindex $displayorder $selectedline]
set newid $rowmenuid
} else {
set oldid $rowmenuid
- set newid $lineid($selectedline)
+ set newid [lindex $displayorder $selectedline]
}
addtohistory [list doseldiff $oldid $newid]
doseldiff $oldid $newid
@@ -3199,13 +3269,14 @@ proc domktag {} {
}
proc redrawtags {id} {
- global canv linehtag idline idpos selectedline
+ global canv linehtag commitrow idpos selectedline
- if {![info exists idline($id)]} return
+ if {![info exists commitrow($id)]} return
+ drawcmitrow $commitrow($id)
$canv delete tag.$id
set xt [eval drawtags $id $idpos($id)]
- $canv coords $linehtag($idline($id)) $xt [lindex $idpos($id) 2]
- if {[info exists selectedline] && $selectedline == $idline($id)} {
+ $canv coords $linehtag($commitrow($id)) $xt [lindex $idpos($id) 2]
+ if {[info exists selectedline] && $selectedline == $commitrow($id)} {
selectline $selectedline 0
}
}
@@ -3711,6 +3782,9 @@ set maxgraphpct 50
set maxwidth 16
set revlistorder 0
set fastdate 0
+set uparrowlen 7
+set downarrowlen 7
+set mingaplen 30
set colors {green red blue magenta darkgrey brown orange}
@@ -3725,18 +3799,25 @@ foreach arg $argv {
switch -regexp -- $arg {
"^$" { }
"^-d" { set datemode 1 }
- "^-r" { set revlistorder 1 }
default {
lappend revtreeargs $arg
}
}
}
+# check that we can find a .git directory somewhere...
+set gitdir [gitdir]
+if {![file isdirectory $gitdir]} {
+ error_popup "Cannot find the git directory \"$gitdir\"."
+ exit 1
+}
+
set history {}
set historyindex 0
+set optim_delay 16
+
set stopped 0
-set redisplaying 0
set stuffsaved 0
set patchnum 0
setcoords
diff --git a/hash-object.c b/hash-object.c
index 6502b5b..43bd93b 100644
--- a/hash-object.c
+++ b/hash-object.c
@@ -2,9 +2,10 @@
* GIT - The information manager from hell
*
* Copyright (C) Linus Torvalds, 2005
- * Copyright (C) Junio C Hamano, 2005
+ * Copyright (C) Junio C Hamano, 2005
*/
#include "cache.h"
+#include "blob.h"
static void hash_object(const char *path, const char *type, int write_object)
{
@@ -35,7 +36,7 @@ static const char hash_object_usage[] =
int main(int argc, char **argv)
{
int i;
- const char *type = "blob";
+ const char *type = blob_type;
int write_object = 0;
const char *prefix = NULL;
int prefix_length = -1;
diff --git a/http-fetch.c b/http-fetch.c
index 8fd9de0..dc67218 100644
--- a/http-fetch.c
+++ b/http-fetch.c
@@ -8,6 +8,7 @@
#define RANGE_HEADER_SIZE 30
static int got_alternates = -1;
+static int corrupt_object_found = 0;
static struct curl_slist *no_pragma_header;
@@ -468,9 +469,13 @@ static void process_alternates_response(void *callback_data)
alt_req->url);
active_requests++;
slot->in_use = 1;
+ if (slot->finished != NULL)
+ (*slot->finished) = 0;
if (!start_active_slot(slot)) {
got_alternates = -1;
slot->in_use = 0;
+ if (slot->finished != NULL)
+ (*slot->finished) = 1;
}
return;
}
@@ -830,6 +835,7 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1)
obj_req->errorstr, obj_req->curl_result,
obj_req->http_code, hex);
} else if (obj_req->zret != Z_STREAM_END) {
+ corrupt_object_found++;
ret = error("File %s (%s) corrupt", hex, obj_req->url);
} else if (memcmp(obj_req->sha1, obj_req->real_sha1, 20)) {
ret = error("File %s has bad hash", hex);
@@ -989,5 +995,11 @@ int main(int argc, char **argv)
http_cleanup();
+ if (corrupt_object_found) {
+ fprintf(stderr,
+"Some loose object were found to be corrupt, but they might be just\n"
+"a false '404 Not Found' error message sent with incorrect HTTP\n"
+"status code. Suggest running git fsck-objects.\n");
+ }
return rc;
}
diff --git a/http-push.c b/http-push.c
index fe92560..b60fa8d 100644
--- a/http-push.c
+++ b/http-push.c
@@ -5,11 +5,14 @@
#include "tag.h"
#include "blob.h"
#include "http.h"
+#include "refs.h"
+#include "revision.h"
+#include "exec_cmd.h"
#include <expat.h>
static const char http_push_usage[] =
-"git-http-push [--complete] [--force] [--verbose] <url> <ref> [<ref>...]\n";
+"git-http-push [--all] [--force] [--verbose] <remote> [<head>...]\n";
#ifndef XML_STATUS_OK
enum XML_Status {
@@ -20,6 +23,7 @@ enum XML_Status {
#define XML_STATUS_ERROR 0
#endif
+#define PREV_BUF_SIZE 4096
#define RANGE_HEADER_SIZE 30
/* DAV methods */
@@ -29,6 +33,7 @@ enum XML_Status {
#define DAV_PROPFIND "PROPFIND"
#define DAV_PUT "PUT"
#define DAV_UNLOCK "UNLOCK"
+#define DAV_DELETE "DELETE"
/* DAV lock flags */
#define DAV_PROP_LOCKWR (1u << 0)
@@ -42,17 +47,31 @@ enum XML_Status {
#define DAV_ACTIVELOCK_OWNER ".prop.lockdiscovery.activelock.owner.href"
#define DAV_ACTIVELOCK_TIMEOUT ".prop.lockdiscovery.activelock.timeout"
#define DAV_ACTIVELOCK_TOKEN ".prop.lockdiscovery.activelock.locktoken.href"
+#define DAV_PROPFIND_RESP ".multistatus.response"
+#define DAV_PROPFIND_NAME ".multistatus.response.href"
+#define DAV_PROPFIND_COLLECTION ".multistatus.response.propstat.prop.resourcetype.collection"
/* DAV request body templates */
-#define PROPFIND_REQUEST "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n<D:propfind xmlns:D=\"DAV:\">\n<D:prop xmlns:R=\"%s\">\n<D:supportedlock/>\n</D:prop>\n</D:propfind>"
+#define PROPFIND_SUPPORTEDLOCK_REQUEST "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n<D:propfind xmlns:D=\"DAV:\">\n<D:prop xmlns:R=\"%s\">\n<D:supportedlock/>\n</D:prop>\n</D:propfind>"
+#define PROPFIND_ALL_REQUEST "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n<D:propfind xmlns:D=\"DAV:\">\n<D:allprop/>\n</D:propfind>"
#define LOCK_REQUEST "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n<D:lockinfo xmlns:D=\"DAV:\">\n<D:lockscope><D:exclusive/></D:lockscope>\n<D:locktype><D:write/></D:locktype>\n<D:owner>\n<D:href>mailto:%s</D:href>\n</D:owner>\n</D:lockinfo>"
#define LOCK_TIME 600
#define LOCK_REFRESH 30
+/* bits #0-4 in revision.h */
+
+#define LOCAL (1u << 5)
+#define REMOTE (1u << 6)
+#define FETCHING (1u << 7)
+#define PUSHING (1u << 8)
+
+/* We allow "recursive" symbolic refs. Only within reason, though */
+#define MAXDEPTH 5
+
static int pushing = 0;
static int aborted = 0;
-static char remote_dir_exists[256];
+static signed char remote_dir_exists[256];
static struct curl_slist *no_pragma_header;
static struct curl_slist *default_headers;
@@ -61,17 +80,25 @@ static int push_verbosely = 0;
static int push_all = 0;
static int force_all = 0;
+static struct object_list *objects = NULL;
+
struct repo
{
char *url;
+ int path_len;
+ int has_info_refs;
+ int can_update_info_refs;
+ int has_info_packs;
struct packed_git *packs;
+ struct remote_lock *locks;
};
static struct repo *remote = NULL;
enum transfer_state {
- NEED_CHECK,
- RUN_HEAD,
+ NEED_FETCH,
+ RUN_FETCH_LOOSE,
+ RUN_FETCH_PACKED,
NEED_PUSH,
RUN_MKCOL,
RUN_PUT,
@@ -82,14 +109,16 @@ enum transfer_state {
struct transfer_request
{
- unsigned char sha1[20];
+ struct object *obj;
char *url;
char *dest;
- struct active_lock *lock;
+ struct remote_lock *lock;
struct curl_slist *headers;
struct buffer buffer;
char filename[PATH_MAX];
char tmpfile[PATH_MAX];
+ int local_fileno;
+ FILE *local_stream;
enum transfer_state state;
CURLcode curl_result;
char errorstr[CURL_ERROR_SIZE];
@@ -99,6 +128,7 @@ struct transfer_request
z_stream stream;
int zret;
int rename;
+ void *userData;
struct active_request_slot *slot;
struct transfer_request *next;
};
@@ -114,7 +144,7 @@ struct xml_ctx
void *userData;
};
-struct active_lock
+struct remote_lock
{
char *url;
char *owner;
@@ -122,9 +152,30 @@ struct active_lock
time_t start_time;
long timeout;
int refreshing;
+ struct remote_lock *next;
+};
+
+/* Flags that control remote_ls processing */
+#define PROCESS_FILES (1u << 0)
+#define PROCESS_DIRS (1u << 1)
+#define RECURSIVE (1u << 2)
+
+/* Flags that remote_ls passes to callback functions */
+#define IS_DIR (1u << 0)
+
+struct remote_ls_ctx
+{
+ char *path;
+ void (*userFunc)(struct remote_ls_ctx *ls);
+ void *userData;
+ int flags;
+ char *dentry_name;
+ int dentry_flags;
+ struct remote_ls_ctx *parent;
};
static void finish_request(struct transfer_request *request);
+static void release_request(struct transfer_request *request);
static void process_response(void *callback_data)
{
@@ -134,42 +185,261 @@ static void process_response(void *callback_data)
finish_request(request);
}
-static void start_check(struct transfer_request *request)
+static size_t fwrite_sha1_file(void *ptr, size_t eltsize, size_t nmemb,
+ void *data)
{
- char *hex = sha1_to_hex(request->sha1);
- struct active_request_slot *slot;
+ unsigned char expn[4096];
+ size_t size = eltsize * nmemb;
+ int posn = 0;
+ struct transfer_request *request = (struct transfer_request *)data;
+ do {
+ ssize_t retval = write(request->local_fileno,
+ ptr + posn, size - posn);
+ if (retval < 0)
+ return posn;
+ posn += retval;
+ } while (posn < size);
+
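+ /* Feed the same bytes through inflate and SHA1 so the loose
+ object can be verified once the download completes. */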
+ request->stream.avail_in = size;
+ request->stream.next_in = ptr;
+ do {
+ request->stream.next_out = expn;
+ request->stream.avail_out = sizeof(expn);
+ request->zret = inflate(&request->stream, Z_SYNC_FLUSH);
+ SHA1_Update(&request->c, expn,
+ sizeof(expn) - request->stream.avail_out);
+ } while (request->stream.avail_in && request->zret == Z_OK);
+ data_received++;
+ return size;
+}
+
+static void start_fetch_loose(struct transfer_request *request)
+{
+ char *hex = sha1_to_hex(request->obj->sha1);
+ char *filename;
+ char prevfile[PATH_MAX];
+ char *url;
char *posn;
+ int prevlocal;
+ unsigned char prev_buf[PREV_BUF_SIZE];
+ ssize_t prev_read = 0;
+ long prev_posn = 0;
+ char range[RANGE_HEADER_SIZE];
+ struct curl_slist *range_header = NULL;
+ struct active_request_slot *slot;
- request->url = xmalloc(strlen(remote->url) + 55);
- strcpy(request->url, remote->url);
- posn = request->url + strlen(remote->url);
+ filename = sha1_file_name(request->obj->sha1);
+ snprintf(request->filename, sizeof(request->filename), "%s", filename);
+ snprintf(request->tmpfile, sizeof(request->tmpfile),
+ "%s.temp", filename);
+
+ snprintf(prevfile, sizeof(prevfile), "%s.prev", request->filename);
+ unlink(prevfile);
+ rename(request->tmpfile, prevfile);
+ unlink(request->tmpfile);
+
+ if (request->local_fileno != -1)
+ error("fd leakage in start: %d", request->local_fileno);
+ request->local_fileno = open(request->tmpfile,
+ O_WRONLY | O_CREAT | O_EXCL, 0666);
+ /* This could have failed due to the "lazy directory creation";
+ * try to mkdir the last path component.
+ */
+ if (request->local_fileno < 0 && errno == ENOENT) {
+ char *dir = strrchr(request->tmpfile, '/');
+ if (dir) {
+ *dir = 0;
+ mkdir(request->tmpfile, 0777);
+ *dir = '/';
+ }
+ request->local_fileno = open(request->tmpfile,
+ O_WRONLY | O_CREAT | O_EXCL, 0666);
+ }
+
+ if (request->local_fileno < 0) {
+ request->state = ABORTED;
+ error("Couldn't create temporary file %s for %s: %s",
+ request->tmpfile, request->filename, strerror(errno));
+ return;
+ }
+
+ memset(&request->stream, 0, sizeof(request->stream));
+
+ inflateInit(&request->stream);
+
+ SHA1_Init(&request->c);
+
+ url = xmalloc(strlen(remote->url) + 50);
+ request->url = xmalloc(strlen(remote->url) + 50);
+ strcpy(url, remote->url);
+ posn = url + strlen(remote->url);
strcpy(posn, "objects/");
posn += 8;
memcpy(posn, hex, 2);
posn += 2;
*(posn++) = '/';
strcpy(posn, hex + 2);
+ strcpy(request->url, url);
+
+ /* If a previous temp file is present, process what was already
+ fetched. */
+ prevlocal = open(prevfile, O_RDONLY);
+ if (prevlocal != -1) {
+ do {
+ prev_read = read(prevlocal, prev_buf, PREV_BUF_SIZE);
+ if (prev_read>0) {
+ if (fwrite_sha1_file(prev_buf,
+ 1,
+ prev_read,
+ request) == prev_read) {
+ prev_posn += prev_read;
+ } else {
+ prev_read = -1;
+ }
+ }
+ } while (prev_read > 0);
+ close(prevlocal);
+ }
+ unlink(prevfile);
+
+ /* Reset inflate/SHA1 if there was an error reading the previous temp
+ file; also rewind to the beginning of the local file. */
+ if (prev_read == -1) {
+ memset(&request->stream, 0, sizeof(request->stream));
+ inflateInit(&request->stream);
+ SHA1_Init(&request->c);
+ if (prev_posn>0) {
+ prev_posn = 0;
+ lseek(request->local_fileno, 0, SEEK_SET);
+ ftruncate(request->local_fileno, 0);
+ }
+ }
slot = get_active_slot();
slot->callback_func = process_response;
slot->callback_data = request;
+ request->slot = slot;
+
+ curl_easy_setopt(slot->curl, CURLOPT_FILE, request);
+ curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_sha1_file);
curl_easy_setopt(slot->curl, CURLOPT_ERRORBUFFER, request->errorstr);
- curl_easy_setopt(slot->curl, CURLOPT_URL, request->url);
- curl_easy_setopt(slot->curl, CURLOPT_NOBODY, 1);
+ curl_easy_setopt(slot->curl, CURLOPT_URL, url);
+ curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, no_pragma_header);
- if (start_active_slot(slot)) {
- request->slot = slot;
- request->state = RUN_HEAD;
- } else {
- request->state = ABORTED;
- free(request->url);
- request->url = NULL;
+ /* If we have successfully processed data from a previous fetch
+ attempt, only fetch the data we don't already have. */
+ if (prev_posn>0) {
+ if (push_verbosely)
+ fprintf(stderr,
+ "Resuming fetch of object %s at byte %ld\n",
+ hex, prev_posn);
+ sprintf(range, "Range: bytes=%ld-", prev_posn);
+ range_header = curl_slist_append(range_header, range);
+ curl_easy_setopt(slot->curl,
+ CURLOPT_HTTPHEADER, range_header);
+ }
+
+ /* Try to get the request started, abort the request on error */
+ request->state = RUN_FETCH_LOOSE;
+ if (!start_active_slot(slot)) {
+ fprintf(stderr, "Unable to start GET request\n");
+ remote->can_update_info_refs = 0;
+ release_request(request);
+ }
+}
+
+static void start_fetch_packed(struct transfer_request *request)
+{
+ char *url;
+ struct packed_git *target;
+ FILE *packfile;
+ char *filename;
+ long prev_posn = 0;
+ char range[RANGE_HEADER_SIZE];
+ struct curl_slist *range_header = NULL;
+
+ struct transfer_request *check_request = request_queue_head;
+ struct active_request_slot *slot;
+
+ target = find_sha1_pack(request->obj->sha1, remote->packs);
+ if (!target) {
+ fprintf(stderr, "Unable to fetch %s, will not be able to update server info refs\n", sha1_to_hex(request->obj->sha1));
+ remote->can_update_info_refs = 0;
+ release_request(request);
+ return;
+ }
+
+ fprintf(stderr, "Fetching pack %s\n", sha1_to_hex(target->sha1));
+ fprintf(stderr, " which contains %s\n", sha1_to_hex(request->obj->sha1));
+
+ filename = sha1_pack_name(target->sha1);
+ snprintf(request->filename, sizeof(request->filename), "%s", filename);
+ snprintf(request->tmpfile, sizeof(request->tmpfile),
+ "%s.temp", filename);
+
+ url = xmalloc(strlen(remote->url) + 64);
+ sprintf(url, "%sobjects/pack/pack-%s.pack",
+ remote->url, sha1_to_hex(target->sha1));
+
+ /* Make sure there isn't another open request for this pack */
+ while (check_request) {
+ if (check_request->state == RUN_FETCH_PACKED &&
+ !strcmp(check_request->url, url)) {
+ free(url);
+ release_request(request);
+ return;
+ }
+ check_request = check_request->next;
+ }
+
+ packfile = fopen(request->tmpfile, "a");
+ if (!packfile) {
+ fprintf(stderr, "Unable to open local file %s for pack",
+ filename);
+ remote->can_update_info_refs = 0;
+ free(url);
+ return;
+ }
+
+ slot = get_active_slot();
+ slot->callback_func = process_response;
+ slot->callback_data = request;
+ request->slot = slot;
+ request->local_stream = packfile;
+ request->userData = target;
+
+ request->url = url;
+ curl_easy_setopt(slot->curl, CURLOPT_FILE, packfile);
+ curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite);
+ curl_easy_setopt(slot->curl, CURLOPT_URL, url);
+ curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, no_pragma_header);
+ slot->local = packfile;
+
+ /* If there is data present from a previous transfer attempt,
+ resume where it left off */
+ prev_posn = ftell(packfile);
+ if (prev_posn>0) {
+ if (push_verbosely)
+ fprintf(stderr,
+ "Resuming fetch of pack %s at byte %ld\n",
+ sha1_to_hex(target->sha1), prev_posn);
+ sprintf(range, "Range: bytes=%ld-", prev_posn);
+ range_header = curl_slist_append(range_header, range);
+ curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, range_header);
+ }
+
+ /* Try to get the request started, abort the request on error */
+ request->state = RUN_FETCH_PACKED;
+ if (!start_active_slot(slot)) {
+ fprintf(stderr, "Unable to start GET request\n");
+ remote->can_update_info_refs = 0;
+ release_request(request);
}
}
static void start_mkcol(struct transfer_request *request)
{
- char *hex = sha1_to_hex(request->sha1);
+ char *hex = sha1_to_hex(request->obj->sha1);
struct active_request_slot *slot;
char *posn;
@@ -203,7 +473,7 @@ static void start_mkcol(struct transfer_request *request)
static void start_put(struct transfer_request *request)
{
- char *hex = sha1_to_hex(request->sha1);
+ char *hex = sha1_to_hex(request->obj->sha1);
struct active_request_slot *slot;
char *posn;
char type[20];
@@ -214,7 +484,7 @@ static void start_put(struct transfer_request *request)
ssize_t size;
z_stream stream;
- unpacked = read_sha1_file(request->sha1, type, &len);
+ unpacked = read_sha1_file(request->obj->sha1, type, &len);
hdrlen = sprintf(hdr, "%s %lu", type, len) + 1;
/* Set it up */
@@ -309,9 +579,10 @@ static void start_move(struct transfer_request *request)
}
}
-static int refresh_lock(struct active_lock *lock)
+static int refresh_lock(struct remote_lock *lock)
{
struct active_request_slot *slot;
+ struct slot_results results;
char *if_header;
char timeout_header[25];
struct curl_slist *dav_headers = NULL;
@@ -326,6 +597,7 @@ static int refresh_lock(struct active_lock *lock)
dav_headers = curl_slist_append(dav_headers, timeout_header);
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_HTTPGET, 1);
curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_null);
curl_easy_setopt(slot->curl, CURLOPT_URL, lock->url);
@@ -334,8 +606,9 @@ static int refresh_lock(struct active_lock *lock)
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result != CURLE_OK) {
- fprintf(stderr, "Got HTTP error %ld\n", slot->http_code);
+ if (results.curl_result != CURLE_OK) {
+ fprintf(stderr, "LOCK HTTP error %ld\n",
+ results.http_code);
} else {
lock->start_time = time(NULL);
rc = 1;
@@ -349,24 +622,62 @@ static int refresh_lock(struct active_lock *lock)
return rc;
}
-static void finish_request(struct transfer_request *request)
+static void check_locks(void)
{
+ struct remote_lock *lock = remote->locks;
time_t current_time = time(NULL);
int time_remaining;
- request->curl_result = request->slot->curl_result;
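+ /* Walk all outstanding DAV locks and refresh any that are
+ close to expiring; abort the push if a refresh fails. */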
+ while (lock) {
+ time_remaining = lock->start_time + lock->timeout -
+ current_time;
+ if (!lock->refreshing && time_remaining < LOCK_REFRESH) {
+ if (!refresh_lock(lock)) {
+ fprintf(stderr,
+ "Unable to refresh lock for %s\n",
+ lock->url);
+ aborted = 1;
+ return;
+ }
+ }
+ lock = lock->next;
+ }
+}
+
+static void release_request(struct transfer_request *request)
+{
+ struct transfer_request *entry = request_queue_head;
+
+ if (request == request_queue_head) {
+ request_queue_head = request->next;
+ } else {
+ while (entry->next != NULL && entry->next != request)
+ entry = entry->next;
+ if (entry->next == request)
+ entry->next = entry->next->next;
+ }
+
+ if (request->local_fileno != -1)
+ close(request->local_fileno);
+ if (request->local_stream)
+ fclose(request->local_stream);
+ if (request->url != NULL)
+ free(request->url);
+ free(request);
+}
+
+static void finish_request(struct transfer_request *request)
+{
+ struct stat st;
+ struct packed_git *target;
+ struct packed_git **lst;
+
+ request->curl_result = request->slot->curl_result;
request->http_code = request->slot->http_code;
request->slot = NULL;
- /* Refresh the lock if it is close to timing out */
- time_remaining = request->lock->start_time + request->lock->timeout
- - current_time;
- if (time_remaining < LOCK_REFRESH && !request->lock->refreshing) {
- if (!refresh_lock(request->lock)) {
- fprintf(stderr, "Unable to refresh remote lock\n");
- aborted = 1;
- }
- }
+ /* Keep locks active */
+ check_locks();
if (request->headers != NULL)
curl_slist_free_all(request->headers);
@@ -375,29 +686,16 @@ static void finish_request(struct transfer_request *request)
if (request->state != RUN_PUT) {
free(request->url);
request->url = NULL;
- }
-
- if (request->state == RUN_HEAD) {
- if (request->http_code == 404) {
- request->state = NEED_PUSH;
- } else if (request->curl_result == CURLE_OK) {
- remote_dir_exists[request->sha1[0]] = 1;
- request->state = COMPLETE;
- } else {
- fprintf(stderr, "HEAD %s failed, aborting (%d/%ld)\n",
- sha1_to_hex(request->sha1),
- request->curl_result, request->http_code);
- request->state = ABORTED;
- aborted = 1;
- }
- } else if (request->state == RUN_MKCOL) {
+ }
+
+ if (request->state == RUN_MKCOL) {
if (request->curl_result == CURLE_OK ||
request->http_code == 405) {
- remote_dir_exists[request->sha1[0]] = 1;
+ remote_dir_exists[request->obj->sha1[0]] = 1;
start_put(request);
} else {
fprintf(stderr, "MKCOL %s failed, aborting (%d/%ld)\n",
- sha1_to_hex(request->sha1),
+ sha1_to_hex(request->obj->sha1),
request->curl_result, request->http_code);
request->state = ABORTED;
aborted = 1;
@@ -407,7 +705,7 @@ static void finish_request(struct transfer_request *request)
start_move(request);
} else {
fprintf(stderr, "PUT %s failed, aborting (%d/%ld)\n",
- sha1_to_hex(request->sha1),
+ sha1_to_hex(request->obj->sha1),
request->curl_result, request->http_code);
request->state = ABORTED;
aborted = 1;
@@ -415,41 +713,84 @@ static void finish_request(struct transfer_request *request)
} else if (request->state == RUN_MOVE) {
if (request->curl_result == CURLE_OK) {
if (push_verbosely)
- fprintf(stderr,
- "sent %s\n",
- sha1_to_hex(request->sha1));
- request->state = COMPLETE;
+ fprintf(stderr, " sent %s\n",
+ sha1_to_hex(request->obj->sha1));
+ request->obj->flags |= REMOTE;
+ release_request(request);
} else {
fprintf(stderr, "MOVE %s failed, aborting (%d/%ld)\n",
- sha1_to_hex(request->sha1),
+ sha1_to_hex(request->obj->sha1),
request->curl_result, request->http_code);
request->state = ABORTED;
aborted = 1;
}
- }
-}
+ } else if (request->state == RUN_FETCH_LOOSE) {
+ fchmod(request->local_fileno, 0444);
+ close(request->local_fileno); request->local_fileno = -1;
+
+ if (request->curl_result != CURLE_OK &&
+ request->http_code != 416) {
+ if (stat(request->tmpfile, &st) == 0) {
+ if (st.st_size == 0)
+ unlink(request->tmpfile);
+ }
+ } else {
+ if (request->http_code == 416)
+ fprintf(stderr, "Warning: requested range invalid; we may already have all the data.\n");
+
+ inflateEnd(&request->stream);
+ SHA1_Final(request->real_sha1, &request->c);
+ if (request->zret != Z_STREAM_END) {
+ unlink(request->tmpfile);
+ } else if (memcmp(request->obj->sha1, request->real_sha1, 20)) {
+ unlink(request->tmpfile);
+ } else {
+ request->rename =
+ move_temp_to_file(
+ request->tmpfile,
+ request->filename);
+ if (request->rename == 0) {
+ request->obj->flags |= (LOCAL | REMOTE);
+ }
+ }
+ }
-static void release_request(struct transfer_request *request)
-{
- struct transfer_request *entry = request_queue_head;
+ /* Try fetching packed if necessary */
+ if (request->obj->flags & LOCAL)
+ release_request(request);
+ else
+ start_fetch_packed(request);
- if (request == request_queue_head) {
- request_queue_head = request->next;
- } else {
- while (entry->next != NULL && entry->next != request)
- entry = entry->next;
- if (entry->next == request)
- entry->next = entry->next->next;
+ } else if (request->state == RUN_FETCH_PACKED) {
+ if (request->curl_result != CURLE_OK) {
+ fprintf(stderr, "Unable to get pack file %s\n%s",
+ request->url, curl_errorstr);
+ remote->can_update_info_refs = 0;
+ } else {
+ fclose(request->local_stream);
+ request->local_stream = NULL;
+ if (!move_temp_to_file(request->tmpfile,
+ request->filename)) {
+ target = (struct packed_git *)request->userData;
+ lst = &remote->packs;
+ while (*lst != target)
+ lst = &((*lst)->next);
+ *lst = (*lst)->next;
+
+ if (!verify_pack(target, 0))
+ install_packed_git(target);
+ else
+ remote->can_update_info_refs = 0;
+ }
+ }
+ release_request(request);
}
-
- if (request->url != NULL)
- free(request->url);
- free(request);
}
void fill_active_slots(void)
{
struct transfer_request *request = request_queue_head;
+ struct transfer_request *next;
struct active_request_slot *slot = active_queue_head;
int num_transfers;
@@ -457,17 +798,18 @@ void fill_active_slots(void)
return;
while (active_requests < max_requests && request != NULL) {
- if (!pushing && request->state == NEED_CHECK) {
- start_check(request);
- curl_multi_perform(curlm, &num_transfers);
+ next = request->next;
+ if (request->state == NEED_FETCH) {
+ start_fetch_loose(request);
} else if (pushing && request->state == NEED_PUSH) {
- if (remote_dir_exists[request->sha1[0]])
+ if (remote_dir_exists[request->obj->sha1[0]] == 1) {
start_put(request);
- else
+ } else {
start_mkcol(request);
+ }
curl_multi_perform(curlm, &num_transfers);
}
- request = request->next;
+ request = next;
}
while (slot != NULL) {
@@ -476,34 +818,80 @@ void fill_active_slots(void)
slot->curl = NULL;
}
slot = slot->next;
- }
+ }
}
-static void add_request(unsigned char *sha1, struct active_lock *lock)
+static void get_remote_object_list(unsigned char parent);
+
+static void add_fetch_request(struct object *obj)
+{
+ struct transfer_request *request;
+
+ check_locks();
+
+ /*
+ * Don't fetch the object if it's known to exist locally
+ * or is already in the request queue
+ */
+ if (remote_dir_exists[obj->sha1[0]] == -1)
+ get_remote_object_list(obj->sha1[0]);
+ if (obj->flags & (LOCAL | FETCHING))
+ return;
+
+ obj->flags |= FETCHING;
+ request = xmalloc(sizeof(*request));
+ request->obj = obj;
+ request->url = NULL;
+ request->lock = NULL;
+ request->headers = NULL;
+ request->local_fileno = -1;
+ request->local_stream = NULL;
+ request->state = NEED_FETCH;
+ request->next = request_queue_head;
+ request_queue_head = request;
+
+ fill_active_slots();
+ step_active_slots();
+}
+
+static int add_send_request(struct object *obj, struct remote_lock *lock)
{
struct transfer_request *request = request_queue_head;
struct packed_git *target;
-
- while (request != NULL && memcmp(request->sha1, sha1, 20))
- request = request->next;
- if (request != NULL)
- return;
- target = find_sha1_pack(sha1, remote->packs);
- if (target)
- return;
+ /* Keep locks active */
+ check_locks();
+
+ /*
+ * Don't push the object if it's known to exist on the remote
+ * or is already in the request queue
+ */
+ if (remote_dir_exists[obj->sha1[0]] == -1)
+ get_remote_object_list(obj->sha1[0]);
+ if (obj->flags & (REMOTE | PUSHING))
+ return 0;
+ target = find_sha1_pack(obj->sha1, remote->packs);
+ if (target) {
+ obj->flags |= REMOTE;
+ return 0;
+ }
+ obj->flags |= PUSHING;
request = xmalloc(sizeof(*request));
- memcpy(request->sha1, sha1, 20);
+ request->obj = obj;
request->url = NULL;
request->lock = lock;
request->headers = NULL;
- request->state = NEED_CHECK;
+ request->local_fileno = -1;
+ request->local_stream = NULL;
+ request->state = NEED_PUSH;
request->next = request_queue_head;
request_queue_head = request;
fill_active_slots();
step_active_slots();
+
+ return 1;
}
static int fetch_index(unsigned char *sha1)
@@ -518,16 +906,18 @@ static int fetch_index(unsigned char *sha1)
FILE *indexfile;
struct active_request_slot *slot;
+ struct slot_results results;
/* Don't use the index if the pack isn't there */
- url = xmalloc(strlen(remote->url) + 65);
- sprintf(url, "%s/objects/pack/pack-%s.pack", remote->url, hex);
+ url = xmalloc(strlen(remote->url) + 64);
+ sprintf(url, "%sobjects/pack/pack-%s.pack", remote->url, hex);
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_URL, url);
curl_easy_setopt(slot->curl, CURLOPT_NOBODY, 1);
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result != CURLE_OK) {
+ if (results.curl_result != CURLE_OK) {
free(url);
return error("Unable to verify pack %s is available",
hex);
@@ -541,9 +931,9 @@ static int fetch_index(unsigned char *sha1)
if (push_verbosely)
fprintf(stderr, "Getting index for pack %s\n", hex);
-
- sprintf(url, "%s/objects/pack/pack-%s.idx", remote->url, hex);
-
+
+ sprintf(url, "%sobjects/pack/pack-%s.idx", remote->url, hex);
+
filename = sha1_pack_index_name(sha1);
snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
indexfile = fopen(tmpfile, "a");
@@ -552,6 +942,7 @@ static int fetch_index(unsigned char *sha1)
filename);
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_NOBODY, 0);
curl_easy_setopt(slot->curl, CURLOPT_HTTPGET, 1);
curl_easy_setopt(slot->curl, CURLOPT_FILE, indexfile);
@@ -575,7 +966,7 @@ static int fetch_index(unsigned char *sha1)
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result != CURLE_OK) {
+ if (results.curl_result != CURLE_OK) {
free(url);
fclose(indexfile);
return error("Unable to get pack index %s\n%s", url,
@@ -615,30 +1006,31 @@ static int fetch_indices(void)
int i = 0;
struct active_request_slot *slot;
+ struct slot_results results;
- data = xmalloc(4096);
- memset(data, 0, 4096);
+ data = xcalloc(1, 4096);
buffer.size = 4096;
buffer.posn = 0;
buffer.buffer = data;
if (push_verbosely)
fprintf(stderr, "Getting pack list\n");
-
- url = xmalloc(strlen(remote->url) + 21);
- sprintf(url, "%s/objects/info/packs", remote->url);
+
+ url = xmalloc(strlen(remote->url) + 20);
+ sprintf(url, "%sobjects/info/packs", remote->url);
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_FILE, &buffer);
curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_buffer);
curl_easy_setopt(slot->curl, CURLOPT_URL, url);
curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, NULL);
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result != CURLE_OK) {
+ if (results.curl_result != CURLE_OK) {
free(buffer.buffer);
free(url);
- if (slot->http_code == 404)
+ if (results.http_code == 404)
return 0;
else
return error("%s", curl_errorstr);
@@ -698,14 +1090,13 @@ static char *quote_ref_url(const char *base, const char *ref)
int len, baselen, ch;
baselen = strlen(base);
- len = baselen + 12; /* "refs/heads/" + NUL */
+ len = baselen + 1;
for (cp = ref; (ch = *cp) != 0; cp++, len++)
if (needs_quote(ch))
len += 2; /* extra two hex plus replacement % */
qref = xmalloc(len);
memcpy(qref, base, baselen);
- memcpy(qref + baselen, "refs/heads/", 11);
- for (cp = ref, dp = qref + baselen + 11; (ch = *cp) != 0; cp++) {
+ for (cp = ref, dp = qref + baselen; (ch = *cp) != 0; cp++) {
if (needs_quote(ch)) {
*dp++ = '%';
*dp++ = hex((ch >> 4) & 0xF);
@@ -726,20 +1117,22 @@ int fetch_ref(char *ref, unsigned char *sha1)
struct buffer buffer;
char *base = remote->url;
struct active_request_slot *slot;
+ struct slot_results results;
buffer.size = 41;
buffer.posn = 0;
buffer.buffer = hex;
hex[41] = '\0';
-
+
url = quote_ref_url(base, ref);
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_FILE, &buffer);
curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_buffer);
curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, NULL);
curl_easy_setopt(slot->curl, CURLOPT_URL, url);
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result != CURLE_OK)
+ if (results.curl_result != CURLE_OK)
return error("Couldn't get %s for %s\n%s",
url, ref, curl_errorstr);
} else {
@@ -751,6 +1144,27 @@ int fetch_ref(char *ref, unsigned char *sha1)
return 0;
}
+static void one_remote_object(const char *hex)
+{
+ unsigned char sha1[20];
+ struct object *obj;
+
+ if (get_sha1_hex(hex, sha1) != 0)
+ return;
+
+ obj = lookup_object(sha1);
+ if (!obj)
+ obj = parse_object(sha1);
+
+ /* Ignore remote objects that don't exist locally */
+ if (!obj)
+ return;
+
+ obj->flags |= REMOTE;
+ if (!object_list_contains(objects, obj))
+ add_object(obj, &objects, NULL, "");
+}
+
static void handle_lockprop_ctx(struct xml_ctx *ctx, int tag_closed)
{
int *lock_flags = (int *)ctx->userData;
@@ -772,7 +1186,7 @@ static void handle_lockprop_ctx(struct xml_ctx *ctx, int tag_closed)
static void handle_new_lock_ctx(struct xml_ctx *ctx, int tag_closed)
{
- struct active_lock *lock = (struct active_lock *)ctx->userData;
+ struct remote_lock *lock = (struct remote_lock *)ctx->userData;
if (tag_closed && ctx->cdata) {
if (!strcmp(ctx->name, DAV_ACTIVELOCK_OWNER)) {
@@ -791,6 +1205,8 @@ static void handle_new_lock_ctx(struct xml_ctx *ctx, int tag_closed)
}
}
+static void one_remote_ref(char *refname);
+
static void
xml_start_tag(void *userData, const char *name, const char **atts)
{
@@ -848,9 +1264,10 @@ xml_cdata(void *userData, const XML_Char *s, int len)
strncpy(ctx->cdata, s, len);
}
-static struct active_lock *lock_remote(char *file, long timeout)
+static struct remote_lock *lock_remote(char *path, long timeout)
{
struct active_request_slot *slot;
+ struct slot_results results;
struct buffer out_buffer;
struct buffer in_buffer;
char *out_data;
@@ -858,28 +1275,29 @@ static struct active_lock *lock_remote(char *file, long timeout)
char *url;
char *ep;
char timeout_header[25];
- struct active_lock *new_lock = NULL;
+ struct remote_lock *lock = NULL;
XML_Parser parser = XML_ParserCreate(NULL);
enum XML_Status result;
struct curl_slist *dav_headers = NULL;
struct xml_ctx ctx;
- url = xmalloc(strlen(remote->url) + strlen(file) + 1);
- sprintf(url, "%s%s", remote->url, file);
+ url = xmalloc(strlen(remote->url) + strlen(path) + 1);
+ sprintf(url, "%s%s", remote->url, path);
/* Make sure leading directories exist for the remote ref */
ep = strchr(url + strlen(remote->url) + 11, '/');
while (ep) {
*ep = 0;
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_HTTPGET, 1);
curl_easy_setopt(slot->curl, CURLOPT_URL, url);
curl_easy_setopt(slot->curl, CURLOPT_CUSTOMREQUEST, DAV_MKCOL);
curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_null);
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result != CURLE_OK &&
- slot->http_code != 405) {
+ if (results.curl_result != CURLE_OK &&
+ results.http_code != 405) {
fprintf(stderr,
"Unable to create branch path %s\n",
url);
@@ -887,7 +1305,7 @@ static struct active_lock *lock_remote(char *file, long timeout)
return NULL;
}
} else {
- fprintf(stderr, "Unable to start request\n");
+ fprintf(stderr, "Unable to start MKCOL request\n");
free(url);
return NULL;
}
@@ -911,6 +1329,7 @@ static struct active_lock *lock_remote(char *file, long timeout)
dav_headers = curl_slist_append(dav_headers, "Content-Type: text/xml");
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_INFILE, &out_buffer);
curl_easy_setopt(slot->curl, CURLOPT_INFILESIZE, out_buffer.size);
curl_easy_setopt(slot->curl, CURLOPT_READFUNCTION, fread_buffer);
@@ -921,20 +1340,17 @@ static struct active_lock *lock_remote(char *file, long timeout)
curl_easy_setopt(slot->curl, CURLOPT_CUSTOMREQUEST, DAV_LOCK);
curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, dav_headers);
- new_lock = xcalloc(1, sizeof(*new_lock));
- new_lock->owner = NULL;
- new_lock->token = NULL;
- new_lock->timeout = -1;
- new_lock->refreshing = 0;
+ lock = xcalloc(1, sizeof(*lock));
+ lock->timeout = -1;
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result == CURLE_OK) {
+ if (results.curl_result == CURLE_OK) {
ctx.name = xcalloc(10, 1);
ctx.len = 0;
ctx.cdata = NULL;
ctx.userFunc = handle_new_lock_ctx;
- ctx.userData = new_lock;
+ ctx.userData = lock;
XML_SetUserData(parser, &ctx);
XML_SetElementHandler(parser, xml_start_tag,
xml_end_tag);
@@ -946,36 +1362,40 @@ static struct active_lock *lock_remote(char *file, long timeout)
fprintf(stderr, "XML error: %s\n",
XML_ErrorString(
XML_GetErrorCode(parser)));
- new_lock->timeout = -1;
+ lock->timeout = -1;
}
}
} else {
- fprintf(stderr, "Unable to start request\n");
+ fprintf(stderr, "Unable to start LOCK request\n");
}
curl_slist_free_all(dav_headers);
free(out_data);
free(in_data);
- if (new_lock->token == NULL || new_lock->timeout <= 0) {
- if (new_lock->token != NULL)
- free(new_lock->token);
- if (new_lock->owner != NULL)
- free(new_lock->owner);
+ if (lock->token == NULL || lock->timeout <= 0) {
+ if (lock->token != NULL)
+ free(lock->token);
+ if (lock->owner != NULL)
+ free(lock->owner);
free(url);
- free(new_lock);
- new_lock = NULL;
+ free(lock);
+ lock = NULL;
} else {
- new_lock->url = url;
- new_lock->start_time = time(NULL);
+ lock->url = url;
+ lock->start_time = time(NULL);
+ lock->next = remote->locks;
+ remote->locks = lock;
}
- return new_lock;
+ return lock;
}
-static int unlock_remote(struct active_lock *lock)
+static int unlock_remote(struct remote_lock *lock)
{
struct active_request_slot *slot;
+ struct slot_results results;
+ struct remote_lock *prev = remote->locks;
char *lock_token_header;
struct curl_slist *dav_headers = NULL;
int rc = 0;
@@ -986,6 +1406,7 @@ static int unlock_remote(struct active_lock *lock)
dav_headers = curl_slist_append(dav_headers, lock_token_header);
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_null);
curl_easy_setopt(slot->curl, CURLOPT_URL, lock->url);
curl_easy_setopt(slot->curl, CURLOPT_CUSTOMREQUEST, DAV_UNLOCK);
@@ -993,18 +1414,27 @@ static int unlock_remote(struct active_lock *lock)
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result == CURLE_OK)
+ if (results.curl_result == CURLE_OK)
rc = 1;
else
- fprintf(stderr, "Got HTTP error %ld\n",
- slot->http_code);
+ fprintf(stderr, "UNLOCK HTTP error %ld\n",
+ results.http_code);
} else {
- fprintf(stderr, "Unable to start request\n");
+ fprintf(stderr, "Unable to start UNLOCK request\n");
}
curl_slist_free_all(dav_headers);
free(lock_token_header);
+ if (remote->locks == lock) {
+ remote->locks = lock->next;
+ } else {
+ while (prev && prev->next != lock)
+ prev = prev->next;
+ if (prev)
+ prev->next = prev->next->next;
+ }
+
if (lock->owner != NULL)
free(lock->owner);
free(lock->url);
@@ -1014,9 +1444,180 @@ static int unlock_remote(struct active_lock *lock)
return rc;
}
+static void remote_ls(const char *path, int flags,
+ void (*userFunc)(struct remote_ls_ctx *ls),
+ void *userData);
+
+static void process_ls_object(struct remote_ls_ctx *ls)
+{
+ unsigned int *parent = (unsigned int *)ls->userData;
+ char *path = ls->dentry_name;
+ char *obj_hex;
+
+ if (!strcmp(ls->path, ls->dentry_name) && (ls->flags & IS_DIR)) {
+ remote_dir_exists[*parent] = 1;
+ return;
+ }
+
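+ /* A loose object entry is "objects/xx/<38 hex chars>" (49 chars);
+ rebuild the 40-char sha1 from the directory and file name. */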
+ if (strlen(path) != 49)
+ return;
+ path += 8;
+ obj_hex = xmalloc(strlen(path));
+ strncpy(obj_hex, path, 2);
+ strcpy(obj_hex + 2, path + 3);
+ one_remote_object(obj_hex);
+ free(obj_hex);
+}
+
+static void process_ls_ref(struct remote_ls_ctx *ls)
+{
+ if (!strcmp(ls->path, ls->dentry_name) && (ls->dentry_flags & IS_DIR)) {
+ fprintf(stderr, " %s\n", ls->dentry_name);
+ return;
+ }
+
+ if (!(ls->dentry_flags & IS_DIR))
+ one_remote_ref(ls->dentry_name);
+}
+
+static void handle_remote_ls_ctx(struct xml_ctx *ctx, int tag_closed)
+{
+ struct remote_ls_ctx *ls = (struct remote_ls_ctx *)ctx->userData;
+
+ if (tag_closed) {
+ if (!strcmp(ctx->name, DAV_PROPFIND_RESP) && ls->dentry_name) {
+ if (ls->dentry_flags & IS_DIR) {
+ if (ls->flags & PROCESS_DIRS) {
+ ls->userFunc(ls);
+ }
+ if (strcmp(ls->dentry_name, ls->path) &&
+ ls->flags & RECURSIVE) {
+ remote_ls(ls->dentry_name,
+ ls->flags,
+ ls->userFunc,
+ ls->userData);
+ }
+ } else if (ls->flags & PROCESS_FILES) {
+ ls->userFunc(ls);
+ }
+ } else if (!strcmp(ctx->name, DAV_PROPFIND_NAME) && ctx->cdata) {
+ ls->dentry_name = xmalloc(strlen(ctx->cdata) -
+ remote->path_len + 1);
+ strcpy(ls->dentry_name, ctx->cdata + remote->path_len);
+ } else if (!strcmp(ctx->name, DAV_PROPFIND_COLLECTION)) {
+ ls->dentry_flags |= IS_DIR;
+ }
+ } else if (!strcmp(ctx->name, DAV_PROPFIND_RESP)) {
+ if (ls->dentry_name) {
+ free(ls->dentry_name);
+ }
+ ls->dentry_name = NULL;
+ ls->dentry_flags = 0;
+ }
+}
+
+static void remote_ls(const char *path, int flags,
+ void (*userFunc)(struct remote_ls_ctx *ls),
+ void *userData)
+{
+ char *url = xmalloc(strlen(remote->url) + strlen(path) + 1);
+ struct active_request_slot *slot;
+ struct slot_results results;
+ struct buffer in_buffer;
+ struct buffer out_buffer;
+ char *in_data;
+ char *out_data;
+ XML_Parser parser = XML_ParserCreate(NULL);
+ enum XML_Status result;
+ struct curl_slist *dav_headers = NULL;
+ struct xml_ctx ctx;
+ struct remote_ls_ctx ls;
+
+ ls.flags = flags;
+ ls.path = strdup(path);
+ ls.dentry_name = NULL;
+ ls.dentry_flags = 0;
+ ls.userData = userData;
+ ls.userFunc = userFunc;
+
+ sprintf(url, "%s%s", remote->url, path);
+
+ out_buffer.size = strlen(PROPFIND_ALL_REQUEST);
+ out_data = xmalloc(out_buffer.size + 1);
+ snprintf(out_data, out_buffer.size + 1, PROPFIND_ALL_REQUEST);
+ out_buffer.posn = 0;
+ out_buffer.buffer = out_data;
+
+ in_buffer.size = 4096;
+ in_data = xmalloc(in_buffer.size);
+ in_buffer.posn = 0;
+ in_buffer.buffer = in_data;
+
+ dav_headers = curl_slist_append(dav_headers, "Depth: 1");
+ dav_headers = curl_slist_append(dav_headers, "Content-Type: text/xml");
+
+ slot = get_active_slot();
+ slot->results = &results;
+ curl_easy_setopt(slot->curl, CURLOPT_INFILE, &out_buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_INFILESIZE, out_buffer.size);
+ curl_easy_setopt(slot->curl, CURLOPT_READFUNCTION, fread_buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_FILE, &in_buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_URL, url);
+ curl_easy_setopt(slot->curl, CURLOPT_UPLOAD, 1);
+ curl_easy_setopt(slot->curl, CURLOPT_CUSTOMREQUEST, DAV_PROPFIND);
+ curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, dav_headers);
+
+ if (start_active_slot(slot)) {
+ run_active_slot(slot);
+ if (results.curl_result == CURLE_OK) {
+ ctx.name = xcalloc(10, 1);
+ ctx.len = 0;
+ ctx.cdata = NULL;
+ ctx.userFunc = handle_remote_ls_ctx;
+ ctx.userData = &ls;
+ XML_SetUserData(parser, &ctx);
+ XML_SetElementHandler(parser, xml_start_tag,
+ xml_end_tag);
+ XML_SetCharacterDataHandler(parser, xml_cdata);
+ result = XML_Parse(parser, in_buffer.buffer,
+ in_buffer.posn, 1);
+ free(ctx.name);
+
+ if (result != XML_STATUS_OK) {
+ fprintf(stderr, "XML error: %s\n",
+ XML_ErrorString(
+ XML_GetErrorCode(parser)));
+ }
+ }
+ } else {
+ fprintf(stderr, "Unable to start PROPFIND request\n");
+ }
+
+ free(ls.path);
+ free(url);
+ free(out_data);
+ free(in_buffer.buffer);
+ curl_slist_free_all(dav_headers);
+}
+
+static void get_remote_object_list(unsigned char parent)
+{
+ char path[] = "objects/XX/";
+ static const char hex[] = "0123456789abcdef";
+ unsigned int val = parent;
+
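+ /* Fill in the "XX" fan-out directory with the hex of the
+ leading byte, then list every object underneath it. */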
+ path[8] = hex[val >> 4];
+ path[9] = hex[val & 0xf];
+ remote_dir_exists[val] = 0;
+ remote_ls(path, (PROCESS_FILES | PROCESS_DIRS),
+ process_ls_object, &val);
+}
+
static int locking_available(void)
{
struct active_request_slot *slot;
+ struct slot_results results;
struct buffer in_buffer;
struct buffer out_buffer;
char *in_data;
@@ -1027,9 +1628,12 @@ static int locking_available(void)
struct xml_ctx ctx;
int lock_flags = 0;
- out_buffer.size = strlen(PROPFIND_REQUEST) + strlen(remote->url) - 2;
+ out_buffer.size =
+ strlen(PROPFIND_SUPPORTEDLOCK_REQUEST) +
+ strlen(remote->url) - 2;
out_data = xmalloc(out_buffer.size + 1);
- snprintf(out_data, out_buffer.size + 1, PROPFIND_REQUEST, remote->url);
+ snprintf(out_data, out_buffer.size + 1,
+ PROPFIND_SUPPORTEDLOCK_REQUEST, remote->url);
out_buffer.posn = 0;
out_buffer.buffer = out_data;
@@ -1040,8 +1644,9 @@ static int locking_available(void)
dav_headers = curl_slist_append(dav_headers, "Depth: 0");
dav_headers = curl_slist_append(dav_headers, "Content-Type: text/xml");
-
+
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_INFILE, &out_buffer);
curl_easy_setopt(slot->curl, CURLOPT_INFILESIZE, out_buffer.size);
curl_easy_setopt(slot->curl, CURLOPT_READFUNCTION, fread_buffer);
@@ -1054,7 +1659,7 @@ static int locking_available(void)
if (start_active_slot(slot)) {
run_active_slot(slot);
- if (slot->curl_result == CURLE_OK) {
+ if (results.curl_result == CURLE_OK) {
ctx.name = xcalloc(10, 1);
ctx.len = 0;
ctx.cdata = NULL;
@@ -1075,7 +1680,7 @@ static int locking_available(void)
}
}
} else {
- fprintf(stderr, "Unable to start request\n");
+ fprintf(stderr, "Unable to start PROPFIND request\n");
}
free(out_data);
@@ -1085,87 +1690,105 @@ static int locking_available(void)
return lock_flags;
}
-static int is_ancestor(unsigned char *sha1, struct commit *commit)
+static struct object_list **process_blob(struct blob *blob,
+ struct object_list **p,
+ struct name_path *path,
+ const char *name)
{
- struct commit_list *parents;
+ struct object *obj = &blob->object;
- if (parse_commit(commit))
- return 0;
- parents = commit->parents;
- for (; parents; parents = parents->next) {
- if (!memcmp(sha1, parents->item->object.sha1, 20)) {
- return 1;
- } else if (parents->item->object.type == commit_type) {
- if (is_ancestor(
- sha1,
- (struct commit *)&parents->item->object
- ))
- return 1;
- }
- }
- return 0;
+ obj->flags |= LOCAL;
+
+ if (obj->flags & (UNINTERESTING | SEEN))
+ return p;
+
+ obj->flags |= SEEN;
+ return add_object(obj, p, path, name);
}
-static void get_delta(unsigned char *sha1, struct object *obj,
- struct active_lock *lock)
+static struct object_list **process_tree(struct tree *tree,
+ struct object_list **p,
+ struct name_path *path,
+ const char *name)
{
- struct commit *commit;
- struct commit_list *parents;
- struct tree *tree;
+ struct object *obj = &tree->object;
struct tree_entry_list *entry;
+ struct name_path me;
+
+ obj->flags |= LOCAL;
+
+ if (obj->flags & (UNINTERESTING | SEEN))
+ return p;
+ if (parse_tree(tree) < 0)
+ die("bad tree object %s", sha1_to_hex(obj->sha1));
+
+ obj->flags |= SEEN;
+ p = add_object(obj, p, NULL, name);
+ me.up = path;
+ me.elem = name;
+ me.elem_len = strlen(name);
+ entry = tree->entries;
+ tree->entries = NULL;
+ while (entry) {
+ struct tree_entry_list *next = entry->next;
+ if (entry->directory)
+ p = process_tree(entry->item.tree, p, &me, entry->name);
+ else
+ p = process_blob(entry->item.blob, p, &me, entry->name);
+ free(entry);
+ entry = next;
+ }
+ return p;
+}
- if (sha1 && !memcmp(sha1, obj->sha1, 20))
- return;
+static int get_delta(struct rev_info *revs, struct remote_lock *lock)
+{
+ struct commit *commit;
+ struct object_list **p = &objects, *pending;
+ int count = 0;
+
+ while ((commit = get_revision(revs)) != NULL) {
+ p = process_tree(commit->tree, p, NULL, "");
+ commit->object.flags |= LOCAL;
+ if (!(commit->object.flags & UNINTERESTING))
+ count += add_send_request(&commit->object, lock);
+ }
- if (aborted)
- return;
+ for (pending = revs->pending_objects; pending; pending = pending->next) {
+ struct object *obj = pending->item;
+ const char *name = pending->name;
- if (obj->type == commit_type) {
- if (push_verbosely)
- fprintf(stderr, "walk %s\n", sha1_to_hex(obj->sha1));
- add_request(obj->sha1, lock);
- commit = (struct commit *)obj;
- if (parse_commit(commit)) {
- fprintf(stderr, "Error parsing commit %s\n",
- sha1_to_hex(obj->sha1));
- aborted = 1;
- return;
+ if (obj->flags & (UNINTERESTING | SEEN))
+ continue;
+ if (obj->type == tag_type) {
+ obj->flags |= SEEN;
+ p = add_object(obj, p, NULL, name);
+ continue;
}
- parents = commit->parents;
- for (; parents; parents = parents->next)
- if (sha1 == NULL ||
- memcmp(sha1, parents->item->object.sha1, 20))
- get_delta(sha1, &parents->item->object,
- lock);
- get_delta(sha1, &commit->tree->object, lock);
- } else if (obj->type == tree_type) {
- if (push_verbosely)
- fprintf(stderr, "walk %s\n", sha1_to_hex(obj->sha1));
- add_request(obj->sha1, lock);
- tree = (struct tree *)obj;
- if (parse_tree(tree)) {
- fprintf(stderr, "Error parsing tree %s\n",
- sha1_to_hex(obj->sha1));
- aborted = 1;
- return;
+ if (obj->type == tree_type) {
+ p = process_tree((struct tree *)obj, p, NULL, name);
+ continue;
}
- entry = tree->entries;
- tree->entries = NULL;
- while (entry) {
- struct tree_entry_list *next = entry->next;
- get_delta(sha1, entry->item.any, lock);
- free(entry->name);
- free(entry);
- entry = next;
+ if (obj->type == blob_type) {
+ p = process_blob((struct blob *)obj, p, NULL, name);
+ continue;
}
- } else if (obj->type == blob_type || obj->type == tag_type) {
- add_request(obj->sha1, lock);
+ die("unknown pending object %s (%s)", sha1_to_hex(obj->sha1), name);
}
+
+ while (objects) {
+ if (!(objects->item->flags & UNINTERESTING))
+ count += add_send_request(objects->item, lock);
+ objects = objects->next;
+ }
+
+ return count;
}
-static int update_remote(unsigned char *sha1, struct active_lock *lock)
+static int update_remote(unsigned char *sha1, struct remote_lock *lock)
{
struct active_request_slot *slot;
+ struct slot_results results;
char *out_data;
char *if_header;
struct buffer out_buffer;
@@ -1187,6 +1810,7 @@ static int update_remote(unsigned char *sha1, struct active_lock *lock)
out_buffer.buffer = out_data;
slot = get_active_slot();
+ slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_INFILE, &out_buffer);
curl_easy_setopt(slot->curl, CURLOPT_INFILESIZE, out_buffer.size);
curl_easy_setopt(slot->curl, CURLOPT_READFUNCTION, fread_buffer);
@@ -1201,10 +1825,10 @@ static int update_remote(unsigned char *sha1, struct active_lock *lock)
run_active_slot(slot);
free(out_data);
free(if_header);
- if (slot->curl_result != CURLE_OK) {
+ if (results.curl_result != CURLE_OK) {
fprintf(stderr,
"PUT error: curl result=%d, HTTP code=%ld\n",
- slot->curl_result, slot->http_code);
+ results.curl_result, results.http_code);
/* We should attempt recovery? */
return 0;
}
@@ -1218,38 +1842,489 @@ static int update_remote(unsigned char *sha1, struct active_lock *lock)
return 1;
}
+static struct ref *local_refs, **local_tail;
+static struct ref *remote_refs, **remote_tail;
+
+static int one_local_ref(const char *refname, const unsigned char *sha1)
+{
+ struct ref *ref;
+ int len = strlen(refname) + 1;
+ ref = xcalloc(1, sizeof(*ref) + len);
+ memcpy(ref->new_sha1, sha1, 20);
+ memcpy(ref->name, refname, len);
+ *local_tail = ref;
+ local_tail = &ref->next;
+ return 0;
+}
+
+static void one_remote_ref(char *refname)
+{
+ struct ref *ref;
+ unsigned char remote_sha1[20];
+ struct object *obj;
+ int len = strlen(refname) + 1;
+
+ if (fetch_ref(refname, remote_sha1) != 0) {
+ fprintf(stderr,
+ "Unable to fetch ref %s from %s\n",
+ refname, remote->url);
+ return;
+ }
+
+ /*
+ * Fetch a copy of the object if it doesn't exist locally - it
+ * may be required for updating server info later.
+ */
+ if (remote->can_update_info_refs && !has_sha1_file(remote_sha1)) {
+ obj = lookup_unknown_object(remote_sha1);
+ if (obj) {
+ fprintf(stderr, " fetch %s for %s\n",
+ sha1_to_hex(remote_sha1), refname);
+ add_fetch_request(obj);
+ }
+ }
+
+ ref = xcalloc(1, sizeof(*ref) + len);
+ memcpy(ref->old_sha1, remote_sha1, 20);
+ memcpy(ref->name, refname, len);
+ *remote_tail = ref;
+ remote_tail = &ref->next;
+}
+
+static void get_local_heads(void)
+{
+ local_tail = &local_refs;
+ for_each_ref(one_local_ref);
+}
+
+static void get_dav_remote_heads(void)
+{
+ remote_tail = &remote_refs;
+ remote_ls("refs/", (PROCESS_FILES | PROCESS_DIRS | RECURSIVE), process_ls_ref, NULL);
+}
+
+static int is_zero_sha1(const unsigned char *sha1)
+{
+ int i;
+
+ for (i = 0; i < 20; i++) {
+ if (*sha1++)
+ return 0;
+ }
+ return 1;
+}
+
+static void unmark_and_free(struct commit_list *list, unsigned int mark)
+{
+ while (list) {
+ struct commit_list *temp = list;
+ temp->item->object.flags &= ~mark;
+ list = temp->next;
+ free(temp);
+ }
+}
+
+static int ref_newer(const unsigned char *new_sha1,
+ const unsigned char *old_sha1)
+{
+ struct object *o;
+ struct commit *old, *new;
+ struct commit_list *list, *used;
+ int found = 0;
+
+ /* Both new and old must be commit-ish and new must be a
+ * descendant of old. Otherwise we require --force.
+ */
+ o = deref_tag(parse_object(old_sha1), NULL, 0);
+ if (!o || o->type != commit_type)
+ return 0;
+ old = (struct commit *) o;
+
+ o = deref_tag(parse_object(new_sha1), NULL, 0);
+ if (!o || o->type != commit_type)
+ return 0;
+ new = (struct commit *) o;
+
+ if (parse_commit(new) < 0)
+ return 0;
+
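+ /* Walk new's ancestry, most recent commit first; reaching
+ old proves that new is a descendant. */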
+ used = list = NULL;
+ commit_list_insert(new, &list);
+ while (list) {
+ new = pop_most_recent_commit(&list, TMP_MARK);
+ commit_list_insert(new, &used);
+ if (new == old) {
+ found = 1;
+ break;
+ }
+ }
+ unmark_and_free(list, TMP_MARK);
+ unmark_and_free(used, TMP_MARK);
+ return found;
+}
+
+static void mark_edge_parents_uninteresting(struct commit *commit)
+{
+ struct commit_list *parents;
+
+ for (parents = commit->parents; parents; parents = parents->next) {
+ struct commit *parent = parents->item;
+ if (!(parent->object.flags & UNINTERESTING))
+ continue;
+ mark_tree_uninteresting(parent->tree);
+ }
+}
+
+static void mark_edges_uninteresting(struct commit_list *list)
+{
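+ /* Trees reachable from the uninteresting (already remote) side
+ of the range are marked so process_tree()/process_blob()
+ skip their objects when building the send list. */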
+ for ( ; list; list = list->next) {
+ struct commit *commit = list->item;
+
+ if (commit->object.flags & UNINTERESTING) {
+ mark_tree_uninteresting(commit->tree);
+ continue;
+ }
+ mark_edge_parents_uninteresting(commit);
+ }
+}
+
+static void add_remote_info_ref(struct remote_ls_ctx *ls)
+{
+ struct buffer *buf = (struct buffer *)ls->userData;
+ unsigned char remote_sha1[20];
+ struct object *o;
+ int len;
+ char *ref_info;
+
+ if (fetch_ref(ls->dentry_name, remote_sha1) != 0) {
+ fprintf(stderr,
+ "Unable to fetch ref %s from %s\n",
+ ls->dentry_name, remote->url);
+ aborted = 1;
+ return;
+ }
+
+ o = parse_object(remote_sha1);
+ if (!o) {
+ fprintf(stderr,
+ "Unable to parse object %s for remote ref %s\n",
+ sha1_to_hex(remote_sha1), ls->dentry_name);
+ aborted = 1;
+ return;
+ }
+
+ len = strlen(ls->dentry_name) + 42;
+ ref_info = xcalloc(len + 1, 1);
+ sprintf(ref_info, "%s %s\n",
+ sha1_to_hex(remote_sha1), ls->dentry_name);
+ fwrite_buffer(ref_info, 1, len, buf);
+ free(ref_info);
+
+ if (o->type == tag_type) {
+ o = deref_tag(o, ls->dentry_name, 0);
+ if (o) {
+ len = strlen(ls->dentry_name) + 45;
+ ref_info = xcalloc(len + 1, 1);
+ sprintf(ref_info, "%s %s^{}\n",
+ sha1_to_hex(o->sha1), ls->dentry_name);
+ fwrite_buffer(ref_info, 1, len, buf);
+ free(ref_info);
+ }
+ }
+}
+
+static void update_remote_info_refs(struct remote_lock *lock)
+{
+ struct buffer buffer;
+ struct active_request_slot *slot;
+ struct slot_results results;
+ char *if_header;
+ struct curl_slist *dav_headers = NULL;
+
+ buffer.buffer = xcalloc(1, 4096);
+ buffer.size = 4096;
+ buffer.posn = 0;
+ remote_ls("refs/", (PROCESS_FILES | RECURSIVE),
+ add_remote_info_ref, &buffer);
+ if (!aborted) {
+ if_header = xmalloc(strlen(lock->token) + 25);
+ sprintf(if_header, "If: (<opaquelocktoken:%s>)", lock->token);
+ dav_headers = curl_slist_append(dav_headers, if_header);
+
+ slot = get_active_slot();
+ slot->results = &results;
+ curl_easy_setopt(slot->curl, CURLOPT_INFILE, &buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_INFILESIZE, buffer.posn);
+ curl_easy_setopt(slot->curl, CURLOPT_READFUNCTION, fread_buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_null);
+ curl_easy_setopt(slot->curl, CURLOPT_CUSTOMREQUEST, DAV_PUT);
+ curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, dav_headers);
+ curl_easy_setopt(slot->curl, CURLOPT_UPLOAD, 1);
+ curl_easy_setopt(slot->curl, CURLOPT_PUT, 1);
+ curl_easy_setopt(slot->curl, CURLOPT_URL, lock->url);
+
+ buffer.posn = 0;
+
+ if (start_active_slot(slot)) {
+ run_active_slot(slot);
+ if (results.curl_result != CURLE_OK) {
+ fprintf(stderr,
+ "PUT error: curl result=%d, HTTP code=%ld\n",
+ results.curl_result, results.http_code);
+ }
+ }
+ free(if_header);
+ }
+ free(buffer.buffer);
+}
+
+static int remote_exists(const char *path)
+{
+ char *url = xmalloc(strlen(remote->url) + strlen(path) + 1);
+ struct active_request_slot *slot;
+ struct slot_results results;
+
+ sprintf(url, "%s%s", remote->url, path);
+
+ slot = get_active_slot();
+ slot->results = &results;
+ curl_easy_setopt(slot->curl, CURLOPT_URL, url);
+ curl_easy_setopt(slot->curl, CURLOPT_NOBODY, 1);
+
+ if (start_active_slot(slot)) {
+ run_active_slot(slot);
+ if (results.http_code == 404)
+ return 0;
+ else if (results.curl_result == CURLE_OK)
+ return 1;
+ else
+ fprintf(stderr, "HEAD HTTP error %ld\n", results.http_code);
+ } else {
+ fprintf(stderr, "Unable to start HEAD request\n");
+ }
+
+ return -1;
+}
+
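+/* Fetch the given ref file from the remote; set *symref if it is a "ref: " symref, otherwise parse its sha1. */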
+static void fetch_symref(char *path, char **symref, unsigned char *sha1)
+{
+ char *url;
+ struct buffer buffer;
+ struct active_request_slot *slot;
+ struct slot_results results;
+
+ url = xmalloc(strlen(remote->url) + strlen(path) + 1);
+ sprintf(url, "%s%s", remote->url, path);
+
+ buffer.size = 4096;
+ buffer.posn = 0;
+ buffer.buffer = xmalloc(buffer.size);
+
+ slot = get_active_slot();
+ slot->results = &results;
+ curl_easy_setopt(slot->curl, CURLOPT_FILE, &buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_buffer);
+ curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, NULL);
+ curl_easy_setopt(slot->curl, CURLOPT_URL, url);
+ if (start_active_slot(slot)) {
+ run_active_slot(slot);
+ if (results.curl_result != CURLE_OK) {
+ die("Couldn't get %s for remote symref\n%s",
+ url, curl_errorstr);
+ }
+ } else {
+ die("Unable to start remote symref request");
+ }
+ free(url);
+
+ if (*symref != NULL)
+ free(*symref);
+ *symref = NULL;
+ memset(sha1, 0, 20);
+
+ if (buffer.posn == 0)
+ return;
+
+ /* If it's a symref, set the refname; otherwise try for a sha1 */
+ if (!strncmp((char *)buffer.buffer, "ref: ", 5)) {
+ *symref = xcalloc(buffer.posn - 5, 1);
+ strncpy(*symref, (char *)buffer.buffer + 5, buffer.posn - 6);
+ } else {
+ get_sha1_hex(buffer.buffer, sha1);
+ }
+
+ free(buffer.buffer);
+}
+
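+/* Run git-merge-base -a and check whether branch_sha1 is one of the merge bases, i.e. an ancestor of head_sha1. */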
+static int verify_merge_base(unsigned char *head_sha1, unsigned char *branch_sha1)
+{
+ int pipe_fd[2];
+ pid_t merge_base_pid;
+ char line[PATH_MAX + 20];
+ unsigned char merge_sha1[20];
+ int verified = 0;
+
+ if (pipe(pipe_fd) < 0)
+ die("Verify merge base: pipe failed");
+
+ merge_base_pid = fork();
+ if (!merge_base_pid) {
+ static const char *args[] = {
+ "merge-base",
+ "-a",
+ NULL,
+ NULL,
+ NULL
+ };
+ args[2] = strdup(sha1_to_hex(head_sha1));
+ args[3] = sha1_to_hex(branch_sha1);
+
+ dup2(pipe_fd[1], 1);
+ close(pipe_fd[0]);
+ close(pipe_fd[1]);
+ execv_git_cmd(args);
+ die("merge-base setup failed");
+ }
+ if (merge_base_pid < 0)
+ die("merge-base fork failed");
+
+ dup2(pipe_fd[0], 0);
+ close(pipe_fd[0]);
+ close(pipe_fd[1]);
+ while (fgets(line, sizeof(line), stdin) != NULL) {
+ if (get_sha1_hex(line, merge_sha1))
+ die("expected sha1, got garbage:\n %s", line);
+ if (!memcmp(branch_sha1, merge_sha1, 20)) {
+ verified = 1;
+ break;
+ }
+ }
+
+ return verified;
+}
+
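+/* Delete the single remote branch matching pattern, after sanity checks against the remote HEAD unless force is set. */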
+static int delete_remote_branch(char *pattern, int force)
+{
+ struct ref *refs = remote_refs;
+ struct ref *remote_ref = NULL;
+ unsigned char head_sha1[20];
+ char *symref = NULL;
+ int match;
+ int patlen = strlen(pattern);
+ int i;
+ struct active_request_slot *slot;
+ struct slot_results results;
+ char *url;
+
+ /* Find the remote branch(es) matching the specified branch name */
+ for (match = 0; refs; refs = refs->next) {
+ char *name = refs->name;
+ int namelen = strlen(name);
+ if (namelen < patlen ||
+ memcmp(name + namelen - patlen, pattern, patlen))
+ continue;
+ if (namelen != patlen && name[namelen - patlen - 1] != '/')
+ continue;
+ match++;
+ remote_ref = refs;
+ }
+ if (match == 0)
+ return error("No remote branch matches %s", pattern);
+ if (match != 1)
+ return error("More than one remote branch matches %s",
+ pattern);
+
+ /*
+ * Remote HEAD must be a symref (not exactly foolproof; a remote
+ * symlink to a symref will look like a symref)
+ */
+ fetch_symref("HEAD", &symref, head_sha1);
+ if (!symref)
+ return error("Remote HEAD is not a symref");
+
+ /* Remote branch must not be the remote HEAD */
+ for (i=0; symref && i<MAXDEPTH; i++) {
+ if (!strcmp(remote_ref->name, symref))
+ return error("Remote branch %s is the current HEAD",
+ remote_ref->name);
+ fetch_symref(symref, &symref, head_sha1);
+ }
+
+ /* Run extra sanity checks if delete is not forced */
+ if (!force) {
+ /* Remote HEAD must resolve to a known object */
+ if (symref)
+ return error("Remote HEAD symrefs too deep");
+ if (is_zero_sha1(head_sha1))
+ return error("Unable to resolve remote HEAD");
+ if (!has_sha1_file(head_sha1))
+ return error("Remote HEAD resolves to object %s\nwhich does not exist locally, perhaps you need to fetch?", sha1_to_hex(head_sha1));
+
+ /* Remote branch must resolve to a known object */
+ if (is_zero_sha1(remote_ref->old_sha1))
+ return error("Unable to resolve remote branch %s",
+ remote_ref->name);
+ if (!has_sha1_file(remote_ref->old_sha1))
+ return error("Remote branch %s resolves to object %s\nwhich does not exist locally, perhaps you need to fetch?", remote_ref->name, sha1_to_hex(remote_ref->old_sha1));
+
+ /* Remote branch must be an ancestor of remote HEAD */
+ if (!verify_merge_base(head_sha1, remote_ref->old_sha1)) {
+ return error("The branch '%s' is not a strict subset of your current HEAD.\nIf you are sure you want to delete it, run:\n\t'git http-push -D %s %s'", remote_ref->name, remote->url, pattern);
+ }
+ }
+
+ /* Send delete request */
+ fprintf(stderr, "Removing remote branch '%s'\n", remote_ref->name);
+ url = xmalloc(strlen(remote->url) + strlen(remote_ref->name) + 1);
+ sprintf(url, "%s%s", remote->url, remote_ref->name);
+ slot = get_active_slot();
+ slot->results = &results;
+ curl_easy_setopt(slot->curl, CURLOPT_HTTPGET, 1);
+ curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_null);
+ curl_easy_setopt(slot->curl, CURLOPT_URL, url);
+ curl_easy_setopt(slot->curl, CURLOPT_CUSTOMREQUEST, DAV_DELETE);
+ if (start_active_slot(slot)) {
+ run_active_slot(slot);
+ free(url);
+ if (results.curl_result != CURLE_OK)
+ return error("DELETE request failed (%d/%ld)\n",
+ results.curl_result, results.http_code);
+ } else {
+ free(url);
+ return error("Unable to start DELETE request");
+ }
+
+ return 0;
+}
+
int main(int argc, char **argv)
{
struct transfer_request *request;
struct transfer_request *next_request;
int nr_refspec = 0;
char **refspec = NULL;
- int do_remote_update;
- int new_branch;
- int force_this;
- char *local_ref;
- unsigned char local_sha1[20];
- struct object *local_object = NULL;
- char *remote_ref = NULL;
- unsigned char remote_sha1[20];
- struct active_lock *remote_lock;
- char *remote_path = NULL;
+ struct remote_lock *ref_lock = NULL;
+ struct remote_lock *info_ref_lock = NULL;
+ struct rev_info revs;
+ int delete_branch = 0;
+ int force_delete = 0;
+ int objects_to_send;
int rc = 0;
int i;
+ int new_refs;
+ struct ref *ref;
setup_git_directory();
setup_ident();
- remote = xmalloc(sizeof(*remote));
- remote->url = NULL;
- remote->packs = NULL;
+ remote = xcalloc(sizeof(*remote), 1);
argv++;
for (i = 1; i < argc; i++, argv++) {
char *arg = *argv;
if (*arg == '-') {
- if (!strcmp(arg, "--complete")) {
+ if (!strcmp(arg, "--all")) {
push_all = 1;
continue;
}
@@ -1261,10 +2336,24 @@ int main(int argc, char **argv)
push_verbosely = 1;
continue;
}
- usage(http_push_usage);
+ if (!strcmp(arg, "-d")) {
+ delete_branch = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-D")) {
+ delete_branch = 1;
+ force_delete = 1;
+ continue;
+ }
}
if (!remote->url) {
+ char *path = strstr(arg, "//");
remote->url = arg;
+ if (path) {
+ path = index(path+2, '/');
+ if (path)
+ remote->path_len = strlen(path);
+ }
continue;
}
refspec = argv;
@@ -1275,7 +2364,10 @@ int main(int argc, char **argv)
if (!remote->url)
usage(http_push_usage);
- memset(remote_dir_exists, 0, 256);
+ if (delete_branch && nr_refspec != 1)
+ die("You must specify only one branch name when deleting a remote branch");
+
+ memset(remote_dir_exists, -1, 256);
http_init();
@@ -1293,121 +2385,165 @@ int main(int argc, char **argv)
goto cleanup;
}
- /* Process each refspec */
- for (i = 0; i < nr_refspec; i++) {
- char *ep;
- force_this = 0;
- do_remote_update = 0;
- new_branch = 0;
- local_ref = refspec[i];
- if (*local_ref == '+') {
- force_this = 1;
- local_ref++;
+ /* Check whether the remote has server info files */
+ remote->can_update_info_refs = 0;
+ remote->has_info_refs = remote_exists("info/refs");
+ remote->has_info_packs = remote_exists("objects/info/packs");
+ if (remote->has_info_refs) {
+ info_ref_lock = lock_remote("info/refs", LOCK_TIME);
+ if (info_ref_lock)
+ remote->can_update_info_refs = 1;
+ }
+ if (remote->has_info_packs)
+ fetch_indices();
+
+ /* Get a list of all local and remote heads to validate refspecs */
+ get_local_heads();
+ fprintf(stderr, "Fetching remote heads...\n");
+ get_dav_remote_heads();
+
+ /* Remove a remote branch if -d or -D was specified */
+ if (delete_branch) {
+ if (delete_remote_branch(refspec[0], force_delete) == -1)
+ fprintf(stderr, "Unable to delete remote branch %s\n",
+ refspec[0]);
+ goto cleanup;
+ }
+
+ /* match them up */
+ if (!remote_tail)
+ remote_tail = &remote_refs;
+ if (match_refs(local_refs, remote_refs, &remote_tail,
+ nr_refspec, refspec, push_all))
+ return -1;
+ if (!remote_refs) {
+ fprintf(stderr, "No refs in common and none specified; doing nothing.\n");
+ return 0;
+ }
+
+ new_refs = 0;
+ for (ref = remote_refs; ref; ref = ref->next) {
+ char old_hex[60], *new_hex;
+ const char *commit_argv[4];
+ int commit_argc;
+ char *new_sha1_hex, *old_sha1_hex;
+
+ if (!ref->peer_ref)
+ continue;
+ if (!memcmp(ref->old_sha1, ref->peer_ref->new_sha1, 20)) {
+ if (push_verbosely || 1)
+ fprintf(stderr, "'%s': up-to-date\n", ref->name);
+ continue;
}
- ep = strchr(local_ref, ':');
- if (ep) {
- remote_ref = ep + 1;
- *ep = 0;
+
+ if (!force_all &&
+ !is_zero_sha1(ref->old_sha1) &&
+ !ref->force) {
+ if (!has_sha1_file(ref->old_sha1) ||
+ !ref_newer(ref->peer_ref->new_sha1,
+ ref->old_sha1)) {
+ /* We do not have the remote ref, or
+ * we know that the remote ref is not
+ * an ancestor of what we are trying to
+ * push. Either way this could lose
+ * commits at the remote end, and likely
+ * we were not up to date to begin with.
+ */
+ error("remote '%s' is not a strict "
+ "subset of local ref '%s'. "
+ "maybe you are not up-to-date and "
+ "need to pull first?",
+ ref->name,
+ ref->peer_ref->name);
+ rc = -2;
+ continue;
+ }
}
- else
- remote_ref = local_ref;
+ memcpy(ref->new_sha1, ref->peer_ref->new_sha1, 20);
+ if (is_zero_sha1(ref->new_sha1)) {
+ error("cannot happen anymore");
+ rc = -3;
+ continue;
+ }
+ new_refs++;
+ strcpy(old_hex, sha1_to_hex(ref->old_sha1));
+ new_hex = sha1_to_hex(ref->new_sha1);
+
+ fprintf(stderr, "updating '%s'", ref->name);
+ if (strcmp(ref->name, ref->peer_ref->name))
+ fprintf(stderr, " using '%s'", ref->peer_ref->name);
+ fprintf(stderr, "\n from %s\n to %s\n", old_hex, new_hex);
+
/* Lock remote branch ref */
- if (remote_path)
- free(remote_path);
- remote_path = xmalloc(strlen(remote_ref) + 12);
- sprintf(remote_path, "refs/heads/%s", remote_ref);
- remote_lock = lock_remote(remote_path, LOCK_TIME);
- if (remote_lock == NULL) {
+ ref_lock = lock_remote(ref->name, LOCK_TIME);
+ if (ref_lock == NULL) {
fprintf(stderr, "Unable to lock remote branch %s\n",
- remote_ref);
+ ref->name);
rc = 1;
continue;
}
- /* Resolve local and remote refs */
- if (fetch_ref(remote_ref, remote_sha1) != 0) {
- fprintf(stderr,
- "Remote branch %s does not exist on %s\n",
- remote_ref, remote->url);
- new_branch = 1;
+ /* Set up revision info for this refspec */
+ commit_argc = 3;
+ new_sha1_hex = strdup(sha1_to_hex(ref->new_sha1));
+ old_sha1_hex = NULL;
+ commit_argv[1] = "--objects";
+ commit_argv[2] = new_sha1_hex;
+ if (!push_all && !is_zero_sha1(ref->old_sha1)) {
+ old_sha1_hex = xmalloc(42);
+ sprintf(old_sha1_hex, "^%s",
+ sha1_to_hex(ref->old_sha1));
+ commit_argv[3] = old_sha1_hex;
+ commit_argc++;
}
- if (get_sha1(local_ref, local_sha1) != 0) {
- fprintf(stderr, "Error resolving local branch %s\n",
- local_ref);
- rc = 1;
- goto unlock;
- }
-
- /* Find relationship between local and remote */
- local_object = parse_object(local_sha1);
- if (!local_object) {
- fprintf(stderr, "Unable to parse local object %s\n",
- sha1_to_hex(local_sha1));
- rc = 1;
- goto unlock;
- } else if (new_branch) {
- do_remote_update = 1;
- } else {
- if (!memcmp(local_sha1, remote_sha1, 20)) {
- fprintf(stderr,
- "* %s: same as branch '%s' of %s\n",
- local_ref, remote_ref, remote->url);
- } else if (is_ancestor(remote_sha1,
- (struct commit *)local_object)) {
- fprintf(stderr,
- "Remote %s will fast-forward to local %s\n",
- remote_ref, local_ref);
- do_remote_update = 1;
- } else if (force_all || force_this) {
- fprintf(stderr,
- "* %s on %s does not fast forward to local branch '%s', overwriting\n",
- remote_ref, remote->url, local_ref);
- do_remote_update = 1;
- } else {
- fprintf(stderr,
- "* %s on %s does not fast forward to local branch '%s'\n",
- remote_ref, remote->url, local_ref);
- rc = 1;
- goto unlock;
- }
+ setup_revisions(commit_argc, commit_argv, &revs, NULL);
+ free(new_sha1_hex);
+ if (old_sha1_hex) {
+ free(old_sha1_hex);
+ commit_argv[1] = NULL;
}
- /* Generate and check list of required objects */
+ /* Generate a list of objects that need to be pushed */
pushing = 0;
- if (do_remote_update || push_all)
- fetch_indices();
- get_delta(push_all ? NULL : remote_sha1,
- local_object, remote_lock);
+ prepare_revision_walk(&revs);
+ mark_edges_uninteresting(revs.commits);
+ objects_to_send = get_delta(&revs, ref_lock);
finish_all_active_slots();
/* Push missing objects to remote, this would be a
convenient time to pack them first if appropriate. */
pushing = 1;
+ if (objects_to_send)
+ fprintf(stderr, " sending %d objects\n",
+ objects_to_send);
fill_active_slots();
finish_all_active_slots();
/* Update the remote branch if all went well */
- if (do_remote_update) {
- if (!aborted && update_remote(local_sha1,
- remote_lock)) {
- fprintf(stderr, "%s remote branch %s\n",
- new_branch ? "Created" : "Updated",
- remote_ref);
- } else {
- fprintf(stderr,
- "Unable to %s remote branch %s\n",
- new_branch ? "create" : "update",
- remote_ref);
- rc = 1;
- goto unlock;
- }
+ if (aborted || !update_remote(ref->new_sha1, ref_lock)) {
+ rc = 1;
+ goto unlock;
}
unlock:
- unlock_remote(remote_lock);
- free(remote_path);
+ if (!rc)
+ fprintf(stderr, " done\n");
+ unlock_remote(ref_lock);
+ check_locks();
+ }
+
+ /* Update remote server info if appropriate */
+ if (remote->has_info_refs && new_refs) {
+ if (info_ref_lock && remote->can_update_info_refs) {
+ fprintf(stderr, "Updating remote server info\n");
+ update_remote_info_refs(info_ref_lock);
+ } else {
+ fprintf(stderr, "Unable to update server info\n");
+ }
}
+ if (info_ref_lock)
+ unlock_remote(info_ref_lock);
cleanup:
free(remote);
diff --git a/http.c b/http.c
index 14a7669..0cb42a8 100644
--- a/http.c
+++ b/http.c
@@ -195,6 +195,8 @@ static CURL* get_curl_handle(void)
if (getenv("GIT_CURL_VERBOSE"))
curl_easy_setopt(result, CURLOPT_VERBOSE, 1);
+ curl_easy_setopt(result, CURLOPT_USERAGENT, GIT_USER_AGENT);
+
return result;
}
@@ -339,6 +341,7 @@ struct active_request_slot *get_active_slot(void)
slot->in_use = 1;
slot->local = NULL;
slot->results = NULL;
+ slot->finished = NULL;
slot->callback_data = NULL;
slot->callback_func = NULL;
curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, pragma_header);
@@ -389,8 +392,10 @@ void run_active_slot(struct active_request_slot *slot)
fd_set excfds;
int max_fd;
struct timeval select_timeout;
+ int finished = 0;
- while (slot->in_use) {
+ slot->finished = &finished;
+ while (!finished) {
data_received = 0;
step_active_slots();
@@ -442,6 +447,9 @@ static void finish_active_slot(struct active_request_slot *slot)
closedown_active_slot(slot);
curl_easy_getinfo(slot->curl, CURLINFO_HTTP_CODE, &slot->http_code);
+ if (slot->finished != NULL)
+ (*slot->finished) = 1;
+
/* Store slot results so they can be read after the slot is reused */
if (slot->results != NULL) {
slot->results->curl_result = slot->curl_result;
diff --git a/http.h b/http.h
index 36fa154..9ca16ac 100644
--- a/http.h
+++ b/http.h
@@ -35,6 +35,7 @@ struct active_request_slot
int in_use;
CURLcode curl_result;
long http_code;
+ int *finished;
struct slot_results *results;
void *callback_data;
void (*callback_func)(void *data);
diff --git a/imap-send.c b/imap-send.c
new file mode 100644
index 0000000..f3cb79b
--- /dev/null
+++ b/imap-send.c
@@ -0,0 +1,1359 @@
+/*
+ * git-imap-send - drops patches into an imap Drafts folder
+ * derived from isync/mbsync - mailbox synchronizer
+ *
+ * Copyright (C) 2000-2002 Michael R. Elkins <me@mutt.org>
+ * Copyright (C) 2002-2004 Oswald Buddenhagen <ossi@users.sf.net>
+ * Copyright (C) 2004 Theodore Y. Ts'o <tytso@mit.edu>
+ * Copyright (C) 2006 Mike McCormack
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#include "cache.h"
+
+#include <assert.h>
+#include <netinet/in.h>
+#include <netinet/tcp.h>
+#include <arpa/inet.h>
+#include <sys/socket.h>
+#include <netdb.h>
+
+typedef struct store_conf {
+ char *name;
+ const char *path; /* should this be here? its interpretation is driver-specific */
+ char *map_inbox;
+ char *trash;
+ unsigned max_size; /* off_t is overkill */
+ unsigned trash_remote_new:1, trash_only_new:1;
+} store_conf_t;
+
+typedef struct string_list {
+ struct string_list *next;
+ char string[1];
+} string_list_t;
+
+typedef struct channel_conf {
+ struct channel_conf *next;
+ char *name;
+ store_conf_t *master, *slave;
+ char *master_name, *slave_name;
+ char *sync_state;
+ string_list_t *patterns;
+ int mops, sops;
+ unsigned max_messages; /* for slave only */
+} channel_conf_t;
+
+typedef struct group_conf {
+ struct group_conf *next;
+ char *name;
+ string_list_t *channels;
+} group_conf_t;
+
+/* For message->status */
+#define M_RECENT (1<<0) /* unsyncable flag; maildir_* depend on this being 1<<0 */
+#define M_DEAD (1<<1) /* expunged */
+#define M_FLAGS (1<<2) /* flags fetched */
+
+typedef struct message {
+ struct message *next;
+ /* string_list_t *keywords; */
+ size_t size; /* zero implies "not fetched" */
+ int uid;
+ unsigned char flags, status;
+} message_t;
+
+typedef struct store {
+ store_conf_t *conf; /* foreign */
+
+ /* currently open mailbox */
+ const char *name; /* foreign! maybe preset? */
+ char *path; /* own */
+ message_t *msgs; /* own */
+ int uidvalidity;
+ unsigned char opts; /* maybe preset? */
+ /* note that the following do _not_ reflect stats from msgs, but mailbox totals */
+ int count; /* # of messages */
+ int recent; /* # of recent messages - don't trust this beyond the initial read */
+} store_t;
+
+typedef struct {
+ char *data;
+ int len;
+ unsigned char flags;
+ unsigned char crlf:1;
+} msg_data_t;
+
+#define DRV_OK 0
+#define DRV_MSG_BAD -1
+#define DRV_BOX_BAD -2
+#define DRV_STORE_BAD -3
+
+static int Verbose, Quiet;
+
+static void info( const char *, ... );
+static void warn( const char *, ... );
+
+static char *next_arg( char ** );
+
+static void free_generic_messages( message_t * );
+
+static int nfvasprintf( char **str, const char *fmt, va_list va );
+static int nfsnprintf( char *buf, int blen, const char *fmt, ... );
+
+
+static void arc4_init( void );
+static unsigned char arc4_getbyte( void );
+
+typedef struct imap_server_conf {
+ char *name;
+ char *tunnel;
+ char *host;
+ int port;
+ char *user;
+ char *pass;
+} imap_server_conf_t;
+
+typedef struct imap_store_conf {
+ store_conf_t gen;
+ imap_server_conf_t *server;
+ unsigned use_namespace:1;
+} imap_store_conf_t;
+
+#define NIL (void*)0x1
+#define LIST (void*)0x2
+
+typedef struct _list {
+ struct _list *next, *child;
+ char *val;
+ int len;
+} list_t;
+
+typedef struct {
+ int fd;
+} Socket_t;
+
+typedef struct {
+ Socket_t sock;
+ int bytes;
+ int offset;
+ char buf[1024];
+} buffer_t;
+
+struct imap_cmd;
+
+typedef struct imap {
+ int uidnext; /* from SELECT responses */
+ list_t *ns_personal, *ns_other, *ns_shared; /* NAMESPACE info */
+ unsigned caps, rcaps; /* CAPABILITY results */
+ /* command queue */
+ int nexttag, num_in_progress, literal_pending;
+ struct imap_cmd *in_progress, **in_progress_append;
+ buffer_t buf; /* this is BIG, so put it last */
+} imap_t;
+
+typedef struct imap_store {
+ store_t gen;
+ int uidvalidity;
+ imap_t *imap;
+ const char *prefix;
+ unsigned /*currentnc:1,*/ trashnc:1;
+} imap_store_t;
+
+struct imap_cmd_cb {
+ int (*cont)( imap_store_t *ctx, struct imap_cmd *cmd, const char *prompt );
+ void (*done)( imap_store_t *ctx, struct imap_cmd *cmd, int response);
+ void *ctx;
+ char *data;
+ int dlen;
+ int uid;
+ unsigned create:1, trycreate:1;
+};
+
+struct imap_cmd {
+ struct imap_cmd *next;
+ struct imap_cmd_cb cb;
+ char *cmd;
+ int tag;
+};
+
+#define CAP(cap) (imap->caps & (1 << (cap)))
+
+enum CAPABILITY {
+ NOLOGIN = 0,
+ UIDPLUS,
+ LITERALPLUS,
+ NAMESPACE,
+};
+
+static const char *cap_list[] = {
+ "LOGINDISABLED",
+ "UIDPLUS",
+ "LITERAL+",
+ "NAMESPACE",
+};
+
+#define RESP_OK 0
+#define RESP_NO 1
+#define RESP_BAD 2
+
+static int get_cmd_result( imap_store_t *ctx, struct imap_cmd *tcmd );
+
+
+static const char *Flags[] = {
+ "Draft",
+ "Flagged",
+ "Answered",
+ "Seen",
+ "Deleted",
+};
+
+static void
+socket_perror( const char *func, Socket_t *sock, int ret )
+{
+ if (ret < 0)
+ perror( func );
+ else
+ fprintf( stderr, "%s: unexpected EOF\n", func );
+}
+
+static int
+socket_read( Socket_t *sock, char *buf, int len )
+{
+ int n = read( sock->fd, buf, len );
+ if (n <= 0) {
+ socket_perror( "read", sock, n );
+ close( sock->fd );
+ sock->fd = -1;
+ }
+ return n;
+}
+
+static int
+socket_write( Socket_t *sock, char *buf, int len )
+{
+ int n = write( sock->fd, buf, len );
+ if (n != len) {
+ socket_perror( "write", sock, n );
+ close( sock->fd );
+ sock->fd = -1;
+ }
+ return n;
+}
+
+/* simple line buffering */
+static int
+buffer_gets( buffer_t * b, char **s )
+{
+ int n;
+ int start = b->offset;
+
+ *s = b->buf + start;
+
+ for (;;) {
+ /* make sure we have enough data to read the \r\n sequence */
+ if (b->offset + 1 >= b->bytes) {
+ if (start) {
+ /* shift down used bytes */
+ *s = b->buf;
+
+ assert( start <= b->bytes );
+ n = b->bytes - start;
+
+ if (n)
+ memcpy( b->buf, b->buf + start, n );
+ b->offset -= start;
+ b->bytes = n;
+ start = 0;
+ }
+
+ n = socket_read( &b->sock, b->buf + b->bytes,
+ sizeof(b->buf) - b->bytes );
+
+ if (n <= 0)
+ return -1;
+
+ b->bytes += n;
+ }
+
+ if (b->buf[b->offset] == '\r') {
+ assert( b->offset + 1 < b->bytes );
+ if (b->buf[b->offset + 1] == '\n') {
+ b->buf[b->offset] = 0; /* terminate the string */
+ b->offset += 2; /* next line */
+ if (Verbose)
+ puts( *s );
+ return 0;
+ }
+ }
+
+ b->offset++;
+ }
+ /* not reached */
+}
+
+static void
+info( const char *msg, ... )
+{
+ va_list va;
+
+ if (!Quiet) {
+ va_start( va, msg );
+ vprintf( msg, va );
+ va_end( va );
+ fflush( stdout );
+ }
+}
+
+static void
+warn( const char *msg, ... )
+{
+ va_list va;
+
+ if (Quiet < 2) {
+ va_start( va, msg );
+ vfprintf( stderr, msg, va );
+ va_end( va );
+ }
+}
+
+static char *
+next_arg( char **s )
+{
+ char *ret;
+
+ if (!s || !*s)
+ return NULL;
+ while (isspace( (unsigned char) **s ))
+ (*s)++;
+ if (!**s) {
+ *s = NULL;
+ return NULL;
+ }
+ if (**s == '"') {
+ ++*s;
+ ret = *s;
+ *s = strchr( *s, '"' );
+ } else {
+ ret = *s;
+ while (**s && !isspace( (unsigned char) **s ))
+ (*s)++;
+ }
+ if (*s) {
+ if (**s)
+ *(*s)++ = 0;
+ if (!**s)
+ *s = NULL;
+ }
+ return ret;
+}
+
+static void
+free_generic_messages( message_t *msgs )
+{
+ message_t *tmsg;
+
+ for (; msgs; msgs = tmsg) {
+ tmsg = msgs->next;
+ free( msgs );
+ }
+}
+
+static int
+git_vasprintf( char **strp, const char *fmt, va_list ap )
+{
+ int len;
+ char tmp[1024];
+
+ if ((len = vsnprintf( tmp, sizeof(tmp), fmt, ap )) < 0 || !(*strp = xmalloc( len + 1 )))
+ return -1;
+ if (len >= (int)sizeof(tmp))
+ vsprintf( *strp, fmt, ap );
+ else
+ memcpy( *strp, tmp, len + 1 );
+ return len;
+}
+
+static int
+nfsnprintf( char *buf, int blen, const char *fmt, ... )
+{
+ int ret;
+ va_list va;
+
+ va_start( va, fmt );
+ if (blen <= 0 || (unsigned)(ret = vsnprintf( buf, blen, fmt, va )) >= (unsigned)blen)
+ die( "Fatal: buffer too small. Please report a bug.\n");
+ va_end( va );
+ return ret;
+}
+
+static int
+nfvasprintf( char **str, const char *fmt, va_list va )
+{
+ int ret = git_vasprintf( str, fmt, va );
+ if (ret < 0)
+ die( "Fatal: Out of memory\n");
+ return ret;
+}
+
+static struct {
+ unsigned char i, j, s[256];
+} rs;
+
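+/* Seed the RC4-style generator from /dev/urandom (or /dev/random); its output is used for X-TUID message identifiers. */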
+static void
+arc4_init( void )
+{
+ int i, fd;
+ unsigned char j, si, dat[128];
+
+ if ((fd = open( "/dev/urandom", O_RDONLY )) < 0 && (fd = open( "/dev/random", O_RDONLY )) < 0) {
+ fprintf( stderr, "Fatal: no random number source available.\n" );
+ exit( 3 );
+ }
+ if (read( fd, dat, 128 ) != 128) {
+ fprintf( stderr, "Fatal: cannot read random number source.\n" );
+ exit( 3 );
+ }
+ close( fd );
+
+ for (i = 0; i < 256; i++)
+ rs.s[i] = i;
+ for (i = j = 0; i < 256; i++) {
+ si = rs.s[i];
+ j += si + dat[i & 127];
+ rs.s[i] = rs.s[j];
+ rs.s[j] = si;
+ }
+ rs.i = rs.j = 0;
+
+ for (i = 0; i < 256; i++)
+ arc4_getbyte();
+}
+
+static unsigned char
+arc4_getbyte( void )
+{
+ unsigned char si, sj;
+
+ rs.i++;
+ si = rs.s[rs.i];
+ rs.j += si;
+ sj = rs.s[rs.j];
+ rs.s[rs.i] = sj;
+ rs.s[rs.j] = si;
+ return rs.s[(si + sj) & 0xff];
+}
+
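+/* Format and queue one IMAP command; with LITERAL+ the literal data is sent right away, otherwise it is held until the server's continuation prompt. */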
+static struct imap_cmd *
+v_issue_imap_cmd( imap_store_t *ctx, struct imap_cmd_cb *cb,
+ const char *fmt, va_list ap )
+{
+ imap_t *imap = ctx->imap;
+ struct imap_cmd *cmd;
+ int n, bufl;
+ char buf[1024];
+
+ cmd = xmalloc( sizeof(struct imap_cmd) );
+ nfvasprintf( &cmd->cmd, fmt, ap );
+ cmd->tag = ++imap->nexttag;
+
+ if (cb)
+ cmd->cb = *cb;
+ else
+ memset( &cmd->cb, 0, sizeof(cmd->cb) );
+
+ while (imap->literal_pending)
+ get_cmd_result( ctx, NULL );
+
+ bufl = nfsnprintf( buf, sizeof(buf), cmd->cb.data ? CAP(LITERALPLUS) ?
+ "%d %s{%d+}\r\n" : "%d %s{%d}\r\n" : "%d %s\r\n",
+ cmd->tag, cmd->cmd, cmd->cb.dlen );
+ if (Verbose) {
+ if (imap->num_in_progress)
+ printf( "(%d in progress) ", imap->num_in_progress );
+ if (memcmp( cmd->cmd, "LOGIN", 5 ))
+ printf( ">>> %s", buf );
+ else
+ printf( ">>> %d LOGIN <user> <pass>\n", cmd->tag );
+ }
+ if (socket_write( &imap->buf.sock, buf, bufl ) != bufl) {
+ free( cmd->cmd );
+ free( cmd );
+ if (cb && cb->data)
+ free( cb->data );
+ return NULL;
+ }
+ if (cmd->cb.data) {
+ if (CAP(LITERALPLUS)) {
+ n = socket_write( &imap->buf.sock, cmd->cb.data, cmd->cb.dlen );
+ free( cmd->cb.data );
+ if (n != cmd->cb.dlen ||
+ (n = socket_write( &imap->buf.sock, "\r\n", 2 )) != 2)
+ {
+ free( cmd->cmd );
+ free( cmd );
+ return NULL;
+ }
+ cmd->cb.data = NULL;
+ } else
+ imap->literal_pending = 1;
+ } else if (cmd->cb.cont)
+ imap->literal_pending = 1;
+ cmd->next = NULL;
+ *imap->in_progress_append = cmd;
+ imap->in_progress_append = &cmd->next;
+ imap->num_in_progress++;
+ return cmd;
+}
+
+static struct imap_cmd *
+issue_imap_cmd( imap_store_t *ctx, struct imap_cmd_cb *cb, const char *fmt, ... )
+{
+ struct imap_cmd *ret;
+ va_list ap;
+
+ va_start( ap, fmt );
+ ret = v_issue_imap_cmd( ctx, cb, fmt, ap );
+ va_end( ap );
+ return ret;
+}
+
+static int
+imap_exec( imap_store_t *ctx, struct imap_cmd_cb *cb, const char *fmt, ... )
+{
+ va_list ap;
+ struct imap_cmd *cmdp;
+
+ va_start( ap, fmt );
+ cmdp = v_issue_imap_cmd( ctx, cb, fmt, ap );
+ va_end( ap );
+ if (!cmdp)
+ return RESP_BAD;
+
+ return get_cmd_result( ctx, cmdp );
+}
+
+static int
+imap_exec_m( imap_store_t *ctx, struct imap_cmd_cb *cb, const char *fmt, ... )
+{
+ va_list ap;
+ struct imap_cmd *cmdp;
+
+ va_start( ap, fmt );
+ cmdp = v_issue_imap_cmd( ctx, cb, fmt, ap );
+ va_end( ap );
+ if (!cmdp)
+ return DRV_STORE_BAD;
+
+ switch (get_cmd_result( ctx, cmdp )) {
+ case RESP_BAD: return DRV_STORE_BAD;
+ case RESP_NO: return DRV_MSG_BAD;
+ default: return DRV_OK;
+ }
+}
+
+static int
+is_atom( list_t *list )
+{
+ return list && list->val && list->val != NIL && list->val != LIST;
+}
+
+static int
+is_list( list_t *list )
+{
+ return list && list->val == LIST;
+}
+
+static void
+free_list( list_t *list )
+{
+ list_t *tmp;
+
+ for (; list; list = tmp) {
+ tmp = list->next;
+ if (is_list( list ))
+ free_list( list->child );
+ else if (is_atom( list ))
+ free( list->val );
+ free( list );
+ }
+}
+
+static int
+parse_imap_list_l( imap_t *imap, char **sp, list_t **curp, int level )
+{
+ list_t *cur;
+ char *s = *sp, *p;
+ int n, bytes;
+
+ for (;;) {
+ while (isspace( (unsigned char)*s ))
+ s++;
+ if (level && *s == ')') {
+ s++;
+ break;
+ }
+ *curp = cur = xmalloc( sizeof(*cur) );
+ curp = &cur->next;
+ cur->val = NULL; /* for clean bail */
+ if (*s == '(') {
+ /* sublist */
+ s++;
+ cur->val = LIST;
+ if (parse_imap_list_l( imap, &s, &cur->child, level + 1 ))
+ goto bail;
+ } else if (imap && *s == '{') {
+ /* literal */
+ bytes = cur->len = strtol( s + 1, &s, 10 );
+ if (*s != '}')
+ goto bail;
+
+ s = cur->val = xmalloc( cur->len );
+
+ /* dump what's left over in the input buffer */
+ n = imap->buf.bytes - imap->buf.offset;
+
+ if (n > bytes)
+ /* the entire message fit in the buffer */
+ n = bytes;
+
+ memcpy( s, imap->buf.buf + imap->buf.offset, n );
+ s += n;
+ bytes -= n;
+
+ /* mark that we used part of the buffer */
+ imap->buf.offset += n;
+
+ /* now read the rest of the message */
+ while (bytes > 0) {
+ if ((n = socket_read (&imap->buf.sock, s, bytes)) <= 0)
+ goto bail;
+ s += n;
+ bytes -= n;
+ }
+
+ if (buffer_gets( &imap->buf, &s ))
+ goto bail;
+ } else if (*s == '"') {
+ /* quoted string */
+ s++;
+ p = s;
+ for (; *s != '"'; s++)
+ if (!*s)
+ goto bail;
+ cur->len = s - p;
+ s++;
+ cur->val = xmalloc( cur->len + 1 );
+ memcpy( cur->val, p, cur->len );
+ cur->val[cur->len] = 0;
+ } else {
+ /* atom */
+ p = s;
+ for (; *s && !isspace( (unsigned char)*s ); s++)
+ if (level && *s == ')')
+ break;
+ cur->len = s - p;
+ if (cur->len == 3 && !memcmp ("NIL", p, 3))
+ cur->val = NIL;
+ else {
+ cur->val = xmalloc( cur->len + 1 );
+ memcpy( cur->val, p, cur->len );
+ cur->val[cur->len] = 0;
+ }
+ }
+
+ if (!level)
+ break;
+ if (!*s)
+ goto bail;
+ }
+ *sp = s;
+ *curp = NULL;
+ return 0;
+
+ bail:
+ *curp = NULL;
+ return -1;
+}
+
+static list_t *
+parse_imap_list( imap_t *imap, char **sp )
+{
+ list_t *head;
+
+ if (!parse_imap_list_l( imap, sp, &head, 0 ))
+ return head;
+ free_list( head );
+ return NULL;
+}
+
+static list_t *
+parse_list( char **sp )
+{
+ return parse_imap_list( NULL, sp );
+}
+
+static void
+parse_capability( imap_t *imap, char *cmd )
+{
+ char *arg;
+ unsigned i;
+
+ imap->caps = 0x80000000;
+ while ((arg = next_arg( &cmd )))
+ for (i = 0; i < ARRAY_SIZE(cap_list); i++)
+ if (!strcmp( cap_list[i], arg ))
+ imap->caps |= 1 << i;
+ imap->rcaps = imap->caps;
+}
+
+static int
+parse_response_code( imap_store_t *ctx, struct imap_cmd_cb *cb, char *s )
+{
+ imap_t *imap = ctx->imap;
+ char *arg, *p;
+
+ if (*s != '[')
+ return RESP_OK; /* no response code */
+ s++;
+ if (!(p = strchr( s, ']' ))) {
+ fprintf( stderr, "IMAP error: malformed response code\n" );
+ return RESP_BAD;
+ }
+ *p++ = 0;
+ arg = next_arg( &s );
+ if (!strcmp( "UIDVALIDITY", arg )) {
+ if (!(arg = next_arg( &s )) || !(ctx->gen.uidvalidity = atoi( arg ))) {
+ fprintf( stderr, "IMAP error: malformed UIDVALIDITY status\n" );
+ return RESP_BAD;
+ }
+ } else if (!strcmp( "UIDNEXT", arg )) {
+ if (!(arg = next_arg( &s )) || !(imap->uidnext = atoi( arg ))) {
+ fprintf( stderr, "IMAP error: malformed NEXTUID status\n" );
+ return RESP_BAD;
+ }
+ } else if (!strcmp( "CAPABILITY", arg )) {
+ parse_capability( imap, s );
+ } else if (!strcmp( "ALERT", arg )) {
+ /* RFC2060 says that these messages MUST be displayed
+ * to the user
+ */
+ for (; isspace( (unsigned char)*p ); p++);
+ fprintf( stderr, "*** IMAP ALERT *** %s\n", p );
+ } else if (cb && cb->ctx && !strcmp( "APPENDUID", arg )) {
+ if (!(arg = next_arg( &s )) || !(ctx->gen.uidvalidity = atoi( arg )) ||
+ !(arg = next_arg( &s )) || !(*(int *)cb->ctx = atoi( arg )))
+ {
+ fprintf( stderr, "IMAP error: malformed APPENDUID status\n" );
+ return RESP_BAD;
+ }
+ }
+ return RESP_OK;
+}
+
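+/* Read server responses until tcmd (or, with a NULL tcmd, the next pending command) completes; handles untagged replies, continuations and [TRYCREATE] retries. */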
+static int
+get_cmd_result( imap_store_t *ctx, struct imap_cmd *tcmd )
+{
+ imap_t *imap = ctx->imap;
+ struct imap_cmd *cmdp, **pcmdp, *ncmdp;
+ char *cmd, *arg, *arg1, *p;
+ int n, resp, resp2, tag;
+
+ for (;;) {
+ if (buffer_gets( &imap->buf, &cmd ))
+ return RESP_BAD;
+
+ arg = next_arg( &cmd );
+ if (*arg == '*') {
+ arg = next_arg( &cmd );
+ if (!arg) {
+ fprintf( stderr, "IMAP error: unable to parse untagged response\n" );
+ return RESP_BAD;
+ }
+
+ if (!strcmp( "NAMESPACE", arg )) {
+ imap->ns_personal = parse_list( &cmd );
+ imap->ns_other = parse_list( &cmd );
+ imap->ns_shared = parse_list( &cmd );
+ } else if (!strcmp( "OK", arg ) || !strcmp( "BAD", arg ) ||
+ !strcmp( "NO", arg ) || !strcmp( "BYE", arg )) {
+ if ((resp = parse_response_code( ctx, NULL, cmd )) != RESP_OK)
+ return resp;
+ } else if (!strcmp( "CAPABILITY", arg ))
+ parse_capability( imap, cmd );
+ else if ((arg1 = next_arg( &cmd ))) {
+ if (!strcmp( "EXISTS", arg1 ))
+ ctx->gen.count = atoi( arg );
+ else if (!strcmp( "RECENT", arg1 ))
+ ctx->gen.recent = atoi( arg );
+ } else {
+ fprintf( stderr, "IMAP error: unable to parse untagged response\n" );
+ return RESP_BAD;
+ }
+ } else if (!imap->in_progress) {
+ fprintf( stderr, "IMAP error: unexpected reply: %s %s\n", arg, cmd ? cmd : "" );
+ return RESP_BAD;
+ } else if (*arg == '+') {
+ /* This can happen only with the last command underway, as
+ it enforces a round-trip. */
+ cmdp = (struct imap_cmd *)((char *)imap->in_progress_append -
+ offsetof(struct imap_cmd, next));
+ if (cmdp->cb.data) {
+ n = socket_write( &imap->buf.sock, cmdp->cb.data, cmdp->cb.dlen );
+ free( cmdp->cb.data );
+ cmdp->cb.data = NULL;
+ if (n != (int)cmdp->cb.dlen)
+ return RESP_BAD;
+ } else if (cmdp->cb.cont) {
+ if (cmdp->cb.cont( ctx, cmdp, cmd ))
+ return RESP_BAD;
+ } else {
+ fprintf( stderr, "IMAP error: unexpected command continuation request\n" );
+ return RESP_BAD;
+ }
+ if (socket_write( &imap->buf.sock, "\r\n", 2 ) != 2)
+ return RESP_BAD;
+ if (!cmdp->cb.cont)
+ imap->literal_pending = 0;
+ if (!tcmd)
+ return DRV_OK;
+ } else {
+ tag = atoi( arg );
+ for (pcmdp = &imap->in_progress; (cmdp = *pcmdp); pcmdp = &cmdp->next)
+ if (cmdp->tag == tag)
+ goto gottag;
+ fprintf( stderr, "IMAP error: unexpected tag %s\n", arg );
+ return RESP_BAD;
+ gottag:
+ if (!(*pcmdp = cmdp->next))
+ imap->in_progress_append = pcmdp;
+ imap->num_in_progress--;
+ if (cmdp->cb.cont || cmdp->cb.data)
+ imap->literal_pending = 0;
+ arg = next_arg( &cmd );
+ if (!strcmp( "OK", arg ))
+ resp = DRV_OK;
+ else {
+ if (!strcmp( "NO", arg )) {
+ if (cmdp->cb.create && cmd && (cmdp->cb.trycreate || !memcmp( cmd, "[TRYCREATE]", 11 ))) { /* SELECT, APPEND or UID COPY */
+ p = strchr( cmdp->cmd, '"' );
+ if (!issue_imap_cmd( ctx, NULL, "CREATE \"%.*s\"", strchr( p + 1, '"' ) - p + 1, p )) {
+ resp = RESP_BAD;
+ goto normal;
+ }
+ /* not waiting here violates the spec, but a server that does not
+ grok this nonetheless violates it too. */
+ cmdp->cb.create = 0;
+ if (!(ncmdp = issue_imap_cmd( ctx, &cmdp->cb, "%s", cmdp->cmd ))) {
+ resp = RESP_BAD;
+ goto normal;
+ }
+ free( cmdp->cmd );
+ free( cmdp );
+ if (!tcmd)
+ return 0; /* ignored */
+ if (cmdp == tcmd)
+ tcmd = ncmdp;
+ continue;
+ }
+ resp = RESP_NO;
+ } else /*if (!strcmp( "BAD", arg ))*/
+ resp = RESP_BAD;
+ fprintf( stderr, "IMAP command '%s' returned response (%s) - %s\n",
+ memcmp (cmdp->cmd, "LOGIN", 5) ?
+ cmdp->cmd : "LOGIN <user> <pass>",
+ arg, cmd ? cmd : "");
+ }
+ if ((resp2 = parse_response_code( ctx, &cmdp->cb, cmd )) > resp)
+ resp = resp2;
+ normal:
+ if (cmdp->cb.done)
+ cmdp->cb.done( ctx, cmdp, resp );
+ if (cmdp->cb.data)
+ free( cmdp->cb.data );
+ free( cmdp->cmd );
+ free( cmdp );
+ if (!tcmd || tcmd == cmdp)
+ return resp;
+ }
+ }
+ /* not reached */
+}
+
+static void
+imap_close_server( imap_store_t *ictx )
+{
+ imap_t *imap = ictx->imap;
+
+ if (imap->buf.sock.fd != -1) {
+ imap_exec( ictx, NULL, "LOGOUT" );
+ close( imap->buf.sock.fd );
+ }
+ free_list( imap->ns_personal );
+ free_list( imap->ns_other );
+ free_list( imap->ns_shared );
+ free( imap );
+}
+
+static void
+imap_close_store( store_t *ctx )
+{
+ imap_close_server( (imap_store_t *)ctx );
+ free_generic_messages( ctx->msgs );
+ free( ctx );
+}
+
+static store_t *
+imap_open_store( imap_server_conf_t *srvc )
+{
+ imap_store_t *ctx;
+ imap_t *imap;
+ char *arg, *rsp;
+ struct hostent *he;
+ struct sockaddr_in addr;
+ int s, a[2], preauth;
+
+ ctx = xcalloc( sizeof(*ctx), 1 );
+
+ ctx->imap = imap = xcalloc( sizeof(*imap), 1 );
+ imap->buf.sock.fd = -1;
+ imap->in_progress_append = &imap->in_progress;
+
+ /* open connection to IMAP server */
+
+ if (srvc->tunnel) {
+ info( "Starting tunnel '%s'... ", srvc->tunnel );
+
+ if (socketpair( PF_UNIX, SOCK_STREAM, 0, a )) {
+ perror( "socketpair" );
+ exit( 1 );
+ }
+
+ if (fork() == 0) {
+ if (dup2( a[0], 0 ) == -1 || dup2( a[0], 1 ) == -1)
+ _exit( 127 );
+ close( a[0] );
+ close( a[1] );
+ execl( "/bin/sh", "sh", "-c", srvc->tunnel, NULL );
+ _exit( 127 );
+ }
+
+ close (a[0]);
+
+ imap->buf.sock.fd = a[1];
+
+ info( "ok\n" );
+ } else {
+ memset( &addr, 0, sizeof(addr) );
+ addr.sin_port = htons( srvc->port );
+ addr.sin_family = AF_INET;
+
+ info( "Resolving %s... ", srvc->host );
+ he = gethostbyname( srvc->host );
+ if (!he) {
+ perror( "gethostbyname" );
+ goto bail;
+ }
+ info( "ok\n" );
+
+ addr.sin_addr.s_addr = *((int *) he->h_addr_list[0]);
+
+ s = socket( PF_INET, SOCK_STREAM, 0 );
+
+ info( "Connecting to %s:%hu... ", inet_ntoa( addr.sin_addr ), ntohs( addr.sin_port ) );
+ if (connect( s, (struct sockaddr *)&addr, sizeof(addr) )) {
+ close( s );
+ perror( "connect" );
+ goto bail;
+ }
+ info( "ok\n" );
+
+ imap->buf.sock.fd = s;
+
+ }
+
+ /* read the greeting string */
+ if (buffer_gets( &imap->buf, &rsp )) {
+ fprintf( stderr, "IMAP error: no greeting response\n" );
+ goto bail;
+ }
+ arg = next_arg( &rsp );
+ if (!arg || *arg != '*' || (arg = next_arg( &rsp )) == NULL) {
+ fprintf( stderr, "IMAP error: invalid greeting response\n" );
+ goto bail;
+ }
+ preauth = 0;
+ if (!strcmp( "PREAUTH", arg ))
+ preauth = 1;
+ else if (strcmp( "OK", arg ) != 0) {
+ fprintf( stderr, "IMAP error: unknown greeting response\n" );
+ goto bail;
+ }
+ parse_response_code( ctx, NULL, rsp );
+ if (!imap->caps && imap_exec( ctx, NULL, "CAPABILITY" ) != RESP_OK)
+ goto bail;
+
+ if (!preauth) {
+
+ info ("Logging in...\n");
+ if (!srvc->user) {
+ fprintf( stderr, "Skipping server %s, no user\n", srvc->host );
+ goto bail;
+ }
+ if (!srvc->pass) {
+ char prompt[80];
+ sprintf( prompt, "Password (%s@%s): ", srvc->user, srvc->host );
+ arg = getpass( prompt );
+ if (!arg) {
+ perror( "getpass" );
+ exit( 1 );
+ }
+ if (!*arg) {
+ fprintf( stderr, "Skipping account %s@%s, no password\n", srvc->user, srvc->host );
+ goto bail;
+ }
+ /*
+ * getpass() returns a pointer to a static buffer. make a copy
+ * for long term storage.
+ */
+ srvc->pass = strdup( arg );
+ }
+ if (CAP(NOLOGIN)) {
+ fprintf( stderr, "Skipping account %s@%s, server forbids LOGIN\n", srvc->user, srvc->host );
+ goto bail;
+ }
+ warn( "*** IMAP Warning *** Password is being sent in the clear\n" );
+ if (imap_exec( ctx, NULL, "LOGIN \"%s\" \"%s\"", srvc->user, srvc->pass ) != RESP_OK) {
+ fprintf( stderr, "IMAP error: LOGIN failed\n" );
+ goto bail;
+ }
+ } /* !preauth */
+
+ ctx->prefix = "";
+ ctx->trashnc = 1;
+ return (store_t *)ctx;
+
+ bail:
+ imap_close_store( &ctx->gen );
+ return NULL;
+}
+
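+/* Render the flag bits as an IMAP flag list such as "(\Seen \Draft)" and return the number of characters written to buf. */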
+static int
+imap_make_flags( int flags, char *buf )
+{
+ const char *s;
+ unsigned i, d;
+
+ for (i = d = 0; i < ARRAY_SIZE(Flags); i++)
+ if (flags & (1 << i)) {
+ buf[d++] = ' ';
+ buf[d++] = '\\';
+ for (s = Flags[i]; *s; s++)
+ buf[d++] = *s;
+ }
+ buf[0] = '(';
+ buf[d++] = ')';
+ return d;
+}
+
+#define TUIDL 8
+
+static int
+imap_store_msg( store_t *gctx, msg_data_t *data, int *uid )
+{
+ imap_store_t *ctx = (imap_store_t *)gctx;
+ imap_t *imap = ctx->imap;
+ struct imap_cmd_cb cb;
+ char *fmap, *buf;
+ const char *prefix, *box;
+ int ret, i, j, d, len, extra, nocr;
+ int start, sbreak = 0, ebreak = 0;
+ char flagstr[128], tuid[TUIDL * 2 + 1];
+
+ memset( &cb, 0, sizeof(cb) );
+
+ fmap = data->data;
+ len = data->len;
+ nocr = !data->crlf;
+ extra = 0, i = 0;
+ if (!CAP(UIDPLUS) && uid) {
+ nloop:
+ start = i;
+ while (i < len)
+ if (fmap[i++] == '\n') {
+ extra += nocr;
+ if (i - 2 + nocr == start) {
+ sbreak = ebreak = i - 2 + nocr;
+ goto mktid;
+ }
+ if (!memcmp( fmap + start, "X-TUID: ", 8 )) {
+ extra -= (ebreak = i) - (sbreak = start) + nocr;
+ goto mktid;
+ }
+ goto nloop;
+ }
+ /* invalid message */
+ free( fmap );
+ return DRV_MSG_BAD;
+ mktid:
+ for (j = 0; j < TUIDL; j++)
+ sprintf( tuid + j * 2, "%02x", arc4_getbyte() );
+ extra += 8 + TUIDL * 2 + 2;
+ }
+ if (nocr)
+ for (; i < len; i++)
+ if (fmap[i] == '\n')
+ extra++;
+
+ cb.dlen = len + extra;
+ buf = cb.data = xmalloc( cb.dlen );
+ i = 0;
+ if (!CAP(UIDPLUS) && uid) {
+ if (nocr) {
+ for (; i < sbreak; i++)
+ if (fmap[i] == '\n') {
+ *buf++ = '\r';
+ *buf++ = '\n';
+ } else
+ *buf++ = fmap[i];
+ } else {
+ memcpy( buf, fmap, sbreak );
+ buf += sbreak;
+ }
+ memcpy( buf, "X-TUID: ", 8 );
+ buf += 8;
+ memcpy( buf, tuid, TUIDL * 2 );
+ buf += TUIDL * 2;
+ *buf++ = '\r';
+ *buf++ = '\n';
+ i = ebreak;
+ }
+ if (nocr) {
+ for (; i < len; i++)
+ if (fmap[i] == '\n') {
+ *buf++ = '\r';
+ *buf++ = '\n';
+ } else
+ *buf++ = fmap[i];
+ } else
+ memcpy( buf, fmap + i, len - i );
+
+ free( fmap );
+
+ d = 0;
+ if (data->flags) {
+ d = imap_make_flags( data->flags, flagstr );
+ flagstr[d++] = ' ';
+ }
+ flagstr[d] = 0;
+
+ if (!uid) {
+ box = gctx->conf->trash;
+ prefix = ctx->prefix;
+ cb.create = 1;
+ if (ctx->trashnc)
+ imap->caps = imap->rcaps & ~(1 << LITERALPLUS);
+ } else {
+ box = gctx->name;
+ prefix = !strcmp( box, "INBOX" ) ? "" : ctx->prefix;
+ cb.create = 0;
+ }
+ cb.ctx = uid;
+ ret = imap_exec_m( ctx, &cb, "APPEND \"%s%s\" %s", prefix, box, flagstr );
+ imap->caps = imap->rcaps;
+ if (ret != DRV_OK)
+ return ret;
+ if (!uid)
+ ctx->trashnc = 0;
+ else
+ gctx->count++;
+
+ return DRV_OK;
+}
+
+#define CHUNKSIZE 0x1000
+
+static int
+read_message( FILE *f, msg_data_t *msg )
+{
+ int len, r;
+
+ memset( msg, 0, sizeof *msg );
+ len = CHUNKSIZE;
+ msg->data = xmalloc( len+1 );
+ msg->data[0] = 0;
+
+ while(!feof( f )) {
+ if (msg->len >= len) {
+ void *p;
+ len += CHUNKSIZE;
+ p = xrealloc(msg->data, len+1);
+ if (!p)
+ break;
+ }
+ r = fread( &msg->data[msg->len], 1, len - msg->len, f );
+ if (r <= 0)
+ break;
+ msg->len += r;
+ }
+ msg->data[msg->len] = 0;
+ return msg->len;
+}
+
+static int
+count_messages( msg_data_t *msg )
+{
+ int count = 0;
+ char *p = msg->data;
+
+ while (1) {
+ if (!strncmp( "From ", p, 5 )) {
+ count++;
+ p += 5;
+ }
+ p = strstr( p+5, "\nFrom ");
+ if (!p)
+ break;
+ p++;
+ }
+ return count;
+}
+
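+/* Extract the next "From "-delimited message starting at *ofs; returns 1 and advances *ofs on success, 0 when done or on malformed input. */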
+static int
+split_msg( msg_data_t *all_msgs, msg_data_t *msg, int *ofs )
+{
+ char *p, *data;
+
+ memset( msg, 0, sizeof *msg );
+ if (*ofs >= all_msgs->len)
+ return 0;
+
+ data = &all_msgs->data[ *ofs ];
+ msg->len = all_msgs->len - *ofs;
+
+ if (msg->len < 5 || strncmp( data, "From ", 5 ))
+ return 0;
+
+ p = strstr( data, "\nFrom " );
+ if (p)
+ msg->len = &p[1] - data;
+
+ msg->data = xmalloc( msg->len + 1 );
+ if (!msg->data)
+ return 0;
+
+ memcpy( msg->data, data, msg->len );
+ msg->data[ msg->len ] = 0;
+
+ *ofs += msg->len;
+ return 1;
+}
+
+static imap_server_conf_t server =
+{
+ NULL, /* name */
+ NULL, /* tunnel */
+ NULL, /* host */
+ 0, /* port */
+ NULL, /* user */
+ NULL, /* pass */
+};
+
+static char *imap_folder;
+
+static int
+git_imap_config(const char *key, const char *val)
+{
+ char imap_key[] = "imap.";
+
+ if (strncmp( key, imap_key, sizeof imap_key - 1 ))
+ return 0;
+ key += sizeof imap_key - 1;
+
+ if (!strcmp( "folder", key )) {
+ imap_folder = strdup( val );
+ } else if (!strcmp( "host", key )) {
+ {
+ if (!strncmp( "imap:", val, 5 ))
+ val += 5;
+ if (!server.port)
+ server.port = 143;
+ }
+ if (!strncmp( "//", val, 2 ))
+ val += 2;
+ server.host = strdup( val );
+ }
+ else if (!strcmp( "user", key ))
+ server.user = strdup( val );
+ else if (!strcmp( "pass", key ))
+ server.pass = strdup( val );
+ else if (!strcmp( "port", key ))
+ server.port = git_config_int( key, val );
+ else if (!strcmp( "tunnel", key ))
+ server.tunnel = strdup( val );
+ return 0;
+}
+
+int
+main(int argc, char **argv)
+{
+ msg_data_t all_msgs, msg;
+ store_t *ctx = NULL;
+ int uid = 0;
+ int ofs = 0;
+ int r;
+ int total, n = 0;
+
+ /* init the random number generator */
+ arc4_init();
+
+ git_config( git_imap_config );
+
+ if (!imap_folder) {
+ fprintf( stderr, "no imap store specified\n" );
+ return 1;
+ }
+
+ /* read the messages */
+ if (!read_message( stdin, &all_msgs )) {
+ fprintf(stderr,"nothing to send\n");
+ return 1;
+ }
+
+ /* write it to the imap server */
+ ctx = imap_open_store( &server );
+ if (!ctx) {
+ fprintf( stderr,"failed to open store\n");
+ return 1;
+ }
+
+ total = count_messages( &all_msgs );
+ fprintf( stderr, "sending %d message%s\n", total, (total!=1)?"s":"" );
+ ctx->name = imap_folder;
+ while (1) {
+ unsigned percent = n * 100 / total;
+ fprintf( stderr, "%4u%% (%d/%d) done\r", percent, n, total );
+ if (!split_msg( &all_msgs, &msg, &ofs ))
+ break;
+ r = imap_store_msg( ctx, &msg, &uid );
+ if (r != DRV_OK) break;
+ n++;
+ }
+ fprintf( stderr,"\n" );
+
+ imap_close_store( ctx );
+
+ return 0;
+}
diff --git a/index-pack.c b/index-pack.c
index babe34b..b39953d 100644
--- a/index-pack.c
+++ b/index-pack.c
@@ -2,6 +2,10 @@
#include "delta.h"
#include "pack.h"
#include "csum-file.h"
+#include "blob.h"
+#include "commit.h"
+#include "tag.h"
+#include "tree.h"
static const char index_pack_usage[] =
"git-index-pack [-o index-file] pack-file";
@@ -224,10 +228,10 @@ static void sha1_object(const void *data, unsigned long size,
const char *type_str;
switch (type) {
- case OBJ_COMMIT: type_str = "commit"; break;
- case OBJ_TREE: type_str = "tree"; break;
- case OBJ_BLOB: type_str = "blob"; break;
- case OBJ_TAG: type_str = "tag"; break;
+ case OBJ_COMMIT: type_str = commit_type; break;
+ case OBJ_TREE: type_str = tree_type; break;
+ case OBJ_BLOB: type_str = blob_type; break;
+ case OBJ_TAG: type_str = tag_type; break;
default:
die("bad type %d", type);
}
diff --git a/ls-files.c b/ls-files.c
index 7024cf1..4a4af1c 100644
--- a/ls-files.c
+++ b/ls-files.c
@@ -11,6 +11,7 @@
#include "cache.h"
#include "quote.h"
+static int abbrev = 0;
static int show_deleted = 0;
static int show_cached = 0;
static int show_others = 0;
@@ -20,11 +21,15 @@ static int show_unmerged = 0;
static int show_modified = 0;
static int show_killed = 0;
static int show_other_directories = 0;
+static int hide_empty_directories = 0;
+static int show_valid_bit = 0;
static int line_terminator = '\n';
static int prefix_len = 0, prefix_offset = 0;
static const char *prefix = NULL;
static const char **pathspec = NULL;
+static int error_unmatch = 0;
+static char *ps_matched = NULL;
static const char *tag_cached = "";
static const char *tag_unmerged = "";
@@ -89,11 +94,12 @@ static int add_excludes_from_file_1(const char *fname,
close(fd);
return 0;
}
- buf = xmalloc(size);
+ buf = xmalloc(size+1);
if (read(fd, buf, size) != size)
goto err;
close(fd);
+ buf[size++] = '\n';
entry = buf;
for (i = 0; i < size; i++) {
if (buf[i] == '\n') {
@@ -254,11 +260,12 @@ static int dir_exists(const char *dirname, int len)
* Also, we ignore the name ".git" (even if it is not a directory).
* That likely will not change.
*/
-static void read_directory(const char *path, const char *base, int baselen)
+static int read_directory(const char *path, const char *base, int baselen)
{
- DIR *dir = opendir(path);
+ DIR *fdir = opendir(path);
+ int contents = 0;
- if (dir) {
+ if (fdir) {
int exclude_stk;
struct dirent *de;
char fullname[MAXPATHLEN + 1];
@@ -266,7 +273,7 @@ static void read_directory(const char *path, const char *base, int baselen)
exclude_stk = push_exclude_per_directory(base, baselen);
- while ((de = readdir(dir)) != NULL) {
+ while ((de = readdir(fdir)) != NULL) {
int len;
if ((de->d_name[0] == '.') &&
@@ -276,11 +283,15 @@ static void read_directory(const char *path, const char *base, int baselen)
continue;
len = strlen(de->d_name);
memcpy(fullname + baselen, de->d_name, len+1);
- if (excluded(fullname) != show_ignored)
- continue;
+ if (excluded(fullname) != show_ignored) {
+ if (!show_ignored || DTYPE(de) != DT_DIR) {
+ continue;
+ }
+ }
switch (DTYPE(de)) {
struct stat st;
+ int subdir, rewind_base;
default:
continue;
case DT_UNKNOWN:
@@ -294,22 +305,32 @@ static void read_directory(const char *path, const char *base, int baselen)
case DT_DIR:
memcpy(fullname + baselen + len, "/", 2);
len++;
+ rewind_base = nr_dir;
+ subdir = read_directory(fullname, fullname,
+ baselen + len);
if (show_other_directories &&
- !dir_exists(fullname, baselen + len))
+ (subdir || !hide_empty_directories) &&
+ !dir_exists(fullname, baselen + len)) {
+ /* Rewind: drop the entries collected from this subdirectory */
+ while (nr_dir > rewind_base)
+ free(dir[--nr_dir]);
break;
- read_directory(fullname, fullname,
- baselen + len);
+ }
+ contents += subdir;
continue;
case DT_REG:
case DT_LNK:
break;
}
add_name(fullname, baselen + len);
+ contents++;
}
- closedir(dir);
+ closedir(fdir);
pop_exclude_per_directory(exclude_stk);
}
+
+ return contents;
}
static int cmp_name(const void *p1, const void *p2)
@@ -325,7 +346,8 @@ static int cmp_name(const void *p1, const void *p2)
* Match a pathspec against a filename. The first "len" characters
* are the common prefix
*/
-static int match(const char **spec, const char *filename, int len)
+static int match(const char **spec, char *ps_matched,
+ const char *filename, int len)
{
const char *m;
@@ -333,17 +355,24 @@ static int match(const char **spec, const char *filename, int len)
int matchlen = strlen(m + len);
if (!matchlen)
- return 1;
+ goto matched;
if (!strncmp(m + len, filename + len, matchlen)) {
if (m[len + matchlen - 1] == '/')
- return 1;
+ goto matched;
switch (filename[len + matchlen]) {
case '/': case '\0':
- return 1;
+ goto matched;
}
}
if (!fnmatch(m + len, filename + len, 0))
- return 1;
+ goto matched;
+ if (ps_matched)
+ ps_matched++;
+ continue;
+ matched:
+ if (ps_matched)
+ *ps_matched = 1;
+ return 1;
}
return 0;
}
@@ -356,7 +385,7 @@ static void show_dir_entry(const char *tag, struct nond_on_fs *ent)
if (len >= ent->len)
die("git-ls-files: internal error - directory entry not superset of prefix");
- if (pathspec && !match(pathspec, ent->name, len))
+ if (pathspec && !match(pathspec, ps_matched, ent->name, len))
return;
fputs(tag, stdout);
@@ -444,9 +473,26 @@ static void show_ce_entry(const char *tag, struct cache_entry *ce)
if (len >= ce_namelen(ce))
die("git-ls-files: internal error - cache entry not superset of prefix");
- if (pathspec && !match(pathspec, ce->name, len))
+ if (pathspec && !match(pathspec, ps_matched, ce->name, len))
return;
+ if (tag && *tag && show_valid_bit &&
+ (ce->ce_flags & htons(CE_VALID))) {
+ static char alttag[4];
+ memcpy(alttag, tag, 3);
+ if (isalpha(tag[0]))
+ alttag[0] = tolower(tag[0]);
+ else if (tag[0] == '?')
+ alttag[0] = '!';
+ else {
+ alttag[0] = 'v';
+ alttag[1] = tag[0];
+ alttag[2] = ' ';
+ alttag[3] = 0;
+ }
+ tag = alttag;
+ }
+
if (!show_stage) {
fputs(tag, stdout);
write_name_quoted("", 0, ce->name + offset,
@@ -457,7 +503,8 @@ static void show_ce_entry(const char *tag, struct cache_entry *ce)
printf("%s%06o %s %d\t",
tag,
ntohl(ce->ce_mode),
- sha1_to_hex(ce->sha1),
+ abbrev ? find_unique_abbrev(ce->sha1,abbrev)
+ : sha1_to_hex(ce->sha1),
ce_stage(ce));
write_name_quoted("", 0, ce->name + offset,
line_terminator, stdout);
@@ -523,7 +570,7 @@ static void show_files(void)
err = lstat(ce->name, &st);
if (show_deleted && err)
show_ce_entry(tag_removed, ce);
- if (show_modified && ce_modified(ce, &st))
+ if (show_modified && ce_modified(ce, &st, 0))
show_ce_entry(tag_modified, ce);
}
}
@@ -596,9 +643,10 @@ static void verify_pathspec(void)
}
static const char ls_files_usage[] =
- "git-ls-files [-z] [-t] (--[cached|deleted|others|stage|unmerged|killed|modified])* "
+ "git-ls-files [-z] [-t] [-v] (--[cached|deleted|others|stage|unmerged|killed|modified])* "
"[ --ignored ] [--exclude=<pattern>] [--exclude-from=<file>] "
- "[ --exclude-per-directory=<filename> ] [--full-name] [--] [<file>]*";
+ "[ --exclude-per-directory=<filename> ] [--full-name] [--abbrev] "
+ "[--] [<file>]*";
int main(int argc, const char **argv)
{
@@ -621,13 +669,15 @@ int main(int argc, const char **argv)
line_terminator = 0;
continue;
}
- if (!strcmp(arg, "-t")) {
+ if (!strcmp(arg, "-t") || !strcmp(arg, "-v")) {
tag_cached = "H ";
tag_unmerged = "M ";
tag_removed = "R ";
tag_modified = "C ";
tag_other = "? ";
tag_killed = "K ";
+ if (arg[1] == 'v')
+ show_valid_bit = 1;
continue;
}
if (!strcmp(arg, "-c") || !strcmp(arg, "--cached")) {
@@ -662,6 +712,10 @@ int main(int argc, const char **argv)
show_other_directories = 1;
continue;
}
+ if (!strcmp(arg, "--no-empty-directory")) {
+ hide_empty_directories = 1;
+ continue;
+ }
if (!strcmp(arg, "-u") || !strcmp(arg, "--unmerged")) {
/* There's no point in showing unmerged unless
* you also show the stage information.
@@ -699,6 +753,22 @@ int main(int argc, const char **argv)
prefix_offset = 0;
continue;
}
+ if (!strcmp(arg, "--error-unmatch")) {
+ error_unmatch = 1;
+ continue;
+ }
+ if (!strncmp(arg, "--abbrev=", 9)) {
+ abbrev = strtoul(arg+9, NULL, 10);
+ if (abbrev && abbrev < MINIMUM_ABBREV)
+ abbrev = MINIMUM_ABBREV;
+ else if (abbrev > 40)
+ abbrev = 40;
+ continue;
+ }
+ if (!strcmp(arg, "--abbrev")) {
+ abbrev = DEFAULT_ABBREV;
+ continue;
+ }
if (*arg == '-')
usage(ls_files_usage);
break;
@@ -710,6 +780,14 @@ int main(int argc, const char **argv)
if (pathspec)
verify_pathspec();
+ /* Treat unmatched pathspec elements as errors */
+ if (pathspec && error_unmatch) {
+ int num;
+ for (num = 0; pathspec[num]; num++)
+ ;
+ ps_matched = xcalloc(1, num);
+ }
+
if (show_ignored && !exc_given) {
fprintf(stderr, "%s: --ignored needs some exclude pattern\n",
argv[0]);
@@ -725,5 +803,21 @@ int main(int argc, const char **argv)
if (prefix)
prune_cache();
show_files();
+
+ if (ps_matched) {
+ /* We need to make sure all pathspec matched otherwise
+ * it is an error.
+ */
+ int num, errors = 0;
+ for (num = 0; pathspec[num]; num++) {
+ if (ps_matched[num])
+ continue;
+ error("pathspec '%s' did not match any.",
+ pathspec[num] + prefix_offset);
+ errors++;
+ }
+ return errors ? 1 : 0;
+ }
+
return 0;
}
diff --git a/ls-tree.c b/ls-tree.c
index d005643..e4ef200 100644
--- a/ls-tree.c
+++ b/ls-tree.c
@@ -13,13 +13,14 @@ static int line_termination = '\n';
#define LS_TREE_ONLY 2
#define LS_SHOW_TREES 4
#define LS_NAME_ONLY 8
+static int abbrev = 0;
static int ls_options = 0;
const char **pathspec;
static int chomp_prefix = 0;
static const char *prefix;
static const char ls_tree_usage[] =
- "git-ls-tree [-d] [-r] [-t] [-z] [--name-only] [--name-status] [--full-name] <tree-ish> [path...]";
+ "git-ls-tree [-d] [-r] [-t] [-z] [--name-only] [--name-status] [--full-name] [--abbrev[=<n>]] <tree-ish> [path...]";
static int show_recursive(const char *base, int baselen, const char *pathname)
{
@@ -55,7 +56,7 @@ static int show_tree(unsigned char *sha1, const char *base, int baselen,
const char *pathname, unsigned mode, int stage)
{
int retval = 0;
- const char *type = "blob";
+ const char *type = blob_type;
if (S_ISDIR(mode)) {
if (show_recursive(base, baselen, pathname)) {
@@ -63,7 +64,7 @@ static int show_tree(unsigned char *sha1, const char *base, int baselen,
if (!(ls_options & LS_SHOW_TREES))
return retval;
}
- type = "tree";
+ type = tree_type;
}
else if (ls_options & LS_TREE_ONLY)
return 0;
@@ -73,7 +74,9 @@ static int show_tree(unsigned char *sha1, const char *base, int baselen,
return 0;
if (!(ls_options & LS_NAME_ONLY))
- printf("%06o %s %s\t", mode, type, sha1_to_hex(sha1));
+ printf("%06o %s %s\t", mode, type,
+ abbrev ? find_unique_abbrev(sha1,abbrev)
+ : sha1_to_hex(sha1));
write_name_quoted(base + chomp_prefix, baselen - chomp_prefix,
pathname,
line_termination, stdout);
@@ -87,6 +90,7 @@ int main(int argc, const char **argv)
struct tree *tree;
prefix = setup_git_directory();
+ git_config(git_default_config);
if (prefix && *prefix)
chomp_prefix = strlen(prefix);
while (1 < argc && argv[1][0] == '-') {
@@ -113,6 +117,18 @@ int main(int argc, const char **argv)
chomp_prefix = 0;
break;
}
+ if (!strncmp(argv[1]+2, "abbrev=",7)) {
+ abbrev = strtoul(argv[1]+9, NULL, 10);
+ if (abbrev && abbrev < MINIMUM_ABBREV)
+ abbrev = MINIMUM_ABBREV;
+ else if (abbrev > 40)
+ abbrev = 40;
+ break;
+ }
+ if (!strcmp(argv[1]+2, "abbrev")) {
+ abbrev = DEFAULT_ABBREV;
+ break;
+ }
/* otherwise fallthru */
default:
usage(ls_tree_usage);
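
Both ls-files and ls-tree now parse --abbrev the same way: a bare --abbrev picks the default width, while --abbrev=<n> is clamped between the minimum abbreviation length and a full 40-hex-digit SHA-1. A small sketch of that clamping follows; the constant values are assumed here for illustration (the real ones come from cache.h, which is not part of this patch).

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Values assumed for illustration; the real constants live in cache.h. */
    #define MINIMUM_ABBREV 4
    #define DEFAULT_ABBREV 7

    static int parse_abbrev(const char *arg)
    {
        if (!strcmp(arg, "--abbrev"))
            return DEFAULT_ABBREV;
        if (!strncmp(arg, "--abbrev=", 9)) {
            unsigned long n = strtoul(arg + 9, NULL, 10);
            if (n && n < MINIMUM_ABBREV)
                n = MINIMUM_ABBREV;     /* too short to stay unique */
            else if (n > 40)
                n = 40;                 /* a full SHA-1 is 40 hex digits */
            return (int)n;
        }
        return 0;   /* not an --abbrev option */
    }

    int main(void)
    {
        /* prints "7 4 40" */
        printf("%d %d %d\n", parse_abbrev("--abbrev"),
               parse_abbrev("--abbrev=2"), parse_abbrev("--abbrev=99"));
        return 0;
    }
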
diff --git a/mailinfo.c b/mailinfo.c
index ff2d4d4..3c56f8c 100644
--- a/mailinfo.c
+++ b/mailinfo.c
@@ -7,7 +7,9 @@
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
+#ifndef NO_ICONV
#include <iconv.h>
+#endif
#include "git-compat-util.h"
#include "cache.h"
@@ -469,6 +471,7 @@ static int decode_b_segment(char *in, char *ot, char *ep)
static void convert_to_utf8(char *line, char *charset)
{
+#ifndef NO_ICONV
char *in, *out;
size_t insize, outsize, nrc;
char outbuf[4096]; /* cheat */
@@ -501,6 +504,7 @@ static void convert_to_utf8(char *line, char *charset)
return;
*out = 0;
strcpy(line, outbuf);
+#endif
}
static void decode_header_bq(char *it)
diff --git a/merge-base.c b/merge-base.c
index e73fca7..07f5ab4 100644
--- a/merge-base.c
+++ b/merge-base.c
@@ -237,6 +237,7 @@ int main(int argc, char **argv)
unsigned char rev1key[20], rev2key[20];
setup_git_directory();
+ git_config(git_default_config);
while (1 < argc && argv[1][0] == '-') {
char *arg = argv[1];
diff --git a/merge-tree.c b/merge-tree.c
new file mode 100644
index 0000000..50528d5
--- /dev/null
+++ b/merge-tree.c
@@ -0,0 +1,178 @@
+#include "cache.h"
+#include "tree-walk.h"
+
+static const char merge_tree_usage[] = "git-merge-tree <base-tree> <branch1> <branch2>";
+static int resolve_directories = 1;
+
+static void merge_trees(struct tree_desc t[3], const char *base);
+
+/* An empty entry never compares same, not even to another empty entry */
+static int same_entry(struct name_entry *a, struct name_entry *b)
+{
+ return a->sha1 &&
+ b->sha1 &&
+ !memcmp(a->sha1, b->sha1, 20) &&
+ a->mode == b->mode;
+}
+
+static const char *sha1_to_hex_zero(const unsigned char *sha1)
+{
+ if (sha1)
+ return sha1_to_hex(sha1);
+ return "0000000000000000000000000000000000000000";
+}
+
+static void resolve(const char *base, struct name_entry *branch1, struct name_entry *result)
+{
+ char branch1_sha1[50];
+
+ /* If it's already branch1, don't bother showing it */
+ if (!branch1)
+ return;
+ memcpy(branch1_sha1, sha1_to_hex_zero(branch1->sha1), 41);
+
+ printf("0 %06o->%06o %s->%s %s%s\n",
+ branch1->mode, result->mode,
+ branch1_sha1, sha1_to_hex_zero(result->sha1),
+ base, result->path);
+}
+
+static int unresolved_directory(const char *base, struct name_entry n[3])
+{
+ int baselen;
+ char *newbase;
+ struct name_entry *p;
+ struct tree_desc t[3];
+ void *buf0, *buf1, *buf2;
+
+ if (!resolve_directories)
+ return 0;
+ p = n;
+ if (!p->mode) {
+ p++;
+ if (!p->mode)
+ p++;
+ }
+ if (!S_ISDIR(p->mode))
+ return 0;
+ baselen = strlen(base);
+ newbase = xmalloc(baselen + p->pathlen + 2);
+ memcpy(newbase, base, baselen);
+ memcpy(newbase + baselen, p->path, p->pathlen);
+ memcpy(newbase + baselen + p->pathlen, "/", 2);
+
+ buf0 = fill_tree_descriptor(t+0, n[0].sha1);
+ buf1 = fill_tree_descriptor(t+1, n[1].sha1);
+ buf2 = fill_tree_descriptor(t+2, n[2].sha1);
+ merge_trees(t, newbase);
+
+ free(buf0);
+ free(buf1);
+ free(buf2);
+ free(newbase);
+ return 1;
+}
+
+static void unresolved(const char *base, struct name_entry n[3])
+{
+ if (unresolved_directory(base, n))
+ return;
+ if (n[0].sha1)
+ printf("1 %06o %s %s%s\n", n[0].mode, sha1_to_hex(n[0].sha1), base, n[0].path);
+ if (n[1].sha1)
+ printf("2 %06o %s %s%s\n", n[1].mode, sha1_to_hex(n[1].sha1), base, n[1].path);
+ if (n[2].sha1)
+ printf("3 %06o %s %s%s\n", n[2].mode, sha1_to_hex(n[2].sha1), base, n[2].path);
+}
+
+/*
+ * Merge two trees together (t[1] and t[2]), using a common base (t[0])
+ * as the origin.
+ *
+ * This walks the (sorted) trees in lock-step, checking every possible
+ * name. Note that directories automatically sort differently from other
+ * files (see "base_name_compare"), so you'll never see file/directory
+ * conflicts, because they won't ever compare the same.
+ *
+ * IOW, if a directory changes to a filename, it will automatically be
+ * seen as the directory going away, and the filename being created.
+ *
+ * Think of this as a three-way diff.
+ *
+ * The output will be either:
+ * - successful merge
+ * "0 mode sha1 filename"
+ * NOTE NOTE NOTE! FIXME! We really really need to walk the index
+ * in parallel with this too!
+ *
+ * - conflict:
+ * "1 mode sha1 filename"
+ * "2 mode sha1 filename"
+ * "3 mode sha1 filename"
+ * where not all of the 1/2/3 lines may exist, of course.
+ *
+ * The successful merge rules are the same as for the three-way merge
+ * in git-read-tree.
+ */
+static void threeway_callback(int n, unsigned long mask, struct name_entry *entry, const char *base)
+{
+ /* Same in both? */
+ if (same_entry(entry+1, entry+2)) {
+ if (entry[0].sha1) {
+ resolve(base, NULL, entry+1);
+ return;
+ }
+ }
+
+ if (same_entry(entry+0, entry+1)) {
+ if (entry[2].sha1 && !S_ISDIR(entry[2].mode)) {
+ resolve(base, entry+1, entry+2);
+ return;
+ }
+ }
+
+ if (same_entry(entry+0, entry+2)) {
+ if (entry[1].sha1 && !S_ISDIR(entry[1].mode)) {
+ resolve(base, NULL, entry+1);
+ return;
+ }
+ }
+
+ unresolved(base, entry);
+}
+
+static void merge_trees(struct tree_desc t[3], const char *base)
+{
+ traverse_trees(3, t, base, threeway_callback);
+}
+
+static void *get_tree_descriptor(struct tree_desc *desc, const char *rev)
+{
+ unsigned char sha1[20];
+ void *buf;
+
+ if (get_sha1(rev, sha1) < 0)
+ die("unknown rev %s", rev);
+ buf = fill_tree_descriptor(desc, sha1);
+ if (!buf)
+ die("%s is not a tree", rev);
+ return buf;
+}
+
+int main(int argc, char **argv)
+{
+ struct tree_desc t[3];
+ void *buf1, *buf2, *buf3;
+
+ if (argc < 4)
+ usage(merge_tree_usage);
+
+ buf1 = get_tree_descriptor(t+0, argv[1]);
+ buf2 = get_tree_descriptor(t+1, argv[2]);
+ buf3 = get_tree_descriptor(t+2, argv[3]);
+ merge_trees(t, "");
+ free(buf1);
+ free(buf2);
+ free(buf3);
+ return 0;
+}
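
The threeway_callback above encodes the familiar trivial-merge rules: if both branches made the identical change, take it; if one side is unchanged from the base, take the other side; anything else is left as the stage-1/2/3 conflict entries. The sketch below is a simplified illustration of that decision only (the real callback additionally refuses cases where the base entry is missing or where directories are involved), using plain strings in place of sha1/mode pairs.

    #include <stdio.h>
    #include <string.h>

    /* Returns 1 and sets *result when the three-way case is trivially
     * resolved, 0 when the caller must emit the stage-1/2/3 conflict
     * entries instead.  NULL stands for "entry missing on that side".
     */
    static int trivially_resolved(const char *base, const char *ours,
                                  const char *theirs, const char **result)
    {
        /* both sides made the same change */
        if (ours && theirs && !strcmp(ours, theirs)) {
            *result = ours;
            return 1;
        }
        /* only "theirs" changed relative to the base: take theirs */
        if (base && ours && !strcmp(base, ours) && theirs) {
            *result = theirs;
            return 1;
        }
        /* only "ours" changed relative to the base: take ours */
        if (base && theirs && !strcmp(base, theirs) && ours) {
            *result = ours;
            return 1;
        }
        return 0;   /* both sides changed, and differently */
    }

    int main(void)
    {
        const char *result;
        if (trivially_resolved("v1", "v1", "v2", &result))
            printf("take %s\n", result);    /* prints "take v2" */
        if (!trivially_resolved("v1", "v2", "v3", &result))
            printf("conflict\n");           /* prints "conflict" */
        return 0;
    }
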
diff --git a/mktag.c b/mktag.c
index fc6a9bf..2328878 100644
--- a/mktag.c
+++ b/mktag.c
@@ -1,4 +1,5 @@
#include "cache.h"
+#include "tag.h"
/*
* A signature file has a very simple fixed format: three lines
@@ -126,7 +127,7 @@ int main(int argc, char **argv)
if (verify_tag(buffer, size) < 0)
die("invalid tag signature file");
- if (write_sha1_file(buffer, size, "tag", result_sha1) < 0)
+ if (write_sha1_file(buffer, size, tag_type, result_sha1) < 0)
die("unable to write tag file");
printf("%s\n", sha1_to_hex(result_sha1));
return 0;
diff --git a/mktree.c b/mktree.c
new file mode 100644
index 0000000..ab63cd9
--- /dev/null
+++ b/mktree.c
@@ -0,0 +1,138 @@
+/*
+ * GIT - the stupid content tracker
+ *
+ * Copyright (c) Junio C Hamano, 2006
+ */
+#include "cache.h"
+#include "strbuf.h"
+#include "quote.h"
+#include "tree.h"
+
+static struct treeent {
+ unsigned mode;
+ unsigned char sha1[20];
+ int len;
+ char name[FLEX_ARRAY];
+} **entries;
+static int alloc, used;
+
+static void append_to_tree(unsigned mode, unsigned char *sha1, char *path)
+{
+ struct treeent *ent;
+ int len = strlen(path);
+ if (strchr(path, '/'))
+ die("path %s contains slash", path);
+
+ if (alloc <= used) {
+ alloc = alloc_nr(used);
+ entries = xrealloc(entries, sizeof(*entries) * alloc);
+ }
+ ent = entries[used++] = xmalloc(sizeof(**entries) + len + 1);
+ ent->mode = mode;
+ ent->len = len;
+ memcpy(ent->sha1, sha1, 20);
+ memcpy(ent->name, path, len+1);
+}
+
+static int ent_compare(const void *a_, const void *b_)
+{
+ struct treeent *a = *(struct treeent **)a_;
+ struct treeent *b = *(struct treeent **)b_;
+ return base_name_compare(a->name, a->len, a->mode,
+ b->name, b->len, b->mode);
+}
+
+static void write_tree(unsigned char *sha1)
+{
+ char *buffer;
+ unsigned long size, offset;
+ int i;
+
+ qsort(entries, used, sizeof(*entries), ent_compare);
+ size = 100;
+ for (size = i = 0; i < used; i++)
+ size += 32 + entries[i]->len;
+ buffer = xmalloc(size);
+ offset = 0;
+
+ for (i = 0; i < used; i++) {
+ struct treeent *ent = entries[i];
+
+ if (offset + ent->len + 100 < size) {
+ size = alloc_nr(offset + ent->len + 100);
+ buffer = xrealloc(buffer, size);
+ }
+ offset += sprintf(buffer + offset, "%o ", ent->mode);
+ offset += sprintf(buffer + offset, "%s", ent->name);
+ buffer[offset++] = 0;
+ memcpy(buffer + offset, ent->sha1, 20);
+ offset += 20;
+ }
+ write_sha1_file(buffer, offset, tree_type, sha1);
+}
+
+static const char mktree_usage[] = "mktree [-z]";
+
+int main(int ac, char **av)
+{
+ struct strbuf sb;
+ unsigned char sha1[20];
+ int line_termination = '\n';
+
+ setup_git_directory();
+
+ while ((1 < ac) && av[1][0] == '-') {
+ char *arg = av[1];
+ if (!strcmp("-z", arg))
+ line_termination = 0;
+ else
+ usage(mktree_usage);
+ ac--;
+ av++;
+ }
+
+ strbuf_init(&sb);
+ while (1) {
+ int len;
+ char *ptr, *ntr;
+ unsigned mode;
+ char type[20];
+ char *path;
+
+ read_line(&sb, stdin, line_termination);
+ if (sb.eof)
+ break;
+ len = sb.len;
+ ptr = sb.buf;
+ /* Input is non-recursive ls-tree output format
+ * mode SP type SP sha1 TAB name
+ */
+ mode = strtoul(ptr, &ntr, 8);
+ if (ptr == ntr || !ntr || *ntr != ' ')
+ die("input format error: %s", sb.buf);
+ ptr = ntr + 1; /* type */
+ ntr = strchr(ptr, ' ');
+ if (!ntr || sb.buf + len <= ntr + 41 ||
+ ntr[41] != '\t' ||
+ get_sha1_hex(ntr + 1, sha1))
+ die("input format error: %s", sb.buf);
+ if (sha1_object_info(sha1, type, NULL))
+ die("object %s unavailable", sha1_to_hex(sha1));
+ *ntr++ = 0; /* now at the beginning of SHA1 */
+ if (strcmp(ptr, type))
+ die("object type %s mismatch (%s)", ptr, type);
+ ntr += 41; /* at the beginning of name */
+ if (line_termination && ntr[0] == '"')
+ path = unquote_c_style(ntr, NULL);
+ else
+ path = ntr;
+
+ append_to_tree(mode, sha1, path);
+
+ if (path != ntr)
+ free(path);
+ }
+ write_tree(sha1);
+ puts(sha1_to_hex(sha1));
+ exit(0);
+}
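
git-mktree reads the non-recursive ls-tree output format, "mode SP type SP sha1 TAB name", one entry per line. The standalone sketch below shows one way such a line can be split up; unlike the real command it does no quoted-path (-z) handling and does not verify that the named object actually exists.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Split one non-recursive ls-tree line: "mode SP type SP sha1 TAB name". */
    static int parse_ls_tree_line(char *line, unsigned *mode,
                                  char **type, char **hex, char **name)
    {
        char *sp1, *sp2, *tab;

        *mode = strtoul(line, &sp1, 8);
        if (sp1 == line || *sp1 != ' ')
            return -1;
        *type = sp1 + 1;
        sp2 = strchr(*type, ' ');
        if (!sp2)
            return -1;
        *sp2 = '\0';
        *hex = sp2 + 1;
        tab = strchr(*hex, '\t');
        if (!tab || tab - *hex != 40)   /* a SHA-1 is 40 hex digits */
            return -1;
        *tab = '\0';
        *name = tab + 1;
        return 0;
    }

    int main(void)
    {
        char line[] = "100644 blob 5716ca5987cbf97d6bb54920bea6adde242d87e6\tfoo";
        unsigned mode;
        char *type, *hex, *name;

        if (!parse_ls_tree_line(line, &mode, &type, &hex, &name))
            printf("%06o %s %s %s\n", mode, type, hex, name);
        return 0;
    }
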
diff --git a/name-rev.c b/name-rev.c
index 0c3f547..bad8a53 100644
--- a/name-rev.c
+++ b/name-rev.c
@@ -127,6 +127,7 @@ int main(int argc, char **argv)
int as_is = 0, all = 0, transform_stdin = 0;
setup_git_directory();
+ git_config(git_default_config);
if (argc < 2)
usage(name_rev_usage);
diff --git a/object.c b/object.c
index c9ca481..4d46e0d 100644
--- a/object.c
+++ b/object.c
@@ -85,8 +85,7 @@ struct object_refs *alloc_object_refs(unsigned count)
struct object_refs *refs;
size_t size = sizeof(*refs) + count*sizeof(struct object *);
- refs = xmalloc(size);
- memset(refs, 0, size);
+ refs = xcalloc(1, size);
refs->count = count;
return refs;
}
@@ -178,8 +177,7 @@ struct object *lookup_unknown_object(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
if (!obj) {
- union any_object *ret = xmalloc(sizeof(*ret));
- memset(ret, 0, sizeof(*ret));
+ union any_object *ret = xcalloc(1, sizeof(*ret));
created_object(sha1, &ret->object);
ret->object.type = NULL;
return &ret->object;
@@ -196,15 +194,15 @@ struct object *parse_object(const unsigned char *sha1)
struct object *obj;
if (check_sha1_signature(sha1, buffer, size, type) < 0)
printf("sha1 mismatch %s\n", sha1_to_hex(sha1));
- if (!strcmp(type, "blob")) {
+ if (!strcmp(type, blob_type)) {
struct blob *blob = lookup_blob(sha1);
parse_blob_buffer(blob, buffer, size);
obj = &blob->object;
- } else if (!strcmp(type, "tree")) {
+ } else if (!strcmp(type, tree_type)) {
struct tree *tree = lookup_tree(sha1);
parse_tree_buffer(tree, buffer, size);
obj = &tree->object;
- } else if (!strcmp(type, "commit")) {
+ } else if (!strcmp(type, commit_type)) {
struct commit *commit = lookup_commit(sha1);
parse_commit_buffer(commit, buffer, size);
if (!commit->buffer) {
@@ -212,7 +210,7 @@ struct object *parse_object(const unsigned char *sha1)
buffer = NULL;
}
obj = &commit->object;
- } else if (!strcmp(type, "tag")) {
+ } else if (!strcmp(type, tag_type)) {
struct tag *tag = lookup_tag(sha1);
parse_tag_buffer(tag, buffer, size);
obj = &tag->object;
diff --git a/pack-check.c b/pack-check.c
index 67a7ecd..84ed90d 100644
--- a/pack-check.c
+++ b/pack-check.c
@@ -70,13 +70,17 @@ static int verify_packfile(struct packed_git *p)
}
+#define MAX_CHAIN 40
+
static void show_pack_info(struct packed_git *p)
{
struct pack_header *hdr;
int nr_objects, i;
+ unsigned int chain_histogram[MAX_CHAIN];
hdr = p->pack_base;
nr_objects = ntohl(hdr->hdr_entries);
+ memset(chain_histogram, 0, sizeof(chain_histogram));
for (i = 0; i < nr_objects; i++) {
unsigned char sha1[20], base_sha1[20];
@@ -84,7 +88,7 @@ static void show_pack_info(struct packed_git *p)
char type[20];
unsigned long size;
unsigned long store_size;
- int delta_chain_length;
+ unsigned int delta_chain_length;
if (nth_packed_object_sha1(p, i, sha1))
die("internal error pack-check nth-packed-object");
@@ -97,11 +101,25 @@ static void show_pack_info(struct packed_git *p)
printf("%s ", sha1_to_hex(sha1));
if (!delta_chain_length)
printf("%-6s %lu %u\n", type, size, e.offset);
- else
- printf("%-6s %lu %u %d %s\n", type, size, e.offset,
+ else {
+ printf("%-6s %lu %u %u %s\n", type, size, e.offset,
delta_chain_length, sha1_to_hex(base_sha1));
+ if (delta_chain_length < MAX_CHAIN)
+ chain_histogram[delta_chain_length]++;
+ else
+ chain_histogram[0]++;
+ }
}
+ for (i = 0; i < MAX_CHAIN; i++) {
+ if (!chain_histogram[i])
+ continue;
+ printf("chain length %s %d: %d object%s\n",
+ i ? "=" : ">=",
+ i ? i : MAX_CHAIN,
+ chain_histogram[i],
+ 1 < chain_histogram[i] ? "s" : "");
+ }
}
int verify_pack(struct packed_git *p, int verbose)
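
The histogram added to show_pack_info reuses slot 0 for chains of MAX_CHAIN or longer; that slot is otherwise free because objects with a delta chain length of zero never reach the histogram (they take the non-delta branch of the printout). A tiny standalone illustration of that bucketing, with made-up sample lengths:

    #include <stdio.h>

    #define MAX_CHAIN 40

    int main(void)
    {
        unsigned int histogram[MAX_CHAIN] = { 0 };
        int lengths[] = { 1, 1, 3, 57 };    /* sample delta chain lengths */
        int i;

        for (i = 0; i < 4; i++) {
            if (lengths[i] < MAX_CHAIN)
                histogram[lengths[i]]++;
            else
                histogram[0]++;     /* slot 0 is free: length 0 is never a delta */
        }
        for (i = 0; i < MAX_CHAIN; i++) {
            if (!histogram[i])
                continue;
            printf("chain length %s %d: %u object%s\n",
                   i ? "=" : ">=", i ? i : MAX_CHAIN,
                   histogram[i], histogram[i] > 1 ? "s" : "");
        }
        return 0;
    }
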
diff --git a/pack-objects.c b/pack-objects.c
index 7d62477..9346392 100644
--- a/pack-objects.c
+++ b/pack-objects.c
@@ -1,8 +1,13 @@
#include "cache.h"
#include "object.h"
+#include "blob.h"
+#include "commit.h"
+#include "tag.h"
+#include "tree.h"
#include "delta.h"
#include "pack.h"
#include "csum-file.h"
+#include "tree-walk.h"
#include <sys/time.h>
#include <signal.h>
@@ -27,6 +32,10 @@ struct object_entry {
struct object_entry *delta_sibling; /* other deltified objects who
* uses the same base as me
*/
+	int preferred_base;	/* we do not pack this, but it is encouraged to
+				 * be used as the base object to delta huge
+ * objects against.
+ */
};
/*
@@ -49,7 +58,7 @@ static int local = 0;
static int incremental = 0;
static struct object_entry **sorted_by_sha, **sorted_by_type;
static struct object_entry *objects = NULL;
-static int nr_objects = 0, nr_alloc = 0;
+static int nr_objects = 0, nr_alloc = 0, nr_result = 0;
static const char *base_name;
static unsigned char pack_file_sha1[20];
static int progress = 1;
@@ -91,7 +100,7 @@ static int reused_delta = 0;
static int pack_revindex_ix(struct packed_git *p)
{
- unsigned int ui = (unsigned int) p;
+ unsigned long ui = (unsigned long)p;
int i;
ui = ui ^ (ui >> 16); /* defeat structure alignment */
@@ -231,7 +240,8 @@ static int encode_header(enum object_type type, unsigned long size, unsigned cha
return n;
}
-static unsigned long write_object(struct sha1file *f, struct object_entry *entry)
+static unsigned long write_object(struct sha1file *f,
+ struct object_entry *entry)
{
unsigned long size;
char type[10];
@@ -241,6 +251,9 @@ static unsigned long write_object(struct sha1file *f, struct object_entry *entry
enum object_type obj_type;
int to_reuse = 0;
+ if (entry->preferred_base)
+ return 0;
+
obj_type = entry->type;
if (! entry->in_pack)
to_reuse = 0; /* can't reuse what we don't have */
@@ -335,20 +348,22 @@ static void write_pack_file(void)
do_progress = progress;
}
if (do_progress)
- fprintf(stderr, "Writing %d objects.\n", nr_objects);
+ fprintf(stderr, "Writing %d objects.\n", nr_result);
hdr.hdr_signature = htonl(PACK_SIGNATURE);
hdr.hdr_version = htonl(PACK_VERSION);
- hdr.hdr_entries = htonl(nr_objects);
+ hdr.hdr_entries = htonl(nr_result);
sha1write(f, &hdr, sizeof(hdr));
offset = sizeof(hdr);
+ if (!nr_result)
+ goto done;
for (i = 0; i < nr_objects; i++) {
offset = write_one(f, objects + i, offset);
if (do_progress) {
- unsigned percent = written * 100 / nr_objects;
+ unsigned percent = written * 100 / nr_result;
if (progress_update || percent != last_percent) {
fprintf(stderr, "%4u%% (%u/%u) done\r",
- percent, written, nr_objects);
+ percent, written, nr_result);
progress_update = 0;
last_percent = percent;
}
@@ -356,16 +371,17 @@ static void write_pack_file(void)
}
if (do_progress)
fputc('\n', stderr);
-
+ done:
sha1close(f, pack_file_sha1, 1);
}
static void write_index_file(void)
{
int i;
- struct sha1file *f = sha1create("%s-%s.%s", base_name, sha1_to_hex(object_list_sha1), "idx");
+ struct sha1file *f = sha1create("%s-%s.%s", base_name,
+ sha1_to_hex(object_list_sha1), "idx");
struct object_entry **list = sorted_by_sha;
- struct object_entry **last = list + nr_objects;
+ struct object_entry **last = list + nr_result;
unsigned int array[256];
/*
@@ -390,7 +406,7 @@ static void write_index_file(void)
* Write the actual SHA1 entries..
*/
list = sorted_by_sha;
- for (i = 0; i < nr_objects; i++) {
+ for (i = 0; i < nr_result; i++) {
struct object_entry *entry = *list++;
unsigned int offset = htonl(entry->offset);
sha1write(f, &offset, 4);
@@ -400,27 +416,139 @@ static void write_index_file(void)
sha1close(f, NULL, 1);
}
-static int add_object_entry(unsigned char *sha1, unsigned int hash)
+static int locate_object_entry_hash(const unsigned char *sha1)
+{
+ int i;
+ unsigned int ui;
+ memcpy(&ui, sha1, sizeof(unsigned int));
+ i = ui % object_ix_hashsz;
+ while (0 < object_ix[i]) {
+ if (!memcmp(sha1, objects[object_ix[i]-1].sha1, 20))
+ return i;
+ if (++i == object_ix_hashsz)
+ i = 0;
+ }
+ return -1 - i;
+}
+
+static struct object_entry *locate_object_entry(const unsigned char *sha1)
+{
+ int i;
+
+ if (!object_ix_hashsz)
+ return NULL;
+
+ i = locate_object_entry_hash(sha1);
+ if (0 <= i)
+ return &objects[object_ix[i]-1];
+ return NULL;
+}
+
+static void rehash_objects(void)
+{
+ int i;
+ struct object_entry *oe;
+
+ object_ix_hashsz = nr_objects * 3;
+ if (object_ix_hashsz < 1024)
+ object_ix_hashsz = 1024;
+ object_ix = xrealloc(object_ix, sizeof(int) * object_ix_hashsz);
+ object_ix = memset(object_ix, 0, sizeof(int) * object_ix_hashsz);
+ for (i = 0, oe = objects; i < nr_objects; i++, oe++) {
+ int ix = locate_object_entry_hash(oe->sha1);
+ if (0 <= ix)
+ continue;
+ ix = -1 - ix;
+ object_ix[ix] = i + 1;
+ }
+}
+
+struct name_path {
+ struct name_path *up;
+ const char *elem;
+ int len;
+};
+
+#define DIRBITS 12
+
+static unsigned name_hash(struct name_path *path, const char *name)
+{
+ struct name_path *p = path;
+ const char *n = name + strlen(name);
+ unsigned hash = 0, name_hash = 0, name_done = 0;
+
+ if (n != name && n[-1] == '\n')
+ n--;
+ while (name <= --n) {
+ unsigned char c = *n;
+ if (c == '/' && !name_done) {
+ name_hash = hash;
+ name_done = 1;
+ hash = 0;
+ }
+ hash = hash * 11 + c;
+ }
+ if (!name_done) {
+ name_hash = hash;
+ hash = 0;
+ }
+ for (p = path; p; p = p->up) {
+ hash = hash * 11 + '/';
+ n = p->elem + p->len;
+ while (p->elem <= --n) {
+ unsigned char c = *n;
+ hash = hash * 11 + c;
+ }
+ }
+ /*
+ * Make sure "Makefile" and "t/Makefile" are hashed separately
+ * but close enough.
+ */
+ hash = (name_hash<<DIRBITS) | (hash & ((1U<<DIRBITS )-1));
+
+ if (0) { /* debug */
+ n = name + strlen(name);
+ if (n != name && n[-1] == '\n')
+ n--;
+ while (name <= --n)
+ fputc(*n, stderr);
+ for (p = path; p; p = p->up) {
+ fputc('/', stderr);
+ n = p->elem + p->len;
+ while (p->elem <= --n)
+ fputc(*n, stderr);
+ }
+ fprintf(stderr, "\t%08x\n", hash);
+ }
+ return hash;
+}
+
+static int add_object_entry(const unsigned char *sha1, unsigned hash, int exclude)
{
unsigned int idx = nr_objects;
struct object_entry *entry;
struct packed_git *p;
unsigned int found_offset = 0;
struct packed_git *found_pack = NULL;
-
- for (p = packed_git; p; p = p->next) {
- struct pack_entry e;
- if (find_pack_entry_one(sha1, &e, p)) {
- if (incremental)
- return 0;
- if (local && !p->pack_local)
- return 0;
- if (!found_pack) {
- found_offset = e.offset;
- found_pack = e.p;
+ int ix, status = 0;
+
+ if (!exclude) {
+ for (p = packed_git; p; p = p->next) {
+ struct pack_entry e;
+ if (find_pack_entry_one(sha1, &e, p)) {
+ if (incremental)
+ return 0;
+ if (local && !p->pack_local)
+ return 0;
+ if (!found_pack) {
+ found_offset = e.offset;
+ found_pack = e.p;
+ }
}
}
}
+ if ((entry = locate_object_entry(sha1)) != NULL)
+ goto already_added;
if (idx >= nr_alloc) {
unsigned int needed = (idx + 1024) * 3 / 2;
@@ -428,45 +556,94 @@ static int add_object_entry(unsigned char *sha1, unsigned int hash)
nr_alloc = needed;
}
entry = objects + idx;
+ nr_objects = idx + 1;
memset(entry, 0, sizeof(*entry));
memcpy(entry->sha1, sha1, 20);
entry->hash = hash;
- if (found_pack) {
- entry->in_pack = found_pack;
- entry->in_pack_offset = found_offset;
+
+ if (object_ix_hashsz * 3 <= nr_objects * 4)
+ rehash_objects();
+ else {
+ ix = locate_object_entry_hash(entry->sha1);
+ if (0 <= ix)
+ die("internal error in object hashing.");
+ object_ix[-1 - ix] = idx + 1;
}
- nr_objects = idx+1;
- return 1;
+ status = 1;
+
+ already_added:
+ if (progress_update) {
+ fprintf(stderr, "Counting objects...%d\r", nr_objects);
+ progress_update = 0;
+ }
+ if (exclude)
+ entry->preferred_base = 1;
+ else {
+ if (found_pack) {
+ entry->in_pack = found_pack;
+ entry->in_pack_offset = found_offset;
+ }
+ }
+ return status;
}
-static int locate_object_entry_hash(unsigned char *sha1)
+static void add_pbase_tree(struct tree_desc *tree, struct name_path *up)
{
- int i;
- unsigned int ui;
- memcpy(&ui, sha1, sizeof(unsigned int));
- i = ui % object_ix_hashsz;
- while (0 < object_ix[i]) {
- if (!memcmp(sha1, objects[object_ix[i]-1].sha1, 20))
- return i;
- if (++i == object_ix_hashsz)
- i = 0;
+ while (tree->size) {
+ const unsigned char *sha1;
+ const char *name;
+ unsigned mode, hash;
+ unsigned long size;
+ char type[20];
+
+ sha1 = tree_entry_extract(tree, &name, &mode);
+ update_tree_entry(tree);
+ if (!has_sha1_file(sha1))
+ continue;
+ if (sha1_object_info(sha1, type, &size))
+ continue;
+
+ hash = name_hash(up, name);
+ if (!add_object_entry(sha1, hash, 1))
+ continue;
+
+ if (!strcmp(type, tree_type)) {
+ struct tree_desc sub;
+ void *elem;
+ struct name_path me;
+
+ elem = read_sha1_file(sha1, type, &sub.size);
+ sub.buf = elem;
+ if (sub.buf) {
+ me.up = up;
+ me.elem = name;
+ me.len = strlen(name);
+ add_pbase_tree(&sub, &me);
+ free(elem);
+ }
+ }
}
- return -1 - i;
}
-static struct object_entry *locate_object_entry(unsigned char *sha1)
+static void add_preferred_base(unsigned char *sha1)
{
- int i = locate_object_entry_hash(sha1);
- if (0 <= i)
- return &objects[object_ix[i]-1];
- return NULL;
+ struct tree_desc tree;
+ void *elem;
+
+ elem = read_object_with_reference(sha1, tree_type, &tree.size, NULL);
+ tree.buf = elem;
+ if (!tree.buf)
+ return;
+ if (add_object_entry(sha1, name_hash(NULL, ""), 1))
+ add_pbase_tree(&tree, NULL);
+ free(elem);
}
static void check_object(struct object_entry *entry)
{
char type[20];
- if (entry->in_pack) {
+ if (entry->in_pack && !entry->preferred_base) {
unsigned char base[20];
unsigned long size;
struct object_entry *base_entry;
@@ -485,7 +662,8 @@ static void check_object(struct object_entry *entry)
*/
if (!no_reuse_delta &&
entry->in_pack_type == OBJ_DELTA &&
- (base_entry = locate_object_entry(base))) {
+ (base_entry = locate_object_entry(base)) &&
+ (!base_entry->preferred_base)) {
/* Depth value does not matter - find_deltas()
* will never consider reused delta as the
@@ -510,38 +688,19 @@ static void check_object(struct object_entry *entry)
die("unable to get type of object %s",
sha1_to_hex(entry->sha1));
- if (!strcmp(type, "commit")) {
+ if (!strcmp(type, commit_type)) {
entry->type = OBJ_COMMIT;
- } else if (!strcmp(type, "tree")) {
+ } else if (!strcmp(type, tree_type)) {
entry->type = OBJ_TREE;
- } else if (!strcmp(type, "blob")) {
+ } else if (!strcmp(type, blob_type)) {
entry->type = OBJ_BLOB;
- } else if (!strcmp(type, "tag")) {
+ } else if (!strcmp(type, tag_type)) {
entry->type = OBJ_TAG;
} else
die("unable to pack object %s of type %s",
sha1_to_hex(entry->sha1), type);
}
-static void hash_objects(void)
-{
- int i;
- struct object_entry *oe;
-
- object_ix_hashsz = nr_objects * 2;
- object_ix = xcalloc(sizeof(int), object_ix_hashsz);
- for (i = 0, oe = objects; i < nr_objects; i++, oe++) {
- int ix = locate_object_entry_hash(oe->sha1);
- if (0 <= ix) {
- error("the same object '%s' added twice",
- sha1_to_hex(oe->sha1));
- continue;
- }
- ix = -1 - ix;
- object_ix[ix] = i + 1;
- }
-}
-
static unsigned int check_delta_limit(struct object_entry *me, unsigned int n)
{
struct object_entry *child = me->delta_child;
@@ -560,14 +719,26 @@ static void get_object_details(void)
int i;
struct object_entry *entry;
- hash_objects();
prepare_pack_ix();
for (i = 0, entry = objects; i < nr_objects; i++, entry++)
check_object(entry);
- for (i = 0, entry = objects; i < nr_objects; i++, entry++)
- if (!entry->delta && entry->delta_child)
- entry->delta_limit =
- check_delta_limit(entry, 1);
+
+ if (nr_objects == nr_result) {
+ /*
+ * Depth of objects that depend on the entry -- this
+		 * is subtracted from depth-max to avoid delta chains
+		 * that grow too deep because of delta data reuse.
+ * However, we loosen this restriction when we know we
+ * are creating a thin pack -- it will have to be
+ * expanded on the other end anyway, so do not
+ * artificially cut the delta chain and let it go as
+ * deep as it wants.
+ */
+ for (i = 0, entry = objects; i < nr_objects; i++, entry++)
+ if (!entry->delta && entry->delta_child)
+ entry->delta_limit =
+ check_delta_limit(entry, 1);
+ }
}
typedef int (*entry_sort_t)(const struct object_entry *, const struct object_entry *);
@@ -598,6 +769,24 @@ static int sha1_sort(const struct object_entry *a, const struct object_entry *b)
return memcmp(a->sha1, b->sha1, 20);
}
+static struct object_entry **create_final_object_list(void)
+{
+ struct object_entry **list;
+ int i, j;
+
+ for (i = nr_result = 0; i < nr_objects; i++)
+ if (!objects[i].preferred_base)
+ nr_result++;
+ list = xmalloc(nr_result * sizeof(struct object_entry *));
+ for (i = j = 0; i < nr_objects; i++) {
+ if (!objects[i].preferred_base)
+ list[j++] = objects + i;
+ }
+ current_sort = sha1_sort;
+ qsort(list, nr_result, sizeof(struct object_entry *), sort_comparator);
+ return list;
+}
+
static int type_size_sort(const struct object_entry *a, const struct object_entry *b)
{
if (a->type < b->type)
@@ -608,6 +797,10 @@ static int type_size_sort(const struct object_entry *a, const struct object_entr
return -1;
if (a->hash > b->hash)
return 1;
+ if (a->preferred_base < b->preferred_base)
+ return -1;
+ if (a->preferred_base > b->preferred_base)
+ return 1;
if (a->size < b->size)
return -1;
if (a->size > b->size)
@@ -640,9 +833,15 @@ static int try_delta(struct unpacked *cur, struct unpacked *old, unsigned max_de
if (cur_entry->type != old_entry->type)
return -1;
- /* If the current object is at edge, take the depth the objects
- * that depend on the current object into account -- otherwise
- * they would become too deep.
+ /* We do not compute delta to *create* objects we are not
+ * going to pack.
+ */
+ if (cur_entry->preferred_base)
+ return -1;
+
+ /* If the current object is at pack edge, take the depth the
+ * objects that depend on the current object into account --
+ * otherwise they would become too deep.
*/
if (cur_entry->delta_child) {
if (max_depth <= cur_entry->delta_limit)
@@ -651,11 +850,10 @@ static int try_delta(struct unpacked *cur, struct unpacked *old, unsigned max_de
}
size = cur_entry->size;
- if (size < 50)
- return -1;
oldsize = old_entry->size;
sizediff = oldsize > size ? oldsize - size : size - oldsize;
- if (sizediff > size / 8)
+
+ if (size < 50)
return -1;
if (old_entry->depth >= max_depth)
return 0;
@@ -700,7 +898,7 @@ static void find_deltas(struct object_entry **list, int window, int depth)
i = nr_objects;
idx = 0;
if (progress)
- fprintf(stderr, "Deltifying %d objects.\n", nr_objects);
+ fprintf(stderr, "Deltifying %d objects.\n", nr_result);
while (--i >= 0) {
struct object_entry *entry = list[i];
@@ -709,12 +907,14 @@ static void find_deltas(struct object_entry **list, int window, int depth)
char type[10];
int j;
- processed++;
+ if (!entry->preferred_base)
+ processed++;
+
if (progress) {
- unsigned percent = processed * 100 / nr_objects;
+ unsigned percent = processed * 100 / nr_result;
if (percent != last_percent || progress_update) {
fprintf(stderr, "%4u%% (%u/%u) done\r",
- percent, processed, nr_objects);
+ percent, processed, nr_result);
progress_update = 0;
last_percent = percent;
}
@@ -744,6 +944,15 @@ static void find_deltas(struct object_entry **list, int window, int depth)
if (try_delta(n, m, depth) < 0)
break;
}
+#if 0
+ /* if we made n a delta, and if n is already at max
+ * depth, leaving it in the window is pointless. we
+ * should evict it first.
+ * ... in theory only; somehow this makes things worse.
+ */
+ if (entry->delta && depth <= entry->depth)
+ continue;
+#endif
idx++;
if (idx >= window)
idx = 0;
@@ -906,8 +1115,6 @@ int main(int argc, char **argv)
}
for (;;) {
- unsigned int hash;
- char *p;
unsigned char sha1[20];
if (!fgets(line, sizeof(line), stdin)) {
@@ -921,40 +1128,38 @@ int main(int argc, char **argv)
continue;
}
- if (progress_update) {
- fprintf(stderr, "Counting objects...%d\r", nr_objects);
- progress_update = 0;
+ if (line[0] == '-') {
+ if (get_sha1_hex(line+1, sha1))
+ die("expected edge sha1, got garbage:\n %s",
+ line+1);
+ add_preferred_base(sha1);
+ continue;
}
if (get_sha1_hex(line, sha1))
die("expected sha1, got garbage:\n %s", line);
- hash = 0;
- p = line+40;
- while (*p) {
- unsigned char c = *p++;
- if (isspace(c))
- continue;
- hash = hash * 11 + c;
- }
- add_object_entry(sha1, hash);
+ add_object_entry(sha1, name_hash(NULL, line+41), 0);
}
if (progress)
fprintf(stderr, "Done counting %d objects.\n", nr_objects);
- if (non_empty && !nr_objects)
+ sorted_by_sha = create_final_object_list();
+ if (non_empty && !nr_result)
return 0;
- sorted_by_sha = create_sorted_list(sha1_sort);
SHA1_Init(&ctx);
list = sorted_by_sha;
- for (i = 0; i < nr_objects; i++) {
+ for (i = 0; i < nr_result; i++) {
struct object_entry *entry = *list++;
SHA1_Update(&ctx, entry->sha1, 20);
}
SHA1_Final(object_list_sha1, &ctx);
+ if (progress && (nr_objects != nr_result))
+ fprintf(stderr, "Result has %d objects.\n", nr_result);
if (reuse_cached_pack(object_list_sha1, pack_to_stdout))
;
else {
- prepare_pack(window, depth);
+ if (nr_result)
+ prepare_pack(window, depth);
if (progress && pack_to_stdout) {
/* the other end usually displays progress itself */
struct itimerval v = {{0,},};
@@ -970,6 +1175,6 @@ int main(int argc, char **argv)
}
if (progress)
fprintf(stderr, "Total %d, written %d (delta %d), reused %d (delta %d)\n",
- nr_objects, written, written_delta, reused, reused_delta);
+ nr_result, written, written_delta, reused, reused_delta);
return 0;
}
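
The new object lookup in pack-objects.c relies on a small open-addressing convention worth spelling out: locate_object_entry_hash() returns the table slot when the key is found, and -1 - slot for the free slot where it should be inserted, while the table itself stores index + 1 so that zero can keep meaning "empty". The sketch below shows the same convention with strings standing in for 20-byte SHA-1s; the fixed table size and toy hash are assumptions for illustration only (the real table is grown by rehash_objects() as objects accumulate).

    #include <stdio.h>
    #include <string.h>

    #define HASHSZ 16   /* toy fixed size; the real table is resized as it fills */

    static const char *keys[HASHSZ];    /* stored entries */
    static int table[HASHSZ];           /* 0 = empty, otherwise keys[] index + 1 */
    static int nr_keys;

    static unsigned toy_hash(const char *key)
    {
        unsigned h = 0;
        while (*key)
            h = h * 11 + (unsigned char)*key++;
        return h;
    }

    /* >= 0: slot holding the key; negative: -1 - (free slot to insert into) */
    static int locate_hash(const char *key)
    {
        unsigned i = toy_hash(key) % HASHSZ;
        while (table[i] > 0) {
            if (!strcmp(key, keys[table[i] - 1]))
                return (int)i;
            if (++i == HASHSZ)
                i = 0;
        }
        return -1 - (int)i;
    }

    static void insert(const char *key)
    {
        int i = locate_hash(key);
        if (0 <= i)
            return;                     /* already present */
        keys[nr_keys] = key;
        table[-1 - i] = ++nr_keys;      /* index + 1, so 0 keeps meaning "empty" */
    }

    int main(void)
    {
        insert("tree");
        insert("blob");
        insert("tree");                 /* duplicate: ignored */
        printf("%d entries; blob at slot %d; tag lookup gives %d\n",
               nr_keys, locate_hash("blob"), locate_hash("tag"));
        return 0;
    }
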
diff --git a/pack-redundant.c b/pack-redundant.c
index 1869b38..cd81f5a 100644
--- a/pack-redundant.c
+++ b/pack-redundant.c
@@ -45,7 +45,7 @@ static inline void llist_item_put(struct llist_item *item)
free_nodes = item;
}
-static inline struct llist_item *llist_item_get()
+static inline struct llist_item *llist_item_get(void)
{
struct llist_item *new;
if ( free_nodes ) {
@@ -275,7 +275,7 @@ static void cmp_two_packs(struct pack_list *p1, struct pack_list *p2)
}
}
-void pll_free(struct pll *l)
+static void pll_free(struct pll *l)
{
struct pll *old;
struct pack_list *opl;
diff --git a/pager.c b/pager.c
new file mode 100644
index 0000000..1364e15
--- /dev/null
+++ b/pager.c
@@ -0,0 +1,48 @@
+#include "cache.h"
+
+/*
+ * This is split up from the rest of git so that we might do
+ * something different on Windows, for example.
+ */
+
+static void run_pager(void)
+{
+ const char *prog = getenv("PAGER");
+ if (!prog)
+ prog = "less";
+ setenv("LESS", "-S", 0);
+ execlp(prog, prog, NULL);
+}
+
+void setup_pager(void)
+{
+ pid_t pid;
+ int fd[2];
+
+ if (!isatty(1))
+ return;
+ if (pipe(fd) < 0)
+ return;
+ pid = fork();
+ if (pid < 0) {
+ close(fd[0]);
+ close(fd[1]);
+ return;
+ }
+
+ /* return in the child */
+ if (!pid) {
+ dup2(fd[1], 1);
+ close(fd[0]);
+ close(fd[1]);
+ return;
+ }
+
+ /* The original process turns into the PAGER */
+ dup2(fd[0], 0);
+ close(fd[0]);
+ close(fd[1]);
+
+ run_pager();
+ exit(255);
+}
diff --git a/read-cache.c b/read-cache.c
index c5474d4..f97f92d 100644
--- a/read-cache.c
+++ b/read-cache.c
@@ -27,6 +27,9 @@ void fill_stat_cache_info(struct cache_entry *ce, struct stat *st)
ce->ce_uid = htonl(st->st_uid);
ce->ce_gid = htonl(st->st_gid);
ce->ce_size = htonl(st->st_size);
+
+ if (assume_unchanged)
+ ce->ce_flags |= htons(CE_VALID);
}
static int ce_compare_data(struct cache_entry *ce, struct stat *st)
@@ -146,9 +149,18 @@ static int ce_match_stat_basic(struct cache_entry *ce, struct stat *st)
return changed;
}
-int ce_match_stat(struct cache_entry *ce, struct stat *st)
+int ce_match_stat(struct cache_entry *ce, struct stat *st, int ignore_valid)
{
- unsigned int changed = ce_match_stat_basic(ce, st);
+ unsigned int changed;
+
+ /*
+ * If it's marked as always valid in the index, it's
+ * valid whatever the checked-out copy says.
+ */
+ if (!ignore_valid && (ce->ce_flags & htons(CE_VALID)))
+ return 0;
+
+ changed = ce_match_stat_basic(ce, st);
/*
* Within 1 second of this sequence:
@@ -164,7 +176,7 @@ int ce_match_stat(struct cache_entry *ce, struct stat *st)
* effectively mean we can make at most one commit per second,
* which is not acceptable. Instead, we check cache entries
* whose mtime are the same as the index file timestamp more
- * careful than others.
+ * carefully than others.
*/
if (!changed &&
index_file_timestamp &&
@@ -174,10 +186,10 @@ int ce_match_stat(struct cache_entry *ce, struct stat *st)
return changed;
}
-int ce_modified(struct cache_entry *ce, struct stat *st)
+int ce_modified(struct cache_entry *ce, struct stat *st, int really)
{
int changed, changed_fs;
- changed = ce_match_stat(ce, st);
+ changed = ce_match_stat(ce, st, really);
if (!changed)
return 0;
/*
@@ -233,6 +245,11 @@ int cache_name_compare(const char *name1, int flags1, const char *name2, int fla
return -1;
if (len1 > len2)
return 1;
+
+ /* Compare stages */
+ flags1 &= CE_STAGEMASK;
+ flags2 &= CE_STAGEMASK;
+
if (flags1 < flags2)
return -1;
if (flags1 > flags2)
@@ -430,6 +447,7 @@ int add_cache_entry(struct cache_entry *ce, int option)
int ok_to_add = option & ADD_CACHE_OK_TO_ADD;
int ok_to_replace = option & ADD_CACHE_OK_TO_REPLACE;
int skip_df_check = option & ADD_CACHE_SKIP_DFCHECK;
+
pos = cache_name_pos(ce->name, ntohs(ce->ce_flags));
/* existing match? Just replace it. */
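
The CE_VALID handling above is the heart of the "assume unchanged" support: when the bit is set, ce_match_stat() reports the entry as unmodified without even looking at the stat data, unless the caller passes ignore_valid to force a real comparison (as read-tree's verify_uptodate() now does). A stripped-down illustration of just that short-circuit; the flag value and struct here are invented for the example.

    #include <stdio.h>

    #define CE_VALID 0x8000     /* flag value assumed for this example only */

    struct toy_entry {
        unsigned short flags;
        unsigned int cached_mtime;
    };

    /* 0 means "looks unchanged", non-zero means "changed" */
    static int toy_match_stat(const struct toy_entry *ce, unsigned int mtime,
                              int ignore_valid)
    {
        if (!ignore_valid && (ce->flags & CE_VALID))
            return 0;   /* user asked us to trust the index over the file */
        return ce->cached_mtime != mtime;
    }

    int main(void)
    {
        struct toy_entry ce = { CE_VALID, 1000 };
        /* prints "0 1": the valid bit hides the change unless overridden */
        printf("%d %d\n", toy_match_stat(&ce, 2000, 0),
               toy_match_stat(&ce, 2000, 1));
        return 0;
    }
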
diff --git a/read-tree.c b/read-tree.c
index 32fb6fa..26f4f7e 100644
--- a/read-tree.c
+++ b/read-tree.c
@@ -133,11 +133,9 @@ static int unpack_trees_rec(struct tree_entry_list **posns, int len,
pathlen = strlen(first);
ce_size = cache_entry_size(baselen + pathlen);
- src = xmalloc(sizeof(struct cache_entry *) * src_size);
- memset(src, 0, sizeof(struct cache_entry *) * src_size);
+ src = xcalloc(src_size, sizeof(struct cache_entry *));
- subposns = xmalloc(sizeof(struct tree_list_entry *) * len);
- memset(subposns, 0, sizeof(struct tree_list_entry *) * len);
+ subposns = xcalloc(len, sizeof(struct tree_list_entry *));
if (cache_name && !strcmp(cache_name, first)) {
any_files = 1;
@@ -177,8 +175,7 @@ static int unpack_trees_rec(struct tree_entry_list **posns, int len,
else
ce_stage = 2;
- ce = xmalloc(ce_size);
- memset(ce, 0, ce_size);
+ ce = xcalloc(1, ce_size);
ce->ce_mode = create_ce_mode(posns[i]->mode);
ce->ce_flags = create_ce_flags(baselen + pathlen,
ce_stage);
@@ -347,7 +344,7 @@ static void check_updates(struct cache_entry **src, int nr)
if (ce->ce_flags & mask) {
ce->ce_flags &= ~mask;
if (update)
- checkout_entry(ce, &state);
+ checkout_entry(ce, &state, NULL);
}
}
if (total) {
@@ -414,7 +411,7 @@ static void verify_uptodate(struct cache_entry *ce)
return;
if (!lstat(ce->name, &st)) {
- unsigned changed = ce_match_stat(ce, &st);
+ unsigned changed = ce_match_stat(ce, &st, 1);
if (!changed)
return;
errno = 0;
@@ -716,7 +713,7 @@ static int read_cache_unmerged(void)
return deleted;
}
-static const char read_tree_usage[] = "git-read-tree (<sha> | -m [-u | -i] <sha1> [<sha2> [<sha3>]])";
+static const char read_tree_usage[] = "git-read-tree (<sha> | -m [--aggressive] [-u | -i] <sha1> [<sha2> [<sha3>]])";
static struct cache_file cache_file;
@@ -727,6 +724,7 @@ int main(int argc, char **argv)
merge_fn_t fn = NULL;
setup_git_directory();
+ git_config(git_default_config);
newfd = hold_index_file_for_update(&cache_file, get_index_file());
if (newfd < 0)
diff --git a/receive-pack.c b/receive-pack.c
index 2a3db16..93929b5 100644
--- a/receive-pack.c
+++ b/receive-pack.c
@@ -6,7 +6,7 @@
static const char receive_pack_usage[] = "git-receive-pack <git-dir>";
-static char *unpacker[] = { "unpack-objects", NULL };
+static const char *unpacker[] = { "unpack-objects", NULL };
static int report_status = 0;
@@ -177,7 +177,7 @@ static void run_update_post_hook(struct command *cmd)
{
struct command *cmd_p;
int argc;
- char **argv;
+ const char **argv;
if (access(update_post_hook, X_OK) < 0)
return;
@@ -190,10 +190,12 @@ static void run_update_post_hook(struct command *cmd)
argv[0] = update_post_hook;
for (argc = 1, cmd_p = cmd; cmd_p; cmd_p = cmd_p->next) {
+ char *p;
if (cmd_p->error_string)
continue;
- argv[argc] = xmalloc(strlen(cmd_p->ref_name) + 1);
- strcpy(argv[argc], cmd_p->ref_name);
+ p = xmalloc(strlen(cmd_p->ref_name) + 1);
+ strcpy(p, cmd_p->ref_name);
+ argv[argc] = p;
argc++;
}
argv[argc] = NULL;
diff --git a/refs.c b/refs.c
index 826ae7a..03398cc 100644
--- a/refs.c
+++ b/refs.c
@@ -151,10 +151,15 @@ static int do_for_each_ref(const char *base, int (*fn)(const char *path, const u
break;
continue;
}
- if (read_ref(git_path("%s", path), sha1) < 0)
+ if (read_ref(git_path("%s", path), sha1) < 0) {
+ error("%s points nowhere!", path);
continue;
- if (!has_sha1_file(sha1))
+ }
+ if (!has_sha1_file(sha1)) {
+ error("%s does not point to a valid "
+ "commit object!", path);
continue;
+ }
retval = fn(path, sha1);
if (retval)
break;
diff --git a/repo-config.c b/repo-config.c
index 9cf6519..c5ebb76 100644
--- a/repo-config.c
+++ b/repo-config.c
@@ -14,6 +14,9 @@ static enum { T_RAW, T_INT, T_BOOL } type = T_RAW;
static int show_config(const char* key_, const char* value_)
{
+ if (value_ == NULL)
+ value_ = "";
+
if (!strcmp(key_, key) &&
(regexp == NULL ||
(do_not_match ^
@@ -35,7 +38,7 @@ static int show_config(const char* key_, const char* value_)
sprintf(value, "%s", git_config_bool(key_, value_)
? "true" : "false");
} else {
- value = strdup(value_ ? value_ : "");
+ value = strdup(value_);
}
seen++;
}
diff --git a/rev-list.c b/rev-list.c
index 63391fc..22141e2 100644
--- a/rev-list.c
+++ b/rev-list.c
@@ -4,15 +4,12 @@
#include "commit.h"
#include "tree.h"
#include "blob.h"
-#include "epoch.h"
-#include "diff.h"
+#include "tree-walk.h"
+#include "revision.h"
-#define SEEN (1u << 0)
-#define INTERESTING (1u << 1)
-#define COUNTED (1u << 2)
-#define SHOWN (1u << 3)
-#define TREECHANGE (1u << 4)
-#define TMP_MARK (1u << 5) /* for isolated cases; clean after use */
+/* bits #0-5 in revision.h */
+
+#define COUNTED (1u<<6)
static const char rev_list_usage[] =
"git-rev-list [OPTION] <commit-id>... [ -- paths... ]\n"
@@ -25,11 +22,11 @@ static const char rev_list_usage[] =
" --remove-empty\n"
" --all\n"
" ordering output:\n"
-" --merge-order [ --show-breaks ]\n"
" --topo-order\n"
+" --date-order\n"
" formatting output:\n"
" --parents\n"
-" --objects\n"
+" --objects | --objects-edge\n"
" --unpacked\n"
" --header | --pretty\n"
" --abbrev=nr | --no-abbrev\n"
@@ -37,42 +34,26 @@ static const char rev_list_usage[] =
" --bisect"
;
-static int dense = 1;
-static int unpacked = 0;
+struct rev_info revs;
+
static int bisect_list = 0;
-static int tag_objects = 0;
-static int tree_objects = 0;
-static int blob_objects = 0;
static int verbose_header = 0;
static int abbrev = DEFAULT_ABBREV;
-static int show_parents = 0;
+static int show_timestamp = 0;
static int hdr_termination = 0;
static const char *commit_prefix = "";
-static unsigned long max_age = -1;
-static unsigned long min_age = -1;
-static int max_count = -1;
static enum cmit_fmt commit_format = CMIT_FMT_RAW;
-static int merge_order = 0;
-static int show_breaks = 0;
-static int stop_traversal = 0;
-static int topo_order = 0;
-static int no_merges = 0;
-static const char **paths = NULL;
-static int remove_empty_trees = 0;
static void show_commit(struct commit *commit)
{
- commit->object.flags |= SHOWN;
- if (show_breaks) {
- commit_prefix = "| ";
- if (commit->object.flags & DISCONTINUITY) {
- commit_prefix = "^ ";
- } else if (commit->object.flags & BOUNDARY) {
- commit_prefix = "= ";
- }
- }
- printf("%s%s", commit_prefix, sha1_to_hex(commit->object.sha1));
- if (show_parents) {
+ if (show_timestamp)
+ printf("%lu ", commit->date);
+ if (commit_prefix[0])
+ fputs(commit_prefix, stdout);
+ if (commit->object.flags & BOUNDARY)
+ putchar('-');
+ fputs(sha1_to_hex(commit->object.sha1), stdout);
+ if (revs.parents) {
struct commit_list *parents = commit->parents;
while (parents) {
struct object *o = &(parents->item->object);
@@ -104,157 +85,87 @@ static void show_commit(struct commit *commit)
fflush(stdout);
}
-static int rewrite_one(struct commit **pp)
-{
- for (;;) {
- struct commit *p = *pp;
- if (p->object.flags & (TREECHANGE | UNINTERESTING))
- return 0;
- if (!p->parents)
- return -1;
- *pp = p->parents->item;
- }
-}
-
-static void rewrite_parents(struct commit *commit)
-{
- struct commit_list **pp = &commit->parents;
- while (*pp) {
- struct commit_list *parent = *pp;
- if (rewrite_one(&parent->item) < 0) {
- *pp = parent->next;
- continue;
- }
- pp = &parent->next;
- }
-}
-
-static int filter_commit(struct commit * commit)
-{
- if (stop_traversal && (commit->object.flags & BOUNDARY))
- return STOP;
- if (commit->object.flags & (UNINTERESTING|SHOWN))
- return CONTINUE;
- if (min_age != -1 && (commit->date > min_age))
- return CONTINUE;
- if (max_age != -1 && (commit->date < max_age)) {
- stop_traversal=1;
- return CONTINUE;
- }
- if (no_merges && (commit->parents && commit->parents->next))
- return CONTINUE;
- if (paths && dense) {
- if (!(commit->object.flags & TREECHANGE))
- return CONTINUE;
- rewrite_parents(commit);
- }
- return DO;
-}
-
-static int process_commit(struct commit * commit)
-{
- int action=filter_commit(commit);
-
- if (action == STOP) {
- return STOP;
- }
-
- if (action == CONTINUE) {
- return CONTINUE;
- }
-
- if (max_count != -1 && !max_count--)
- return STOP;
-
- show_commit(commit);
-
- return CONTINUE;
-}
-
-static struct object_list **add_object(struct object *obj, struct object_list **p, const char *name)
-{
- struct object_list *entry = xmalloc(sizeof(*entry));
- entry->item = obj;
- entry->next = *p;
- entry->name = name;
- *p = entry;
- return &entry->next;
-}
-
-static struct object_list **process_blob(struct blob *blob, struct object_list **p, const char *name)
+static struct object_list **process_blob(struct blob *blob,
+ struct object_list **p,
+ struct name_path *path,
+ const char *name)
{
struct object *obj = &blob->object;
- if (!blob_objects)
+ if (!revs.blob_objects)
return p;
if (obj->flags & (UNINTERESTING | SEEN))
return p;
obj->flags |= SEEN;
- return add_object(obj, p, name);
+ return add_object(obj, p, path, name);
}
-static struct object_list **process_tree(struct tree *tree, struct object_list **p, const char *name)
+static struct object_list **process_tree(struct tree *tree,
+ struct object_list **p,
+ struct name_path *path,
+ const char *name)
{
struct object *obj = &tree->object;
struct tree_entry_list *entry;
+ struct name_path me;
- if (!tree_objects)
+ if (!revs.tree_objects)
return p;
if (obj->flags & (UNINTERESTING | SEEN))
return p;
if (parse_tree(tree) < 0)
die("bad tree object %s", sha1_to_hex(obj->sha1));
obj->flags |= SEEN;
- p = add_object(obj, p, name);
+ p = add_object(obj, p, path, name);
+ me.up = path;
+ me.elem = name;
+ me.elem_len = strlen(name);
entry = tree->entries;
tree->entries = NULL;
while (entry) {
struct tree_entry_list *next = entry->next;
if (entry->directory)
- p = process_tree(entry->item.tree, p, entry->name);
+ p = process_tree(entry->item.tree, p, &me, entry->name);
else
- p = process_blob(entry->item.blob, p, entry->name);
+ p = process_blob(entry->item.blob, p, &me, entry->name);
free(entry);
entry = next;
}
return p;
}
-static struct object_list *pending_objects = NULL;
-
-static void show_commit_list(struct commit_list *list)
+static void show_commit_list(struct rev_info *revs)
{
+ struct commit *commit;
struct object_list *objects = NULL, **p = &objects, *pending;
- while (list) {
- struct commit *commit = pop_most_recent_commit(&list, SEEN);
- p = process_tree(commit->tree, p, "");
- if (process_commit(commit) == STOP)
- break;
+ while ((commit = get_revision(revs)) != NULL) {
+ p = process_tree(commit->tree, p, NULL, "");
+ show_commit(commit);
}
- for (pending = pending_objects; pending; pending = pending->next) {
+ for (pending = revs->pending_objects; pending; pending = pending->next) {
struct object *obj = pending->item;
const char *name = pending->name;
if (obj->flags & (UNINTERESTING | SEEN))
continue;
if (obj->type == tag_type) {
obj->flags |= SEEN;
- p = add_object(obj, p, name);
+ p = add_object(obj, p, NULL, name);
continue;
}
if (obj->type == tree_type) {
- p = process_tree((struct tree *)obj, p, name);
+ p = process_tree((struct tree *)obj, p, NULL, name);
continue;
}
if (obj->type == blob_type) {
- p = process_blob((struct blob *)obj, p, name);
+ p = process_blob((struct blob *)obj, p, NULL, name);
continue;
}
die("unknown pending object %s (%s)", sha1_to_hex(obj->sha1), name);
}
while (objects) {
- /* An object with name "foo\n0000000000000000000000000000000000000000"
- * can be used confuse downstream git-pack-objects very badly.
+ /* An object with name "foo\n0000000..." can be used to
+ * confuse downstream git-pack-objects very badly.
*/
const char *ep = strchr(objects->name, '\n');
if (ep) {
@@ -268,88 +179,6 @@ static void show_commit_list(struct commit_list *list)
}
}
-static void mark_blob_uninteresting(struct blob *blob)
-{
- if (!blob_objects)
- return;
- if (blob->object.flags & UNINTERESTING)
- return;
- blob->object.flags |= UNINTERESTING;
-}
-
-static void mark_tree_uninteresting(struct tree *tree)
-{
- struct object *obj = &tree->object;
- struct tree_entry_list *entry;
-
- if (!tree_objects)
- return;
- if (obj->flags & UNINTERESTING)
- return;
- obj->flags |= UNINTERESTING;
- if (!has_sha1_file(obj->sha1))
- return;
- if (parse_tree(tree) < 0)
- die("bad tree %s", sha1_to_hex(obj->sha1));
- entry = tree->entries;
- tree->entries = NULL;
- while (entry) {
- struct tree_entry_list *next = entry->next;
- if (entry->directory)
- mark_tree_uninteresting(entry->item.tree);
- else
- mark_blob_uninteresting(entry->item.blob);
- free(entry);
- entry = next;
- }
-}
-
-static void mark_parents_uninteresting(struct commit *commit)
-{
- struct commit_list *parents = commit->parents;
-
- while (parents) {
- struct commit *commit = parents->item;
- commit->object.flags |= UNINTERESTING;
-
- /*
- * Normally we haven't parsed the parent
- * yet, so we won't have a parent of a parent
- * here. However, it may turn out that we've
- * reached this commit some other way (where it
- * wasn't uninteresting), in which case we need
- * to mark its parents recursively too..
- */
- if (commit->parents)
- mark_parents_uninteresting(commit);
-
- /*
- * A missing commit is ok iff its parent is marked
- * uninteresting.
- *
- * We just mark such a thing parsed, so that when
- * it is popped next time around, we won't be trying
- * to parse it and get an error.
- */
- if (!has_sha1_file(commit->object.sha1))
- commit->object.parsed = 1;
- parents = parents->next;
- }
-}
-
-static int everybody_uninteresting(struct commit_list *orig)
-{
- struct commit_list *list = orig;
- while (list) {
- struct commit *commit = list->item;
- list = list->next;
- if (commit->object.flags & UNINTERESTING)
- continue;
- return 0;
- }
- return 1;
-}
-
/*
* This is a truly stupid algorithm, but it's only
* used for bisection, and we just don't care enough.
@@ -367,7 +196,7 @@ static int count_distance(struct commit_list *entry)
if (commit->object.flags & (UNINTERESTING | COUNTED))
break;
- if (!paths || (commit->object.flags & TREECHANGE))
+ if (!revs.prune_fn || (commit->object.flags & TREECHANGE))
nr++;
commit->object.flags |= COUNTED;
p = commit->parents;
@@ -401,7 +230,7 @@ static struct commit_list *find_bisection(struct commit_list *list)
nr = 0;
p = list;
while (p) {
- if (!paths || (p->item->object.flags & TREECHANGE))
+ if (!revs.prune_fn || (p->item->object.flags & TREECHANGE))
nr++;
p = p->next;
}
@@ -411,7 +240,7 @@ static struct commit_list *find_bisection(struct commit_list *list)
for (p = list; p; p = p->next) {
int distance;
- if (paths && !(p->item->object.flags & TREECHANGE))
+ if (revs.prune_fn && !(p->item->object.flags & TREECHANGE))
continue;
distance = count_distance(p);
@@ -428,369 +257,58 @@ static struct commit_list *find_bisection(struct commit_list *list)
return best;
}
-static void mark_edges_uninteresting(struct commit_list *list)
+static void mark_edge_parents_uninteresting(struct commit *commit)
{
- for ( ; list; list = list->next) {
- struct commit_list *parents = list->item->parents;
-
- for ( ; parents; parents = parents->next) {
- struct commit *commit = parents->item;
- if (commit->object.flags & UNINTERESTING)
- mark_tree_uninteresting(commit->tree);
- }
- }
-}
-
-#define TREE_SAME 0
-#define TREE_NEW 1
-#define TREE_DIFFERENT 2
-static int tree_difference = TREE_SAME;
-
-static void file_add_remove(struct diff_options *options,
- int addremove, unsigned mode,
- const unsigned char *sha1,
- const char *base, const char *path)
-{
- int diff = TREE_DIFFERENT;
-
- /*
- * Is it an add of a new file? It means that
- * the old tree didn't have it at all, so we
- * will turn "TREE_SAME" -> "TREE_NEW", but
- * leave any "TREE_DIFFERENT" alone (and if
- * it already was "TREE_NEW", we'll keep it
- * "TREE_NEW" of course).
- */
- if (addremove == '+') {
- diff = tree_difference;
- if (diff != TREE_SAME)
- return;
- diff = TREE_NEW;
- }
- tree_difference = diff;
-}
-
-static void file_change(struct diff_options *options,
- unsigned old_mode, unsigned new_mode,
- const unsigned char *old_sha1,
- const unsigned char *new_sha1,
- const char *base, const char *path)
-{
- tree_difference = TREE_DIFFERENT;
-}
-
-static struct diff_options diff_opt = {
- .recursive = 1,
- .add_remove = file_add_remove,
- .change = file_change,
-};
-
-static int compare_tree(struct tree *t1, struct tree *t2)
-{
- if (!t1)
- return TREE_NEW;
- if (!t2)
- return TREE_DIFFERENT;
- tree_difference = TREE_SAME;
- if (diff_tree_sha1(t1->object.sha1, t2->object.sha1, "", &diff_opt) < 0)
- return TREE_DIFFERENT;
- return tree_difference;
-}
-
-static int same_tree_as_empty(struct tree *t1)
-{
- int retval;
- void *tree;
- struct tree_desc empty, real;
-
- if (!t1)
- return 0;
-
- tree = read_object_with_reference(t1->object.sha1, "tree", &real.size, NULL);
- if (!tree)
- return 0;
- real.buf = tree;
-
- empty.buf = "";
- empty.size = 0;
-
- tree_difference = 0;
- retval = diff_tree(&empty, &real, "", &diff_opt);
- free(tree);
-
- return retval >= 0 && !tree_difference;
-}
-
-static void try_to_simplify_commit(struct commit *commit)
-{
- struct commit_list **pp, *parent;
-
- if (!commit->tree)
- return;
-
- if (!commit->parents) {
- if (!same_tree_as_empty(commit->tree))
- commit->object.flags |= TREECHANGE;
- return;
- }
-
- pp = &commit->parents;
- while ((parent = *pp) != NULL) {
- struct commit *p = parent->item;
+ struct commit_list *parents;
- if (p->object.flags & UNINTERESTING) {
- pp = &parent->next;
+ for (parents = commit->parents; parents; parents = parents->next) {
+ struct commit *parent = parents->item;
+ if (!(parent->object.flags & UNINTERESTING))
continue;
+ mark_tree_uninteresting(parent->tree);
+ if (revs.edge_hint && !(parent->object.flags & SHOWN)) {
+ parent->object.flags |= SHOWN;
+ printf("-%s\n", sha1_to_hex(parent->object.sha1));
}
-
- parse_commit(p);
- switch (compare_tree(p->tree, commit->tree)) {
- case TREE_SAME:
- parent->next = NULL;
- commit->parents = parent;
- return;
-
- case TREE_NEW:
- if (remove_empty_trees && same_tree_as_empty(p->tree)) {
- *pp = parent->next;
- continue;
- }
- /* fallthrough */
- case TREE_DIFFERENT:
- pp = &parent->next;
- continue;
- }
- die("bad tree compare for commit %s", sha1_to_hex(commit->object.sha1));
}
- commit->object.flags |= TREECHANGE;
}
-static void add_parents_to_list(struct commit *commit, struct commit_list **list)
-{
- struct commit_list *parent = commit->parents;
-
- /*
- * If the commit is uninteresting, don't try to
- * prune parents - we want the maximal uninteresting
- * set.
- *
- * Normally we haven't parsed the parent
- * yet, so we won't have a parent of a parent
- * here. However, it may turn out that we've
- * reached this commit some other way (where it
- * wasn't uninteresting), in which case we need
- * to mark its parents recursively too..
- */
- if (commit->object.flags & UNINTERESTING) {
- while (parent) {
- struct commit *p = parent->item;
- parent = parent->next;
- parse_commit(p);
- p->object.flags |= UNINTERESTING;
- if (p->parents)
- mark_parents_uninteresting(p);
- if (p->object.flags & SEEN)
- continue;
- p->object.flags |= SEEN;
- insert_by_date(p, list);
- }
- return;
- }
-
- /*
- * Ok, the commit wasn't uninteresting. Try to
- * simplify the commit history and find the parent
- * that has no differences in the path set if one exists.
- */
- if (paths)
- try_to_simplify_commit(commit);
-
- parent = commit->parents;
- while (parent) {
- struct commit *p = parent->item;
-
- parent = parent->next;
-
- parse_commit(p);
- if (p->object.flags & SEEN)
- continue;
- p->object.flags |= SEEN;
- insert_by_date(p, list);
- }
-}
-
-static struct commit_list *limit_list(struct commit_list *list)
+static void mark_edges_uninteresting(struct commit_list *list)
{
- struct commit_list *newlist = NULL;
- struct commit_list **p = &newlist;
- while (list) {
- struct commit_list *entry = list;
+ for ( ; list; list = list->next) {
struct commit *commit = list->item;
- struct object *obj = &commit->object;
-
- list = list->next;
- free(entry);
- if (max_age != -1 && (commit->date < max_age))
- obj->flags |= UNINTERESTING;
- if (unpacked && has_sha1_pack(obj->sha1))
- obj->flags |= UNINTERESTING;
- add_parents_to_list(commit, &list);
- if (obj->flags & UNINTERESTING) {
- mark_parents_uninteresting(commit);
- if (everybody_uninteresting(list))
- break;
+ if (commit->object.flags & UNINTERESTING) {
+ mark_tree_uninteresting(commit->tree);
continue;
}
- if (min_age != -1 && (commit->date > min_age))
- continue;
- p = &commit_list_insert(commit, p)->next;
+ mark_edge_parents_uninteresting(commit);
}
- if (tree_objects)
- mark_edges_uninteresting(newlist);
- if (bisect_list)
- newlist = find_bisection(newlist);
- return newlist;
-}
-
-static void add_pending_object(struct object *obj, const char *name)
-{
- add_object(obj, &pending_objects, name);
-}
-
-static struct commit *get_commit_reference(const char *name, const unsigned char *sha1, unsigned int flags)
-{
- struct object *object;
-
- object = parse_object(sha1);
- if (!object)
- die("bad object %s", name);
-
- /*
- * Tag object? Look what it points to..
- */
- while (object->type == tag_type) {
- struct tag *tag = (struct tag *) object;
- object->flags |= flags;
- if (tag_objects && !(object->flags & UNINTERESTING))
- add_pending_object(object, tag->tag);
- object = parse_object(tag->tagged->sha1);
- if (!object)
- die("bad object %s", sha1_to_hex(tag->tagged->sha1));
- }
-
- /*
- * Commit object? Just return it, we'll do all the complex
- * reachability crud.
- */
- if (object->type == commit_type) {
- struct commit *commit = (struct commit *)object;
- object->flags |= flags;
- if (parse_commit(commit) < 0)
- die("unable to parse commit %s", name);
- if (flags & UNINTERESTING)
- mark_parents_uninteresting(commit);
- return commit;
- }
-
- /*
- * Tree object? Either mark it uniniteresting, or add it
- * to the list of objects to look at later..
- */
- if (object->type == tree_type) {
- struct tree *tree = (struct tree *)object;
- if (!tree_objects)
- return NULL;
- if (flags & UNINTERESTING) {
- mark_tree_uninteresting(tree);
- return NULL;
- }
- add_pending_object(object, "");
- return NULL;
- }
-
- /*
- * Blob object? You know the drill by now..
- */
- if (object->type == blob_type) {
- struct blob *blob = (struct blob *)object;
- if (!blob_objects)
- return NULL;
- if (flags & UNINTERESTING) {
- mark_blob_uninteresting(blob);
- return NULL;
- }
- add_pending_object(object, "");
- return NULL;
- }
- die("%s is unknown object", name);
-}
-
-static void handle_one_commit(struct commit *com, struct commit_list **lst)
-{
- if (!com || com->object.flags & SEEN)
- return;
- com->object.flags |= SEEN;
- commit_list_insert(com, lst);
-}
-
-/* for_each_ref() callback does not allow user data -- Yuck. */
-static struct commit_list **global_lst;
-
-static int include_one_commit(const char *path, const unsigned char *sha1)
-{
- struct commit *com = get_commit_reference(path, sha1, 0);
- handle_one_commit(com, global_lst);
- return 0;
-}
-
-static void handle_all(struct commit_list **lst)
-{
- global_lst = lst;
- for_each_ref(include_one_commit);
- global_lst = NULL;
}
int main(int argc, const char **argv)
{
- const char *prefix = setup_git_directory();
- struct commit_list *list = NULL;
- int i, limited = 0;
+ struct commit_list *list;
+ int i;
+
+ argc = setup_revisions(argc, argv, &revs, NULL);
for (i = 1 ; i < argc; i++) {
- int flags;
const char *arg = argv[i];
- char *dotdot;
- struct commit *commit;
- unsigned char sha1[20];
/* accept -<digit>, like traditional "head" */
if ((*arg == '-') && isdigit(arg[1])) {
- max_count = atoi(arg + 1);
+ revs.max_count = atoi(arg + 1);
continue;
}
if (!strcmp(arg, "-n")) {
if (++i >= argc)
die("-n requires an argument");
- max_count = atoi(argv[i]);
+ revs.max_count = atoi(argv[i]);
continue;
}
if (!strncmp(arg,"-n",2)) {
- max_count = atoi(arg + 2);
- continue;
- }
- if (!strncmp(arg, "--max-count=", 12)) {
- max_count = atoi(arg + 12);
- continue;
- }
- if (!strncmp(arg, "--max-age=", 10)) {
- max_age = atoi(arg + 10);
- limited = 1;
- continue;
- }
- if (!strncmp(arg, "--min-age=", 10)) {
- min_age = atoi(arg + 10);
- limited = 1;
+ revs.max_count = atoi(arg + 2);
continue;
}
if (!strcmp(arg, "--header")) {
@@ -819,138 +337,35 @@ int main(int argc, const char **argv)
commit_prefix = "commit ";
continue;
}
- if (!strncmp(arg, "--no-merges", 11)) {
- no_merges = 1;
- continue;
- }
- if (!strcmp(arg, "--parents")) {
- show_parents = 1;
+ if (!strcmp(arg, "--timestamp")) {
+ show_timestamp = 1;
continue;
}
if (!strcmp(arg, "--bisect")) {
bisect_list = 1;
continue;
}
- if (!strcmp(arg, "--all")) {
- handle_all(&list);
- continue;
- }
- if (!strcmp(arg, "--objects")) {
- tag_objects = 1;
- tree_objects = 1;
- blob_objects = 1;
- continue;
- }
- if (!strcmp(arg, "--unpacked")) {
- unpacked = 1;
- limited = 1;
- continue;
- }
- if (!strcmp(arg, "--merge-order")) {
- merge_order = 1;
- continue;
- }
- if (!strcmp(arg, "--show-breaks")) {
- show_breaks = 1;
- continue;
- }
- if (!strcmp(arg, "--topo-order")) {
- topo_order = 1;
- limited = 1;
- continue;
- }
- if (!strcmp(arg, "--dense")) {
- dense = 1;
- continue;
- }
- if (!strcmp(arg, "--sparse")) {
- dense = 0;
- continue;
- }
- if (!strcmp(arg, "--remove-empty")) {
- remove_empty_trees = 1;
- continue;
- }
- if (!strcmp(arg, "--")) {
- i++;
- break;
- }
+ usage(rev_list_usage);
- if (show_breaks && !merge_order)
- usage(rev_list_usage);
-
- flags = 0;
- dotdot = strstr(arg, "..");
- if (dotdot) {
- unsigned char from_sha1[20];
- char *next = dotdot + 2;
- *dotdot = 0;
- if (!*next)
- next = "HEAD";
- if (!get_sha1(arg, from_sha1) && !get_sha1(next, sha1)) {
- struct commit *exclude;
- struct commit *include;
-
- exclude = get_commit_reference(arg, from_sha1, UNINTERESTING);
- include = get_commit_reference(next, sha1, 0);
- if (!exclude || !include)
- die("Invalid revision range %s..%s", arg, next);
- limited = 1;
- handle_one_commit(exclude, &list);
- handle_one_commit(include, &list);
- continue;
- }
- *dotdot = '.';
- }
- if (*arg == '^') {
- flags = UNINTERESTING;
- arg++;
- limited = 1;
- }
- if (get_sha1(arg, sha1) < 0) {
- struct stat st;
- if (lstat(arg, &st) < 0)
- die("'%s': %s", arg, strerror(errno));
- break;
- }
- commit = get_commit_reference(arg, sha1, flags);
- handle_one_commit(commit, &list);
}
+ list = revs.commits;
+
if (!list &&
- (!(tag_objects||tree_objects||blob_objects) && !pending_objects))
+ (!(revs.tag_objects||revs.tree_objects||revs.blob_objects) && !revs.pending_objects))
usage(rev_list_usage);
- paths = get_pathspec(prefix, argv + i);
- if (paths) {
- limited = 1;
- diff_tree_setup_paths(paths);
- }
-
save_commit_buffer = verbose_header;
track_object_refs = 0;
- if (!merge_order) {
- sort_by_date(&list);
- if (list && !limited && max_count == 1 &&
- !tag_objects && !tree_objects && !blob_objects) {
- show_commit(list->item);
- return 0;
- }
- if (limited)
- list = limit_list(list);
- if (topo_order)
- sort_in_topological_order(&list);
- show_commit_list(list);
- } else {
-#ifndef NO_OPENSSL
- if (sort_list_in_merge_order(list, &process_commit)) {
- die("merge order sort failed\n");
- }
-#else
- die("merge order sort unsupported, OpenSSL not linked");
-#endif
- }
+ prepare_revision_walk(&revs);
+ if (revs.tree_objects)
+ mark_edges_uninteresting(revs.commits);
+
+ if (bisect_list)
+ revs.commits = find_bisection(revs.commits);
+
+ show_commit_list(&revs);
return 0;
}
diff --git a/rev-parse.c b/rev-parse.c
index 70a8271..e956cd5 100644
--- a/rev-parse.c
+++ b/rev-parse.c
@@ -39,15 +39,15 @@ static int is_rev_argument(const char *arg)
"--header",
"--max-age=",
"--max-count=",
- "--merge-order",
"--min-age=",
"--no-merges",
"--objects",
+ "--objects-edge",
"--parents",
"--pretty",
- "--show-breaks",
"--sparse",
"--topo-order",
+ "--date-order",
"--unpacked",
NULL
};
@@ -166,13 +166,17 @@ int main(int argc, char **argv)
unsigned char sha1[20];
const char *prefix = setup_git_directory();
+ git_config(git_default_config);
+
for (i = 1; i < argc; i++) {
struct stat st;
char *arg = argv[i];
char *dotdot;
-
+
if (as_is) {
- show_file(arg);
+ if (show_file(arg) && as_is < 2)
+ if (lstat(arg, &st) < 0)
+ die("'%s': %s", arg, strerror(errno));
continue;
}
if (!strcmp(arg,"-n")) {
@@ -192,7 +196,7 @@ int main(int argc, char **argv)
if (*arg == '-') {
if (!strcmp(arg, "--")) {
- as_is = 1;
+ as_is = 2;
/* Pass on the "--" if we show anything but files.. */
if (filter & (DO_FLAGS | DO_REVS))
show_file(arg);
@@ -311,16 +315,17 @@ int main(int argc, char **argv)
dotdot = strstr(arg, "..");
if (dotdot) {
unsigned char end[20];
- char *n = dotdot+2;
+ char *next = dotdot + 2;
+ char *this = arg;
*dotdot = 0;
- if (!get_sha1(arg, sha1)) {
- if (!*n)
- n = "HEAD";
- if (!get_sha1(n, end)) {
- show_rev(NORMAL, end, n);
- show_rev(REVERSED, sha1, arg);
- continue;
- }
+ if (!*next)
+ next = "HEAD";
+ if (dotdot == arg)
+ this = "HEAD";
+ if (!get_sha1(this, sha1) && !get_sha1(next, end)) {
+ show_rev(NORMAL, end, next);
+ show_rev(REVERSED, sha1, this);
+ continue;
}
*dotdot = '.';
}
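
The rev-parse hunk above makes an empty endpoint of a ".." range default to HEAD, so both "origin.." and "..origin" now resolve. Below is a minimal standalone sketch of that defaulting rule; resolve_range() is a hypothetical helper that exists only for illustration and is not part of git.

/*
 * Sketch of the "A..B" defaulting rule: an empty endpoint on either
 * side of ".." stands for HEAD.
 */
#include <stdio.h>
#include <string.h>

static void resolve_range(const char *arg, char *this, char *next)
{
	const char *dotdot = strstr(arg, "..");
	size_t thislen = dotdot - arg;

	/* left side: "..origin" means "HEAD..origin" */
	if (thislen)
		snprintf(this, 64, "%.*s", (int)thislen, arg);
	else
		strcpy(this, "HEAD");

	/* right side: "origin.." means "origin..HEAD" */
	if (*(dotdot + 2))
		snprintf(next, 64, "%s", dotdot + 2);
	else
		strcpy(next, "HEAD");
}

int main(void)
{
	char this[64], next[64];

	resolve_range("origin..", this, next);
	printf("%s..%s\n", this, next);	/* origin..HEAD */
	resolve_range("..origin", this, next);
	printf("%s..%s\n", this, next);	/* HEAD..origin */
	return 0;
}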
diff --git a/revision.c b/revision.c
new file mode 100644
index 0000000..ce35b5a
--- /dev/null
+++ b/revision.c
@@ -0,0 +1,811 @@
+#include "cache.h"
+#include "tag.h"
+#include "blob.h"
+#include "tree.h"
+#include "commit.h"
+#include "diff.h"
+#include "refs.h"
+#include "revision.h"
+
+static char *path_name(struct name_path *path, const char *name)
+{
+ struct name_path *p;
+ char *n, *m;
+ int nlen = strlen(name);
+ int len = nlen + 1;
+
+ for (p = path; p; p = p->up) {
+ if (p->elem_len)
+ len += p->elem_len + 1;
+ }
+ n = xmalloc(len);
+ m = n + len - (nlen + 1);
+ strcpy(m, name);
+ for (p = path; p; p = p->up) {
+ if (p->elem_len) {
+ m -= p->elem_len + 1;
+ memcpy(m, p->elem, p->elem_len);
+ m[p->elem_len] = '/';
+ }
+ }
+ return n;
+}
+
+struct object_list **add_object(struct object *obj,
+ struct object_list **p,
+ struct name_path *path,
+ const char *name)
+{
+ struct object_list *entry = xmalloc(sizeof(*entry));
+ entry->item = obj;
+ entry->next = *p;
+ entry->name = path_name(path, name);
+ *p = entry;
+ return &entry->next;
+}
+
+static void mark_blob_uninteresting(struct blob *blob)
+{
+ if (blob->object.flags & UNINTERESTING)
+ return;
+ blob->object.flags |= UNINTERESTING;
+}
+
+void mark_tree_uninteresting(struct tree *tree)
+{
+ struct object *obj = &tree->object;
+ struct tree_entry_list *entry;
+
+ if (obj->flags & UNINTERESTING)
+ return;
+ obj->flags |= UNINTERESTING;
+ if (!has_sha1_file(obj->sha1))
+ return;
+ if (parse_tree(tree) < 0)
+ die("bad tree %s", sha1_to_hex(obj->sha1));
+ entry = tree->entries;
+ tree->entries = NULL;
+ while (entry) {
+ struct tree_entry_list *next = entry->next;
+ if (entry->directory)
+ mark_tree_uninteresting(entry->item.tree);
+ else
+ mark_blob_uninteresting(entry->item.blob);
+ free(entry);
+ entry = next;
+ }
+}
+
+void mark_parents_uninteresting(struct commit *commit)
+{
+ struct commit_list *parents = commit->parents;
+
+ while (parents) {
+ struct commit *commit = parents->item;
+ if (!(commit->object.flags & UNINTERESTING)) {
+ commit->object.flags |= UNINTERESTING;
+
+ /*
+ * Normally we haven't parsed the parent
+ * yet, so we won't have a parent of a parent
+ * here. However, it may turn out that we've
+ * reached this commit some other way (where it
+ * wasn't uninteresting), in which case we need
+ * to mark its parents recursively too..
+ */
+ if (commit->parents)
+ mark_parents_uninteresting(commit);
+ }
+
+ /*
+ * A missing commit is ok iff its parent is marked
+ * uninteresting.
+ *
+ * We just mark such a thing parsed, so that when
+ * it is popped next time around, we won't be trying
+ * to parse it and get an error.
+ */
+ if (!has_sha1_file(commit->object.sha1))
+ commit->object.parsed = 1;
+ parents = parents->next;
+ }
+}
+
+static void add_pending_object(struct rev_info *revs, struct object *obj, const char *name)
+{
+ add_object(obj, &revs->pending_objects, NULL, name);
+}
+
+static struct commit *get_commit_reference(struct rev_info *revs, const char *name, const unsigned char *sha1, unsigned int flags)
+{
+ struct object *object;
+
+ object = parse_object(sha1);
+ if (!object)
+ die("bad object %s", name);
+
+ /*
+ * Tag object? Look what it points to..
+ */
+ while (object->type == tag_type) {
+ struct tag *tag = (struct tag *) object;
+ object->flags |= flags;
+ if (revs->tag_objects && !(object->flags & UNINTERESTING))
+ add_pending_object(revs, object, tag->tag);
+ object = parse_object(tag->tagged->sha1);
+ if (!object)
+ die("bad object %s", sha1_to_hex(tag->tagged->sha1));
+ }
+
+ /*
+ * Commit object? Just return it, we'll do all the complex
+ * reachability crud.
+ */
+ if (object->type == commit_type) {
+ struct commit *commit = (struct commit *)object;
+ object->flags |= flags;
+ if (parse_commit(commit) < 0)
+ die("unable to parse commit %s", name);
+ if (flags & UNINTERESTING) {
+ mark_parents_uninteresting(commit);
+ revs->limited = 1;
+ }
+ return commit;
+ }
+
+ /*
+ * Tree object? Either mark it uninteresting, or add it
+ * to the list of objects to look at later..
+ */
+ if (object->type == tree_type) {
+ struct tree *tree = (struct tree *)object;
+ if (!revs->tree_objects)
+ return NULL;
+ if (flags & UNINTERESTING) {
+ mark_tree_uninteresting(tree);
+ return NULL;
+ }
+ add_pending_object(revs, object, "");
+ return NULL;
+ }
+
+ /*
+ * Blob object? You know the drill by now..
+ */
+ if (object->type == blob_type) {
+ struct blob *blob = (struct blob *)object;
+ if (!revs->blob_objects)
+ return NULL;
+ if (flags & UNINTERESTING) {
+ mark_blob_uninteresting(blob);
+ return NULL;
+ }
+ add_pending_object(revs, object, "");
+ return NULL;
+ }
+ die("%s is unknown object", name);
+}
+
+static int everybody_uninteresting(struct commit_list *orig)
+{
+ struct commit_list *list = orig;
+ while (list) {
+ struct commit *commit = list->item;
+ list = list->next;
+ if (commit->object.flags & UNINTERESTING)
+ continue;
+ return 0;
+ }
+ return 1;
+}
+
+static int tree_difference = REV_TREE_SAME;
+
+static void file_add_remove(struct diff_options *options,
+ int addremove, unsigned mode,
+ const unsigned char *sha1,
+ const char *base, const char *path)
+{
+ int diff = REV_TREE_DIFFERENT;
+
+ /*
+ * Is it an add of a new file? It means that the old tree
+ * didn't have it at all, so we will turn "REV_TREE_SAME" ->
+ * "REV_TREE_NEW", but leave any "REV_TREE_DIFFERENT" alone
+ * (and if it already was "REV_TREE_NEW", we'll keep it
+ * "REV_TREE_NEW" of course).
+ */
+ if (addremove == '+') {
+ diff = tree_difference;
+ if (diff != REV_TREE_SAME)
+ return;
+ diff = REV_TREE_NEW;
+ }
+ tree_difference = diff;
+}
+
+static void file_change(struct diff_options *options,
+ unsigned old_mode, unsigned new_mode,
+ const unsigned char *old_sha1,
+ const unsigned char *new_sha1,
+ const char *base, const char *path)
+{
+ tree_difference = REV_TREE_DIFFERENT;
+}
+
+static struct diff_options diff_opt = {
+ .recursive = 1,
+ .add_remove = file_add_remove,
+ .change = file_change,
+};
+
+int rev_compare_tree(struct tree *t1, struct tree *t2)
+{
+ if (!t1)
+ return REV_TREE_NEW;
+ if (!t2)
+ return REV_TREE_DIFFERENT;
+ tree_difference = REV_TREE_SAME;
+ if (diff_tree_sha1(t1->object.sha1, t2->object.sha1, "", &diff_opt) < 0)
+ return REV_TREE_DIFFERENT;
+ return tree_difference;
+}
+
+int rev_same_tree_as_empty(struct tree *t1)
+{
+ int retval;
+ void *tree;
+ struct tree_desc empty, real;
+
+ if (!t1)
+ return 0;
+
+ tree = read_object_with_reference(t1->object.sha1, tree_type, &real.size, NULL);
+ if (!tree)
+ return 0;
+ real.buf = tree;
+
+ empty.buf = "";
+ empty.size = 0;
+
+ tree_difference = 0;
+ retval = diff_tree(&empty, &real, "", &diff_opt);
+ free(tree);
+
+ return retval >= 0 && !tree_difference;
+}
+
+static void try_to_simplify_commit(struct rev_info *revs, struct commit *commit)
+{
+ struct commit_list **pp, *parent;
+ int tree_changed = 0;
+
+ if (!commit->tree)
+ return;
+
+ if (!commit->parents) {
+ if (!rev_same_tree_as_empty(commit->tree))
+ commit->object.flags |= TREECHANGE;
+ return;
+ }
+
+ pp = &commit->parents;
+ while ((parent = *pp) != NULL) {
+ struct commit *p = parent->item;
+
+ parse_commit(p);
+ switch (rev_compare_tree(p->tree, commit->tree)) {
+ case REV_TREE_SAME:
+ if (p->object.flags & UNINTERESTING) {
+ /* Even if a merge with an uninteresting
+ * side branch brought the entire change
+ * we are interested in, we do not want
+ * to lose the other branches of this
+ * merge, so we just keep going.
+ */
+ pp = &parent->next;
+ continue;
+ }
+ parent->next = NULL;
+ commit->parents = parent;
+ return;
+
+ case REV_TREE_NEW:
+ if (revs->remove_empty_trees &&
+ rev_same_tree_as_empty(p->tree)) {
+ /* We are adding all the specified
+ * paths from this parent, so the
+ * history beyond this parent is not
+ * interesting. Remove its parents
+ * (they are grandparents for us).
+ * IOW, we pretend this parent is a
+ * "root" commit.
+ */
+ parse_commit(p);
+ p->parents = NULL;
+ }
+ /* fallthrough */
+ case REV_TREE_DIFFERENT:
+ tree_changed = 1;
+ pp = &parent->next;
+ continue;
+ }
+ die("bad tree compare for commit %s", sha1_to_hex(commit->object.sha1));
+ }
+ if (tree_changed)
+ commit->object.flags |= TREECHANGE;
+}
+
+static void add_parents_to_list(struct rev_info *revs, struct commit *commit, struct commit_list **list)
+{
+ struct commit_list *parent = commit->parents;
+
+ /*
+ * If the commit is uninteresting, don't try to
+ * prune parents - we want the maximal uninteresting
+ * set.
+ *
+ * Normally we haven't parsed the parent
+ * yet, so we won't have a parent of a parent
+ * here. However, it may turn out that we've
+ * reached this commit some other way (where it
+ * wasn't uninteresting), in which case we need
+ * to mark its parents recursively too..
+ */
+ if (commit->object.flags & UNINTERESTING) {
+ while (parent) {
+ struct commit *p = parent->item;
+ parent = parent->next;
+ parse_commit(p);
+ p->object.flags |= UNINTERESTING;
+ if (p->parents)
+ mark_parents_uninteresting(p);
+ if (p->object.flags & SEEN)
+ continue;
+ p->object.flags |= SEEN;
+ insert_by_date(p, list);
+ }
+ return;
+ }
+
+ /*
+ * Ok, the commit wasn't uninteresting. Try to
+ * simplify the commit history and find the parent
+ * that has no differences in the path set if one exists.
+ */
+ if (revs->prune_fn)
+ revs->prune_fn(revs, commit);
+
+ parent = commit->parents;
+ while (parent) {
+ struct commit *p = parent->item;
+
+ parent = parent->next;
+
+ parse_commit(p);
+ if (p->object.flags & SEEN)
+ continue;
+ p->object.flags |= SEEN;
+ insert_by_date(p, list);
+ }
+}
+
+static void limit_list(struct rev_info *revs)
+{
+ struct commit_list *list = revs->commits;
+ struct commit_list *newlist = NULL;
+ struct commit_list **p = &newlist;
+
+ while (list) {
+ struct commit_list *entry = list;
+ struct commit *commit = list->item;
+ struct object *obj = &commit->object;
+
+ list = list->next;
+ free(entry);
+
+ if (revs->max_age != -1 && (commit->date < revs->max_age))
+ obj->flags |= UNINTERESTING;
+ if (revs->unpacked && has_sha1_pack(obj->sha1))
+ obj->flags |= UNINTERESTING;
+ add_parents_to_list(revs, commit, &list);
+ if (obj->flags & UNINTERESTING) {
+ mark_parents_uninteresting(commit);
+ if (everybody_uninteresting(list))
+ break;
+ continue;
+ }
+ if (revs->min_age != -1 && (commit->date > revs->min_age))
+ continue;
+ p = &commit_list_insert(commit, p)->next;
+ }
+ if (revs->boundary) {
+ /* mark the ones that are on the result list first */
+ for (list = newlist; list; list = list->next) {
+ struct commit *commit = list->item;
+ commit->object.flags |= TMP_MARK;
+ }
+ for (list = newlist; list; list = list->next) {
+ struct commit *commit = list->item;
+ struct object *obj = &commit->object;
+ struct commit_list *parent;
+ if (obj->flags & UNINTERESTING)
+ continue;
+ for (parent = commit->parents;
+ parent;
+ parent = parent->next) {
+ struct commit *pcommit = parent->item;
+ if (!(pcommit->object.flags & UNINTERESTING))
+ continue;
+ pcommit->object.flags |= BOUNDARY;
+ if (pcommit->object.flags & TMP_MARK)
+ continue;
+ pcommit->object.flags |= TMP_MARK;
+ p = &commit_list_insert(pcommit, p)->next;
+ }
+ }
+ for (list = newlist; list; list = list->next) {
+ struct commit *commit = list->item;
+ commit->object.flags &= ~TMP_MARK;
+ }
+ }
+ revs->commits = newlist;
+}
+
+static void add_one_commit(struct commit *commit, struct rev_info *revs)
+{
+ if (!commit || (commit->object.flags & SEEN))
+ return;
+ commit->object.flags |= SEEN;
+ commit_list_insert(commit, &revs->commits);
+}
+
+static int all_flags;
+static struct rev_info *all_revs;
+
+static int handle_one_ref(const char *path, const unsigned char *sha1)
+{
+ struct commit *commit = get_commit_reference(all_revs, path, sha1, all_flags);
+ add_one_commit(commit, all_revs);
+ return 0;
+}
+
+static void handle_all(struct rev_info *revs, unsigned flags)
+{
+ all_revs = revs;
+ all_flags = flags;
+ for_each_ref(handle_one_ref);
+}
+
+void init_revisions(struct rev_info *revs)
+{
+ memset(revs, 0, sizeof(*revs));
+ revs->lifo = 1;
+ revs->dense = 1;
+ revs->prefix = setup_git_directory();
+ revs->max_age = -1;
+ revs->min_age = -1;
+ revs->max_count = -1;
+
+ revs->prune_fn = NULL;
+ revs->prune_data = NULL;
+
+ revs->topo_setter = topo_sort_default_setter;
+ revs->topo_getter = topo_sort_default_getter;
+}
+
+/*
+ * Parse revision information, filling in the "rev_info" structure,
+ * and removing the used arguments from the argument list.
+ *
+ * Returns the number of arguments left that weren't recognized
+ * (which are also moved to the head of the argument list)
+ */
+int setup_revisions(int argc, const char **argv, struct rev_info *revs, const char *def)
+{
+ int i, flags, seen_dashdash;
+ const char **unrecognized = argv + 1;
+ int left = 1;
+
+ init_revisions(revs);
+
+ /* First, search for "--" */
+ seen_dashdash = 0;
+ for (i = 1; i < argc; i++) {
+ const char *arg = argv[i];
+ if (strcmp(arg, "--"))
+ continue;
+ argv[i] = NULL;
+ argc = i;
+ revs->prune_data = get_pathspec(revs->prefix, argv + i + 1);
+ seen_dashdash = 1;
+ break;
+ }
+
+ flags = 0;
+ for (i = 1; i < argc; i++) {
+ struct commit *commit;
+ const char *arg = argv[i];
+ unsigned char sha1[20];
+ char *dotdot;
+ int local_flags;
+
+ if (*arg == '-') {
+ if (!strncmp(arg, "--max-count=", 12)) {
+ revs->max_count = atoi(arg + 12);
+ continue;
+ }
+ /* accept -<digit>, like traditional "head" */
+ if ((*arg == '-') && isdigit(arg[1])) {
+ revs->max_count = atoi(arg + 1);
+ continue;
+ }
+ if (!strcmp(arg, "-n")) {
+ if (argc <= i + 1)
+ die("-n requires an argument");
+ revs->max_count = atoi(argv[++i]);
+ continue;
+ }
+ if (!strncmp(arg,"-n",2)) {
+ revs->max_count = atoi(arg + 2);
+ continue;
+ }
+ if (!strncmp(arg, "--max-age=", 10)) {
+ revs->max_age = atoi(arg + 10);
+ continue;
+ }
+ if (!strncmp(arg, "--since=", 8)) {
+ revs->max_age = approxidate(arg + 8);
+ continue;
+ }
+ if (!strncmp(arg, "--after=", 8)) {
+ revs->max_age = approxidate(arg + 8);
+ continue;
+ }
+ if (!strncmp(arg, "--min-age=", 10)) {
+ revs->min_age = atoi(arg + 10);
+ continue;
+ }
+ if (!strncmp(arg, "--before=", 9)) {
+ revs->min_age = approxidate(arg + 9);
+ continue;
+ }
+ if (!strncmp(arg, "--until=", 8)) {
+ revs->min_age = approxidate(arg + 8);
+ continue;
+ }
+ if (!strcmp(arg, "--all")) {
+ handle_all(revs, flags);
+ continue;
+ }
+ if (!strcmp(arg, "--not")) {
+ flags ^= UNINTERESTING;
+ continue;
+ }
+ if (!strcmp(arg, "--default")) {
+ if (++i >= argc)
+ die("bad --default argument");
+ def = argv[i];
+ continue;
+ }
+ if (!strcmp(arg, "--topo-order")) {
+ revs->topo_order = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--date-order")) {
+ revs->lifo = 0;
+ revs->topo_order = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--parents")) {
+ revs->parents = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--dense")) {
+ revs->dense = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--sparse")) {
+ revs->dense = 0;
+ continue;
+ }
+ if (!strcmp(arg, "--remove-empty")) {
+ revs->remove_empty_trees = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--no-merges")) {
+ revs->no_merges = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--boundary")) {
+ revs->boundary = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--objects")) {
+ revs->tag_objects = 1;
+ revs->tree_objects = 1;
+ revs->blob_objects = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--objects-edge")) {
+ revs->tag_objects = 1;
+ revs->tree_objects = 1;
+ revs->blob_objects = 1;
+ revs->edge_hint = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--unpacked")) {
+ revs->unpacked = 1;
+ continue;
+ }
+ *unrecognized++ = arg;
+ left++;
+ continue;
+ }
+ dotdot = strstr(arg, "..");
+ if (dotdot) {
+ unsigned char from_sha1[20];
+ const char *next = dotdot + 2;
+ const char *this = arg;
+ *dotdot = 0;
+ if (!*next)
+ next = "HEAD";
+ if (dotdot == arg)
+ this = "HEAD";
+ if (!get_sha1(this, from_sha1) &&
+ !get_sha1(next, sha1)) {
+ struct commit *exclude;
+ struct commit *include;
+
+ exclude = get_commit_reference(revs, this, from_sha1, flags ^ UNINTERESTING);
+ include = get_commit_reference(revs, next, sha1, flags);
+ if (!exclude || !include)
+ die("Invalid revision range %s..%s", arg, next);
+ add_one_commit(exclude, revs);
+ add_one_commit(include, revs);
+ continue;
+ }
+ *dotdot = '.';
+ }
+ local_flags = 0;
+ if (*arg == '^') {
+ local_flags = UNINTERESTING;
+ arg++;
+ }
+ if (get_sha1(arg, sha1) < 0) {
+ struct stat st;
+ int j;
+
+ if (seen_dashdash || local_flags)
+ die("bad revision '%s'", arg);
+
+ /* If we didn't have a "--", all filenames must exist */
+ for (j = i; j < argc; j++) {
+ if (lstat(argv[j], &st) < 0)
+ die("'%s': %s", argv[j], strerror(errno));
+ }
+ revs->prune_data = get_pathspec(revs->prefix, argv + i);
+ break;
+ }
+ commit = get_commit_reference(revs, arg, sha1, flags ^ local_flags);
+ add_one_commit(commit, revs);
+ }
+ if (def && !revs->commits) {
+ unsigned char sha1[20];
+ struct commit *commit;
+ if (get_sha1(def, sha1) < 0)
+ die("bad default revision '%s'", def);
+ commit = get_commit_reference(revs, def, sha1, 0);
+ add_one_commit(commit, revs);
+ }
+
+ if (revs->topo_order || revs->unpacked)
+ revs->limited = 1;
+
+ if (revs->prune_data) {
+ diff_tree_setup_paths(revs->prune_data);
+ revs->prune_fn = try_to_simplify_commit;
+
+ /*
+ * If we fix up parent data, we currently cannot
+ * do that on-the-fly.
+ */
+ if (revs->parents)
+ revs->limited = 1;
+ }
+
+ return left;
+}
+
+void prepare_revision_walk(struct rev_info *revs)
+{
+ sort_by_date(&revs->commits);
+ if (revs->limited)
+ limit_list(revs);
+ if (revs->topo_order)
+ sort_in_topological_order_fn(&revs->commits, revs->lifo,
+ revs->topo_setter,
+ revs->topo_getter);
+}
+
+static int rewrite_one(struct commit **pp)
+{
+ for (;;) {
+ struct commit *p = *pp;
+ if (p->object.flags & (TREECHANGE | UNINTERESTING))
+ return 0;
+ if (!p->parents)
+ return -1;
+ *pp = p->parents->item;
+ }
+}
+
+static void rewrite_parents(struct commit *commit)
+{
+ struct commit_list **pp = &commit->parents;
+ while (*pp) {
+ struct commit_list *parent = *pp;
+ if (rewrite_one(&parent->item) < 0) {
+ *pp = parent->next;
+ continue;
+ }
+ pp = &parent->next;
+ }
+}
+
+struct commit *get_revision(struct rev_info *revs)
+{
+ struct commit_list *list = revs->commits;
+
+ if (!list)
+ return NULL;
+
+ /* Check the max_count ... */
+ switch (revs->max_count) {
+ case -1:
+ break;
+ case 0:
+ return NULL;
+ default:
+ revs->max_count--;
+ }
+
+ do {
+ struct commit *commit = revs->commits->item;
+
+ revs->commits = revs->commits->next;
+
+ /*
+ * If we haven't done the list limiting, we need to look at
+ * the parents here. We also need to do the date-based limiting
+ * that we'd otherwise have done in limit_list().
+ */
+ if (!revs->limited) {
+ if ((revs->unpacked &&
+ has_sha1_pack(commit->object.sha1)) ||
+ (revs->max_age != -1 &&
+ (commit->date < revs->max_age)))
+ continue;
+ add_parents_to_list(revs, commit, &revs->commits);
+ }
+ if (commit->object.flags & SHOWN)
+ continue;
+ if (!(commit->object.flags & BOUNDARY) &&
+ (commit->object.flags & UNINTERESTING))
+ continue;
+ if (revs->min_age != -1 && (commit->date > revs->min_age))
+ continue;
+ if (revs->no_merges &&
+ commit->parents && commit->parents->next)
+ continue;
+ if (revs->prune_fn && revs->dense) {
+ if (!(commit->object.flags & TREECHANGE))
+ continue;
+ if (revs->parents)
+ rewrite_parents(commit);
+ }
+ commit->object.flags |= SHOWN;
+ return commit;
+ } while (revs->commits);
+ return NULL;
+}
diff --git a/revision.h b/revision.h
new file mode 100644
index 0000000..0caeecf
--- /dev/null
+++ b/revision.h
@@ -0,0 +1,76 @@
+#ifndef REVISION_H
+#define REVISION_H
+
+#define SEEN (1u<<0)
+#define UNINTERESTING (1u<<1)
+#define TREECHANGE (1u<<2)
+#define SHOWN (1u<<3)
+#define TMP_MARK (1u<<4) /* for isolated cases; clean after use */
+#define BOUNDARY (1u<<5)
+
+struct rev_info;
+
+typedef void (prune_fn_t)(struct rev_info *revs, struct commit *commit);
+
+struct rev_info {
+ /* Starting list */
+ struct commit_list *commits;
+ struct object_list *pending_objects;
+
+ /* Basic information */
+ const char *prefix;
+ void *prune_data;
+ prune_fn_t *prune_fn;
+
+ /* Traversal flags */
+ unsigned int dense:1,
+ no_merges:1,
+ remove_empty_trees:1,
+ lifo:1,
+ topo_order:1,
+ tag_objects:1,
+ tree_objects:1,
+ blob_objects:1,
+ edge_hint:1,
+ limited:1,
+ unpacked:1,
+ boundary:1,
+ parents:1;
+
+ /* special limits */
+ int max_count;
+ unsigned long max_age;
+ unsigned long min_age;
+
+ topo_sort_set_fn_t topo_setter;
+ topo_sort_get_fn_t topo_getter;
+};
+
+#define REV_TREE_SAME 0
+#define REV_TREE_NEW 1
+#define REV_TREE_DIFFERENT 2
+
+/* revision.c */
+extern int rev_same_tree_as_empty(struct tree *t1);
+extern int rev_compare_tree(struct tree *t1, struct tree *t2);
+
+extern void init_revisions(struct rev_info *revs);
+extern int setup_revisions(int argc, const char **argv, struct rev_info *revs, const char *def);
+extern void prepare_revision_walk(struct rev_info *revs);
+extern struct commit *get_revision(struct rev_info *revs);
+
+extern void mark_parents_uninteresting(struct commit *commit);
+extern void mark_tree_uninteresting(struct tree *tree);
+
+struct name_path {
+ struct name_path *up;
+ int elem_len;
+ const char *elem;
+};
+
+extern struct object_list **add_object(struct object *obj,
+ struct object_list **p,
+ struct name_path *path,
+ const char *name);
+
+#endif
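
Taken together with the rewritten rev-list.c earlier in this patch, the header above defines a small walker API. The sketch below shows the intended calling sequence, assuming it is compiled and linked inside the git source tree; error handling is omitted and it is illustrative only, not a replacement for any existing command.

/*
 * Rough sketch of driving the new revision walker, mirroring the
 * rewritten rev-list.c main() above.
 */
#include "cache.h"
#include "commit.h"
#include "revision.h"

int main(int argc, const char **argv)
{
	struct rev_info revs;
	struct commit *commit;

	/*
	 * setup_revisions() calls init_revisions() internally and
	 * consumes --max-count, --all, A..B, pathspecs, etc.
	 */
	argc = setup_revisions(argc, argv, &revs, "HEAD");

	prepare_revision_walk(&revs);
	while ((commit = get_revision(&revs)) != NULL)
		printf("%s\n", sha1_to_hex(commit->object.sha1));
	return 0;
}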
diff --git a/run-command.c b/run-command.c
index b3d287e..ca67ee9 100644
--- a/run-command.c
+++ b/run-command.c
@@ -3,7 +3,7 @@
#include <sys/wait.h>
#include "exec_cmd.h"
-int run_command_v_opt(int argc, char **argv, int flags)
+int run_command_v_opt(int argc, const char **argv, int flags)
{
pid_t pid = fork();
@@ -47,7 +47,7 @@ int run_command_v_opt(int argc, char **argv, int flags)
}
}
-int run_command_v(int argc, char **argv)
+int run_command_v(int argc, const char **argv)
{
return run_command_v_opt(argc, argv, 0);
}
@@ -55,7 +55,7 @@ int run_command_v(int argc, char **argv)
int run_command(const char *cmd, ...)
{
int argc;
- char *argv[MAX_RUN_COMMAND_ARGS];
+ const char *argv[MAX_RUN_COMMAND_ARGS];
const char *arg;
va_list param;
diff --git a/run-command.h b/run-command.h
index ef3ee05..70b477a 100644
--- a/run-command.h
+++ b/run-command.h
@@ -13,8 +13,8 @@ enum {
#define RUN_COMMAND_NO_STDIO 1
#define RUN_GIT_CMD 2 /*If this is to be git sub-command */
-int run_command_v_opt(int argc, char **argv, int opt);
-int run_command_v(int argc, char **argv);
+int run_command_v_opt(int argc, const char **argv, int opt);
+int run_command_v(int argc, const char **argv);
int run_command(const char *cmd, ...);
#endif
diff --git a/send-pack.c b/send-pack.c
index 990be3f..409f188 100644
--- a/send-pack.c
+++ b/send-pack.c
@@ -12,6 +12,7 @@ static const char *exec = "git-receive-pack";
static int verbose = 0;
static int send_all = 0;
static int force_update = 0;
+static int use_thin_pack = 0;
static int is_zero_sha1(const unsigned char *sha1)
{
@@ -26,7 +27,7 @@ static int is_zero_sha1(const unsigned char *sha1)
static void exec_pack_objects(void)
{
- static char *args[] = {
+ static const char *args[] = {
"pack-objects",
"--stdout",
NULL
@@ -37,26 +38,47 @@ static void exec_pack_objects(void)
static void exec_rev_list(struct ref *refs)
{
- static char *args[1000];
- int i = 0;
+ struct ref *ref;
+ static const char *args[1000];
+ int i = 0, j;
args[i++] = "rev-list"; /* 0 */
- args[i++] = "--objects"; /* 1 */
- while (refs) {
- char *buf = malloc(100);
- if (i > 900)
+ if (use_thin_pack) /* 1 */
+ args[i++] = "--objects-edge";
+ else
+ args[i++] = "--objects";
+
+ /* First send the ones we care about most */
+ for (ref = refs; ref; ref = ref->next) {
+ if (900 < i)
die("git-rev-list environment overflow");
- if (!is_zero_sha1(refs->old_sha1) &&
- has_sha1_file(refs->old_sha1)) {
+ if (!is_zero_sha1(ref->new_sha1)) {
+ char *buf = malloc(100);
args[i++] = buf;
- snprintf(buf, 50, "^%s", sha1_to_hex(refs->old_sha1));
+ snprintf(buf, 50, "%s", sha1_to_hex(ref->new_sha1));
buf += 50;
+ if (!is_zero_sha1(ref->old_sha1) &&
+ has_sha1_file(ref->old_sha1)) {
+ args[i++] = buf;
+ snprintf(buf, 50, "^%s",
+ sha1_to_hex(ref->old_sha1));
+ }
}
- if (!is_zero_sha1(refs->new_sha1)) {
+ }
+
+ /* Then a handful of the remainder
+ * NEEDSWORK: we would be better off if we used the newer ones first.
+ */
+ for (ref = refs, j = i + 16;
+ i < 900 && i < j && ref;
+ ref = ref->next) {
+ if (is_zero_sha1(ref->new_sha1) &&
+ !is_zero_sha1(ref->old_sha1) &&
+ has_sha1_file(ref->old_sha1)) {
+ char *buf = malloc(42);
args[i++] = buf;
- snprintf(buf, 50, "%s", sha1_to_hex(refs->new_sha1));
+ snprintf(buf, 42, "^%s", sha1_to_hex(ref->old_sha1));
}
- refs = refs->next;
}
args[i] = NULL;
execv_git_cmd(args);
@@ -340,6 +362,8 @@ int main(int argc, char **argv)
pid_t pid;
setup_git_directory();
+ git_config(git_default_config);
+
argv++;
for (i = 1; i < argc; i++, argv++) {
char *arg = *argv;
@@ -361,6 +385,10 @@ int main(int argc, char **argv)
verbose = 1;
continue;
}
+ if (!strcmp(arg, "--thin")) {
+ use_thin_pack = 1;
+ continue;
+ }
usage(send_pack_usage);
}
if (!dest) {
diff --git a/sha1_file.c b/sha1_file.c
index aa09b46..ba8c4f7 100644
--- a/sha1_file.c
+++ b/sha1_file.c
@@ -9,6 +9,10 @@
#include "cache.h"
#include "delta.h"
#include "pack.h"
+#include "blob.h"
+#include "commit.h"
+#include "tag.h"
+#include "tree.h"
#ifndef O_NOATIME
#if defined(__linux__) && (defined(__i386__) || defined(__PPC__))
@@ -864,7 +868,7 @@ void packed_object_info_detail(struct pack_entry *e,
char *type,
unsigned long *size,
unsigned long *store_size,
- int *delta_chain_length,
+ unsigned int *delta_chain_length,
unsigned char *base_sha1)
{
struct packed_git *p = e->p;
@@ -878,7 +882,7 @@ void packed_object_info_detail(struct pack_entry *e,
if (kind != OBJ_DELTA)
*delta_chain_length = 0;
else {
- int chain_length = 0;
+ unsigned int chain_length = 0;
memcpy(base_sha1, pack, 20);
do {
struct pack_entry base_ent;
@@ -894,16 +898,16 @@ void packed_object_info_detail(struct pack_entry *e,
}
switch (kind) {
case OBJ_COMMIT:
- strcpy(type, "commit");
+ strcpy(type, commit_type);
break;
case OBJ_TREE:
- strcpy(type, "tree");
+ strcpy(type, tree_type);
break;
case OBJ_BLOB:
- strcpy(type, "blob");
+ strcpy(type, blob_type);
break;
case OBJ_TAG:
- strcpy(type, "tag");
+ strcpy(type, tag_type);
break;
default:
die("corrupted pack file %s containing object of kind %d",
@@ -934,16 +938,16 @@ static int packed_object_info(struct pack_entry *entry,
unuse_packed_git(p);
return retval;
case OBJ_COMMIT:
- strcpy(type, "commit");
+ strcpy(type, commit_type);
break;
case OBJ_TREE:
- strcpy(type, "tree");
+ strcpy(type, tree_type);
break;
case OBJ_BLOB:
- strcpy(type, "blob");
+ strcpy(type, blob_type);
break;
case OBJ_TAG:
- strcpy(type, "tag");
+ strcpy(type, tag_type);
break;
default:
die("corrupted pack file %s containing object of kind %d",
@@ -973,6 +977,16 @@ static void *unpack_delta_entry(unsigned char *base_sha1,
if (left < 20)
die("truncated pack file");
+
+ /* The base entry _must_ be in the same pack */
+ if (!find_pack_entry_one(base_sha1, &base_ent, p))
+ die("failed to find delta-pack base object %s",
+ sha1_to_hex(base_sha1));
+ base = unpack_entry_gently(&base_ent, type, &base_size);
+ if (!base)
+ die("failed to read delta-pack base object %s",
+ sha1_to_hex(base_sha1));
+
data = base_sha1 + 20;
data_size = left - 20;
delta_data = xmalloc(delta_size);
@@ -990,14 +1004,6 @@ static void *unpack_delta_entry(unsigned char *base_sha1,
if ((st != Z_STREAM_END) || stream.total_out != delta_size)
die("delta data unpack failed");
- /* The base entry _must_ be in the same pack */
- if (!find_pack_entry_one(base_sha1, &base_ent, p))
- die("failed to find delta-pack base object %s",
- sha1_to_hex(base_sha1));
- base = unpack_entry_gently(&base_ent, type, &base_size);
- if (!base)
- die("failed to read delta-pack base object %s",
- sha1_to_hex(base_sha1));
result = patch_delta(base, base_size,
delta_data, delta_size,
&result_size);
@@ -1069,16 +1075,16 @@ void *unpack_entry_gently(struct pack_entry *entry,
retval = unpack_delta_entry(pack, size, left, type, sizep, p);
return retval;
case OBJ_COMMIT:
- strcpy(type, "commit");
+ strcpy(type, commit_type);
break;
case OBJ_TREE:
- strcpy(type, "tree");
+ strcpy(type, tree_type);
break;
case OBJ_BLOB:
- strcpy(type, "blob");
+ strcpy(type, blob_type);
break;
case OBJ_TAG:
- strcpy(type, "tag");
+ strcpy(type, tag_type);
break;
default:
return NULL;
@@ -1239,9 +1245,9 @@ void *read_object_with_reference(const unsigned char *sha1,
return buffer;
}
/* Handle references */
- else if (!strcmp(type, "commit"))
+ else if (!strcmp(type, commit_type))
ref_type = "tree ";
- else if (!strcmp(type, "tag"))
+ else if (!strcmp(type, tag_type))
ref_type = "object ";
else {
free(buffer);
@@ -1623,7 +1629,7 @@ int index_pipe(unsigned char *sha1, int fd, const char *type, int write_object)
return -1;
}
if (!type)
- type = "blob";
+ type = blob_type;
if (write_object)
ret = write_sha1_file(buf, off, type, sha1);
else {
@@ -1650,7 +1656,7 @@ int index_fd(unsigned char *sha1, int fd, struct stat *st, int write_object, con
return -1;
if (!type)
- type = "blob";
+ type = blob_type;
if (write_object)
ret = write_sha1_file(buf, size, type, sha1);
else {
@@ -1688,9 +1694,9 @@ int index_path(unsigned char *sha1, const char *path, struct stat *st, int write
if (!write_object) {
unsigned char hdr[50];
int hdrlen;
- write_sha1_file_prepare(target, st->st_size, "blob",
+ write_sha1_file_prepare(target, st->st_size, blob_type,
sha1, hdr, &hdrlen);
- } else if (write_sha1_file(target, st->st_size, "blob", sha1))
+ } else if (write_sha1_file(target, st->st_size, blob_type, sha1))
return error("%s: failed to insert into database",
path);
free(target);
diff --git a/sha1_name.c b/sha1_name.c
index d67de18..4f92e12 100644
--- a/sha1_name.c
+++ b/sha1_name.c
@@ -235,14 +235,21 @@ static int ambiguous_path(const char *path, int len)
static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
{
- static const char *prefix[] = {
- "",
- "refs",
- "refs/tags",
- "refs/heads",
+ static const char *fmt[] = {
+ "%.*s",
+ "refs/%.*s",
+ "refs/tags/%.*s",
+ "refs/heads/%.*s",
+ "refs/remotes/%.*s",
+ "refs/remotes/%.*s/HEAD",
NULL
};
const char **p;
+ const char *warning = "warning: refname '%.*s' is ambiguous.\n";
+ char *pathname;
+ int already_found = 0;
+ unsigned char *this_result;
+ unsigned char sha1_from_ref[20];
if (len == 40 && !get_sha1_hex(str, sha1))
return 0;
@@ -251,11 +258,21 @@ static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
if (ambiguous_path(str, len))
return -1;
- for (p = prefix; *p; p++) {
- char *pathname = git_path("%s/%.*s", *p, len, str);
- if (!read_ref(pathname, sha1))
- return 0;
+ for (p = fmt; *p; p++) {
+ this_result = already_found ? sha1_from_ref : sha1;
+ pathname = git_path(*p, len, str);
+ if (!read_ref(pathname, this_result)) {
+ if (warn_ambiguous_refs) {
+ if (already_found)
+ fprintf(stderr, warning, len, str);
+ already_found++;
+ }
+ else
+ return 0;
+ }
}
+ if (already_found)
+ return 0;
return -1;
}
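
The sha1_name.c change above replaces the fixed prefix list with printf-style ref formats and warns when a short name matches more than one ref. The standalone program below only illustrates the lookup order those formats imply; it uses plain "%s" where the real code needs the "%.*s"/length pair, and the ".git/" prefix stands in for git_path().

/*
 * Print the candidate paths tried, in order, for a short refname
 * such as "v1.0".  Illustration only; not part of git.
 */
#include <stdio.h>

int main(void)
{
	static const char *fmt[] = {
		"%s",
		"refs/%s",
		"refs/tags/%s",
		"refs/heads/%s",
		"refs/remotes/%s",
		"refs/remotes/%s/HEAD",
		NULL
	};
	const char *name = "v1.0";
	const char **p;

	for (p = fmt; *p; p++) {
		char path[256];
		snprintf(path, sizeof(path), *p, name);
		printf(".git/%s\n", path);
	}
	return 0;
}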
diff --git a/shell.c b/shell.c
index fc0c73c..8c08cf0 100644
--- a/shell.c
+++ b/shell.c
@@ -15,7 +15,7 @@ static int do_generic_cmd(const char *me, char *arg)
my_argv[1] = arg;
my_argv[2] = NULL;
- return execv_git_cmd((char**) my_argv);
+ return execv_git_cmd(my_argv);
}
static struct commands {
diff --git a/show-branch.c b/show-branch.c
index 511fd3b..24efb65 100644
--- a/show-branch.c
+++ b/show-branch.c
@@ -5,7 +5,7 @@
#include "refs.h"
static const char show_branch_usage[] =
-"git-show-branch [--current] [--all] [--heads] [--tags] [--topo-order] [--more=count | --list | --independent | --merge-base ] [<refs>...]";
+"git-show-branch [--current] [--all] [--heads] [--tags] [--topo-order] [--more=count | --list | --independent | --merge-base ] [--topics] [<refs>...]";
static int default_num = 0;
static int default_alloc = 0;
@@ -535,6 +535,7 @@ int main(int ac, char **av)
int num_rev, i, extra = 0;
int all_heads = 0, all_tags = 0;
int all_mask, all_revs;
+ int lifo = 1;
char head_path[128];
const char *head_path_p;
int head_path_len;
@@ -544,9 +545,9 @@ int main(int ac, char **av)
int no_name = 0;
int sha1_name = 0;
int shown_merge_point = 0;
- int topo_order = 0;
int with_current_branch = 0;
int head_at = -1;
+ int topics = 0;
setup_git_directory();
git_config(git_show_branch_config);
@@ -586,7 +587,11 @@ int main(int ac, char **av)
else if (!strcmp(arg, "--independent"))
independent = 1;
else if (!strcmp(arg, "--topo-order"))
- topo_order = 1;
+ lifo = 1;
+ else if (!strcmp(arg, "--topics"))
+ topics = 1;
+ else if (!strcmp(arg, "--date-order"))
+ lifo = 0;
else
usage(show_branch_usage);
ac--; av++;
@@ -710,8 +715,7 @@ int main(int ac, char **av)
exit(0);
/* Sort topologically */
- if (topo_order)
- sort_in_topological_order(&seen);
+ sort_in_topological_order(&seen, lifo);
/* Give names to commits */
if (!sha1_name && !no_name)
@@ -723,11 +727,17 @@ int main(int ac, char **av)
while (seen) {
struct commit *commit = pop_one_commit(&seen);
int this_flag = commit->object.flags;
+ int is_merge_point = ((this_flag & all_revs) == all_revs);
- shown_merge_point |= ((this_flag & all_revs) == all_revs);
+ shown_merge_point |= is_merge_point;
if (1 < num_rev) {
int is_merge = !!(commit->parents && commit->parents->next);
+ if (topics &&
+ !is_merge_point &&
+ (this_flag & (1u << REV_SHIFT)))
+ continue;
+
for (i = 0; i < num_rev; i++) {
int mark;
if (!(this_flag & (1u << (i + REV_SHIFT))))
diff --git a/t/.gitignore b/t/.gitignore
new file mode 100644
index 0000000..fad67c0
--- /dev/null
+++ b/t/.gitignore
@@ -0,0 +1 @@
+trash
diff --git a/t/Makefile b/t/Makefile
index 5c5a620..fe65f53 100644
--- a/t/Makefile
+++ b/t/Makefile
@@ -8,17 +8,18 @@ SHELL_PATH ?= $(SHELL)
TAR ?= $(TAR)
# Shell quote;
-# Result of this needs to be placed inside ''
-shq = $(subst ','\'',$(1))
-# This has surrounding ''
-shellquote = '$(call shq,$(1))'
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+ifdef NO_PYTHON
+ GIT_TEST_OPTS += --no-python
+endif
+
all: $(T) clean
$(T):
- @echo "*** $@ ***"; $(call shellquote,$(SHELL_PATH)) $@ $(GIT_TEST_OPTS)
+ @echo "*** $@ ***"; '$(SHELL_PATH_SQ)' $@ $(GIT_TEST_OPTS)
clean:
rm -fr trash
diff --git a/t/annotate-tests.sh b/t/annotate-tests.sh
new file mode 100644
index 0000000..114938c
--- /dev/null
+++ b/t/annotate-tests.sh
@@ -0,0 +1,121 @@
+# This file isn't used as a test script directly; instead it is
+# sourced from t8001-annotate.sh and t8001-blame.sh.
+
+check_count () {
+ head=
+ case "$1" in -h) head="$2"; shift; shift ;; esac
+ $PROG file $head | perl -e '
+ my %expect = (@ARGV);
+ my %count = ();
+ while (<STDIN>) {
+ if (/^[0-9a-f]+\t\(([^\t]+)\t/) {
+ my $author = $1;
+ for ($author) { s/^\s*//; s/\s*$//; }
+ if (exists $expect{$author}) {
+ $count{$author}++;
+ }
+ }
+ }
+ my $bad = 0;
+ while (my ($author, $count) = each %count) {
+ my $ok;
+ if ($expect{$author} != $count) {
+ $bad = 1;
+ $ok = "bad";
+ }
+ else {
+ $ok = "good";
+ }
+ print STDERR "Author $author (expected $expect{$author}, attributed $count) $ok\n";
+ }
+ exit($bad);
+ ' "$@"
+}
+
+test_expect_success \
+ 'prepare reference tree' \
+ 'echo "1A quick brown fox jumps over the" >file &&
+ echo "lazy dog" >>file &&
+ git add file
+ GIT_AUTHOR_NAME="A" git commit -a -m "Initial."'
+
+test_expect_success \
+ 'check all lines blamed on A' \
+ 'check_count A 2'
+
+test_expect_success \
+ 'Setup new lines blamed on B' \
+ 'echo "2A quick brown fox jumps over the" >>file &&
+ echo "lazy dog" >> file &&
+ GIT_AUTHOR_NAME="B" git commit -a -m "Second."'
+
+test_expect_success \
+ 'Two lines blamed on A, two on B' \
+ 'check_count A 2 B 2'
+
+test_expect_success \
+ 'merge-setup part 1' \
+ 'git checkout -b branch1 master &&
+ echo "3A slow green fox jumps into the" >> file &&
+ echo "well." >> file &&
+ GIT_AUTHOR_NAME="B1" git commit -a -m "Branch1-1"'
+
+test_expect_success \
+ 'Two lines blamed on A, two on B, two on B1' \
+ 'check_count A 2 B 2 B1 2'
+
+test_expect_success \
+ 'merge-setup part 2' \
+ 'git checkout -b branch2 master &&
+ sed -e "s/2A quick brown/4A quick brown lazy dog/" < file > file.new &&
+ mv file.new file &&
+ GIT_AUTHOR_NAME="B2" git commit -a -m "Branch2-1"'
+
+test_expect_success \
+ 'Two lines blamed on A, one on B, one on B2' \
+ 'check_count A 2 B 1 B2 1'
+
+test_expect_success \
+ 'merge-setup part 3' \
+ 'git pull . branch1'
+
+test_expect_success \
+ 'Two lines blamed on A, one on B, two on B1, one on B2' \
+ 'check_count A 2 B 1 B1 2 B2 1'
+
+test_expect_success \
+ 'Annotating an old revision works' \
+ 'check_count -h master A 2 B 2'
+
+test_expect_success \
+ 'Annotating an old revision works' \
+ 'check_count -h master^ A 2'
+
+test_expect_success \
+ 'merge-setup part 4' \
+ 'echo "evil merge." >>file &&
+ EDITOR=: VISUAL=: git commit -a --amend'
+
+test_expect_success \
+ 'Two lines blamed on A, one on B, two on B1, one on B2, one on A U Thor' \
+ 'check_count A 2 B 1 B1 2 B2 1 "A U Thor" 1'
+
+test_expect_success \
+ 'an incomplete line added' \
+ 'echo "incomplete" | tr -d "\\012" >>file &&
+ GIT_AUTHOR_NAME="C" git commit -a -m "Incomplete"'
+
+test_expect_success \
+ 'With incomplete lines.' \
+ 'check_count A 2 B 1 B1 2 B2 1 "A U Thor" 1 C 1'
+
+test_expect_success \
+ 'some edit' \
+ 'mv file file1 &&
+ sed -e 1d -e "5s/3A/99/" file1 >file &&
+ rm -f file1 &&
+ GIT_AUTHOR_NAME="D" git commit -a -m "edit"'
+
+test_expect_success \
+ 'some edit' \
+ 'check_count A 1 B 1 B1 1 B2 1 "A U Thor" 1 C 1 D 1'
diff --git a/t/t0000-basic.sh b/t/t0000-basic.sh
index c339a36..6729a18 100755
--- a/t/t0000-basic.sh
+++ b/t/t0000-basic.sh
@@ -42,7 +42,7 @@ fi
. ./test-lib.sh
-"$PYTHON" -c 'import subprocess' || {
+test "$no_python" || "$PYTHON" -c 'import subprocess' || {
echo >&2 'Your python seems to lack "subprocess" module.
Please check INSTALL document.'
exit 1
diff --git a/t/t1200-tutorial.sh b/t/t1200-tutorial.sh
index c8a85f9..1002413 100755
--- a/t/t1200-tutorial.sh
+++ b/t/t1200-tutorial.sh
@@ -128,7 +128,7 @@ test_expect_success 'git show-branch' 'cmp show-branch.expect show-branch.output
git checkout mybranch
cat > resolve.expect << EOF
-Updating from VARIABLE to VARIABLE.
+Updating from VARIABLE to VARIABLE
example | 1 +
hello | 1 +
2 files changed, 2 insertions(+), 0 deletions(-)
diff --git a/t/t1300-repo-config.sh b/t/t1300-repo-config.sh
index 207dd3d..ab4dd5c 100755
--- a/t/t1300-repo-config.sh
+++ b/t/t1300-repo-config.sh
@@ -247,5 +247,13 @@ EOF
test_expect_success 'hierarchical section value' 'cmp .git/config expect'
+cat > .git/config << EOF
+[novalue]
+ variable
+EOF
+
+test_expect_success 'get variable with no value' \
+ 'git-repo-config --get novalue.variable ^$'
+
test_done
diff --git a/t/t2004-checkout-cache-temp.sh b/t/t2004-checkout-cache-temp.sh
new file mode 100755
index 0000000..c100959
--- /dev/null
+++ b/t/t2004-checkout-cache-temp.sh
@@ -0,0 +1,212 @@
+#!/bin/sh
+#
+# Copyright (c) 2006 Shawn Pearce
+#
+
+test_description='git-checkout-index --temp test.
+
+With --temp flag, git-checkout-index writes to temporary merge files
+rather than the tracked path.'
+
+. ./test-lib.sh
+
+test_expect_success \
+'preparation' '
+mkdir asubdir &&
+echo tree1path0 >path0 &&
+echo tree1path1 >path1 &&
+echo tree1path3 >path3 &&
+echo tree1path4 >path4 &&
+echo tree1asubdir/path5 >asubdir/path5 &&
+git-update-index --add path0 path1 path3 path4 asubdir/path5 &&
+t1=$(git-write-tree) &&
+rm -f path* .merge_* out .git/index &&
+echo tree2path0 >path0 &&
+echo tree2path1 >path1 &&
+echo tree2path2 >path2 &&
+echo tree2path4 >path4 &&
+git-update-index --add path0 path1 path2 path4 &&
+t2=$(git-write-tree) &&
+rm -f path* .merge_* out .git/index &&
+echo tree2path0 >path0 &&
+echo tree3path1 >path1 &&
+echo tree3path2 >path2 &&
+echo tree3path3 >path3 &&
+git-update-index --add path0 path1 path2 path3 &&
+t3=$(git-write-tree)'
+
+test_expect_success \
+'checkout one stage 0 to temporary file' '
+rm -f path* .merge_* out .git/index &&
+git-read-tree $t1 &&
+git-checkout-index --temp -- path1 >out &&
+test $(wc -l <out) = 1 &&
+test $(cut "-d " -f2 out) = path1 &&
+p=$(cut "-d " -f1 out) &&
+test -f $p &&
+test $(cat $p) = tree1path1'
+
+test_expect_success \
+'checkout all stage 0 to temporary files' '
+rm -f path* .merge_* out .git/index &&
+git-read-tree $t1 &&
+git-checkout-index -a --temp >out &&
+test $(wc -l <out) = 5 &&
+for f in path0 path1 path3 path4 asubdir/path5
+do
+ test $(grep $f out | cut "-d " -f2) = $f &&
+ p=$(grep $f out | cut "-d " -f1) &&
+ test -f $p &&
+ test $(cat $p) = tree1$f
+done'
+
+test_expect_success \
+'prepare 3-way merge' '
+rm -f path* .merge_* out .git/index &&
+git-read-tree -m $t1 $t2 $t3'
+
+test_expect_success \
+'checkout one stage 2 to temporary file' '
+rm -f path* .merge_* out &&
+git-checkout-index --stage=2 --temp -- path1 >out &&
+test $(wc -l <out) = 1 &&
+test $(cut "-d " -f2 out) = path1 &&
+p=$(cut "-d " -f1 out) &&
+test -f $p &&
+test $(cat $p) = tree2path1'
+
+test_expect_success \
+'checkout all stage 2 to temporary files' '
+rm -f path* .merge_* out &&
+git-checkout-index --all --stage=2 --temp >out &&
+test $(wc -l <out) = 3 &&
+for f in path1 path2 path4
+do
+ test $(grep $f out | cut "-d " -f2) = $f &&
+ p=$(grep $f out | cut "-d " -f1) &&
+ test -f $p &&
+ test $(cat $p) = tree2$f
+done'
+
+test_expect_success \
+'checkout all stages/one file to nothing' '
+rm -f path* .merge_* out &&
+git-checkout-index --stage=all --temp -- path0 >out &&
+test $(wc -l <out) = 0'
+
+test_expect_success \
+'checkout all stages/one file to temporary files' '
+rm -f path* .merge_* out &&
+git-checkout-index --stage=all --temp -- path1 >out &&
+test $(wc -l <out) = 1 &&
+test $(cut "-d " -f2 out) = path1 &&
+cut "-d " -f1 out | (read s1 s2 s3 &&
+test -f $s1 &&
+test -f $s2 &&
+test -f $s3 &&
+test $(cat $s1) = tree1path1 &&
+test $(cat $s2) = tree2path1 &&
+test $(cat $s3) = tree3path1)'
+
+test_expect_success \
+'checkout some stages/one file to temporary files' '
+rm -f path* .merge_* out &&
+git-checkout-index --stage=all --temp -- path2 >out &&
+test $(wc -l <out) = 1 &&
+test $(cut "-d " -f2 out) = path2 &&
+cut "-d " -f1 out | (read s1 s2 s3 &&
+test $s1 = . &&
+test -f $s2 &&
+test -f $s3 &&
+test $(cat $s2) = tree2path2 &&
+test $(cat $s3) = tree3path2)'
+
+test_expect_success \
+'checkout all stages/all files to temporary files' '
+rm -f path* .merge_* out &&
+git-checkout-index -a --stage=all --temp >out &&
+test $(wc -l <out) = 5'
+
+test_expect_success \
+'-- path0: no entry' '
+test x$(grep path0 out | cut "-d " -f2) = x'
+
+test_expect_success \
+'-- path1: all 3 stages' '
+test $(grep path1 out | cut "-d " -f2) = path1 &&
+grep path1 out | cut "-d " -f1 | (read s1 s2 s3 &&
+test -f $s1 &&
+test -f $s2 &&
+test -f $s3 &&
+test $(cat $s1) = tree1path1 &&
+test $(cat $s2) = tree2path1 &&
+test $(cat $s3) = tree3path1)'
+
+test_expect_success \
+'-- path2: no stage 1, have stage 2 and 3' '
+test $(grep path2 out | cut "-d " -f2) = path2 &&
+grep path2 out | cut "-d " -f1 | (read s1 s2 s3 &&
+test $s1 = . &&
+test -f $s2 &&
+test -f $s3 &&
+test $(cat $s2) = tree2path2 &&
+test $(cat $s3) = tree3path2)'
+
+test_expect_success \
+'-- path3: no stage 2, have stage 1 and 3' '
+test $(grep path3 out | cut "-d " -f2) = path3 &&
+grep path3 out | cut "-d " -f1 | (read s1 s2 s3 &&
+test -f $s1 &&
+test $s2 = . &&
+test -f $s3 &&
+test $(cat $s1) = tree1path3 &&
+test $(cat $s3) = tree3path3)'
+
+test_expect_success \
+'-- path4: no stage 3, have stage 1 and 2' '
+test $(grep path4 out | cut "-d " -f2) = path4 &&
+grep path4 out | cut "-d " -f1 | (read s1 s2 s3 &&
+test -f $s1 &&
+test -f $s2 &&
+test $s3 = . &&
+test $(cat $s1) = tree1path4 &&
+test $(cat $s2) = tree2path4)'
+
+test_expect_success \
+'-- asubdir/path5: no stage 2 and 3, have stage 1' '
+test $(grep asubdir/path5 out | cut "-d " -f2) = asubdir/path5 &&
+grep asubdir/path5 out | cut "-d " -f1 | (read s1 s2 s3 &&
+test -f $s1 &&
+test $s2 = . &&
+test $s3 = . &&
+test $(cat $s1) = tree1asubdir/path5)'
+
+test_expect_success \
+'checkout --temp within subdir' '
+(cd asubdir &&
+ git-checkout-index -a --stage=all >out &&
+ test $(wc -l <out) = 1 &&
+ test $(grep path5 out | cut "-d " -f2) = path5 &&
+ grep path5 out | cut "-d " -f1 | (read s1 s2 s3 &&
+ test -f ../$s1 &&
+ test $s2 = . &&
+ test $s3 = . &&
+ test $(cat ../$s1) = tree1asubdir/path5)
+)'
+
+test_expect_success \
+'checkout --temp symlink' '
+rm -f path* .merge_* out .git/index &&
+ln -s b a &&
+git-update-index --add a &&
+t4=$(git-write-tree) &&
+rm -f .git/index &&
+git-read-tree $t4 &&
+git-checkout-index --temp -a >out &&
+test $(wc -l <out) = 1 &&
+test $(cut "-d " -f2 out) = a &&
+p=$(cut "-d " -f1 out) &&
+test -f $p &&
+test $(cat $p) = b'
+
+test_done
diff --git a/t/t3020-ls-files-error-unmatch.sh b/t/t3020-ls-files-error-unmatch.sh
new file mode 100755
index 0000000..d55559e
--- /dev/null
+++ b/t/t3020-ls-files-error-unmatch.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+#
+# Copyright (c) 2006 Carl D. Worth
+#
+
+test_description='git-ls-files test for --error-unmatch option
+
+This test runs git-ls-files --error-unmatch to ensure it correctly
+returns an error when a non-existent path is provided on the command
+line.
+'
+. ./test-lib.sh
+
+touch foo bar
+git-update-index --add foo bar
+git-commit -m "add foo bar"
+
+test_expect_failure \
+ 'git-ls-files --error-unmatch should fail with unmatched path.' \
+ 'git-ls-files --error-unmatch foo bar-does-not-match'
+
+test_expect_success \
+ 'git-ls-files --error-unmatch should succeed with matched paths.' \
+ 'git-ls-files --error-unmatch foo bar'
+
+test_done
+1
diff --git a/t/t3600-rm.sh b/t/t3600-rm.sh
new file mode 100755
index 0000000..d1947e1
--- /dev/null
+++ b/t/t3600-rm.sh
@@ -0,0 +1,71 @@
+#!/bin/sh
+#
+# Copyright (c) 2006 Carl D. Worth
+#
+
+test_description='Test of the various options to git-rm.'
+
+. ./test-lib.sh
+
+# Setup some files to be removed, some with funny characters
+touch -- foo bar baz 'space embedded' -q
+git-add -- foo bar baz 'space embedded' -q
+git-commit -m "add normal files"
+test_tabs=y
+if touch -- 'tab embedded' 'newline
+embedded'
+then
+git-add -- 'tab embedded' 'newline
+embedded'
+git-commit -m "add files with tabs and newlines"
+else
+ say 'Your filesystem does not allow tabs in filenames.'
+ test_tabs=n
+fi
+
+test_expect_success \
+ 'Pre-check that foo exists and is in index before git-rm foo' \
+ '[ -f foo ] && git-ls-files --error-unmatch foo'
+
+test_expect_success \
+ 'Test that git-rm foo succeeds' \
+ 'git-rm foo'
+
+test_expect_success \
+ 'Post-check that foo exists but is not in index after git-rm foo' \
+ '[ -f foo ] && ! git-ls-files --error-unmatch foo'
+
+test_expect_success \
+ 'Pre-check that bar exists and is in index before "git-rm -f bar"' \
+ '[ -f bar ] && git-ls-files --error-unmatch bar'
+
+test_expect_success \
+ 'Test that "git-rm -f bar" succeeds' \
+ 'git-rm -f bar'
+
+test_expect_success \
+ 'Post-check that bar does not exist and is not in index after "git-rm -f bar"' \
+ '! [ -f bar ] && ! git-ls-files --error-unmatch bar'
+
+test_expect_success \
+ 'Test that "git-rm -- -q" succeeds (remove a file that looks like an option)' \
+ 'git-rm -- -q'
+
+test "$test_tabs" = y && test_expect_success \
+ "Test that \"git-rm -f\" succeeds with embedded space, tab, or newline characters." \
+ "git-rm -f 'space embedded' 'tab embedded' 'newline
+embedded'"
+
+if test "$test_tabs" = y; then
+chmod u-w .
+test_expect_failure \
+ 'Test that "git-rm -f" fails if its rm fails' \
+ 'git-rm -f baz'
+chmod u+w .
+fi
+
+test_expect_success \
+ 'When the rm in "git-rm -f" fails, it should not remove the file from the index' \
+ 'git-ls-files --error-unmatch baz'
+
+test_done
diff --git a/t/t5000-tar-tree.sh b/t/t5000-tar-tree.sh
index adc5e93..278eb66 100755
--- a/t/t5000-tar-tree.sh
+++ b/t/t5000-tar-tree.sh
@@ -34,6 +34,9 @@ test_expect_success \
mkdir a/bin &&
cp /bin/sh a/bin &&
ln -s a a/l1 &&
+ (p=long_path_to_a_file && cd a &&
+ for depth in 1 2 3 4 5; do mkdir $p && cd $p; done &&
+ echo text >file_with_long_path) &&
(cd a && find .) | sort >a.lst'
test_expect_success \
diff --git a/t/t5600-clone-fail-cleanup.sh b/t/t5600-clone-fail-cleanup.sh
new file mode 100755
index 0000000..0c6a363
--- /dev/null
+++ b/t/t5600-clone-fail-cleanup.sh
@@ -0,0 +1,36 @@
+#!/bin/sh
+#
+# Copyright (C) 2006 Carl D. Worth <cworth@cworth.org>
+#
+
+test_description='test git-clone to cleanup after failure
+
+This test covers the fact that if git-clone fails, it should remove
+the directory it created, to avoid the user having to manually
+remove the directory before attempting a clone again.'
+
+. ./test-lib.sh
+
+test_expect_failure \
+ 'clone of non-existent source should fail' \
+ 'git-clone foo bar'
+
+test_expect_failure \
+ 'failed clone should not leave a directory' \
+ 'cd bar'
+
+# Need a repo to clone
+test_create_repo foo
+
+# clone doesn't like it if there is no HEAD. Is that a bug?
+(cd foo && touch file && git add file && git commit -m 'add file' >/dev/null 2>&1)
+
+test_expect_success \
+ 'clone should work now that source exists' \
+ 'git-clone foo bar'
+
+test_expect_success \
+ 'successful clone must leave the directory' \
+ 'cd bar'
+
+test_done
diff --git a/t/t6001-rev-list-merge-order.sh b/t/t6001-rev-list-merge-order.sh
deleted file mode 100755
index 7724e8a..0000000
--- a/t/t6001-rev-list-merge-order.sh
+++ /dev/null
@@ -1,462 +0,0 @@
-#!/bin/sh
-#
-# Copyright (c) 2005 Jon Seymour
-#
-
-test_description='Tests git-rev-list --merge-order functionality'
-
-. ./test-lib.sh
-. ../t6000lib.sh # t6xxx specific functions
-
-# test-case specific test function
-check_adjacency()
-{
- read previous
- echo "= $previous"
- while read next
- do
- if ! (git-cat-file commit $previous | grep "^parent $next" >/dev/null)
- then
- echo "^ $next"
- else
- echo "| $next"
- fi
- previous=$next
- done
-}
-
-list_duplicates()
-{
- "$@" | sort | uniq -d
-}
-
-grep_stderr()
-{
- args=$1
- shift 1
- "$@" 2>&1 | grep "$args"
-}
-
-date >path0
-git-update-index --add path0
-save_tag tree git-write-tree
-on_committer_date "1971-08-16 00:00:00" hide_error save_tag root unique_commit root tree
-on_committer_date "1971-08-16 00:00:01" save_tag l0 unique_commit l0 tree -p root
-on_committer_date "1971-08-16 00:00:02" save_tag l1 unique_commit l1 tree -p l0
-on_committer_date "1971-08-16 00:00:03" save_tag l2 unique_commit l2 tree -p l1
-on_committer_date "1971-08-16 00:00:04" save_tag a0 unique_commit a0 tree -p l2
-on_committer_date "1971-08-16 00:00:05" save_tag a1 unique_commit a1 tree -p a0
-on_committer_date "1971-08-16 00:00:06" save_tag b1 unique_commit b1 tree -p a0
-on_committer_date "1971-08-16 00:00:07" save_tag c1 unique_commit c1 tree -p b1
-on_committer_date "1971-08-16 00:00:08" as_author foobar@example.com save_tag b2 unique_commit b2 tree -p b1
-on_committer_date "1971-08-16 00:00:09" save_tag b3 unique_commit b2 tree -p b2
-on_committer_date "1971-08-16 00:00:10" save_tag c2 unique_commit c2 tree -p c1 -p b2
-on_committer_date "1971-08-16 00:00:11" save_tag c3 unique_commit c3 tree -p c2
-on_committer_date "1971-08-16 00:00:12" save_tag a2 unique_commit a2 tree -p a1
-on_committer_date "1971-08-16 00:00:13" save_tag a3 unique_commit a3 tree -p a2
-on_committer_date "1971-08-16 00:00:14" save_tag b4 unique_commit b4 tree -p b3 -p a3
-on_committer_date "1971-08-16 00:00:15" save_tag a4 unique_commit a4 tree -p a3 -p b4 -p c3
-on_committer_date "1971-08-16 00:00:16" save_tag l3 unique_commit l3 tree -p a4
-on_committer_date "1971-08-16 00:00:17" save_tag l4 unique_commit l4 tree -p l3
-on_committer_date "1971-08-16 00:00:18" save_tag l5 unique_commit l5 tree -p l4
-on_committer_date "1971-08-16 00:00:19" save_tag m1 unique_commit m1 tree -p a4 -p c3
-on_committer_date "1971-08-16 00:00:20" save_tag m2 unique_commit m2 tree -p c3 -p a4
-on_committer_date "1971-08-16 00:00:21" hide_error save_tag alt_root unique_commit alt_root tree
-on_committer_date "1971-08-16 00:00:22" save_tag r0 unique_commit r0 tree -p alt_root
-on_committer_date "1971-08-16 00:00:23" save_tag r1 unique_commit r1 tree -p r0
-on_committer_date "1971-08-16 00:00:24" save_tag l5r1 unique_commit l5r1 tree -p l5 -p r1
-on_committer_date "1971-08-16 00:00:25" save_tag r1l5 unique_commit r1l5 tree -p r1 -p l5
-
-
-#
-# note: as of 20/6, it isn't possible to create duplicate parents, so this
-# can't be tested.
-#
-#on_committer_date "1971-08-16 00:00:20" save_tag m3 unique_commit m3 tree -p c3 -p a4 -p c3
-hide_error save_tag e1 as_author e@example.com unique_commit e1 tree
-save_tag e2 as_author e@example.com unique_commit e2 tree -p e1
-save_tag f1 as_author f@example.com unique_commit f1 tree -p e1
-save_tag e3 as_author e@example.com unique_commit e3 tree -p e2
-save_tag f2 as_author f@example.com unique_commit f2 tree -p f1
-save_tag e4 as_author e@example.com unique_commit e4 tree -p e3 -p f2
-save_tag e5 as_author e@example.com unique_commit e5 tree -p e4
-save_tag f3 as_author f@example.com unique_commit f3 tree -p f2
-save_tag f4 as_author f@example.com unique_commit f4 tree -p f3
-save_tag e6 as_author e@example.com unique_commit e6 tree -p e5 -p f4
-save_tag f5 as_author f@example.com unique_commit f5 tree -p f4
-save_tag f6 as_author f@example.com unique_commit f6 tree -p f5 -p e6
-save_tag e7 as_author e@example.com unique_commit e7 tree -p e6
-save_tag e8 as_author e@example.com unique_commit e8 tree -p e7
-save_tag e9 as_author e@example.com unique_commit e9 tree -p e8
-save_tag f7 as_author f@example.com unique_commit f7 tree -p f6
-save_tag f8 as_author f@example.com unique_commit f8 tree -p f7
-save_tag f9 as_author f@example.com unique_commit f9 tree -p f8
-save_tag e10 as_author e@example.com unique_commit e1 tree -p e9 -p f8
-
-hide_error save_tag g0 unique_commit g0 tree
-save_tag g1 unique_commit g1 tree -p g0
-save_tag h1 unique_commit g2 tree -p g0
-save_tag g2 unique_commit g3 tree -p g1 -p h1
-save_tag h2 unique_commit g4 tree -p g2
-save_tag g3 unique_commit g5 tree -p g2
-save_tag g4 unique_commit g6 tree -p g3 -p h2
-
-git-update-ref HEAD $(tag l5)
-
-test_output_expect_success 'rev-list has correct number of entries' 'git-rev-list HEAD | wc -l | tr -d \" \"' <<EOF
-19
-EOF
-
-if git-rev-list --merge-order HEAD 2>&1 | grep 'OpenSSL not linked' >/dev/null
-then
- test_expect_success 'skipping merge-order test' :
- test_done
- exit
-fi
-
-normal_adjacency_count=$(git-rev-list HEAD | check_adjacency | grep -c "\^" | tr -d ' ')
-merge_order_adjacency_count=$(git-rev-list --merge-order HEAD | check_adjacency | grep -c "\^" | tr -d ' ')
-test_expect_success '--merge-order produces as many or fewer discontinuities' '[ $merge_order_adjacency_count -le $normal_adjacency_count ]'
-test_output_expect_success 'simple merge order' 'git-rev-list --merge-order --show-breaks HEAD' <<EOF
-= l5
-| l4
-| l3
-= a4
-| c3
-| c2
-| c1
-^ b4
-| b3
-| b2
-| b1
-^ a3
-| a2
-| a1
-= a0
-| l2
-| l1
-| l0
-= root
-EOF
-
-test_output_expect_success 'two diamonds merge order (g6)' 'git-rev-list --merge-order --show-breaks g4' <<EOF
-= g4
-| h2
-^ g3
-= g2
-| h1
-^ g1
-= g0
-EOF
-
-test_output_expect_success 'multiple heads' 'git-rev-list --merge-order a3 b3 c3' <<EOF
-c3
-c2
-c1
-b3
-b2
-b1
-a3
-a2
-a1
-a0
-l2
-l1
-l0
-root
-EOF
-
-test_output_expect_success 'multiple heads, prune at a1' 'git-rev-list --merge-order a3 b3 c3 ^a1' <<EOF
-c3
-c2
-c1
-b3
-b2
-b1
-a3
-a2
-EOF
-
-test_output_expect_success 'multiple heads, prune at l1' 'git-rev-list --merge-order a3 b3 c3 ^l1' <<EOF
-c3
-c2
-c1
-b3
-b2
-b1
-a3
-a2
-a1
-a0
-l2
-EOF
-
-test_output_expect_success 'cross-epoch, head at l5, prune at l1' 'git-rev-list --merge-order l5 ^l1' <<EOF
-l5
-l4
-l3
-a4
-c3
-c2
-c1
-b4
-b3
-b2
-b1
-a3
-a2
-a1
-a0
-l2
-EOF
-
-test_output_expect_success 'duplicated head arguments' 'git-rev-list --merge-order l5 l5 ^l1' <<EOF
-l5
-l4
-l3
-a4
-c3
-c2
-c1
-b4
-b3
-b2
-b1
-a3
-a2
-a1
-a0
-l2
-EOF
-
-test_output_expect_success 'prune near merge' 'git-rev-list --merge-order a4 ^c3' <<EOF
-a4
-b4
-b3
-a3
-a2
-a1
-EOF
-
-test_output_expect_success "head has no parent" 'git-rev-list --merge-order --show-breaks root' <<EOF
-= root
-EOF
-
-test_output_expect_success "two nodes - one head, one base" 'git-rev-list --merge-order --show-breaks l0' <<EOF
-= l0
-= root
-EOF
-
-test_output_expect_success "three nodes one head, one internal, one base" 'git-rev-list --merge-order --show-breaks l1' <<EOF
-= l1
-| l0
-= root
-EOF
-
-test_output_expect_success "linear prune l2 ^root" 'git-rev-list --merge-order --show-breaks l2 ^root' <<EOF
-^ l2
-| l1
-| l0
-EOF
-
-test_output_expect_success "linear prune l2 ^l0" 'git-rev-list --merge-order --show-breaks l2 ^l0' <<EOF
-^ l2
-| l1
-EOF
-
-test_output_expect_success "linear prune l2 ^l1" 'git-rev-list --merge-order --show-breaks l2 ^l1' <<EOF
-^ l2
-EOF
-
-test_output_expect_success "linear prune l5 ^a4" 'git-rev-list --merge-order --show-breaks l5 ^a4' <<EOF
-^ l5
-| l4
-| l3
-EOF
-
-test_output_expect_success "linear prune l5 ^l3" 'git-rev-list --merge-order --show-breaks l5 ^l3' <<EOF
-^ l5
-| l4
-EOF
-
-test_output_expect_success "linear prune l5 ^l4" 'git-rev-list --merge-order --show-breaks l5 ^l4' <<EOF
-^ l5
-EOF
-
-test_output_expect_success "max-count 10 - merge order" 'git-rev-list --merge-order --show-breaks --max-count=10 l5' <<EOF
-= l5
-| l4
-| l3
-= a4
-| c3
-| c2
-| c1
-^ b4
-| b3
-| b2
-EOF
-
-test_output_expect_success "max-count 10 - non merge order" 'git-rev-list --max-count=10 l5' <<EOF
-l5
-l4
-l3
-a4
-b4
-a3
-a2
-c3
-c2
-b3
-EOF
-
-test_output_expect_success '--max-age=c3, no --merge-order' "git-rev-list --max-age=$(commit_date c3) l5" <<EOF
-l5
-l4
-l3
-a4
-b4
-a3
-a2
-c3
-EOF
-
-test_output_expect_success '--max-age=c3, --merge-order' "git-rev-list --merge-order --max-age=$(commit_date c3) l5" <<EOF
-l5
-l4
-l3
-a4
-c3
-b4
-a3
-a2
-EOF
-
-test_output_expect_success 'one specified head reachable from another a4, c3, --merge-order' "list_duplicates git-rev-list --merge-order a4 c3" <<EOF
-EOF
-
-test_output_expect_success 'one specified head reachable from another c3, a4, --merge-order' "list_duplicates git-rev-list --merge-order c3 a4" <<EOF
-EOF
-
-test_output_expect_success 'one specified head reachable from another a4, c3, no --merge-order' "list_duplicates git-rev-list a4 c3" <<EOF
-EOF
-
-test_output_expect_success 'one specified head reachable from another c3, a4, no --merge-order' "list_duplicates git-rev-list c3 a4" <<EOF
-EOF
-
-test_output_expect_success 'graph with c3 and a4 parents of head' "list_duplicates git-rev-list m1" <<EOF
-EOF
-
-test_output_expect_success 'graph with a4 and c3 parents of head' "list_duplicates git-rev-list m2" <<EOF
-EOF
-
-test_expect_success "head ^head --merge-order" 'git-rev-list --merge-order --show-breaks a3 ^a3' <<EOF
-EOF
-
-#
-# can't test this now - duplicate parents can't be created
-#
-#test_output_expect_success 'duplicate parents' 'git-rev-list --parents --merge-order --show-breaks m3' <<EOF
-#= m3 c3 a4 c3
-#| a4 c3 b4 a3
-#| b4 a3 b3
-#| b3 b2
-#^ a3 a2
-#| a2 a1
-#| a1 a0
-#^ c3 c2
-#| c2 b2 c1
-#| b2 b1
-#^ c1 b1
-#| b1 a0
-#= a0 l2
-#| l2 l1
-#| l1 l0
-#| l0 root
-#= root
-#EOF
-
-test_expect_success "head ^head no --merge-order" 'git-rev-list a3 ^a3' <<EOF
-EOF
-
-test_output_expect_success 'simple merge order (l5r1)' 'git-rev-list --merge-order --show-breaks l5r1' <<EOF
-= l5r1
-| r1
-| r0
-| alt_root
-^ l5
-| l4
-| l3
-| a4
-| c3
-| c2
-| c1
-^ b4
-| b3
-| b2
-| b1
-^ a3
-| a2
-| a1
-| a0
-| l2
-| l1
-| l0
-= root
-EOF
-
-test_output_expect_success 'simple merge order (r1l5)' 'git-rev-list --merge-order --show-breaks r1l5' <<EOF
-= r1l5
-| l5
-| l4
-| l3
-| a4
-| c3
-| c2
-| c1
-^ b4
-| b3
-| b2
-| b1
-^ a3
-| a2
-| a1
-| a0
-| l2
-| l1
-| l0
-| root
-^ r1
-| r0
-= alt_root
-EOF
-
-test_output_expect_success "don't print things unreachable from one branch" "git-rev-list a3 ^b3 --merge-order" <<EOF
-a3
-a2
-a1
-EOF
-
-test_output_expect_success "--merge-order a4 l3" "git-rev-list --merge-order a4 l3" <<EOF
-l3
-a4
-c3
-c2
-c1
-b4
-b3
-b2
-b1
-a3
-a2
-a1
-a0
-l2
-l1
-l0
-root
-EOF
-
-#
-#
-
-test_done
diff --git a/t/t6021-merge-criss-cross.sh b/t/t6021-merge-criss-cross.sh
index e8606c7..2623813 100755
--- a/t/t6021-merge-criss-cross.sh
+++ b/t/t6021-merge-criss-cross.sh
@@ -10,6 +10,12 @@
test_description='Test criss-cross merge'
. ./test-lib.sh
+if test "$no_python"; then
+ echo "Skipping: no python => no recursive merge"
+ test_done
+ exit 0
+fi
+
test_expect_success 'prepare repository' \
'echo "1
2
diff --git a/t/t6022-merge-rename.sh b/t/t6022-merge-rename.sh
index 1292caf..a2d24b5 100755
--- a/t/t6022-merge-rename.sh
+++ b/t/t6022-merge-rename.sh
@@ -3,6 +3,12 @@
test_description='Merge-recursive merging renames'
. ./test-lib.sh
+if test "$no_python"; then
+ echo "Skipping: no python => no recursive merge"
+ test_done
+ exit 0
+fi
+
test_expect_success setup \
'
cat >A <<\EOF &&
diff --git a/t/t7001-mv.sh b/t/t7001-mv.sh
index 43d74c5..811a479 100755
--- a/t/t7001-mv.sh
+++ b/t/t7001-mv.sh
@@ -11,17 +11,31 @@ test_expect_success \
git-commit -m add -a'
test_expect_success \
- 'moving the file' \
+ 'moving the file out of the subdirectory' \
'cd path0 && git-mv COPYING ../path1/COPYING'
# in path0 currently
test_expect_success \
'commiting the change' \
- 'cd .. && git-commit -m move -a'
+ 'cd .. && git-commit -m move-out -a'
test_expect_success \
'checking the commit' \
'git-diff-tree -r -M --name-status HEAD^ HEAD | \
grep -E "^R100.+path0/COPYING.+path1/COPYING"'
+test_expect_success \
+ 'moving the file back into the subdirectory' \
+ 'cd path0 && git-mv ../path1/COPYING COPYING'
+
+# in path0 currently
+test_expect_success \
+ 'committing the change' \
+ 'cd .. && git-commit -m move-in -a'
+
+test_expect_success \
+ 'checking the commit' \
+ 'git-diff-tree -r -M --name-status HEAD^ HEAD | \
+ grep -E "^R100.+path1/COPYING.+path0/COPYING"'
+
test_done
diff --git a/t/t8001-annotate.sh b/t/t8001-annotate.sh
new file mode 100755
index 0000000..2496397
--- /dev/null
+++ b/t/t8001-annotate.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+test_description='git-annotate'
+. ./test-lib.sh
+
+PROG='git annotate'
+. ../annotate-tests.sh
+
+test_done
diff --git a/t/t8002-blame.sh b/t/t8002-blame.sh
new file mode 100755
index 0000000..9777393
--- /dev/null
+++ b/t/t8002-blame.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+test_description='git-blame'
+. ./test-lib.sh
+
+PROG='git blame -c'
+. ../annotate-tests.sh
+
+test_done
diff --git a/t/test-lib.sh b/t/test-lib.sh
index 66f62b9..05f6e79 100755
--- a/t/test-lib.sh
+++ b/t/test-lib.sh
@@ -63,6 +63,8 @@ do
exit 0 ;;
-v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose)
verbose=t; shift ;;
+ --no-python)
+ no_python=t; shift ;;
*)
break ;;
esac
diff --git a/tag.c b/tag.c
index ac0e573..f390ee7 100644
--- a/tag.c
+++ b/tag.c
@@ -19,8 +19,7 @@ struct tag *lookup_tag(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
if (!obj) {
- struct tag *ret = xmalloc(sizeof(struct tag));
- memset(ret, 0, sizeof(struct tag));
+ struct tag *ret = xcalloc(1, sizeof(struct tag));
created_object(sha1, &ret->object);
ret->object.type = tag_type;
return ret;
diff --git a/tar-tree.c b/tar-tree.c
index e478e13..fc60a90 100644
--- a/tar-tree.c
+++ b/tar-tree.c
@@ -1,37 +1,23 @@
/*
- * Copyright (c) 2005 Rene Scharfe
+ * Copyright (c) 2005, 2006 Rene Scharfe
*/
#include <time.h>
#include "cache.h"
-#include "diff.h"
+#include "tree-walk.h"
#include "commit.h"
+#include "strbuf.h"
+#include "tar.h"
#define RECORDSIZE (512)
#define BLOCKSIZE (RECORDSIZE * 20)
-#define TYPEFLAG_AUTO '\0'
-#define TYPEFLAG_REG '0'
-#define TYPEFLAG_LNK '2'
-#define TYPEFLAG_DIR '5'
-#define TYPEFLAG_GLOBAL_HEADER 'g'
-#define TYPEFLAG_EXT_HEADER 'x'
-
-#define EXT_HEADER_PATH 1
-#define EXT_HEADER_LINKPATH 2
-
static const char tar_tree_usage[] = "git-tar-tree <key> [basedir]";
static char block[BLOCKSIZE];
static unsigned long offset;
-static const char *basedir;
static time_t archive_time;
-struct path_prefix {
- struct path_prefix *prev;
- const char *name;
-};
-
/* tries hard to write, either succeeds or dies in the attempt */
static void reliable_write(void *buf, unsigned long size)
{
@@ -108,7 +94,6 @@ static void write_blocked(void *buf, unsigned long size)
}
if (size) {
memcpy(block + offset, buf, size);
- buf += size;
offset += size;
}
tail = offset % RECORDSIZE;
@@ -119,230 +104,170 @@ static void write_blocked(void *buf, unsigned long size)
write_if_needed();
}
-static void append_string(char **p, const char *s)
-{
- unsigned int len = strlen(s);
- memcpy(*p, s, len);
- *p += len;
-}
-
-static void append_char(char **p, char c)
-{
- **p = c;
- *p += 1;
-}
-
-static void append_path_prefix(char **buffer, struct path_prefix *prefix)
+static void strbuf_append_string(struct strbuf *sb, const char *s)
{
- if (!prefix)
- return;
- append_path_prefix(buffer, prefix->prev);
- append_string(buffer, prefix->name);
- append_char(buffer, '/');
-}
-
-static unsigned int path_prefix_len(struct path_prefix *prefix)
-{
- if (!prefix)
- return 0;
- return path_prefix_len(prefix->prev) + strlen(prefix->name) + 1;
-}
-
-static void append_path(char **p, int is_dir, const char *basepath,
- struct path_prefix *prefix, const char *path)
-{
- if (basepath) {
- append_string(p, basepath);
- append_char(p, '/');
+ int slen = strlen(s);
+ int total = sb->len + slen;
+ if (total > sb->alloc) {
+ sb->buf = xrealloc(sb->buf, total);
+ sb->alloc = total;
}
- append_path_prefix(p, prefix);
- append_string(p, path);
- if (is_dir)
- append_char(p, '/');
+ memcpy(sb->buf + sb->len, s, slen);
+ sb->len = total;
}
-static unsigned int path_len(int is_dir, const char *basepath,
- struct path_prefix *prefix, const char *path)
-{
- unsigned int len = 0;
- if (basepath)
- len += strlen(basepath) + 1;
- len += path_prefix_len(prefix) + strlen(path);
- if (is_dir)
- len++;
- return len;
-}
-
-static void append_extended_header_prefix(char **p, unsigned int size,
- const char *keyword)
+/*
+ * pax extended header records have the format "%u %s=%s\n". %u contains
+ * the size of the whole string (including the %u), the first %s is the
+ * keyword, the second one is the value. This function constructs such a
+ * string and appends it to a struct strbuf.
+ */
+static void strbuf_append_ext_header(struct strbuf *sb, const char *keyword,
+ const char *value, unsigned int valuelen)
{
- int len = sprintf(*p, "%u %s=", size, keyword);
- *p += len;
-}
+ char *p;
+ int len, total, tmp;
-static unsigned int extended_header_len(const char *keyword,
- unsigned int valuelen)
-{
/* "%u %s=%s\n" */
- unsigned int len = 1 + 1 + strlen(keyword) + 1 + valuelen + 1;
- if (len > 9)
- len++;
- if (len > 99)
+ len = 1 + 1 + strlen(keyword) + 1 + valuelen + 1;
+ for (tmp = len; tmp > 9; tmp /= 10)
len++;
- return len;
-}
-static void append_extended_header(char **p, const char *keyword,
- const char *value, unsigned int len)
-{
- unsigned int size = extended_header_len(keyword, len);
- append_extended_header_prefix(p, size, keyword);
- memcpy(*p, value, len);
- *p += len;
- append_char(p, '\n');
-}
+ total = sb->len + len;
+ if (total > sb->alloc) {
+ sb->buf = xrealloc(sb->buf, total);
+ sb->alloc = total;
+ }
-static void write_header(const unsigned char *, char, const char *, struct path_prefix *,
- const char *, unsigned int, void *, unsigned long);
+ p = sb->buf;
+ p += sprintf(p, "%u %s=", len, keyword);
+ memcpy(p, value, valuelen);
+ p += valuelen;
+ *p = '\n';
+ sb->len = total;
+}
-/* stores a pax extended header directly in the block buffer */
-static void write_extended_header(const char *headerfilename, int is_dir,
- unsigned int flags, const char *basepath,
- struct path_prefix *prefix,
- const char *path, unsigned int namelen,
- void *content, unsigned int contentsize)
+static unsigned int ustar_header_chksum(const struct ustar_header *header)
{
- char *buffer, *p;
- unsigned int pathlen, size, linkpathlen = 0;
-
- size = pathlen = extended_header_len("path", namelen);
- if (flags & EXT_HEADER_LINKPATH) {
- linkpathlen = extended_header_len("linkpath", contentsize);
- size += linkpathlen;
- }
- write_header(NULL, TYPEFLAG_EXT_HEADER, NULL, NULL, headerfilename,
- 0100600, NULL, size);
-
- buffer = p = malloc(size);
- if (!buffer)
- die("git-tar-tree: %s", strerror(errno));
- append_extended_header_prefix(&p, pathlen, "path");
- append_path(&p, is_dir, basepath, prefix, path);
- append_char(&p, '\n');
- if (flags & EXT_HEADER_LINKPATH)
- append_extended_header(&p, "linkpath", content, contentsize);
- write_blocked(buffer, size);
- free(buffer);
+ char *p = (char *)header;
+ unsigned int chksum = 0;
+ while (p < header->chksum)
+ chksum += *p++;
+ chksum += sizeof(header->chksum) * ' ';
+ p += sizeof(header->chksum);
+ while (p < (char *)header + sizeof(struct ustar_header))
+ chksum += *p++;
+ return chksum;
}
-static void write_global_extended_header(const unsigned char *sha1)
+static int get_path_prefix(const struct strbuf *path, int maxlen)
{
- char *p;
- unsigned int size;
-
- size = extended_header_len("comment", 40);
- write_header(NULL, TYPEFLAG_GLOBAL_HEADER, NULL, NULL,
- "pax_global_header", 0100600, NULL, size);
-
- p = get_record();
- append_extended_header(&p, "comment", sha1_to_hex(sha1), 40);
- write_if_needed();
+ int i = path->len;
+ if (i > maxlen)
+ i = maxlen;
+ while (i > 0 && path->buf[i] != '/')
+ i--;
+ return i;
}
-/* stores a ustar header directly in the block buffer */
-static void write_header(const unsigned char *sha1, char typeflag, const char *basepath,
- struct path_prefix *prefix, const char *path,
- unsigned int mode, void *buffer, unsigned long size)
+static void write_entry(const unsigned char *sha1, struct strbuf *path,
+ unsigned int mode, void *buffer, unsigned long size)
{
- unsigned int namelen;
- char *header = NULL;
- unsigned int checksum = 0;
- int i;
- unsigned int ext_header = 0;
-
- if (typeflag == TYPEFLAG_AUTO) {
- if (S_ISDIR(mode))
- typeflag = TYPEFLAG_DIR;
- else if (S_ISLNK(mode))
- typeflag = TYPEFLAG_LNK;
- else
- typeflag = TYPEFLAG_REG;
- }
-
- namelen = path_len(S_ISDIR(mode), basepath, prefix, path);
- if (namelen > 100)
- ext_header |= EXT_HEADER_PATH;
- if (typeflag == TYPEFLAG_LNK && size > 100)
- ext_header |= EXT_HEADER_LINKPATH;
-
- /* the extended header must be written before the normal one */
- if (ext_header) {
- char headerfilename[51];
- sprintf(headerfilename, "%s.paxheader", sha1_to_hex(sha1));
- write_extended_header(headerfilename, S_ISDIR(mode),
- ext_header, basepath, prefix, path,
- namelen, buffer, size);
- }
-
- header = get_record();
-
- if (ext_header) {
- sprintf(header, "%s.data", sha1_to_hex(sha1));
+ struct ustar_header header;
+ struct strbuf ext_header;
+
+ memset(&header, 0, sizeof(header));
+ ext_header.buf = NULL;
+ ext_header.len = ext_header.alloc = 0;
+
+ if (!sha1) {
+ *header.typeflag = TYPEFLAG_GLOBAL_HEADER;
+ mode = 0100666;
+ strcpy(header.name, "pax_global_header");
+ } else if (!path) {
+ *header.typeflag = TYPEFLAG_EXT_HEADER;
+ mode = 0100666;
+ sprintf(header.name, "%s.paxheader", sha1_to_hex(sha1));
} else {
- char *p = header;
- append_path(&p, S_ISDIR(mode), basepath, prefix, path);
+ if (S_ISDIR(mode)) {
+ *header.typeflag = TYPEFLAG_DIR;
+ mode |= 0777;
+ } else if (S_ISLNK(mode)) {
+ *header.typeflag = TYPEFLAG_LNK;
+ mode |= 0777;
+ } else if (S_ISREG(mode)) {
+ *header.typeflag = TYPEFLAG_REG;
+ mode |= (mode & 0100) ? 0777 : 0666;
+ } else {
+ error("unsupported file mode: 0%o (SHA1: %s)",
+ mode, sha1_to_hex(sha1));
+ return;
+ }
+ if (path->len > sizeof(header.name)) {
+ int plen = get_path_prefix(path, sizeof(header.prefix));
+ int rest = path->len - plen - 1;
+ if (plen > 0 && rest <= sizeof(header.name)) {
+ memcpy(header.prefix, path->buf, plen);
+ memcpy(header.name, path->buf + plen + 1, rest);
+ } else {
+ sprintf(header.name, "%s.data",
+ sha1_to_hex(sha1));
+ strbuf_append_ext_header(&ext_header, "path",
+ path->buf, path->len);
+ }
+ } else
+ memcpy(header.name, path->buf, path->len);
}
- if (typeflag == TYPEFLAG_LNK) {
- if (ext_header & EXT_HEADER_LINKPATH) {
- sprintf(&header[157], "see %s.paxheader",
+ if (S_ISLNK(mode) && buffer) {
+ if (size > sizeof(header.linkname)) {
+ sprintf(header.linkname, "see %s.paxheader",
sha1_to_hex(sha1));
- } else {
- if (buffer)
- strncpy(&header[157], buffer, size);
- }
+ strbuf_append_ext_header(&ext_header, "linkpath",
+ buffer, size);
+ } else
+ memcpy(header.linkname, buffer, size);
}
- if (S_ISDIR(mode))
- mode |= 0777;
- else if (S_ISREG(mode))
- mode |= (mode & 0100) ? 0777 : 0666;
- else if (S_ISLNK(mode))
- mode |= 0777;
- sprintf(&header[100], "%07o", mode & 07777);
+ sprintf(header.mode, "%07o", mode & 07777);
+ sprintf(header.size, "%011lo", S_ISREG(mode) ? size : 0);
+ sprintf(header.mtime, "%011lo", archive_time);
/* XXX: should we provide more meaningful info here? */
- sprintf(&header[108], "%07o", 0); /* uid */
- sprintf(&header[116], "%07o", 0); /* gid */
- strncpy(&header[265], "git", 31); /* uname */
- strncpy(&header[297], "git", 31); /* gname */
-
- if (S_ISDIR(mode) || S_ISLNK(mode))
- size = 0;
- sprintf(&header[124], "%011lo", size);
- sprintf(&header[136], "%011lo", archive_time);
+ sprintf(header.uid, "%07o", 0);
+ sprintf(header.gid, "%07o", 0);
+ strncpy(header.uname, "git", 31);
+ strncpy(header.gname, "git", 31);
+ sprintf(header.devmajor, "%07o", 0);
+ sprintf(header.devminor, "%07o", 0);
- header[156] = typeflag;
+ memcpy(header.magic, "ustar", 6);
+ memcpy(header.version, "00", 2);
- memcpy(&header[257], "ustar", 6);
- memcpy(&header[263], "00", 2);
+ sprintf(header.chksum, "%07o", ustar_header_chksum(&header));
- sprintf(&header[329], "%07o", 0); /* devmajor */
- sprintf(&header[337], "%07o", 0); /* devminor */
-
- memset(&header[148], ' ', 8);
- for (i = 0; i < RECORDSIZE; i++)
- checksum += header[i];
- sprintf(&header[148], "%07o", checksum & 0x1fffff);
+ if (ext_header.len > 0) {
+ write_entry(sha1, NULL, 0, ext_header.buf, ext_header.len);
+ free(ext_header.buf);
+ }
+ write_blocked(&header, sizeof(header));
+ if (S_ISREG(mode) && buffer && size > 0)
+ write_blocked(buffer, size);
+}
- write_if_needed();
+static void write_global_extended_header(const unsigned char *sha1)
+{
+ struct strbuf ext_header;
+ ext_header.buf = NULL;
+ ext_header.len = ext_header.alloc = 0;
+ strbuf_append_ext_header(&ext_header, "comment", sha1_to_hex(sha1), 40);
+ write_entry(NULL, NULL, 0, ext_header.buf, ext_header.len);
+ free(ext_header.buf);
}
-static void traverse_tree(struct tree_desc *tree,
- struct path_prefix *prefix)
+static void traverse_tree(struct tree_desc *tree, struct strbuf *path)
{
- struct path_prefix this_prefix;
- this_prefix.prev = prefix;
+ int pathlen = path->len;
while (tree->size) {
const char *name;
@@ -358,16 +283,19 @@ static void traverse_tree(struct tree_desc *tree,
eltbuf = read_sha1_file(sha1, elttype, &eltsize);
if (!eltbuf)
die("cannot read %s", sha1_to_hex(sha1));
- write_header(sha1, TYPEFLAG_AUTO, basedir,
- prefix, name, mode, eltbuf, eltsize);
+
+ path->len = pathlen;
+ strbuf_append_string(path, name);
+ if (S_ISDIR(mode))
+ strbuf_append_string(path, "/");
+
+ write_entry(sha1, path, mode, eltbuf, eltsize);
+
if (S_ISDIR(mode)) {
struct tree_desc subtree;
subtree.buf = eltbuf;
subtree.size = eltsize;
- this_prefix.name = name;
- traverse_tree(&subtree, &this_prefix);
- } else if (!S_ISLNK(mode)) {
- write_blocked(eltbuf, eltsize);
+ traverse_tree(&subtree, path);
}
free(eltbuf);
}
@@ -375,15 +303,22 @@ static void traverse_tree(struct tree_desc *tree,
int main(int argc, char **argv)
{
- unsigned char sha1[20];
+ unsigned char sha1[20], tree_sha1[20];
struct commit *commit;
struct tree_desc tree;
+ struct strbuf current_path;
+
+ current_path.buf = xmalloc(PATH_MAX);
+ current_path.alloc = PATH_MAX;
+ current_path.len = current_path.eof = 0;
setup_git_directory();
+ git_config(git_default_config);
switch (argc) {
case 3:
- basedir = argv[2];
+ strbuf_append_string(&current_path, argv[2]);
+ strbuf_append_string(&current_path, "/");
/* FALLTHROUGH */
case 2:
if (get_sha1(argv[1], sha1) < 0)
@@ -397,17 +332,19 @@ int main(int argc, char **argv)
if (commit) {
write_global_extended_header(commit->object.sha1);
archive_time = commit->date;
- }
- tree.buf = read_object_with_reference(sha1, "tree", &tree.size, NULL);
+ } else
+ archive_time = time(NULL);
+
+ tree.buf = read_object_with_reference(sha1, tree_type, &tree.size,
+ tree_sha1);
if (!tree.buf)
die("not a reference to a tag, commit or tree object: %s",
sha1_to_hex(sha1));
- if (!archive_time)
- archive_time = time(NULL);
- if (basedir)
- write_header((unsigned char *)"0", TYPEFLAG_DIR, NULL, NULL,
- basedir, 040777, NULL, 0);
- traverse_tree(&tree, NULL);
+
+ if (current_path.len > 0)
+ write_entry(tree_sha1, &current_path, 040777, NULL, 0);
+ traverse_tree(&tree, &current_path);
write_trailer();
+ free(current_path.buf);
return 0;
}
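
The comment on strbuf_append_ext_header() in the tar-tree.c hunk above describes the pax extended header record format, "%u %s=%s\n", where the leading decimal length counts the whole record including its own digits. Below is a minimal standalone sketch of that length calculation; record() and the sample keyword/value pairs are illustrative only, not part of the patch.

#include <stdio.h>
#include <string.h>

/* Sketch: build one pax record "%u %s=%s\n".  The leading decimal
 * length covers the entire record, including its own digits, hence
 * the extra loop that grows len by one per additional digit. */
static void record(const char *keyword, const char *value)
{
	unsigned int len = 1 + 1 + strlen(keyword) + 1 + strlen(value) + 1;
	unsigned int tmp;
	for (tmp = len; tmp > 9; tmp /= 10)
		len++;
	printf("%u %s=%s\n", len, keyword, value);
}

int main(void)
{
	record("path", "a/b");	/* prints "12 path=a/b" */
	record("comment", "0123456789abcdef0123456789abcdef01234567");
	return 0;
}
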
diff --git a/tar.h b/tar.h
new file mode 100644
index 0000000..3467705
--- /dev/null
+++ b/tar.h
@@ -0,0 +1,25 @@
+#define TYPEFLAG_AUTO '\0'
+#define TYPEFLAG_REG '0'
+#define TYPEFLAG_LNK '2'
+#define TYPEFLAG_DIR '5'
+#define TYPEFLAG_GLOBAL_HEADER 'g'
+#define TYPEFLAG_EXT_HEADER 'x'
+
+struct ustar_header {
+ char name[100]; /* 0 */
+ char mode[8]; /* 100 */
+ char uid[8]; /* 108 */
+ char gid[8]; /* 116 */
+ char size[12]; /* 124 */
+ char mtime[12]; /* 136 */
+ char chksum[8]; /* 148 */
+ char typeflag[1]; /* 156 */
+ char linkname[100]; /* 157 */
+ char magic[6]; /* 257 */
+ char version[2]; /* 263 */
+ char uname[32]; /* 265 */
+ char gname[32]; /* 297 */
+ char devmajor[8]; /* 329 */
+ char devminor[8]; /* 337 */
+ char prefix[155]; /* 345 */
+};
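
Since every member of ustar_header is a plain char array, the compiler inserts no padding and the struct lines up with the POSIX ustar byte offsets noted in the comments; write_blocked() in tar-tree.c then rounds the 500-byte header up to a full 512-byte record with zero bytes. A small sanity-check sketch, assuming only the header above:

#include <assert.h>
#include <stddef.h>
#include "tar.h"

int main(void)
{
	/* all members are char arrays, so offsetof() must agree with
	 * the offsets given in the comments of struct ustar_header */
	assert(offsetof(struct ustar_header, mode)     == 100);
	assert(offsetof(struct ustar_header, chksum)   == 148);
	assert(offsetof(struct ustar_header, typeflag) == 156);
	assert(offsetof(struct ustar_header, linkname) == 157);
	assert(offsetof(struct ustar_header, magic)    == 257);
	assert(offsetof(struct ustar_header, prefix)   == 345);
	/* 345 + 155 = 500; the remaining 12 bytes of the 512-byte
	 * tar record are zero padding written when the header goes
	 * out in RECORDSIZE units */
	assert(sizeof(struct ustar_header) == 500);
	return 0;
}
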
diff --git a/templates/hooks--pre-rebase b/templates/hooks--pre-rebase
new file mode 100644
index 0000000..981c454
--- /dev/null
+++ b/templates/hooks--pre-rebase
@@ -0,0 +1,150 @@
+#!/bin/sh
+#
+# Copyright (c) 2006 Junio C Hamano
+#
+
+publish=next
+basebranch="$1"
+if test "$#" = 2
+then
+ topic="refs/heads/$2"
+else
+ topic=`git symbolic-ref HEAD`
+fi
+
+case "$basebranch,$topic" in
+master,refs/heads/??/*)
+ ;;
+*)
+ exit 0 ;# we do not interrupt others.
+ ;;
+esac
+
+# Now we are dealing with a topic branch being rebased
+# on top of master. Is it OK to rebase it?
+
+# Is topic fully merged to master?
+not_in_master=`git-rev-list --pretty=oneline ^master "$topic"`
+if test -z "$not_in_master"
+then
+ echo >&2 "$topic is fully merged to master; better remove it."
+ exit 1 ;# we could allow it, but there is no point.
+fi
+
+# Is topic ever merged to next? If so you should not be rebasing it.
+only_next_1=`git-rev-list ^master "^$topic" ${publish} | sort`
+only_next_2=`git-rev-list ^master ${publish} | sort`
+if test "$only_next_1" = "$only_next_2"
+then
+ not_in_topic=`git-rev-list "^$topic" master`
+ if test -z "$not_in_topic"
+ then
+ echo >&2 "$topic is already up-to-date with master"
+ exit 1 ;# we could allow it, but there is no point.
+ else
+ exit 0
+ fi
+else
+ not_in_next=`git-rev-list --pretty=oneline ^${publish} "$topic"`
+ perl -e '
+ my $topic = $ARGV[0];
+ my $msg = "* $topic has commits already merged to public branch:\n";
+ my (%not_in_next) = map {
+ /^([0-9a-f]+) /;
+ ($1 => 1);
+ } split(/\n/, $ARGV[1]);
+ for my $elem (map {
+ /^([0-9a-f]+) (.*)$/;
+ [$1 => $2];
+ } split(/\n/, $ARGV[2])) {
+ if (!exists $not_in_next{$elem->[0]}) {
+ if ($msg) {
+ print STDERR $msg;
+ undef $msg;
+ }
+ print STDERR " $elem->[1]\n";
+ }
+ }
+ ' "$topic" "$not_in_next" "$not_in_master"
+ exit 1
+fi
+
+exit 0
+
+################################################################
+
+This sample hook safeguards topic branches that have been
+published from being rewound.
+
+The workflow assumed here is:
+
+ * Once a topic branch forks from "master", "master" is never
+ merged into it again (either directly or indirectly).
+
+ * Once a topic branch is fully cooked and merged into "master",
+ it is deleted. If you need to build on top of it to correct
+ earlier mistakes, a new topic branch is created by forking at
+ the tip of the "master". This is not strictly necessary, but
+ it makes it easier to keep your history simple.
+
+ * Whenever you need to test or publish your changes to topic
+ branches, merge them into "next" branch.
+
+The script, being an example, hardcodes the publish branch name
+to be "next", but it is trivial to make it configurable via the
+$GIT_DIR/config mechanism.
+
+With this workflow, you would want to know:
+
+(1) ... if a topic branch has ever been merged to "next". Young
+ topic branches can have stupid mistakes you would rather
+ clean up before publishing, and things that have not been
+ merged into other branches can be easily rebased without
+ affecting other people. But once it is published, you would
+ not want to rewind it.
+
+(2) ... if a topic branch has been fully merged to "master".
+ Then you can delete it. More importantly, you should not
+ build on top of it -- other people may already want to
+ change things related to the topic as patches against your
+ "master", so if you need further changes, it is better to
+ fork the topic (perhaps with the same name) afresh from the
+ tip of "master".
+
+Let's look at this example:
+
+ o---o---o---o---o---o---o---o---o---o "next"
+ / / / /
+ / a---a---b A / /
+ / / / /
+ / / c---c---c---c B /
+ / / / \ /
+ / / / b---b C \ /
+ / / / / \ /
+ ---o---o---o---o---o---o---o---o---o---o---o "master"
+
+
+A, B and C are topic branches.
+
+ * A has one fix since it was merged up to "next".
+
+ * B has finished. It has been fully merged up to "master" and "next",
+ and is ready to be deleted.
+
+ * C has not been merged to "next" at all.
+
+We would want to allow C to be rebased, refuse A, and encourage
+B to be deleted.
+
+To compute (1):
+
+ git-rev-list ^master ^topic next
+ git-rev-list ^master next
+
+ if these match, the topic has not been merged into next at all.
+
+To compute (2):
+
+ git-rev-list master..topic
+
+ if this is empty, it is fully merged to "master".
diff --git a/tree-diff.c b/tree-diff.c
index d978428..701fbba 100644
--- a/tree-diff.c
+++ b/tree-diff.c
@@ -3,40 +3,13 @@
*/
#include "cache.h"
#include "diff.h"
+#include "tree.h"
// What paths are we interested in?
static int nr_paths = 0;
static const char **paths = NULL;
static int *pathlens = NULL;
-void update_tree_entry(struct tree_desc *desc)
-{
- void *buf = desc->buf;
- unsigned long size = desc->size;
- int len = strlen(buf) + 1 + 20;
-
- if (size < len)
- die("corrupt tree file");
- desc->buf = buf + len;
- desc->size = size - len;
-}
-
-const unsigned char *tree_entry_extract(struct tree_desc *desc, const char **pathp, unsigned int *modep)
-{
- void *tree = desc->buf;
- unsigned long size = desc->size;
- int len = strlen(tree)+1;
- const unsigned char *sha1 = tree + len;
- const char *path = strchr(tree, ' ');
- unsigned int mode;
-
- if (!path || size < len + 20 || sscanf(tree, "%o", &mode) != 1)
- die("corrupt tree file");
- *pathp = path+1;
- *modep = DIFF_FILE_CANON_MODE(mode);
- return sha1;
-}
-
static char *malloc_base(const char *base, const char *path, int pathlen)
{
int baselen = strlen(base);
@@ -176,7 +149,7 @@ static int show_entry(struct diff_options *opt, const char *prefix, struct tree_
void *tree;
tree = read_sha1_file(sha1, type, &inner.size);
- if (!tree || strcmp(type, "tree"))
+ if (!tree || strcmp(type, tree_type))
die("corrupt tree sha %s", sha1_to_hex(sha1));
inner.buf = tree;
@@ -234,10 +207,10 @@ int diff_tree_sha1(const unsigned char *old, const unsigned char *new, const cha
struct tree_desc t1, t2;
int retval;
- tree1 = read_object_with_reference(old, "tree", &t1.size, NULL);
+ tree1 = read_object_with_reference(old, tree_type, &t1.size, NULL);
if (!tree1)
die("unable to read source tree (%s)", sha1_to_hex(old));
- tree2 = read_object_with_reference(new, "tree", &t2.size, NULL);
+ tree2 = read_object_with_reference(new, tree_type, &t2.size, NULL);
if (!tree2)
die("unable to read destination tree (%s)", sha1_to_hex(new));
t1.buf = tree1;
diff --git a/tree-walk.c b/tree-walk.c
new file mode 100644
index 0000000..bf8bfdf
--- /dev/null
+++ b/tree-walk.c
@@ -0,0 +1,117 @@
+#include "cache.h"
+#include "tree-walk.h"
+#include "tree.h"
+
+void *fill_tree_descriptor(struct tree_desc *desc, const unsigned char *sha1)
+{
+ unsigned long size = 0;
+ void *buf = NULL;
+
+ if (sha1) {
+ buf = read_object_with_reference(sha1, tree_type, &size, NULL);
+ if (!buf)
+ die("unable to read tree %s", sha1_to_hex(sha1));
+ }
+ desc->size = size;
+ desc->buf = buf;
+ return buf;
+}
+
+static int entry_compare(struct name_entry *a, struct name_entry *b)
+{
+ return base_name_compare(
+ a->path, a->pathlen, a->mode,
+ b->path, b->pathlen, b->mode);
+}
+
+static void entry_clear(struct name_entry *a)
+{
+ memset(a, 0, sizeof(*a));
+}
+
+static void entry_extract(struct tree_desc *t, struct name_entry *a)
+{
+ a->sha1 = tree_entry_extract(t, &a->path, &a->mode);
+ a->pathlen = strlen(a->path);
+}
+
+void update_tree_entry(struct tree_desc *desc)
+{
+ void *buf = desc->buf;
+ unsigned long size = desc->size;
+ int len = strlen(buf) + 1 + 20;
+
+ if (size < len)
+ die("corrupt tree file");
+ desc->buf = buf + len;
+ desc->size = size - len;
+}
+
+const unsigned char *tree_entry_extract(struct tree_desc *desc, const char **pathp, unsigned int *modep)
+{
+ void *tree = desc->buf;
+ unsigned long size = desc->size;
+ int len = strlen(tree)+1;
+ const unsigned char *sha1 = tree + len;
+ const char *path = strchr(tree, ' ');
+ unsigned int mode;
+
+ if (!path || size < len + 20 || sscanf(tree, "%o", &mode) != 1)
+ die("corrupt tree file");
+ *pathp = path+1;
+ *modep = canon_mode(mode);
+ return sha1;
+}
+
+void traverse_trees(int n, struct tree_desc *t, const char *base, traverse_callback_t callback)
+{
+ struct name_entry *entry = xmalloc(n*sizeof(*entry));
+
+ for (;;) {
+ struct name_entry entry[3];
+ unsigned long mask = 0;
+ int i, last;
+
+ last = -1;
+ for (i = 0; i < n; i++) {
+ if (!t[i].size)
+ continue;
+ entry_extract(t+i, entry+i);
+ if (last >= 0) {
+ int cmp = entry_compare(entry+i, entry+last);
+
+ /*
+ * Is the new name bigger than the old one?
+ * Ignore it
+ */
+ if (cmp > 0)
+ continue;
+ /*
+ * Is the new name smaller than the old one?
+ * Ignore all old ones
+ */
+ if (cmp < 0)
+ mask = 0;
+ }
+ mask |= 1ul << i;
+ last = i;
+ }
+ if (!mask)
+ break;
+
+ /*
+ * Update the tree entries we've walked, and clear
+ * all the unused name-entries.
+ */
+ for (i = 0; i < n; i++) {
+ if (mask & (1ul << i)) {
+ update_tree_entry(t+i);
+ continue;
+ }
+ entry_clear(entry + i);
+ }
+ callback(n, mask, entry, base);
+ }
+ free(entry);
+}
+
diff --git a/tree-walk.h b/tree-walk.h
new file mode 100644
index 0000000..76893e3
--- /dev/null
+++ b/tree-walk.h
@@ -0,0 +1,25 @@
+#ifndef TREE_WALK_H
+#define TREE_WALK_H
+
+struct tree_desc {
+ void *buf;
+ unsigned long size;
+};
+
+struct name_entry {
+ const unsigned char *sha1;
+ const char *path;
+ unsigned int mode;
+ int pathlen;
+};
+
+void update_tree_entry(struct tree_desc *);
+const unsigned char *tree_entry_extract(struct tree_desc *, const char **, unsigned int *);
+
+void *fill_tree_descriptor(struct tree_desc *desc, const unsigned char *sha1);
+
+typedef void (*traverse_callback_t)(int n, unsigned long mask, struct name_entry *entry, const char *base);
+
+void traverse_trees(int n, struct tree_desc *t, const char *base, traverse_callback_t callback);
+
+#endif
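
traverse_trees() walks a set of tree buffers side by side and invokes the callback once per distinct entry name, with a bitmask telling which of the trees carry an entry of that name. A minimal usage sketch against the declarations above; show_entry() and show_two_trees() are illustrative names, not part of the patch:

#include "cache.h"
#include "tree-walk.h"

/* bit i of mask is set when tree i has an entry for this name;
 * entry[i] is only meaningful for those set bits */
static void show_entry(int n, unsigned long mask,
		       struct name_entry *entry, const char *base)
{
	int i;
	for (i = 0; i < n; i++) {
		if (!(mask & (1ul << i)))
			continue;
		printf("tree %d: %06o %s %s%s\n", i, entry[i].mode,
		       sha1_to_hex(entry[i].sha1), base, entry[i].path);
	}
}

static void show_two_trees(const unsigned char *sha1_a,
			   const unsigned char *sha1_b)
{
	struct tree_desc t[2];
	void *buf_a = fill_tree_descriptor(t + 0, sha1_a);
	void *buf_b = fill_tree_descriptor(t + 1, sha1_b);

	traverse_trees(2, t, "", show_entry);
	free(buf_a);
	free(buf_b);
}
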
diff --git a/tree.c b/tree.c
index 87e0d74..d599fb5 100644
--- a/tree.c
+++ b/tree.c
@@ -18,9 +18,7 @@ static int read_one_entry(unsigned char *sha1, const char *base, int baselen, co
len = strlen(pathname);
size = cache_entry_size(baselen + len);
- ce = xmalloc(size);
-
- memset(ce, 0, size);
+ ce = xcalloc(1, size);
ce->ce_mode = create_ce_mode(mode);
ce->ce_flags = create_ce_flags(baselen + len, stage);
@@ -130,8 +128,7 @@ struct tree *lookup_tree(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
if (!obj) {
- struct tree *ret = xmalloc(sizeof(struct tree));
- memset(ret, 0, sizeof(struct tree));
+ struct tree *ret = xcalloc(1, sizeof(struct tree));
created_object(sha1, &ret->object);
ret->object.type = tree_type;
return ret;
diff --git a/unpack-file.c b/unpack-file.c
index 07303f8..23a8562 100644
--- a/unpack-file.c
+++ b/unpack-file.c
@@ -1,4 +1,5 @@
#include "cache.h"
+#include "blob.h"
static char *create_temp_file(unsigned char *sha1)
{
@@ -9,7 +10,7 @@ static char *create_temp_file(unsigned char *sha1)
int fd;
buf = read_sha1_file(sha1, type, &size);
- if (!buf || strcmp(type, "blob"))
+ if (!buf || strcmp(type, blob_type))
die("unable to read blob object %s", sha1_to_hex(sha1));
strcpy(path, ".merge_file_XXXXXX");
@@ -30,6 +31,7 @@ int main(int argc, char **argv)
usage("git-unpack-file <sha1>");
setup_git_directory();
+ git_config(git_default_config);
puts(create_temp_file(sha1));
return 0;
diff --git a/unpack-objects.c b/unpack-objects.c
index 815a1b3..3b824b0 100644
--- a/unpack-objects.c
+++ b/unpack-objects.c
@@ -2,6 +2,10 @@
#include "object.h"
#include "delta.h"
#include "pack.h"
+#include "blob.h"
+#include "commit.h"
+#include "tag.h"
+#include "tree.h"
#include <sys/time.h>
@@ -148,10 +152,10 @@ static int unpack_non_delta_entry(enum object_type kind, unsigned long size)
const char *type;
switch (kind) {
- case OBJ_COMMIT: type = "commit"; break;
- case OBJ_TREE: type = "tree"; break;
- case OBJ_BLOB: type = "blob"; break;
- case OBJ_TAG: type = "tag"; break;
+ case OBJ_COMMIT: type = commit_type; break;
+ case OBJ_TREE: type = tree_type; break;
+ case OBJ_BLOB: type = blob_type; break;
+ case OBJ_TAG: type = tag_type; break;
default: die("bad type %d", kind);
}
if (!dry_run)
diff --git a/update-index.c b/update-index.c
index afec98d..1efac27 100644
--- a/update-index.c
+++ b/update-index.c
@@ -23,6 +23,10 @@ static int quiet; /* --refresh needing update is not error */
static int info_only;
static int force_remove;
static int verbose;
+static int mark_valid_only = 0;
+#define MARK_VALID 1
+#define UNMARK_VALID 2
+
/* Three functions to allow overloaded pointer return; see linux/err.h */
static inline void *ERR_PTR(long error)
@@ -53,6 +57,25 @@ static void report(const char *fmt, ...)
va_end(vp);
}
+static int mark_valid(const char *path)
+{
+ int namelen = strlen(path);
+ int pos = cache_name_pos(path, namelen);
+ if (0 <= pos) {
+ switch (mark_valid_only) {
+ case MARK_VALID:
+ active_cache[pos]->ce_flags |= htons(CE_VALID);
+ break;
+ case UNMARK_VALID:
+ active_cache[pos]->ce_flags &= ~htons(CE_VALID);
+ break;
+ }
+ active_cache_changed = 1;
+ return 0;
+ }
+ return -1;
+}
+
static int add_file_to_cache(const char *path)
{
int size, namelen, option, status;
@@ -91,9 +114,9 @@ static int add_file_to_cache(const char *path)
namelen = strlen(path);
size = cache_entry_size(namelen);
- ce = xmalloc(size);
- memset(ce, 0, size);
+ ce = xcalloc(1, size);
memcpy(ce->name, path, namelen);
+ ce->ce_flags = htons(namelen);
fill_stat_cache_info(ce, &st);
ce->ce_mode = create_ce_mode(st.st_mode);
@@ -105,7 +128,6 @@ static int add_file_to_cache(const char *path)
if (0 <= pos)
ce->ce_mode = active_cache[pos]->ce_mode;
}
- ce->ce_flags = htons(namelen);
if (index_path(ce->sha1, path, &st, !info_only))
return -1;
@@ -128,7 +150,7 @@ static int add_file_to_cache(const char *path)
* For example, you'd want to do this after doing a "git-read-tree",
* to link up the stat cache details with the proper files.
*/
-static struct cache_entry *refresh_entry(struct cache_entry *ce)
+static struct cache_entry *refresh_entry(struct cache_entry *ce, int really)
{
struct stat st;
struct cache_entry *updated;
@@ -137,21 +159,36 @@ static struct cache_entry *refresh_entry(struct cache_entry *ce)
if (lstat(ce->name, &st) < 0)
return ERR_PTR(-errno);
- changed = ce_match_stat(ce, &st);
- if (!changed)
- return NULL;
+ changed = ce_match_stat(ce, &st, really);
+ if (!changed) {
+ if (really && assume_unchanged &&
+ !(ce->ce_flags & htons(CE_VALID)))
+ ; /* mark this one VALID again */
+ else
+ return NULL;
+ }
- if (ce_modified(ce, &st))
+ if (ce_modified(ce, &st, really))
return ERR_PTR(-EINVAL);
size = ce_size(ce);
updated = xmalloc(size);
memcpy(updated, ce, size);
fill_stat_cache_info(updated, &st);
+
+ /* In this case, if really is not set, we should leave
+ * CE_VALID bit alone. Otherwise, paths marked with
+ * --no-assume-unchanged (i.e. things to be edited) will
+ * reacquire CE_VALID bit automatically, which is not
+ * really what we want.
+ */
+ if (!really && assume_unchanged && !(ce->ce_flags & htons(CE_VALID)))
+ updated->ce_flags &= ~htons(CE_VALID);
+
return updated;
}
-static int refresh_cache(void)
+static int refresh_cache(int really)
{
int i;
int has_errors = 0;
@@ -171,12 +208,19 @@ static int refresh_cache(void)
continue;
}
- new = refresh_entry(ce);
+ new = refresh_entry(ce, really);
if (!new)
continue;
if (IS_ERR(new)) {
if (not_new && PTR_ERR(new) == -ENOENT)
continue;
+ if (really && PTR_ERR(new) == -EINVAL) {
+ /* If we are doing --really-refresh that
+ * means the index is not valid anymore.
+ */
+ ce->ce_flags &= ~htons(CE_VALID);
+ active_cache_changed = 1;
+ }
if (quiet)
continue;
printf("%s: needs update\n", ce->name);
@@ -267,13 +311,14 @@ static int add_cacheinfo(unsigned int mode, const unsigned char *sha1,
len = strlen(path);
size = cache_entry_size(len);
- ce = xmalloc(size);
- memset(ce, 0, size);
+ ce = xcalloc(1, size);
memcpy(ce->sha1, sha1, 20);
memcpy(ce->name, path, len);
ce->ce_flags = create_ce_flags(len, stage);
ce->ce_mode = create_ce_mode(mode);
+ if (assume_unchanged)
+ ce->ce_flags |= htons(CE_VALID);
option = allow_add ? ADD_CACHE_OK_TO_ADD : 0;
option |= allow_replace ? ADD_CACHE_OK_TO_REPLACE : 0;
if (add_cache_entry(ce, option))
@@ -317,6 +362,12 @@ static void update_one(const char *path, const char *prefix, int prefix_length)
fprintf(stderr, "Ignoring path %s\n", path);
return;
}
+ if (mark_valid_only) {
+ if (mark_valid(p))
+ die("Unable to mark file %s", path);
+ return;
+ }
+
if (force_remove) {
if (remove_file_from_cache(p))
die("git-update-index: unable to remove %s", path);
@@ -467,7 +518,11 @@ int main(int argc, const char **argv)
continue;
}
if (!strcmp(path, "--refresh")) {
- has_errors |= refresh_cache();
+ has_errors |= refresh_cache(0);
+ continue;
+ }
+ if (!strcmp(path, "--really-refresh")) {
+ has_errors |= refresh_cache(1);
continue;
}
if (!strcmp(path, "--cacheinfo")) {
@@ -493,6 +548,14 @@ int main(int argc, const char **argv)
die("git-update-index: %s cannot chmod %s", path, argv[i]);
continue;
}
+ if (!strcmp(path, "--assume-unchanged")) {
+ mark_valid_only = MARK_VALID;
+ continue;
+ }
+ if (!strcmp(path, "--no-assume-unchanged")) {
+ mark_valid_only = UNMARK_VALID;
+ continue;
+ }
if (!strcmp(path, "--info-only")) {
info_only = 1;
continue;
@@ -512,9 +575,11 @@ int main(int argc, const char **argv)
break;
}
if (!strcmp(path, "--index-info")) {
+ if (i != argc - 1)
+ die("--index-info must be at the end");
allow_add = allow_replace = allow_remove = 1;
read_index_info(line_termination);
- continue;
+ break;
}
if (!strcmp(path, "--ignore-missing")) {
not_new = 1;
diff --git a/update-ref.c b/update-ref.c
index e6fbddb..ba4bf51 100644
--- a/update-ref.c
+++ b/update-ref.c
@@ -25,6 +25,7 @@ int main(int argc, char **argv)
int fd, written;
setup_git_directory();
+ git_config(git_default_config);
if (argc < 3 || argc > 4)
usage(git_update_ref_usage);
diff --git a/upload-pack.c b/upload-pack.c
index 3606529..47560c9 100644
--- a/upload-pack.c
+++ b/upload-pack.c
@@ -14,6 +14,7 @@ static const char upload_pack_usage[] = "git-upload-pack [--strict] [--timeout=n
#define MAX_HAS 256
#define MAX_NEEDS 256
static int nr_has = 0, nr_needs = 0, multi_ack = 0, nr_our_refs = 0;
+static int use_thin_pack = 0;
static unsigned char has_sha1[MAX_HAS][20];
static unsigned char needs_sha1[MAX_NEEDS][20];
static unsigned int timeout = 0;
@@ -45,24 +46,26 @@ static void create_pack_file(void)
if (!pid) {
int i;
int args;
- char **argv;
+ const char **argv;
char *buf;
char **p;
- if (create_full_pack)
+ if (create_full_pack) {
args = 10;
+ use_thin_pack = 0; /* no point doing it */
+ }
else
args = nr_has + nr_needs + 5;
- argv = xmalloc(args * sizeof(char *));
+ p = xmalloc(args * sizeof(char *));
+ argv = (const char **) p;
buf = xmalloc(args * 45);
- p = argv;
dup2(fd[1], 1);
close(0);
close(fd[0]);
close(fd[1]);
*p++ = "rev-list";
- *p++ = "--objects";
+ *p++ = use_thin_pack ? "--objects-edge" : "--objects";
if (create_full_pack || MAX_NEEDS <= nr_needs)
*p++ = "--all";
else {
@@ -192,6 +195,8 @@ static int receive_needs(void)
"expected to get sha, not '%s'", line);
if (strstr(line+45, "multi_ack"))
multi_ack = 1;
+ if (strstr(line+45, "thin-pack"))
+ use_thin_pack = 1;
/* We have sent all our refs already, and the other end
* should have chosen out of them; otherwise they are
@@ -213,7 +218,7 @@ static int receive_needs(void)
static int send_ref(const char *refname, const unsigned char *sha1)
{
- static char *capabilities = "multi_ack";
+ static char *capabilities = "multi_ack thin-pack";
struct object *o = parse_object(sha1);
if (!o)
diff --git a/write-tree.c b/write-tree.c
index f866059..dcad6e6 100644
--- a/write-tree.c
+++ b/write-tree.c
@@ -4,6 +4,7 @@
* Copyright (C) Linus Torvalds, 2005
*/
#include "cache.h"
+#include "tree.h"
static int missing_ok = 0;
@@ -78,7 +79,7 @@ static int write_tree(struct cache_entry **cachep, int maxentries, const char *b
nr++;
}
- write_sha1_file(buffer, offset, "tree", returnsha1);
+ write_sha1_file(buffer, offset, tree_type, returnsha1);
free(buffer);
return nr;
}
@@ -111,7 +112,7 @@ int main(int argc, char **argv)
funny = 0;
for (i = 0; i < entries; i++) {
struct cache_entry *ce = active_cache[i];
- if (ntohs(ce->ce_flags) & ~CE_NAMEMASK) {
+ if (ce_stage(ce)) {
if (10 < ++funny) {
fprintf(stderr, "...\n");
break;
diff --git a/xdiff/xdiff.h b/xdiff/xdiff.h
new file mode 100644
index 0000000..2540e8a
--- /dev/null
+++ b/xdiff/xdiff.h
@@ -0,0 +1,94 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XDIFF_H)
+#define XDIFF_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* #ifdef __cplusplus */
+
+
+#define XDF_NEED_MINIMAL (1 << 1)
+
+#define XDL_PATCH_NORMAL '-'
+#define XDL_PATCH_REVERSE '+'
+#define XDL_PATCH_MODEMASK ((1 << 8) - 1)
+#define XDL_PATCH_IGNOREBSPACE (1 << 8)
+
+#define XDL_EMIT_FUNCNAMES (1 << 0)
+
+#define XDL_MMB_READONLY (1 << 0)
+
+#define XDL_MMF_ATOMIC (1 << 0)
+
+#define XDL_BDOP_INS 1
+#define XDL_BDOP_CPY 2
+#define XDL_BDOP_INSB 3
+
+
+typedef struct s_mmfile {
+ char *ptr;
+ long size;
+} mmfile_t;
+
+typedef struct s_mmbuffer {
+ char *ptr;
+ long size;
+} mmbuffer_t;
+
+typedef struct s_xpparam {
+ unsigned long flags;
+} xpparam_t;
+
+typedef struct s_xdemitcb {
+ void *priv;
+ int (*outf)(void *, mmbuffer_t *, int);
+} xdemitcb_t;
+
+typedef struct s_xdemitconf {
+ long ctxlen;
+ unsigned long flags;
+} xdemitconf_t;
+
+typedef struct s_bdiffparam {
+ long bsize;
+} bdiffparam_t;
+
+
+#define xdl_malloc(x) malloc(x)
+#define xdl_free(ptr) free(ptr)
+#define xdl_realloc(ptr,x) realloc(ptr,x)
+
+void *xdl_mmfile_first(mmfile_t *mmf, long *size);
+void *xdl_mmfile_next(mmfile_t *mmf, long *size);
+long xdl_mmfile_size(mmfile_t *mmf);
+
+int xdl_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
+ xdemitconf_t const *xecfg, xdemitcb_t *ecb);
+
+#ifdef __cplusplus
+}
+#endif /* #ifdef __cplusplus */
+
+#endif /* #if !defined(XDIFF_H) */
+
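
xdl_diff() does not write output itself; it hands each chunk of generated diff text to the outf callback of the xdemitcb_t it is given. A minimal caller sketch against the declarations above; write_to_stdout(), show_diff() and the context length of 3 are illustrative choices, not part of the patch:

#include <stdio.h>
#include "xdiff.h"

/* receives nbuf text fragments per call and copies them to stdout */
static int write_to_stdout(void *priv, mmbuffer_t *mb, int nbuf)
{
	int i;
	for (i = 0; i < nbuf; i++)
		if (fwrite(mb[i].ptr, 1, mb[i].size, stdout) != (size_t)mb[i].size)
			return -1;
	return 0;
}

static int show_diff(char *old_buf, long old_len, char *new_buf, long new_len)
{
	mmfile_t mf1, mf2;
	xpparam_t xpp;
	xdemitconf_t xecfg;
	xdemitcb_t ecb;

	mf1.ptr = old_buf; mf1.size = old_len;
	mf2.ptr = new_buf; mf2.size = new_len;
	xpp.flags = 0;		/* or XDF_NEED_MINIMAL */
	xecfg.ctxlen = 3;	/* lines of context around each hunk */
	xecfg.flags = 0;	/* or XDL_EMIT_FUNCNAMES */
	ecb.priv = NULL;
	ecb.outf = write_to_stdout;
	return xdl_diff(&mf1, &mf2, &xpp, &xecfg, &ecb);
}
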
diff --git a/xdiff/xdiffi.c b/xdiff/xdiffi.c
new file mode 100644
index 0000000..e81bca6
--- /dev/null
+++ b/xdiff/xdiffi.c
@@ -0,0 +1,464 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#include "xinclude.h"
+
+
+
+#define XDL_MAX_COST_MIN 256
+#define XDL_HEUR_MIN_COST 256
+#define XDL_LINE_MAX (long)((1UL << (8 * sizeof(long) - 1)) - 1)
+#define XDL_SNAKE_CNT 20
+#define XDL_K_HEUR 4
+
+
+
+typedef struct s_xdpsplit {
+ long i1, i2;
+ int min_lo, min_hi;
+} xdpsplit_t;
+
+
+
+
+static long xdl_split(unsigned long const *ha1, long off1, long lim1,
+ unsigned long const *ha2, long off2, long lim2,
+ long *kvdf, long *kvdb, int need_min, xdpsplit_t *spl,
+ xdalgoenv_t *xenv);
+static xdchange_t *xdl_add_change(xdchange_t *xscr, long i1, long i2, long chg1, long chg2);
+
+
+
+
+/*
+ * See "An O(ND) Difference Algorithm and its Variations", by Eugene Myers.
+ * Basically considers a "box" (off1, off2, lim1, lim2) and scans from both
+ * the forward diagonal starting from (off1, off2) and the backward diagonal
+ * starting from (lim1, lim2). If the K values on the same diagonal cross, it
+ * returns the furthest point of reach. We might end up with expensive cases
+ * using this algorithm in full, so a little bit of heuristic is needed
+ * to cut the search and to return a suboptimal point.
+ */
+static long xdl_split(unsigned long const *ha1, long off1, long lim1,
+ unsigned long const *ha2, long off2, long lim2,
+ long *kvdf, long *kvdb, int need_min, xdpsplit_t *spl,
+ xdalgoenv_t *xenv) {
+ long dmin = off1 - lim2, dmax = lim1 - off2;
+ long fmid = off1 - off2, bmid = lim1 - lim2;
+ long odd = (fmid - bmid) & 1;
+ long fmin = fmid, fmax = fmid;
+ long bmin = bmid, bmax = bmid;
+ long ec, d, i1, i2, prev1, best, dd, v, k;
+
+ /*
+ * Set initial diagonal values for both forward and backward path.
+ */
+ kvdf[fmid] = off1;
+ kvdb[bmid] = lim1;
+
+ for (ec = 1;; ec++) {
+ int got_snake = 0;
+
+ /*
+ * We need to extend the diagonal "domain" by one. If the next
+ * value exits the box boundaries we need to change it in the
+ * opposite direction because (max - min) must be a power of two.
+ * Also we initialize the external K value to -1 so that we can
+ * avoid extra condition checks inside the core loop.
+ */
+ if (fmin > dmin)
+ kvdf[--fmin - 1] = -1;
+ else
+ ++fmin;
+ if (fmax < dmax)
+ kvdf[++fmax + 1] = -1;
+ else
+ --fmax;
+
+ for (d = fmax; d >= fmin; d -= 2) {
+ if (kvdf[d - 1] >= kvdf[d + 1])
+ i1 = kvdf[d - 1] + 1;
+ else
+ i1 = kvdf[d + 1];
+ prev1 = i1;
+ i2 = i1 - d;
+ for (; i1 < lim1 && i2 < lim2 && ha1[i1] == ha2[i2]; i1++, i2++);
+ if (i1 - prev1 > xenv->snake_cnt)
+ got_snake = 1;
+ kvdf[d] = i1;
+ if (odd && bmin <= d && d <= bmax && kvdb[d] <= i1) {
+ spl->i1 = i1;
+ spl->i2 = i2;
+ spl->min_lo = spl->min_hi = 1;
+ return ec;
+ }
+ }
+
+ /*
+ * We need to extend the diagonal "domain" by one. If the next
+ * value exits the box boundaries we need to change it in the
+ * opposite direction because (max - min) must be a power of two.
+ * Also we initialize the external K value to -1 so that we can
+ * avoid extra condition checks inside the core loop.
+ */
+ if (bmin > dmin)
+ kvdb[--bmin - 1] = XDL_LINE_MAX;
+ else
+ ++bmin;
+ if (bmax < dmax)
+ kvdb[++bmax + 1] = XDL_LINE_MAX;
+ else
+ --bmax;
+
+ for (d = bmax; d >= bmin; d -= 2) {
+ if (kvdb[d - 1] < kvdb[d + 1])
+ i1 = kvdb[d - 1];
+ else
+ i1 = kvdb[d + 1] - 1;
+ prev1 = i1;
+ i2 = i1 - d;
+ for (; i1 > off1 && i2 > off2 && ha1[i1 - 1] == ha2[i2 - 1]; i1--, i2--);
+ if (prev1 - i1 > xenv->snake_cnt)
+ got_snake = 1;
+ kvdb[d] = i1;
+ if (!odd && fmin <= d && d <= fmax && i1 <= kvdf[d]) {
+ spl->i1 = i1;
+ spl->i2 = i2;
+ spl->min_lo = spl->min_hi = 1;
+ return ec;
+ }
+ }
+
+ if (need_min)
+ continue;
+
+ /*
+ * If the edit cost is above the heuristic trigger and if
+ * we got a good snake, we sample current diagonals to see
+ * if some of them have reached an "interesting" path. Our
+ * measure is a function of the distance from the diagonal
+ * corner (i1 + i2) penalized with the distance from the
+ * mid diagonal itself. If this value is above the current
+ * edit cost times a magic factor (XDL_K_HEUR) we consider
+ * it interesting.
+ */
+ if (got_snake && ec > xenv->heur_min) {
+ for (best = 0, d = fmax; d >= fmin; d -= 2) {
+ dd = d > fmid ? d - fmid: fmid - d;
+ i1 = kvdf[d];
+ i2 = i1 - d;
+ v = (i1 - off1) + (i2 - off2) - dd;
+
+ if (v > XDL_K_HEUR * ec && v > best &&
+ off1 + xenv->snake_cnt <= i1 && i1 < lim1 &&
+ off2 + xenv->snake_cnt <= i2 && i2 < lim2) {
+ for (k = 1; ha1[i1 - k] == ha2[i2 - k]; k++)
+ if (k == xenv->snake_cnt) {
+ best = v;
+ spl->i1 = i1;
+ spl->i2 = i2;
+ break;
+ }
+ }
+ }
+ if (best > 0) {
+ spl->min_lo = 1;
+ spl->min_hi = 0;
+ return ec;
+ }
+
+ for (best = 0, d = bmax; d >= bmin; d -= 2) {
+ dd = d > bmid ? d - bmid: bmid - d;
+ i1 = kvdb[d];
+ i2 = i1 - d;
+ v = (lim1 - i1) + (lim2 - i2) - dd;
+
+ if (v > XDL_K_HEUR * ec && v > best &&
+ off1 < i1 && i1 <= lim1 - xenv->snake_cnt &&
+ off2 < i2 && i2 <= lim2 - xenv->snake_cnt) {
+ for (k = 0; ha1[i1 + k] == ha2[i2 + k]; k++)
+ if (k == xenv->snake_cnt - 1) {
+ best = v;
+ spl->i1 = i1;
+ spl->i2 = i2;
+ break;
+ }
+ }
+ }
+ if (best > 0) {
+ spl->min_lo = 0;
+ spl->min_hi = 1;
+ return ec;
+ }
+ }
+
+ /*
+ * Enough is enough. We spent too much time here and now we collect
+ * the furthest reaching path using the (i1 + i2) measure.
+ */
+ if (ec >= xenv->mxcost) {
+ long fbest, fbest1, bbest, bbest1;
+
+ fbest = -1;
+ for (d = fmax; d >= fmin; d -= 2) {
+ i1 = XDL_MIN(kvdf[d], lim1);
+ i2 = i1 - d;
+ if (lim2 < i2)
+ i1 = lim2 + d, i2 = lim2;
+ if (fbest < i1 + i2) {
+ fbest = i1 + i2;
+ fbest1 = i1;
+ }
+ }
+
+ bbest = XDL_LINE_MAX;
+ for (d = bmax; d >= bmin; d -= 2) {
+ i1 = XDL_MAX(off1, kvdb[d]);
+ i2 = i1 - d;
+ if (i2 < off2)
+ i1 = off2 + d, i2 = off2;
+ if (i1 + i2 < bbest) {
+ bbest = i1 + i2;
+ bbest1 = i1;
+ }
+ }
+
+ if ((lim1 + lim2) - bbest < fbest - (off1 + off2)) {
+ spl->i1 = fbest1;
+ spl->i2 = fbest - fbest1;
+ spl->min_lo = 1;
+ spl->min_hi = 0;
+ } else {
+ spl->i1 = bbest1;
+ spl->i2 = bbest - bbest1;
+ spl->min_lo = 0;
+ spl->min_hi = 1;
+ }
+ return ec;
+ }
+ }
+
+ return -1;
+}
+
+
+/*
+ * Rule: "Divide et Impera". Recursively split the box into sub-boxes by calling
+ * the box-splitting function. Note that the real job (marking changed lines)
+ * is done in the two boundary-reaching checks.
+ */
+int xdl_recs_cmp(diffdata_t *dd1, long off1, long lim1,
+ diffdata_t *dd2, long off2, long lim2,
+ long *kvdf, long *kvdb, int need_min, xdalgoenv_t *xenv) {
+ unsigned long const *ha1 = dd1->ha, *ha2 = dd2->ha;
+
+ /*
+ * Shrink the box by walking through each diagonal snake (SW and NE).
+ */
+ for (; off1 < lim1 && off2 < lim2 && ha1[off1] == ha2[off2]; off1++, off2++);
+ for (; off1 < lim1 && off2 < lim2 && ha1[lim1 - 1] == ha2[lim2 - 1]; lim1--, lim2--);
+
+ /*
+ * If one dimension is empty, then all records in the other one must
+ * obviously be marked as changed.
+ */
+ if (off1 == lim1) {
+ char *rchg2 = dd2->rchg;
+ long *rindex2 = dd2->rindex;
+
+ for (; off2 < lim2; off2++)
+ rchg2[rindex2[off2]] = 1;
+ } else if (off2 == lim2) {
+ char *rchg1 = dd1->rchg;
+ long *rindex1 = dd1->rindex;
+
+ for (; off1 < lim1; off1++)
+ rchg1[rindex1[off1]] = 1;
+ } else {
+ long ec;
+ xdpsplit_t spl;
+
+ /*
+ * Divide ...
+ */
+ if ((ec = xdl_split(ha1, off1, lim1, ha2, off2, lim2, kvdf, kvdb,
+ need_min, &spl, xenv)) < 0) {
+
+ return -1;
+ }
+
+ /*
+ * ... et Impera.
+ */
+ if (xdl_recs_cmp(dd1, off1, spl.i1, dd2, off2, spl.i2,
+ kvdf, kvdb, spl.min_lo, xenv) < 0 ||
+ xdl_recs_cmp(dd1, spl.i1, lim1, dd2, spl.i2, lim2,
+ kvdf, kvdb, spl.min_hi, xenv) < 0) {
+
+ return -1;
+ }
+ }
+
+ return 0;
+}
+
+
+int xdl_do_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
+ xdfenv_t *xe) {
+ long ndiags;
+ long *kvd, *kvdf, *kvdb;
+ xdalgoenv_t xenv;
+ diffdata_t dd1, dd2;
+
+ if (xdl_prepare_env(mf1, mf2, xpp, xe) < 0) {
+
+ return -1;
+ }
+
+ /*
+ * Allocate and set up the K vectors used by the differential algorithm:
+ * one stores the forward path and the other the backward path.
+ */
+ ndiags = xe->xdf1.nreff + xe->xdf2.nreff + 3;
+ if (!(kvd = (long *) xdl_malloc((2 * ndiags + 2) * sizeof(long)))) {
+
+ xdl_free_env(xe);
+ return -1;
+ }
+ kvdf = kvd;
+ kvdb = kvdf + ndiags;
+ kvdf += xe->xdf2.nreff + 1;
+ kvdb += xe->xdf2.nreff + 1;
+
+ xenv.mxcost = xdl_bogosqrt(ndiags);
+ if (xenv.mxcost < XDL_MAX_COST_MIN)
+ xenv.mxcost = XDL_MAX_COST_MIN;
+ xenv.snake_cnt = XDL_SNAKE_CNT;
+ xenv.heur_min = XDL_HEUR_MIN_COST;
+
+ dd1.nrec = xe->xdf1.nreff;
+ dd1.ha = xe->xdf1.ha;
+ dd1.rchg = xe->xdf1.rchg;
+ dd1.rindex = xe->xdf1.rindex;
+ dd2.nrec = xe->xdf2.nreff;
+ dd2.ha = xe->xdf2.ha;
+ dd2.rchg = xe->xdf2.rchg;
+ dd2.rindex = xe->xdf2.rindex;
+
+ if (xdl_recs_cmp(&dd1, 0, dd1.nrec, &dd2, 0, dd2.nrec,
+ kvdf, kvdb, (xpp->flags & XDF_NEED_MINIMAL) != 0, &xenv) < 0) {
+
+ xdl_free(kvd);
+ xdl_free_env(xe);
+ return -1;
+ }
+
+ xdl_free(kvd);
+
+ return 0;
+}
+
+
+static xdchange_t *xdl_add_change(xdchange_t *xscr, long i1, long i2, long chg1, long chg2) {
+ xdchange_t *xch;
+
+ if (!(xch = (xdchange_t *) xdl_malloc(sizeof(xdchange_t))))
+ return NULL;
+
+ xch->next = xscr;
+ xch->i1 = i1;
+ xch->i2 = i2;
+ xch->chg1 = chg1;
+ xch->chg2 = chg2;
+
+ return xch;
+}
+
+
+int xdl_build_script(xdfenv_t *xe, xdchange_t **xscr) {
+ xdchange_t *cscr = NULL, *xch;
+ char *rchg1 = xe->xdf1.rchg, *rchg2 = xe->xdf2.rchg;
+ long i1, i2, l1, l2;
+
+ /*
+ * Trivial. Collects "groups" of changes and creates an edit script.
+ */
+ for (i1 = xe->xdf1.nrec, i2 = xe->xdf2.nrec; i1 >= 0 || i2 >= 0; i1--, i2--)
+ if (rchg1[i1 - 1] || rchg2[i2 - 1]) {
+ for (l1 = i1; rchg1[i1 - 1]; i1--);
+ for (l2 = i2; rchg2[i2 - 1]; i2--);
+
+ if (!(xch = xdl_add_change(cscr, i1, i2, l1 - i1, l2 - i2))) {
+ xdl_free_script(cscr);
+ return -1;
+ }
+ cscr = xch;
+ }
+
+ *xscr = cscr;
+
+ return 0;
+}
+
+
+void xdl_free_script(xdchange_t *xscr) {
+ xdchange_t *xch;
+
+ while ((xch = xscr) != NULL) {
+ xscr = xscr->next;
+ xdl_free(xch);
+ }
+}
+
+
+int xdl_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
+ xdemitconf_t const *xecfg, xdemitcb_t *ecb) {
+ xdchange_t *xscr;
+ xdfenv_t xe;
+
+ if (xdl_do_diff(mf1, mf2, xpp, &xe) < 0) {
+
+ return -1;
+ }
+
+ if (xdl_build_script(&xe, &xscr) < 0) {
+
+ xdl_free_env(&xe);
+ return -1;
+ }
+
+ if (xscr) {
+ if (xdl_emit_diff(&xe, xscr, ecb, xecfg) < 0) {
+
+ xdl_free_script(xscr);
+ xdl_free_env(&xe);
+ return -1;
+ }
+
+ xdl_free_script(xscr);
+ }
+
+ xdl_free_env(&xe);
+
+ return 0;
+}
+
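A reading aid for xdl_split() and xdl_recs_cmp() above (not part of the patch): the split walks Myers' furthest-reaching paths from both corners of the box at once, indexing the kvdf/kvdb vectors by the diagonal k = i1 - i2. The simplified, forward-only sketch below shows the same diagonal bookkeeping on two tiny arrays; every name in it is illustrative.

#include <stdio.h>

#define N1 4
#define N2 5

int main(void)
{
	int a[N1] = { 1, 2, 3, 4 };
	int b[N2] = { 1, 3, 2, 3, 4 };
	int kvd_store[2 * (N1 + N2) + 3];
	int *kvd = kvd_store + N1 + N2 + 1;	/* so kvd[k] works for negative k */
	int d, k, i1, i2;

	kvd[1] = 0;
	for (d = 0; d <= N1 + N2; d++) {
		for (k = -d; k <= d; k += 2) {
			/* pick the better of the two neighbouring diagonals */
			if (k == -d || (k != d && kvd[k - 1] < kvd[k + 1]))
				i1 = kvd[k + 1];
			else
				i1 = kvd[k - 1] + 1;
			i2 = i1 - k;
			/* follow the snake of matching records */
			while (i1 < N1 && i2 < N2 && a[i1] == b[i2]) {
				i1++;
				i2++;
			}
			kvd[k] = i1;
			if (i1 >= N1 && i2 >= N2) {
				printf("edit cost: %d\n", d);
				return 0;
			}
		}
	}
	return 0;
}
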
diff --git a/xdiff/xdiffi.h b/xdiff/xdiffi.h
new file mode 100644
index 0000000..dd8f3c9
--- /dev/null
+++ b/xdiff/xdiffi.h
@@ -0,0 +1,60 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XDIFFI_H)
+#define XDIFFI_H
+
+
+typedef struct s_diffdata {
+ long nrec;
+ unsigned long const *ha;
+ long *rindex;
+ char *rchg;
+} diffdata_t;
+
+typedef struct s_xdalgoenv {
+ long mxcost;
+ long snake_cnt;
+ long heur_min;
+} xdalgoenv_t;
+
+typedef struct s_xdchange {
+ struct s_xdchange *next;
+ long i1, i2;
+ long chg1, chg2;
+} xdchange_t;
+
+
+
+int xdl_recs_cmp(diffdata_t *dd1, long off1, long lim1,
+ diffdata_t *dd2, long off2, long lim2,
+ long *kvdf, long *kvdb, int need_min, xdalgoenv_t *xenv);
+int xdl_do_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
+ xdfenv_t *xe);
+int xdl_build_script(xdfenv_t *xe, xdchange_t **xscr);
+void xdl_free_script(xdchange_t *xscr);
+int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg);
+
+
+#endif /* #if !defined(XDIFFI_H) */
+
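A reading aid for the xdchange_t script declared above (not part of the patch): xdl_build_script() prepends nodes while scanning backwards, so the resulting list is ordered by ascending i1, and each node says that chg1 records starting at i1 in the first file correspond to chg2 records starting at i2 in the second; a zero chg1 is a pure insertion, a zero chg2 a pure deletion. The sketch below walks such a list; the struct is duplicated locally only so the example stands alone.

#include <stdio.h>

typedef struct s_xdchange {
	struct s_xdchange *next;
	long i1, i2;
	long chg1, chg2;
} xdchange_t;

static void print_script(xdchange_t const *xch)
{
	for (; xch; xch = xch->next) {
		if (!xch->chg1)
			printf("insert %ld line(s) after line %ld of file one\n",
			       xch->chg2, xch->i1);
		else if (!xch->chg2)
			printf("delete %ld line(s) at line %ld of file one\n",
			       xch->chg1, xch->i1 + 1);
		else
			printf("replace %ld line(s) at line %ld with %ld line(s)\n",
			       xch->chg1, xch->i1 + 1, xch->chg2);
	}
}

int main(void)
{
	xdchange_t second = { NULL, 5, 6, 2, 0 };
	xdchange_t first = { &second, 1, 1, 0, 3 };

	print_script(&first);
	return 0;
}
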
diff --git a/xdiff/xemit.c b/xdiff/xemit.c
new file mode 100644
index 0000000..ad5bfb1
--- /dev/null
+++ b/xdiff/xemit.c
@@ -0,0 +1,180 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#include "xinclude.h"
+
+
+
+
+static long xdl_get_rec(xdfile_t *xdf, long ri, char const **rec);
+static int xdl_emit_record(xdfile_t *xdf, long ri, char const *pre, xdemitcb_t *ecb);
+static xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg);
+
+
+
+
+static long xdl_get_rec(xdfile_t *xdf, long ri, char const **rec) {
+
+ *rec = xdf->recs[ri]->ptr;
+
+ return xdf->recs[ri]->size;
+}
+
+
+static int xdl_emit_record(xdfile_t *xdf, long ri, char const *pre, xdemitcb_t *ecb) {
+ long size, psize = strlen(pre);
+ char const *rec;
+
+ size = xdl_get_rec(xdf, ri, &rec);
+ if (xdl_emit_diffrec(rec, size, pre, psize, ecb) < 0) {
+
+ return -1;
+ }
+
+ return 0;
+}
+
+
+/*
+ * Starting at the passed change atom, find the latest change atom to be included
+ * inside the differential hunk according to the specified configuration.
+ */
+static xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg) {
+ xdchange_t *xch, *xchp;
+
+ for (xchp = xscr, xch = xscr->next; xch; xchp = xch, xch = xch->next)
+ if (xch->i1 - (xchp->i1 + xchp->chg1) > 2 * xecfg->ctxlen)
+ break;
+
+ return xchp;
+}
+
+
+static void xdl_find_func(xdfile_t *xf, long i, char *buf, long sz, long *ll) {
+
+ /*
+ * Be quite stupid about this for now. Find a line in the old file
+ * before the start of the hunk (and context) which starts with a
+ * plausible character.
+ */
+
+ const char *rec;
+ long len;
+
+ *ll = 0;
+ while (i-- > 0) {
+ len = xdl_get_rec(xf, i, &rec);
+ if (len > 0 &&
+ (isalpha((unsigned char)*rec) || /* identifier? */
+ *rec == '_' || /* also identifier? */
+ *rec == '(' || /* lisp defun? */
+ *rec == '#')) { /* #define? */
+ if (len > sz)
+ len = sz;
+ if (len && rec[len - 1] == '\n')
+ len--;
+ memcpy(buf, rec, len);
+ *ll = len;
+ return;
+ }
+ }
+}
+
+
+int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg) {
+ long s1, s2, e1, e2, lctx;
+ xdchange_t *xch, *xche;
+ char funcbuf[40];
+ long funclen = 0;
+
+ for (xch = xche = xscr; xch; xch = xche->next) {
+ xche = xdl_get_hunk(xch, xecfg);
+
+ s1 = XDL_MAX(xch->i1 - xecfg->ctxlen, 0);
+ s2 = XDL_MAX(xch->i2 - xecfg->ctxlen, 0);
+
+ lctx = xecfg->ctxlen;
+ lctx = XDL_MIN(lctx, xe->xdf1.nrec - (xche->i1 + xche->chg1));
+ lctx = XDL_MIN(lctx, xe->xdf2.nrec - (xche->i2 + xche->chg2));
+
+ e1 = xche->i1 + xche->chg1 + lctx;
+ e2 = xche->i2 + xche->chg2 + lctx;
+
+ /*
+ * Emit current hunk header.
+ */
+
+ if (xecfg->flags & XDL_EMIT_FUNCNAMES) {
+ xdl_find_func(&xe->xdf1, s1, funcbuf,
+ sizeof(funcbuf), &funclen);
+ }
+ if (xdl_emit_hunk_hdr(s1 + 1, e1 - s1, s2 + 1, e2 - s2,
+ funcbuf, funclen, ecb) < 0)
+ return -1;
+
+ /*
+ * Emit pre-context.
+ */
+ for (; s1 < xch->i1; s1++)
+ if (xdl_emit_record(&xe->xdf1, s1, " ", ecb) < 0)
+ return -1;
+
+ for (s1 = xch->i1, s2 = xch->i2;; xch = xch->next) {
+ /*
+ * Merge previous with current change atom.
+ */
+ for (; s1 < xch->i1 && s2 < xch->i2; s1++, s2++)
+ if (xdl_emit_record(&xe->xdf1, s1, " ", ecb) < 0)
+ return -1;
+
+ /*
+ * Remove lines from the first file.
+ */
+ for (s1 = xch->i1; s1 < xch->i1 + xch->chg1; s1++)
+ if (xdl_emit_record(&xe->xdf1, s1, "-", ecb) < 0)
+ return -1;
+
+ /*
+ * Add lines from the second file.
+ */
+ for (s2 = xch->i2; s2 < xch->i2 + xch->chg2; s2++)
+ if (xdl_emit_record(&xe->xdf2, s2, "+", ecb) < 0)
+ return -1;
+
+ if (xch == xche)
+ break;
+ s1 = xch->i1 + xch->chg1;
+ s2 = xch->i2 + xch->chg2;
+ }
+
+ /*
+ * Emit post-context.
+ */
+ for (s1 = xche->i1 + xche->chg1; s1 < e1; s1++)
+ if (xdl_emit_record(&xe->xdf1, s1, " ", ecb) < 0)
+ return -1;
+ }
+
+ return 0;
+}
+
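A reading aid for xdl_emit_diff() above (not part of the patch): the header emitted for each hunk takes the 1-based start and the length of each range, omits ",count" when the count is one, and points an empty range at the line before it, which is the "c ? s : s - 1" in the header formatter. A standalone sketch of that formatting rule:

#include <stdio.h>

static void show_hunk_hdr(long s1, long c1, long s2, long c2)
{
	printf("@@ -%ld", c1 ? s1 : s1 - 1);
	if (c1 != 1)
		printf(",%ld", c1);
	printf(" +%ld", c2 ? s2 : s2 - 1);
	if (c2 != 1)
		printf(",%ld", c2);
	printf(" @@\n");
}

int main(void)
{
	show_hunk_hdr(12, 3, 12, 5);	/* @@ -12,3 +12,5 @@ */
	show_hunk_hdr(7, 0, 8, 2);	/* @@ -6,0 +8,2 @@  (pure insertion) */
	return 0;
}
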
diff --git a/xdiff/xemit.h b/xdiff/xemit.h
new file mode 100644
index 0000000..e629417
--- /dev/null
+++ b/xdiff/xemit.h
@@ -0,0 +1,34 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XEMIT_H)
+#define XEMIT_H
+
+
+
+int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg);
+
+
+
+#endif /* #if !defined(XEMIT_H) */
+
diff --git a/xdiff/xinclude.h b/xdiff/xinclude.h
new file mode 100644
index 0000000..04a9da8
--- /dev/null
+++ b/xdiff/xinclude.h
@@ -0,0 +1,43 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XINCLUDE_H)
+#define XINCLUDE_H
+
+#include <ctype.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <string.h>
+#include <limits.h>
+
+#include "xmacros.h"
+#include "xdiff.h"
+#include "xtypes.h"
+#include "xutils.h"
+#include "xprepare.h"
+#include "xdiffi.h"
+#include "xemit.h"
+
+
+#endif /* #if !defined(XINCLUDE_H) */
+
diff --git a/xdiff/xmacros.h b/xdiff/xmacros.h
new file mode 100644
index 0000000..4c2fde8
--- /dev/null
+++ b/xdiff/xmacros.h
@@ -0,0 +1,53 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XMACROS_H)
+#define XMACROS_H
+
+
+#define GR_PRIME 0x9e370001UL
+
+
+#define XDL_MIN(a, b) ((a) < (b) ? (a): (b))
+#define XDL_MAX(a, b) ((a) > (b) ? (a): (b))
+#define XDL_ABS(v) ((v) >= 0 ? (v): -(v))
+#define XDL_ISDIGIT(c) ((c) >= '0' && (c) <= '9')
+#define XDL_HASHLONG(v, b) (((unsigned long)(v) * GR_PRIME) >> ((CHAR_BIT * sizeof(unsigned long)) - (b)))
+#define XDL_PTRFREE(p) do { if (p) { xdl_free(p); (p) = NULL; } } while (0)
+#define XDL_LE32_PUT(p, v) \
+do { \
+ unsigned char *__p = (unsigned char *) (p); \
+ *__p++ = (unsigned char) (v); \
+ *__p++ = (unsigned char) ((v) >> 8); \
+ *__p++ = (unsigned char) ((v) >> 16); \
+ *__p = (unsigned char) ((v) >> 24); \
+} while (0)
+#define XDL_LE32_GET(p, v) \
+do { \
+ unsigned char const *__p = (unsigned char const *) (p); \
+ (v) = (unsigned long) __p[0] | ((unsigned long) __p[1]) << 8 | \
+ ((unsigned long) __p[2]) << 16 | ((unsigned long) __p[3]) << 24; \
+} while (0)
+
+
+#endif /* #if !defined(XMACROS_H) */
+
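A reading aid for XDL_HASHLONG above (not part of the patch): multiplying by GR_PRIME scrambles the value and the right shift keeps only the top b bits, so a record hash is mapped to one of 2^b hash-table buckets. A standalone sketch; the sample value 5381 is simply the hash an empty line would get from xdl_hash_record().

#include <limits.h>
#include <stdio.h>

#define GR_PRIME 0x9e370001UL
#define XDL_HASHLONG(v, b) (((unsigned long)(v) * GR_PRIME) >> ((CHAR_BIT * sizeof(unsigned long)) - (b)))

int main(void)
{
	unsigned int bits = 7;			/* 128 buckets */
	unsigned long line_hash = 5381;		/* hash of an empty line */

	printf("bucket %lu of %lu\n",
	       XDL_HASHLONG(line_hash, bits), 1UL << bits);
	return 0;
}
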
diff --git a/xdiff/xprepare.c b/xdiff/xprepare.c
new file mode 100644
index 0000000..add5a75
--- /dev/null
+++ b/xdiff/xprepare.c
@@ -0,0 +1,465 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#include "xinclude.h"
+
+
+
+#define XDL_KPDIS_RUN 4
+#define XDL_MAX_EQLIMIT 1024
+
+
+
+typedef struct s_xdlclass {
+ struct s_xdlclass *next;
+ unsigned long ha;
+ char const *line;
+ long size;
+ long idx;
+} xdlclass_t;
+
+typedef struct s_xdlclassifier {
+ unsigned int hbits;
+ long hsize;
+ xdlclass_t **rchash;
+ chastore_t ncha;
+ long count;
+} xdlclassifier_t;
+
+
+
+
+static int xdl_init_classifier(xdlclassifier_t *cf, long size);
+static void xdl_free_classifier(xdlclassifier_t *cf);
+static int xdl_classify_record(xdlclassifier_t *cf, xrecord_t **rhash, unsigned int hbits,
+ xrecord_t *rec);
+static int xdl_prepare_ctx(mmfile_t *mf, long narec, xpparam_t const *xpp,
+ xdlclassifier_t *cf, xdfile_t *xdf);
+static void xdl_free_ctx(xdfile_t *xdf);
+static int xdl_clean_mmatch(char const *dis, long i, long s, long e);
+static int xdl_cleanup_records(xdfile_t *xdf1, xdfile_t *xdf2);
+static int xdl_trim_ends(xdfile_t *xdf1, xdfile_t *xdf2);
+static int xdl_optimize_ctxs(xdfile_t *xdf1, xdfile_t *xdf2);
+
+
+
+
+static int xdl_init_classifier(xdlclassifier_t *cf, long size) {
+ long i;
+
+ cf->hbits = xdl_hashbits((unsigned int) size);
+ cf->hsize = 1 << cf->hbits;
+
+ if (xdl_cha_init(&cf->ncha, sizeof(xdlclass_t), size / 4 + 1) < 0) {
+
+ return -1;
+ }
+ if (!(cf->rchash = (xdlclass_t **) xdl_malloc(cf->hsize * sizeof(xdlclass_t *)))) {
+
+ xdl_cha_free(&cf->ncha);
+ return -1;
+ }
+ for (i = 0; i < cf->hsize; i++)
+ cf->rchash[i] = NULL;
+
+ cf->count = 0;
+
+ return 0;
+}
+
+
+static void xdl_free_classifier(xdlclassifier_t *cf) {
+
+ xdl_free(cf->rchash);
+ xdl_cha_free(&cf->ncha);
+}
+
+
+static int xdl_classify_record(xdlclassifier_t *cf, xrecord_t **rhash, unsigned int hbits,
+ xrecord_t *rec) {
+ long hi;
+ char const *line;
+ xdlclass_t *rcrec;
+
+ line = rec->ptr;
+ hi = (long) XDL_HASHLONG(rec->ha, cf->hbits);
+ for (rcrec = cf->rchash[hi]; rcrec; rcrec = rcrec->next)
+ if (rcrec->ha == rec->ha && rcrec->size == rec->size &&
+ !memcmp(line, rcrec->line, rec->size))
+ break;
+
+ if (!rcrec) {
+ if (!(rcrec = xdl_cha_alloc(&cf->ncha))) {
+
+ return -1;
+ }
+ rcrec->idx = cf->count++;
+ rcrec->line = line;
+ rcrec->size = rec->size;
+ rcrec->ha = rec->ha;
+ rcrec->next = cf->rchash[hi];
+ cf->rchash[hi] = rcrec;
+ }
+
+ rec->ha = (unsigned long) rcrec->idx;
+
+ hi = (long) XDL_HASHLONG(rec->ha, hbits);
+ rec->next = rhash[hi];
+ rhash[hi] = rec;
+
+ return 0;
+}
+
+
+static int xdl_prepare_ctx(mmfile_t *mf, long narec, xpparam_t const *xpp,
+ xdlclassifier_t *cf, xdfile_t *xdf) {
+ unsigned int hbits;
+ long i, nrec, hsize, bsize;
+ unsigned long hav;
+ char const *blk, *cur, *top, *prev;
+ xrecord_t *crec;
+ xrecord_t **recs, **rrecs;
+ xrecord_t **rhash;
+ unsigned long *ha;
+ char *rchg;
+ long *rindex;
+
+ if (xdl_cha_init(&xdf->rcha, sizeof(xrecord_t), narec / 4 + 1) < 0) {
+
+ return -1;
+ }
+ if (!(recs = (xrecord_t **) xdl_malloc(narec * sizeof(xrecord_t *)))) {
+
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+
+ hbits = xdl_hashbits((unsigned int) narec);
+ hsize = 1 << hbits;
+ if (!(rhash = (xrecord_t **) xdl_malloc(hsize * sizeof(xrecord_t *)))) {
+
+ xdl_free(recs);
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+ for (i = 0; i < hsize; i++)
+ rhash[i] = NULL;
+
+ nrec = 0;
+ if ((cur = blk = xdl_mmfile_first(mf, &bsize)) != NULL) {
+ for (top = blk + bsize;;) {
+ if (cur >= top) {
+ if (!(cur = blk = xdl_mmfile_next(mf, &bsize)))
+ break;
+ top = blk + bsize;
+ }
+ prev = cur;
+ hav = xdl_hash_record(&cur, top);
+ if (nrec >= narec) {
+ narec *= 2;
+ if (!(rrecs = (xrecord_t **) xdl_realloc(recs, narec * sizeof(xrecord_t *)))) {
+
+ xdl_free(rhash);
+ xdl_free(recs);
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+ recs = rrecs;
+ }
+ if (!(crec = xdl_cha_alloc(&xdf->rcha))) {
+
+ xdl_free(rhash);
+ xdl_free(recs);
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+ crec->ptr = prev;
+ crec->size = (long) (cur - prev);
+ crec->ha = hav;
+ recs[nrec++] = crec;
+
+ if (xdl_classify_record(cf, rhash, hbits, crec) < 0) {
+
+ xdl_free(rhash);
+ xdl_free(recs);
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+ }
+ }
+
+ if (!(rchg = (char *) xdl_malloc((nrec + 2) * sizeof(char)))) {
+
+ xdl_free(rhash);
+ xdl_free(recs);
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+ memset(rchg, 0, (nrec + 2) * sizeof(char));
+
+ if (!(rindex = (long *) xdl_malloc((nrec + 1) * sizeof(long)))) {
+
+ xdl_free(rchg);
+ xdl_free(rhash);
+ xdl_free(recs);
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+ if (!(ha = (unsigned long *) xdl_malloc((nrec + 1) * sizeof(unsigned long)))) {
+
+ xdl_free(rindex);
+ xdl_free(rchg);
+ xdl_free(rhash);
+ xdl_free(recs);
+ xdl_cha_free(&xdf->rcha);
+ return -1;
+ }
+
+ xdf->nrec = nrec;
+ xdf->recs = recs;
+ xdf->hbits = hbits;
+ xdf->rhash = rhash;
+ xdf->rchg = rchg + 1;
+ xdf->rindex = rindex;
+ xdf->nreff = 0;
+ xdf->ha = ha;
+ xdf->dstart = 0;
+ xdf->dend = nrec - 1;
+
+ return 0;
+}
+
+
+static void xdl_free_ctx(xdfile_t *xdf) {
+
+ xdl_free(xdf->rhash);
+ xdl_free(xdf->rindex);
+ xdl_free(xdf->rchg - 1);
+ xdl_free(xdf->ha);
+ xdl_free(xdf->recs);
+ xdl_cha_free(&xdf->rcha);
+}
+
+
+int xdl_prepare_env(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
+ xdfenv_t *xe) {
+ long enl1, enl2;
+ xdlclassifier_t cf;
+
+ enl1 = xdl_guess_lines(mf1) + 1;
+ enl2 = xdl_guess_lines(mf2) + 1;
+
+ if (xdl_init_classifier(&cf, enl1 + enl2 + 1) < 0) {
+
+ return -1;
+ }
+
+ if (xdl_prepare_ctx(mf1, enl1, xpp, &cf, &xe->xdf1) < 0) {
+
+ xdl_free_classifier(&cf);
+ return -1;
+ }
+ if (xdl_prepare_ctx(mf2, enl2, xpp, &cf, &xe->xdf2) < 0) {
+
+ xdl_free_ctx(&xe->xdf1);
+ xdl_free_classifier(&cf);
+ return -1;
+ }
+
+ xdl_free_classifier(&cf);
+
+ if (xdl_optimize_ctxs(&xe->xdf1, &xe->xdf2) < 0) {
+
+ xdl_free_ctx(&xe->xdf2);
+ xdl_free_ctx(&xe->xdf1);
+ return -1;
+ }
+
+ return 0;
+}
+
+
+void xdl_free_env(xdfenv_t *xe) {
+
+ xdl_free_ctx(&xe->xdf2);
+ xdl_free_ctx(&xe->xdf1);
+}
+
+
+static int xdl_clean_mmatch(char const *dis, long i, long s, long e) {
+ long r, rdis0, rpdis0, rdis1, rpdis1;
+
+ /*
+ * Scans the lines before 'i' to find a run of lines that either
+ * have no match (dis[j] == 0) or have multiple matches (dis[j] > 1).
+ * Note that we always call this function with dis[i] > 1, so the
+ * current line (i) is already a multimatch line.
+ */
+ for (r = 1, rdis0 = 0, rpdis0 = 1; (i - r) >= s; r++) {
+ if (!dis[i - r])
+ rdis0++;
+ else if (dis[i - r] == 2)
+ rpdis0++;
+ else
+ break;
+ }
+ /*
+ * If the run before the line 'i' contains only multimatch lines, we
+ * return 0 and hence the current line (i) is not discarded.
+ * We want to discard multimatch lines only when they appear in the
+ * middle of runs of no-match lines (dis[j] == 0).
+ */
+ if (rdis0 == 0)
+ return 0;
+ for (r = 1, rdis1 = 0, rpdis1 = 1; (i + r) <= e; r++) {
+ if (!dis[i + r])
+ rdis1++;
+ else if (dis[i + r] == 2)
+ rpdis1++;
+ else
+ break;
+ }
+ /*
+ * If the run after the line 'i' contains only multimatch lines, we
+ * return 0 and hence the current line (i) is not discarded.
+ */
+ if (rdis1 == 0)
+ return 0;
+ rdis1 += rdis0;
+ rpdis1 += rpdis0;
+
+ return rpdis1 * XDL_KPDIS_RUN < (rpdis1 + rdis1);
+}
+
+
+/*
+ * Try to reduce the problem complexity by discarding records that have
+ * no match in the other file. Also, lines that have multiple matches
+ * may be discarded if they appear in a run of discardable lines.
+ */
+static int xdl_cleanup_records(xdfile_t *xdf1, xdfile_t *xdf2) {
+ long i, nm, rhi, nreff, mlim;
+ unsigned long hav;
+ xrecord_t **recs;
+ xrecord_t *rec;
+ char *dis, *dis1, *dis2;
+
+ if (!(dis = (char *) xdl_malloc(xdf1->nrec + xdf2->nrec + 2))) {
+
+ return -1;
+ }
+ memset(dis, 0, xdf1->nrec + xdf2->nrec + 2);
+ dis1 = dis;
+ dis2 = dis1 + xdf1->nrec + 1;
+
+ if ((mlim = xdl_bogosqrt(xdf1->nrec)) > XDL_MAX_EQLIMIT)
+ mlim = XDL_MAX_EQLIMIT;
+ for (i = xdf1->dstart, recs = &xdf1->recs[xdf1->dstart]; i <= xdf1->dend; i++, recs++) {
+ hav = (*recs)->ha;
+ rhi = (long) XDL_HASHLONG(hav, xdf2->hbits);
+ for (nm = 0, rec = xdf2->rhash[rhi]; rec; rec = rec->next)
+ if (rec->ha == hav && ++nm == mlim)
+ break;
+ dis1[i] = (nm == 0) ? 0: (nm >= mlim) ? 2: 1;
+ }
+
+ if ((mlim = xdl_bogosqrt(xdf2->nrec)) > XDL_MAX_EQLIMIT)
+ mlim = XDL_MAX_EQLIMIT;
+ for (i = xdf2->dstart, recs = &xdf2->recs[xdf2->dstart]; i <= xdf2->dend; i++, recs++) {
+ hav = (*recs)->ha;
+ rhi = (long) XDL_HASHLONG(hav, xdf1->hbits);
+ for (nm = 0, rec = xdf1->rhash[rhi]; rec; rec = rec->next)
+ if (rec->ha == hav && ++nm == mlim)
+ break;
+ dis2[i] = (nm == 0) ? 0: (nm >= mlim) ? 2: 1;
+ }
+
+ for (nreff = 0, i = xdf1->dstart, recs = &xdf1->recs[xdf1->dstart];
+ i <= xdf1->dend; i++, recs++) {
+ if (dis1[i] == 1 ||
+ (dis1[i] == 2 && !xdl_clean_mmatch(dis1, i, xdf1->dstart, xdf1->dend))) {
+ xdf1->rindex[nreff] = i;
+ xdf1->ha[nreff] = (*recs)->ha;
+ nreff++;
+ } else
+ xdf1->rchg[i] = 1;
+ }
+ xdf1->nreff = nreff;
+
+ for (nreff = 0, i = xdf2->dstart, recs = &xdf2->recs[xdf2->dstart];
+ i <= xdf2->dend; i++, recs++) {
+ if (dis2[i] == 1 ||
+ (dis2[i] == 2 && !xdl_clean_mmatch(dis2, i, xdf2->dstart, xdf2->dend))) {
+ xdf2->rindex[nreff] = i;
+ xdf2->ha[nreff] = (*recs)->ha;
+ nreff++;
+ } else
+ xdf2->rchg[i] = 1;
+ }
+ xdf2->nreff = nreff;
+
+ xdl_free(dis);
+
+ return 0;
+}
+
+
+/*
+ * Trim the initial and terminal matching records early.
+ */
+static int xdl_trim_ends(xdfile_t *xdf1, xdfile_t *xdf2) {
+ long i, lim;
+ xrecord_t **recs1, **recs2;
+
+ recs1 = xdf1->recs;
+ recs2 = xdf2->recs;
+ for (i = 0, lim = XDL_MIN(xdf1->nrec, xdf2->nrec); i < lim;
+ i++, recs1++, recs2++)
+ if ((*recs1)->ha != (*recs2)->ha)
+ break;
+
+ xdf1->dstart = xdf2->dstart = i;
+
+ recs1 = xdf1->recs + xdf1->nrec - 1;
+ recs2 = xdf2->recs + xdf2->nrec - 1;
+ for (lim -= i, i = 0; i < lim; i++, recs1--, recs2--)
+ if ((*recs1)->ha != (*recs2)->ha)
+ break;
+
+ xdf1->dend = xdf1->nrec - i - 1;
+ xdf2->dend = xdf2->nrec - i - 1;
+
+ return 0;
+}
+
+
+static int xdl_optimize_ctxs(xdfile_t *xdf1, xdfile_t *xdf2) {
+
+ if (xdl_trim_ends(xdf1, xdf2) < 0 ||
+ xdl_cleanup_records(xdf1, xdf2) < 0) {
+
+ return -1;
+ }
+
+ return 0;
+}
+
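A reading aid for xdl_classify_record() above (not part of the patch): every distinct line content is assigned a small integer class id, so that the diff core later compares records with a single integer comparison; the ha[] arrays filled in xdl_cleanup_records() hold those ids. The toy classifier below uses a linear scan where the real code keeps a hash table, and all names in it are illustrative.

#include <stdio.h>
#include <string.h>

#define MAX_CLASSES 64

static char const *classes[MAX_CLASSES];
static int nclasses;

static int classify(char const *line)
{
	int i;

	for (i = 0; i < nclasses; i++)
		if (!strcmp(classes[i], line))
			return i;
	if (nclasses == MAX_CLASSES)
		return -1;
	classes[nclasses] = line;
	return nclasses++;
}

int main(void)
{
	char const *file[] = { "int a;", "int b;", "int a;" };
	int i;

	for (i = 0; i < 3; i++)
		printf("\"%s\" -> class %d\n", file[i], classify(file[i]));
	return 0;
}
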
diff --git a/xdiff/xprepare.h b/xdiff/xprepare.h
new file mode 100644
index 0000000..344c569
--- /dev/null
+++ b/xdiff/xprepare.h
@@ -0,0 +1,35 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XPREPARE_H)
+#define XPREPARE_H
+
+
+
+int xdl_prepare_env(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
+ xdfenv_t *xe);
+void xdl_free_env(xdfenv_t *xe);
+
+
+
+#endif /* #if !defined(XPREPARE_H) */
+
diff --git a/xdiff/xtypes.h b/xdiff/xtypes.h
new file mode 100644
index 0000000..3593a66
--- /dev/null
+++ b/xdiff/xtypes.h
@@ -0,0 +1,68 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XTYPES_H)
+#define XTYPES_H
+
+
+
+typedef struct s_chanode {
+ struct s_chanode *next;
+ long icurr;
+} chanode_t;
+
+typedef struct s_chastore {
+ chanode_t *head, *tail;
+ long isize, nsize;
+ chanode_t *ancur;
+ chanode_t *sncur;
+ long scurr;
+} chastore_t;
+
+typedef struct s_xrecord {
+ struct s_xrecord *next;
+ char const *ptr;
+ long size;
+ unsigned long ha;
+} xrecord_t;
+
+typedef struct s_xdfile {
+ chastore_t rcha;
+ long nrec;
+ unsigned int hbits;
+ xrecord_t **rhash;
+ long dstart, dend;
+ xrecord_t **recs;
+ char *rchg;
+ long *rindex;
+ long nreff;
+ unsigned long *ha;
+} xdfile_t;
+
+typedef struct s_xdfenv {
+ xdfile_t xdf1, xdf2;
+} xdfenv_t;
+
+
+
+#endif /* #if !defined(XTYPES_H) */
+
diff --git a/xdiff/xutils.c b/xdiff/xutils.c
new file mode 100644
index 0000000..21ab8e7
--- /dev/null
+++ b/xdiff/xutils.c
@@ -0,0 +1,299 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#include "xinclude.h"
+
+
+
+#define XDL_GUESS_NLINES 256
+
+
+
+
+long xdl_bogosqrt(long n) {
+ long i;
+
+ /*
+ * Classical integer square root approximation using shifts.
+ */
+ for (i = 1; n > 0; n >>= 2)
+ i <<= 1;
+
+ return i;
+}
+
+
+int xdl_emit_diffrec(char const *rec, long size, char const *pre, long psize,
+ xdemitcb_t *ecb) {
+ mmbuffer_t mb[3];
+ int i;
+
+ mb[0].ptr = (char *) pre;
+ mb[0].size = psize;
+ mb[1].ptr = (char *) rec;
+ mb[1].size = size;
+ i = 2;
+
+ if (!size || rec[size-1] != '\n') {
+ mb[2].ptr = "\n\\ No newline at end of file\n";
+ mb[2].size = strlen(mb[2].ptr);
+ i = 3;
+ }
+
+ if (ecb->outf(ecb->priv, mb, i) < 0) {
+
+ return -1;
+ }
+
+ return 0;
+}
+
+void *xdl_mmfile_first(mmfile_t *mmf, long *size)
+{
+ *size = mmf->size;
+ return mmf->ptr;
+}
+
+
+void *xdl_mmfile_next(mmfile_t *mmf, long *size)
+{
+ return NULL;
+}
+
+
+long xdl_mmfile_size(mmfile_t *mmf)
+{
+ return mmf->size;
+}
+
+
+int xdl_cha_init(chastore_t *cha, long isize, long icount) {
+
+ cha->head = cha->tail = NULL;
+ cha->isize = isize;
+ cha->nsize = icount * isize;
+ cha->ancur = cha->sncur = NULL;
+ cha->scurr = 0;
+
+ return 0;
+}
+
+
+void xdl_cha_free(chastore_t *cha) {
+ chanode_t *cur, *tmp;
+
+ for (cur = cha->head; (tmp = cur) != NULL;) {
+ cur = cur->next;
+ xdl_free(tmp);
+ }
+}
+
+
+void *xdl_cha_alloc(chastore_t *cha) {
+ chanode_t *ancur;
+ void *data;
+
+ if (!(ancur = cha->ancur) || ancur->icurr == cha->nsize) {
+ if (!(ancur = (chanode_t *) xdl_malloc(sizeof(chanode_t) + cha->nsize))) {
+
+ return NULL;
+ }
+ ancur->icurr = 0;
+ ancur->next = NULL;
+ if (cha->tail)
+ cha->tail->next = ancur;
+ if (!cha->head)
+ cha->head = ancur;
+ cha->tail = ancur;
+ cha->ancur = ancur;
+ }
+
+ data = (char *) ancur + sizeof(chanode_t) + ancur->icurr;
+ ancur->icurr += cha->isize;
+
+ return data;
+}
+
+
+void *xdl_cha_first(chastore_t *cha) {
+ chanode_t *sncur;
+
+ if (!(cha->sncur = sncur = cha->head))
+ return NULL;
+
+ cha->scurr = 0;
+
+ return (char *) sncur + sizeof(chanode_t) + cha->scurr;
+}
+
+
+void *xdl_cha_next(chastore_t *cha) {
+ chanode_t *sncur;
+
+ if (!(sncur = cha->sncur))
+ return NULL;
+ cha->scurr += cha->isize;
+ if (cha->scurr == sncur->icurr) {
+ if (!(sncur = cha->sncur = sncur->next))
+ return NULL;
+ cha->scurr = 0;
+ }
+
+ return (char *) sncur + sizeof(chanode_t) + cha->scurr;
+}
+
+
+long xdl_guess_lines(mmfile_t *mf) {
+ long nl = 0, size, tsize = 0;
+ char const *data, *cur, *top;
+
+ if ((cur = data = xdl_mmfile_first(mf, &size)) != NULL) {
+ for (top = data + size; nl < XDL_GUESS_NLINES;) {
+ if (cur >= top) {
+ tsize += (long) (cur - data);
+ if (!(cur = data = xdl_mmfile_next(mf, &size)))
+ break;
+ top = data + size;
+ }
+ nl++;
+ if (!(cur = memchr(cur, '\n', top - cur)))
+ cur = top;
+ else
+ cur++;
+ }
+ tsize += (long) (cur - data);
+ }
+
+ if (nl && tsize)
+ nl = xdl_mmfile_size(mf) / (tsize / nl);
+
+ return nl + 1;
+}
+
+
+unsigned long xdl_hash_record(char const **data, char const *top) {
+ unsigned long ha = 5381;
+ char const *ptr = *data;
+
+ for (; ptr < top && *ptr != '\n'; ptr++) {
+ ha += (ha << 5);
+ ha ^= (unsigned long) *ptr;
+ }
+ *data = ptr < top ? ptr + 1: ptr;
+
+ return ha;
+}
+
+
+unsigned int xdl_hashbits(unsigned int size) {
+ unsigned int val = 1, bits = 0;
+
+ for (; val < size && bits < CHAR_BIT * sizeof(unsigned int); val <<= 1, bits++);
+ return bits ? bits: 1;
+}
+
+
+int xdl_num_out(char *out, long val) {
+ char *ptr, *str = out;
+ char buf[32];
+
+ ptr = buf + sizeof(buf) - 1;
+ *ptr = '\0';
+ if (val < 0) {
+ *--ptr = '-';
+ val = -val;
+ }
+ for (; val && ptr > buf; val /= 10)
+ *--ptr = "0123456789"[val % 10];
+ if (*ptr)
+ for (; *ptr; ptr++, str++)
+ *str = *ptr;
+ else
+ *str++ = '0';
+ *str = '\0';
+
+ return str - out;
+}
+
+
+long xdl_atol(char const *str, char const **next) {
+ long val, base;
+ char const *top;
+
+ for (top = str; XDL_ISDIGIT(*top); top++);
+ if (next)
+ *next = top;
+ for (val = 0, base = 1, top--; top >= str; top--, base *= 10)
+ val += base * (long)(*top - '0');
+ return val;
+}
+
+
+int xdl_emit_hunk_hdr(long s1, long c1, long s2, long c2,
+ const char *func, long funclen, xdemitcb_t *ecb) {
+ int nb = 0;
+ mmbuffer_t mb;
+ char buf[128];
+
+ memcpy(buf, "@@ -", 4);
+ nb += 4;
+
+ nb += xdl_num_out(buf + nb, c1 ? s1: s1 - 1);
+
+ if (c1 != 1) {
+ memcpy(buf + nb, ",", 1);
+ nb += 1;
+
+ nb += xdl_num_out(buf + nb, c1);
+ }
+
+ memcpy(buf + nb, " +", 2);
+ nb += 2;
+
+ nb += xdl_num_out(buf + nb, c2 ? s2: s2 - 1);
+
+ if (c2 != 1) {
+ memcpy(buf + nb, ",", 1);
+ nb += 1;
+
+ nb += xdl_num_out(buf + nb, c2);
+ }
+
+ memcpy(buf + nb, " @@", 3);
+ nb += 3;
+ if (func && funclen) {
+ buf[nb++] = ' ';
+ if (funclen > sizeof(buf) - nb - 1)
+ funclen = sizeof(buf) - nb - 1;
+ memcpy(buf + nb, func, funclen);
+ nb += funclen;
+ }
+ buf[nb++] = '\n';
+
+ mb.ptr = buf;
+ mb.size = nb;
+ if (ecb->outf(ecb->priv, &mb, 1) < 0)
+ return -1;
+
+ return 0;
+}
+
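A reading aid for xdl_hash_record() above (not part of the patch): the hash is a djb2-style "ha = ha * 33 ^ c" over the bytes of one line, and the cursor is left just past the newline so the caller (xdl_prepare_ctx()) can compute the record size from how far it moved. The standalone sketch below repeats that loop over a small buffer.

#include <stdio.h>

static unsigned long hash_record(char const **data, char const *top)
{
	unsigned long ha = 5381;
	char const *ptr = *data;

	for (; ptr < top && *ptr != '\n'; ptr++) {
		ha += (ha << 5);
		ha ^= (unsigned long) *ptr;
	}
	*data = ptr < top ? ptr + 1 : ptr;

	return ha;
}

int main(void)
{
	char const buf[] = "alpha\nbeta\ngamma\n";
	char const *cur = buf, *top = buf + sizeof(buf) - 1;

	while (cur < top) {
		char const *prev = cur;
		unsigned long ha = hash_record(&cur, top);

		printf("record of %ld byte(s), hash %08lx\n",
		       (long) (cur - prev), ha);
	}
	return 0;
}
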
diff --git a/xdiff/xutils.h b/xdiff/xutils.h
new file mode 100644
index 0000000..ea38ee9
--- /dev/null
+++ b/xdiff/xutils.h
@@ -0,0 +1,46 @@
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003 Davide Libenzi
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#if !defined(XUTILS_H)
+#define XUTILS_H
+
+
+long xdl_bogosqrt(long n);
+int xdl_emit_diffrec(char const *rec, long size, char const *pre, long psize,
+ xdemitcb_t *ecb);
+int xdl_cha_init(chastore_t *cha, long isize, long icount);
+void xdl_cha_free(chastore_t *cha);
+void *xdl_cha_alloc(chastore_t *cha);
+void *xdl_cha_first(chastore_t *cha);
+void *xdl_cha_next(chastore_t *cha);
+long xdl_guess_lines(mmfile_t *mf);
+unsigned long xdl_hash_record(char const **data, char const *top);
+unsigned int xdl_hashbits(unsigned int size);
+int xdl_num_out(char *out, long val);
+long xdl_atol(char const *str, char const **next);
+int xdl_emit_hunk_hdr(long s1, long c1, long s2, long c2,
+ const char *func, long funclen, xdemitcb_t *ecb);
+
+
+
+#endif /* #if !defined(XUTILS_H) */
+
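Finally, a sketch of how a caller would drive this API (not part of the patch). The mmfile_t, mmbuffer_t, xpparam_t, xdemitconf_t and xdemitcb_t types live in xdiff.h, which is not shown in this excerpt; the fields used below (ptr/size, flags, ctxlen, priv/outf) are only the ones the code above itself relies on, so treat the exact layouts as an assumption.

#include <stdio.h>
#include <string.h>
#include "xdiff.h"

/* Output callback: write each emitted buffer to the FILE passed as priv. */
static int write_mmb(void *priv, mmbuffer_t *mb, int nbuf)
{
	int i;

	for (i = 0; i < nbuf; i++)
		if (mb[i].size &&
		    fwrite(mb[i].ptr, mb[i].size, 1, (FILE *) priv) != 1)
			return -1;
	return 0;
}

int main(void)
{
	char old_buf[] = "one\ntwo\nthree\n";
	char new_buf[] = "one\n2\nthree\n";
	mmfile_t mf1, mf2;
	xpparam_t xpp;
	xdemitconf_t xecfg;
	xdemitcb_t ecb;

	mf1.ptr = old_buf;
	mf1.size = strlen(old_buf);
	mf2.ptr = new_buf;
	mf2.size = strlen(new_buf);

	memset(&xpp, 0, sizeof(xpp));		/* no XDF_NEED_MINIMAL */
	memset(&xecfg, 0, sizeof(xecfg));
	xecfg.ctxlen = 3;			/* three lines of context */

	ecb.priv = stdout;
	ecb.outf = write_mmb;

	return xdl_diff(&mf1, &mf2, &xpp, &xecfg, &ecb) < 0;
}
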