-rw-r--r--  .gitignore | 5
-rw-r--r--  Documentation/Makefile | 18
-rw-r--r--  Documentation/RelNotes/1.7.12.txt | 145
-rw-r--r--  Documentation/config.txt | 40
-rw-r--r--  Documentation/git-apply.txt | 11
-rw-r--r--  Documentation/git-clone.txt | 19
-rw-r--r--  Documentation/git-config.txt | 17
-rw-r--r--  Documentation/git-credential.txt | 154
-rw-r--r--  Documentation/git-p4.txt | 10
-rw-r--r--  Documentation/git-rebase.txt | 52
-rw-r--r--  Documentation/git-rev-parse.txt | 6
-rw-r--r--  Documentation/gitattributes.txt | 2
-rw-r--r--  Documentation/gitignore.txt | 4
-rw-r--r--  Documentation/technical/api-credentials.txt | 39
-rwxr-xr-x  GIT-VERSION-GEN | 2
-rw-r--r--  Makefile | 288
l---------  RelNotes | 2
-rw-r--r--  attr.c | 5
-rw-r--r--  branch.c | 38
-rw-r--r--  builtin.h | 5
-rw-r--r--  builtin/apply.c | 563
-rw-r--r--  builtin/blame.c | 3
-rw-r--r--  builtin/cat-file.c | 2
-rw-r--r--  builtin/checkout.c | 3
-rw-r--r--  builtin/clone.c | 10
-rw-r--r--  builtin/commit-tree.c | 4
-rw-r--r--  builtin/commit.c | 2
-rw-r--r--  builtin/config.c | 35
-rw-r--r--  builtin/credential.c | 31
-rw-r--r--  builtin/help.c | 25
-rw-r--r--  builtin/index-pack.c | 199
-rw-r--r--  builtin/init-db.c | 1
-rw-r--r--  builtin/log.c | 4
-rw-r--r--  builtin/pack-objects.c | 75
-rw-r--r--  builtin/reflog.c | 6
-rw-r--r--  builtin/reset.c | 10
-rw-r--r--  builtin/rev-parse.c | 14
-rw-r--r--  builtin/update-index.c | 9
-rw-r--r--  cache.h | 51
-rw-r--r--  command-list.txt | 1
-rw-r--r--  commit.c | 2
-rw-r--r--  compat/precompose_utf8.c | 190
-rw-r--r--  compat/precompose_utf8.h | 45
-rw-r--r--  compat/terminal.c | 1
-rw-r--r--  config.c | 28
-rw-r--r--  config.mak.in | 1
-rw-r--r--  configure.ac | 63
-rw-r--r--  connect.c | 18
-rw-r--r-- [-rwxr-xr-x]  contrib/completion/git-completion.bash | 262
-rw-r--r--  contrib/completion/git-prompt.sh | 289
-rw-r--r--  contrib/mw-to-git/Makefile | 47
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki | 777
-rw-r--r--  contrib/mw-to-git/t/.gitignore | 4
-rw-r--r--  contrib/mw-to-git/t/Makefile | 31
-rw-r--r--  contrib/mw-to-git/t/README | 124
-rwxr-xr-x  contrib/mw-to-git/t/install-wiki.sh | 45
-rw-r--r--  contrib/mw-to-git/t/install-wiki/.gitignore | 1
-rw-r--r--  contrib/mw-to-git/t/install-wiki/LocalSettings.php | 129
-rw-r--r--  contrib/mw-to-git/t/install-wiki/db_install.php | 120
-rw-r--r--  contrib/mw-to-git/t/push-pull-tests.sh | 144
-rwxr-xr-x  contrib/mw-to-git/t/t9360-mw-to-git-clone.sh | 257
-rwxr-xr-x  contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh | 24
-rwxr-xr-x  contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh | 321
-rwxr-xr-x  contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh | 198
-rwxr-xr-x  contrib/mw-to-git/t/t9364-pull-by-rev.sh | 17
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh | 435
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw.pl | 225
-rw-r--r--  contrib/mw-to-git/t/test.config | 35
-rw-r--r--  credential.c | 4
-rw-r--r--  credential.h | 1
-rw-r--r--  diff.c | 2
-rw-r--r--  dir.c | 148
-rw-r--r--  dir.h | 2
-rw-r--r--  environment.c | 1
-rwxr-xr-x  git-am.sh | 68
-rw-r--r--  git-compat-util.h | 9
-rwxr-xr-x  git-difftool.perl | 7
-rwxr-xr-x  git-p4.py | 274
-rw-r--r--  git-rebase--am.sh | 2
-rw-r--r--  git-rebase--interactive.sh | 84
-rw-r--r--  git-rebase--merge.sh | 2
-rwxr-xr-x  git-rebase.sh | 119
-rwxr-xr-x  git-submodule.sh | 69
-rwxr-xr-x  git-svn.perl | 3202
-rw-r--r--  git.c | 3
-rwxr-xr-x  gitweb/gitweb.perl | 33
-rw-r--r--  help.c | 15
-rw-r--r--  http.c | 3
-rw-r--r--  merge-recursive.c | 151
-rw-r--r--  notes-merge.c | 6
-rw-r--r--  parse-options.c | 1
-rw-r--r--  path.c | 41
-rw-r--r--  perl/.gitignore | 1
-rw-r--r--  perl/Git/IndexInfo.pm | 33
-rw-r--r--  perl/Git/SVN.pm | 2349
-rw-r--r--  perl/Git/SVN/Fetcher.pm | 1
-rw-r--r--  perl/Git/SVN/GlobSpec.pm | 59
-rw-r--r--  perl/Git/SVN/Log.pm | 395
-rw-r--r--  perl/Git/SVN/Migration.pm | 258
-rw-r--r--  perl/Git/SVN/Utils.pm | 59
-rw-r--r--  perl/Makefile | 25
-rw-r--r--  perl/Makefile.PL | 35
-rw-r--r--  pkt-line.c | 32
-rw-r--r--  pkt-line.h | 1
-rw-r--r--  po/de.po | 1603
-rw-r--r--  po/git.pot | 1501
-rw-r--r--  po/sv.po | 1558
-rw-r--r--  po/vi.po | 1662
-rw-r--r--  po/zh_CN.po | 1630
-rw-r--r--  read-cache.c | 71
-rw-r--r--  rerere.c | 12
-rw-r--r--  revision.c | 77
-rw-r--r--  revision.h | 5
-rw-r--r--  setup.c | 8
-rw-r--r--  sha1_name.c | 494
-rw-r--r--  t/Git-SVN/00compile.t | 14
-rw-r--r--  t/Git-SVN/Utils/can_compress.t | 11
-rw-r--r--  t/Git-SVN/Utils/fatal.t | 34
-rw-r--r--  t/lib-bash.sh | 18
-rwxr-xr-x  t/lib-credential.sh | 39
-rw-r--r--  t/lib-git-p4.sh | 60
-rw-r--r--  t/lib-httpd.sh | 4
-rw-r--r--  t/lib-httpd/apache.conf | 5
-rwxr-xr-x  t/t0201-gettext-fallbacks.sh | 8
-rwxr-xr-x  t/t0300-credentials.sh | 14
-rwxr-xr-x  t/t1050-large.sh | 17
-rwxr-xr-x  t/t1100-commit-tree-options.sh | 1
-rwxr-xr-x  t/t1304-default-acl.sh | 19
-rwxr-xr-x  t/t1306-xdg-files.sh | 197
-rwxr-xr-x  t/t1512-rev-parse-disambiguation.sh | 264
-rwxr-xr-x  t/t3400-rebase.sh | 8
-rwxr-xr-x  t/t3404-rebase-interactive.sh | 156
-rwxr-xr-x  t/t3405-rebase-malformed.sh | 32
-rwxr-xr-x  t/t3406-rebase-message.sh | 9
-rwxr-xr-x  t/t3412-rebase-root.sh | 7
-rwxr-xr-x  t/t3910-mac-os-precompose.sh | 164
-rwxr-xr-x  t/t4012-diff-binary.sh | 94
-rwxr-xr-x  t/t4108-apply-threeway.sh | 157
-rwxr-xr-x  t/t4117-apply-reject.sh | 8
-rwxr-xr-x  t/t5300-pack-object.sh | 5
-rwxr-xr-x  t/t5512-ls-remote.sh | 16
-rwxr-xr-x  t/t5701-clone-local.sh | 10
-rwxr-xr-x  t/t6022-merge-rename.sh | 16
-rwxr-xr-x  t/t6042-merge-rename-corner-cases.sh | 2
-rwxr-xr-x  t/t7060-wtstatus.sh | 96
-rwxr-xr-x  t/t7400-submodule-basic.sh | 149
-rwxr-xr-x  t/t7403-submodule-sync.sh | 90
-rwxr-xr-x  t/t7409-submodule-detached-worktree.sh | 78
-rwxr-xr-x  t/t7502-commit.sh | 75
-rwxr-xr-x  t/t7512-status-help.sh | 649
-rwxr-xr-x  t/t9163-git-svn-reset-clears-caches.sh | 78
-rwxr-xr-x  t/t9164-git-svn-dcommit-concrrent.sh | 216
-rwxr-xr-x  t/t9800-git-p4-basic.sh | 421
-rwxr-xr-x  t/t9805-git-p4-skip-submit-edit.sh | 9
-rwxr-xr-x  t/t9806-git-p4-options.sh | 17
-rwxr-xr-x  t/t9807-git-p4-submit.sh | 155
-rwxr-xr-x  t/t9808-git-p4-chdir.sh | 4
-rwxr-xr-x  t/t9810-git-p4-rcs.sh | 8
-rwxr-xr-x  t/t9812-git-p4-wildcards.sh | 147
-rwxr-xr-x  t/t9813-git-p4-preserve-users.sh | 153
-rwxr-xr-x  t/t9814-git-p4-rename.sh | 206
-rwxr-xr-x  t/t9902-completion.sh | 14
-rwxr-xr-x  t/t9903-bash-prompt.sh | 456
-rw-r--r--  t/test-lib.sh | 46
-rw-r--r--  test-credential.c | 38
-rw-r--r--  test-line-buffer.c | 1
-rw-r--r--  test-svn-fe.c | 2
-rw-r--r--  transport-helper.c | 15
-rw-r--r--  tree.c | 7
-rw-r--r--  unpack-trees.c | 5
-rw-r--r--  utf8.c | 26
-rw-r--r--  utf8.h | 1
-rw-r--r--  vcs-svn/fast_export.c | 11
-rw-r--r--  vcs-svn/fast_export.h | 1
-rw-r--r--  vcs-svn/line_buffer.c | 4
-rw-r--r--  vcs-svn/line_buffer.h | 1
-rw-r--r--  vcs-svn/sliding_window.c | 2
-rw-r--r--  vcs-svn/svndiff.c | 15
-rw-r--r--  vcs-svn/svndump.c | 37
-rw-r--r--  version.c | 17
-rw-r--r--  version.h | 8
-rw-r--r--  wt-status.c | 244
-rw-r--r--  wt-status.h | 10
183 files changed, 18781 insertions(+), 7979 deletions(-)
diff --git a/.gitignore b/.gitignore
index bf66648..bb5c91e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,9 @@
/GIT-CFLAGS
/GIT-LDFLAGS
/GIT-GUI-VARS
+/GIT-PREFIX
+/GIT-SCRIPT-DEFINES
+/GIT-USER-AGENT
/GIT-VERSION-FILE
/bin-wrappers/
/git
@@ -31,6 +34,7 @@
/git-commit-tree
/git-config
/git-count-objects
+/git-credential
/git-credential-cache
/git-credential-cache--daemon
/git-credential-store
@@ -172,7 +176,6 @@
/gitweb/static/gitweb.js
/gitweb/static/gitweb.min.*
/test-chmtime
-/test-credential
/test-ctype
/test-date
/test-delta
diff --git a/Documentation/Makefile b/Documentation/Makefile
index 5d76a84..063fa69 100644
--- a/Documentation/Makefile
+++ b/Documentation/Makefile
@@ -66,12 +66,6 @@ endif
-include ../config.mak
#
-# For asciidoc ...
-# -7.1.2, set ASCIIDOC7
-# 8.0-, no extra settings are needed
-#
-
-#
# For docbook-xsl ...
# -1.68.1, no extra settings are needed?
# 1.69.0, set ASCIIDOC_ROFF?
@@ -81,9 +75,6 @@ endif
# 1.73.0-, no extra settings are needed
#
-ifndef ASCIIDOC7
-ASCIIDOC_EXTRA += -a asciidoc7compatible
-endif
ifdef DOCBOOK_XSL_172
ASCIIDOC_EXTRA += -a git-asciidoc-no-roff
MANPAGE_XSL = manpage-1.72.xsl
@@ -134,15 +125,6 @@ DEFAULT_EDITOR_SQ = $(subst ','\'',$(DEFAULT_EDITOR))
ASCIIDOC_EXTRA += -a 'git-default-editor=$(DEFAULT_EDITOR_SQ)'
endif
-#
-# Please note that there is a minor bug in asciidoc.
-# The version after 6.0.3 _will_ include the patch found here:
-# http://marc.theaimsgroup.com/?l=git&m=111558757202243&w=2
-#
-# Until that version is released you may have to apply the patch
-# yourself - yes, all 6 characters of it!
-#
-
QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
QUIET_SUBDIR1 =
diff --git a/Documentation/RelNotes/1.7.12.txt b/Documentation/RelNotes/1.7.12.txt
new file mode 100644
index 0000000..fb7f761
--- /dev/null
+++ b/Documentation/RelNotes/1.7.12.txt
@@ -0,0 +1,145 @@
+Git v1.7.12 Release Notes
+=========================
+
+Updates since v1.7.11
+---------------------
+
+UI, Workflows & Features
+
+ * Git can be told to normalize pathnames it reads from readdir(3) and
+ all arguments it gets from the command line into precomposed UTF-8
+ (assuming that they come as decomposed UTF-8), in order to work
+ around issues on Mac OS.
+
+ I think there still are other places that need conversion
+ (e.g. paths that are read from stdin for some commands), but this
+ should be a good first step in the right direction.
+
+ * The per-user $HOME/.gitconfig file can optionally be stored in
+ $HOME/.config/git/config instead, which is in line with the XDG
+ base directory specification.
+
+ * The values of core.attributesfile and core.excludesfile default to
+ $HOME/.config/git/attributes and $HOME/.config/git/ignore respectively
+ when these files exist.
+
+ * Logic to disambiguate abbreviated object names has been taught to
+ take advantage of object types that are expected in the context,
+ e.g. XXXXXX in the "git describe" output v1.2.3-gXXXXXX must be a
+ commit object, not a blob nor a tree. This will help us prolong
+ the lifetime of abbreviated object names.
+
+ * "git apply" learned to wiggle the base version and perform three-way
+ merge when a patch does not exactly apply to the version you have.
+
+ * Scripted Porcelain writers now have access to the credential API via
+ the "git credential" plumbing command.
+
+ * "git help" used to always default to "man" format even on platforms
+ where "man" viewer is not widely available.
+
+ * "git clone --local $path" started its life as an experiment to
+ optionally use link/copy when cloning a repository on the disk, but
+ it became a no-op when we made the optimization unconditional. The
+ command learned the "--no-local" option to turn the optimization
+ off, as a more explicit alternative to using a file:// URL.
+
+ * "git fetch" and friends used to say "remote side hung up
+ unexpectedly" when they failed to get response they expect from the
+ other side, but one common reason why they don't get expected
+ response is that the remote repository does not exist or cannot be
+ read. The error message in this case was updated to give better
+ hints to the user.
+
+ * "git help -w $cmd" can show HTML version of documentation for
+ "git-$cmd" by setting help.htmlpath to somewhere other than the
+ default location where the build procedure installs them locally;
+ the variable can even point at a http:// URL.
+
+ * "git rebase [-i] --root $tip" can now be used to rewrite all the
+ history leading to "$tip" down to the root commit.
+
+ * "git rebase -i" learned "-x <cmd>" to insert "exec <cmd>" after
+ each commit in the resulting history.
+
+ * "git status" gives finer classification to various states of paths
+ in conflicted state and offer advice messages in its output.
+
+ * "git submodule" learned to deal with nested submodule structure
+ where a module is contained within a module whose origin is
+ specified as a relative URL to its superproject's origin.
+
+ * A rather heavy-ish "git completion" script has been split to create
+ a separate "git prompting" script, to allow lazy-autoloading of the
+ completion part while keeping the prompting part always available.
+
+ * "gitweb" pays attention to various forms of credits that are
+ similar to "Signed-off-by:" lines in the commit objects and
+ highlights them accordingly.
+
+
+Foreign Interface
+
+ * "mediawiki" remote helper (in contrib/) learned to handle file
+ attachments.
+
+ * "git p4" now uses "Jobs:" and "p4 move" when appropriate.
+
+ * vcs-svn has been updated to clean up compilation, lift 32-bit
+ limitations, etc.
+
+
+Performance, Internal Implementation, etc. (please report possible regressions)
+
+ * Some tests showed false failures caused by a bug in ecryptfs.
+
+ * We no longer use AsciiDoc7 syntax in our documentation and favor a
+ more modern style.
+
+ * "git am --rebasing" codepath was taught to grab authorship, log
+ message and the patch text directly out of existing commits. This
+ will help rebasing commits that have confusing "diff" output in
+ their log messages.
+
+ * "git index-pack" and "git pack-objects" use streaming API to read
+ from the object store to avoid having to hold a large blob object
+ in-core while they are doing their thing.
+
+ * Code to match paths with exclude patterns learned to avoid calling
+ fnmatch() by comparing the fixed leading substring literally when
+ possible.
+
+ * "git log -n 1 -- rarely-touched-path" was spending unnecessary
+ cycles after showing the first change to find the next one, only to
+ discard it.
+
+ * "git svn" got a large-looking code reorganization at the last
+ minute before the code freeze.
+
+Also contains minor documentation updates and code clean-ups.
+
+
+Fixes since v1.7.11
+-------------------
+
+Unless otherwise noted, all the fixes since v1.7.11 in the maintenance
+releases are contained in this release (see release notes to them for
+details).
+
+ * "git grep" stopped spawning an external "grep" long time ago, but a
+ duplicated test to check internal and external "grep" was left
+ behind.
+ (merge 4ca9453 rj/maint-grep-remove-redundant-test later to maint).
+
+ * The code to avoid a mistaken attempt to add the object directory
+ itself as its own alternate could read beyond the end of a string
+ during comparison.
+ (merge cb2912c hv/link-alt-odb-entry later to maint).
+
+ * "git submodule add" was confused when the superproject did not have
+ its repository in its usual place in the working tree and GIT_DIR
+ and GIT_WORK_TREE were used to access it.
+
+ * "git commit --amend" let the user edit the log message and then died
+ when getpwent(3) did not provide enough information to derive a
+ human-readable committer name.
diff --git a/Documentation/config.txt b/Documentation/config.txt
index b49feb5..a95e5a4 100644
--- a/Documentation/config.txt
+++ b/Documentation/config.txt
@@ -159,9 +159,10 @@ advice.*::
specified a refspec that isn't your current branch) and
it resulted in a non-fast-forward error.
statusHints::
- Directions on how to stage/unstage/add shown in the
- output of linkgit:git-status[1] and the template shown
- when writing commit messages.
+ Show directions on how to proceed from the current
+ state in the output of linkgit:git-status[1] and in
+ the template shown when writing commit messages in
+ linkgit:git-commit[1].
commitBeforeMerge::
Advice shown when linkgit:git-merge[1] refuses to
merge to avoid overwriting local changes.
@@ -213,6 +214,15 @@ The default is false, except linkgit:git-clone[1] or linkgit:git-init[1]
will probe and set core.ignorecase true if appropriate when the repository
is created.
+core.precomposeunicode::
+ This option is only used by the Mac OS implementation of git.
+ When core.precomposeunicode=true, git reverts the unicode decomposition
+ of filenames done by Mac OS. This is useful when sharing a repository
+ between Mac OS and Linux or Windows.
+ (Git for Windows 1.7.10 or higher is needed, or git under cygwin 1.7.)
+ When false, file names are handled fully transparently by git,
+ which is backward compatible with older versions of git.
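++
+For example (a minimal sketch), a Mac OS user sharing a repository with
+Linux users could enable it with:
++
+	git config core.precomposeunicode true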
+
core.trustctime::
If false, the ctime differences between the index and the
working tree are ignored; useful when the inode change time
@@ -486,7 +496,9 @@ core.excludesfile::
'.git/info/exclude', git looks into this file for patterns
of files which are not meant to be tracked. "`~/`" is expanded
to the value of `$HOME` and "`~user/`" to the specified user's
- home directory. See linkgit:gitignore[5].
+ home directory. Its default value is $XDG_CONFIG_HOME/git/ignore.
+ If $XDG_CONFIG_HOME is either not set or empty, $HOME/.config/git/ignore
+ is used instead. See linkgit:gitignore[5].
core.askpass::
Some commands (e.g. svn and http interfaces) that interactively
@@ -501,7 +513,9 @@ core.attributesfile::
In addition to '.gitattributes' (per-directory) and
'.git/info/attributes', git looks into this file for attributes
(see linkgit:gitattributes[5]). Path expansions are made the same
- way as for `core.excludesfile`.
+ way as for `core.excludesfile`. Its default value is
+ $XDG_CONFIG_HOME/git/attributes. If $XDG_CONFIG_HOME is either not
+ set or empty, $HOME/.config/git/attributes is used instead.
core.editor::
Commands such as `commit` and `tag` that lets you edit
@@ -883,7 +897,7 @@ column.ui::
make equal size columns
--
+
- This option defaults to 'never'.
+This option defaults to 'never'.
column.branch::
Specify whether to output branch listing in `git branch` in columns.
@@ -1723,6 +1737,7 @@ push.default::
no refspec is implied by any of the options given on the command
line. Possible values are:
+
+--
* `nothing` - do not push anything.
* `matching` - push all branches having the same name in both ends.
This is for those who prepare all the branches into a publishable
@@ -1742,12 +1757,13 @@ push.default::
option and is well-suited for beginners. It will become the default
in Git 2.0.
* `current` - push the current branch to a branch of the same name.
- +
- The `simple`, `current` and `upstream` modes are for those who want to
- push out a single branch after finishing work, even when the other
- branches are not yet ready to be pushed out. If you are working with
- other people to push into the same shared repository, you would want
- to use one of these.
+--
++
+The `simple`, `current` and `upstream` modes are for those who want to
+push out a single branch after finishing work, even when the other
+branches are not yet ready to be pushed out. If you are working with
+other people to push into the same shared repository, you would want
+to use one of these.
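++
+For example (a sketch, not a requirement), a user who wants to opt into
+the upcoming default early can run:
++
+	git config --global push.default simple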
rebase.stat::
Whether to show a diffstat of what changed upstream since the last
diff --git a/Documentation/git-apply.txt b/Documentation/git-apply.txt
index afd2c9a..634b84e 100644
--- a/Documentation/git-apply.txt
+++ b/Documentation/git-apply.txt
@@ -9,7 +9,7 @@ git-apply - Apply a patch to files and/or to the index
SYNOPSIS
--------
[verse]
-'git apply' [--stat] [--numstat] [--summary] [--check] [--index]
+'git apply' [--stat] [--numstat] [--summary] [--check] [--index] [--3way]
[--apply] [--no-add] [--build-fake-ancestor=<file>] [-R | --reverse]
[--allow-binary-replacement | --binary] [--reject] [-z]
[-p<n>] [-C<n>] [--inaccurate-eof] [--recount] [--cached]
@@ -72,6 +72,15 @@ OPTIONS
cached data, apply the patch, and store the result in the index
without using the working tree. This implies `--index`.
+-3::
+--3way::
+ When the patch does not apply cleanly, fall back on 3-way merge if
+ the patch records the identity of blobs it is supposed to apply to,
+ and we have those blobs available locally, possibly leaving the
+ conflict markers in the files in the working tree for the user to
+ resolve. This option implies the `--index` option, and is incompatible
+ with the `--reject` and the `--cached` options.
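++
+For example (`fix.patch` is a placeholder filename):
++
+	git apply --3way fix.patch
++
+If the patch does not apply cleanly, resolve the conflict markers left
+in the working tree and commit the result.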
+
--build-fake-ancestor=<file>::
Newer 'git diff' output has embedded 'index information'
for each blob to help identify the original version that
diff --git a/Documentation/git-clone.txt b/Documentation/git-clone.txt
index 6e22522..c1ddd4c 100644
--- a/Documentation/git-clone.txt
+++ b/Documentation/git-clone.txt
@@ -46,13 +46,18 @@ OPTIONS
mechanism and clones the repository by making a copy of
HEAD and everything under objects and refs directories.
The files under `.git/objects/` directory are hardlinked
- to save space when possible. This is now the default when
- the source repository is specified with `/path/to/repo`
- syntax, so it essentially is a no-op option. To force
- copying instead of hardlinking (which may be desirable
- if you are trying to make a back-up of your repository),
- but still avoid the usual "git aware" transport
- mechanism, `--no-hardlinks` can be used.
+ to save space when possible.
++
+If the repository is specified as a local path (e.g., `/path/to/repo`),
+this is the default, and --local is essentially a no-op. If the
+repository is specified as a URL, then this flag is ignored (and we
+never use the local optimizations). Specifying `--no-local` will
+override the default when `/path/to/repo` is given, using the regular
+git transport instead.
++
+To force copying instead of hardlinking (which may be desirable if you
+are trying to make a back-up of your repository), but still avoid the
+usual "git aware" transport mechanism, `--no-hardlinks` can be used.
--no-hardlinks::
Optimize the cloning process from a repository on a
diff --git a/Documentation/git-config.txt b/Documentation/git-config.txt
index d9463cb..2d6ef32 100644
--- a/Documentation/git-config.txt
+++ b/Documentation/git-config.txt
@@ -97,10 +97,11 @@ OPTIONS
--global::
For writing options: write to global ~/.gitconfig file rather than
- the repository .git/config.
+ the repository .git/config; write to the $XDG_CONFIG_HOME/git/config
+ file instead if it exists and the ~/.gitconfig file doesn't.
+
-For reading options: read only from global ~/.gitconfig rather than
-from all available files.
+For reading options: read only from global ~/.gitconfig and from
+$XDG_CONFIG_HOME/git/config rather than from all available files.
+
See also <<FILES>>.
@@ -194,7 +195,7 @@ See also <<FILES>>.
FILES
-----
-If not set explicitly with '--file', there are three files where
+If not set explicitly with '--file', there are four files where
'git config' will search for configuration options:
$GIT_DIR/config::
@@ -204,6 +205,14 @@ $GIT_DIR/config::
User-specific configuration file. Also called "global"
configuration file.
+$XDG_CONFIG_HOME/git/config::
+ Second user-specific configuration file. If $XDG_CONFIG_HOME is not set
+ or empty, $HOME/.config/git/config will be used. Any single-valued
+ variable set in this file will be overwritten by whatever is in
+ ~/.gitconfig. It is a good idea not to create this file if
+ you sometimes use older versions of Git, as support for this
+ file was added fairly recently.
+
$(prefix)/etc/gitconfig::
System-wide configuration file.
diff --git a/Documentation/git-credential.txt b/Documentation/git-credential.txt
new file mode 100644
index 0000000..810e957
--- /dev/null
+++ b/Documentation/git-credential.txt
@@ -0,0 +1,154 @@
+git-credential(1)
+=================
+
+NAME
+----
+git-credential - Retrieve and store user credentials
+
+SYNOPSIS
+--------
+------------------
+git credential <fill|approve|reject>
+------------------
+
+DESCRIPTION
+-----------
+
+Git has an internal interface for storing and retrieving credentials
+from system-specific helpers, as well as prompting the user for
+usernames and passwords. The git-credential command exposes this
+interface to scripts which may want to retrieve, store, or prompt for
+credentials in the same manner as git. The design of this scriptable
+interface models the internal C API; see
+link:technical/api-credentials.txt[the git credential API] for more
+background on the concepts.
+
+git-credential takes an "action" option on the command-line (one of
+`fill`, `approve`, or `reject`) and reads a credential description
+on stdin (see <<IOFMT,INPUT/OUTPUT FORMAT>>).
+
+If the action is `fill`, git-credential will attempt to add "username"
+and "password" attributes to the description by reading config files,
+by contacting any configured credential helpers, or by prompting the
+user. The username and password attributes of the credential
+description are then printed to stdout together with the attributes
+already provided.
+
+If the action is `approve`, git-credential will send the description
+to any configured credential helpers, which may store the credential
+for later use.
+
+If the action is `reject`, git-credential will send the description to
+any configured credential helpers, which may erase any stored
+credential matching the description.
+
+If the action is `approve` or `reject`, no output should be emitted.
+
+TYPICAL USE OF GIT CREDENTIAL
+-----------------------------
+
+An application using git-credential will typically invoke `git
+credential` following these steps:
+
+ 1. Generate a credential description based on the context.
++
+For example, if we want a password for
+`https://example.com/foo.git`, we might generate the following
+credential description (don't forget the blank line at the end; it
+tells `git credential` that the application finished feeding all the
+information it has):
+
+ protocol=https
+ host=example.com
+ path=foo.git
+
+ 2. Ask git-credential to give us a username and password for this
+ description. This is done by running `git credential fill`,
+ feeding the description from step (1) to its standard input. The complete
+ credential description (including the credential per se, i.e. the
+ login and password) will be produced on standard output, like:
+
+ protocol=https
+ host=example.com
+ username=bob
+ password=secr3t
++
+In most cases, this means the attributes given in the input will be
+repeated in the output, but git may also modify the credential
+description, for example by removing the `path` attribute when the
+protocol is HTTP(s) and `credential.useHttpPath` is false.
++
+If `git credential` already knew the password, this step may not
+have involved the user actually typing it (the user may have typed
+a password to unlock the keychain instead, or no user interaction
+was needed if the keychain was already unlocked) before it returned
+`password=secr3t`.
+
+ 3. Use the credential (e.g., access the URL with the username and
+ password from step (2)), and see if it's accepted.
+
+ 4. Report on the success or failure of the password. If the
+ credential allowed the operation to complete successfully, then
+ it can be marked with an "approve" action to tell `git
+ credential` to reuse it in its next invocation. If the credential
+ was rejected during the operation, use the "reject" action so
+ that `git credential` will ask for a new password in its next
+ invocation. In either case, `git credential` should be fed with
+ the credential description obtained from step (2) (which also
+ contains the attributes provided in step (1)).
+
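+For a rough end-to-end sketch (the URL is a placeholder and error
+handling is omitted), a shell script could drive the steps above like
+this:
+
+-------------------------------------------
+# Steps 1 and 2: describe the context, ask for a filled credential.
+desc=$(printf 'url=https://example.com/foo.git\n\n' |
+	git credential fill)
+
+# Step 3: attempt the operation with the username/password in $desc.
+
+# Step 4: feed the filled description back with the outcome.
+printf '%s\n\n' "$desc" | git credential approve	# on success
+printf '%s\n\n' "$desc" | git credential reject	# on failure
+-------------------------------------------
+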
+[[IOFMT]]
+INPUT/OUTPUT FORMAT
+-------------------
+
+`git credential` reads and/or writes (depending on the action used)
+credential information in its standard input/output. This information
+can correspond either to keys for which `git credential` will obtain
+the login/password information (e.g. host, protocol, path), or to the
+actual credential data to be obtained (login/password).
+
+The credential is split into a set of named attributes, with one
+attribute per line. Each attribute is
+specified by a key-value pair, separated by an `=` (equals) sign,
+followed by a newline. The key may contain any bytes except `=`,
+newline, or NUL. The value may contain any bytes except newline or NUL.
+In both cases, all bytes are treated as-is (i.e., there is no quoting,
+and one cannot transmit a value with newline or NUL in it). The list of
+attributes is terminated by a blank line or end-of-file.
+Git understands the following attributes:
+
+`protocol`::
+
+ The protocol over which the credential will be used (e.g.,
+ `https`).
+
+`host`::
+
+ The remote hostname for a network credential.
+
+`path`::
+
+ The path with which the credential will be used. E.g., for
+ accessing a remote https repository, this will be the
+ repository's path on the server.
+
+`username`::
+
+ The credential's username, if we already have one (e.g., from a
+ URL, from the user, or from a previously run helper).
+
+`password`::
+
+ The credential's password, if we are asking it to be stored.
+
+`url`::
+
+ When this special attribute is read by `git credential`, the
+ value is parsed as a URL and treated as if its constituent parts
+ were read (e.g., `url=https://example.com` would behave as if
+ `protocol=https` and `host=example.com` had been provided). This
+ can help callers avoid parsing URLs themselves. Note that any
+ components which are missing from the URL (e.g., there is no
+ username in the example above) will be set to empty; if you want
+ to provide a URL and override some attributes, provide the URL
+ attribute first, followed by any overrides.
diff --git a/Documentation/git-p4.txt b/Documentation/git-p4.txt
index fe1f49b..8228f33 100644
--- a/Documentation/git-p4.txt
+++ b/Documentation/git-p4.txt
@@ -255,7 +255,7 @@ These options can be used to modify 'git p4 submit' behavior.
p4. By default, this is the most recent p4 commit reachable
from 'HEAD'.
--M[<n>]::
+-M::
Detect renames. See linkgit:git-diff[1]. Renames will be
represented in p4 using explicit 'move' operations. There
is no corresponding option to detect copies, but there are
@@ -465,13 +465,15 @@ git-p4.useClientSpec::
Submit variables
~~~~~~~~~~~~~~~~
git-p4.detectRenames::
- Detect renames. See linkgit:git-diff[1].
+ Detect renames. See linkgit:git-diff[1]. This can be true,
+ false, or a score as expected by 'git diff -M'.
git-p4.detectCopies::
- Detect copies. See linkgit:git-diff[1].
+ Detect copies. See linkgit:git-diff[1]. This can be true,
+ false, or a score as expected by 'git diff -C'.
git-p4.detectCopiesHarder::
- Detect copies harder. See linkgit:git-diff[1].
+ Detect copies harder. See linkgit:git-diff[1]. A boolean.
git-p4.preserveUser::
On submit, re-author changes to reflect the git author,
diff --git a/Documentation/git-rebase.txt b/Documentation/git-rebase.txt
index feb51a6..fd535b0 100644
--- a/Documentation/git-rebase.txt
+++ b/Documentation/git-rebase.txt
@@ -8,9 +8,9 @@ git-rebase - Forward-port local commits to the updated upstream head
SYNOPSIS
--------
[verse]
-'git rebase' [-i | --interactive] [options] [--onto <newbase>]
+'git rebase' [-i | --interactive] [options] [--exec <cmd>] [--onto <newbase>]
[<upstream>] [<branch>]
-'git rebase' [-i | --interactive] [options] --onto <newbase>
+'git rebase' [-i | --interactive] [options] [--exec <cmd>] [--onto <newbase>]
--root [<branch>]
'git rebase' --continue | --skip | --abort
@@ -210,7 +210,7 @@ rebase.autosquash::
OPTIONS
-------
-<newbase>::
+--onto <newbase>::
Starting point at which to create the new commits. If the
--onto option is not specified, the starting point is
<upstream>. May be any valid commit, and not just an
@@ -344,14 +344,36 @@ This uses the `--interactive` machinery internally, but combining it
with the `--interactive` option explicitly is generally not a good
idea unless you know what you are doing (see BUGS below).
+-x <cmd>::
+--exec <cmd>::
+ Append "exec <cmd>" after each line creating a commit in the
+ final history. <cmd> will be interpreted as one or more shell
+ commands.
++
+This option can only be used with the `--interactive` option
+(see INTERACTIVE MODE below).
++
+You may execute several commands by either using one instance of `--exec`
+with several commands:
++
+ git rebase -i --exec "cmd1 && cmd2 && ..."
++
+or by giving more than one `--exec`:
++
+ git rebase -i --exec "cmd1" --exec "cmd2" --exec ...
++
+If `--autosquash` is used, "exec" lines will not be appended for
+the intermediate commits, and will only appear at the end of each
+squash/fixup series.
--root::
Rebase all commits reachable from <branch>, instead of
limiting them with an <upstream>. This allows you to rebase
- the root commit(s) on a branch. Must be used with --onto, and
+ the root commit(s) on a branch. When used with --onto, it
will skip changes already contained in <newbase> (instead of
- <upstream>). When used together with --preserve-merges, 'all'
- root commits will be rewritten to have <newbase> as parent
+ <upstream>) whereas without --onto it will operate on every change.
+ When used together with both --onto and --preserve-merges,
+ 'all' root commits will be rewritten to have <newbase> as parent
instead.
--autosquash::
@@ -521,6 +543,24 @@ in `$SHELL`, or the default shell if `$SHELL` is not set), so you can
use shell features (like "cd", ">", ";" ...). The command is run from
the root of the working tree.
+----------------------------------
+$ git rebase -i --exec "make test"
+----------------------------------
+
+This command lets you check that intermediate commits are compilable.
+The todo list would then look like this:
+
+--------------------
+pick 5928aea one
+exec make test
+pick 04d0fda two
+exec make test
+pick ba46169 three
+exec make test
+pick f4593f9 four
+exec make test
+--------------------
+
SPLITTING COMMITS
-----------------
diff --git a/Documentation/git-rev-parse.txt b/Documentation/git-rev-parse.txt
index 4cc3e95..3c63561 100644
--- a/Documentation/git-rev-parse.txt
+++ b/Documentation/git-rev-parse.txt
@@ -101,6 +101,12 @@ OPTIONS
The option core.warnAmbiguousRefs is used to select the strict
abbreviation mode.
+--disambiguate=<prefix>::
+ Show every object whose name begins with the given prefix.
+ The <prefix> must be at least 4 hexadecimal digits long to
+ avoid listing each and every object in the repository by
+ mistake.
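++
+For example (the prefix shown is arbitrary):
++
+	git rev-parse --disambiguate=5928ae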
+
--all::
Show all refs found in `refs/`.
diff --git a/Documentation/gitattributes.txt b/Documentation/gitattributes.txt
index 80120ea..e16f3e1 100644
--- a/Documentation/gitattributes.txt
+++ b/Documentation/gitattributes.txt
@@ -75,6 +75,8 @@ repositories (i.e., attributes of interest to all users) should go into
`.gitattributes` files. Attributes that should affect all repositories
for a single user should be placed in a file specified by the
`core.attributesfile` configuration option (see linkgit:git-config[1]).
+Its default value is $XDG_CONFIG_HOME/git/attributes. If $XDG_CONFIG_HOME
+is either not set or empty, $HOME/.config/git/attributes is used instead.
Attributes for all users on a system should be placed in the
`$(prefix)/etc/gitattributes` file.
diff --git a/Documentation/gitignore.txt b/Documentation/gitignore.txt
index 2e7328b..c1f692a 100644
--- a/Documentation/gitignore.txt
+++ b/Documentation/gitignore.txt
@@ -50,7 +50,9 @@ the repository but are specific to one user's workflow) should go into
the `$GIT_DIR/info/exclude` file. Patterns which a user wants git to
ignore in all situations (e.g., backup or temporary files generated by
the user's editor of choice) generally go into a file specified by
-`core.excludesfile` in the user's `~/.gitconfig`.
+`core.excludesfile` in the user's `~/.gitconfig`. Its default value is
+$XDG_CONFIG_HOME/git/ignore. If $XDG_CONFIG_HOME is either not set or empty,
+$HOME/.config/git/ignore is used instead.
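+
+As an illustration (the pattern is arbitrary), the per-user ignore file
+can be set up with:
+
+	mkdir -p ~/.config/git
+	echo '*~' >>~/.config/git/ignore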
The underlying git plumbing tools, such as
'git ls-files' and 'git read-tree', read
diff --git a/Documentation/technical/api-credentials.txt b/Documentation/technical/api-credentials.txt
index adb6f0c..5977b58 100644
--- a/Documentation/technical/api-credentials.txt
+++ b/Documentation/technical/api-credentials.txt
@@ -241,42 +241,9 @@ appended to its command line, which is one of:
Remove a matching credential, if any, from the helper's storage.
The details of the credential will be provided on the helper's stdin
-stream. The credential is split into a set of named attributes.
-Attributes are provided to the helper, one per line. Each attribute is
-specified by a key-value pair, separated by an `=` (equals) sign,
-followed by a newline. The key may contain any bytes except `=`,
-newline, or NUL. The value may contain any bytes except newline or NUL.
-In both cases, all bytes are treated as-is (i.e., there is no quoting,
-and one cannot transmit a value with newline or NUL in it). The list of
-attributes is terminated by a blank line or end-of-file.
-
-Git will send the following attributes (but may not send all of
-them for a given credential; for example, a `host` attribute makes no
-sense when dealing with a non-network protocol):
-
-`protocol`::
-
- The protocol over which the credential will be used (e.g.,
- `https`).
-
-`host`::
-
- The remote hostname for a network credential.
-
-`path`::
-
- The path with which the credential will be used. E.g., for
- accessing a remote https repository, this will be the
- repository's path on the server.
-
-`username`::
-
- The credential's username, if we already have one (e.g., from a
- URL, from the user, or from a previously run helper).
-
-`password`::
-
- The credential's password, if we are asking it to be stored.
+stream. The exact format is the same as the input/output format of the
+`git credential` plumbing command (see the section `INPUT/OUTPUT
+FORMAT` in linkgit:git-credential[1] for a detailed specification).
For a `get` operation, the helper should produce a list of attributes
on stdout in the same format. A helper is free to produce a subset, or
diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN
index bab4056..54c1fc8 100755
--- a/GIT-VERSION-GEN
+++ b/GIT-VERSION-GEN
@@ -1,7 +1,7 @@
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v1.7.11.5
+DEF_VER=v1.7.12-rc2
LF='
'
diff --git a/Makefile b/Makefile
index df58303..6b0c961 100644
--- a/Makefile
+++ b/Makefile
@@ -205,8 +205,6 @@ all::
# Define NO_ST_BLOCKS_IN_STRUCT_STAT if your platform does not have st_blocks
# field that counts the on-disk footprint in 512-byte blocks.
#
-# Define ASCIIDOC7 if you want to format documentation with AsciiDoc 7
-#
# Define DOCBOOK_XSL_172 if you want to format man pages with DocBook XSL v1.72
# (not v1.73 or v1.71).
#
@@ -298,6 +296,13 @@ all::
# the diff algorithm. It gives a nice speedup if your processor has
# fast unaligned word loads. Does NOT work on big-endian systems!
# Enabled by default on x86_64.
+#
+# Define GIT_USER_AGENT if you want to change how git identifies itself during
+# network interactions. The default is "git/$(GIT_VERSION)".
+#
+# Define DEFAULT_HELP_FORMAT to "man", "info" or "html"
+# (defaults to "man") if you want to have a different default when
+# "git help" is called without a parameter specifying the format.
GIT-VERSION-FILE: FORCE
@$(SHELL_PATH) ./GIT-VERSION-GEN
@@ -391,12 +396,9 @@ BUILTIN_OBJS =
BUILT_INS =
COMPAT_CFLAGS =
COMPAT_OBJS =
-XDIFF_H =
XDIFF_OBJS =
-VCSSVN_H =
VCSSVN_OBJS =
-VCSSVN_TEST_OBJS =
-MISC_H =
+GENERATED_H =
EXTRA_CPPFLAGS =
LIB_H =
LIB_OBJS =
@@ -482,7 +484,6 @@ X =
PROGRAMS += $(patsubst %.o,git-%$X,$(PROGRAM_OBJS))
TEST_PROGRAMS_NEED_X += test-chmtime
-TEST_PROGRAMS_NEED_X += test-credential
TEST_PROGRAMS_NEED_X += test-ctype
TEST_PROGRAMS_NEED_X += test-date
TEST_PROGRAMS_NEED_X += test-delta
@@ -557,51 +558,44 @@ LIB_FILE=libgit.a
XDIFF_LIB=xdiff/lib.a
VCSSVN_LIB=vcs-svn/lib.a
-XDIFF_H += xdiff/xinclude.h
-XDIFF_H += xdiff/xmacros.h
-XDIFF_H += xdiff/xdiff.h
-XDIFF_H += xdiff/xtypes.h
-XDIFF_H += xdiff/xutils.h
-XDIFF_H += xdiff/xprepare.h
-XDIFF_H += xdiff/xdiffi.h
-XDIFF_H += xdiff/xemit.h
-
-VCSSVN_H += vcs-svn/line_buffer.h
-VCSSVN_H += vcs-svn/sliding_window.h
-VCSSVN_H += vcs-svn/repo_tree.h
-VCSSVN_H += vcs-svn/fast_export.h
-VCSSVN_H += vcs-svn/svndiff.h
-VCSSVN_H += vcs-svn/svndump.h
-
-MISC_H += bisect.h
-MISC_H += branch.h
-MISC_H += bundle.h
-MISC_H += common-cmds.h
-MISC_H += fetch-pack.h
-MISC_H += reachable.h
-MISC_H += send-pack.h
-MISC_H += shortlog.h
-MISC_H += tar.h
-MISC_H += thread-utils.h
-MISC_H += url.h
-MISC_H += walker.h
-MISC_H += wt-status.h
+LIB_H += xdiff/xinclude.h
+LIB_H += xdiff/xmacros.h
+LIB_H += xdiff/xdiff.h
+LIB_H += xdiff/xtypes.h
+LIB_H += xdiff/xutils.h
+LIB_H += xdiff/xprepare.h
+LIB_H += xdiff/xdiffi.h
+LIB_H += xdiff/xemit.h
+
+LIB_H += vcs-svn/line_buffer.h
+LIB_H += vcs-svn/sliding_window.h
+LIB_H += vcs-svn/repo_tree.h
+LIB_H += vcs-svn/fast_export.h
+LIB_H += vcs-svn/svndiff.h
+LIB_H += vcs-svn/svndump.h
+
+GENERATED_H += common-cmds.h
LIB_H += advice.h
LIB_H += archive.h
LIB_H += argv-array.h
LIB_H += attr.h
+LIB_H += bisect.h
LIB_H += blob.h
+LIB_H += branch.h
LIB_H += builtin.h
LIB_H += bulk-checkin.h
-LIB_H += cache.h
+LIB_H += bundle.h
LIB_H += cache-tree.h
+LIB_H += cache.h
LIB_H += color.h
+LIB_H += column.h
LIB_H += commit.h
LIB_H += compat/bswap.h
LIB_H += compat/cygwin.h
LIB_H += compat/mingw.h
LIB_H += compat/obstack.h
+LIB_H += compat/precompose_utf8.h
LIB_H += compat/terminal.h
LIB_H += compat/win32/dirent.h
LIB_H += compat/win32/poll.h
@@ -617,6 +611,7 @@ LIB_H += diff.h
LIB_H += diffcore.h
LIB_H += dir.h
LIB_H += exec_cmd.h
+LIB_H += fetch-pack.h
LIB_H += fmt-merge-msg.h
LIB_H += fsck.h
LIB_H += gettext.h
@@ -626,6 +621,7 @@ LIB_H += graph.h
LIB_H += grep.h
LIB_H += hash.h
LIB_H += help.h
+LIB_H += http.h
LIB_H += kwset.h
LIB_H += levenshtein.h
LIB_H += list-objects.h
@@ -635,19 +631,20 @@ LIB_H += mailmap.h
LIB_H += merge-file.h
LIB_H += merge-recursive.h
LIB_H += mergesort.h
-LIB_H += notes.h
LIB_H += notes-cache.h
LIB_H += notes-merge.h
+LIB_H += notes.h
LIB_H += object.h
-LIB_H += pack.h
LIB_H += pack-refs.h
LIB_H += pack-revindex.h
+LIB_H += pack.h
LIB_H += parse-options.h
LIB_H += patch-ids.h
LIB_H += pkt-line.h
LIB_H += progress.h
LIB_H += prompt.h
LIB_H += quote.h
+LIB_H += reachable.h
LIB_H += reflog-walk.h
LIB_H += refs.h
LIB_H += remote.h
@@ -655,9 +652,11 @@ LIB_H += rerere.h
LIB_H += resolve-undo.h
LIB_H += revision.h
LIB_H += run-command.h
+LIB_H += send-pack.h
LIB_H += sequencer.h
LIB_H += sha1-array.h
LIB_H += sha1-lookup.h
+LIB_H += shortlog.h
LIB_H += sideband.h
LIB_H += sigchain.h
LIB_H += strbuf.h
@@ -665,14 +664,18 @@ LIB_H += streaming.h
LIB_H += string-list.h
LIB_H += submodule.h
LIB_H += tag.h
+LIB_H += tar.h
LIB_H += thread-utils.h
LIB_H += transport.h
-LIB_H += tree.h
LIB_H += tree-walk.h
+LIB_H += tree.h
LIB_H += unpack-trees.h
+LIB_H += url.h
LIB_H += userdiff.h
LIB_H += utf8.h
LIB_H += varint.h
+LIB_H += walker.h
+LIB_H += wt-status.h
LIB_H += xdiff-interface.h
LIB_H += xdiff/xdiff.h
@@ -801,6 +804,7 @@ LIB_OBJS += usage.o
LIB_OBJS += userdiff.o
LIB_OBJS += utf8.o
LIB_OBJS += varint.o
+LIB_OBJS += version.o
LIB_OBJS += walker.o
LIB_OBJS += wrapper.o
LIB_OBJS += write_or_die.o
@@ -829,6 +833,7 @@ BUILTIN_OBJS += builtin/commit-tree.o
BUILTIN_OBJS += builtin/commit.o
BUILTIN_OBJS += builtin/config.o
BUILTIN_OBJS += builtin/count-objects.o
+BUILTIN_OBJS += builtin/credential.o
BUILTIN_OBJS += builtin/describe.o
BUILTIN_OBJS += builtin/diff-files.o
BUILTIN_OBJS += builtin/diff-index.o
@@ -906,6 +911,8 @@ BUILTIN_OBJS += builtin/write-tree.o
GITLIBS = $(LIB_FILE) $(XDIFF_LIB)
EXTLIBS =
+GIT_USER_AGENT = git/$(GIT_VERSION)
+
#
# Platform specific tweaks
#
@@ -992,6 +999,8 @@ ifeq ($(uname_S),Darwin)
NO_MEMMEM = YesPlease
USE_ST_TIMESPEC = YesPlease
HAVE_DEV_TTY = YesPlease
+ COMPAT_OBJS += compat/precompose_utf8.o
+ BASIC_CFLAGS += -DPRECOMPOSE_UNICODE
endif
ifeq ($(uname_S),SunOS)
NEEDS_SOCKET = YesPlease
@@ -1005,6 +1014,7 @@ ifeq ($(uname_S),SunOS)
NO_REGEX = YesPlease
NO_FNMATCH_CASEFOLD = YesPlease
NO_MSGFMT_EXTENDED_OPTIONS = YesPlease
+ HAVE_DEV_TTY = YesPlease
ifeq ($(uname_R),5.6)
SOCKLEN_T = int
NO_HSTRERROR = YesPlease
@@ -1239,6 +1249,7 @@ ifeq ($(uname_S),Windows)
BLK_SHA1 = YesPlease
NO_POSIX_GOODIES = UnfortunatelyYes
NATIVE_CRLF = YesPlease
+ DEFAULT_HELP_FORMAT = html
CC = compat/vcbuild/scripts/clink.pl
AR = compat/vcbuild/scripts/lib.pl
@@ -1841,10 +1852,6 @@ ifndef V
endif
endif
-ifdef ASCIIDOC7
- export ASCIIDOC7
-endif
-
ifdef NO_INSTALL_HARDLINKS
export NO_INSTALL_HARDLINKS
endif
@@ -1922,6 +1929,18 @@ SHELL_PATH_CQ_SQ = $(subst ','\'',$(SHELL_PATH_CQ))
BASIC_CFLAGS += -DSHELL_PATH='$(SHELL_PATH_CQ_SQ)'
endif
+GIT_USER_AGENT_SQ = $(subst ','\'',$(GIT_USER_AGENT))
+GIT_USER_AGENT_CQ = "$(subst ",\",$(subst \,\\,$(GIT_USER_AGENT)))"
+GIT_USER_AGENT_CQ_SQ = $(subst ','\'',$(GIT_USER_AGENT_CQ))
+GIT-USER-AGENT: FORCE
+ @if test x'$(GIT_USER_AGENT_SQ)' != x"`cat GIT-USER-AGENT 2>/dev/null`"; then \
+ echo '$(GIT_USER_AGENT_SQ)' >GIT-USER-AGENT; \
+ fi
+
+ifdef DEFAULT_HELP_FORMAT
+BASIC_CFLAGS += -DDEFAULT_HELP_FORMAT='"$(DEFAULT_HELP_FORMAT)"'
+endif
+
ALL_CFLAGS += $(BASIC_CFLAGS)
ALL_LDFLAGS += $(BASIC_LDFLAGS)
@@ -1968,8 +1987,41 @@ shell_compatibility_test: please_set_SHELL_PATH_to_a_more_modern_shell
strip: $(PROGRAMS) git$X
$(STRIP) $(STRIP_OPTS) $(PROGRAMS) git$X
-git.o: common-cmds.h
-git.sp git.s git.o: EXTRA_CPPFLAGS = -DGIT_VERSION='"$(GIT_VERSION)"' \
+### Target-specific flags and dependencies
+
+# The generic compilation pattern rule and automatically
+# computed header dependencies (falling back to a dependency on
+# LIB_H) are enough to describe how most targets should be built,
+# but some targets are special enough to need something a little
+# different.
+#
+# - When a source file "foo.c" #includes a generated header file,
+# we need to list that dependency for the "foo.o" target.
+#
+# We also list it from other targets that are built from foo.c
+# like "foo.sp" and "foo.s", even though that is easy to forget
+# to do because the generated header is already present after
+# a regular build attempt.
+#
+# - Some code depends on configuration kept in makefile
+# variables. The target-specific variable EXTRA_CPPFLAGS can
+# be used to convey that information to the C preprocessor
+# using -D options.
+#
+# The "foo.o" target should have a corresponding dependency on
+# a file that changes when the value of the makefile variable
+# changes. For example, targets making use of the
+# $(GIT_VERSION) variable depend on GIT-VERSION-FILE.
+#
+# Technically the ".sp" and ".s" targets do not need this
+# dependency because they are force-built, but they get the
+# same dependency for consistency. This way, you do not have to
+# know how each target is implemented. And it means the
+# dependencies here will not need to change if the force-build
+# details change some day.
+
+git.sp git.s git.o: GIT-PREFIX
+git.sp git.s git.o: EXTRA_CPPFLAGS = \
'-DGIT_HTML_PATH="$(htmldir_SQ)"' \
'-DGIT_MAN_PATH="$(mandir_SQ)"' \
'-DGIT_INFO_PATH="$(infodir_SQ)"'
@@ -1978,14 +2030,19 @@ git$X: git.o GIT-LDFLAGS $(BUILTIN_OBJS) $(GITLIBS)
$(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ git.o \
$(BUILTIN_OBJS) $(ALL_LDFLAGS) $(LIBS)
-help.sp help.o: common-cmds.h
+help.sp help.s help.o: common-cmds.h
-builtin/help.sp builtin/help.o: common-cmds.h
+builtin/help.sp builtin/help.s builtin/help.o: common-cmds.h GIT-PREFIX
builtin/help.sp builtin/help.s builtin/help.o: EXTRA_CPPFLAGS = \
'-DGIT_HTML_PATH="$(htmldir_SQ)"' \
'-DGIT_MAN_PATH="$(mandir_SQ)"' \
'-DGIT_INFO_PATH="$(infodir_SQ)"'
+version.sp version.s version.o: GIT-VERSION-FILE GIT-USER-AGENT
+version.sp version.s version.o: EXTRA_CPPFLAGS = \
+ '-DGIT_VERSION="$(GIT_VERSION)"' \
+ '-DGIT_USER_AGENT=$(GIT_USER_AGENT_CQ_SQ)'
+
$(BUILT_INS): git$X
$(QUIET_BUILT_IN)$(RM) $@ && \
ln git$X $@ 2>/dev/null || \
@@ -1997,35 +2054,54 @@ common-cmds.h: ./generate-cmdlist.sh command-list.txt
common-cmds.h: $(wildcard Documentation/git-*.txt)
$(QUIET_GEN)./generate-cmdlist.sh > $@+ && mv $@+ $@
+SCRIPT_DEFINES = $(SHELL_PATH_SQ):$(DIFF_SQ):$(GIT_VERSION):\
+ $(localedir_SQ):$(NO_CURL):$(USE_GETTEXT_SCHEME):$(SANE_TOOL_PATH_SQ):\
+ $(gitwebdir_SQ):$(PERL_PATH_SQ)
define cmd_munge_script
$(RM) $@ $@+ && \
sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
-e 's|@SHELL_PATH@|$(SHELL_PATH_SQ)|' \
-e 's|@@DIFF@@|$(DIFF_SQ)|' \
- -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
-e 's|@@LOCALEDIR@@|$(localedir_SQ)|g' \
-e 's/@@NO_CURL@@/$(NO_CURL)/g' \
-e 's/@@USE_GETTEXT_SCHEME@@/$(USE_GETTEXT_SCHEME)/g' \
-e $(BROKEN_PATH_FIX) \
+ -e 's|@@GITWEBDIR@@|$(gitwebdir_SQ)|g' \
+ -e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \
$@.sh >$@+
endef
-$(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh
+GIT-SCRIPT-DEFINES: FORCE
+ @FLAGS='$(SCRIPT_DEFINES)'; \
+ if test x"$$FLAGS" != x"`cat $@ 2>/dev/null`" ; then \
+ echo 1>&2 " * new script parameters"; \
+ echo "$$FLAGS" >$@; \
+ fi
+
+
+$(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh GIT-SCRIPT-DEFINES
$(QUIET_GEN)$(cmd_munge_script) && \
chmod +x $@+ && \
mv $@+ $@
-$(SCRIPT_LIB) : % : %.sh
+$(SCRIPT_LIB) : % : %.sh GIT-SCRIPT-DEFINES
$(QUIET_GEN)$(cmd_munge_script) && \
mv $@+ $@
ifndef NO_PERL
$(patsubst %.perl,%,$(SCRIPT_PERL)): perl/perl.mak
-perl/perl.mak: GIT-CFLAGS perl/Makefile perl/Makefile.PL
+perl/perl.mak: perl/PM.stamp
+
+perl/PM.stamp: FORCE
+ $(QUIET_GEN)$(FIND) perl -type f -name '*.pm' | sort >$@+ && \
+ { cmp $@+ $@ >/dev/null 2>/dev/null || mv $@+ $@; } && \
+ $(RM) $@+
+
+perl/perl.mak: GIT-CFLAGS GIT-PREFIX perl/Makefile perl/Makefile.PL
$(QUIET_SUBDIR0)perl $(QUIET_SUBDIR1) PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' $(@F)
-$(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl
+$(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl GIT-VERSION-FILE
$(QUIET_GEN)$(RM) $@ $@+ && \
INSTLIBDIR=`MAKEFLAGS= $(MAKE) -C perl -s --no-print-directory instlibdir` && \
sed -e '1{' \
@@ -2045,14 +2121,8 @@ $(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl
gitweb:
$(QUIET_SUBDIR0)gitweb $(QUIET_SUBDIR1) all
-git-instaweb: git-instaweb.sh gitweb
- $(QUIET_GEN)$(RM) $@ $@+ && \
- sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
- -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
- -e 's/@@NO_CURL@@/$(NO_CURL)/g' \
- -e 's|@@GITWEBDIR@@|$(gitwebdir_SQ)|g' \
- -e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \
- $@.sh > $@+ && \
+git-instaweb: git-instaweb.sh gitweb GIT-SCRIPT-DEFINES
+ $(QUIET_GEN)$(cmd_munge_script) && \
chmod +x $@+ && \
mv $@+ $@
else # NO_PERL
@@ -2066,7 +2136,7 @@ $(patsubst %.perl,%,$(SCRIPT_PERL)) git-instaweb: % : unimplemented.sh
endif # NO_PERL
ifndef NO_PYTHON
-$(patsubst %.py,%,$(SCRIPT_PYTHON)): GIT-CFLAGS
+$(patsubst %.py,%,$(SCRIPT_PYTHON)): GIT-CFLAGS GIT-PREFIX
$(patsubst %.py,%,$(SCRIPT_PYTHON)): % : %.py
$(QUIET_GEN)$(RM) $@ $@+ && \
INSTLIBDIR=`MAKEFLAGS= $(MAKE) -C git_remote_helpers -s \
@@ -2088,24 +2158,23 @@ $(patsubst %.py,%,$(SCRIPT_PYTHON)): % : unimplemented.sh
mv $@+ $@
endif # NO_PYTHON
-configure: configure.ac
+configure: configure.ac GIT-VERSION-FILE
$(QUIET_GEN)$(RM) $@ $<+ && \
sed -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
$< > $<+ && \
autoconf -o $@ $<+ && \
$(RM) $<+
-# These can record GIT_VERSION
-git.o git.spec http.o \
- $(patsubst %.sh,%,$(SCRIPT_SH)) \
- $(patsubst %.perl,%,$(SCRIPT_PERL)) \
- : GIT-VERSION-FILE
-
-TEST_OBJS := $(patsubst test-%$X,test-%.o,$(TEST_PROGRAMS))
-GIT_OBJS := $(LIB_OBJS) $(BUILTIN_OBJS) $(PROGRAM_OBJS) $(TEST_OBJS) \
- git.o
-ifndef NO_CURL
- GIT_OBJS += http.o http-walker.o remote-curl.o
+ifdef AUTOCONFIGURED
+config.status: configure
+ $(QUIET_GEN)if test -f config.status; then \
+ ./config.status --recheck; \
+ else \
+ ./configure; \
+ fi
+reconfigure config.mak.autogen: config.status
+ $(QUIET_GEN)./config.status
+.PHONY: reconfigure # This is a convenience target.
endif
XDIFF_OBJS += xdiff/xdiffi.o
@@ -2123,9 +2192,14 @@ VCSSVN_OBJS += vcs-svn/fast_export.o
VCSSVN_OBJS += vcs-svn/svndiff.o
VCSSVN_OBJS += vcs-svn/svndump.o
-VCSSVN_TEST_OBJS += test-line-buffer.o
-
-OBJECTS := $(GIT_OBJS) $(XDIFF_OBJS) $(VCSSVN_OBJS)
+TEST_OBJS := $(patsubst test-%$X,test-%.o,$(TEST_PROGRAMS))
+OBJECTS := $(LIB_OBJS) $(BUILTIN_OBJS) $(PROGRAM_OBJS) $(TEST_OBJS) \
+ $(XDIFF_OBJS) \
+ $(VCSSVN_OBJS) \
+ git.o
+ifndef NO_CURL
+ OBJECTS += http.o http-walker.o remote-curl.o
+endif
dep_files := $(foreach f,$(OBJECTS),$(dir $f).depend/$(notdir $f).d)
dep_dirs := $(addsuffix .depend,$(sort $(dir $(OBJECTS))))
@@ -2224,51 +2298,32 @@ else
# Dependencies on automatically generated headers such as common-cmds.h
# should _not_ be included here, since they are necessary even when
# building an object for the first time.
-#
-# XXX. Please check occasionally that these include all dependencies
-# gcc detects!
-
-$(GIT_OBJS): $(LIB_H)
-builtin/branch.o builtin/checkout.o builtin/clone.o builtin/reset.o branch.o transport.o: branch.h
-builtin/bundle.o bundle.o transport.o: bundle.h
-builtin/bisect--helper.o builtin/rev-list.o bisect.o: bisect.h
-builtin/clone.o builtin/fetch-pack.o transport.o: fetch-pack.h
-builtin/index-pack.o builtin/grep.o builtin/pack-objects.o transport-helper.o thread-utils.o: thread-utils.h
-builtin/send-pack.o transport.o: send-pack.h
-builtin/log.o builtin/shortlog.o: shortlog.h
-builtin/prune.o builtin/reflog.o reachable.o: reachable.h
-builtin/commit.o builtin/revert.o wt-status.o: wt-status.h
-builtin/tar-tree.o archive-tar.o: tar.h
-connect.o transport.o url.o http-backend.o: url.h
-builtin/branch.o builtin/commit.o builtin/tag.o column.o help.o pager.o: column.h
-http-fetch.o http-walker.o remote-curl.o transport.o walker.o: walker.h
-http.o http-walker.o http-push.o http-fetch.o remote-curl.o: http.h url.h
-xdiff-interface.o $(XDIFF_OBJS): $(XDIFF_H)
-
-$(VCSSVN_OBJS) $(VCSSVN_TEST_OBJS): $(LIB_H) $(VCSSVN_H)
+$(OBJECTS): $(LIB_H)
endif
+exec_cmd.sp exec_cmd.s exec_cmd.o: GIT-PREFIX
exec_cmd.sp exec_cmd.s exec_cmd.o: EXTRA_CPPFLAGS = \
'-DGIT_EXEC_PATH="$(gitexecdir_SQ)"' \
'-DBINDIR="$(bindir_relative_SQ)"' \
'-DPREFIX="$(prefix_SQ)"'
+builtin/init-db.sp builtin/init-db.s builtin/init-db.o: GIT-PREFIX
builtin/init-db.sp builtin/init-db.s builtin/init-db.o: EXTRA_CPPFLAGS = \
-DDEFAULT_GIT_TEMPLATE_DIR='"$(template_dir_SQ)"'
+config.sp config.s config.o: GIT-PREFIX
config.sp config.s config.o: EXTRA_CPPFLAGS = \
-DETC_GITCONFIG='"$(ETC_GITCONFIG_SQ)"'
+attr.sp attr.s attr.o: GIT-PREFIX
attr.sp attr.s attr.o: EXTRA_CPPFLAGS = \
-DETC_GITATTRIBUTES='"$(ETC_GITATTRIBUTES_SQ)"'
+gettext.sp gettext.s gettext.o: GIT-PREFIX
gettext.sp gettext.s gettext.o: EXTRA_CPPFLAGS = \
-DGIT_LOCALE_PATH='"$(localedir_SQ)"'
-http.sp http.s http.o: EXTRA_CPPFLAGS = \
- -DGIT_HTTP_USER_AGENT='"git/$(GIT_VERSION)"'
-
ifdef NO_EXPAT
http-walker.sp http-walker.s http-walker.o: EXTRA_CPPFLAGS = -DNO_EXPAT
endif
@@ -2340,9 +2395,10 @@ XGETTEXT_FLAGS = \
--from-code=UTF-8
XGETTEXT_FLAGS_C = $(XGETTEXT_FLAGS) --language=C \
--keyword=_ --keyword=N_ --keyword="Q_:1,2"
-XGETTEXT_FLAGS_SH = $(XGETTEXT_FLAGS) --language=Shell
+XGETTEXT_FLAGS_SH = $(XGETTEXT_FLAGS) --language=Shell \
+ --keyword=gettextln --keyword=eval_gettextln
XGETTEXT_FLAGS_PERL = $(XGETTEXT_FLAGS) --keyword=__ --language=Perl
-LOCALIZED_C := $(C_OBJ:o=c) $(LIB_H) $(XDIFF_H) $(VCSSVN_H) $(MISC_H)
+LOCALIZED_C := $(C_OBJ:o=c) $(LIB_H) $(GENERATED_H)
LOCALIZED_SH := $(SCRIPT_SH)
LOCALIZED_PERL := $(SCRIPT_PERL)
@@ -2389,14 +2445,22 @@ cscope:
$(FIND_SOURCE_FILES) | xargs cscope -b
### Detect prefix changes
-TRACK_CFLAGS = $(CC):$(subst ','\'',$(ALL_CFLAGS)):\
- $(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ):\
- $(localedir_SQ):$(USE_GETTEXT_SCHEME)
+TRACK_PREFIX = $(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ):\
+ $(localedir_SQ)
+
+GIT-PREFIX: FORCE
+ @FLAGS='$(TRACK_PREFIX)'; \
+ if test x"$$FLAGS" != x"`cat GIT-PREFIX 2>/dev/null`" ; then \
+ echo 1>&2 " * new prefix flags"; \
+ echo "$$FLAGS" >GIT-PREFIX; \
+ fi
+
+TRACK_CFLAGS = $(CC):$(subst ','\'',$(ALL_CFLAGS)):$(USE_GETTEXT_SCHEME)
GIT-CFLAGS: FORCE
@FLAGS='$(TRACK_CFLAGS)'; \
if test x"$$FLAGS" != x"`cat GIT-CFLAGS 2>/dev/null`" ; then \
- echo 1>&2 " * new build flags or prefix"; \
+ echo 1>&2 " * new build flags"; \
echo "$$FLAGS" >GIT-CFLAGS; \
fi
@@ -2648,7 +2712,7 @@ quick-install-html:
### Maintainer's dist rules
-git.spec: git.spec.in
+git.spec: git.spec.in GIT-VERSION-FILE
sed -e 's/@@VERSION@@/$(GIT_VERSION)/g' < $< > $@+
mv $@+ $@
@@ -2698,6 +2762,9 @@ dist-doc:
distclean: clean
$(RM) configure
+ $(RM) config.log config.status config.cache
+ $(RM) config.mak.autogen config.mak.append
+ $(RM) -r autom4te.cache
profile-clean:
$(RM) $(addsuffix *.gcda,$(addprefix $(PROFILE_DIR)/, $(object_dirs)))
@@ -2712,8 +2779,6 @@ clean: profile-clean
$(RM) -r $(dep_dirs)
$(RM) -r po/build/
$(RM) *.spec *.pyc *.pyo */*.pyc */*.pyo common-cmds.h $(ETAGS_TARGET) tags cscope*
- $(RM) -r autom4te.cache
- $(RM) config.log config.mak.autogen config.mak.append config.status config.cache
$(RM) -r $(GIT_TARNAME) .doc-tmp-dir
$(RM) $(GIT_TARNAME).tar.gz git-core_$(GIT_VERSION)-*.tar.gz
$(RM) $(htmldocs).tar.gz $(manpages).tar.gz
@@ -2732,6 +2797,7 @@ ifndef NO_TCLTK
$(MAKE) -C git-gui clean
endif
$(RM) GIT-VERSION-FILE GIT-CFLAGS GIT-LDFLAGS GIT-GUI-VARS GIT-BUILD-OPTIONS
+ $(RM) GIT-USER-AGENT GIT-PREFIX GIT-SCRIPT-DEFINES
.PHONY: all install profile-clean clean strip
.PHONY: shell_compatibility_test please_set_SHELL_PATH_to_a_more_modern_shell
diff --git a/RelNotes b/RelNotes
index f6490be..19bb2eb 120000
--- a/RelNotes
+++ b/RelNotes
@@ -1 +1 @@
-Documentation/RelNotes/1.7.11.5.txt
\ No newline at end of file
+Documentation/RelNotes/1.7.12.txt
\ No newline at end of file
diff --git a/attr.c b/attr.c
index 303751f..b52efb5 100644
--- a/attr.c
+++ b/attr.c
@@ -497,6 +497,7 @@ static int git_attr_system(void)
static void bootstrap_attr_stack(void)
{
struct attr_stack *elem;
+ char *xdg_attributes_file;
if (attr_stack)
return;
@@ -515,6 +516,10 @@ static void bootstrap_attr_stack(void)
}
}
+ if (!git_attributes_file) {
+ home_config_paths(NULL, &xdg_attributes_file, "attributes");
+ git_attributes_file = xdg_attributes_file;
+ }
if (git_attributes_file) {
elem = read_attr_from_file(git_attributes_file, 1);
if (elem) {
diff --git a/branch.c b/branch.c
index eccdaf9..2bef1e7 100644
--- a/branch.c
+++ b/branch.c
@@ -74,25 +74,33 @@ void install_branch_config(int flag, const char *local, const char *origin, cons
strbuf_addf(&key, "branch.%s.rebase", local);
git_config_set(key.buf, "true");
}
+ strbuf_release(&key);
if (flag & BRANCH_CONFIG_VERBOSE) {
- strbuf_reset(&key);
-
- strbuf_addstr(&key, origin ? "remote" : "local");
-
- /* Are we tracking a proper "branch"? */
- if (remote_is_branch) {
- strbuf_addf(&key, " branch %s", shortname);
- if (origin)
- strbuf_addf(&key, " from %s", origin);
- }
+ if (remote_is_branch && origin)
+ printf(rebasing ?
+ "Branch %s set up to track remote branch %s from %s by rebasing.\n" :
+ "Branch %s set up to track remote branch %s from %s.\n",
+ local, shortname, origin);
+ else if (remote_is_branch && !origin)
+ printf(rebasing ?
+ "Branch %s set up to track local branch %s by rebasing.\n" :
+ "Branch %s set up to track local branch %s.\n",
+ local, shortname);
+ else if (!remote_is_branch && origin)
+ printf(rebasing ?
+ "Branch %s set up to track remote ref %s by rebasing.\n" :
+ "Branch %s set up to track remote ref %s.\n",
+ local, remote);
+ else if (!remote_is_branch && !origin)
+ printf(rebasing ?
+ "Branch %s set up to track local ref %s by rebasing.\n" :
+ "Branch %s set up to track local ref %s.\n",
+ local, remote);
else
- strbuf_addf(&key, " ref %s", remote);
- printf("Branch %s set up to track %s%s.\n",
- local, key.buf,
- rebasing ? " by rebasing" : "");
+ die("BUG: impossible combination of %d and %p",
+ remote_is_branch, origin);
}
- strbuf_release(&key);
}
/*
diff --git a/builtin.h b/builtin.h
index e426de3..ba6626b 100644
--- a/builtin.h
+++ b/builtin.h
@@ -9,7 +9,6 @@
#define DEFAULT_MERGE_LOG_LEN 20
-extern const char git_version_string[];
extern const char git_usage_string[];
extern const char git_more_info_string[];
@@ -68,6 +67,7 @@ extern int cmd_commit(int argc, const char **argv, const char *prefix);
extern int cmd_commit_tree(int argc, const char **argv, const char *prefix);
extern int cmd_config(int argc, const char **argv, const char *prefix);
extern int cmd_count_objects(int argc, const char **argv, const char *prefix);
+extern int cmd_credential(int argc, const char **argv, const char *prefix);
extern int cmd_describe(int argc, const char **argv, const char *prefix);
extern int cmd_diff_files(int argc, const char **argv, const char *prefix);
extern int cmd_diff_index(int argc, const char **argv, const char *prefix);
@@ -85,7 +85,6 @@ extern int cmd_get_tar_commit_id(int argc, const char **argv, const char *prefix
extern int cmd_grep(int argc, const char **argv, const char *prefix);
extern int cmd_hash_object(int argc, const char **argv, const char *prefix);
extern int cmd_help(int argc, const char **argv, const char *prefix);
-extern int cmd_http_fetch(int argc, const char **argv, const char *prefix);
extern int cmd_index_pack(int argc, const char **argv, const char *prefix);
extern int cmd_init_db(int argc, const char **argv, const char *prefix);
extern int cmd_log(int argc, const char **argv, const char *prefix);
@@ -110,7 +109,6 @@ extern int cmd_notes(int argc, const char **argv, const char *prefix);
extern int cmd_pack_objects(int argc, const char **argv, const char *prefix);
extern int cmd_pack_redundant(int argc, const char **argv, const char *prefix);
extern int cmd_patch_id(int argc, const char **argv, const char *prefix);
-extern int cmd_pickaxe(int argc, const char **argv, const char *prefix);
extern int cmd_prune(int argc, const char **argv, const char *prefix);
extern int cmd_prune_packed(int argc, const char **argv, const char *prefix);
extern int cmd_push(int argc, const char **argv, const char *prefix);
@@ -143,7 +141,6 @@ extern int cmd_update_ref(int argc, const char **argv, const char *prefix);
extern int cmd_update_server_info(int argc, const char **argv, const char *prefix);
extern int cmd_upload_archive(int argc, const char **argv, const char *prefix);
extern int cmd_upload_archive_writer(int argc, const char **argv, const char *prefix);
-extern int cmd_upload_tar(int argc, const char **argv, const char *prefix);
extern int cmd_var(int argc, const char **argv, const char *prefix);
extern int cmd_verify_tag(int argc, const char **argv, const char *prefix);
extern int cmd_version(int argc, const char **argv, const char *prefix);
diff --git a/builtin/apply.c b/builtin/apply.c
index b4428ea..d453c83 100644
--- a/builtin/apply.c
+++ b/builtin/apply.c
@@ -16,6 +16,9 @@
#include "dir.h"
#include "diff.h"
#include "parse-options.h"
+#include "xdiff-interface.h"
+#include "ll-merge.h"
+#include "rerere.h"
/*
* --check turns on checking that the working tree matches the
@@ -46,6 +49,7 @@ static int apply_with_reject;
static int apply_verbosely;
static int allow_overlap;
static int no_add;
+static int threeway;
static const char *fake_ancestor;
static int line_termination = '\n';
static unsigned int p_context = UINT_MAX;
@@ -193,12 +197,17 @@ struct patch {
unsigned int is_copy:1;
unsigned int is_rename:1;
unsigned int recount:1;
+ unsigned int conflicted_threeway:1;
+ unsigned int direct_to_threeway:1;
struct fragment *fragments;
char *result;
size_t resultsize;
char old_sha1_prefix[41];
char new_sha1_prefix[41];
struct patch *next;
+
+ /* three-way fallback result */
+ unsigned char threeway_stage[3][20];
};
static void free_fragment_list(struct fragment *list)
@@ -371,8 +380,8 @@ static void prepare_image(struct image *image, char *buf, size_t len,
static void clear_image(struct image *image)
{
free(image->buf);
- image->buf = NULL;
- image->len = 0;
+ free(image->line_allocated);
+ memset(image, 0, sizeof(*image));
}
/* fmt must contain _one_ %s and no other substitution */
@@ -2937,20 +2946,17 @@ static int apply_fragments(struct image *img, struct patch *patch)
return 0;
}
-static int read_file_or_gitlink(struct cache_entry *ce, struct strbuf *buf)
+static int read_blob_object(struct strbuf *buf, const unsigned char *sha1, unsigned mode)
{
- if (!ce)
- return 0;
-
- if (S_ISGITLINK(ce->ce_mode)) {
+ if (S_ISGITLINK(mode)) {
strbuf_grow(buf, 100);
- strbuf_addf(buf, "Subproject commit %s\n", sha1_to_hex(ce->sha1));
+ strbuf_addf(buf, "Subproject commit %s\n", sha1_to_hex(sha1));
} else {
enum object_type type;
unsigned long sz;
char *result;
- result = read_sha1_file(ce->sha1, &type, &sz);
+ result = read_sha1_file(sha1, &type, &sz);
if (!result)
return -1;
/* XXX read_sha1_file NUL-terminates */
@@ -2959,6 +2965,13 @@ static int read_file_or_gitlink(struct cache_entry *ce, struct strbuf *buf)
return 0;
}
+static int read_file_or_gitlink(struct cache_entry *ce, struct strbuf *buf)
+{
+ if (!ce)
+ return 0;
+ return read_blob_object(buf, ce->sha1, ce->ce_mode);
+}
+
static struct patch *in_fn_table(const char *name)
{
struct string_list_item *item;
@@ -2977,9 +2990,15 @@ static struct patch *in_fn_table(const char *name)
* item->util in the filename table records the status of the path.
* Usually it points at a patch (whose result records the contents
* of it after applying it), but it could be PATH_WAS_DELETED for a
- * path that a previously applied patch has already removed.
+ * path that a previously applied patch has already removed, or
+ * PATH_TO_BE_DELETED for a path that a later patch would remove.
+ *
+ * The latter is needed to deal with a case where two paths A and B
+ * are swapped by first renaming A to B and then renaming B to A;
+ * moving A to B should not be prevented due to the presence of B as we
+ * will remove it in a later patch.
*/
- #define PATH_TO_BE_DELETED ((struct patch *) -2)
+#define PATH_TO_BE_DELETED ((struct patch *) -2)
#define PATH_WAS_DELETED ((struct patch *) -1)
static int to_be_deleted(struct patch *patch)
@@ -3031,127 +3050,324 @@ static void prepare_fn_table(struct patch *patch)
}
}
-static int apply_data(struct patch *patch, struct stat *st, struct cache_entry *ce)
+static int checkout_target(struct cache_entry *ce, struct stat *st)
+{
+ struct checkout costate;
+
+ memset(&costate, 0, sizeof(costate));
+ costate.base_dir = "";
+ costate.refresh_cache = 1;
+ if (checkout_entry(ce, &costate, NULL) || lstat(ce->name, st))
+ return error(_("cannot checkout %s"), ce->name);
+ return 0;
+}
+
+static struct patch *previous_patch(struct patch *patch, int *gone)
+{
+ struct patch *previous;
+
+ *gone = 0;
+ if (patch->is_copy || patch->is_rename)
+ return NULL; /* "git" patches do not depend on the order */
+
+ previous = in_fn_table(patch->old_name);
+ if (!previous)
+ return NULL;
+
+ if (to_be_deleted(previous))
+ return NULL; /* the deletion hasn't happened yet */
+
+ if (was_deleted(previous))
+ *gone = 1;
+
+ return previous;
+}
+
+static int verify_index_match(struct cache_entry *ce, struct stat *st)
+{
+ if (S_ISGITLINK(ce->ce_mode)) {
+ if (!S_ISDIR(st->st_mode))
+ return -1;
+ return 0;
+ }
+ return ce_match_stat(ce, st, CE_MATCH_IGNORE_VALID|CE_MATCH_IGNORE_SKIP_WORKTREE);
+}
+
+#define SUBMODULE_PATCH_WITHOUT_INDEX 1
+
+static int load_patch_target(struct strbuf *buf,
+ struct cache_entry *ce,
+ struct stat *st,
+ const char *name,
+ unsigned expected_mode)
+{
+ if (cached) {
+ if (read_file_or_gitlink(ce, buf))
+ return error(_("read of %s failed"), name);
+ } else if (name) {
+ if (S_ISGITLINK(expected_mode)) {
+ if (ce)
+ return read_file_or_gitlink(ce, buf);
+ else
+ return SUBMODULE_PATCH_WITHOUT_INDEX;
+ } else {
+ if (read_old_data(st, name, buf))
+ return error(_("read of %s failed"), name);
+ }
+ }
+ return 0;
+}
+
+/*
+ * We are about to apply "patch"; populate the "image" with the
+ * current version we have, from the working tree or from the index,
+ * depending on the situation e.g. --cached/--index. If we are
+ * applying a non-git patch that incrementally updates the tree,
+ * we read from the result of a previous diff.
+ */
+static int load_preimage(struct image *image,
+ struct patch *patch, struct stat *st, struct cache_entry *ce)
{
struct strbuf buf = STRBUF_INIT;
- struct image image;
size_t len;
char *img;
- struct patch *tpatch;
+ struct patch *previous;
+ int status;
- if (!(patch->is_copy || patch->is_rename) &&
- (tpatch = in_fn_table(patch->old_name)) != NULL && !to_be_deleted(tpatch)) {
- if (was_deleted(tpatch)) {
- return error(_("patch %s has been renamed/deleted"),
- patch->old_name);
- }
+ previous = previous_patch(patch, &status);
+ if (status)
+ return error(_("path %s has been renamed/deleted"),
+ patch->old_name);
+ if (previous) {
/* We have a patched copy in memory; use that. */
- strbuf_add(&buf, tpatch->result, tpatch->resultsize);
- } else if (cached) {
- if (read_file_or_gitlink(ce, &buf))
+ strbuf_add(&buf, previous->result, previous->resultsize);
+ } else {
+ status = load_patch_target(&buf, ce, st,
+ patch->old_name, patch->old_mode);
+ if (status < 0)
+ return status;
+ else if (status == SUBMODULE_PATCH_WITHOUT_INDEX) {
+ /*
+ * There is no way to apply subproject
+ * patch without looking at the index.
+ * NEEDSWORK: shouldn't this be flagged
+ * as an error???
+ */
+ free_fragment_list(patch->fragments);
+ patch->fragments = NULL;
+ } else if (status) {
return error(_("read of %s failed"), patch->old_name);
- } else if (patch->old_name) {
- if (S_ISGITLINK(patch->old_mode)) {
- if (ce) {
- read_file_or_gitlink(ce, &buf);
- } else {
- /*
- * There is no way to apply subproject
- * patch without looking at the index.
- * NEEDSWORK: shouldn't this be flagged
- * as an error???
- */
- free_fragment_list(patch->fragments);
- patch->fragments = NULL;
- }
- } else {
- if (read_old_data(st, patch->old_name, &buf))
- return error(_("read of %s failed"), patch->old_name);
}
}
img = strbuf_detach(&buf, &len);
- prepare_image(&image, img, len, !patch->is_binary);
+ prepare_image(image, img, len, !patch->is_binary);
+ return 0;
+}
- if (apply_fragments(&image, patch) < 0)
- return -1; /* note with --reject this succeeds. */
- patch->result = image.buf;
- patch->resultsize = image.len;
- add_to_fn_table(patch);
- free(image.line_allocated);
+static int three_way_merge(struct image *image,
+ char *path,
+ const unsigned char *base,
+ const unsigned char *ours,
+ const unsigned char *theirs)
+{
+ mmfile_t base_file, our_file, their_file;
+ mmbuffer_t result = { NULL };
+ int status;
- if (0 < patch->is_delete && patch->resultsize)
- return error(_("removal patch leaves file contents"));
+ read_mmblob(&base_file, base);
+ read_mmblob(&our_file, ours);
+ read_mmblob(&their_file, theirs);
+ status = ll_merge(&result, path,
+ &base_file, "base",
+ &our_file, "ours",
+ &their_file, "theirs", NULL);
+ free(base_file.ptr);
+ free(our_file.ptr);
+ free(their_file.ptr);
+ if (status < 0 || !result.ptr) {
+ free(result.ptr);
+ return -1;
+ }
+ clear_image(image);
+ image->buf = result.ptr;
+ image->len = result.size;
+ return status;
+}
+
+/*
+ * When directly falling back to add/add three-way merge, we read from
+ * the current contents of the new_name; this function is called only
+ * in that case.
+ */
+static int load_current(struct image *image, struct patch *patch)
+{
+ struct strbuf buf = STRBUF_INIT;
+ int status, pos;
+ size_t len;
+ char *img;
+ struct stat st;
+ struct cache_entry *ce;
+ char *name = patch->new_name;
+ unsigned mode = patch->new_mode;
+
+ if (!patch->is_new)
+ die("BUG: patch to %s is not a creation", patch->old_name);
+
+ pos = cache_name_pos(name, strlen(name));
+ if (pos < 0)
+ return error(_("%s: does not exist in index"), name);
+ ce = active_cache[pos];
+ if (lstat(name, &st)) {
+ if (errno != ENOENT)
+ return error(_("%s: %s"), name, strerror(errno));
+ if (checkout_target(ce, &st))
+ return -1;
+ }
+ if (verify_index_match(ce, &st))
+ return error(_("%s: does not match index"), name);
+
+ status = load_patch_target(&buf, ce, &st, name, mode);
+ if (status < 0)
+ return status;
+ else if (status)
+ return -1;
+ img = strbuf_detach(&buf, &len);
+ prepare_image(image, img, len, !patch->is_binary);
return 0;
}
-static int check_to_create_blob(const char *new_name, int ok_if_exists)
+static int try_threeway(struct image *image, struct patch *patch,
+ struct stat *st, struct cache_entry *ce)
{
- struct stat nst;
- if (!lstat(new_name, &nst)) {
- if (S_ISDIR(nst.st_mode) || ok_if_exists)
- return 0;
- /*
- * A leading component of new_name might be a symlink
- * that is going to be removed with this patch, but
- * still pointing at somewhere that has the path.
- * In such a case, path "new_name" does not exist as
- * far as git is concerned.
- */
- if (has_symlink_leading_path(new_name, strlen(new_name)))
- return 0;
+ unsigned char pre_sha1[20], post_sha1[20], our_sha1[20];
+ struct strbuf buf = STRBUF_INIT;
+ size_t len;
+ int status;
+ char *img;
+ struct image tmp_image;
+
+ /* No point falling back to 3-way merge in these cases */
+ if (patch->is_delete ||
+ S_ISGITLINK(patch->old_mode) || S_ISGITLINK(patch->new_mode))
+ return -1;
- return error(_("%s: already exists in working directory"), new_name);
+ /* Preimage the patch was prepared for */
+ if (patch->is_new)
+ write_sha1_file("", 0, blob_type, pre_sha1);
+ else if (get_sha1(patch->old_sha1_prefix, pre_sha1) ||
+ read_blob_object(&buf, pre_sha1, patch->old_mode))
+ return error("repository lacks the necessary blob to fall back on 3-way merge.");
+
+ fprintf(stderr, "Falling back to three-way merge...\n");
+
+ img = strbuf_detach(&buf, &len);
+ prepare_image(&tmp_image, img, len, 1);
+ /* Apply the patch to get the post image */
+ if (apply_fragments(&tmp_image, patch) < 0) {
+ clear_image(&tmp_image);
+ return -1;
+ }
+ /* post_sha1[] is theirs */
+ write_sha1_file(tmp_image.buf, tmp_image.len, blob_type, post_sha1);
+ clear_image(&tmp_image);
+
+ /* our_sha1[] is ours */
+ if (patch->is_new) {
+ if (load_current(&tmp_image, patch))
+ return error("cannot read the current contents of '%s'",
+ patch->new_name);
+ } else {
+ if (load_preimage(&tmp_image, patch, st, ce))
+ return error("cannot read the current contents of '%s'",
+ patch->old_name);
+ }
+ write_sha1_file(tmp_image.buf, tmp_image.len, blob_type, our_sha1);
+ clear_image(&tmp_image);
+
+ /* in-core three-way merge between post and our using pre as base */
+ status = three_way_merge(image, patch->new_name,
+ pre_sha1, our_sha1, post_sha1);
+ if (status < 0) {
+ fprintf(stderr, "Failed to fall back on three-way merge...\n");
+ return status;
+ }
+
+ if (status) {
+ patch->conflicted_threeway = 1;
+ if (patch->is_new)
+ hashclr(patch->threeway_stage[0]);
+ else
+ hashcpy(patch->threeway_stage[0], pre_sha1);
+ hashcpy(patch->threeway_stage[1], our_sha1);
+ hashcpy(patch->threeway_stage[2], post_sha1);
+ fprintf(stderr, "Applied patch to '%s' with conflicts.\n", patch->new_name);
+ } else {
+ fprintf(stderr, "Applied patch to '%s' cleanly.\n", patch->new_name);
}
- else if ((errno != ENOENT) && (errno != ENOTDIR))
- return error("%s: %s", new_name, strerror(errno));
return 0;
}
-static int verify_index_match(struct cache_entry *ce, struct stat *st)
+static int apply_data(struct patch *patch, struct stat *st, struct cache_entry *ce)
{
- if (S_ISGITLINK(ce->ce_mode)) {
- if (!S_ISDIR(st->st_mode))
+ struct image image;
+
+ if (load_preimage(&image, patch, st, ce) < 0)
+ return -1;
+
+ if (patch->direct_to_threeway ||
+ apply_fragments(&image, patch) < 0) {
+ /* Note: with --reject, apply_fragments() returns 0 */
+ if (!threeway || try_threeway(&image, patch, st, ce) < 0)
return -1;
- return 0;
}
- return ce_match_stat(ce, st, CE_MATCH_IGNORE_VALID|CE_MATCH_IGNORE_SKIP_WORKTREE);
+ patch->result = image.buf;
+ patch->resultsize = image.len;
+ add_to_fn_table(patch);
+ free(image.line_allocated);
+
+ if (0 < patch->is_delete && patch->resultsize)
+ return error(_("removal patch leaves file contents"));
+
+ return 0;
}
+/*
+ * If "patch" that we are looking at modifies or deletes what we have,
+ * we would want it not to lose any local modification we have, either
+ * in the working tree or in the index.
+ *
+ * This also decides if a non-git patch is a creation patch or a
+ * modification to an existing empty file. We do not check the state
+ * of the current tree for a creation patch in this function; the caller
+ * check_patch() separately makes sure (and errors out otherwise) that
+ * the path the patch creates does not exist in the current tree.
+ */
static int check_preimage(struct patch *patch, struct cache_entry **ce, struct stat *st)
{
const char *old_name = patch->old_name;
- struct patch *tpatch = NULL;
- int stat_ret = 0;
+ struct patch *previous = NULL;
+ int stat_ret = 0, status;
unsigned st_mode = 0;
- /*
- * Make sure that we do not have local modifications from the
- * index when we are looking at the index. Also make sure
- * we have the preimage file to be patched in the work tree,
- * unless --cached, which tells git to apply only in the index.
- */
if (!old_name)
return 0;
assert(patch->is_new <= 0);
+ previous = previous_patch(patch, &status);
- if (!(patch->is_copy || patch->is_rename) &&
- (tpatch = in_fn_table(old_name)) != NULL && !to_be_deleted(tpatch)) {
- if (was_deleted(tpatch))
- return error(_("%s: has been deleted/renamed"), old_name);
- st_mode = tpatch->new_mode;
+ if (status)
+ return error(_("path %s has been renamed/deleted"), old_name);
+ if (previous) {
+ st_mode = previous->new_mode;
} else if (!cached) {
stat_ret = lstat(old_name, st);
if (stat_ret && errno != ENOENT)
return error(_("%s: %s"), old_name, strerror(errno));
}
- if (to_be_deleted(tpatch))
- tpatch = NULL;
-
- if (check_index && !tpatch) {
+ if (check_index && !previous) {
int pos = cache_name_pos(old_name, strlen(old_name));
if (pos < 0) {
if (patch->is_new < 0)
@@ -3160,13 +3376,7 @@ static int check_preimage(struct patch *patch, struct cache_entry **ce, struct s
}
*ce = active_cache[pos];
if (stat_ret < 0) {
- struct checkout costate;
- /* checkout */
- memset(&costate, 0, sizeof(costate));
- costate.base_dir = "";
- costate.refresh_cache = 1;
- if (checkout_entry(*ce, &costate, NULL) ||
- lstat(old_name, st))
+ if (checkout_target(*ce, st))
return -1;
}
if (!cached && verify_index_match(*ce, st))
@@ -3179,7 +3389,7 @@ static int check_preimage(struct patch *patch, struct cache_entry **ce, struct s
return error(_("%s: %s"), old_name, strerror(errno));
}
- if (!cached && !tpatch)
+ if (!cached && !previous)
st_mode = ce_mode_from_stat(*ce, st->st_mode);
if (patch->is_new < 0)
@@ -3203,6 +3413,41 @@ static int check_preimage(struct patch *patch, struct cache_entry **ce, struct s
return 0;
}
+
+#define EXISTS_IN_INDEX 1
+#define EXISTS_IN_WORKTREE 2
+
+static int check_to_create(const char *new_name, int ok_if_exists)
+{
+ struct stat nst;
+
+ if (check_index &&
+ cache_name_pos(new_name, strlen(new_name)) >= 0 &&
+ !ok_if_exists)
+ return EXISTS_IN_INDEX;
+ if (cached)
+ return 0;
+
+ if (!lstat(new_name, &nst)) {
+ if (S_ISDIR(nst.st_mode) || ok_if_exists)
+ return 0;
+ /*
+ * A leading component of new_name might be a symlink
+ * that is going to be removed with this patch, but
+ * still pointing at somewhere that has the path.
+ * In such a case, path "new_name" does not exist as
+ * far as git is concerned.
+ */
+ if (has_symlink_leading_path(new_name, strlen(new_name)))
+ return 0;
+
+ return EXISTS_IN_WORKTREE;
+ } else if ((errno != ENOENT) && (errno != ENOTDIR)) {
+ return error("%s: %s", new_name, strerror(errno));
+ }
+ return 0;
+}
+
/*
* Check and apply the patch in-core; leave the result in patch->result
* for the caller to write it out to the final destination.
@@ -3225,31 +3470,45 @@ static int check_patch(struct patch *patch)
return status;
old_name = patch->old_name;
+ /*
+ * A type-change diff is always split into a patch to delete
+ * old, immediately followed by a patch to create new (see
+ * diff.c::run_diff()); in such a case it is Ok that the entry
+ * to be deleted by the previous patch is still in the working
+ * tree and in the index.
+ *
+ * A patch to swap-rename between A and B would first rename A
+ * to B and then rename B to A. While applying the first one,
+ * the presence of B should not stop A from getting renamed to
+ * B; ask to_be_deleted() about the later rename. Removal of
+ * B and rename from A to B is handled the same way by asking
+ * was_deleted().
+ */
if ((tpatch = in_fn_table(new_name)) &&
- (was_deleted(tpatch) || to_be_deleted(tpatch)))
- /*
- * A type-change diff is always split into a patch to
- * delete old, immediately followed by a patch to
- * create new (see diff.c::run_diff()); in such a case
- * it is Ok that the entry to be deleted by the
- * previous patch is still in the working tree and in
- * the index.
- */
+ (was_deleted(tpatch) || to_be_deleted(tpatch)))
ok_if_exists = 1;
else
ok_if_exists = 0;
if (new_name &&
((0 < patch->is_new) | (0 < patch->is_rename) | patch->is_copy)) {
- if (check_index &&
- cache_name_pos(new_name, strlen(new_name)) >= 0 &&
- !ok_if_exists)
+ int err = check_to_create(new_name, ok_if_exists);
+
+ if (err && threeway) {
+ patch->direct_to_threeway = 1;
+ } else switch (err) {
+ case 0:
+ break; /* happy */
+ case EXISTS_IN_INDEX:
return error(_("%s: already exists in index"), new_name);
- if (!cached) {
- int err = check_to_create_blob(new_name, ok_if_exists);
- if (err)
- return err;
+ break;
+ case EXISTS_IN_WORKTREE:
+ return error(_("%s: already exists in working directory"),
+ new_name);
+ default:
+ return err;
}
+
if (!patch->new_mode) {
if (0 < patch->is_new)
patch->new_mode = S_IFREG | 0644;
@@ -3330,7 +3589,7 @@ static void build_fake_ancestor(struct patch *list, const char *filename)
name = patch->old_name ? patch->old_name : patch->new_name;
if (0 < patch->is_new)
continue;
- else if (get_sha1(patch->old_sha1_prefix, sha1))
+ else if (get_sha1_blob(patch->old_sha1_prefix, sha1))
/* git diff has no index line for mode/type changes */
if (!patch->lines_added && !patch->lines_deleted) {
if (get_current_sha1(patch->old_name, sha1))
@@ -3510,7 +3769,8 @@ static void add_index_file(const char *path, unsigned mode, void *buf, unsigned
ce = xcalloc(1, ce_size);
memcpy(ce->name, path, namelen);
ce->ce_mode = create_ce_mode(mode);
- ce->ce_flags = namelen;
+ ce->ce_flags = create_ce_flags(0);
+ ce->ce_namelen = namelen;
if (S_ISGITLINK(mode)) {
const char *s = buf;
@@ -3612,6 +3872,33 @@ static void create_one_file(char *path, unsigned mode, const char *buf, unsigned
die_errno(_("unable to write file '%s' mode %o"), path, mode);
}
+static void add_conflicted_stages_file(struct patch *patch)
+{
+ int stage, namelen;
+ unsigned ce_size, mode;
+ struct cache_entry *ce;
+
+ if (!update_index)
+ return;
+ namelen = strlen(patch->new_name);
+ ce_size = cache_entry_size(namelen);
+ mode = patch->new_mode ? patch->new_mode : (S_IFREG | 0644);
+
+ remove_file_from_cache(patch->new_name);
+ for (stage = 1; stage < 4; stage++) {
+ if (is_null_sha1(patch->threeway_stage[stage - 1]))
+ continue;
+ ce = xcalloc(1, ce_size);
+ memcpy(ce->name, patch->new_name, namelen);
+ ce->ce_mode = create_ce_mode(mode);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = namelen;
+ hashcpy(ce->sha1, patch->threeway_stage[stage - 1]);
+ if (add_cache_entry(ce, ADD_CACHE_OK_TO_ADD) < 0)
+ die(_("unable to add cache entry for %s"), patch->new_name);
+ }
+}
+
static void create_file(struct patch *patch)
{
char *path = patch->new_name;
@@ -3622,7 +3909,11 @@ static void create_file(struct patch *patch)
if (!mode)
mode = S_IFREG | 0644;
create_one_file(path, mode, buf, size);
- add_index_file(path, mode, buf, size);
+
+ if (patch->conflicted_threeway)
+ add_conflicted_stages_file(patch);
+ else
+ add_index_file(path, mode, buf, size);
}
/* phase zero is to remove, phase one is to create */
@@ -3724,6 +4015,7 @@ static int write_out_results(struct patch *list)
int phase;
int errs = 0;
struct patch *l;
+ struct string_list cpath = STRING_LIST_INIT_DUP;
for (phase = 0; phase < 2; phase++) {
l = list;
@@ -3732,12 +4024,30 @@ static int write_out_results(struct patch *list)
errs = 1;
else {
write_out_one_result(l, phase);
- if (phase == 1 && write_out_one_reject(l))
- errs = 1;
+ if (phase == 1) {
+ if (write_out_one_reject(l))
+ errs = 1;
+ if (l->conflicted_threeway) {
+ string_list_append(&cpath, l->new_name);
+ errs = 1;
+ }
+ }
}
l = l->next;
}
}
+
+ if (cpath.nr) {
+ struct string_list_item *item;
+
+ sort_string_list(&cpath);
+ for_each_string_list_item(item, &cpath)
+ fprintf(stderr, "U %s\n", item->string);
+ string_list_clear(&cpath, 0);
+
+ rerere(0);
+ }
+
return errs;
}
@@ -3860,8 +4170,12 @@ static int apply_patch(int fd, const char *filename, int options)
!apply_with_reject)
exit(1);
- if (apply && write_out_results(list))
- exit(1);
+ if (apply && write_out_results(list)) {
+ if (apply_with_reject)
+ exit(1);
+ /* with --3way, we still need to write the index out */
+ return 1;
+ }
if (fake_ancestor)
build_fake_ancestor(list, fake_ancestor);
@@ -3994,6 +4308,8 @@ int cmd_apply(int argc, const char **argv, const char *prefix_)
N_("apply a patch without touching the working tree")),
OPT_BOOLEAN(0, "apply", &force_apply,
N_("also apply the patch (use with --stat/--summary/--check)")),
+ OPT_BOOL('3', "3way", &threeway,
+ N_( "attempt three-way merge if a patch does not apply")),
OPT_FILENAME(0, "build-fake-ancestor", &fake_ancestor,
N_("build a temporary index based on embedded index information")),
{ OPTION_CALLBACK, 'z', NULL, NULL, NULL,
@@ -4042,6 +4358,15 @@ int cmd_apply(int argc, const char **argv, const char *prefix_)
argc = parse_options(argc, argv, prefix, builtin_apply_options,
apply_usage, 0);
+ if (apply_with_reject && threeway)
+ die("--reject and --3way cannot be used together.");
+ if (cached && threeway)
+ die("--cached and --3way cannot be used together.");
+ if (threeway) {
+ if (is_not_gitdir)
+ die(_("--3way outside a repository"));
+ check_index = 1;
+ }
if (apply_with_reject)
apply = apply_verbosely = 1;
if (!force_apply && (diffstat || numstat || summary || check || fake_ancestor))
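Taken together this teaches git apply a fallback in the spirit of git am -3: when straight application fails and the preimage blob named on the patch's index line exists in the repository, the patch is replayed as an in-core three-way merge, leaving conflict markers in the file and stages 1-3 in the index (and feeding rerere). A hypothetical session, file names made up:

    $ git apply --3way fix.patch
    Falling back to three-way merge...
    Applied patch to 'foo.c' with conflicts.
    U foo.c
    $ git status    # foo.c shows up as unmerged; resolve it, then "git add foo.c"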
diff --git a/builtin/blame.c b/builtin/blame.c
index 960c58d..0d50273 100644
--- a/builtin/blame.c
+++ b/builtin/blame.c
@@ -2171,7 +2171,8 @@ static struct commit *fake_working_tree_commit(struct diff_options *opt,
ce = xcalloc(1, size);
hashcpy(ce->sha1, origin->blob_sha1);
memcpy(ce->name, path, len);
- ce->ce_flags = create_ce_flags(len, 0);
+ ce->ce_flags = create_ce_flags(0);
+ ce->ce_namelen = len;
ce->ce_mode = create_ce_mode(mode);
add_cache_entry(ce, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
diff --git a/builtin/cat-file.c b/builtin/cat-file.c
index 36a9104..af74e77 100644
--- a/builtin/cat-file.c
+++ b/builtin/cat-file.c
@@ -91,7 +91,7 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
unsigned long size;
struct object_context obj_context;
- if (get_sha1_with_context(obj_name, sha1, &obj_context))
+ if (get_sha1_with_context(obj_name, 0, sha1, &obj_context))
die("Not a valid object name %s", obj_name);
buf = NULL;
diff --git a/builtin/checkout.c b/builtin/checkout.c
index e060efb..d812219 100644
--- a/builtin/checkout.c
+++ b/builtin/checkout.c
@@ -73,7 +73,8 @@ static int update_some(const unsigned char *sha1, const char *base, int baselen,
hashcpy(ce->sha1, sha1);
memcpy(ce->name, base, baselen);
memcpy(ce->name + baselen, pathname, len - baselen);
- ce->ce_flags = create_ce_flags(len, 0) | CE_UPDATE;
+ ce->ce_flags = create_ce_flags(0) | CE_UPDATE;
+ ce->ce_namelen = len;
ce->ce_mode = create_ce_mode(mode);
add_cache_entry(ce, ADD_CACHE_OK_TO_ADD | ADD_CACHE_OK_TO_REPLACE);
return 0;
diff --git a/builtin/clone.c b/builtin/clone.c
index 920ef7f..e314b0b 100644
--- a/builtin/clone.c
+++ b/builtin/clone.c
@@ -38,7 +38,7 @@ static const char * const builtin_clone_usage[] = {
};
static int option_no_checkout, option_bare, option_mirror, option_single_branch = -1;
-static int option_local, option_no_hardlinks, option_shared, option_recursive;
+static int option_local = -1, option_no_hardlinks, option_shared, option_recursive;
static char *option_template, *option_depth;
static char *option_origin = NULL;
static char *option_branch = NULL;
@@ -70,8 +70,8 @@ static struct option builtin_clone_options[] = {
PARSE_OPT_NOARG | PARSE_OPT_HIDDEN },
OPT_BOOLEAN(0, "mirror", &option_mirror,
"create a mirror repository (implies bare)"),
- OPT_BOOLEAN('l', "local", &option_local,
- "to clone from a local repository"),
+ OPT_BOOL('l', "local", &option_local,
+ "to clone from a local repository"),
OPT_BOOLEAN(0, "no-hardlinks", &option_no_hardlinks,
"don't use local hardlinks, always copy"),
OPT_BOOLEAN('s', "shared", &option_shared,
@@ -342,7 +342,7 @@ static void copy_or_link_directory(struct strbuf *src, struct strbuf *dest,
if (!option_no_hardlinks) {
if (!link(src->buf, dest->buf))
continue;
- if (option_local)
+ if (option_local > 0)
die_errno(_("failed to create link '%s'"), dest->buf);
option_no_hardlinks = 1;
}
@@ -671,7 +671,7 @@ int cmd_clone(int argc, const char **argv, const char *prefix)
die(_("repository '%s' does not exist"), repo_name);
else
repo = repo_name;
- is_local = path && !is_bundle;
+ is_local = option_local != 0 && path && !is_bundle;
if (is_local && option_depth)
warning(_("--depth is ignored in local clones; use file:// instead."));
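The tri-state default (-1) lets the three behaviours be told apart: by default a plain path still gets the local optimization, --no-local forces the generic transport machinery even for a path, and an explicit -l/--local keeps treating a failed hard link as fatal rather than silently falling back to copying. For instance:

    git clone /srv/repo.git dst             # auto-detected local clone
    git clone --no-local /srv/repo.git dst  # force the regular transport
    git clone -l /srv/repo.git dst          # local; a failed hard link is fatal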
diff --git a/builtin/commit-tree.c b/builtin/commit-tree.c
index a0df12c..eac901a 100644
--- a/builtin/commit-tree.c
+++ b/builtin/commit-tree.c
@@ -54,7 +54,7 @@ int cmd_commit_tree(int argc, const char **argv, const char *prefix)
unsigned char sha1[20];
if (argc <= ++i)
usage(commit_tree_usage);
- if (get_sha1(argv[i], sha1))
+ if (get_sha1_commit(argv[i], sha1))
die("Not a valid object name %s", argv[i]);
assert_sha1_type(sha1, OBJ_COMMIT);
new_parent(lookup_commit(sha1), &parents);
@@ -101,7 +101,7 @@ int cmd_commit_tree(int argc, const char **argv, const char *prefix)
continue;
}
- if (get_sha1(arg, tree_sha1))
+ if (get_sha1_tree(arg, tree_sha1))
die("Not a valid object name %s", arg);
if (got_tree)
die("Cannot give more than one trees");
diff --git a/builtin/commit.c b/builtin/commit.c
index 95eeab1..20cef95 100644
--- a/builtin/commit.c
+++ b/builtin/commit.c
@@ -725,7 +725,7 @@ static int prepare_to_commit(const char *index_file, const char *prefix,
strbuf_release(&sb);
/* This checks if committer ident is explicitly given */
- strbuf_addstr(&committer_ident, git_committer_info(0));
+ strbuf_addstr(&committer_ident, git_committer_info(IDENT_STRICT));
if (use_editor && include_status) {
char *ai_tmp, *ci_tmp;
if (whence != FROM_COMMIT)
diff --git a/builtin/config.c b/builtin/config.c
index 33c8820..8cd08da 100644
--- a/builtin/config.c
+++ b/builtin/config.c
@@ -161,7 +161,7 @@ static int show_config(const char *key_, const char *value_, void *cb)
static int get_value(const char *key_, const char *regex_)
{
int ret = -1;
- char *global = NULL, *repo_config = NULL;
+ char *global = NULL, *xdg = NULL, *repo_config = NULL;
const char *system_wide = NULL, *local;
struct config_include_data inc = CONFIG_INCLUDE_INIT;
config_fn_t fn;
@@ -169,12 +169,10 @@ static int get_value(const char *key_, const char *regex_)
local = given_config_file;
if (!local) {
- const char *home = getenv("HOME");
local = repo_config = git_pathdup("config");
- if (home)
- global = xstrdup(mkpath("%s/.gitconfig", home));
if (git_config_system())
system_wide = git_etc_gitconfig();
+ home_config_paths(&global, &xdg, "config");
}
if (use_key_regexp) {
@@ -229,6 +227,8 @@ static int get_value(const char *key_, const char *regex_)
if (do_all && system_wide)
git_config_from_file(fn, system_wide, data);
+ if (do_all && xdg)
+ git_config_from_file(fn, xdg, data);
if (do_all && global)
git_config_from_file(fn, global, data);
if (do_all)
@@ -238,6 +238,8 @@ static int get_value(const char *key_, const char *regex_)
git_config_from_file(fn, local, data);
if (!do_all && !seen && global)
git_config_from_file(fn, global, data);
+ if (!do_all && !seen && xdg)
+ git_config_from_file(fn, xdg, data);
if (!do_all && !seen && system_wide)
git_config_from_file(fn, system_wide, data);
@@ -255,6 +257,7 @@ static int get_value(const char *key_, const char *regex_)
free_strings:
free(repo_config);
free(global);
+ free(xdg);
return ret;
}
@@ -379,13 +382,25 @@ int cmd_config(int argc, const char **argv, const char *prefix)
}
if (use_global_config) {
- char *home = getenv("HOME");
- if (home) {
- char *user_config = xstrdup(mkpath("%s/.gitconfig", home));
- given_config_file = user_config;
- } else {
+ char *user_config = NULL;
+ char *xdg_config = NULL;
+
+ home_config_paths(&user_config, &xdg_config, "config");
+
+ if (!user_config)
+ /*
+ * It is unknown if HOME/.gitconfig exists, so
+ * we do not know if we should write to XDG
+ * location; error out even if XDG_CONFIG_HOME
+ * is set and points at a sane location.
+ */
die("$HOME not set");
- }
+
+ if (access(user_config, R_OK) &&
+ xdg_config && !access(xdg_config, R_OK))
+ given_config_file = xdg_config;
+ else
+ given_config_file = user_config;
}
else if (use_system_config)
given_config_file = git_etc_gitconfig();
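A single value is therefore searched for in the repository config, then ~/.gitconfig, then the XDG file, then the system file, first match winning; for writing, --global prefers ~/.gitconfig and falls back to $XDG_CONFIG_HOME/git/config only when the former is unreadable but the latter exists. A sketch, assuming $HOME is set and XDG_CONFIG_HOME is unset (illustration only):

    rm -f ~/.gitconfig
    mkdir -p ~/.config/git && touch ~/.config/git/config
    git config --global color.ui auto
    cat ~/.config/git/config    # the new value lands in the XDG file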
diff --git a/builtin/credential.c b/builtin/credential.c
new file mode 100644
index 0000000..0412fa0
--- /dev/null
+++ b/builtin/credential.c
@@ -0,0 +1,31 @@
+#include "git-compat-util.h"
+#include "credential.h"
+#include "builtin.h"
+
+static const char usage_msg[] =
+ "git credential [fill|approve|reject]";
+
+int cmd_credential(int argc, const char **argv, const char *prefix)
+{
+ const char *op;
+ struct credential c = CREDENTIAL_INIT;
+
+ op = argv[1];
+ if (!op)
+ usage(usage_msg);
+
+ if (credential_read(&c, stdin) < 0)
+ die("unable to read credential from stdin");
+
+ if (!strcmp(op, "fill")) {
+ credential_fill(&c);
+ credential_write(&c, stdout);
+ } else if (!strcmp(op, "approve")) {
+ credential_approve(&c);
+ } else if (!strcmp(op, "reject")) {
+ credential_reject(&c);
+ } else {
+ usage(usage_msg);
+ }
+ return 0;
+}
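The new builtin speaks the same protocol git uses with its credential helpers: key=value pairs on stdin, terminated by a blank line, with the (possibly completed) description echoed on stdout. For example, host name illustrative:

    $ printf 'protocol=https\nhost=example.com\n\n' | git credential fill
    protocol=https
    host=example.com
    username=<from a helper or an interactive prompt>
    password=<likewise>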
diff --git a/builtin/help.c b/builtin/help.c
index 43d3c84..efea4f5 100644
--- a/builtin/help.c
+++ b/builtin/help.c
@@ -12,6 +12,10 @@
#include "column.h"
#include "help.h"
+#ifndef DEFAULT_HELP_FORMAT
+#define DEFAULT_HELP_FORMAT "man"
+#endif
+
static struct man_viewer_list {
struct man_viewer_list *next;
char name[FLEX_ARRAY];
@@ -30,6 +34,8 @@ enum help_format {
HELP_FORMAT_WEB
};
+static const char *html_path;
+
static int show_all = 0;
static unsigned int colopts;
static enum help_format help_format = HELP_FORMAT_NONE;
@@ -261,6 +267,12 @@ static int git_help_config(const char *var, const char *value, void *cb)
help_format = parse_help_format(value);
return 0;
}
+ if (!strcmp(var, "help.htmlpath")) {
+ if (!value)
+ return config_error_nonbool(var);
+ html_path = xstrdup(value);
+ return 0;
+ }
if (!strcmp(var, "man.viewer")) {
if (!value)
return config_error_nonbool(var);
@@ -383,12 +395,15 @@ static void show_info_page(const char *git_cmd)
static void get_html_page_path(struct strbuf *page_path, const char *page)
{
struct stat st;
- const char *html_path = system_path(GIT_HTML_PATH);
+ if (!html_path)
+ html_path = system_path(GIT_HTML_PATH);
/* Check that we have a git documentation directory. */
- if (stat(mkpath("%s/git.html", html_path), &st)
- || !S_ISREG(st.st_mode))
- die(_("'%s': not a documentation directory."), html_path);
+ if (!strstr(html_path, "://")) {
+ if (stat(mkpath("%s/git.html", html_path), &st)
+ || !S_ISREG(st.st_mode))
+ die("'%s': not a documentation directory.", html_path);
+ }
strbuf_init(page_path, 0);
strbuf_addf(page_path, "%s/%s.html", html_path, page);
@@ -447,6 +462,8 @@ int cmd_help(int argc, const char **argv, const char *prefix)
if (parsed_help_format != HELP_FORMAT_NONE)
help_format = parsed_help_format;
+ if (help_format == HELP_FORMAT_NONE)
+ help_format = parse_help_format(DEFAULT_HELP_FORMAT);
alias = alias_lookup(argv[0]);
if (alias && !is_git_command(argv[0])) {
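Because an html_path containing "://" skips the local-directory check, help.htmlpath may now name a web location as well as a directory; the page is still composed as <htmlpath>/<page>.html. A sketch with a placeholder URL:

    git config --global help.format web
    git config --global help.htmlpath https://example.com/git-docs
    git help rebase   # opens https://example.com/git-docs/git-rebase.html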
diff --git a/builtin/index-pack.c b/builtin/index-pack.c
index 4705478..953dd30 100644
--- a/builtin/index-pack.c
+++ b/builtin/index-pack.c
@@ -9,6 +9,7 @@
#include "progress.h"
#include "fsck.h"
#include "exec_cmd.h"
+#include "streaming.h"
#include "thread-utils.h"
static const char index_pack_usage[] =
@@ -384,30 +385,62 @@ static void unlink_base_data(struct base_data *c)
free_base_data(c);
}
-static void *unpack_entry_data(unsigned long offset, unsigned long size)
+static int is_delta_type(enum object_type type)
+{
+ return (type == OBJ_REF_DELTA || type == OBJ_OFS_DELTA);
+}
+
+static void *unpack_entry_data(unsigned long offset, unsigned long size,
+ enum object_type type, unsigned char *sha1)
{
+ static char fixed_buf[8192];
int status;
git_zstream stream;
- void *buf = xmalloc(size);
+ void *buf;
+ git_SHA_CTX c;
+ char hdr[32];
+ int hdrlen;
+
+ if (!is_delta_type(type)) {
+ hdrlen = sprintf(hdr, "%s %lu", typename(type), size) + 1;
+ git_SHA1_Init(&c);
+ git_SHA1_Update(&c, hdr, hdrlen);
+ } else
+ sha1 = NULL;
+ if (type == OBJ_BLOB && size > big_file_threshold)
+ buf = fixed_buf;
+ else
+ buf = xmalloc(size);
memset(&stream, 0, sizeof(stream));
git_inflate_init(&stream);
stream.next_out = buf;
- stream.avail_out = size;
+ stream.avail_out = buf == fixed_buf ? sizeof(fixed_buf) : size;
do {
+ unsigned char *last_out = stream.next_out;
stream.next_in = fill(1);
stream.avail_in = input_len;
status = git_inflate(&stream, 0);
use(input_len - stream.avail_in);
+ if (sha1)
+ git_SHA1_Update(&c, last_out, stream.next_out - last_out);
+ if (buf == fixed_buf) {
+ stream.next_out = buf;
+ stream.avail_out = sizeof(fixed_buf);
+ }
} while (status == Z_OK);
if (stream.total_out != size || status != Z_STREAM_END)
bad_object(offset, _("inflate returned %d"), status);
git_inflate_end(&stream);
- return buf;
+ if (sha1)
+ git_SHA1_Final(sha1, &c);
+ return buf == fixed_buf ? NULL : buf;
}
-static void *unpack_raw_entry(struct object_entry *obj, union delta_base *delta_base)
+static void *unpack_raw_entry(struct object_entry *obj,
+ union delta_base *delta_base,
+ unsigned char *sha1)
{
unsigned char *p;
unsigned long size, c;
@@ -467,12 +500,14 @@ static void *unpack_raw_entry(struct object_entry *obj, union delta_base *delta_
}
obj->hdr_size = consumed_bytes - obj->idx.offset;
- data = unpack_entry_data(obj->idx.offset, obj->size);
+ data = unpack_entry_data(obj->idx.offset, obj->size, obj->type, sha1);
obj->idx.crc32 = input_crc32;
return data;
}
-static void *get_data_from_pack(struct object_entry *obj)
+static void *unpack_data(struct object_entry *obj,
+ int (*consume)(const unsigned char *, unsigned long, void *),
+ void *cb_data)
{
off_t from = obj[0].idx.offset + obj[0].hdr_size;
unsigned long len = obj[1].idx.offset - from;
@@ -480,13 +515,13 @@ static void *get_data_from_pack(struct object_entry *obj)
git_zstream stream;
int status;
- data = xmalloc(obj->size);
+ data = xmalloc(consume ? 64*1024 : obj->size);
inbuf = xmalloc((len < 64*1024) ? len : 64*1024);
memset(&stream, 0, sizeof(stream));
git_inflate_init(&stream);
stream.next_out = data;
- stream.avail_out = obj->size;
+ stream.avail_out = consume ? 64*1024 : obj->size;
do {
ssize_t n = (len < 64*1024) ? len : 64*1024;
@@ -502,7 +537,20 @@ static void *get_data_from_pack(struct object_entry *obj)
len -= n;
stream.next_in = inbuf;
stream.avail_in = n;
- status = git_inflate(&stream, 0);
+ if (!consume)
+ status = git_inflate(&stream, 0);
+ else {
+ do {
+ status = git_inflate(&stream, 0);
+ if (consume(data, stream.next_out - data, cb_data)) {
+ free(inbuf);
+ free(data);
+ return NULL;
+ }
+ stream.next_out = data;
+ stream.avail_out = 64*1024;
+ } while (status == Z_OK && stream.avail_in);
+ }
} while (len && status == Z_OK && !stream.avail_in);
/* This has been inflated OK when first encountered, so... */
@@ -511,9 +559,18 @@ static void *get_data_from_pack(struct object_entry *obj)
git_inflate_end(&stream);
free(inbuf);
+ if (consume) {
+ free(data);
+ data = NULL;
+ }
return data;
}
+static void *get_data_from_pack(struct object_entry *obj)
+{
+ return unpack_data(obj, NULL, NULL);
+}
+
static int compare_delta_bases(const union delta_base *base1,
const union delta_base *base2,
enum object_type type1,
@@ -568,25 +625,102 @@ static void find_delta_children(const union delta_base *base,
*last_index = last;
}
-static void sha1_object(const void *data, unsigned long size,
- enum object_type type, unsigned char *sha1)
+struct compare_data {
+ struct object_entry *entry;
+ struct git_istream *st;
+ unsigned char *buf;
+ unsigned long buf_size;
+};
+
+static int compare_objects(const unsigned char *buf, unsigned long size,
+ void *cb_data)
+{
+ struct compare_data *data = cb_data;
+
+ if (data->buf_size < size) {
+ free(data->buf);
+ data->buf = xmalloc(size);
+ data->buf_size = size;
+ }
+
+ while (size) {
+ ssize_t len = read_istream(data->st, data->buf, size);
+ if (len == 0)
+ die(_("SHA1 COLLISION FOUND WITH %s !"),
+ sha1_to_hex(data->entry->idx.sha1));
+ if (len < 0)
+ die(_("unable to read %s"),
+ sha1_to_hex(data->entry->idx.sha1));
+ if (memcmp(buf, data->buf, len))
+ die(_("SHA1 COLLISION FOUND WITH %s !"),
+ sha1_to_hex(data->entry->idx.sha1));
+ size -= len;
+ buf += len;
+ }
+ return 0;
+}
+
+static int check_collision(struct object_entry *entry)
+{
+ struct compare_data data;
+ enum object_type type;
+ unsigned long size;
+
+ if (entry->size <= big_file_threshold || entry->type != OBJ_BLOB)
+ return -1;
+
+ memset(&data, 0, sizeof(data));
+ data.entry = entry;
+ data.st = open_istream(entry->idx.sha1, &type, &size, NULL);
+ if (!data.st)
+ return -1;
+ if (size != entry->size || type != entry->type)
+ die(_("SHA1 COLLISION FOUND WITH %s !"),
+ sha1_to_hex(entry->idx.sha1));
+ unpack_data(entry, compare_objects, &data);
+ close_istream(data.st);
+ free(data.buf);
+ return 0;
+}
+
+static void sha1_object(const void *data, struct object_entry *obj_entry,
+ unsigned long size, enum object_type type,
+ const unsigned char *sha1)
{
- hash_sha1_file(data, size, typename(type), sha1);
+ void *new_data = NULL;
+ int collision_test_needed;
+
+ assert(data || obj_entry);
+
read_lock();
- if (has_sha1_file(sha1)) {
+ collision_test_needed = has_sha1_file(sha1);
+ read_unlock();
+
+ if (collision_test_needed && !data) {
+ read_lock();
+ if (!check_collision(obj_entry))
+ collision_test_needed = 0;
+ read_unlock();
+ }
+ if (collision_test_needed) {
void *has_data;
enum object_type has_type;
unsigned long has_size;
+ read_lock();
+ has_type = sha1_object_info(sha1, &has_size);
+ if (has_type != type || has_size != size)
+ die(_("SHA1 COLLISION FOUND WITH %s !"), sha1_to_hex(sha1));
has_data = read_sha1_file(sha1, &has_type, &has_size);
read_unlock();
+ if (!data)
+ data = new_data = get_data_from_pack(obj_entry);
if (!has_data)
die(_("cannot read existing object %s"), sha1_to_hex(sha1));
if (size != has_size || type != has_type ||
memcmp(data, has_data, size) != 0)
die(_("SHA1 COLLISION FOUND WITH %s !"), sha1_to_hex(sha1));
free(has_data);
- } else
- read_unlock();
+ }
if (strict) {
read_lock();
@@ -601,6 +735,9 @@ static void sha1_object(const void *data, unsigned long size,
int eaten;
void *buf = (void *) data;
+ if (!buf)
+ buf = new_data = get_data_from_pack(obj_entry);
+
/*
* we do not need to free the memory here, as the
* buf is deleted by the caller.
@@ -625,11 +762,8 @@ static void sha1_object(const void *data, unsigned long size,
}
read_unlock();
}
-}
-static int is_delta_type(enum object_type type)
-{
- return (type == OBJ_REF_DELTA || type == OBJ_OFS_DELTA);
+ free(new_data);
}
/*
@@ -711,7 +845,9 @@ static void resolve_delta(struct object_entry *delta_obj,
free(delta_data);
if (!result->data)
bad_object(delta_obj->idx.offset, _("failed to apply delta"));
- sha1_object(result->data, result->size, delta_obj->real_type,
+ hash_sha1_file(result->data, result->size,
+ typename(delta_obj->real_type), delta_obj->idx.sha1);
+ sha1_object(result->data, NULL, result->size, delta_obj->real_type,
delta_obj->idx.sha1);
counter_lock();
nr_resolved_deltas++;
@@ -841,7 +977,7 @@ static void *threaded_second_pass(void *data)
*/
static void parse_pack_objects(unsigned char *sha1)
{
- int i;
+ int i, nr_delays = 0;
struct delta_entry *delta = deltas;
struct stat st;
@@ -851,14 +987,18 @@ static void parse_pack_objects(unsigned char *sha1)
nr_objects);
for (i = 0; i < nr_objects; i++) {
struct object_entry *obj = &objects[i];
- void *data = unpack_raw_entry(obj, &delta->base);
+ void *data = unpack_raw_entry(obj, &delta->base, obj->idx.sha1);
obj->real_type = obj->type;
if (is_delta_type(obj->type)) {
nr_deltas++;
delta->obj_no = i;
delta++;
+ } else if (!data) {
+ /* large blobs, check later */
+ obj->real_type = OBJ_BAD;
+ nr_delays++;
} else
- sha1_object(data, obj->size, obj->type, obj->idx.sha1);
+ sha1_object(data, NULL, obj->size, obj->type, obj->idx.sha1);
free(data);
display_progress(progress, i+1);
}
@@ -878,6 +1018,17 @@ static void parse_pack_objects(unsigned char *sha1)
if (S_ISREG(st.st_mode) &&
lseek(input_fd, 0, SEEK_CUR) - input_len != st.st_size)
die(_("pack has junk at the end"));
+
+ for (i = 0; i < nr_objects; i++) {
+ struct object_entry *obj = &objects[i];
+ if (obj->real_type != OBJ_BAD)
+ continue;
+ obj->real_type = obj->type;
+ sha1_object(NULL, obj, obj->size, obj->type, obj->idx.sha1);
+ nr_delays--;
+ }
+ if (nr_delays)
+ die(_("confusion beyond insanity in parse_pack_objects()"));
}
/*
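The upshot is that blobs larger than core.bigFileThreshold are never inflated whole into memory while indexing: their SHA-1 is computed chunk by chunk in unpack_entry_data(), and the (rare) collision check re-reads both copies through the streaming API. The threshold is an ordinary config knob:

    git config core.bigFileThreshold 64m
    git index-pack big.pack    # blobs over 64m are streamed, not malloc'ed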
diff --git a/builtin/init-db.c b/builtin/init-db.c
index 0dacb8b..244fb7f 100644
--- a/builtin/init-db.c
+++ b/builtin/init-db.c
@@ -290,6 +290,7 @@ static int create_default_files(const char *template_path)
strcpy(path + len, "CoNfIg");
if (!access(path, F_OK))
git_config_set("core.ignorecase", "true");
+ probe_utf8_pathname_composition(path, len);
}
return reinit;
diff --git a/builtin/log.c b/builtin/log.c
index 54f24e2..ecc2793 100644
--- a/builtin/log.c
+++ b/builtin/log.c
@@ -21,6 +21,7 @@
#include "parse-options.h"
#include "branch.h"
#include "streaming.h"
+#include "version.h"
/* Set a default date-time format for git log ("log.date" config variable) */
static const char *default_date_mode = NULL;
@@ -366,6 +367,7 @@ int cmd_whatchanged(int argc, const char **argv, const char *prefix)
rev.simplify_history = 0;
memset(&opt, 0, sizeof(opt));
opt.def = "HEAD";
+ opt.revarg_opt = REVARG_COMMITTISH;
cmd_log_init(argc, argv, prefix, &rev, &opt);
if (!rev.diffopt.output_format)
rev.diffopt.output_format = DIFF_FORMAT_RAW;
@@ -556,6 +558,7 @@ int cmd_log(int argc, const char **argv, const char *prefix)
rev.always_show_header = 1;
memset(&opt, 0, sizeof(opt));
opt.def = "HEAD";
+ opt.revarg_opt = REVARG_COMMITTISH;
cmd_log_init(argc, argv, prefix, &rev, &opt);
return cmd_log_walk(&rev);
}
@@ -1131,6 +1134,7 @@ int cmd_format_patch(int argc, const char **argv, const char *prefix)
rev.subject_prefix = fmt_patch_subject_prefix;
memset(&s_r_opt, 0, sizeof(s_r_opt));
s_r_opt.def = "HEAD";
+ s_r_opt.revarg_opt = REVARG_COMMITTISH;
if (default_attach) {
rev.mime_boundary = default_attach;
diff --git a/builtin/pack-objects.c b/builtin/pack-objects.c
index ccfcbad..782e7d0 100644
--- a/builtin/pack-objects.c
+++ b/builtin/pack-objects.c
@@ -16,6 +16,7 @@
#include "list-objects.h"
#include "progress.h"
#include "refs.h"
+#include "streaming.h"
#include "thread-utils.h"
static const char *pack_usage[] = {
@@ -150,6 +151,46 @@ static unsigned long do_compress(void **pptr, unsigned long size)
return stream.total_out;
}
+static unsigned long write_large_blob_data(struct git_istream *st, struct sha1file *f,
+ const unsigned char *sha1)
+{
+ git_zstream stream;
+ unsigned char ibuf[1024 * 16];
+ unsigned char obuf[1024 * 16];
+ unsigned long olen = 0;
+
+ memset(&stream, 0, sizeof(stream));
+ git_deflate_init(&stream, pack_compression_level);
+
+ for (;;) {
+ ssize_t readlen;
+ int zret = Z_OK;
+ readlen = read_istream(st, ibuf, sizeof(ibuf));
+ if (readlen == -1)
+ die(_("unable to read %s"), sha1_to_hex(sha1));
+
+ stream.next_in = ibuf;
+ stream.avail_in = readlen;
+ while ((stream.avail_in || readlen == 0) &&
+ (zret == Z_OK || zret == Z_BUF_ERROR)) {
+ stream.next_out = obuf;
+ stream.avail_out = sizeof(obuf);
+ zret = git_deflate(&stream, readlen ? 0 : Z_FINISH);
+ sha1write(f, obuf, stream.next_out - obuf);
+ olen += stream.next_out - obuf;
+ }
+ if (stream.avail_in)
+ die(_("deflate error (%d)"), zret);
+ if (readlen == 0) {
+ if (zret != Z_STREAM_END)
+ die(_("deflate error (%d)"), zret);
+ break;
+ }
+ }
+ git_deflate_end(&stream);
+ return olen;
+}
+
/*
* we are going to reuse the existing object data as is. make
* sure it is not corrupt.
@@ -208,11 +249,18 @@ static unsigned long write_no_reuse_object(struct sha1file *f, struct object_ent
unsigned hdrlen;
enum object_type type;
void *buf;
+ struct git_istream *st = NULL;
if (!usable_delta) {
- buf = read_sha1_file(entry->idx.sha1, &type, &size);
- if (!buf)
- die("unable to read %s", sha1_to_hex(entry->idx.sha1));
+ if (entry->type == OBJ_BLOB &&
+ entry->size > big_file_threshold &&
+ (st = open_istream(entry->idx.sha1, &type, &size, NULL)) != NULL)
+ buf = NULL;
+ else {
+ buf = read_sha1_file(entry->idx.sha1, &type, &size);
+ if (!buf)
+ die(_("unable to read %s"), sha1_to_hex(entry->idx.sha1));
+ }
/*
* make sure no cached delta data remains from a
* previous attempt before a pack split occurred.
@@ -233,7 +281,9 @@ static unsigned long write_no_reuse_object(struct sha1file *f, struct object_ent
OBJ_OFS_DELTA : OBJ_REF_DELTA;
}
- if (entry->z_delta_size)
+ if (st) /* large blob case, just assume we don't compress well */
+ datalen = size;
+ else if (entry->z_delta_size)
datalen = entry->z_delta_size;
else
datalen = do_compress(&buf, size);
@@ -256,6 +306,8 @@ static unsigned long write_no_reuse_object(struct sha1file *f, struct object_ent
while (ofs >>= 7)
dheader[--pos] = 128 | (--ofs & 127);
if (limit && hdrlen + sizeof(dheader) - pos + datalen + 20 >= limit) {
+ if (st)
+ close_istream(st);
free(buf);
return 0;
}
@@ -268,6 +320,8 @@ static unsigned long write_no_reuse_object(struct sha1file *f, struct object_ent
* an additional 20 bytes for the base sha1.
*/
if (limit && hdrlen + 20 + datalen + 20 >= limit) {
+ if (st)
+ close_istream(st);
free(buf);
return 0;
}
@@ -276,13 +330,20 @@ static unsigned long write_no_reuse_object(struct sha1file *f, struct object_ent
hdrlen += 20;
} else {
if (limit && hdrlen + datalen + 20 >= limit) {
+ if (st)
+ close_istream(st);
free(buf);
return 0;
}
sha1write(f, header, hdrlen);
}
- sha1write(f, buf, datalen);
- free(buf);
+ if (st) {
+ datalen = write_large_blob_data(st, f, entry->idx.sha1);
+ close_istream(st);
+ } else {
+ sha1write(f, buf, datalen);
+ free(buf);
+ }
return hdrlen + datalen;
}
@@ -2312,7 +2373,7 @@ static void get_object_list(int ac, const char **av)
}
die("not a rev '%s'", line);
}
- if (handle_revision_arg(line, &revs, flags, 1))
+ if (handle_revision_arg(line, &revs, flags, REVARG_CANNOT_BE_FILENAME))
die("bad revision '%s'", line);
}
diff --git a/builtin/reflog.c b/builtin/reflog.c
index 062d7da..b3c9e27 100644
--- a/builtin/reflog.c
+++ b/builtin/reflog.c
@@ -330,8 +330,10 @@ static int expire_reflog_ent(unsigned char *osha1, unsigned char *nsha1,
printf("keep %s", message);
return 0;
prune:
- if (!cb->newlog || cb->cmd->verbose)
- printf("%sprune %s", cb->newlog ? "" : "would ", message);
+ if (!cb->newlog)
+ printf("would prune %s", message);
+ else if (cb->cmd->verbose)
+ printf("prune %s", message);
return 0;
}
diff --git a/builtin/reset.c b/builtin/reset.c
index 4cc34c9..74442bd 100644
--- a/builtin/reset.c
+++ b/builtin/reset.c
@@ -276,7 +276,7 @@ int cmd_reset(int argc, const char **argv, const char *prefix)
* Otherwise, argv[i] could be either <rev> or <paths> and
* has to be unambiguous.
*/
- else if (!get_sha1(argv[i], sha1)) {
+ else if (!get_sha1_committish(argv[i], sha1)) {
/*
* Ok, argv[i] looks like a rev; it should not
* be a filename.
@@ -289,9 +289,15 @@ int cmd_reset(int argc, const char **argv, const char *prefix)
}
}
- if (get_sha1(rev, sha1))
+ if (get_sha1_committish(rev, sha1))
die(_("Failed to resolve '%s' as a valid ref."), rev);
+ /*
+ * NOTE: As "git reset $treeish -- $path" should be usable on
+ * any tree-ish, this is not strictly correct. We are not
+ * moving the HEAD to any commit; we are merely resetting the
+ * entries in the index to that of a treeish.
+ */
commit = lookup_commit_reference(sha1);
if (!commit)
die(_("Could not parse object '%s'."), rev);
diff --git a/builtin/rev-parse.c b/builtin/rev-parse.c
index 13495b8..32788a9 100644
--- a/builtin/rev-parse.c
+++ b/builtin/rev-parse.c
@@ -195,6 +195,12 @@ static int anti_reference(const char *refname, const unsigned char *sha1, int fl
return 0;
}
+static int show_abbrev(const unsigned char *sha1, void *cb_data)
+{
+ show_rev(NORMAL, sha1, NULL);
+ return 0;
+}
+
static void show_datestring(const char *flag, const char *datestr)
{
static char buffer[100];
@@ -238,7 +244,7 @@ static int try_difference(const char *arg)
next = "HEAD";
if (dotdot == arg)
this = "HEAD";
- if (!get_sha1(this, sha1) && !get_sha1(next, end)) {
+ if (!get_sha1_committish(this, sha1) && !get_sha1_committish(next, end)) {
show_rev(NORMAL, end, next);
show_rev(symmetric ? NORMAL : REVERSED, sha1, this);
if (symmetric) {
@@ -278,7 +284,7 @@ static int try_parent_shorthands(const char *arg)
return 0;
*dotdot = 0;
- if (get_sha1(arg, sha1))
+ if (get_sha1_committish(arg, sha1))
return 0;
if (!parents_only)
@@ -589,6 +595,10 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
for_each_ref(show_reference, NULL);
continue;
}
+ if (!prefixcmp(arg, "--disambiguate=")) {
+ for_each_abbrev(arg + 15, show_abbrev, NULL);
+ continue;
+ }
if (!strcmp(arg, "--bisect")) {
for_each_ref_in("refs/bisect/bad", show_reference, NULL);
for_each_ref_in("refs/bisect/good", anti_reference, NULL);
diff --git a/builtin/update-index.c b/builtin/update-index.c
index 5a4e9ea..4ce341c 100644
--- a/builtin/update-index.c
+++ b/builtin/update-index.c
@@ -95,7 +95,8 @@ static int add_one_path(struct cache_entry *old, const char *path, int len, stru
size = cache_entry_size(len);
ce = xcalloc(1, size);
memcpy(ce->name, path, len);
- ce->ce_flags = len;
+ ce->ce_flags = create_ce_flags(0);
+ ce->ce_namelen = len;
fill_stat_cache_info(ce, st);
ce->ce_mode = ce_mode_from_stat(old, st->st_mode);
@@ -229,7 +230,8 @@ static int add_cacheinfo(unsigned int mode, const unsigned char *sha1,
hashcpy(ce->sha1, sha1);
memcpy(ce->name, path, len);
- ce->ce_flags = create_ce_flags(len, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = len;
ce->ce_mode = create_ce_mode(mode);
if (assume_unchanged)
ce->ce_flags |= CE_VALID;
@@ -427,7 +429,8 @@ static struct cache_entry *read_one_ent(const char *which,
hashcpy(ce->sha1, sha1);
memcpy(ce->name, path, namelen);
- ce->ce_flags = create_ce_flags(namelen, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = namelen;
ce->ce_mode = create_ce_mode(mode);
return ce;
}
diff --git a/cache.h b/cache.h
index 8958104..67f28b4 100644
--- a/cache.h
+++ b/cache.h
@@ -128,13 +128,13 @@ struct cache_entry {
unsigned int ce_gid;
unsigned int ce_size;
unsigned int ce_flags;
+ unsigned int ce_namelen;
unsigned char sha1[20];
struct cache_entry *next;
struct cache_entry *dir_next;
char name[FLEX_ARRAY]; /* more */
};
-#define CE_NAMEMASK (0x0fff)
#define CE_STAGEMASK (0x3000)
#define CE_EXTENDED (0x4000)
#define CE_VALID (0x8000)
@@ -198,21 +198,12 @@ static inline void copy_cache_entry(struct cache_entry *dst, struct cache_entry
dst->ce_flags = (dst->ce_flags & ~CE_STATE_MASK) | state;
}
-static inline unsigned create_ce_flags(size_t len, unsigned stage)
+static inline unsigned create_ce_flags(unsigned stage)
{
- if (len >= CE_NAMEMASK)
- len = CE_NAMEMASK;
- return (len | (stage << CE_STAGESHIFT));
-}
-
-static inline size_t ce_namelen(const struct cache_entry *ce)
-{
- size_t len = ce->ce_flags & CE_NAMEMASK;
- if (len < CE_NAMEMASK)
- return len;
- return strlen(ce->name + CE_NAMEMASK) + CE_NAMEMASK;
+ return (stage << CE_STAGESHIFT);
}
+#define ce_namelen(ce) ((ce)->ce_namelen)
#define ce_size(ce) cache_entry_size(ce_namelen(ce))
#define ce_stage(ce) ((CE_STAGEMASK & (ce)->ce_flags) >> CE_STAGESHIFT)
#define ce_uptodate(ce) ((ce)->ce_flags & CE_UPTODATE)
@@ -451,6 +442,7 @@ extern int discard_index(struct index_state *);
extern int unmerged_index(const struct index_state *);
extern int verify_path(const char *path);
extern struct cache_entry *index_name_exists(struct index_state *istate, const char *name, int namelen, int igncase);
+extern int index_name_stage_pos(const struct index_state *, const char *name, int namelen, int stage);
extern int index_name_pos(const struct index_state *, const char *name, int namelen);
#define ADD_CACHE_OK_TO_ADD 1 /* Ok to add */
#define ADD_CACHE_OK_TO_REPLACE 2 /* Ok to replace file/directory */
@@ -563,6 +555,7 @@ extern int read_replace_refs;
extern int fsync_object_files;
extern int core_preload_index;
extern int core_apply_sparse_checkout;
+extern int precomposed_unicode;
enum branch_track {
BRANCH_TRACK_UNSPECIFIED = -1,
@@ -622,6 +615,8 @@ extern char *git_snpath(char *buf, size_t n, const char *fmt, ...)
__attribute__((format (printf, 3, 4)));
extern char *git_pathdup(const char *fmt, ...)
__attribute__((format (printf, 1, 2)));
+extern char *mkpathdup(const char *fmt, ...)
+ __attribute__((format (printf, 1, 2)));
/* Return a statically allocated filename matching the sha1 signature */
extern char *mkpath(const char *fmt, ...) __attribute__((format (printf, 1, 2)));
@@ -711,6 +706,7 @@ int set_shared_perm(const char *path, int mode);
int safe_create_leading_directories(char *path);
int safe_create_leading_directories_const(const char *path);
int mkdir_in_gitdir(const char *path);
+extern void home_config_paths(char **global, char **xdg, char *file);
extern char *expand_user_path(const char *path);
const char *enter_repo(const char *path, int strict);
static inline int is_absolute_path(const char *path)
@@ -786,17 +782,25 @@ struct object_context {
unsigned mode;
};
+#define GET_SHA1_QUIETLY 01
+#define GET_SHA1_COMMIT 02
+#define GET_SHA1_COMMITTISH 04
+#define GET_SHA1_TREE 010
+#define GET_SHA1_TREEISH 020
+#define GET_SHA1_BLOB 040
+#define GET_SHA1_ONLY_TO_DIE 04000
+
extern int get_sha1(const char *str, unsigned char *sha1);
-extern int get_sha1_with_mode_1(const char *str, unsigned char *sha1, unsigned *mode, int only_to_die, const char *prefix);
-static inline int get_sha1_with_mode(const char *str, unsigned char *sha1, unsigned *mode)
-{
- return get_sha1_with_mode_1(str, sha1, mode, 0, NULL);
-}
-extern int get_sha1_with_context_1(const char *name, unsigned char *sha1, struct object_context *orc, int only_to_die, const char *prefix);
-static inline int get_sha1_with_context(const char *str, unsigned char *sha1, struct object_context *orc)
-{
- return get_sha1_with_context_1(str, sha1, orc, 0, NULL);
-}
+extern int get_sha1_commit(const char *str, unsigned char *sha1);
+extern int get_sha1_committish(const char *str, unsigned char *sha1);
+extern int get_sha1_tree(const char *str, unsigned char *sha1);
+extern int get_sha1_treeish(const char *str, unsigned char *sha1);
+extern int get_sha1_blob(const char *str, unsigned char *sha1);
+extern void maybe_die_on_misspelt_object_name(const char *name, const char *prefix);
+extern int get_sha1_with_context(const char *str, unsigned flags, unsigned char *sha1, struct object_context *orc);
+
+typedef int each_abbrev_fn(const unsigned char *sha1, void *);
+extern int for_each_abbrev(const char *prefix, each_abbrev_fn, void *);
/*
* Try to read a SHA1 in hexadecimal format from the 40 characters
@@ -860,6 +864,7 @@ extern int validate_headref(const char *ref);
extern int base_name_compare(const char *name1, int len1, int mode1, const char *name2, int len2, int mode2);
extern int df_name_compare(const char *name1, int len1, int mode1, const char *name2, int len2, int mode2);
extern int cache_name_compare(const char *name1, int len1, const char *name2, int len2);
+extern int cache_name_stage_compare(const char *name1, int len1, int stage1, const char *name2, int len2, int stage2);
extern void *read_object_with_reference(const unsigned char *sha1,
const char *required_type,
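
With the GET_SHA1_* flags, callers can state up front what kind of object a
name must resolve to, instead of resolving first and type-checking afterwards.
A minimal sketch (the object name is illustrative):

	unsigned char sha1[20];
	struct object_context oc;

	/* via a typed convenience wrapper ... */
	if (get_sha1_treeish("HEAD^{tree}", sha1))
		die("not a tree-ish");

	/* ... or via the flag-driven entry point */
	if (get_sha1_with_context("HEAD^{tree}", GET_SHA1_TREEISH, sha1, &oc))
		die("not a tree-ish");
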
diff --git a/command-list.txt b/command-list.txt
index 14ea67a..ec64cac 100644
--- a/command-list.txt
+++ b/command-list.txt
@@ -25,6 +25,7 @@ git-commit mainporcelain common
git-commit-tree plumbingmanipulators
git-config ancillarymanipulators
git-count-objects ancillaryinterrogators
+git-credential purehelpers
git-cvsexportcommit foreignscminterface
git-cvsimport foreignscminterface
git-cvsserver foreignscminterface
diff --git a/commit.c b/commit.c
index 8248a99..42af4c1 100644
--- a/commit.c
+++ b/commit.c
@@ -68,7 +68,7 @@ struct commit *lookup_commit_reference_by_name(const char *name)
unsigned char sha1[20];
struct commit *commit;
- if (get_sha1(name, sha1))
+ if (get_sha1_committish(name, sha1))
return NULL;
commit = lookup_commit_reference(sha1);
if (!commit || parse_commit(commit))
diff --git a/compat/precompose_utf8.c b/compat/precompose_utf8.c
new file mode 100644
index 0000000..d40d1b3
--- /dev/null
+++ b/compat/precompose_utf8.c
@@ -0,0 +1,190 @@
+/*
+ * Converts filenames from decomposed unicode into precomposed unicode.
+ * Used on MacOS X.
+*/
+
+
+#define PRECOMPOSE_UNICODE_C
+
+#include "cache.h"
+#include "utf8.h"
+#include "precompose_utf8.h"
+
+typedef char *iconv_ibp;
+const static char *repo_encoding = "UTF-8";
+const static char *path_encoding = "UTF-8-MAC";
+
+
+static size_t has_utf8(const char *s, size_t maxlen, size_t *strlen_c)
+{
+ const uint8_t *utf8p = (const uint8_t*) s;
+ size_t strlen_chars = 0;
+ size_t ret = 0;
+
+ if ((!utf8p) || (!*utf8p)) {
+ return 0;
+ }
+
+ while((*utf8p) && maxlen) {
+ if (*utf8p & 0x80)
+ ret++;
+ strlen_chars++;
+ utf8p++;
+ maxlen--;
+ }
+ if (strlen_c)
+ *strlen_c = strlen_chars;
+
+ return ret;
+}
+
+
+void probe_utf8_pathname_composition(char *path, int len)
+{
+ const static char *auml_nfc = "\xc3\xa4";
+ const static char *auml_nfd = "\x61\xcc\x88";
+ int output_fd;
+ if (precomposed_unicode != -1)
+ return; /* We found it defined in the global config, respect it */
+ path[len] = 0;
+ strcpy(path + len, auml_nfc);
+ output_fd = open(path, O_CREAT|O_EXCL|O_RDWR, 0600);
+ if (output_fd >=0) {
+ close(output_fd);
+ path[len] = 0;
+ strcpy(path + len, auml_nfd);
+ /* Indicate to the user that this can be configured to true */
+ if (0 == access(path, R_OK))
+ git_config_set("core.precomposeunicode", "false");
+ /* To be backward compatible, set precomposed_unicode to 0 */
+ precomposed_unicode = 0;
+ path[len] = 0;
+ strcpy(path + len, auml_nfc);
+ unlink(path);
+ }
+}
+
+
+void precompose_argv(int argc, const char **argv)
+{
+ int i = 0;
+ const char *oldarg;
+ char *newarg;
+ iconv_t ic_precompose;
+
+ if (precomposed_unicode != 1)
+ return;
+
+ ic_precompose = iconv_open(repo_encoding, path_encoding);
+ if (ic_precompose == (iconv_t) -1)
+ return;
+
+ while (i < argc) {
+ size_t namelen;
+ oldarg = argv[i];
+ if (has_utf8(oldarg, (size_t)-1, &namelen)) {
+ newarg = reencode_string_iconv(oldarg, namelen, ic_precompose);
+ if (newarg)
+ argv[i] = newarg;
+ }
+ i++;
+ }
+ iconv_close(ic_precompose);
+}
+
+
+PREC_DIR *precompose_utf8_opendir(const char *dirname)
+{
+ PREC_DIR *prec_dir = xmalloc(sizeof(PREC_DIR));
+ prec_dir->dirent_nfc = xmalloc(sizeof(dirent_prec_psx));
+ prec_dir->dirent_nfc->max_name_len = sizeof(prec_dir->dirent_nfc->d_name);
+
+ prec_dir->dirp = opendir(dirname);
+ if (!prec_dir->dirp) {
+ free(prec_dir->dirent_nfc);
+ free(prec_dir);
+ return NULL;
+ } else {
+ int ret_errno = errno;
+ prec_dir->ic_precompose = iconv_open(repo_encoding, path_encoding);
+ /* if iconv_open() fails, die() in readdir() if needed */
+ errno = ret_errno;
+ }
+
+ return prec_dir;
+}
+
+struct dirent_prec_psx *precompose_utf8_readdir(PREC_DIR *prec_dir)
+{
+ struct dirent *res;
+ res = readdir(prec_dir->dirp);
+ if (res) {
+ size_t namelenz = strlen(res->d_name) + 1; /* \0 */
+ size_t new_maxlen = namelenz;
+
+ int ret_errno = errno;
+
+ if (new_maxlen > prec_dir->dirent_nfc->max_name_len) {
+ size_t new_len = sizeof(dirent_prec_psx) + new_maxlen -
+ sizeof(prec_dir->dirent_nfc->d_name);
+
+ prec_dir->dirent_nfc = xrealloc(prec_dir->dirent_nfc, new_len);
+ prec_dir->dirent_nfc->max_name_len = new_maxlen;
+ }
+
+ prec_dir->dirent_nfc->d_ino = res->d_ino;
+ prec_dir->dirent_nfc->d_type = res->d_type;
+
+ if ((precomposed_unicode == 1) && has_utf8(res->d_name, (size_t)-1, NULL)) {
+ if (prec_dir->ic_precompose == (iconv_t)-1) {
+ die("iconv_open(%s,%s) failed, but needed:\n"
+ " precomposed unicode is not supported.\n"
+ " If you wnat to use decomposed unicode, run\n"
+ " \"git config core.precomposeunicode false\"\n",
+ repo_encoding, path_encoding);
+ } else {
+ iconv_ibp cp = (iconv_ibp)res->d_name;
+ size_t inleft = namelenz;
+ char *outpos = &prec_dir->dirent_nfc->d_name[0];
+ size_t outsz = prec_dir->dirent_nfc->max_name_len;
+ size_t cnt;
+ errno = 0;
+ cnt = iconv(prec_dir->ic_precompose, &cp, &inleft, &outpos, &outsz);
+ if (errno || inleft) {
+ /*
+ * iconv() failed and errno could be E2BIG, EILSEQ, EINVAL or EBADF.
+ * MacOS X avoids illegal byte sequences.
+ * If they occur on a mounted drive (e.g. NFS) it is not worth
+ * dying over; rather let the user see the original name.
+ */
+ namelenz = 0; /* trigger strlcpy */
+ }
+ }
+ }
+ else
+ namelenz = 0;
+
+ if (!namelenz)
+ strlcpy(prec_dir->dirent_nfc->d_name, res->d_name,
+ prec_dir->dirent_nfc->max_name_len);
+
+ errno = ret_errno;
+ return prec_dir->dirent_nfc;
+ }
+ return NULL;
+}
+
+
+int precompose_utf8_closedir(PREC_DIR *prec_dir)
+{
+ int ret_value;
+ int ret_errno;
+ ret_value = closedir(prec_dir->dirp);
+ ret_errno = errno;
+ if (prec_dir->ic_precompose != (iconv_t)-1)
+ iconv_close(prec_dir->ic_precompose);
+ free(prec_dir->dirent_nfc);
+ free(prec_dir);
+ errno = ret_errno;
+ return ret_value;
+}
diff --git a/compat/precompose_utf8.h b/compat/precompose_utf8.h
new file mode 100644
index 0000000..3b73585
--- /dev/null
+++ b/compat/precompose_utf8.h
@@ -0,0 +1,45 @@
+#ifndef PRECOMPOSE_UNICODE_H
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <dirent.h>
+#include <iconv.h>
+
+
+typedef struct dirent_prec_psx {
+ ino_t d_ino; /* Posix */
+ size_t max_name_len; /* See below */
+ unsigned char d_type; /* available on all systems git runs on */
+
+ /*
+ * See http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/dirent.h.html
+ * NAME_MAX + 1 should be enough, but some systems have
+ * NAME_MAX=255 and strlen(d_name) may return 508 or 510
+ * Solution: allocate more when needed, see precompose_utf8_readdir()
+ */
+ char d_name[NAME_MAX+1];
+} dirent_prec_psx;
+
+
+typedef struct {
+ iconv_t ic_precompose;
+ DIR *dirp;
+ struct dirent_prec_psx *dirent_nfc;
+} PREC_DIR;
+
+void precompose_argv(int argc, const char **argv);
+void probe_utf8_pathname_composition(char *, int);
+
+PREC_DIR *precompose_utf8_opendir(const char *dirname);
+struct dirent_prec_psx *precompose_utf8_readdir(PREC_DIR *dirp);
+int precompose_utf8_closedir(PREC_DIR *dirp);
+
+#ifndef PRECOMPOSE_UNICODE_C
+#define dirent dirent_prec_psx
+#define opendir(n) precompose_utf8_opendir(n)
+#define readdir(d) precompose_utf8_readdir(d)
+#define closedir(d) precompose_utf8_closedir(d)
+#define DIR PREC_DIR
+#endif /* PRECOMPOSE_UNICODE_C */
+
+#define PRECOMPOSE_UNICODE_H
+#endif /* PRECOMPOSE_UNICODE_H */
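
Because the header redefines dirent, DIR, opendir(), readdir() and closedir()
whenever PRECOMPOSE_UNICODE_C is not defined, existing directory scans pick up
the NFC conversion without source changes once the compat header is in effect
(the git-compat-util.h change in this series presumably arranges that for
Mac OS X builds). A sketch of an unmodified caller; handle_entry() is
hypothetical and error handling is trimmed:

	DIR *dir = opendir(path);	/* really precompose_utf8_opendir() */
	struct dirent *de;		/* really struct dirent_prec_psx */

	while ((de = readdir(dir)) != NULL)
		handle_entry(de->d_name);	/* d_name arrives precomposed */
	closedir(dir);
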
diff --git a/compat/terminal.c b/compat/terminal.c
index 6d16c8f..bbb038d 100644
--- a/compat/terminal.c
+++ b/compat/terminal.c
@@ -59,6 +59,7 @@ char *git_terminal_prompt(const char *prompt, int echo)
r = strbuf_getline(&buf, fh, '\n');
if (!echo) {
+ fseek(fh, 0, SEEK_CUR); /* a seek is required between reading and writing an update stream */
putc('\n', fh);
fflush(fh);
}
diff --git a/config.c b/config.c
index 71ef171..2b706ea 100644
--- a/config.c
+++ b/config.c
@@ -758,6 +758,11 @@ static int git_default_core_config(const char *var, const char *value)
return 0;
}
+ if (!strcmp(var, "core.precomposeunicode")) {
+ precomposed_unicode = git_config_bool(var, value);
+ return 0;
+ }
+
/* Add other config variables here and to Documentation/config.txt. */
return 0;
}
@@ -929,7 +934,10 @@ int git_config_system(void)
int git_config_early(config_fn_t fn, void *data, const char *repo_config)
{
int ret = 0, found = 0;
- const char *home = NULL;
+ char *xdg_config = NULL;
+ char *user_config = NULL;
+
+ home_config_paths(&user_config, &xdg_config, "config");
if (git_config_system() && !access(git_etc_gitconfig(), R_OK)) {
ret += git_config_from_file(fn, git_etc_gitconfig(),
@@ -937,14 +945,14 @@ int git_config_early(config_fn_t fn, void *data, const char *repo_config)
found += 1;
}
- home = getenv("HOME");
- if (home) {
- char buf[PATH_MAX];
- char *user_config = mksnpath(buf, sizeof(buf), "%s/.gitconfig", home);
- if (!access(user_config, R_OK)) {
- ret += git_config_from_file(fn, user_config, data);
- found += 1;
- }
+ if (xdg_config && !access(xdg_config, R_OK)) {
+ ret += git_config_from_file(fn, xdg_config, data);
+ found += 1;
+ }
+
+ if (user_config && !access(user_config, R_OK)) {
+ ret += git_config_from_file(fn, user_config, data);
+ found += 1;
}
if (repo_config && !access(repo_config, R_OK)) {
@@ -963,6 +971,8 @@ int git_config_early(config_fn_t fn, void *data, const char *repo_config)
break;
}
+ free(xdg_config);
+ free(user_config);
return ret == 0 ? found : ret;
}
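
home_config_paths() (declared in the cache.h hunk above) hands back freshly
allocated strings, hence the free() calls that close the hunk. Assuming the
usual XDG fallback rules, the lookup amounts to:

	char *user_config, *xdg_config;

	home_config_paths(&user_config, &xdg_config, "config");
	/*
	 * user_config -> "$HOME/.gitconfig"
	 * xdg_config  -> "$XDG_CONFIG_HOME/git/config", falling back to
	 *                "$HOME/.config/git/config"; either pointer may be
	 *                NULL when the relevant environment is unset
	 */
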
diff --git a/config.mak.in b/config.mak.in
index b2ba710..802d342 100644
--- a/config.mak.in
+++ b/config.mak.in
@@ -28,7 +28,6 @@ VPATH = @srcdir@
export exec_prefix mandir
export srcdir VPATH
-ASCIIDOC7=@ASCIIDOC7@
NEEDS_SSL_WITH_CRYPTO=@NEEDS_SSL_WITH_CRYPTO@
NO_OPENSSL=@NO_OPENSSL@
NO_CURL=@NO_CURL@
diff --git a/configure.ac b/configure.ac
index e125550..df7e376 100644
--- a/configure.ac
+++ b/configure.ac
@@ -3,11 +3,24 @@
## Definitions of private macros.
-# GIT_CONF_APPEND_LINE(LINE)
-# --------------------------
-# Append LINE to file ${config_append}
-AC_DEFUN([GIT_CONF_APPEND_LINE],
- [echo "$1" >> "${config_append}"])
+# GIT_CONF_SUBST(VAR, VAL)
+# ------------------------
+# Cause the line "VAR=VAL" to be eventually appended to ${config_file}.
+AC_DEFUN([GIT_CONF_SUBST],
+ [AC_REQUIRE([GIT_CONF_SUBST_INIT])
+ config_appended_defs="$config_appended_defs${newline}$1=$2"])
+
+# GIT_CONF_SUBST_INIT
+# -------------------
+# Prepare shell variables and autoconf machinery required by later calls
+# to GIT_CONF_SUBST.
+AC_DEFUN([GIT_CONF_SUBST_INIT],
+ [config_appended_defs=; newline='
+'
+ AC_CONFIG_COMMANDS([$config_file],
+ [echo "$config_appended_defs" >> "$config_file"],
+ [config_file=$config_file
+ config_appended_defs="$config_appended_defs"])])
# GIT_ARG_SET_PATH(PROGRAM)
# -------------------------
@@ -29,13 +42,12 @@ AC_DEFUN([GIT_ARG_SET_PATH],
# --without-PROGRAM is used.
AC_DEFUN([GIT_CONF_APPEND_PATH],
[m4_pushdef([GIT_UC_PROGRAM], m4_toupper([$1]))dnl
- PROGRAM=GIT_UC_PROGRAM
if test "$withval" = "no"; then
if test -n "$2"; then
GIT_UC_PROGRAM[]_PATH=$withval
- AC_MSG_NOTICE([Disabling use of ${PROGRAM}])
- GIT_CONF_APPEND_LINE(NO_${PROGRAM}=YesPlease)
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=)
+ AC_MSG_NOTICE([Disabling use of GIT_UC_PROGRAM])
+ GIT_CONF_SUBST([NO_]GIT_UC_PROGRAM, [YesPlease])
+ GIT_CONF_SUBST(GIT_UC_PROGRAM[]_PATH, [])
else
AC_MSG_ERROR([You cannot use git without $1])
fi
@@ -45,7 +57,7 @@ AC_DEFUN([GIT_CONF_APPEND_PATH],
else
GIT_UC_PROGRAM[]_PATH=$withval
AC_MSG_NOTICE([Setting GIT_UC_PROGRAM[]_PATH to $withval])
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=$withval)
+ GIT_CONF_SUBST(GIT_UC_PROGRAM[]_PATH, [$withval])
fi
fi
m4_popdef([GIT_UC_PROGRAM])])
@@ -58,7 +70,6 @@ AC_DEFUN([GIT_CONF_APPEND_PATH],
# * Unset NO_PACKAGE for --with-PACKAGE without ARG
AC_DEFUN([GIT_PARSE_WITH],
[m4_pushdef([GIT_UC_PACKAGE], m4_toupper([$1]))dnl
- PACKAGE=GIT_UC_PACKAGE
if test "$withval" = "no"; then
NO_[]GIT_UC_PACKAGE=YesPlease
elif test "$withval" = "yes"; then
@@ -67,7 +78,7 @@ AC_DEFUN([GIT_PARSE_WITH],
NO_[]GIT_UC_PACKAGE=
GIT_UC_PACKAGE[]DIR=$withval
AC_MSG_NOTICE([Setting GIT_UC_PACKAGE[]DIR to $withval])
- GIT_CONF_APPEND_LINE(${PACKAGE}DIR=$withval)
+ GIT_CONF_SUBST(GIT_UC_PACKAGE[DIR], [$withval])
fi
m4_popdef([GIT_UC_PACKAGE])])
@@ -87,7 +98,7 @@ AC_DEFUN([GIT_PARSE_WITH_SET_MAKE_VAR],
[a value for $1 ($2). Maybe you do...?])
fi
AC_MSG_NOTICE([Setting $2 to $withval])
- GIT_CONF_APPEND_LINE($2=$withval)
+ GIT_CONF_SUBST([$2], [$withval])
fi)])# GIT_PARSE_WITH_SET_MAKE_VAR
#
@@ -135,10 +146,9 @@ AC_INIT([git], [@@GIT_VERSION@@], [git@vger.kernel.org])
AC_CONFIG_SRCDIR([git.c])
config_file=config.mak.autogen
-config_append=config.mak.append
config_in=config.mak.in
-echo "# ${config_append}. Generated by configure." > "${config_append}"
+GIT_CONF_SUBST([AUTOCONFIGURED], [YesPlease])
# Directories holding "saner" versions of common or POSIX binaries.
AC_ARG_WITH([sane-tool-path],
@@ -150,7 +160,7 @@ AC_ARG_WITH([sane-tool-path],
else
AC_MSG_NOTICE([Setting SANE_TOOL_PATH to '$withval'])
fi
- GIT_CONF_APPEND_LINE([SANE_TOOL_PATH=$withval])],
+ GIT_CONF_SUBST([SANE_TOOL_PATH], [$withval])],
[# If the "--with-sane-tool-path" option was not given, don't touch
# SANE_TOOL_PATH here, but let defaults in Makefile take care of it.
# This should minimize spurious differences in the behaviour of the
@@ -169,7 +179,7 @@ AC_ARG_WITH([lib],
else
lib=$withval
AC_MSG_NOTICE([Setting lib to '$lib'])
- GIT_CONF_APPEND_LINE(lib=$withval)
+ GIT_CONF_SUBST([lib], [$withval])
fi])
if test -z "$lib"; then
@@ -205,7 +215,7 @@ AC_ARG_ENABLE([jsmin],
[
JSMIN=$enableval;
AC_MSG_NOTICE([Setting JSMIN to '$JSMIN' to enable JavaScript minifying])
- GIT_CONF_APPEND_LINE(JSMIN=$enableval);
+ GIT_CONF_SUBST([JSMIN], [$enableval])
])
# Define option to enable CSS minification
@@ -215,7 +225,7 @@ AC_ARG_ENABLE([cssmin],
[
CSSMIN=$enableval;
AC_MSG_NOTICE([Setting CSSMIN to '$CSSMIN' to enable CSS minifying])
- GIT_CONF_APPEND_LINE(CSSMIN=$enableval);
+ GIT_CONF_SUBST([CSSMIN], [$enableval])
])
## Site configuration (override autodetection)
@@ -256,7 +266,7 @@ AS_HELP_STRING([], [ARG can be also prefix for libpcre library and hea
USE_LIBPCRE=YesPlease
LIBPCREDIR=$withval
AC_MSG_NOTICE([Setting LIBPCREDIR to $withval])
- GIT_CONF_APPEND_LINE(LIBPCREDIR=$withval)
+ GIT_CONF_SUBST([LIBPCREDIR], [$withval])
fi)
#
# Define NO_CURL if you do not have curl installed. git-http-pull and
@@ -437,21 +447,14 @@ if test -n "$ASCIIDOC"; then
AC_MSG_CHECKING([for asciidoc version])
asciidoc_version=`$ASCIIDOC --version 2>/dev/null`
case "${asciidoc_version}" in
- asciidoc' '7*)
- ASCIIDOC7=YesPlease
- AC_MSG_RESULT([${asciidoc_version} > 7])
- ;;
asciidoc' '8*)
- ASCIIDOC7=
AC_MSG_RESULT([${asciidoc_version}])
;;
*)
- ASCIIDOC7=
AC_MSG_RESULT([${asciidoc_version} (unknown)])
;;
esac
fi
-AC_SUBST(ASCIIDOC7)
## Checks for libraries.
@@ -1050,9 +1053,5 @@ AC_SUBST(PTHREAD_LIBS)
AC_SUBST(NO_PTHREADS)
## Output files
-AC_CONFIG_FILES(["${config_file}":"${config_in}":"${config_append}"])
+AC_CONFIG_FILES(["${config_file}":"${config_in}"])
AC_OUTPUT
-
-
-## Cleanup
-rm -f "${config_append}"
diff --git a/connect.c b/connect.c
index 41b7400..55a85ad 100644
--- a/connect.c
+++ b/connect.c
@@ -49,6 +49,16 @@ static void add_extra_have(struct extra_have_objects *extra, unsigned char *sha1
extra->nr++;
}
+static void die_initial_contact(int got_at_least_one_head)
+{
+ if (got_at_least_one_head)
+ die("The remote end hung up upon initial contact");
+ else
+ die("Could not read from remote repository.\n\n"
+ "Please make sure you have the correct access rights\n"
+ "and the repository exists.");
+}
+
/*
* Read all the refs from the other end
*/
@@ -56,6 +66,8 @@ struct ref **get_remote_heads(int in, struct ref **list,
unsigned int flags,
struct extra_have_objects *extra_have)
{
+ int got_at_least_one_head = 0;
+
*list = NULL;
for (;;) {
struct ref *ref;
@@ -64,7 +76,10 @@ struct ref **get_remote_heads(int in, struct ref **list,
char *name;
int len, name_len;
- len = packet_read_line(in, buffer, sizeof(buffer));
+ len = packet_read(in, buffer, sizeof(buffer));
+ if (len < 0)
+ die_initial_contact(got_at_least_one_head);
+
if (!len)
break;
if (buffer[len-1] == '\n')
@@ -95,6 +110,7 @@ struct ref **get_remote_heads(int in, struct ref **list,
hashcpy(ref->old_sha1, old_sha1);
*list = ref;
list = &ref->next;
+ got_at_least_one_head = 1;
}
return list;
}
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash
index 2e1b5e1..ffedce7 100755..100644
--- a/contrib/completion/git-completion.bash
+++ b/contrib/completion/git-completion.bash
@@ -20,46 +20,8 @@
# 1) Copy this file to somewhere (e.g. ~/.git-completion.sh).
# 2) Add the following line to your .bashrc/.zshrc:
# source ~/.git-completion.sh
-#
-# 3) Consider changing your PS1 to also show the current branch:
-# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
-# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
-#
-# The argument to __git_ps1 will be displayed only if you
-# are currently in a git repository. The %s token will be
-# the name of the current branch.
-#
-# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty
-# value, unstaged (*) and staged (+) changes will be shown next
-# to the branch name. You can configure this per-repository
-# with the bash.showDirtyState variable, which defaults to true
-# once GIT_PS1_SHOWDIRTYSTATE is enabled.
-#
-# You can also see if currently something is stashed, by setting
-# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed,
-# then a '$' will be shown next to the branch name.
-#
-# If you would like to see if there're untracked files, then you can
-# set GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there're
-# untracked files, then a '%' will be shown next to the branch name.
-#
-# If you would like to see the difference between HEAD and its
-# upstream, set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates
-# you are behind, ">" indicates you are ahead, and "<>"
-# indicates you have diverged. You can further control
-# behaviour by setting GIT_PS1_SHOWUPSTREAM to a space-separated
-# list of values:
-# verbose show number of commits ahead/behind (+/-) upstream
-# legacy don't use the '--count' option available in recent
-# versions of git-rev-list
-# git always compare HEAD to @{upstream}
-# svn always compare HEAD to your SVN upstream
-# By default, __git_ps1 will compare HEAD to your SVN upstream
-# if it can find one, or @{upstream} otherwise. Once you have
-# set GIT_PS1_SHOWUPSTREAM, you can override it on a
-# per-repository basis by setting the bash.showUpstream config
-# variable.
-#
+# 3) Consider changing your PS1 to also show the current branch;
+# see git-prompt.sh for details.
if [[ -n ${ZSH_VERSION-} ]]; then
autoload -U +X bashcompinit && bashcompinit
@@ -74,9 +36,14 @@ esac
# returns location of .git repo
__gitdir ()
{
+ # Note: this function is duplicated in git-prompt.sh
+ # When updating it, make sure you update the other one to match.
if [ -z "${1-}" ]; then
if [ -n "${__git_dir-}" ]; then
echo "$__git_dir"
+ elif [ -n "${GIT_DIR-}" ]; then
+ test -d "${GIT_DIR-}" || return 1
+ echo "$GIT_DIR"
elif [ -d .git ]; then
echo .git
else
@@ -89,221 +56,6 @@ __gitdir ()
fi
}
-# stores the divergence from upstream in $p
-# used by GIT_PS1_SHOWUPSTREAM
-__git_ps1_show_upstream ()
-{
- local key value
- local svn_remote svn_url_pattern count n
- local upstream=git legacy="" verbose=""
-
- svn_remote=()
- # get some config options from git-config
- local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')"
- while read -r key value; do
- case "$key" in
- bash.showupstream)
- GIT_PS1_SHOWUPSTREAM="$value"
- if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then
- p=""
- return
- fi
- ;;
- svn-remote.*.url)
- svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value"
- svn_url_pattern+="\\|$value"
- upstream=svn+git # default upstream is SVN if available, else git
- ;;
- esac
- done <<< "$output"
-
- # parse configuration values
- for option in ${GIT_PS1_SHOWUPSTREAM}; do
- case "$option" in
- git|svn) upstream="$option" ;;
- verbose) verbose=1 ;;
- legacy) legacy=1 ;;
- esac
- done
-
- # Find our upstream
- case "$upstream" in
- git) upstream="@{upstream}" ;;
- svn*)
- # get the upstream from the "git-svn-id: ..." in a commit message
- # (git-svn uses essentially the same procedure internally)
- local svn_upstream=($(git log --first-parent -1 \
- --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
- if [[ 0 -ne ${#svn_upstream[@]} ]]; then
- svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]}
- svn_upstream=${svn_upstream%@*}
- local n_stop="${#svn_remote[@]}"
- for ((n=1; n <= n_stop; n++)); do
- svn_upstream=${svn_upstream#${svn_remote[$n]}}
- done
-
- if [[ -z "$svn_upstream" ]]; then
- # default branch name for checkouts with no layout:
- upstream=${GIT_SVN_ID:-git-svn}
- else
- upstream=${svn_upstream#/}
- fi
- elif [[ "svn+git" = "$upstream" ]]; then
- upstream="@{upstream}"
- fi
- ;;
- esac
-
- # Find how many commits we are ahead/behind our upstream
- if [[ -z "$legacy" ]]; then
- count="$(git rev-list --count --left-right \
- "$upstream"...HEAD 2>/dev/null)"
- else
- # produce equivalent output to --count for older versions of git
- local commits
- if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)"
- then
- local commit behind=0 ahead=0
- for commit in $commits
- do
- case "$commit" in
- "<"*) ((behind++)) ;;
- *) ((ahead++)) ;;
- esac
- done
- count="$behind $ahead"
- else
- count=""
- fi
- fi
-
- # calculate the result
- if [[ -z "$verbose" ]]; then
- case "$count" in
- "") # no upstream
- p="" ;;
- "0 0") # equal to upstream
- p="=" ;;
- "0 "*) # ahead of upstream
- p=">" ;;
- *" 0") # behind upstream
- p="<" ;;
- *) # diverged from upstream
- p="<>" ;;
- esac
- else
- case "$count" in
- "") # no upstream
- p="" ;;
- "0 0") # equal to upstream
- p=" u=" ;;
- "0 "*) # ahead of upstream
- p=" u+${count#0 }" ;;
- *" 0") # behind upstream
- p=" u-${count% 0}" ;;
- *) # diverged from upstream
- p=" u+${count#* }-${count% *}" ;;
- esac
- fi
-
-}
-
-
-# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
-# returns text to add to bash PS1 prompt (includes branch name)
-__git_ps1 ()
-{
- local g="$(__gitdir)"
- if [ -n "$g" ]; then
- local r=""
- local b=""
- if [ -f "$g/rebase-merge/interactive" ]; then
- r="|REBASE-i"
- b="$(cat "$g/rebase-merge/head-name")"
- elif [ -d "$g/rebase-merge" ]; then
- r="|REBASE-m"
- b="$(cat "$g/rebase-merge/head-name")"
- else
- if [ -d "$g/rebase-apply" ]; then
- if [ -f "$g/rebase-apply/rebasing" ]; then
- r="|REBASE"
- elif [ -f "$g/rebase-apply/applying" ]; then
- r="|AM"
- else
- r="|AM/REBASE"
- fi
- elif [ -f "$g/MERGE_HEAD" ]; then
- r="|MERGING"
- elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
- r="|CHERRY-PICKING"
- elif [ -f "$g/BISECT_LOG" ]; then
- r="|BISECTING"
- fi
-
- b="$(git symbolic-ref HEAD 2>/dev/null)" || {
-
- b="$(
- case "${GIT_PS1_DESCRIBE_STYLE-}" in
- (contains)
- git describe --contains HEAD ;;
- (branch)
- git describe --contains --all HEAD ;;
- (describe)
- git describe HEAD ;;
- (* | default)
- git describe --tags --exact-match HEAD ;;
- esac 2>/dev/null)" ||
-
- b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." ||
- b="unknown"
- b="($b)"
- }
- fi
-
- local w=""
- local i=""
- local s=""
- local u=""
- local c=""
- local p=""
-
- if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then
- if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then
- c="BARE:"
- else
- b="GIT_DIR!"
- fi
- elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
- if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then
- if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then
- git diff --no-ext-diff --quiet --exit-code || w="*"
- if git rev-parse --quiet --verify HEAD >/dev/null; then
- git diff-index --cached --quiet HEAD -- || i="+"
- else
- i="#"
- fi
- fi
- fi
- if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
- git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
- fi
-
- if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then
- if [ -n "$(git ls-files --others --exclude-standard)" ]; then
- u="%"
- fi
- fi
-
- if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
- __git_ps1_show_upstream
- fi
- fi
-
- local f="$w$i$s$u"
- printf -- "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p"
- fi
-}
-
__gitcomp_1 ()
{
local c IFS=$' \t\n'
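
The prompt machinery removed above is not gone: it reappears essentially
verbatim in the new contrib/completion/git-prompt.sh below, so users who want
both completion and __git_ps1 now source two files.
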
diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh
new file mode 100644
index 0000000..29b1ec9
--- /dev/null
+++ b/contrib/completion/git-prompt.sh
@@ -0,0 +1,289 @@
+# bash/zsh git prompt support
+#
+# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
+# Distributed under the GNU General Public License, version 2.0.
+#
+# This script allows you to see the current branch in your prompt.
+#
+# To enable:
+#
+# 1) Copy this file to somewhere (e.g. ~/.git-prompt.sh).
+# 2) Add the following line to your .bashrc/.zshrc:
+# source ~/.git-prompt.sh
+# 3) Change your PS1 to also show the current branch:
+# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
+# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
+#
+# The argument to __git_ps1 will be displayed only if you are currently
+# in a git repository. The %s token will be the name of the current
+# branch.
+#
+# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value,
+# unstaged (*) and staged (+) changes will be shown next to the branch
+# name. You can configure this per-repository with the
+# bash.showDirtyState variable, which defaults to true once
+# GIT_PS1_SHOWDIRTYSTATE is enabled.
+#
+# You can also see whether something is currently stashed, by setting
+# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed,
+# then a '$' will be shown next to the branch name.
+#
+# If you would like to see if there are untracked files, then you can set
+# GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there are untracked
+# files, then a '%' will be shown next to the branch name.
+#
+# If you would like to see the difference between HEAD and its upstream,
+# set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates you are behind, ">"
+# indicates you are ahead, and "<>" indicates you have diverged. You
+# can further control behaviour by setting GIT_PS1_SHOWUPSTREAM to a
+# space-separated list of values:
+#
+# verbose show number of commits ahead/behind (+/-) upstream
+# legacy don't use the '--count' option available in recent
+# versions of git-rev-list
+# git always compare HEAD to @{upstream}
+# svn always compare HEAD to your SVN upstream
+#
+# By default, __git_ps1 will compare HEAD to your SVN upstream if it can
+# find one, or @{upstream} otherwise. Once you have set
+# GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by
+# setting the bash.showUpstream config variable.
+
+# __gitdir accepts 0 or 1 arguments (i.e., location)
+# returns location of .git repo
+__gitdir ()
+{
+ # Note: this function is duplicated in git-completion.bash
+ # When updating it, make sure you update the other one to match.
+ if [ -z "${1-}" ]; then
+ if [ -n "${__git_dir-}" ]; then
+ echo "$__git_dir"
+ elif [ -n "${GIT_DIR-}" ]; then
+ test -d "${GIT_DIR-}" || return 1
+ echo "$GIT_DIR"
+ elif [ -d .git ]; then
+ echo .git
+ else
+ git rev-parse --git-dir 2>/dev/null
+ fi
+ elif [ -d "$1/.git" ]; then
+ echo "$1/.git"
+ else
+ echo "$1"
+ fi
+}
+
+# stores the divergence from upstream in $p
+# used by GIT_PS1_SHOWUPSTREAM
+__git_ps1_show_upstream ()
+{
+ local key value
+ local svn_remote svn_url_pattern count n
+ local upstream=git legacy="" verbose=""
+
+ svn_remote=()
+ # get some config options from git-config
+ local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')"
+ while read -r key value; do
+ case "$key" in
+ bash.showupstream)
+ GIT_PS1_SHOWUPSTREAM="$value"
+ if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then
+ p=""
+ return
+ fi
+ ;;
+ svn-remote.*.url)
+ svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value"
+ svn_url_pattern+="\\|$value"
+ upstream=svn+git # default upstream is SVN if available, else git
+ ;;
+ esac
+ done <<< "$output"
+
+ # parse configuration values
+ for option in ${GIT_PS1_SHOWUPSTREAM}; do
+ case "$option" in
+ git|svn) upstream="$option" ;;
+ verbose) verbose=1 ;;
+ legacy) legacy=1 ;;
+ esac
+ done
+
+ # Find our upstream
+ case "$upstream" in
+ git) upstream="@{upstream}" ;;
+ svn*)
+ # get the upstream from the "git-svn-id: ..." in a commit message
+ # (git-svn uses essentially the same procedure internally)
+ local svn_upstream=($(git log --first-parent -1 \
+ --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
+ if [[ 0 -ne ${#svn_upstream[@]} ]]; then
+ svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]}
+ svn_upstream=${svn_upstream%@*}
+ local n_stop="${#svn_remote[@]}"
+ for ((n=1; n <= n_stop; n++)); do
+ svn_upstream=${svn_upstream#${svn_remote[$n]}}
+ done
+
+ if [[ -z "$svn_upstream" ]]; then
+ # default branch name for checkouts with no layout:
+ upstream=${GIT_SVN_ID:-git-svn}
+ else
+ upstream=${svn_upstream#/}
+ fi
+ elif [[ "svn+git" = "$upstream" ]]; then
+ upstream="@{upstream}"
+ fi
+ ;;
+ esac
+
+ # Find how many commits we are ahead/behind our upstream
+ if [[ -z "$legacy" ]]; then
+ count="$(git rev-list --count --left-right \
+ "$upstream"...HEAD 2>/dev/null)"
+ else
+ # produce equivalent output to --count for older versions of git
+ local commits
+ if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)"
+ then
+ local commit behind=0 ahead=0
+ for commit in $commits
+ do
+ case "$commit" in
+ "<"*) ((behind++)) ;;
+ *) ((ahead++)) ;;
+ esac
+ done
+ count="$behind $ahead"
+ else
+ count=""
+ fi
+ fi
+
+ # calculate the result
+ if [[ -z "$verbose" ]]; then
+ case "$count" in
+ "") # no upstream
+ p="" ;;
+ "0 0") # equal to upstream
+ p="=" ;;
+ "0 "*) # ahead of upstream
+ p=">" ;;
+ *" 0") # behind upstream
+ p="<" ;;
+ *) # diverged from upstream
+ p="<>" ;;
+ esac
+ else
+ case "$count" in
+ "") # no upstream
+ p="" ;;
+ "0 0") # equal to upstream
+ p=" u=" ;;
+ "0 "*) # ahead of upstream
+ p=" u+${count#0 }" ;;
+ *" 0") # behind upstream
+ p=" u-${count% 0}" ;;
+ *) # diverged from upstream
+ p=" u+${count#* }-${count% *}" ;;
+ esac
+ fi
+
+}
+
+
+# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
+# returns text to add to bash PS1 prompt (includes branch name)
+__git_ps1 ()
+{
+ local g="$(__gitdir)"
+ if [ -n "$g" ]; then
+ local r=""
+ local b=""
+ if [ -f "$g/rebase-merge/interactive" ]; then
+ r="|REBASE-i"
+ b="$(cat "$g/rebase-merge/head-name")"
+ elif [ -d "$g/rebase-merge" ]; then
+ r="|REBASE-m"
+ b="$(cat "$g/rebase-merge/head-name")"
+ else
+ if [ -d "$g/rebase-apply" ]; then
+ if [ -f "$g/rebase-apply/rebasing" ]; then
+ r="|REBASE"
+ elif [ -f "$g/rebase-apply/applying" ]; then
+ r="|AM"
+ else
+ r="|AM/REBASE"
+ fi
+ elif [ -f "$g/MERGE_HEAD" ]; then
+ r="|MERGING"
+ elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
+ r="|CHERRY-PICKING"
+ elif [ -f "$g/BISECT_LOG" ]; then
+ r="|BISECTING"
+ fi
+
+ b="$(git symbolic-ref HEAD 2>/dev/null)" || {
+
+ b="$(
+ case "${GIT_PS1_DESCRIBE_STYLE-}" in
+ (contains)
+ git describe --contains HEAD ;;
+ (branch)
+ git describe --contains --all HEAD ;;
+ (describe)
+ git describe HEAD ;;
+ (* | default)
+ git describe --tags --exact-match HEAD ;;
+ esac 2>/dev/null)" ||
+
+ b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." ||
+ b="unknown"
+ b="($b)"
+ }
+ fi
+
+ local w=""
+ local i=""
+ local s=""
+ local u=""
+ local c=""
+ local p=""
+
+ if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then
+ if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then
+ c="BARE:"
+ else
+ b="GIT_DIR!"
+ fi
+ elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
+ if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then
+ if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then
+ git diff --no-ext-diff --quiet --exit-code || w="*"
+ if git rev-parse --quiet --verify HEAD >/dev/null; then
+ git diff-index --cached --quiet HEAD -- || i="+"
+ else
+ i="#"
+ fi
+ fi
+ fi
+ if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
+ git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
+ fi
+
+ if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then
+ if [ -n "$(git ls-files --others --exclude-standard)" ]; then
+ u="%"
+ fi
+ fi
+
+ if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
+ __git_ps1_show_upstream
+ fi
+ fi
+
+ local f="$w$i$s$u"
+ printf -- "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p"
+ fi
+}
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
new file mode 100644
index 0000000..3ed728b
--- /dev/null
+++ b/contrib/mw-to-git/Makefile
@@ -0,0 +1,47 @@
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Build git-remote-mediawiki
+
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+ifndef PERL_PATH
+ PERL_PATH = /usr/bin/perl
+endif
+ifndef gitexecdir
+ gitexecdir = $(shell git --exec-path)
+endif
+
+PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
+gitexecdir_SQ = $(subst ','\'',$(gitexecdir))
+SCRIPT = git-remote-mediawiki
+
+.PHONY: install help doc test clean
+
+help:
+ @echo 'This is the help target of the Makefile. Current configuration:'
+ @echo ' gitexecdir = $(gitexecdir_SQ)'
+ @echo ' PERL_PATH = $(PERL_PATH_SQ)'
+ @echo 'Run "$(MAKE) install" to install $(SCRIPT) in gitexecdir'
+ @echo 'Run "$(MAKE) test" to run the testsuite'
+
+install:
+ sed -e '1s|#!.*/perl|#!$(PERL_PATH_SQ)|' $(SCRIPT) \
+ > '$(gitexecdir_SQ)/$(SCRIPT)'
+ chmod +x '$(gitexecdir_SQ)/$(SCRIPT)'
+
+doc:
+ @echo 'Sorry, "make doc" is not implemented yet for $(SCRIPT)'
+
+test:
+ $(MAKE) -C t/ test
+
+clean:
+ $(RM) '$(gitexecdir_SQ)/$(SCRIPT)'
+ $(MAKE) -C t/ clean
diff --git a/contrib/mw-to-git/git-remote-mediawiki b/contrib/mw-to-git/git-remote-mediawiki
index c18bfa1..68555d4 100755
--- a/contrib/mw-to-git/git-remote-mediawiki
+++ b/contrib/mw-to-git/git-remote-mediawiki
@@ -9,40 +9,19 @@
# License: GPL v2 or later
# Gateway between Git and MediaWiki.
-# https://github.com/Bibzball/Git-Mediawiki/wiki
-#
-# Known limitations:
-#
-# - Only wiki pages are managed, no support for [[File:...]]
-# attachments.
-#
-# - Poor performance in the best case: it takes forever to check
-# whether we're up-to-date (on fetch or push) or to fetch a few
-# revisions from a large wiki, because we use exclusively a
-# page-based synchronization. We could switch to a wiki-wide
-# synchronization when the synchronization involves few revisions
-# but the wiki is large.
-#
-# - Git renames could be turned into MediaWiki renames (see TODO
-# below)
-#
-# - login/password support requires the user to write the password
-# cleartext in a file (see TODO below).
-#
-# - No way to import "one page, and all pages included in it"
-#
-# - Multiple remote MediaWikis have not been very well tested.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;
-use encoding 'utf8';
-# use encoding 'utf8' doesn't change STDERROR
-# but we're going to output UTF-8 filenames to STDERR
+# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ":utf8";
+binmode STDOUT, ":utf8";
use URI::Escape;
+use IPC::Open2;
+
use warnings;
# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
@@ -59,6 +38,9 @@ use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+# Used on Git's side to reflect empty edit messages on the wiki
+use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+
my $remotename = $ARGV[0];
my $url = $ARGV[1];
@@ -71,10 +53,18 @@ chomp(@tracked_pages);
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);
+# Import media files on pull
+my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+chomp($import_media);
+$import_media = ($import_media eq "true");
+
+# Export media files on push
+my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+chomp($export_media);
+$export_media = !($export_media eq "false");
+
my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
-# TODO: ideally, this should be able to read from keyboard, but we're
-# inside a remote helper, so our stdin is connect to git, not to a
-# terminal.
+# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
@@ -86,6 +76,21 @@ my $shallow_import = run_git("config --get --bool remote.". $remotename .".shall
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");
+# Fetch (clone and pull) by revisions instead of by pages. This behavior
+# is more efficient when we have a wiki with lots of pages and we fetch
+# the revisions quite often so that they concern only a few pages.
+# Possible values:
+# - by_rev: perform one query per new revision on the remote wiki
+# - by_page: query each tracked page for new revision
+my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
+unless ($fetch_strategy) {
+ $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+}
+chomp($fetch_strategy);
+unless ($fetch_strategy) {
+ $fetch_strategy = "by_page";
+}
+
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
@@ -151,32 +156,152 @@ while (<STDIN>) {
########################## Functions ##############################
+## credential API management (generic functions)
+
+sub credential_read {
+ my %credential;
+ my $reader = shift;
+ my $op = shift;
+ while (<$reader>) {
+ my ($key, $value) = /([^=]*)=(.*)/;
+ if (not defined $key) {
+ die "ERROR receiving response from git credential $op:\n$_\n";
+ }
+ $credential{$key} = $value;
+ }
+ return %credential;
+}
+
+sub credential_write {
+ my $credential = shift;
+ my $writer = shift;
+ # url overwrites other fields, so it must come first
+ print $writer "url=$credential->{url}\n" if exists $credential->{url};
+ while (my ($key, $value) = each(%$credential) ) {
+ if (length $value && $key ne 'url') {
+ print $writer "$key=$value\n";
+ }
+ }
+}
+
+sub credential_run {
+ my $op = shift;
+ my $credential = shift;
+ my $pid = open2(my $reader, my $writer, "git credential $op");
+ credential_write($credential, $writer);
+ print $writer "\n";
+ close($writer);
+
+ if ($op eq "fill") {
+ %$credential = credential_read($reader, $op);
+ } else {
+ if (<$reader>) {
+ die "ERROR while running git credential $op:\n$_";
+ }
+ }
+ close($reader);
+ waitpid($pid, 0);
+ my $child_exit_status = $? >> 8;
+ if ($child_exit_status != 0) {
+ die "'git credential $op' failed with code $child_exit_status.";
+ }
+}
+
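
The three helpers above speak git-credential's key=value protocol over pipes.
A "fill" request as written by credential_write() looks roughly like this
(values illustrative), terminated by the blank line that credential_run()
prints; the response comes back in the same format with username and password
completed:

	url=https://wiki.example.com/
	username=alice
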
# MediaWiki API instance, created lazily.
my $mediawiki;
sub mw_connect_maybe {
if ($mediawiki) {
- return;
+ return;
}
$mediawiki = MediaWiki::API->new;
$mediawiki->{config}->{api_url} = "$url/api.php";
if ($wiki_login) {
- if (!$mediawiki->login({
- lgname => $wiki_login,
- lgpassword => $wiki_passwd,
- lgdomain => $wiki_domain,
- })) {
- print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n";
- print STDERR "(error " .
- $mediawiki->{error}->{code} . ': ' .
- $mediawiki->{error}->{details} . ")\n";
- exit 1;
+ my %credential = (url => $url);
+ $credential{username} = $wiki_login;
+ $credential{password} = $wiki_passwd;
+ credential_run("fill", \%credential);
+ my $request = {lgname => $credential{username},
+ lgpassword => $credential{password},
+ lgdomain => $wiki_domain};
+ if ($mediawiki->login($request)) {
+ credential_run("approve", \%credential);
+ print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
} else {
- print STDERR "Logged in with user \"$wiki_login\".\n";
+ print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
+ print STDERR " (error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ credential_run("reject", \%credential);
+ exit 1;
+ }
+ }
+}
+
+## Functions for listing pages on the remote wiki
+sub get_mw_tracked_pages {
+ my $pages = shift;
+ get_mw_page_list(\@tracked_pages, $pages);
+}
+
+sub get_mw_page_list {
+ my $page_list = shift;
+ my $pages = shift;
+ my @some_pages = @$page_list;
+ while (@some_pages) {
+ my $last = 50;
+ if ($#some_pages < $last) {
+ $last = $#some_pages;
+ }
+ my @slice = @some_pages[0..$last];
+ get_mw_first_pages(\@slice, $pages);
+ @some_pages = @some_pages[51..$#some_pages];
+ }
+}
+
+sub get_mw_tracked_categories {
+ my $pages = shift;
+ foreach my $category (@tracked_categories) {
+ if (index($category, ':') < 0) {
+ # Mediawiki requires the Category
+ # prefix, but let's not force the user
+ # to specify it.
+ $category = "Category:" . $category;
+ }
+ my $mw_pages = $mediawiki->list( {
+ action => 'query',
+ list => 'categorymembers',
+ cmtitle => $category,
+ cmlimit => 'max' } )
+ || die $mediawiki->{error}->{code} . ': '
+ . $mediawiki->{error}->{details};
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
}
}
}
+sub get_mw_all_pages {
+ my $pages = shift;
+ # No user-provided list, get the list of pages from the API.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not get the list of wiki pages.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+}
+
+# Queries the wiki for a set of pages. Meant to be used within a loop
+# querying the wiki for slices of a page list.
sub get_mw_first_pages {
my $some_pages = shift;
my @some_pages = @{$some_pages};
@@ -205,70 +330,45 @@ sub get_mw_first_pages {
}
}
+# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
mw_connect_maybe();
+ print STDERR "Listing pages on remote wiki...\n";
+
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
if (@tracked_pages) {
$user_defined = 1;
# The user provided a list of pages titles, but we
# still need to query the API to get the page IDs.
-
- my @some_pages = @tracked_pages;
- while (@some_pages) {
- my $last = 50;
- if ($#some_pages < $last) {
- $last = $#some_pages;
- }
- my @slice = @some_pages[0..$last];
- get_mw_first_pages(\@slice, \%pages);
- @some_pages = @some_pages[51..$#some_pages];
- }
+ get_mw_tracked_pages(\%pages);
}
if (@tracked_categories) {
$user_defined = 1;
- foreach my $category (@tracked_categories) {
- if (index($category, ':') < 0) {
- # Mediawiki requires the Category
- # prefix, but let's not force the user
- # to specify it.
- $category = "Category:" . $category;
- }
- my $mw_pages = $mediawiki->list( {
- action => 'query',
- list => 'categorymembers',
- cmtitle => $category,
- cmlimit => 'max' } )
- || die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
- foreach my $page (@{$mw_pages}) {
- $pages{$page->{title}} = $page;
- }
- }
+ get_mw_tracked_categories(\%pages);
}
if (!$user_defined) {
- # No user-provided list, get the list of pages from
- # the API.
- my $mw_pages = $mediawiki->list({
- action => 'query',
- list => 'allpages',
- aplimit => 500,
- });
- if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
- }
- foreach my $page (@{$mw_pages}) {
- $pages{$page->{title}} = $page;
+ get_mw_all_pages(\%pages);
+ }
+ if ($import_media) {
+ print STDERR "Getting media files for selected pages...\n";
+ if ($user_defined) {
+ get_linked_mediafiles(\%pages);
+ } else {
+ get_all_mediafiles(\%pages);
}
}
- return values(%pages);
+ print STDERR (scalar keys %pages) . " pages found.\n";
+ return %pages;
}
+# usage: $out = run_git("command args");
+# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
- open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
+ my $args = shift;
+ my $encoding = (shift || "encoding(UTF-8)");
+ open(my $git, "-|:$encoding", "git " . $args);
my $res = do { local $/; <$git> };
close($git);
@@ -276,6 +376,123 @@ sub run_git {
}
+sub get_all_mediafiles {
+ my $pages = shift;
+ # Fetch the list of all media-file pages from the API;
+ # they are in a different namespace, and only one namespace
+ # can be queried at a time
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ apnamespace => get_mw_namespace_id("File"),
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not get the list of pages for media files.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+}
+
+sub get_linked_mediafiles {
+ my $pages = shift;
+ my @titles = map $_->{title}, values(%{$pages});
+
+ # The query is split into small batches because of the MW API limit on
+ # the number of links to be returned (500 links max).
+ my $batch = 10;
+ while (@titles) {
+ if ($#titles < $batch) {
+ $batch = $#titles;
+ }
+ my @slice = @titles[0..$batch];
+
+ # pattern 'page1|page2|...' required by the API
+ my $mw_titles = join('|', @slice);
+
+ # Media files could be included or linked from
+ # a page; get all that are related
+ my $query = {
+ action => 'query',
+ prop => 'links|images',
+ titles => $mw_titles,
+ plnamespace => get_mw_namespace_id("File"),
+ pllimit => 'max'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
+ my @media_titles;
+ if (defined($page->{links})) {
+ my @link_titles = map $_->{title}, @{$page->{links}};
+ push(@media_titles, @link_titles);
+ }
+ if (defined($page->{images})) {
+ my @image_titles = map $_->{title}, @{$page->{images}};
+ push(@media_titles, @image_titles);
+ }
+ if (@media_titles) {
+ get_mw_page_list(\@media_titles, $pages);
+ }
+ }
+
+ @titles = @titles[($batch+1)..$#titles];
+ }
+}
+
+sub get_mw_mediafile_for_page_revision {
+ # Name of the file on Wiki, with the prefix.
+ my $filename = shift;
+ my $timestamp = shift;
+ my %mediafile;
+
+ # Check whether a media file with the given timestamp exists on
+ # MediaWiki. If so, download the file.
+ my $query = {
+ action => 'query',
+ prop => 'imageinfo',
+ titles => "File:" . $filename,
+ iistart => $timestamp,
+ iiend => $timestamp,
+ iiprop => 'timestamp|archivename|url',
+ iilimit => 1
+ };
+ my $result = $mediawiki->api($query);
+
+ my ($fileid, $file) = each( %{$result->{query}->{pages}} );
+ # If not defined, there is no revision of the file for the
+ # given timestamp.
+ if (defined($file->{imageinfo})) {
+ $mediafile{title} = $filename;
+
+ my $fileinfo = pop(@{$file->{imageinfo}});
+ $mediafile{timestamp} = $fileinfo->{timestamp};
+ # Mediawiki::API's download function doesn't support https URLs
+ # and can't download old versions of files.
+ print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ $mediafile{content} = download_mw_mediafile($fileinfo->{url});
+ }
+ return %mediafile;
+}
+
+sub download_mw_mediafile {
+ my $url = shift;
+
+ my $response = $mediawiki->{ua}->get($url);
+ if ($response->code == 200) {
+ return $response->decoded_content;
+ } else {
+ print STDERR "Error downloading mediafile from :\n";
+ print STDERR "URL: $url\n";
+ print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+ exit 1;
+ }
+}
+
sub get_last_local_revision {
# Get note regarding last mediawiki revision
my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
@@ -297,13 +514,36 @@ sub get_last_local_revision {
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;
+# Get the last remote revision without taking into account which pages are
+# tracked or not. This function makes a single request to the wiki, thus
+# avoiding a loop over all tracked pages. This is useful for the fetch-by-rev
+# option.
+sub get_last_global_remote_rev {
+ mw_connect_maybe();
+
+ my $query = {
+ action => 'query',
+ list => 'recentchanges',
+ prop => 'revisions',
+ rclimit => '1',
+ rcdir => 'older',
+ };
+ my $result = $mediawiki->api($query);
+ return $result->{query}->{recentchanges}[0]->{revid};
+}
+
+# Get the last remote revision concerning the tracked pages and the tracked
+# categories.
sub get_last_remote_revision {
mw_connect_maybe();
- my @pages = get_mw_pages();
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
my $max_rev_num = 0;
+ print STDERR "Getting last revision id on tracked pages...\n";
+
foreach my $page (@pages) {
my $id = $page->{pageid};
@@ -379,6 +619,16 @@ sub literal_data {
print STDOUT "data ", bytes::length($content), "\n", $content;
}
+sub literal_data_raw {
+ # Output possibly binary content.
+ my ($content) = @_;
+ # Avoid confusion between size in bytes and in characters
+ utf8::downgrade($content);
+ binmode STDOUT, ":raw";
+ print STDOUT "data ", bytes::length($content), "\n", $content;
+ binmode STDOUT, ":utf8";
+}
+
sub mw_capabilities {
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
@@ -466,6 +716,11 @@ sub import_file_revision {
my %commit = %{$commit};
my $full_import = shift;
my $n = shift;
+ my $mediafile = shift;
+ my %mediafile;
+ if ($mediafile) {
+ %mediafile = %{$mediafile};
+ }
my $title = $commit{title};
my $comment = $commit{comment};
@@ -485,6 +740,10 @@ sub import_file_revision {
if ($content ne DELETED_CONTENT) {
print STDOUT "M 644 inline $title.mw\n";
literal_data($content);
+ if (%mediafile) {
+ print STDOUT "M 644 inline $mediafile{title}\n";
+ literal_data_raw($mediafile{content});
+ }
print STDOUT "\n\n";
} else {
print STDOUT "D $title.mw\n";
@@ -547,8 +806,6 @@ sub mw_import_ref {
mw_connect_maybe();
- my @pages = get_mw_pages();
-
print STDERR "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
@@ -557,36 +814,111 @@ sub mw_import_ref {
} else {
print STDERR ", fetching from here.\n";
}
+
+ my $n = 0;
+ if ($fetch_strategy eq "by_rev") {
+ print STDERR "Fetching & writing export data by revs...\n";
+ $n = mw_import_ref_by_revs($fetch_from);
+ } elsif ($fetch_strategy eq "by_page") {
+ print STDERR "Fetching & writing export data by pages...\n";
+ $n = mw_import_ref_by_pages($fetch_from);
+ } else {
+ print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
+ print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+ exit 1;
+ }
+
+ if ($fetch_from == 1 && $n == 0) {
+ print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ # Something has to be done on the remote-helper side. If nothing is done, an error is
+ # thrown saying that HEAD is referring to an unknown object 0000000000000000000
+ # and the clone fails.
+ }
+}
+
+sub mw_import_ref_by_pages {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
+
my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
- # Creation of the fast-import stream
- print STDERR "Fetching & writing export data...\n";
+ @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
+ my @revision_ids = map $_->{revid}, @revisions;
- $n = 0;
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+sub mw_import_ref_by_revs {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+
+ my $last_remote = get_last_global_remote_rev();
+ my @revision_ids = $fetch_from..$last_remote;
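+ # Candidate ids form a dense range; ids unknown to the wiki are
+ # filtered out later through the "badrevids" check in mw_import_revids.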
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+# Import the revisions given in the second argument (array of integers).
+# Only pages appearing in the third argument (hash indexed by page title)
+# will be imported.
+sub mw_import_revids {
+ my $fetch_from = shift;
+ my $revision_ids = shift;
+ my $pages = shift;
+
+ my $n = 0;
+ my $n_actual = 0;
 my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
- foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
+ foreach my $pagerevid (@$revision_ids) {
+ # Count page even if we skip it, since we display
+ # $n/$total and $total includes skipped pages.
+ $n++;
+
# fetch the content of the pages
my $query = {
action => 'query',
prop => 'revisions',
rvprop => 'content|timestamp|comment|user|ids',
- revids => $pagerevid->{revid},
+ revids => $pagerevid,
};
my $result = $mediawiki->api($query);
- my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});
+ if (!$result) {
+ die "Failed to retrieve modified page for revision $pagerevid";
+ }
- $n++;
+ if (defined($result->{query}->{badrevids}->{$pagerevid})) {
+ # The revision id does not exist on the remote wiki.
+ next;
+ }
+
+ if (!defined($result->{query}->{pages})) {
+ die "Invalid revision $pagerevid.";
+ }
+
+ my @result_pages = values(%{$result->{query}->{pages}});
+ my $result_page = $result_pages[0];
+ my $rev = $result_pages[0]->{revisions}->[0];
+
+ my $page_title = $result_page->{title};
+
+ if (!exists($pages->{$page_title})) {
+ print STDERR "$n/", scalar(@$revision_ids),
+ ": Skipping revision #$rev->{revid} of $page_title\n";
+ next;
+ }
+
+ $n_actual++;
my %commit;
$commit{author} = $rev->{user} || 'Anonymous';
- $commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
- $commit{title} = mediawiki_smudge_filename(
- $result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
- );
- $commit{mw_revision} = $pagerevid->{revid};
+ $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
+ $commit{title} = mediawiki_smudge_filename($page_title);
+ $commit{mw_revision} = $rev->{revid};
$commit{content} = mediawiki_smudge($rev->{'*'});
if (!defined($rev->{timestamp})) {
@@ -596,17 +928,23 @@ sub mw_import_ref {
}
$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
- print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";
-
- import_file_revision(\%commit, ($fetch_from == 1), $n);
+ # Differentiate classic pages from media files.
+ my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
+ my %mediafile;
+ if ($namespace) {
+ my $id = get_mw_namespace_id($namespace);
+ if ($id && $id == get_mw_namespace_id("File")) {
+ %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+ }
+ }
+ # If this is a revision of the media page for a new version of a
+ # file, do one common commit for both the file and the media page.
+ # Otherwise, commit only that page.
+ print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
- if ($fetch_from == 1 && $n == 0) {
- print STDERR "You appear to have cloned an empty MediaWiki.\n";
- # Something has to be done remote-helper side. If nothing is done, an error is
- # thrown saying that HEAD is refering to unknown object 0000000000000000000
- # and the clone fails.
- }
+ return $n_actual;
}
sub error_non_fast_forward {
@@ -624,6 +962,63 @@ sub error_non_fast_forward {
return 0;
}
+sub mw_upload_file {
+ my $complete_file_name = shift;
+ my $new_sha1 = shift;
+ my $extension = shift;
+ my $file_deleted = shift;
+ my $summary = shift;
+ my $newrevid;
+ my $path = "File:" . $complete_file_name;
+ my %hashFiles = get_allowed_file_extensions();
+ if (!exists($hashFiles{$extension})) {
+ print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
+ print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ return $newrevid;
+ }
+ # Deleting and uploading a file requires a privileged user
+ if ($file_deleted) {
+ mw_connect_maybe();
+ my $query = {
+ action => 'delete',
+ title => $path,
+ reason => $summary
+ };
+ if (!$mediawiki->edit($query)) {
+ print STDERR "Failed to delete file on remote wiki\n";
+ print STDERR "Check your permissions on the remote site. Error code:\n";
+ print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ exit 1;
+ }
+ } else {
+ # Don't let perl try to interpret file content as UTF-8 => use "raw"
+ my $content = run_git("cat-file blob $new_sha1", "raw");
+ if ($content ne "") {
+ mw_connect_maybe();
+ $mediawiki->{config}->{upload_url} =
+ "$url/index.php/Special:Upload";
+ $mediawiki->edit({
+ action => 'upload',
+ filename => $complete_file_name,
+ comment => $summary,
+ file => [undef,
+ $complete_file_name,
+ Content => $content],
+ ignorewarnings => 1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mediawiki->{error}->{code} . ':'
+ . $mediawiki->{error}->{details};
+ my $last_file_page = $mediawiki->get_page({title => $path});
+ $newrevid = $last_file_page->{revid};
+ print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+ } else {
+ print STDERR "Empty file $complete_file_name not pushed.\n";
+ }
+ }
+ return $newrevid;
+}
+
sub mw_push_file {
my $diff_info = shift;
# $diff_info contains a string in this format:
@@ -636,7 +1031,12 @@ sub mw_push_file {
my $summary = shift;
# MediaWiki revision number. Keep the previous one by default,
# in case there's no edit to perform.
- my $newrevid = shift;
+ my $oldrevid = shift;
+ my $newrevid;
+
+ if ($summary eq EMPTY_MESSAGE) {
+ $summary = '';
+ }
my $new_sha1 = $diff_info_split[3];
my $old_sha1 = $diff_info_split[2];
@@ -644,9 +1044,16 @@ sub mw_push_file {
my $page_deleted = ($new_sha1 eq NULL_SHA1);
$complete_file_name = mediawiki_clean_filename($complete_file_name);
- if (substr($complete_file_name,-3) eq ".mw") {
- my $title = substr($complete_file_name,0,-3);
-
+ my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
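+ # e.g. "Foo.mw" yields ("Foo", "mw"); a name without a dot leaves
+ # $extension undefined, normalized to "" just below.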
+ if (!defined($extension)) {
+ $extension = "";
+ }
+ if ($extension eq "mw") {
+ my $ns = get_mw_namespace_id_for_page($complete_file_name);
+ if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
+ print STDERR "Ignoring media file related page: $complete_file_name\n";
+ return ($oldrevid, "ok");
+ }
my $file_content;
if ($page_deleted) {
# Deleting a page usually requires
@@ -664,7 +1071,7 @@ sub mw_push_file {
action => 'edit',
summary => $summary,
title => $title,
- basetimestamp => $basetimestamps{$newrevid},
+ basetimestamp => $basetimestamps{$oldrevid},
text => mediawiki_clean($file_content, $page_created),
}, {
skip_encoding => 1 # Helps with names with accentuated characters
@@ -676,7 +1083,7 @@ sub mw_push_file {
$mediawiki->{error}->{code} .
 ' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
- return ($newrevid, "non-fast-forward");
+ return ($oldrevid, "non-fast-forward");
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
@@ -686,9 +1093,14 @@ sub mw_push_file {
}
$newrevid = $result->{edit}->{newrevid};
print STDERR "Pushed file: $new_sha1 - $title\n";
+ } elsif ($export_media) {
+ $newrevid = mw_upload_file($complete_file_name, $new_sha1,
+ $extension, $page_deleted,
+ $summary);
} else {
- print STDERR "$complete_file_name not a mediawiki file (Not pushable on this version of git-remote-mediawiki).\n"
+ print STDERR "Ignoring media file $title\n";
}
+ $newrevid = ($newrevid or $oldrevid);
return ($newrevid, "ok");
}
@@ -760,16 +1172,26 @@ sub mw_push_revision {
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
+ print STDERR "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
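+ # Each output line is "<commit> <parent>...", boundary commits being
+ # prefixed with "-"; invert this into a parent -> child map.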
+ my %local_ancestry;
+ foreach my $line (@local_ancestry) {
+ if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(' ', $parents)) {
+ $local_ancestry{$parent} = $child;
+ }
+ } elsif ($line !~ m/^([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: $line";
+ }
+ }
while ($parsed_sha1 ne $HEAD_sha1) {
- my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
- if (!@commit_info) {
+ my $child = $local_ancestry{$parsed_sha1};
+ if (!$child) {
+ printf STDERR "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
- my @commit_info_split = split(/ |\n/, $commit_info[0]);
- # $commit_info_split[1] is the sha1 of the commit to export
- # $commit_info_split[0] is the sha1 of its direct child
- push(@commit_pairs, \@commit_info_split);
- $parsed_sha1 = $commit_info_split[1];
+ push(@commit_pairs, [$parsed_sha1, $child]);
+ $parsed_sha1 = $child;
}
} else {
# No remote mediawiki revision. Export the whole
@@ -791,8 +1213,8 @@ sub mw_push_revision {
# TODO: we could detect rename, and encode them with a #redirect on the wiki.
# TODO: for now, it's just a delete+add
my @diff_info_list = split(/\0/, $diff_infos);
- # Keep the first line of the commit message as mediawiki comment for the revision
- my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
+ # Keep the subject line of the commit message as the mediawiki comment for the revision
+ my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
@@ -817,7 +1239,7 @@ sub mw_push_revision {
}
}
unless ($dumb_push) {
- run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
+ run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
}
}
@@ -825,3 +1247,104 @@ sub mw_push_revision {
print STDOUT "ok $remote\n";
return 1;
}
+
+sub get_allowed_file_extensions {
+ mw_connect_maybe();
+
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'fileextensions'
+ };
+ my $result = $mediawiki->api($query);
+ my @file_extensions = map $_->{ext}, @{$result->{query}->{fileextensions}};
+ my %hashFile = map {$_ => 1} @file_extensions;
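+ # e.g. ('png' => 1, 'gif' => 1, 'txt' => 1) on a wiki allowing these
+ # upload types; callers then test membership with exists().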
+
+ return %hashFile;
+}
+
+# In memory cache for MediaWiki namespace ids.
+my %namespace_id;
+
+# Namespaces whose id is cached in the configuration file
+# (to avoid duplicates)
+my %cached_mw_namespace_id;
+
+# Return MediaWiki id for a canonical namespace name.
+# Ex.: "File", "Project".
+sub get_mw_namespace_id {
+ mw_connect_maybe();
+ my $name = shift;
+
+ if (!exists $namespace_id{$name}) {
+ # Look at the configuration file to see if the record for that
+ # namespace is already cached. Namespaces are stored in the form
+ # "Name_of_namespace:Id_namespace", e.g. "File:6".
+ my @temp = split(/[\n]/, run_git("config --get-all remote."
+ . $remotename .".namespaceCache"));
+ chomp(@temp);
+ foreach my $ns (@temp) {
+ my ($n, $id) = split(/:/, $ns);
+ if ($id eq 'notANameSpace') {
+ $namespace_id{$n} = {is_namespace => 0};
+ } else {
+ $namespace_id{$n} = {is_namespace => 1, id => $id};
+ }
+ $cached_mw_namespace_id{$n} = 1;
+ }
+ }
+
+ if (!exists $namespace_id{$name}) {
+ print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+ # NS not found => get namespace id from MW and store it in
+ # configuration file.
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'namespaces'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
+ if (defined($ns->{id}) && defined($ns->{canonical})) {
+ $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
+ if ($ns->{'*'}) {
+ # alias (e.g. French Fichier: as alias for canonical File:)
+ $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
+ }
+ }
+ }
+ }
+
+ my $ns = $namespace_id{$name};
+ my $id;
+
+ unless (defined $ns) {
+ print STDERR "No such namespace $name on MediaWiki.\n";
+ $ns = {is_namespace => 0};
+ $namespace_id{$name} = $ns;
+ }
+
+ if ($ns->{is_namespace}) {
+ $id = $ns->{id};
+ }
+
+ # Store "notANameSpace" as special value for inexisting namespaces
+ my $store_id = ($id || 'notANameSpace');
+
+ # Store explicitly requested namespaces on disk
+ if (!exists $cached_mw_namespace_id{$name}) {
+ run_git("config --add remote.". $remotename
+ .".namespaceCache \"". $name .":". $store_id ."\"");
+ $cached_mw_namespace_id{$name} = 1;
+ }
+ return $id;
+}
+
+sub get_mw_namespace_id_for_page {
+ if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+ return get_mw_namespace_id($namespace);
+ } else {
+ return;
+ }
+}
diff --git a/contrib/mw-to-git/t/.gitignore b/contrib/mw-to-git/t/.gitignore
new file mode 100644
index 0000000..a7a40b4
--- /dev/null
+++ b/contrib/mw-to-git/t/.gitignore
@@ -0,0 +1,4 @@
+WEB/
+wiki/
+trash directory.t*/
+test-results/
diff --git a/contrib/mw-to-git/t/Makefile b/contrib/mw-to-git/t/Makefile
new file mode 100644
index 0000000..f422203
--- /dev/null
+++ b/contrib/mw-to-git/t/Makefile
@@ -0,0 +1,31 @@
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Test git-remote-mediawiki
+
+all: test
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+.PHONY: help test clean all
+
+help:
+ @echo 'Run "$(MAKE) test" to launch test scripts'
+ @echo 'Run "$(MAKE) clean" to remove trash folders'
+
+test:
+ @for t in $(T); do \
+ echo "$$t"; \
+ "./$$t" || exit 1; \
+ done
+
+clean:
+ $(RM) -r 'trash directory'.*
diff --git a/contrib/mw-to-git/t/README b/contrib/mw-to-git/t/README
new file mode 100644
index 0000000..96e9739
--- /dev/null
+++ b/contrib/mw-to-git/t/README
@@ -0,0 +1,124 @@
+Tests for MediaWiki-to-Git
+==========================
+
+Introduction
+------------
+This manual describes how to install the git-remote-mediawiki test
+environment on a machine with git installed.
+
+Prerequisites
+-------------
+
+In order to run this test environment correctly, you will need to
+install the following packages (Debian/Ubuntu names, may need to be
+adapted for another distribution):
+
+* lighttpd
+* php5
+* php5-cgi
+* php5-cli
+* php5-curl
+* php5-sqlite
+
+Principles and Technical Choices
+--------------------------------
+
+The test environment makes it easy to install and manipulate one or
+several MediaWiki instances. To allow developers to run the test suite
+easily, the environment does not require root privileges (except to
+install the required packages if needed). It starts a web server
+instance under the user's account (using lighttpd greatly helps with
+that), and does not need a separate database daemon (thanks to the use
+of SQLite).
+
+Run the test environment
+------------------------
+
+Install a new wiki
+~~~~~~~~~~~~~~~~~~
+
+Once you have all the prerequisites, you need to install a MediaWiki
+instance on your machine. Even if you already have one, it is still
+strongly recommended to install one with the provided script. Here's
+how it works:
+
+a. change directory to contrib/mw-to-git/t/
+b. if needed, edit test.config to choose your installation parameters
+c. run `./install-wiki.sh install`
+d. check in your favourite web browser that your wiki is correctly
+ installed.
+
+Remove an existing wiki
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Edit the file test.config to fit the wiki you want to delete, and then
+execute the command `./install-wiki.sh delete` from the
+contrib/mw-to-git/t directory.
+
+Run the existing tests
+~~~~~~~~~~~~~~~~~~~~~~
+
+The provided tests are currently in the `contrib/mw-to-git/t` directory.
+The files are all the t936[0-9]-*.sh shell scripts.
+
+a. Run all tests:
+To do so, run "make test" from the contrib/mw-to-git/ directory.
+
+b. Run a specific test:
+To run a given test <test_name>, run ./<test_name> from the
+contrib/mw-to-git/t directory.
+
+How to create new tests
+-----------------------
+
+Available functions
+~~~~~~~~~~~~~~~~~~~
+
+The test environment of git-remote-mediawiki provides some functions
+useful to test its behaviour. For more details about the functions'
+parameters, please refer to the `test-gitmw-lib.sh` and
+`test-gitmw.pl` files. A short usage sketch follows the list below.
+
+** `test_check_wiki_precond`:
+Check if the tests must be skipped or not. Please use this function
+at the beginning of each new test file.
+
+** `wiki_getpage`:
+Fetch a given page from the wiki and put its content in the
+directory given as a parameter.
+
+** `wiki_delete_page`:
+Delete a given page from the wiki.
+
+** `wiki_edit_page`:
+Create or modify a given page in the wiki. You can specify several
+parameters, such as a summary for the edit, or add the page to a
+given category.
+See test-gitmw.pl for more details.
+
+** `wiki_getallpage`:
+Fetch all pages from the wiki into a given directory. The directory
+is created if it does not exist.
+
+** `test_diff_directories`:
+Compare the content of two directories. The content must be the same.
+Use this function to compare the content of a git directory and a wiki
+one created by wiki_getallpage.
+
+** `test_contains_N_files`:
+Check if the given directory contains a given number of files.
+
+** `wiki_page_exist`:
+Test if a given page exists on the wiki.
+
+** `wiki_reset`:
+Reset the wiki, i.e. flush the database. Use this function at the
+beginning of each new test, except if the test re-uses the same wiki
+(and history) as the previous test.
+
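+Here is a short, illustrative sketch (a hypothetical test, not an
+actual file of the suite) showing how these functions typically
+combine in a test body:
+
+ test_expect_success 'clone matches wiki content' '
+ wiki_reset &&
+ wiki_editpage foo "some content" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ wiki_getallpage ref_page &&
+ test_diff_directories mw_dir ref_page
+ '
+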
+How to write a new test
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Please follow the standards given by git; see git/t/README.
+New files should be named t936[0-9]-*.sh.
+Be sure to reset your wiki regularly with the function `wiki_reset`.
diff --git a/contrib/mw-to-git/t/install-wiki.sh b/contrib/mw-to-git/t/install-wiki.sh
new file mode 100755
index 0000000..c6d6fa3
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki.sh
@@ -0,0 +1,45 @@
+#!/bin/sh
+
+# This script installs or deletes a MediaWiki on your computer.
+# It requires a web server with PHP and SQLite running. In addition, if you
+# do not have MediaWiki sources on your computer, the option 'install'
+# downloads them for you.
+# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh
+
+WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)
+
+if test -z "$WIKI_TEST_DIR"
+then
+ WIKI_TEST_DIR=.
+fi
+
+. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
+usage () {
+ echo "Usage: "
+ echo " ./install-wiki.sh <install | delete | --help>"
+ echo " install | -i : Install a wiki on your computer."
+ echo " delete | -d : Delete the wiki and all its pages and "
+ echo " content."
+}
+
+
+# Argument: install, delete, --help | -h
+case "$1" in
+ "install" | "-i")
+ wiki_install
+ exit 0
+ ;;
+ "delete" | "-d")
+ wiki_delete
+ exit 0
+ ;;
+ "--help" | "-h")
+ usage
+ exit 0
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+esac
diff --git a/contrib/mw-to-git/t/install-wiki/.gitignore b/contrib/mw-to-git/t/install-wiki/.gitignore
new file mode 100644
index 0000000..b5a2a44
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/.gitignore
@@ -0,0 +1 @@
+wikidb.sqlite
diff --git a/contrib/mw-to-git/t/install-wiki/LocalSettings.php b/contrib/mw-to-git/t/install-wiki/LocalSettings.php
new file mode 100644
index 0000000..29f1251
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/LocalSettings.php
@@ -0,0 +1,129 @@
+<?php
+# This file was automatically generated by the MediaWiki 1.19.0
+# installer. If you make manual changes, please keep track in case you
+# need to recreate them later.
+#
+# See includes/DefaultSettings.php for all configurable settings
+# and their default values, but don't forget to make changes in _this_
+# file, not there.
+#
+# Further documentation for configuration settings may be found at:
+# http://www.mediawiki.org/wiki/Manual:Configuration_settings
+
+# Protect against web entry
+if ( !defined( 'MEDIAWIKI' ) ) {
+ exit;
+}
+
+## Uncomment this to disable output compression
+# $wgDisableOutputCompression = true;
+
+$wgSitename = "Git-MediaWiki-Test";
+$wgMetaNamespace = "Git-MediaWiki-Test";
+
+## The URL base path to the directory containing the wiki;
+## defaults for all runtime URL paths are based off of this.
+## For more information on customizing the URLs please see:
+## http://www.mediawiki.org/wiki/Manual:Short_URL
+$wgScriptPath = "@WG_SCRIPT_PATH@";
+$wgScriptExtension = ".php";
+
+## The protocol and server name to use in fully-qualified URLs
+$wgServer = "@WG_SERVER@";
+
+## The relative URL path to the skins directory
+$wgStylePath = "$wgScriptPath/skins";
+
+## The relative URL path to the logo. Make sure you change this from the default,
+## or else you'll overwrite your logo when you upgrade!
+$wgLogo = "$wgStylePath/common/images/wiki.png";
+
+## UPO means: this is also a user preference option
+
+$wgEnableEmail = true;
+$wgEnableUserEmail = true; # UPO
+
+$wgEmergencyContact = "apache@localhost";
+$wgPasswordSender = "apache@localhost";
+
+$wgEnotifUserTalk = false; # UPO
+$wgEnotifWatchlist = false; # UPO
+$wgEmailAuthentication = true;
+
+## Database settings
+$wgDBtype = "sqlite";
+$wgDBserver = "";
+$wgDBname = "@WG_SQLITE_DATAFILE@";
+$wgDBuser = "";
+$wgDBpassword = "";
+
+# SQLite-specific settings
+$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@";
+
+
+## Shared memory settings
+$wgMainCacheType = CACHE_NONE;
+$wgMemCachedServers = array();
+
+## To enable image uploads, make sure the 'images' directory
+## is writable, then set this to true:
+$wgEnableUploads = true;
+$wgUseImageMagick = true;
+$wgImageMagickConvertCommand = "@CONVERT@";
+$wgFileExtensions[] = 'txt';
+
+# InstantCommons allows wiki to use images from http://commons.wikimedia.org
+$wgUseInstantCommons = false;
+
+## If you use ImageMagick (or any other shell command) on a
+## Linux server, this will need to be set to the name of an
+## available UTF-8 locale
+$wgShellLocale = "en_US.utf8";
+
+## If you want to use image uploads under safe mode,
+## create the directories images/archive, images/thumb and
+## images/temp, and make them all writable. Then uncomment
+## this, if it's not already uncommented:
+#$wgHashedUploadDirectory = false;
+
+## Set $wgCacheDirectory to a writable directory on the web server
+## to make your wiki go slightly faster. The directory should not
+## be publically accessible from the web.
+#$wgCacheDirectory = "$IP/cache";
+
+# Site language code, should be one of the list in ./languages/Names.php
+$wgLanguageCode = "en";
+
+$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
+#$wgSecretKey = "@SECRETKEY@";
+
+
+# Site upgrade key. Must be set to a string (default provided) to turn on the
+# web installer while LocalSettings.php is in place
+$wgUpgradeKey = "ddae7dc87cd0a645";
+
+## Default skin: you can change the default skin. Use the internal symbolic
+## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
+$wgDefaultSkin = "vector";
+
+## For attaching licensing metadata to pages, and displaying an
+## appropriate copyright notice / icon. GNU Free Documentation
+## License and Creative Commons licenses are supported so far.
+$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
+$wgRightsUrl = "";
+$wgRightsText = "";
+$wgRightsIcon = "";
+
+# Path to the GNU diff3 utility. Used for conflict resolution.
+$wgDiff3 = "/usr/bin/diff3";
+
+# Query string length limit for ResourceLoader. You should only set this if
+# your web server has a query string length limit (then set it to that limit),
+# or if you have suhosin.get.max_value_length set in php.ini (then set it to
+# that value)
+$wgResourceLoaderMaxQueryLength = -1;
+
+
+
+# End of automatically generated settings.
+# Add more configuration options below.
diff --git a/contrib/mw-to-git/t/install-wiki/db_install.php b/contrib/mw-to-git/t/install-wiki/db_install.php
new file mode 100644
index 0000000..0f3f4e0
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/db_install.php
@@ -0,0 +1,120 @@
+<?php
+/**
+ * This script generates a SQLite database for MediaWiki version 1.19.0.
+ * You must specify the name of the database (argument 1), the login of
+ * the admin (argument 2), its password (argument 3), the folder where
+ * the database file is located (absolute path, argument 4) and the web
+ * server port (argument 5).
+ * It is used by the script install-wiki.sh in order to ease the
+ * installation of a MediaWiki instance.
+ *
+ * In order to generate a SQLite database file, MediaWiki asks the user
+ * to submit some forms in a web browser. This script simulates this
+ * behavior through the functions <get> and <submit>.
+ *
+ */
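+/*
+ * Example invocation (hypothetical values, matching the argument order
+ * read below):
+ *   php db_install.php wikidb WikiAdmin AdminPass /tmp/wiki 8080
+ */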
+$argc = $_SERVER['argc'];
+$argv = $_SERVER['argv'];
+
+$login = $argv[2];
+$pass = $argv[3];
+$tmp = $argv[4];
+$port = $argv[5];
+
+$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
+$db_dir = urlencode($tmp);
+$tmp_cookie = tempnam($tmp, "COOKIE_");
+/*
+ * Fetches a page with cURL.
+ */
+function get($page_name = "") {
+ $curl = curl_init();
+ $page_name_add = "";
+ if ($page_name != "") {
+ $page_name_add = '?page='.$page_name;
+ }
+ $url = $GLOBALS['url'].$page_name_add;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_HEADER, true);
+ curl_setopt($curl, CURLOPT_URL, $url);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return $page;
+}
+
+/*
+ * Submits a form with cURL.
+ */
+function submit($page_name, $option = "") {
+ $curl = curl_init();
+ $datapost = 'submit-continue=Continue+%E2%86%92';
+ if ($option != "") {
+ $datapost = $option.'&'.$datapost;
+ }
+ $url = $GLOBALS['url'].'?page='.$page_name;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_URL, $url);
+ curl_setopt($curl, CURLOPT_POST, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return "$page";
+}
+
+/*
+ * Here starts the script: it simulates the behavior of a user
+ * submitting forms to generate the database file.
+ * Note that this simulation was made for MediaWiki version 1.19.0;
+ * we can't assume it works with other versions.
+ *
+ */
+
+$page = get();
+if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
+ $page, $matches)) {
+ echo "Unexpected content for page downloaded:\n";
+ echo "$page";
+ die;
+};
+$timestamp = $matches[1];
+$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
+$page = submit('Language', $language);
+
+submit('Welcome');
+
+$db_config = 'DBType=sqlite';
+$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
+$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
+submit('DBConnect', $db_config);
+
+$wiki_config = 'config_wgSitename=TEST';
+$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
+$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
+$wiki_config = $wiki_config.'&config__AdminName='.$login;
+
+$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
+$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
+
+$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
+$wiki_config = $wiki_config.'&config__SkipOptional=skip';
+submit('Name', $wiki_config);
+submit('Install');
+submit('Install');
+
+unlink($tmp_cookie);
+?>
diff --git a/contrib/mw-to-git/t/push-pull-tests.sh b/contrib/mw-to-git/t/push-pull-tests.sh
new file mode 100644
index 0000000..9da2dc5
--- /dev/null
+++ b/contrib/mw-to-git/t/push-pull-tests.sh
@@ -0,0 +1,144 @@
+test_push_pull () {
+
+ test_expect_success 'Git pull works after adding a new wiki page' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_editpage Foo "page created after the git clone" false &&
+
+ (
+ cd mw_dir_1 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+ '
+
+ test_expect_success 'Git pull works after editing a wiki page' '
+ wiki_reset &&
+
+ wiki_editpage Foo "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage Foo "new line added on the wiki" true &&
+
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+ '
+
+ test_expect_success 'git pull works on conflict handled by auto-merge' '
+ wiki_reset &&
+
+ wiki_editpage Foo "1 init
+3
+5
+ " false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+
+ wiki_editpage Foo "1 init
+2 content added on wiki after clone
+3
+5
+ " false &&
+
+ (
+ cd mw_dir_3 &&
+ echo "1 init
+3
+4 content added on git after clone
+5
+" >Foo.mw &&
+ git commit -am "conflicting change on foo" &&
+ git pull &&
+ git push
+ )
+ '
+
+ test_expect_success 'Git push works after adding a file .mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ wiki_getallpage ref_page_4 &&
+ (
+ cd mw_dir_4 &&
+ test_path_is_missing Foo.mw &&
+ touch Foo.mw &&
+ echo "hello world" >>Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Foo" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_4 &&
+ test_diff_directories mw_dir_4 ref_page_4
+ '
+
+ test_expect_success 'Git push works after editing a file .mw' '
+ wiki_reset &&
+ wiki_editpage "Foo" "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+
+ (
+ cd mw_dir_5 &&
+ echo "new line added in the file Foo.mw" >>Foo.mw &&
+ git commit -am "edit file Foo.mw" &&
+ git push
+ ) &&
+
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5
+ '
+
+ test_expect_failure 'Git push works after deleting a file' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+
+ (
+ cd mw_dir_6 &&
+ git rm Foo.mw &&
+ git commit -am "page Foo.mw deleted" &&
+ git push
+ ) &&
+
+ test_must_fail wiki_page_exist Foo
+ '
+
+ test_expect_success 'Merge conflict expected and solving it' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_editpage Foo "1 conflict
+3 wiki
+4" false &&
+
+ (
+ cd mw_dir_7 &&
+ echo "1 conflict
+2 git
+4" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "conflict created" &&
+ test_must_fail git pull &&
+ "$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw &&
+ git commit -am "merge conflict solved" &&
+ git push
+ )
+ '
+
+ test_expect_failure 'git pull works after deleting a wiki page' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+
+ wiki_delete_page Foo &&
+ (
+ cd mw_dir_8 &&
+ git pull &&
+ test_path_is_missing Foo.mw
+ )
+ '
+}
diff --git a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
new file mode 100755
index 0000000..811a90c
--- /dev/null
+++ b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
@@ -0,0 +1,257 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+
+test_description='Test the Git Mediawiki remote helper: git clone'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone creates the expected git log with one file' '
+ wiki_reset &&
+ wiki_editpage foo "this is not important" false -c cat -s "this must be the same" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ (
+ cd mw_dir_1 &&
+ git log --format=%s HEAD^..HEAD >log.tmp
+ ) &&
+ echo "this must be the same" >msg.tmp &&
+ diff -b mw_dir_1/log.tmp msg.tmp
+'
+
+
+test_expect_success 'Git clone creates the expected git log with multiple files' '
+ wiki_reset &&
+ wiki_editpage daddy "this is not important" false -s="this must be the same" &&
+ wiki_editpage daddy "neither is this" true -s="this must also be the same" &&
+ wiki_editpage daddy "neither is this" true -s="same same same" &&
+ wiki_editpage dj "dont care" false -s="identical" &&
+ wiki_editpage dj "dont care either" true -s="identical too" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ (
+ cd mw_dir_2 &&
+ git log --format=%s Daddy.mw >logDaddy.tmp &&
+ git log --format=%s Dj.mw >logDj.tmp
+ ) &&
+ echo "same same same" >msgDaddy.tmp &&
+ echo "this must also be the same" >>msgDaddy.tmp &&
+ echo "this must be the same" >>msgDaddy.tmp &&
+ echo "identical too" >msgDj.tmp &&
+ echo "identical" >>msgDj.tmp &&
+ diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
+ diff -b mw_dir_2/logDj.tmp msgDj.tmp
+'
+
+
+test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ test_contains_N_files mw_dir_3 1 &&
+ test_path_is_file mw_dir_3/Main_Page.mw
+'
+
+test_expect_success 'Git clone does not fetch a deleted page' '
+ wiki_reset &&
+ wiki_editpage foo "this page must be deleted before the clone" false &&
+ wiki_delete_page foo &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 1 &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ test_path_is_missing mw_dir_4/Foo.mw
+'
+
+test_expect_success 'Git clone works with page added' '
+ wiki_reset &&
+ wiki_editpage foo " I will be cloned" false &&
+ wiki_editpage bar "I will be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5 &&
+ wiki_delete_page foo &&
+ wiki_delete_page bar
+'
+
+test_expect_success 'Git clone works with an edited page ' '
+ wiki_reset &&
+ wiki_editpage foo "this page will be edited" \
+ false -s "first edition of page foo"&&
+ wiki_editpage foo "this page has been edited and must be on the clone " true &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ test_path_is_file mw_dir_6/Foo.mw &&
+ test_path_is_file mw_dir_6/Main_Page.mw &&
+ wiki_getallpage mw_dir_6/page_ref_6 &&
+ test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 &&
+ (
+ cd mw_dir_6 &&
+ git log --format=%s HEAD^ Foo.mw > ../Foo.log
+ ) &&
+ echo "first edition of page foo" > FooExpect.log &&
+ diff FooExpect.log Foo.log
+'
+
+
+test_expect_success 'Git clone works with several pages and some deleted ' '
+ wiki_reset &&
+ wiki_editpage foo "this page will not be deleted" false &&
+ wiki_editpage bar "I must not be erased" false &&
+ wiki_editpage namnam "I will not be there at the end" false &&
+ wiki_editpage nyancat "nyan nyan nyan delete me" false &&
+ wiki_delete_page namnam &&
+ wiki_delete_page nyancat &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ test_path_is_file mw_dir_7/Foo.mw &&
+ test_path_is_file mw_dir_7/Bar.mw &&
+ test_path_is_missing mw_dir_7/Namnam.mw &&
+ test_path_is_missing mw_dir_7/Nyancat.mw &&
+ wiki_getallpage mw_dir_7/page_ref_7 &&
+ test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
+'
+
+
+test_expect_success 'Git clone works with one specific page cloned ' '
+ wiki_reset &&
+ wiki_editpage foo "I will not be cloned" false &&
+ wiki_editpage bar "Do not clone me" false &&
+ wiki_editpage namnam "I will be cloned :)" false -s="this log must stay" &&
+ wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
+ git clone -c remote.origin.pages=namnam \
+ mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_contains_N_files mw_dir_8 1 &&
+ test_path_is_file mw_dir_8/Namnam.mw &&
+ test_path_is_missing mw_dir_8/Main_Page.mw &&
+ (
+ cd mw_dir_8 &&
+ echo "this log must stay" >msg.tmp &&
+ git log --format=%s >log.tmp &&
+ diff -b msg.tmp log.tmp
+ ) &&
+ wiki_check_content mw_dir_8/Namnam.mw Namnam
+'
+
+test_expect_success 'Git clone works with multiple specific page cloned ' '
+ wiki_reset &&
+ wiki_editpage foo "I will be there" false &&
+ wiki_editpage bar "I will not disapear" false &&
+ wiki_editpage namnam "I be erased" false &&
+ wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
+ wiki_delete_page namnam &&
+ git clone -c remote.origin.pages="foo bar nyancat namnam" \
+ mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ test_contains_N_files mw_dir_9 3 &&
+ test_path_is_missing mw_dir_9/Namnam.mw &&
+ test_path_is_file mw_dir_9/Foo.mw &&
+ test_path_is_file mw_dir_9/Nyancat.mw &&
+ test_path_is_file mw_dir_9/Bar.mw &&
+ wiki_check_content mw_dir_9/Foo.mw Foo &&
+ wiki_check_content mw_dir_9/Bar.mw Bar &&
+ wiki_check_content mw_dir_9/Nyancat.mw Nyancat
+'
+
+test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
+ wiki_reset &&
+ wiki_editpage foo "foo 1" false &&
+ wiki_editpage bar "bar 1" false &&
+ wiki_editpage dummy "dummy 1" false &&
+ wiki_editpage cloned_1 "cloned_1 1" false &&
+ wiki_editpage cloned_2 "cloned_2 2" false &&
+ wiki_editpage cloned_3 "cloned_3 3" false &&
+ mkdir -p ref_page_10 &&
+ wiki_getpage cloned_1 ref_page_10 &&
+ wiki_getpage cloned_2 ref_page_10 &&
+ wiki_getpage cloned_3 ref_page_10 &&
+ git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
+ mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+test_expect_success 'Git clone works with the shallow option' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, should be cloned" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ test_contains_N_files mw_dir_11 4 &&
+ test_path_is_file mw_dir_11/Nyan.mw &&
+ test_path_is_file mw_dir_11/Foo.mw &&
+ test_path_is_file mw_dir_11/Bar.mw &&
+ test_path_is_file mw_dir_11/Main_Page.mw &&
+ (
+ cd mw_dir_11 &&
+ test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+ test `git log --oneline Foo.mw | wc -l` -eq 1 &&
+ test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_11/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_11/Foo.mw Foo &&
+ wiki_check_content mw_dir_11/Bar.mw Bar &&
+ wiki_check_content mw_dir_11/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Git clone works with the shallow option with a delete page' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, will be deleted" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ wiki_delete_page foo &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ test_contains_N_files mw_dir_12 3 &&
+ test_path_is_file mw_dir_12/Nyan.mw &&
+ test_path_is_missing mw_dir_12/Foo.mw &&
+ test_path_is_file mw_dir_12/Bar.mw &&
+ test_path_is_file mw_dir_12/Main_Page.mw &&
+ (
+ cd mw_dir_12 &&
+ test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+ test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_12/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_12/Bar.mw Bar &&
+ wiki_check_content mw_dir_12/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Test of fetching a category' '
+ wiki_reset &&
+ wiki_editpage Foo "I will be cloned" false -c=Category &&
+ wiki_editpage Bar "Meet me on the repository" false -c=Category &&
+ wiki_editpage Dummy "I will not come" false &&
+ wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
+ git clone -c remote.origin.categories="Category" \
+ mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ wiki_getallpage ref_page_13 Category &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+test_expect_success 'Test of resilience to category modifications on the wiki for clone' '
+ wiki_reset &&
+ wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
+ wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
+ wiki_editpage Normalone "this page will not be modified and will be on git" false -c=Catone &&
+ wiki_editpage Notconsidered "this page will not appear on local" false &&
+ wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
+ wiki_editpage Tobeedited "this page has been modified" true -c=Catone &&
+ wiki_delete_page Tobedeleted &&
+ git clone -c remote.origin.categories="Catone" \
+ mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ wiki_getallpage ref_page_14 Catone &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
new file mode 100755
index 0000000..9ea2014
--- /dev/null
+++ b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_push_pull
+
+test_done
diff --git a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
new file mode 100755
index 0000000..246d47d
--- /dev/null
+++ b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
@@ -0,0 +1,321 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test git-mediawiki with special characters in filenames'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone works for a wiki with accents in the page names' '
+ wiki_reset &&
+ wiki_editpage féé "This page must be délétéd before clone" false &&
+ wiki_editpage kèè "This page must be deleted before clone" false &&
+ wiki_editpage hàà "This page must be deleted before clone" false &&
+ wiki_editpage kîî "This page must be deleted before clone" false &&
+ wiki_editpage foo "This page must be deleted before clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+'
+
+
+test_expect_success 'Git pull works with a wiki with accents in the pages names' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage éàîôû "This page must be pulled" false &&
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+'
+
+
+test_expect_success 'Cloning a chosen page works with accents' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=kîî \
+ mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ wiki_check_content mw_dir_3/Kîî.mw Kîî &&
+ test_path_is_file mw_dir_3/Kîî.mw &&
+ rm -rf mw_dir_3
+'
+
+
+test_expect_success 'The shallow option works with accents' '
+ wiki_reset &&
+ wiki_editpage néoà "1st revision, should not be cloned" false &&
+ wiki_editpage néoà "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 2 &&
+ test_path_is_file mw_dir_4/Néoà.mw &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ (
+ cd mw_dir_4 &&
+ test `git log --oneline Néoà.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_4/Néoà.mw Néoà &&
+ wiki_check_content mw_dir_4/Main_Page.mw Main_Page
+'
+
+
+test_expect_success 'Cloning works when page name first letter has an accent' '
+ wiki_reset &&
+ wiki_editpage îî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=îî \
+ mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ test_path_is_file mw_dir_5/Îî.mw &&
+ wiki_check_content mw_dir_5/Îî.mw Îî
+'
+
+
+test_expect_success 'Git push works with a wiki with accents' '
+ wiki_reset &&
+ wiki_editpage féé "lots of accents : éèàÖ" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ (
+ cd mw_dir_6 &&
+ echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw &&
+ git add Pîkächû.mw &&
+ git commit -m "A new page appears" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_6 &&
+ test_diff_directories mw_dir_6 ref_page_6
+'
+
+test_expect_success 'Git clone works with accents and spaces' '
+ wiki_reset &&
+ wiki_editpage "é à î" "this page must be délété before the clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_getallpage ref_page_7 &&
+ test_diff_directories mw_dir_7 ref_page_7
+'
+
+test_expect_success 'character $ in page name (mw -> git)' '
+ wiki_reset &&
+ wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_path_is_file mw_dir_8/File_\$_foo.mw &&
+ wiki_getallpage ref_page_8 &&
+ test_diff_directories mw_dir_8 ref_page_8
+'
+
+
+
+test_expect_success 'character $ in file name (git -> mw) ' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ (
+ cd mw_dir_9 &&
+ echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
+ git add . &&
+ git commit -am "file File_\$_foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_9 &&
+ test_diff_directories mw_dir_9 ref_page_9
+'
+
+
+test_expect_failure 'capital at the beginning of file names' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ (
+ cd mw_dir_10 &&
+ echo "my new file foo" >foo.mw &&
+ echo "my new file Foo... Finger crossed" >Foo.mw &&
+ git add . &&
+ git commit -am "file foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+
+test_expect_failure 'special character at the beginning of file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ wiki_editpage {char_1 "expect to be renamed {char_1" false &&
+ wiki_editpage [char_2 "expect to be renamed [char_2" false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/{char_1 &&
+ test_path_is_file mw_dir_11/[char_2
+'
+
+test_expect_success 'Pull page with title containing ":" other than namespace separator' '
+ wiki_editpage Foo:Bar content false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/Foo:Bar.mw
+'
+
+test_expect_success 'Push page with title containing ":" other than namespace separator' '
+ (
+ cd mw_dir_11 &&
+ echo content >NotANameSpace:Page.mw &&
+ git add NotANameSpace:Page.mw &&
+ git commit -m "add page with colon" &&
+ git push
+ ) &&
+ wiki_page_exist NotANameSpace:Page
+'
+
+test_expect_success 'test of correct formatting for file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
+ wiki_editpage char_%_5b_2 "expect to be renamed char{_2" false &&
+ (
+ cd mw_dir_12 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_12/Char\{_1.mw &&
+ test_path_is_file mw_dir_12/Char\[_2.mw &&
+ wiki_getallpage ref_page_12 &&
+ mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
+ mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
+ test_diff_directories mw_dir_12 ref_page_12
+'
+
+
+test_expect_failure 'test of correct formatting for file name beginning with special character' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ (
+ cd mw_dir_13 &&
+ echo "my new file {char_1" >\{char_1.mw &&
+ echo "my new file [char_2" >\[char_2.mw &&
+ git add . &&
+ git commit -am "commiting some exotic file name..." &&
+ git push &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_13 &&
+ test_path_is_file ref_page_13/{char_1.mw &&
+ test_path_is_file ref_page_13/[char_2.mw &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+
+test_expect_success 'test of correct formating for file name from git to mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ (
+ cd mw_dir_14 &&
+ echo "my new file char{_1" >Char\{_1.mw &&
+ echo "my new file char[_2" >Char\[_2.mw &&
+ git add . &&
+ git commit -m "commiting some exotic file name..." &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_14 &&
+ mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
+ mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+
+test_expect_success 'git clone with /' '
+ wiki_reset &&
+ wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
+ wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
+'
+
+
+test_expect_success 'git push with /' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
+ (
+ cd mw_dir_16 &&
+ git add %2Ffo%2Fo.mw &&
+ git commit -m " %2Ffo%2Fo added" &&
+ git push
+ ) &&
+ wiki_page_exist \/fo\/o &&
+ wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o
+
+'
+
+
+test_expect_success 'git clone with \' '
+ wiki_reset &&
+ wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_path_is_file mw_dir_17/\\ko\\o.mw &&
+ wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
+'
+
+
+test_expect_success 'git push with \' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
+ echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
+ (
+ cd mw_dir_18 &&
+ git add \\ko\\o.mw &&
+ git commit -m " \\ko\\o added" &&
+ git push
+ )&&
+ wiki_page_exist \\ko\\o &&
+ wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o
+
+'
+
+test_expect_success 'git clone with \ in format control' '
+ wiki_reset &&
+ wiki_editpage \\no\\o "this is not important" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
+ test_path_is_file mw_dir_19/\\no\\o.mw &&
+ wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
+'
+
+
+test_expect_success 'git push with \ in format control' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
+ echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
+ (
+ cd mw_dir_20 &&
+ git add \\fo\\o.mw &&
+ git commit -m " \\fo\\o added" &&
+ git push
+ )&&
+ wiki_page_exist \\fo\\o &&
+ wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o
+
+'
+
+
+test_done
diff --git a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
new file mode 100755
index 0000000..5a03739
--- /dev/null
+++ b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
@@ -0,0 +1,198 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_git_reimport () {
+ git -c remote.origin.dumbPush=true push &&
+ git -c remote.origin.mediaImport=true pull --rebase
+}
+
+# Don't bother with permissions, be administrator by default
+test_expect_success 'setup config' '
+ git config --global remote.origin.mwLogin WikiAdmin &&
+ git config --global remote.origin.mwPassword AdminPass &&
+ test_might_fail git config --global --unset remote.origin.mediaImport
+'
+
+test_expect_success 'git push can upload media (File:) files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file" &&
+ git push &&
+ "$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file with binary content" &&
+ git push
+ )
+'
+
+test_expect_success 'git clone works on previously created wiki with media files' '
+ test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+ git clone -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
+ (cd mw_dir_clone && git checkout HEAD^) &&
+ (cd mw_dir && git checkout HEAD^) &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
+'
+
+test_expect_success 'git push & pull work with locally renamed media files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a file" &&
+ git mv Foo.txt Bar.txt &&
+ git commit -m "Rename a file" &&
+ test_git_reimport &&
+ echo "A File" >expect &&
+ test_cmp expect Bar.txt &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push can propagate local page deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ test_path_is_missing Foo.mw &&
+ echo "hello world" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Add the page Foo" &&
+ git push &&
+ rm -f Foo.mw &&
+ git commit -am "Delete the page Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.mw
+ )
+'
+
+test_expect_success 'git push can propagate local media file deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+# Test failure: the file is correctly uploaded, and then deleted, but
+# as no page links to it, the import (which looks at page revisions)
+# doesn't notice the file deletion on the wiki. We fetch the list of
+# files from the wiki, but as the file is deleted, it doesn't appear.
+test_expect_failure 'git pull correctly imports media file deletion when no page link to it' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git push &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push properly warns about insufficient permissions' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >foo.forbidden &&
+ git add foo.forbidden &&
+ git commit -m "add a file" &&
+ git push 2>actual &&
+ test_i18ngrep "foo.forbidden is not a permitted file" actual
+ )
+'
+
+test_expect_success 'setup a repository with media files' '
+ wiki_reset &&
+ wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
+ echo "File content" >File.txt &&
+ wiki_upload_file File.txt &&
+ echo "Another file content" >AnotherFile.txt &&
+ wiki_upload_file AnotherFile.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
+ git clone -c remote.origin.pages=testpage \
+ -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_when_finished "rm -rf mw_dir_15" &&
+ test_contains_N_files mw_dir_15 3 &&
+ test_path_is_file mw_dir_15/Testpage.mw &&
+ test_path_is_file mw_dir_15/File:File.txt.mw &&
+ test_path_is_file mw_dir_15/File.txt &&
+ test_path_is_missing mw_dir_15/Main_Page.mw &&
+ test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
+ test_path_is_missing mw_dir_15/AnotherFile.txt &&
+ wiki_check_content mw_dir_15/Testpage.mw Testpage &&
+ test_cmp mw_dir_15/File.txt File.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
+ test_when_finished "rm -rf mw_dir_16" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ test_contains_N_files mw_dir_16 1 &&
+ test_path_is_file mw_dir_16/Testpage.mw &&
+ test_path_is_missing mw_dir_16/File:File.txt.mw &&
+ test_path_is_missing mw_dir_16/File.txt &&
+ test_path_is_missing mw_dir_16/Main_Page.mw &&
+ wiki_check_content mw_dir_16/Testpage.mw Testpage
+'
+
+# should behave like mediaimport=false
+test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
+ test_when_finished "rm -fr mw_dir_17" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_contains_N_files mw_dir_17 1 &&
+ test_path_is_file mw_dir_17/Testpage.mw &&
+ test_path_is_missing mw_dir_17/File:File.txt.mw &&
+ test_path_is_missing mw_dir_17/File.txt &&
+ test_path_is_missing mw_dir_17/Main_Page.mw &&
+ wiki_check_content mw_dir_17/Testpage.mw Testpage
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/t9364-pull-by-rev.sh b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
new file mode 100755
index 0000000..5c22457
--- /dev/null
+++ b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: git pull by revision'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'configuration' '
+ git config --global mediawiki.fetchStrategy by_rev
+'
+
+test_push_pull
+
+test_done
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
new file mode 100755
index 0000000..3b2cfac
--- /dev/null
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -0,0 +1,435 @@
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+#
+# CONFIGURATION VARIABLES
+# You might want to change these
+#
+
+. ./test.config
+
+CURR_DIR=$(pwd)
+TEST_OUTPUT_DIRECTORY=$(pwd)
+TEST_DIRECTORY="$CURR_DIR"/../../../t
+
+export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
+
+# When the system web server is used (LIGHTTPD=false), the wiki is
+# served on port 80; otherwise lighttpd serves it from our own document
+# root. Adjust PORT before computing WIKI_URL so the URL stays correct.
+if test "$LIGHTTPD" = "false" ; then
+ PORT=80
+else
+ WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
+fi
+
+WIKI_URL="http://$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
+
+wiki_upload_file () {
+ "$CURR_DIR"/test-gitmw.pl upload_file "$@"
+}
+
+wiki_getpage () {
+ "$CURR_DIR"/test-gitmw.pl get_page "$@"
+}
+
+wiki_delete_page () {
+ "$CURR_DIR"/test-gitmw.pl delete_page "$@"
+}
+
+wiki_editpage () {
+ "$CURR_DIR"/test-gitmw.pl edit_page "$@"
+}
+
+die () {
+ die_with_status 1 "$@"
+}
+
+die_with_status () {
+ status=$1
+ shift
+ echo >&2 "$*"
+ exit "$status"
+}
+
+
+# Check the preconditions to run git-remote-mediawiki's tests
+test_check_precond () {
+ if ! test_have_prereq PERL
+ then
+ skip_all='skipping gateway git-mw tests, perl not available'
+ test_done
+ fi
+
+ if [ ! -f "$GIT_BUILD_DIR"/git-remote-mediawiki ];
+ then
+ echo "No remote mediawiki for git found. Copying it in git"
+ echo "cp $GIT_BUILD_DIR/contrib/mw-to-git/git-remote-mediawiki $GIT_BUILD_DIR/"
+ ln -s "$GIT_BUILD_DIR"/contrib/mw-to-git/git-remote-mediawiki "$GIT_BUILD_DIR"
+ fi
+
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
+ then
+ skip_all='skipping gateway git-mw tests, no mediawiki found'
+ test_done
+ fi
+}
+
+# test_diff_directories <dir_git> <dir_wiki>
+#
+# Compare the contents of directories <dir_git> and <dir_wiki> with diff
+# and error out if they do not match. Only the *.mw files are compared,
+# so the .git directory is ignored in the process.
+# Warning: the first argument MUST be the directory containing the git data.
+test_diff_directories () {
+ rm -rf "$1_tmp"
+ mkdir -p "$1_tmp"
+ cp "$1"/*.mw "$1_tmp"
+ diff -r -b "$1_tmp" "$2"
+}
+
+# $1=<dir>
+# $2=<N>
+#
+# Check that <dir> contains exactly <N> files
+test_contains_N_files () {
+ if test $(ls -- "$1" | wc -l) -ne "$2"; then
+ echo "directory $1 should contain $2 files"
+ echo "it contains these files:"
+ ls "$1"
+ false
+ fi
+}
+
+
+# wiki_check_content <file_name> <page_name>
+#
+# Compare the contents of the file <file_name> and the wiki page
+# <page_name>, and exit with an error if they do not match.
+wiki_check_content () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$2" wiki_tmp
+ # Replace the forbidden character "/" in the page name.
+ page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
+
+ if ! diff -b "$1" wiki_tmp/"$page_name".mw
+ then
+ rm -rf wiki_tmp
+ error "ERROR: page $2 is missing from the wiki or differs from $1"
+ fi
+ rm -rf wiki_tmp
+}
+
+# wiki_page_exist <page_name>
+#
+# Check that the page <page_name> exists on the wiki and exit with an
+# error if it is absent.
+wiki_page_exist () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$1" wiki_tmp
+ page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
+ if test -f wiki_tmp/"$page_name".mw ; then
+ rm -rf wiki_tmp
+ else
+ rm -rf wiki_tmp
+ error "test failed: file $1 not found on wiki"
+ fi
+}
+
+# wiki_getallpagename
+#
+# Fetch the name of each page on the wiki.
+wiki_getallpagename () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename
+}
+
+# wiki_getallpagecategory <category>
+#
+# Fetch the name of each page belonging to <category> on the wiki.
+wiki_getallpagecategory () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename "$@"
+}
+
+# wiki_getallpage <dest_dir> [<category>]
+#
+# Fetch all the pages from the wiki and place them in the directory
+# <dest_dir>.
+# If <category> is defined, wiki_getallpage fetches only the pages
+# belonging to <category>.
+wiki_getallpage () {
+ if test -z "$2";
+ then
+ wiki_getallpagename
+ else
+ wiki_getallpagecategory "$2"
+ fi
+ mkdir -p "$1"
+ while read -r line; do
+ wiki_getpage "$line" "$1"
+ done < all.txt
+}
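+
+# A typical use (hypothetical directory and category names), assuming a
+# reachable test wiki:
+#
+#	wiki_getallpage ref_page_dir		# fetch every page
+#	wiki_getallpage ref_page_dir GitTests	# only pages in [[Category:GitTests]]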
+
+# ================= Install part =================
+
+error () {
+ echo "$@" >&2
+ exit 1
+}
+
+# config_lighttpd
+#
+# Create the configuration files and the folders necessary to start lighttpd.
+# Overwrite any existing file.
+config_lighttpd () {
+ mkdir -p $WEB
+ mkdir -p $WEB_TMP
+ mkdir -p $WEB_WWW
+ cat > $WEB/lighttpd.conf <<EOF
+ server.document-root = "$CURR_DIR/$WEB_WWW"
+ server.port = $PORT
+ server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
+
+ server.modules = (
+ "mod_rewrite",
+ "mod_redirect",
+ "mod_access",
+ "mod_accesslog",
+ "mod_fastcgi"
+ )
+
+ index-file.names = ("index.php" , "index.html")
+
+ mimetype.assign = (
+ ".pdf" => "application/pdf",
+ ".sig" => "application/pgp-signature",
+ ".spl" => "application/futuresplash",
+ ".class" => "application/octet-stream",
+ ".ps" => "application/postscript",
+ ".torrent" => "application/x-bittorrent",
+ ".dvi" => "application/x-dvi",
+ ".gz" => "application/x-gzip",
+ ".pac" => "application/x-ns-proxy-autoconfig",
+ ".swf" => "application/x-shockwave-flash",
+ ".tar.gz" => "application/x-tgz",
+ ".tgz" => "application/x-tgz",
+ ".tar" => "application/x-tar",
+ ".zip" => "application/zip",
+ ".mp3" => "audio/mpeg",
+ ".m3u" => "audio/x-mpegurl",
+ ".wma" => "audio/x-ms-wma",
+ ".wax" => "audio/x-ms-wax",
+ ".ogg" => "application/ogg",
+ ".wav" => "audio/x-wav",
+ ".gif" => "image/gif",
+ ".jpg" => "image/jpeg",
+ ".jpeg" => "image/jpeg",
+ ".png" => "image/png",
+ ".xbm" => "image/x-xbitmap",
+ ".xpm" => "image/x-xpixmap",
+ ".xwd" => "image/x-xwindowdump",
+ ".css" => "text/css",
+ ".html" => "text/html",
+ ".htm" => "text/html",
+ ".js" => "text/javascript",
+ ".asc" => "text/plain",
+ ".c" => "text/plain",
+ ".cpp" => "text/plain",
+ ".log" => "text/plain",
+ ".conf" => "text/plain",
+ ".text" => "text/plain",
+ ".txt" => "text/plain",
+ ".dtd" => "text/xml",
+ ".xml" => "text/xml",
+ ".mpeg" => "video/mpeg",
+ ".mpg" => "video/mpeg",
+ ".mov" => "video/quicktime",
+ ".qt" => "video/quicktime",
+ ".avi" => "video/x-msvideo",
+ ".asf" => "video/x-ms-asf",
+ ".asx" => "video/x-ms-asf",
+ ".wmv" => "video/x-ms-wmv",
+ ".bz2" => "application/x-bzip",
+ ".tbz" => "application/x-bzip-compressed-tar",
+ ".tar.bz2" => "application/x-bzip-compressed-tar",
+ "" => "text/plain"
+ )
+
+ fastcgi.server = ( ".php" =>
+ ("localhost" =>
+ ( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
+ "bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
+
+ )
+ )
+ )
+EOF
+
+ cat > $WEB/php.ini <<EOF
+ session.save_path ='$CURR_DIR/$WEB_TMP'
+EOF
+}
+
+# start_lighttpd
+#
+# Start or restart the lighttpd daemon. On restart, the configuration files are rewritten.
+start_lighttpd () {
+ if test -f "$WEB_TMP/pid"; then
+ echo "Instance already running. Restarting..."
+ stop_lighttpd
+ fi
+ config_lighttpd
+ "$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
+
+ if test $? -ne 0 ; then
+ echo "Could not execute http deamon lighttpd"
+ exit 1
+ fi
+}
+
+# stop_lighttpd
+#
+# Kill the lighttpd daemon and remove its associated files and folders.
+stop_lighttpd () {
+ test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
+ rm -rf "$WEB"
+}
+
+# Create the SQLite database for MediaWiki. If the database file already
+# exists, it will be deleted.
+# This function should be run from the directory where $FILES_FOLDER is
+# located.
+create_db () {
+ rm -f "$TMP/$DB_FILE"
+
+ echo "Generating the SQLite database file. It can take some time ..."
+ # Run the php script to generate the SQLite database file
+ # with cURL calls.
+ php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
+ "$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
+
+ if [ ! -f "$TMP/$DB_FILE" ] ; then
+ error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
+ fi
+
+ # Copy the generated database file into the directory the
+ # user indicated.
+ cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
+ error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
+}
+
+# Install a wiki in your web server directory.
+wiki_install () {
+ if test "$LIGHTTPD" = "true" ; then
+ start_lighttpd
+ fi
+
+ SERVER_ADDR=$SERVER_ADDR:$PORT
+ # In this part, we change directory to $TMP in order to download,
+ # unpack, and copy the MediaWiki files.
+ (
+ mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
+ error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
+ Please create it and launch the script again."
+ fi
+
+ # Fetch MediaWiki's archive if not already present in the TMP directory
+ cd "$TMP"
+ if [ ! -f "$MW_VERSION.tar.gz" ] ; then
+ echo "Downloading $MW_VERSION sources ..."
+ wget "http://download.wikimedia.org/mediawiki/1.19/$MW_VERSION.tar.gz" ||
+ error "Unable to download "\
+ "http://download.wikimedia.org/mediawiki/1.19/"\
+ "$MW_VERSION.tar.gz. "\
+ "Please fix your connection and launch the script again."
+ echo "$MW_VERSION.tar.gz downloaded in $(pwd). "\
+ "You can delete it later if you want."
+ else
+ echo "Reusing existing $MW_VERSION.tar.gz downloaded in $(pwd)."
+ fi
+ archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
+ cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
+ error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
+ tar xzf "$archive_abs_path" --strip-components=1 ||
+ error "Unable to extract WikiMedia's files from $archive_abs_path to "\
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ ) || exit 1
+
+ create_db
+
+ # Copy the generic LocalSettings.php into the web server's directory
+ # and adjust its parameters according to the ones set at the top
+ # of this script.
+ # Note that the original LocalSettings.php is never modified; only a
+ # temporary copy is edited.
+ if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
+ error "Can't find $FILES_FOLDER/LocalSettings.php " \
+ "in the current folder. "\
+ "Please run the script inside its folder."
+ fi
+ cp "$FILES_FOLDER/LocalSettings.php" \
+ "$FILES_FOLDER/LocalSettings-tmp.php" ||
+ error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
+ "to $FILES_FOLDER/LocalSettings-tmp.php"
+
+ # Parse and set the LocalSettings file of the user according to the
+ # CONFIGURATION VARIABLES section at the beginning of this script
+ file_swap="$FILES_FOLDER/LocalSettings-swap.php"
+ sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATADIR@,$TMP," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+
+ mv "$FILES_FOLDER/LocalSettings-tmp.php" \
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
+ error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
+ "in $WIKI_DIR_INST/$WIKI_DIR_NAME"
+ echo "File $FILES_FOLDER/LocalSettings.php is set in" \
+ " $WIKI_DIR_INST/$WIKI_DIR_NAME"
+
+ echo "Your wiki has been installed. You can check it at
+ http://$SERVER_ADDR/$WIKI_DIR_NAME"
+}
+
+# Reset the wiki's database and the admin's password.
+#
+# Warning: this function must be called only from a subdirectory of the t/ directory.
+wiki_reset () {
+ # Copy initial database of the wiki
+ if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
+ error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
+ fi
+ cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
+ error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
+ echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
+}
+
+# Delete the wiki created in the web server's directory and all its content
+# saved in the database.
+wiki_delete () {
+ if test "$LIGHTTPD" = "true"; then
+ stop_lighttpd
+ else
+ # Delete the wiki's directory.
+ rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
+ error "Wiki's directory $WIKI_DIR_INST/" \
+ "$WIKI_DIR_NAME could not be deleted"
+ fi
+
+ # Delete the wiki's SQLite database
+ rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
+ rm -f "$FILES_FOLDER/$DB_FILE"
+ rm -rf "$TMP/$MW_VERSION"
+}
diff --git a/contrib/mw-to-git/t/test-gitmw.pl b/contrib/mw-to-git/t/test-gitmw.pl
new file mode 100755
index 0000000..0ff7625
--- /dev/null
+++ b/contrib/mw-to-git/t/test-gitmw.pl
@@ -0,0 +1,225 @@
+#!/usr/bin/perl -w -s
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Usage:
+# ./test-gitmw.pl <command> [argument]*
+# Run from a terminal with the name of the function to call as the first
+# parameter and the function's arguments as the following parameters.
+#
+# Example:
+# ./test-gitmw.pl "get_page" foo .
+# will call <wiki_getpage> with arguments <foo> and <.>
+#
+# Available functions are:
+# "get_page"
+# "delete_page"
+# "edit_page"
+# "getallpagename"
+
+use MediaWiki::API;
+use Getopt::Long;
+use encoding 'utf8';
+use DateTime::Format::ISO8601;
+use open ':encoding(utf8)';
+use constant SLASH_REPLACEMENT => "%2F";
+
+# Parse the config file.
+
+my $configfile = "$ENV{'CURR_DIR'}/test.config";
+my %config;
+open my $CONFIG, "<", $configfile or die "can't open $configfile: $!";
+while (<$CONFIG>)
+{
+ chomp;
+ s/#.*//;
+ s/^\s+//;
+ s/\s+$//;
+ next unless length;
+ my ($key, $value) = split (/\s*=\s*/,$_, 2);
+ $config{$key} = $value;
+ last if ($key eq 'LIGHTTPD' and $value eq 'false');
+ last if ($key eq 'PORT');
+}
+close $CONFIG or die "can't close $configfile: $!";
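+
+# Note: the parsing loop above stops reading test.config at the PORT
+# key, or already at LIGHTTPD when it is false (the wiki is then served
+# on the web server's default port). The keys following PORT use shell
+# expansions such as $WEB, which this Perl script does not need.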
+
+my $wiki_address = "http://$config{'SERVER_ADDR'}:$config{'PORT'}";
+my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
+my $wiki_admin = "$config{'WIKI_ADMIN'}";
+my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
+my $mw = MediaWiki::API->new;
+$mw->{config}->{api_url} = $wiki_url;
+
+
+# wiki_login <name> <password>
+#
+# Log in the user <name> with password <password> on the wiki
+# referenced by the global variable $mw.
+sub wiki_login {
+ $mw->login( { lgname => "$_[0]", lgpassword => "$_[1]" } )
+ || die "wiki_login: login failed";
+}
+
+# wiki_getpage <wiki_page> <dest_path>
+#
+# Fetch the page <wiki_page> from the wiki referenced by the global
+# variable $mw and copy its content into the directory <dest_path>.
+sub wiki_getpage {
+ my $pagename = $_[0];
+ my $destdir = $_[1];
+
+ my $page = $mw->get_page( { title => $pagename } );
+ if (!defined($page)) {
+ die "getpage: wiki does not exist";
+ }
+
+ my $content = $page->{'*'};
+ if (!defined($content)) {
+ die "getpage: page does not exist";
+ }
+
+ $pagename=$page->{'title'};
+ # Replace spaces with underscores and the forbidden "/" with %2F
+ # in the page name.
+ $pagename =~ s/ /_/g;
+ $pagename =~ s/\//%2F/g;
+ open(my $file, ">", "$destdir/$pagename.mw")
+ or die "can't open $destdir/$pagename.mw: $!";
+ print $file "$content";
+ close ($file);
+
+}
+
+# wiki_delete_page <page_name>
+#
+# Delete the page named <page_name> from the wiki referenced by
+# the global variable $mw.
+sub wiki_delete_page {
+ my $pagename = $_[0];
+
+ my $exist=$mw->get_page({title => $pagename});
+
+ if (defined($exist->{'*'})){
+ $mw->edit({ action => 'delete',
+ title => $pagename})
+ || die $mw->{error}->{code} . ": " . $mw->{error}->{details};
+ } else {
+ die "no page with such name found: $pagename\n";
+ }
+}
+
+# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
+#
+# Edit the page named <wiki_page> with content <wiki_content> on the wiki
+# referenced by the global variable $mw.
+# If <wiki_append> == true, append <wiki_content> to the end of the
+# existing content of the page <wiki_page>.
+# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
+sub wiki_editpage {
+ my $wiki_page = $_[0];
+ my $wiki_content = $_[1];
+ my $wiki_append = $_[2];
+ my $summary = "";
+ my ($summ, $cat) = ();
+ GetOptions('s=s' => \$summ, 'c=s' => \$cat);
+
+ my $append = 0;
+ if (defined($wiki_append) && $wiki_append eq 'true') {
+ $append=1;
+ }
+
+ my $previous_text ="";
+
+ if ($append) {
+ my $ref = $mw->get_page( { title => $wiki_page } );
+ $previous_text = $ref->{'*'};
+ }
+
+ my $text = $wiki_content;
+ if (defined($previous_text)) {
+ $text="$previous_text$text";
+ }
+
+ # Optionally, add this page to a category.
+ if (defined($cat)) {
+ my $category_name="[[Category:$cat]]";
+ $text="$text\n $category_name";
+ }
+ if (defined($summ)) {
+ $summary=$summ;
+ }
+
+ $mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
+}
+
+# wiki_getallpagename [<category>]
+#
+# Fetch all pages of the wiki referenced by the global variable $mw
+# and print their names to the file all.txt, one per line.
+# If the argument <category> is defined, only the pages belonging to
+# <category> are fetched.
+sub wiki_getallpagename {
+ # Fetch the pages of the wiki.
+ my $mw_pages;
+ if (defined($_[0])) {
+ $mw_pages = $mw->list ( { action => 'query',
+ list => 'categorymembers',
+ cmtitle => "Category:$_[0]",
+ cmnamespace => 0,
+ cmlimit => 500 },
+ )
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ } else {
+ $mw_pages = $mw->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 500,
+ })
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ }
+ open(my $file, ">", "all.txt") or die "can't open all.txt: $!";
+ foreach my $page (@{$mw_pages}) {
+ print $file "$page->{title}\n";
+ }
+ close ($file);
+}
+
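+# wiki_upload_file <file_name>
+#
+# Upload the local file <file_name> to the wiki referenced by the
+# global variable $mw, ignoring MediaWiki upload warnings.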
+sub wiki_upload_file {
+ my $file_name = $_[0];
+ my $result = $mw->edit ( {
+ action => 'upload',
+ filename => $file_name,
+ comment => 'upload a file',
+ file => [ $file_name ],
+ ignorewarnings=>1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
+}
+
+
+
+# Main part of this script: parse the command line arguments
+# and select which function to execute
+my $fct_to_call = shift;
+
+wiki_login($wiki_admin, $wiki_admin_pass);
+
+my %functions_to_call = qw(
+ upload_file wiki_upload_file
+ get_page wiki_getpage
+ delete_page wiki_delete_page
+ edit_page wiki_editpage
+ getallpagename wiki_getallpagename
+);
+die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
+&{$functions_to_call{$fct_to_call}}(@ARGV);
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
new file mode 100644
index 0000000..958b37b
--- /dev/null
+++ b/contrib/mw-to-git/t/test.config
@@ -0,0 +1,35 @@
+# WIKI_DIR_NAME is the name of the web server's directory dedicated to the wiki
+WIKI_DIR_NAME=wiki
+
+# Login and password of the wiki's admin
+WIKI_ADMIN=WikiAdmin
+WIKI_PASSW=AdminPass
+
+# Address of the web server
+SERVER_ADDR=localhost
+
+# SQLite database of the wiki, named DB_FILE, is located in TMP
+TMP=/tmp
+DB_FILE=wikidb.sqlite
+
+# If LIGHTTPD is not set to true, the script will use the default
+# web server running in WIKI_DIR_INST.
+WIKI_DIR_INST=/var/www
+
+# If LIGHTTPD is set to true, the script will use Lighttpd to run
+# the wiki.
+LIGHTTPD=true
+
+# The variables below are useful only if LIGHTTPD is set to true.
+PORT=1234
+PHP_DIR=/usr/bin
+LIGHTTPD_DIR=/usr/sbin
+WEB=WEB
+WEB_TMP=$WEB/tmp
+WEB_WWW=$WEB/www
+
+# The variables below are used by the script to install a wiki.
+# You should not modify these unless you are modifying the script itself.
+MW_VERSION=mediawiki-1.19.0
+FILES_FOLDER=install-wiki
+DB_INSTALL_SCRIPT=db_install.php
diff --git a/credential.c b/credential.c
index 62d1c56..e54753c 100644
--- a/credential.c
+++ b/credential.c
@@ -172,6 +172,8 @@ int credential_read(struct credential *c, FILE *fp)
} else if (!strcmp(key, "path")) {
free(c->path);
c->path = xstrdup(value);
+ } else if (!strcmp(key, "url")) {
+ credential_from_url(c, value);
}
/*
* Ignore other lines; we don't know what they mean, but
@@ -191,7 +193,7 @@ static void credential_write_item(FILE *fp, const char *key, const char *value)
fprintf(fp, "%s=%s\n", key, value);
}
-static void credential_write(const struct credential *c, FILE *fp)
+void credential_write(const struct credential *c, FILE *fp)
{
credential_write_item(fp, "protocol", c->protocol);
credential_write_item(fp, "host", c->host);
diff --git a/credential.h b/credential.h
index 96ea41b..0c3e85e 100644
--- a/credential.h
+++ b/credential.h
@@ -26,6 +26,7 @@ void credential_approve(struct credential *);
void credential_reject(struct credential *);
int credential_read(struct credential *, FILE *);
+void credential_write(const struct credential *, FILE *);
void credential_from_url(struct credential *, const char *url);
int credential_match(const struct credential *have,
const struct credential *want);
diff --git a/diff.c b/diff.c
index 62cbe14..95706a5 100644
--- a/diff.c
+++ b/diff.c
@@ -1397,7 +1397,7 @@ int print_stat_summary(FILE *fp, int files, int insertions, int deletions)
if (!files) {
assert(insertions == 0 && deletions == 0);
- return fputs(_(" 0 files changed\n"), fp);
+ return fprintf(fp, "%s\n", _(" 0 files changed"));
}
strbuf_addf(&sb,
diff --git a/dir.c b/dir.c
index 2c02b31..240bf0c 100644
--- a/dir.c
+++ b/dir.c
@@ -288,9 +288,24 @@ int match_pathspec_depth(const struct pathspec *ps,
return retval;
}
+/*
+ * Return the length of the "simple" part of a path match limiter.
+ */
+static int simple_length(const char *match)
+{
+ int len = -1;
+
+ for (;;) {
+ unsigned char c = *match++;
+ len++;
+ if (c == '\0' || is_glob_special(c))
+ return len;
+ }
+}
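+
+/*
+ * Illustrative values (not asserted anywhere in this patch):
+ * simple_length("Documentation/*.txt") is 14, the "Documentation/"
+ * prefix before the first glob character; simple_length("*.o") is 0;
+ * and simple_length("foo") is 3, i.e. the whole string.
+ */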
+
static int no_wildcard(const char *string)
{
- return string[strcspn(string, "*?[{\\")] == '\0';
+ return string[simple_length(string)] == '\0';
}
void add_exclude(const char *string, const char *base,
@@ -326,8 +341,7 @@ void add_exclude(const char *string, const char *base,
x->flags = flags;
if (!strchr(string, '/'))
x->flags |= EXC_FLAG_NODIR;
- if (no_wildcard(string))
- x->flags |= EXC_FLAG_NOWILDCARD;
+ x->nowildcardlen = simple_length(string);
if (*string == '*' && no_wildcard(string+1))
x->flags |= EXC_FLAG_ENDSWITH;
ALLOC_GROW(which->excludes, which->nr + 1, which->alloc);
@@ -498,57 +512,69 @@ int excluded_from_list(const char *pathname,
{
int i;
- if (el->nr) {
- for (i = el->nr - 1; 0 <= i; i--) {
- struct exclude *x = el->excludes[i];
- const char *exclude = x->pattern;
- int to_exclude = x->to_exclude;
-
- if (x->flags & EXC_FLAG_MUSTBEDIR) {
- if (*dtype == DT_UNKNOWN)
- *dtype = get_dtype(NULL, pathname, pathlen);
- if (*dtype != DT_DIR)
- continue;
- }
+ if (!el->nr)
+ return -1; /* undefined */
- if (x->flags & EXC_FLAG_NODIR) {
- /* match basename */
- if (x->flags & EXC_FLAG_NOWILDCARD) {
- if (!strcmp_icase(exclude, basename))
- return to_exclude;
- } else if (x->flags & EXC_FLAG_ENDSWITH) {
- if (x->patternlen - 1 <= pathlen &&
- !strcmp_icase(exclude + 1, pathname + pathlen - x->patternlen + 1))
- return to_exclude;
- } else {
- if (fnmatch_icase(exclude, basename, 0) == 0)
- return to_exclude;
- }
- }
- else {
- /* match with FNM_PATHNAME:
- * exclude has base (baselen long) implicitly
- * in front of it.
- */
- int baselen = x->baselen;
- if (*exclude == '/')
- exclude++;
-
- if (pathlen < baselen ||
- (baselen && pathname[baselen-1] != '/') ||
- strncmp_icase(pathname, x->base, baselen))
- continue;
-
- if (x->flags & EXC_FLAG_NOWILDCARD) {
- if (!strcmp_icase(exclude, pathname + baselen))
- return to_exclude;
- } else {
- if (fnmatch_icase(exclude, pathname+baselen,
- FNM_PATHNAME) == 0)
- return to_exclude;
- }
+ for (i = el->nr - 1; 0 <= i; i--) {
+ struct exclude *x = el->excludes[i];
+ const char *name, *exclude = x->pattern;
+ int to_exclude = x->to_exclude;
+ int namelen, prefix = x->nowildcardlen;
+
+ if (x->flags & EXC_FLAG_MUSTBEDIR) {
+ if (*dtype == DT_UNKNOWN)
+ *dtype = get_dtype(NULL, pathname, pathlen);
+ if (*dtype != DT_DIR)
+ continue;
+ }
+
+ if (x->flags & EXC_FLAG_NODIR) {
+ /* match basename */
+ if (prefix == x->patternlen) {
+ if (!strcmp_icase(exclude, basename))
+ return to_exclude;
+ } else if (x->flags & EXC_FLAG_ENDSWITH) {
+ if (x->patternlen - 1 <= pathlen &&
+ !strcmp_icase(exclude + 1, pathname + pathlen - x->patternlen + 1))
+ return to_exclude;
+ } else {
+ if (fnmatch_icase(exclude, basename, 0) == 0)
+ return to_exclude;
}
+ continue;
+ }
+
+ /* match with FNM_PATHNAME:
+ * exclude has base (baselen long) implicitly in front of it.
+ */
+ if (*exclude == '/') {
+ exclude++;
+ prefix--;
}
+
+ if (pathlen < x->baselen ||
+ (x->baselen && pathname[x->baselen-1] != '/') ||
+ strncmp_icase(pathname, x->base, x->baselen))
+ continue;
+
+ namelen = x->baselen ? pathlen - x->baselen : pathlen;
+ name = pathname + pathlen - namelen;
+
+ /* if the non-wildcard part is longer than the
+ remaining pathname, surely it cannot match */
+ if (prefix > namelen)
+ continue;
+
+ if (prefix) {
+ if (strncmp_icase(exclude, name, prefix))
+ continue;
+ exclude += prefix;
+ name += prefix;
+ namelen -= prefix;
+ }
+
+ if (!namelen || !fnmatch_icase(exclude, name, FNM_PATHNAME))
+ return to_exclude;
}
return -1; /* undecided */
}
@@ -1055,21 +1081,6 @@ static int cmp_name(const void *p1, const void *p2)
e2->name, e2->len);
}
-/*
- * Return the length of the "simple" part of a path match limiter.
- */
-static int simple_length(const char *match)
-{
- int len = -1;
-
- for (;;) {
- unsigned char c = *match++;
- len++;
- if (c == '\0' || is_glob_special(c))
- return len;
- }
-}
-
static struct path_simplify *create_simplify(const char **pathspec)
{
int nr, alloc = 0;
@@ -1292,9 +1303,14 @@ int remove_dir_recursively(struct strbuf *path, int flag)
void setup_standard_excludes(struct dir_struct *dir)
{
const char *path;
+ char *xdg_path;
dir->exclude_per_dir = ".gitignore";
path = git_path("info/exclude");
+ if (!excludes_file) {
+ home_config_paths(NULL, &xdg_path, "ignore");
+ excludes_file = xdg_path;
+ }
if (!access(path, R_OK))
add_excludes_from_file(dir, path);
if (excludes_file && !access(excludes_file, R_OK))
diff --git a/dir.h b/dir.h
index 6c73e41..893465a 100644
--- a/dir.h
+++ b/dir.h
@@ -9,7 +9,6 @@ struct dir_entry {
};
#define EXC_FLAG_NODIR 1
-#define EXC_FLAG_NOWILDCARD 2
#define EXC_FLAG_ENDSWITH 4
#define EXC_FLAG_MUSTBEDIR 8
@@ -19,6 +18,7 @@ struct exclude_list {
struct exclude {
const char *pattern;
int patternlen;
+ int nowildcardlen;
const char *base;
int baselen;
int to_exclude;
diff --git a/environment.c b/environment.c
index 669e498..85edd7f 100644
--- a/environment.c
+++ b/environment.c
@@ -58,6 +58,7 @@ char *notes_ref_name;
int grafts_replace_parents = 1;
int core_apply_sparse_checkout;
int merge_log_config = -1;
+int precomposed_unicode = -1; /* see probe_utf8_pathname_composition() */
struct startup_info *startup_info;
unsigned long pack_size_limit_cfg;
diff --git a/git-am.sh b/git-am.sh
index 9abad36..bd9620c 100755
--- a/git-am.sh
+++ b/git-am.sh
@@ -92,7 +92,7 @@ safe_to_abort () {
then
return 0
fi
- gettextln "You seem to have moved HEAD since the last 'am' failure.
+ gettextln "You seem to have moved HEAD since the last 'am' failure.
Not rewinding to ORIG_HEAD" >&2
return 1
}
@@ -102,9 +102,9 @@ stop_here_user_resolve () {
printf '%s\n' "$resolvemsg"
stop_here $1
fi
- eval_gettextln "When you have resolved this problem run \"\$cmdline --resolved\".
-If you would prefer to skip this patch, instead run \"\$cmdline --skip\".
-To restore the original branch and stop patching run \"\$cmdline --abort\"."
+ eval_gettextln "When you have resolved this problem, run \"\$cmdline --resolved\".
+If you prefer to skip this patch, run \"\$cmdline --skip\" instead.
+To restore the original branch and stop patching, run \"\$cmdline --abort\"."
stop_here $1
}
@@ -136,7 +136,7 @@ fall_back_3way () {
git write-tree >"$dotest/patch-merge-base+" ||
cannot_fallback "$(gettext "Repository lacks necessary blobs to fall back on 3-way merge.")"
- say Using index info to reconstruct a base tree...
+ say "$(gettext "Using index info to reconstruct a base tree...")"
cmd='GIT_INDEX_FILE="$dotest/patch-merge-tmp-index"'
@@ -176,8 +176,7 @@ It does not apply to blobs recorded in its index.")"
fi
git-merge-recursive $orig_tree -- HEAD $his_tree || {
git rerere $allow_rerere_autoupdate
- echo Failed to merge in the changes.
- exit 1
+ die "$(gettext "Failed to merge in the changes.")"
}
unset GITHEAD_$his_tree
}
@@ -260,7 +259,7 @@ check_patch_format () {
split_patches () {
case "$patch_format" in
mbox)
- if test -n "$rebasing" || test t = "$keepcr"
+ if test t = "$keepcr"
then
keep_cr=--keep-cr
else
@@ -387,8 +386,8 @@ do
-i|--interactive)
interactive=t ;;
-b|--binary)
- echo >&2 "The $1 option has been a no-op for long time, and"
- echo >&2 "it will be removed. Please do not use it anymore."
+ gettextln >&2 "The -b/--binary option has been a no-op for a long time, and
+it will be removed. Please do not use it anymore."
;;
-3|--3way)
threeway=t ;;
@@ -413,10 +412,7 @@ do
--abort)
abort=t ;;
--rebasing)
- rebasing=t threeway=t keep=t scissors=f no_inbody_headers=t ;;
- -d|--dotest)
- die "$(gettext "-d option is no longer supported. Do not use.")"
- ;;
+ rebasing=t threeway=t ;;
--resolvemsg)
shift; resolvemsg=$1 ;;
--whitespace|--directory|--exclude|--include)
@@ -658,32 +654,34 @@ do
# by the user, or the user can tell us to do so by --resolved flag.
case "$resume" in
'')
- git mailinfo $keep $no_inbody_headers $scissors $utf8 "$dotest/msg" "$dotest/patch" \
- <"$dotest/$msgnum" >"$dotest/info" ||
- stop_here $this
-
- # skip pine's internal folder data
- sane_grep '^Author: Mail System Internal Data$' \
- <"$dotest"/info >/dev/null &&
- go_next && continue
-
- test -s "$dotest/patch" || {
- eval_gettextln "Patch is empty. Was it split wrong?
-If you would prefer to skip this patch, instead run \"\$cmdline --skip\".
-To restore the original branch and stop patching run \"\$cmdline --abort\"."
- stop_here $this
- }
- rm -f "$dotest/original-commit" "$dotest/author-script"
- if test -f "$dotest/rebasing" &&
+ if test -f "$dotest/rebasing"
+ then
commit=$(sed -e 's/^From \([0-9a-f]*\) .*/\1/' \
-e q "$dotest/$msgnum") &&
- test "$(git cat-file -t "$commit")" = commit
- then
+ test "$(git cat-file -t "$commit")" = commit ||
+ stop_here $this
git cat-file commit "$commit" |
sed -e '1,/^$/d' >"$dotest/msg-clean"
- echo "$commit" > "$dotest/original-commit"
- get_author_ident_from_commit "$commit" > "$dotest/author-script"
+ echo "$commit" >"$dotest/original-commit"
+ get_author_ident_from_commit "$commit" >"$dotest/author-script"
+ git diff-tree --root --binary "$commit" >"$dotest/patch"
else
+ git mailinfo $keep $no_inbody_headers $scissors $utf8 "$dotest/msg" "$dotest/patch" \
+ <"$dotest/$msgnum" >"$dotest/info" ||
+ stop_here $this
+
+ # skip pine's internal folder data
+ sane_grep '^Author: Mail System Internal Data$' \
+ <"$dotest"/info >/dev/null &&
+ go_next && continue
+
+ test -s "$dotest/patch" || {
+ eval_gettextln "Patch is empty. Was it split wrong?
+If you would prefer to skip this patch, instead run \"\$cmdline --skip\".
+To restore the original branch and stop patching run \"\$cmdline --abort\"."
+ stop_here $this
+ }
+ rm -f "$dotest/original-commit" "$dotest/author-script"
{
sed -n '/^Subject/ s/Subject: //p' "$dotest/info"
echo
diff --git a/git-compat-util.h b/git-compat-util.h
index 5bd9ad7..35b095e 100644
--- a/git-compat-util.h
+++ b/git-compat-util.h
@@ -153,6 +153,15 @@
#endif
#endif
+/* used on Mac OS X */
+#ifdef PRECOMPOSE_UNICODE
+#include "compat/precompose_utf8.h"
+#else
+#define precompose_str(in,i_nfd2nfc)
+#define precompose_argv(c,v)
+#define probe_utf8_pathname_composition(a,b)
+#endif
+
#ifndef NO_LIBGEN_H
#include <libgen.h>
#else
diff --git a/git-difftool.perl b/git-difftool.perl
index ae1e052..c079854 100755
--- a/git-difftool.perl
+++ b/git-difftool.perl
@@ -15,6 +15,7 @@ use strict;
use warnings;
use File::Basename qw(dirname);
use File::Copy;
+use File::Compare;
use File::Find;
use File::stat;
use File::Path qw(mkpath);
@@ -336,8 +337,10 @@ if (defined($dirdiff)) {
# files were modified during the diff, then the changes
# should be copied back to the working tree
for my $file (@working_tree) {
- copy("$b/$file", "$workdir/$file") or die $!;
- chmod(stat("$b/$file")->mode, "$workdir/$file") or die $!;
+ if (-e "$b/$file" && compare("$b/$file", "$workdir/$file")) {
+ copy("$b/$file", "$workdir/$file") or die $!;
+ chmod(stat("$b/$file")->mode, "$workdir/$file") or die $!;
+ }
}
} else {
if (defined($prompt)) {
diff --git a/git-p4.py b/git-p4.py
index f895a24..e67d37d 100755
--- a/git-p4.py
+++ b/git-p4.py
@@ -120,6 +120,15 @@ def p4_read_pipe_lines(c):
real_cmd = p4_build_cmd(c)
return read_pipe_lines(real_cmd)
+def p4_has_command(cmd):
+ """Ask p4 for help on this command. If it returns an error, the
+ command does not exist in this version of p4."""
+ real_cmd = p4_build_cmd(["help", cmd])
+ p = subprocess.Popen(real_cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ p.communicate()
+ return p.returncode == 0
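+
+# For instance (illustrative, not part of this patch):
+# p4_has_command("move") is expected to be False on p4 clients that
+# predate "p4 move", since "p4 help move" then exits non-zero.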
+
def system(cmd):
expand = isinstance(cmd,basestring)
if verbose:
@@ -157,6 +166,9 @@ def p4_revert(f):
def p4_reopen(type, f):
p4_system(["reopen", "-t", type, wildcard_encode(f)])
+def p4_move(src, dest):
+ p4_system(["move", "-k", wildcard_encode(src), wildcard_encode(dest)])
+
#
# Canonicalize the p4 type and return a tuple of the
# base type, plus any modifiers. See "p4 help filetypes"
@@ -844,20 +856,45 @@ class P4Submit(Command, P4UserMap):
]
self.description = "Submit changes from git to the perforce depot."
self.usage += " [name of git branch to submit into perforce depot]"
- self.interactive = True
self.origin = ""
self.detectRenames = False
self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true"
self.isWindows = (platform.system() == "Windows")
self.exportLabels = False
+ self.p4HasMoveCommand = p4_has_command("move")
def check(self):
if len(p4CmdList("opened ...")) > 0:
die("You have files opened with perforce! Close them before starting the sync.")
- # replaces everything between 'Description:' and the next P4 submit template field with the
- # commit message
- def prepareLogMessage(self, template, message):
+ def separate_jobs_from_description(self, message):
+ """Extract and return a possible Jobs field in the commit
+ message. It goes into a separate section in the p4 change
+ specification.
+
+ A jobs line starts with "Jobs:" and looks like a new field
+ in a form. Values are white-space separated on the same
+ line or on following lines that start with a tab.
+
+ This does not parse and extract the full git commit message
+ like a p4 form. It just sees the Jobs: line as a marker
+ to pass everything from then on directly into the p4 form,
+ but outside the description section.
+
+ Return a tuple (stripped log message, jobs string)."""
+
+ m = re.search(r'^Jobs:', message, re.MULTILINE)
+ if m is None:
+ return (message, None)
+
+ jobtext = message[m.start():]
+ stripped_message = message[:m.start()].rstrip()
+ return (stripped_message, jobtext)
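+
+ # Illustration (hypothetical commit message): given
+ #
+ #   Fix the frobnicator
+ #
+ #   Jobs: job000001 job000002
+ #
+ # this returns ("Fix the frobnicator", "Jobs: job000001 job000002").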
+
+ def prepareLogMessage(self, template, message, jobs):
+ """Edits the template returned from "p4 change -o" to insert
+ the message in the Description field, and the jobs text in
+ the Jobs field."""
result = ""
inDescriptionSection = False
@@ -870,6 +907,9 @@ class P4Submit(Command, P4UserMap):
if inDescriptionSection:
if line.startswith("Files:") or line.startswith("Jobs:"):
inDescriptionSection = False
+ # insert Jobs section
+ if jobs:
+ result += jobs + "\n"
else:
continue
else:
@@ -981,7 +1021,13 @@ class P4Submit(Command, P4UserMap):
return 0
def prepareSubmitTemplate(self):
- # remove lines in the Files section that show changes to files outside the depot path we're committing into
+ """Run "p4 change -o" to grab a change specification template.
+ This does not use "p4 -G", as it is nice to keep the submission
+ template in original order, since a human might edit it.
+
+ Remove lines in the Files section that show changes to files
+ outside the depot path we're committing into."""
+
template = ""
inFilesSection = False
for line in p4_read_pipe_lines(['change', '-o']):
@@ -1046,27 +1092,7 @@ class P4Submit(Command, P4UserMap):
(p4User, gitEmail) = self.p4UserForCommit(id)
- if not self.detectRenames:
- # If not explicitly set check the config variable
- self.detectRenames = gitConfig("git-p4.detectRenames")
-
- if self.detectRenames.lower() == "false" or self.detectRenames == "":
- diffOpts = ""
- elif self.detectRenames.lower() == "true":
- diffOpts = "-M"
- else:
- diffOpts = "-M%s" % self.detectRenames
-
- detectCopies = gitConfig("git-p4.detectCopies")
- if detectCopies.lower() == "true":
- diffOpts += " -C"
- elif detectCopies != "" and detectCopies.lower() != "false":
- diffOpts += " -C%s" % detectCopies
-
- if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
- diffOpts += " --find-copies-harder"
-
- diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id))
+ diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (self.diffOpts, id, id))
filesToAdd = set()
filesToDelete = set()
editedFiles = set()
@@ -1106,17 +1132,23 @@ class P4Submit(Command, P4UserMap):
editedFiles.add(dest)
elif modifier == "R":
src, dest = diff['src'], diff['dst']
- p4_integrate(src, dest)
- if diff['src_sha1'] != diff['dst_sha1']:
- p4_edit(dest)
+ if self.p4HasMoveCommand:
+ p4_edit(src) # src must be open before move
+ p4_move(src, dest) # opens for (move/delete, move/add)
else:
- pureRenameCopy.add(dest)
+ p4_integrate(src, dest)
+ if diff['src_sha1'] != diff['dst_sha1']:
+ p4_edit(dest)
+ else:
+ pureRenameCopy.add(dest)
if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- p4_edit(dest)
+ if not self.p4HasMoveCommand:
+ p4_edit(dest) # with move: already open, writable
filesToChangeExecBit[dest] = diff['dst_mode']
- os.unlink(dest)
+ if not self.p4HasMoveCommand:
+ os.unlink(dest)
+ filesToDelete.add(src)
editedFiles.add(dest)
- filesToDelete.add(src)
else:
die("unknown modifier %s for %s" % (modifier, path))
@@ -1206,89 +1238,80 @@ class P4Submit(Command, P4UserMap):
logMessage = extractLogMessageFromGitCommit(id)
logMessage = logMessage.strip()
+ (logMessage, jobs) = self.separate_jobs_from_description(logMessage)
template = self.prepareSubmitTemplate()
+ submitTemplate = self.prepareLogMessage(template, logMessage, jobs)
- if self.interactive:
- submitTemplate = self.prepareLogMessage(template, logMessage)
-
- if self.preserveUser:
- submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
-
- if os.environ.has_key("P4DIFF"):
- del(os.environ["P4DIFF"])
- diff = ""
- for editedFile in editedFiles:
- diff += p4_read_pipe(['diff', '-du',
- wildcard_encode(editedFile)])
-
- newdiff = ""
- for newFile in filesToAdd:
- newdiff += "==== new file ====\n"
- newdiff += "--- /dev/null\n"
- newdiff += "+++ %s\n" % newFile
- f = open(newFile, "r")
- for line in f.readlines():
- newdiff += "+" + line
- f.close()
-
- if self.checkAuthorship and not self.p4UserIsMe(p4User):
- submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
- submitTemplate += "######## Use option --preserve-user to modify authorship.\n"
- submitTemplate += "######## Variable git-p4.skipUserNameCheck hides this message.\n"
-
- separatorLine = "######## everything below this line is just the diff #######\n"
-
- (handle, fileName) = tempfile.mkstemp()
- tmpFile = os.fdopen(handle, "w+")
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\n", "\r\n")
- separatorLine = separatorLine.replace("\n", "\r\n")
- newdiff = newdiff.replace("\n", "\r\n")
- tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
+ if self.preserveUser:
+ submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
+
+ if os.environ.has_key("P4DIFF"):
+ del(os.environ["P4DIFF"])
+ diff = ""
+ for editedFile in editedFiles:
+ diff += p4_read_pipe(['diff', '-du',
+ wildcard_encode(editedFile)])
+
+ newdiff = ""
+ for newFile in filesToAdd:
+ newdiff += "==== new file ====\n"
+ newdiff += "--- /dev/null\n"
+ newdiff += "+++ %s\n" % newFile
+ f = open(newFile, "r")
+ for line in f.readlines():
+ newdiff += "+" + line
+ f.close()
+
+ if self.checkAuthorship and not self.p4UserIsMe(p4User):
+ submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
+ submitTemplate += "######## Use option --preserve-user to modify authorship.\n"
+ submitTemplate += "######## Variable git-p4.skipUserNameCheck hides this message.\n"
+
+ separatorLine = "######## everything below this line is just the diff #######\n"
+
+ (handle, fileName) = tempfile.mkstemp()
+ tmpFile = os.fdopen(handle, "w+")
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\n", "\r\n")
+ separatorLine = separatorLine.replace("\n", "\r\n")
+ newdiff = newdiff.replace("\n", "\r\n")
+ tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
+ tmpFile.close()
+
+ if self.edit_template(fileName):
+ # read the edited message and submit
+ tmpFile = open(fileName, "rb")
+ message = tmpFile.read()
tmpFile.close()
+ submitTemplate = message[:message.index(separatorLine)]
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\r\n", "\n")
+ p4_write_pipe(['submit', '-i'], submitTemplate)
- if self.edit_template(fileName):
- # read the edited message and submit
- tmpFile = open(fileName, "rb")
- message = tmpFile.read()
- tmpFile.close()
- submitTemplate = message[:message.index(separatorLine)]
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\r\n", "\n")
- p4_write_pipe(['submit', '-i'], submitTemplate)
-
- if self.preserveUser:
- if p4User:
- # Get last changelist number. Cannot easily get it from
- # the submit command output as the output is
- # unmarshalled.
- changelist = self.lastP4Changelist()
- self.modifyChangelistUser(changelist, p4User)
-
- # The rename/copy happened by applying a patch that created a
- # new file. This leaves it writable, which confuses p4.
- for f in pureRenameCopy:
- p4_sync(f, "-f")
-
- else:
- # skip this patch
- print "Submission cancelled, undoing p4 changes."
- for f in editedFiles:
- p4_revert(f)
- for f in filesToAdd:
- p4_revert(f)
- os.remove(f)
+ if self.preserveUser:
+ if p4User:
+ # Get last changelist number. Cannot easily get it from
+ # the submit command output as the output is
+ # unmarshalled.
+ changelist = self.lastP4Changelist()
+ self.modifyChangelistUser(changelist, p4User)
+
+ # The rename/copy happened by applying a patch that created a
+ # new file. This leaves it writable, which confuses p4.
+ for f in pureRenameCopy:
+ p4_sync(f, "-f")
- os.remove(fileName)
else:
- fileName = "submit.txt"
- file = open(fileName, "w+")
- file.write(self.prepareLogMessage(template, logMessage))
- file.close()
- print ("Perforce submit template written as %s. "
- + "Please review/edit and then use p4 submit -i < %s to submit directly!"
- % (fileName, fileName))
+ # skip this patch
+ print "Submission cancelled, undoing p4 changes."
+ for f in editedFiles:
+ p4_revert(f)
+ for f in filesToAdd:
+ p4_revert(f)
+ os.remove(f)
+
+ os.remove(fileName)
# Export git tags as p4 labels. Create a p4 label and then tag
# with that.
@@ -1433,12 +1456,41 @@ class P4Submit(Command, P4UserMap):
if self.preserveUser:
self.checkValidP4Users(commits)
+ #
+ # Build up a set of options to be passed to diff when
+ # submitting each commit to p4.
+ #
+ if self.detectRenames:
+ # command-line -M arg
+ self.diffOpts = "-M"
+ else:
+ # If not explicitly set check the config variable
+ detectRenames = gitConfig("git-p4.detectRenames")
+
+ if detectRenames.lower() == "false" or detectRenames == "":
+ self.diffOpts = ""
+ elif detectRenames.lower() == "true":
+ self.diffOpts = "-M"
+ else:
+ self.diffOpts = "-M%s" % detectRenames
+
+ # no command-line arg for -C or --find-copies-harder, just
+ # config variables
+ detectCopies = gitConfig("git-p4.detectCopies")
+ if detectCopies.lower() == "false" or detectCopies == "":
+ pass
+ elif detectCopies.lower() == "true":
+ self.diffOpts += " -C"
+ else:
+ self.diffOpts += " -C%s" % detectCopies
+
+ if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
+ self.diffOpts += " --find-copies-harder"
+
while len(commits) > 0:
commit = commits[0]
commits = commits[1:]
self.applyCommit(commit)
- if not self.interactive:
- break
if len(commits) == 0:
print "All changes applied!"
diff --git a/git-rebase--am.sh b/git-rebase--am.sh
index 04d8941..392ebc9 100644
--- a/git-rebase--am.sh
+++ b/git-rebase--am.sh
@@ -3,8 +3,6 @@
# Copyright (c) 2010 Junio C Hamano.
#
-. git-sh-setup
-
case "$action" in
continue)
git am --resolved --resolvemsg="$resolvemsg" &&
diff --git a/git-rebase--interactive.sh b/git-rebase--interactive.sh
index 0c19b7c..0d2056f 100644
--- a/git-rebase--interactive.sh
+++ b/git-rebase--interactive.sh
@@ -9,9 +9,7 @@
#
# The original idea comes from Eric W. Biederman, in
# http://article.gmane.org/gmane.comp.version-control.git/22407
-
-. git-sh-setup
-
+#
# The file containing rebase commands, comments, and empty lines.
# This file is created by "git rebase -i" then edited by the user. As
# the lines are processed, they are removed from the front of this
@@ -417,6 +415,29 @@ record_in_rewritten() {
esac
}
+do_pick () {
+ if test "$(git rev-parse HEAD)" = "$squash_onto"
+ then
+ # Set the correct commit message and author info on the
+ # sentinel root before cherry-picking the original changes
+ # without committing (-n). Finally, update the sentinel again
+ # to include these changes. If the cherry-pick results in a
+ # conflict, the behaviour is similar to that of a standard
+ # failed cherry-pick during rebase, with a dirty index to
+ # resolve before manually running git commit --amend then git
+ # rebase --continue.
+ git commit --allow-empty --allow-empty-message --amend \
+ --no-post-rewrite -n -q -C $1 &&
+ pick_one -n $1 &&
+ git commit --allow-empty --allow-empty-message \
+ --amend --no-post-rewrite -n -q -C $1 ||
+ die_with_patch $1 "Could not apply $1... $2"
+ else
+ pick_one $1 ||
+ die_with_patch $1 "Could not apply $1... $2"
+ fi
+}
+
do_next () {
rm -f "$msg" "$author_script" "$amend" || exit
read -r command sha1 rest < "$todo"
@@ -428,16 +449,14 @@ do_next () {
comment_for_reflog pick
mark_action_done
- pick_one $sha1 ||
- die_with_patch $sha1 "Could not apply $sha1... $rest"
+ do_pick $sha1 "$rest"
record_in_rewritten $sha1
;;
reword|r)
comment_for_reflog reword
mark_action_done
- pick_one $sha1 ||
- die_with_patch $sha1 "Could not apply $sha1... $rest"
+ do_pick $sha1 "$rest"
git commit --amend --no-post-rewrite || {
warn "Could not amend commit after successfully picking $sha1... $rest"
warn "This is most likely due to an empty commit message, or the pre-commit hook"
@@ -451,8 +470,7 @@ do_next () {
comment_for_reflog edit
mark_action_done
- pick_one $sha1 ||
- die_with_patch $sha1 "Could not apply $sha1... $rest"
+ do_pick $sha1 "$rest"
warn "Stopped at $sha1... $rest"
exit_with_patch $sha1 0
;;
@@ -475,25 +493,28 @@ do_next () {
author_script_content=$(get_author_ident_from_commit HEAD)
echo "$author_script_content" > "$author_script"
eval "$author_script_content"
- output git reset --soft HEAD^
- pick_one -n $sha1 || die_failed_squash $sha1 "$rest"
+ if ! pick_one -n $sha1
+ then
+ git rev-parse --verify HEAD >"$amend"
+ die_failed_squash $sha1 "$rest"
+ fi
case "$(peek_next_command)" in
squash|s|fixup|f)
# This is an intermediate commit; its message will only be
# used in case of trouble. So use the long version:
- do_with_author output git commit --no-verify -F "$squash_msg" ||
+ do_with_author output git commit --amend --no-verify -F "$squash_msg" ||
die_failed_squash $sha1 "$rest"
;;
*)
# This is the final command of this squash/fixup group
if test -f "$fixup_msg"
then
- do_with_author git commit --no-verify -F "$fixup_msg" ||
+ do_with_author git commit --amend --no-verify -F "$fixup_msg" ||
die_failed_squash $sha1 "$rest"
else
cp "$squash_msg" "$GIT_DIR"/SQUASH_MSG || exit
rm -f "$GIT_DIR"/MERGE_MSG
- do_with_author git commit --no-verify -e ||
+ do_with_author git commit --amend --no-verify -F "$GIT_DIR"/SQUASH_MSG -e ||
die_failed_squash $sha1 "$rest"
fi
rm -f "$squash_msg" "$fixup_msg"
@@ -684,6 +705,27 @@ rearrange_squash () {
rm -f "$1.sq" "$1.rearranged"
}
+# Add commands after a pick or after a squash/fixup series
+# in the todo list.
+add_exec_commands () {
+ {
+ first=t
+ while read -r insn rest
+ do
+ case $insn in
+ pick)
+ test -n "$first" ||
+ printf "%s" "$cmd"
+ ;;
+ esac
+ printf "%s %s\n" "$insn" "$rest"
+ first=
+ done
+ printf "%s" "$cmd"
+ } <"$1" >"$1.new" &&
+ mv "$1.new" "$1"
+}
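+
+# Sketch of the effect (hypothetical todo list), with cmd="exec make test\n":
+#
+#	pick fa1afe1 first		pick fa1afe1 first
+#	pick b4da55e second	-->	exec make test
+#					pick b4da55e second
+#					exec make test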
+
case "$action" in
continue)
# do we have anything to commit?
@@ -709,7 +751,6 @@ In both case, once you're done, continue with:
fi
. "$author_script" ||
die "Error trying to find the author identity to amend commit"
- current_head=
if test -f "$amend"
then
current_head=$(git rev-parse --verify HEAD)
@@ -717,13 +758,12 @@ In both case, once you're done, continue with:
die "\
You have uncommitted changes in your working tree. Please, commit them
first and then run 'git rebase --continue' again."
- git reset --soft HEAD^ ||
- die "Cannot rewind the HEAD"
+ do_with_author git commit --amend --no-verify -F "$msg" -e ||
+ die "Could not commit staged changes."
+ else
+ do_with_author git commit --no-verify -F "$msg" -e ||
+ die "Could not commit staged changes."
fi
- do_with_author git commit --no-verify -F "$msg" -e || {
- test -n "$current_head" && git reset --soft $current_head
- die "Could not commit staged changes."
- }
fi
record_in_rewritten "$(cat "$state_dir"/stopped-sha)"
@@ -857,6 +897,8 @@ fi
test -s "$todo" || echo noop >> "$todo"
test -n "$autosquash" && rearrange_squash "$todo"
+test -n "$cmd" && add_exec_commands "$todo"
+
cat >> "$todo" << EOF
# Rebase $shortrevisions onto $shortonto
diff --git a/git-rebase--merge.sh b/git-rebase--merge.sh
index dc59907..b10f2cf 100644
--- a/git-rebase--merge.sh
+++ b/git-rebase--merge.sh
@@ -3,8 +3,6 @@
# Copyright (c) 2010 Junio C Hamano.
#
-. git-sh-setup
-
prec=4
read_state () {
diff --git a/git-rebase.sh b/git-rebase.sh
index e616737..15da926 100755
--- a/git-rebase.sh
+++ b/git-rebase.sh
@@ -3,35 +3,11 @@
# Copyright (c) 2005 Junio C Hamano.
#
-USAGE='[--interactive | -i] [-v] [--force-rebase | -f] [--no-ff] [--onto <newbase>] [<upstream>|--root] [<branch>] [--quiet | -q]'
-LONG_USAGE='git-rebase replaces <branch> with a new branch of the
-same name. When the --onto option is provided the new branch starts
-out with a HEAD equal to <newbase>, otherwise it is equal to <upstream>
-It then attempts to create a new commit for each commit from the original
-<branch> that does not exist in the <upstream> branch.
-
-It is possible that a merge failure will prevent this process from being
-completely automatic. You will have to resolve any such merge failure
-and run git rebase --continue. Another option is to bypass the commit
-that caused the merge failure with git rebase --skip. To check out the
-original <branch> and remove the .git/rebase-apply working files, use the
-command git rebase --abort instead.
-
-Note that if <branch> is not specified on the command line, the
-currently checked out branch is used.
-
-Example: git-rebase master~1 topic
-
- A---B---C topic A'\''--B'\''--C'\'' topic
- / --> /
- D---E---F---G master D---E---F---G master
-'
-
SUBDIRECTORY_OK=Yes
OPTIONS_KEEPDASHDASH=
OPTIONS_SPEC="\
-git rebase [-i] [options] [--onto <newbase>] [<upstream>] [<branch>]
-git rebase [-i] [options] --onto <newbase> --root [<branch>]
+git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] [<upstream>] [<branch>]
+git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] --root [<branch>]
git-rebase [-i] --continue | --abort | --skip
--
Available options are
@@ -43,6 +19,7 @@ s,strategy=! use the given merge strategy
no-ff! cherry-pick all commits, even if unchanged
m,merge! use merging strategies to rebase
i,interactive! let the user edit the list of commits to rebase
+x,exec=! add exec lines after each commit of the editable list
k,keep-empty preserve empty commits during rebase
f,force-rebase! force rebase even if branch is up to date
X,strategy-option=! pass the argument through to the merge strategy
@@ -63,6 +40,7 @@ abort! abort and check out the original branch
skip! skip current patch and continue
"
. git-sh-setup
+. git-sh-i18n
set_reflog_action rebase
require_work_tree_exists
cd_to_toplevel
@@ -71,11 +49,12 @@ LF='
'
ok_to_skip_pre_rebase=
resolvemsg="
-When you have resolved this problem run \"git rebase --continue\".
-If you would prefer to skip this patch, instead run \"git rebase --skip\".
-To check out the original branch and stop rebasing run \"git rebase --abort\".
+$(gettext 'When you have resolved this problem, run "git rebase --continue".
+If you prefer to skip this patch, run "git rebase --skip" instead.
+To check out the original branch and stop rebasing, run "git rebase --abort".')
"
unset onto
+cmd=
strategy=
strategy_opts=
do_merge=
@@ -158,7 +137,7 @@ move_to_original_branch () {
git symbolic-ref \
-m "rebase finished: returning to $head_name" \
HEAD $head_name ||
- die "Could not move back to $head_name"
+ die "$(gettext "Could not move back to $head_name")"
;;
esac
}
@@ -177,12 +156,12 @@ run_pre_rebase_hook () {
test -x "$GIT_DIR/hooks/pre-rebase"
then
"$GIT_DIR/hooks/pre-rebase" ${1+"$@"} ||
- die "The pre-rebase hook refused to rebase."
+ die "$(gettext "The pre-rebase hook refused to rebase.")"
fi
}
test -f "$apply_dir"/applying &&
- die 'It looks like git-am is in progress. Cannot rebase.'
+ die "$(gettext "It looks like git-am is in progress. Cannot rebase.")"
if test -d "$apply_dir"
then
@@ -220,6 +199,11 @@ do
onto="$2"
shift
;;
+ -x)
+ test 2 -le "$#" || usage
+ cmd="${cmd}exec $2${LF}"
+ shift
+ ;;
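+	# A hypothetical invocation, "git rebase -i -x 'make test' HEAD~3",
+	# accumulates one "exec make test" line per -x option given;
+	# add_exec_commands later inserts it after each pick in the todo list.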
-i)
interactive_rebase=explicit
;;
@@ -305,9 +289,15 @@ do
done
test $# -gt 2 && usage
+if test -n "$cmd" &&
+ test "$interactive_rebase" != explicit
+then
+ die "$(gettext "The --exec option must be used with the --interactive option")"
+fi
+
if test -n "$action"
then
- test -z "$in_progress" && die "No rebase in progress?"
+ test -z "$in_progress" && die "$(gettext "No rebase in progress?")"
# Only interactive rebase uses detailed reflog messages
if test "$type" = interactive && test "$GIT_REFLOG_ACTION" = rebase
then
@@ -320,11 +310,11 @@ case "$action" in
continue)
# Sanity check
git rev-parse --verify HEAD >/dev/null ||
- die "Cannot read HEAD"
+ die "$(gettext "Cannot read HEAD")"
git update-index --ignore-submodules --refresh &&
git diff-files --quiet --ignore-submodules || {
- echo "You must edit all merge conflicts and then"
- echo "mark them as resolved using git add"
+ echo "$(gettext "You must edit all merge conflicts and then
+mark them as resolved using git add")"
exit 1
}
read_basic_state
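
The --continue path above refuses to resume while git diff-files still reports unstaged changes: every conflicted path must be staged before the rebase goes on. The expected sequence, with an illustrative path name:

    git add path/to/resolved-file
    git rebase --continue
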
@@ -341,7 +331,7 @@ abort)
case "$head_name" in
refs/*)
git symbolic-ref -m "rebase: aborting" HEAD $head_name ||
- die "Could not move back to $head_name"
+ die "$(eval_gettext "Could not move back to \$head_name")"
;;
esac
output git reset --hard $orig_head
@@ -353,15 +343,23 @@ esac
# Make sure no rebase is in progress
if test -n "$in_progress"
then
- die '
-It seems that there is already a '"${state_dir##*/}"' directory, and
+ state_dir_base=${state_dir##*/}
+ cmd_live_rebase="git rebase (--continue | --abort | --skip)"
+ cmd_clear_stale_rebase="rm -fr \"$state_dir\""
+ die "
+$(eval_gettext 'It seems that there is already a $state_dir_base directory, and
I wonder if you are in the middle of another rebase. If that is the
case, please try
- git rebase (--continue | --abort | --skip)
+ $cmd_live_rebase
If that is not the case, please
- rm -fr '"$state_dir"'
+ $cmd_clear_stale_rebase
and run me again. I am stopping in case you still have something
-valuable there.'
+valuable there.')"
+fi
+
+if test -n "$rebase_root" && test -z "$onto"
+then
+ test -z "$interactive_rebase" && interactive_rebase=implied
fi
if test -n "$interactive_rebase"
@@ -394,12 +392,18 @@ then
;;
esac
upstream=`git rev-parse --verify "${upstream_name}^0"` ||
- die "invalid upstream $upstream_name"
+ die "$(eval_gettext "invalid upstream \$upstream_name")"
upstream_arg="$upstream_name"
else
- test -z "$onto" && die "You must specify --onto when using --root"
+ if test -z "$onto"
+ then
+ empty_tree=`git hash-object -t tree /dev/null`
+ onto=`git commit-tree $empty_tree </dev/null`
+ squash_onto="$onto"
+ fi
unset upstream_name
unset upstream
+ test $# -gt 1 && usage
upstream_arg=--root
fi
@@ -412,19 +416,19 @@ case "$onto_name" in
then
case "$onto" in
?*"$LF"?*)
- die "$onto_name: there is more than one merge base"
+ die "$(eval_gettext "\$onto_name: there is more than one merge base")"
;;
'')
- die "$onto_name: there is no merge base"
+ die "$(eval_gettext "\$onto_name: there is no merge base")"
;;
esac
else
- die "$onto_name: there is no merge base"
+ die "$(eval_gettext "\$onto_name: there is no merge base")"
fi
;;
*)
onto=$(git rev-parse --verify "${onto_name}^0") ||
- die "Does not point to a valid commit: $onto_name"
+ die "$(eval_gettext "Does not point to a valid commit: \$onto_name")"
;;
esac
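
The case above covers the documented A...B form of --onto, which names the merge base of A and B; the rebase aborts when the two sides have no merge base or more than one. Assuming illustrative branches master and topic, the shorthand is equivalent to computing the base explicitly:

    git rebase --onto "$(git merge-base master topic)" master topic
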
@@ -447,10 +451,10 @@ case "$#" in
then
head_name="detached HEAD"
else
- die "fatal: no such branch: $1"
+ die "$(eval_gettext "fatal: no such branch: \$branch_name")"
fi
;;
-*)
+0)
# Do not need to switch branches, we are already on it.
if branch_name=`git symbolic-ref -q HEAD`
then
@@ -462,9 +466,12 @@ case "$#" in
fi
orig_head=$(git rev-parse --verify "${branch_name}^0") || exit
;;
+*)
+ die "BUG: unexpected number of arguments left to parse"
+ ;;
esac
-require_clean_work_tree "rebase" "Please commit or stash them."
+require_clean_work_tree "rebase" "$(gettext "Please commit or stash them.")"
# Now we are rebasing commits $upstream..$orig_head (or with --root,
# everything leading up to $orig_head) on top of $onto
@@ -482,10 +489,10 @@ then
then
# Lazily switch to the target branch if needed...
test -z "$switch_to" || git checkout "$switch_to" --
- say "Current branch $branch_name is up to date."
+ say "$(eval_gettext "Current branch \$branch_name is up to date.")"
exit 0
else
- say "Current branch $branch_name is up to date, rebase forced."
+ say "$(eval_gettext "Current branch \$branch_name is up to date, rebase forced.")"
fi
fi
@@ -496,7 +503,7 @@ if test -n "$diffstat"
then
if test -n "$verbose"
then
- echo "Changes from $mb to $onto:"
+ echo "$(eval_gettext "Changes from \$mb to \$onto:")"
fi
# We want color (if set), but no pager
GIT_PAGER='' git diff --stat --summary "$mb" "$onto"
@@ -505,7 +512,7 @@ fi
test "$type" = interactive && run_specific_rebase
# Detach HEAD and reset the tree
-say "First, rewinding head to replay your work on top of it..."
+say "$(gettext "First, rewinding head to replay your work on top of it...")"
git checkout -q "$onto^0" || die "could not detach HEAD"
git update-ref ORIG_HEAD $orig_head
@@ -513,7 +520,7 @@ git update-ref ORIG_HEAD $orig_head
# we just fast-forwarded.
if test "$mb" = "$orig_head"
then
- say "Fast-forwarded $branch_name to $onto_name."
+ say "$(eval_gettext "Fast-forwarded \$branch_name to \$onto_name.")"
move_to_original_branch
exit 0
fi
diff --git a/git-submodule.sh b/git-submodule.sh
index 30fa93a..aac575e 100755
--- a/git-submodule.sh
+++ b/git-submodule.sh
@@ -30,7 +30,22 @@ nofetch=
update=
prefix=
-# Resolve relative url by appending to parent's url
+# The function takes at most 2 arguments. The first argument is the
+# URL that navigates to the submodule origin repo. When relative, this URL
+# is relative to the superproject's origin URL. The second up_path
+# argument, if specified, is the relative path that navigates
+# from the submodule working tree to the superproject working tree.
+#
+# The output of the function is the origin URL of the submodule.
+#
+# The output will either be an absolute URL or filesystem path (if the
+# superproject origin URL is an absolute URL or filesystem path,
+# respectively) or a relative filesystem path (if the superproject
+# origin URL is a relative filesystem path).
+#
+# When the output is a relative filesystem path, the path is either
+# relative to the submodule working tree, if up_path is specified, or to
+# the superproject working tree otherwise.
resolve_relative_url ()
{
remote=$(get_default_remote)
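
To make the comment above concrete, here is a worked sketch of both resolution modes (all URLs and paths are illustrative). An absolute superproject origin absorbs the leading ../ components of the submodule URL; a relative origin keeps the result relative, prefixed with up_path when given:

    # origin git://example.com/proj.git + url ../sub.git  ->  git://example.com/sub.git
    # origin ../proj + url ../sub + up_path ../../        ->  ../../../sub
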
@@ -39,6 +54,21 @@ resolve_relative_url ()
url="$1"
remoteurl=${remoteurl%/}
sep=/
+ up_path="$2"
+
+ case "$remoteurl" in
+ *:*|/*)
+ is_relative=
+ ;;
+ ./*|../*)
+ is_relative=t
+ ;;
+ *)
+ is_relative=t
+ remoteurl="./$remoteurl"
+ ;;
+ esac
+
while test -n "$url"
do
case "$url" in
@@ -53,7 +83,12 @@ resolve_relative_url ()
sep=:
;;
*)
- die "$(eval_gettext "cannot strip one component off url '\$remoteurl'")"
+ if test -z "$is_relative" || test "." = "$remoteurl"
+ then
+ die "$(eval_gettext "cannot strip one component off url '\$remoteurl'")"
+ else
+ remoteurl=.
+ fi
;;
esac
;;
@@ -64,7 +99,8 @@ resolve_relative_url ()
break;;
esac
done
- echo "$remoteurl$sep${url%/}"
+ remoteurl="$remoteurl$sep${url%/}"
+ echo "${is_relative:+${up_path}}${remoteurl#./}"
}
#
@@ -145,8 +181,11 @@ module_clone()
rm -f "$gitdir/index"
else
mkdir -p "$gitdir_base"
- git clone $quiet -n ${reference:+"$reference"} \
- --separate-git-dir "$gitdir" "$url" "$sm_path" ||
+ (
+ clear_local_git_env
+ git clone $quiet -n ${reference:+"$reference"} \
+ --separate-git-dir "$gitdir" "$url" "$sm_path"
+ ) ||
die "$(eval_gettext "Clone of '\$url' into submodule path '\$sm_path' failed")"
fi
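
Running the clone inside a subshell with clear_local_git_env keeps repository-local environment variables of the superproject (GIT_DIR, GIT_INDEX_FILE, GIT_WORK_TREE and friends) from leaking into the submodule clone, where they would point at the wrong repository. The helper comes from git-sh-setup and is conceptually just:

    # sketch of the idea; the exact variable list comes from git itself
    unset $(git rev-parse --local-env-vars)
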
@@ -712,7 +751,7 @@ cmd_summary() {
if [ -n "$files" ]
then
test -n "$cached" &&
- die "$(gettext -- "--cached cannot be used with --files")"
+ die "$(gettext "The --cached option cannot be used with the --files option")"
diff_cmd=diff-files
head=
fi
@@ -967,14 +1006,26 @@ cmd_sync()
# Possibly a url relative to parent
case "$url" in
./*|../*)
- url=$(resolve_relative_url "$url") || exit
+ # rewrite foo/bar as ../.. to find path from
+ # submodule work tree to superproject work tree
+ up_path="$(echo "$sm_path" | sed "s/[^/][^/]*/../g")" &&
+ # guarantee a trailing /
+ up_path=${up_path%/}/ &&
+ # path from submodule work tree to submodule origin repo
+ sub_origin_url=$(resolve_relative_url "$url" "$up_path") &&
+ # path from superproject work tree to submodule origin repo
+ super_config_url=$(resolve_relative_url "$url") || exit
+ ;;
+ *)
+ sub_origin_url="$url"
+ super_config_url="$url"
;;
esac
if git config "submodule.$name.url" >/dev/null 2>/dev/null
then
say "$(eval_gettext "Synchronizing submodule url for '\$name'")"
- git config submodule."$name".url "$url"
+ git config submodule."$name".url "$super_config_url"
if test -e "$sm_path"/.git
then
@@ -982,7 +1033,7 @@ cmd_sync()
clear_local_git_env
cd "$sm_path"
remote=$(get_default_remote)
- git config remote."$remote".url "$url"
+ git config remote."$remote".url "$sub_origin_url"
)
fi
fi
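
git submodule sync now records two distinct URLs for a relative submodule: super_config_url, resolved from the superproject's perspective and written to the superproject's config, and sub_origin_url, resolved from inside the submodule working tree and written to the submodule's remote.<remote>.url. The sed expression builds the up_path for the latter by turning every path component into ..; for example:

    echo "lib/a/b" | sed "s/[^/][^/]*/../g"   # prints ../../..
    # with the guaranteed trailing slash this becomes ../../../
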
diff --git a/git-svn.perl b/git-svn.perl
index 0b074c4..828b8f0 100755
--- a/git-svn.perl
+++ b/git-svn.perl
@@ -10,6 +10,43 @@ use vars qw/ $AUTHOR $VERSION
$AUTHOR = 'Eric Wong <normalperson@yhbt.net>';
$VERSION = '@@GIT_VERSION@@';
+use Carp qw/croak/;
+use Digest::MD5;
+use IO::File qw//;
+use File::Basename qw/dirname basename/;
+use File::Path qw/mkpath/;
+use File::Spec;
+use File::Find;
+use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/;
+use IPC::Open3;
+use Memoize;
+
+use Git::SVN;
+use Git::SVN::Editor;
+use Git::SVN::Fetcher;
+use Git::SVN::Ra;
+use Git::SVN::Prompt;
+use Git::SVN::Log;
+use Git::SVN::Migration;
+
+use Git::SVN::Utils qw(fatal can_compress);
+use Git qw(
+ git_cmd_try
+ command
+ command_oneline
+ command_noisy
+ command_output_pipe
+ command_close_pipe
+ command_bidi_pipe
+ command_close_bidi_pipe
+);
+
+BEGIN {
+ Memoize::memoize 'Git::config';
+ Memoize::memoize 'Git::config_bool';
+}
+
+
# From which subdir have we been invoked?
my $cmd_dir_prefix = eval {
command_oneline([qw/rev-parse --show-prefix/], STDERR => 0)
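
Memoizing Git::config and Git::config_bool at startup means repeated configuration lookups during a run are served from an in-process cache instead of spawning a git process each time; git-svn reads the same svn-remote.* keys over and over while fetching. Roughly the command saved on every repeated lookup (the key shown is illustrative):

    git config --get svn-remote.svn.url
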
@@ -17,10 +54,7 @@ my $cmd_dir_prefix = eval {
my $git_dir_user_set = 1 if defined $ENV{GIT_DIR};
$ENV{GIT_DIR} ||= '.git';
-$Git::SVN::default_repo_id = 'svn';
-$Git::SVN::default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn';
$Git::SVN::Ra::_log_window_size = 100;
-$Git::SVN::_minimize_url = 'unset';
if (! exists $ENV{SVN_SSH} && exists $ENV{GIT_SSH}) {
$ENV{SVN_SSH} = $ENV{GIT_SSH};
@@ -35,8 +69,6 @@ $Git::SVN::Log::TZ = $ENV{TZ};
$ENV{TZ} = 'UTC';
$| = 1; # unbuffer STDOUT
-sub fatal (@) { print STDERR "@_\n"; exit 1 }
-
# All SVN commands do it. Otherwise we may die on SIGPIPE when the remote
# repository decides to close the connection which we expect to be kept alive.
$SIG{PIPE} = 'IGNORE';
@@ -66,39 +98,6 @@ sub _req_svn {
fatal "Need SVN::Core 1.1.0 or better (got $SVN::Core::VERSION)";
}
}
-my $can_compress = eval { require Compress::Zlib; 1};
-use Carp qw/croak/;
-use Digest::MD5;
-use IO::File qw//;
-use File::Basename qw/dirname basename/;
-use File::Path qw/mkpath/;
-use File::Spec;
-use File::Find;
-use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/;
-use IPC::Open3;
-use Git;
-use Git::SVN::Editor qw//;
-use Git::SVN::Fetcher qw//;
-use Git::SVN::Ra qw//;
-use Git::SVN::Prompt qw//;
-use Memoize; # core since 5.8.0, Jul 2002
-
-BEGIN {
- # import functions from Git into our packages, en masse
- no strict 'refs';
- foreach (qw/command command_oneline command_noisy command_output_pipe
- command_input_pipe command_close_pipe
- command_bidi_pipe command_close_bidi_pipe/) {
- for my $package ( qw(Git::SVN::Migration Git::SVN::Log Git::SVN),
- __PACKAGE__) {
- *{"${package}::$_"} = \&{"Git::$_"};
- }
- }
- Memoize::memoize 'Git::config';
- Memoize::memoize 'Git::config_bool';
-}
-
-my ($SVN);
$sha1 = qr/[a-f\d]{40}/;
$sha1_short = qr/[a-f\d]{4,40}/;
@@ -108,8 +107,11 @@ my ($_stdin, $_help, $_edit,
$_version, $_fetch_all, $_no_rebase, $_fetch_parent,
$_merge, $_strategy, $_preserve_merges, $_dry_run, $_local,
$_prefix, $_no_checkout, $_url, $_verbose,
- $_git_format, $_commit_url, $_tag, $_merge_info, $_interactive);
-$Git::SVN::_follow_parent = 1;
+ $_commit_url, $_tag, $_merge_info, $_interactive);
+
+# This is a refactoring artifact so Git::SVN can get at this git-svn switch.
+sub opt_prefix { return $_prefix || '' }
+
$Git::SVN::Fetcher::_placeholder_filename = ".gitignore";
$_q ||= 0;
my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username,
@@ -269,7 +271,7 @@ my %cmd = (
{ 'url' => \$_url, } ],
'blame' => [ \&Git::SVN::Log::cmd_blame,
"Show what revision and author last modified each line of a file",
- { 'git-format' => \$_git_format } ],
+ { 'git-format' => \$Git::SVN::Log::_git_format } ],
'reset' => [ \&cmd_reset,
"Undo fetches back to the specified SVN revision",
{ 'revision|r=s' => \$_revision,
@@ -367,9 +369,9 @@ Git::SVN::init_vars();
eval {
Git::SVN::verify_remotes_sanity();
$cmd{$cmd}->[0]->(@ARGV);
+ post_fetch_checkout();
};
fatal $@ if $@;
-post_fetch_checkout();
exit 0;
####################### primary functions ######################
@@ -775,6 +777,44 @@ sub populate_merge_info {
return undef;
}
+sub dcommit_rebase {
+ my ($is_last, $current, $fetched_ref, $svn_error) = @_;
+ my @diff;
+
+ if ($svn_error) {
+ print STDERR "\nERROR from SVN:\n",
+ $svn_error->expanded_message, "\n";
+ }
+ unless ($_no_rebase) {
+ # we always want to rebase against the current HEAD,
+ # not any head that was passed to us
+ @diff = command('diff-tree', $current,
+ $fetched_ref, '--');
+ my @finish;
+ if (@diff) {
+ @finish = rebase_cmd();
+ print STDERR "W: $current and ", $fetched_ref,
+ " differ, using @finish:\n",
+ join("\n", @diff), "\n";
+ } elsif ($is_last) {
+ print "No changes between ", $current, " and ",
+ $fetched_ref,
+ "\nResetting to the latest ",
+ $fetched_ref, "\n";
+ @finish = qw/reset --mixed/;
+ }
+ command_noisy(@finish, $fetched_ref) if @finish;
+ }
+ if ($svn_error) {
+ die "ERROR: Not all changes have been committed into SVN"
+ .($_no_rebase ? ".\n" : ", however the committed\n"
+ ."ones (if any) seem to be successfully integrated "
+ ."into the working tree.\n")
+ ."Please see the above messages for details.\n";
+ }
+ return @diff;
+}
+
sub cmd_dcommit {
my $head = shift;
command_noisy(qw/update-index --refresh/);
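
Extracting dcommit_rebase lets the same rebase/reset logic run both on the normal path after each committed revision and from the SVN error handler installed further down, so a failure partway through git svn dcommit still leaves the already-committed revisions integrated into the working tree. The user-facing entry points are unchanged; illustrative invocations:

    git svn dcommit              # commit each local change to SVN, rebasing as it goes
    git svn dcommit --no-rebase  # skip the local rebase/reset step entirely
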
@@ -902,6 +942,7 @@ sub cmd_dcommit {
}
my $rewritten_parent;
+ my $current_head = command_oneline(qw/rev-parse HEAD/);
Git::SVN::remove_username($expect_url);
if (defined($_merge_info)) {
$_merge_info =~ tr{ }{\n};
@@ -941,6 +982,14 @@ sub cmd_dcommit {
},
mergeinfo => $_merge_info,
svn_path => '');
+
+ my $err_handler = $SVN::Error::handler;
+ $SVN::Error::handler = sub {
+ my $err = shift;
+ dcommit_rebase(1, $current_head, $gs->refname,
+ $err);
+ };
+
if (!Git::SVN::Editor->new(\%ed_opts)->apply_diff) {
print "No changes\n$d~1 == $d\n";
} elsif ($parents->{$d} && @{$parents->{$d}}) {
@@ -948,31 +997,19 @@ sub cmd_dcommit {
$parents->{$d};
}
$_fetch_all ? $gs->fetch_all : $gs->fetch;
+ $SVN::Error::handler = $err_handler;
$last_rev = $cmt_rev;
next if $_no_rebase;
- # we always want to rebase against the current HEAD,
- # not any head that was passed to us
- my @diff = command('diff-tree', $d,
- $gs->refname, '--');
- my @finish;
- if (@diff) {
- @finish = rebase_cmd();
- print STDERR "W: $d and ", $gs->refname,
- " differ, using @finish:\n",
- join("\n", @diff), "\n";
- } else {
- print "No changes between current HEAD and ",
- $gs->refname,
- "\nResetting to the latest ",
- $gs->refname, "\n";
- @finish = qw/reset --mixed/;
- }
- command_noisy(@finish, $gs->refname);
+ my @diff = dcommit_rebase(@$linear_refs == 0, $d,
+ $gs->refname, undef);
- $rewritten_parent = command_oneline(qw/rev-parse HEAD/);
+ $rewritten_parent = command_oneline(qw/rev-parse/,
+ $gs->refname);
if (@diff) {
+ $current_head = command_oneline(qw/rev-parse
+ HEAD/);
@refs = ();
my ($url_, $rev_, $uuid_, $gs_) =
working_head_info('HEAD', \@refs);
@@ -1017,6 +1054,7 @@ sub cmd_dcommit {
}
$parents = \%p;
$linear_refs = \@l;
+ undef $last_rev;
}
}
}
@@ -1578,7 +1616,7 @@ sub cmd_reset {
}
sub cmd_gc {
- if (!$can_compress) {
+ if (!can_compress()) {
warn "Compress::Zlib could not be found; unhandled.log " .
"files will not be compressed.\n";
}
@@ -1598,8 +1636,8 @@ sub rebase_cmd {
sub post_fetch_checkout {
return if $_no_checkout;
+ return if verify_ref('HEAD^0');
my $gs = $Git::SVN::_head or return;
- return if verify_ref('refs/heads/master^0');
# look for "trunk" ref if it exists
my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}};
@@ -1612,9 +1650,8 @@ sub post_fetch_checkout {
}
}
- my $valid_head = verify_ref('HEAD^0');
- command_noisy(qw(update-ref refs/heads/master), $gs->refname);
- return if ($valid_head || !verify_ref('HEAD^0'));
+ command_noisy(qw(update-ref HEAD), $gs->refname);
+ return unless verify_ref('HEAD^0');
return if $ENV{GIT_DIR} !~ m#^(?:.*/)?\.git$#;
my $index = $ENV{GIT_INDEX_FILE} || "$ENV{GIT_DIR}/index";
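
post_fetch_checkout no longer assumes the local branch is named master: it returns early when HEAD already resolves to a commit, and otherwise updates whatever branch HEAD refers to so that it points at the ref git-svn fetched. In practice this matters for the first checkout after an initial clone (URL is illustrative):

    git svn clone http://svn.example.com/repo/trunk project
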
@@ -2014,13 +2051,13 @@ sub md5sum {
} elsif (!$ref) {
$md5->add($arg) or croak $!;
} else {
- ::fatal "Can't provide MD5 hash for unknown ref type: '", $ref, "'";
+ fatal "Can't provide MD5 hash for unknown ref type: '", $ref, "'";
}
return $md5->hexdigest();
}
sub gc_directory {
- if ($can_compress && -f $_ && basename($_) eq "unhandled.log") {
+ if (can_compress() && -f $_ && basename($_) eq "unhandled.log") {
my $out_filename = $_ . ".gz";
open my $in_fh, "<", $_ or die "Unable to open $_: $!\n";
binmode $in_fh;
@@ -2038,3035 +2075,6 @@ sub gc_directory {
}
}
-package Git::SVN;
-use strict;
-use warnings;
-use Fcntl qw/:DEFAULT :seek/;
-use constant rev_map_fmt => 'NH40';
-use vars qw/$default_repo_id $default_ref_id $_no_metadata $_follow_parent
- $_repack $_repack_flags $_use_svm_props $_head
- $_use_svnsync_props $no_reuse_existing $_minimize_url
- $_use_log_author $_add_author_from $_localtime/;
-use Carp qw/croak/;
-use File::Path qw/mkpath/;
-use File::Copy qw/copy/;
-use IPC::Open3;
-use Time::Local;
-use Memoize; # core since 5.8.0, Jul 2002
-use Memoize::Storable;
-use POSIX qw(:signal_h);
-my $can_use_yaml;
-BEGIN {
- $can_use_yaml = eval { require Git::SVN::Memoize::YAML; 1};
-}
-
-my ($_gc_nr, $_gc_period);
-
-# properties that we do not log:
-my %SKIP_PROP;
-BEGIN {
- %SKIP_PROP = map { $_ => 1 } qw/svn:wc:ra_dav:version-url
- svn:special svn:executable
- svn:entry:committed-rev
- svn:entry:last-author
- svn:entry:uuid
- svn:entry:committed-date/;
-
- # some options are read globally, but can be overridden locally
- # per [svn-remote "..."] section. Command-line options will *NOT*
- # override options set in an [svn-remote "..."] section
- no strict 'refs';
- for my $option (qw/follow_parent no_metadata use_svm_props
- use_svnsync_props/) {
- my $key = $option;
- $key =~ tr/_//d;
- my $prop = "-$option";
- *$option = sub {
- my ($self) = @_;
- return $self->{$prop} if exists $self->{$prop};
- my $k = "svn-remote.$self->{repo_id}.$key";
- eval { command_oneline(qw/config --get/, $k) };
- if ($@) {
- $self->{$prop} = ${"Git::SVN::_$option"};
- } else {
- my $v = command_oneline(qw/config --bool/,$k);
- $self->{$prop} = $v eq 'false' ? 0 : 1;
- }
- return $self->{$prop};
- }
- }
-}
-
-
-my (%LOCKFILES, %INDEX_FILES);
-END {
- unlink keys %LOCKFILES if %LOCKFILES;
- unlink keys %INDEX_FILES if %INDEX_FILES;
-}
-
-sub resolve_local_globs {
- my ($url, $fetch, $glob_spec) = @_;
- return unless defined $glob_spec;
- my $ref = $glob_spec->{ref};
- my $path = $glob_spec->{path};
- foreach (command(qw#for-each-ref --format=%(refname) refs/#)) {
- next unless m#^$ref->{regex}$#;
- my $p = $1;
- my $pathname = desanitize_refname($path->full_path($p));
- my $refname = desanitize_refname($ref->full_path($p));
- if (my $existing = $fetch->{$pathname}) {
- if ($existing ne $refname) {
- die "Refspec conflict:\n",
- "existing: $existing\n",
- " globbed: $refname\n";
- }
- my $u = (::cmt_metadata("$refname"))[0];
- $u =~ s!^\Q$url\E(/|$)!! or die
- "$refname: '$url' not found in '$u'\n";
- if ($pathname ne $u) {
- warn "W: Refspec glob conflict ",
- "(ref: $refname):\n",
- "expected path: $pathname\n",
- " real path: $u\n",
- "Continuing ahead with $u\n";
- next;
- }
- } else {
- $fetch->{$pathname} = $refname;
- }
- }
-}
-
-sub parse_revision_argument {
- my ($base, $head) = @_;
- if (!defined $::_revision || $::_revision eq 'BASE:HEAD') {
- return ($base, $head);
- }
- return ($1, $2) if ($::_revision =~ /^(\d+):(\d+)$/);
- return ($::_revision, $::_revision) if ($::_revision =~ /^\d+$/);
- return ($head, $head) if ($::_revision eq 'HEAD');
- return ($base, $1) if ($::_revision =~ /^BASE:(\d+)$/);
- return ($1, $head) if ($::_revision =~ /^(\d+):HEAD$/);
- die "revision argument: $::_revision not understood by git-svn\n";
-}
-
-sub fetch_all {
- my ($repo_id, $remotes) = @_;
- if (ref $repo_id) {
- my $gs = $repo_id;
- $repo_id = undef;
- $repo_id = $gs->{repo_id};
- }
- $remotes ||= read_all_remotes();
- my $remote = $remotes->{$repo_id} or
- die "[svn-remote \"$repo_id\"] unknown\n";
- my $fetch = $remote->{fetch};
- my $url = $remote->{url} or die "svn-remote.$repo_id.url not defined\n";
- my (@gs, @globs);
- my $ra = Git::SVN::Ra->new($url);
- my $uuid = $ra->get_uuid;
- my $head = $ra->get_latest_revnum;
-
- # ignore errors; $head revision may not even exist anymore
- eval { $ra->get_log("", $head, 0, 1, 0, 1, sub { $head = $_[1] }) };
- warn "W: $@\n" if $@;
-
- my $base = defined $fetch ? $head : 0;
-
- # read the max revs for wildcard expansion (branches/*, tags/*)
- foreach my $t (qw/branches tags/) {
- defined $remote->{$t} or next;
- push @globs, @{$remote->{$t}};
-
- my $max_rev = eval { tmp_config(qw/--int --get/,
- "svn-remote.$repo_id.${t}-maxRev") };
- if (defined $max_rev && ($max_rev < $base)) {
- $base = $max_rev;
- } elsif (!defined $max_rev) {
- $base = 0;
- }
- }
-
- if ($fetch) {
- foreach my $p (sort keys %$fetch) {
- my $gs = Git::SVN->new($fetch->{$p}, $repo_id, $p);
- my $lr = $gs->rev_map_max;
- if (defined $lr) {
- $base = $lr if ($lr < $base);
- }
- push @gs, $gs;
- }
- }
-
- ($base, $head) = parse_revision_argument($base, $head);
- $ra->gs_fetch_loop_common($base, $head, \@gs, \@globs);
-}
-
-sub read_all_remotes {
- my $r = {};
- my $use_svm_props = eval { command_oneline(qw/config --bool
- svn.useSvmProps/) };
- $use_svm_props = $use_svm_props eq 'true' if $use_svm_props;
- my $svn_refspec = qr{\s*(.*?)\s*:\s*(.+?)\s*};
- foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) {
- if (m!^(.+)\.fetch=$svn_refspec$!) {
- my ($remote, $local_ref, $remote_ref) = ($1, $2, $3);
- die("svn-remote.$remote: remote ref '$remote_ref' "
- . "must start with 'refs/'\n")
- unless $remote_ref =~ m{^refs/};
- $local_ref = uri_decode($local_ref);
- $r->{$remote}->{fetch}->{$local_ref} = $remote_ref;
- $r->{$remote}->{svm} = {} if $use_svm_props;
- } elsif (m!^(.+)\.usesvmprops=\s*(.*)\s*$!) {
- $r->{$1}->{svm} = {};
- } elsif (m!^(.+)\.url=\s*(.*)\s*$!) {
- $r->{$1}->{url} = $2;
- } elsif (m!^(.+)\.pushurl=\s*(.*)\s*$!) {
- $r->{$1}->{pushurl} = $2;
- } elsif (m!^(.+)\.ignore-refs=\s*(.*)\s*$!) {
- $r->{$1}->{ignore_refs_regex} = $2;
- } elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) {
- my ($remote, $t, $local_ref, $remote_ref) =
- ($1, $2, $3, $4);
- die("svn-remote.$remote: remote ref '$remote_ref' ($t) "
- . "must start with 'refs/'\n")
- unless $remote_ref =~ m{^refs/};
- $local_ref = uri_decode($local_ref);
- my $rs = {
- t => $t,
- remote => $remote,
- path => Git::SVN::GlobSpec->new($local_ref, 1),
- ref => Git::SVN::GlobSpec->new($remote_ref, 0) };
- if (length($rs->{ref}->{right}) != 0) {
- die "The '*' glob character must be the last ",
- "character of '$remote_ref'\n";
- }
- push @{ $r->{$remote}->{$t} }, $rs;
- }
- }
-
- map {
- if (defined $r->{$_}->{svm}) {
- my $svm;
- eval {
- my $section = "svn-remote.$_";
- $svm = {
- source => tmp_config('--get',
- "$section.svm-source"),
- replace => tmp_config('--get',
- "$section.svm-replace"),
- }
- };
- $r->{$_}->{svm} = $svm;
- }
- } keys %$r;
-
- foreach my $remote (keys %$r) {
- foreach ( grep { defined $_ }
- map { $r->{$remote}->{$_} } qw(branches tags) ) {
- foreach my $rs ( @$_ ) {
- $rs->{ignore_refs_regex} =
- $r->{$remote}->{ignore_refs_regex};
- }
- }
- }
-
- $r;
-}
-
-sub init_vars {
- $_gc_nr = $_gc_period = 1000;
- if (defined $_repack || defined $_repack_flags) {
- warn "Repack options are obsolete; they have no effect.\n";
- }
-}
-
-sub verify_remotes_sanity {
- return unless -d $ENV{GIT_DIR};
- my %seen;
- foreach (command(qw/config -l/)) {
- if (m!^svn-remote\.(?:.+)\.fetch=.*:refs/remotes/(\S+)\s*$!) {
- if ($seen{$1}) {
- die "Remote ref refs/remotes/$1 is tracked by",
- "\n \"$_\"\nand\n \"$seen{$1}\"\n",
- "Please resolve this ambiguity in ",
- "your git configuration file before ",
- "continuing\n";
- }
- $seen{$1} = $_;
- }
- }
-}
-
-sub find_existing_remote {
- my ($url, $remotes) = @_;
- return undef if $no_reuse_existing;
- my $existing;
- foreach my $repo_id (keys %$remotes) {
- my $u = $remotes->{$repo_id}->{url} or next;
- next if $u ne $url;
- $existing = $repo_id;
- last;
- }
- $existing;
-}
-
-sub init_remote_config {
- my ($self, $url, $no_write) = @_;
- $url =~ s!/+$!!; # strip trailing slash
- my $r = read_all_remotes();
- my $existing = find_existing_remote($url, $r);
- if ($existing) {
- unless ($no_write) {
- print STDERR "Using existing ",
- "[svn-remote \"$existing\"]\n";
- }
- $self->{repo_id} = $existing;
- } elsif ($_minimize_url) {
- my $min_url = Git::SVN::Ra->new($url)->minimize_url;
- $existing = find_existing_remote($min_url, $r);
- if ($existing) {
- unless ($no_write) {
- print STDERR "Using existing ",
- "[svn-remote \"$existing\"]\n";
- }
- $self->{repo_id} = $existing;
- }
- if ($min_url ne $url) {
- unless ($no_write) {
- print STDERR "Using higher level of URL: ",
- "$url => $min_url\n";
- }
- my $old_path = $self->{path};
- $self->{path} = $url;
- $self->{path} =~ s!^\Q$min_url\E(/|$)!!;
- if (length $old_path) {
- $self->{path} .= "/$old_path";
- }
- $url = $min_url;
- }
- }
- my $orig_url;
- if (!$existing) {
- # verify that we aren't overwriting anything:
- $orig_url = eval {
- command_oneline('config', '--get',
- "svn-remote.$self->{repo_id}.url")
- };
- if ($orig_url && ($orig_url ne $url)) {
- die "svn-remote.$self->{repo_id}.url already set: ",
- "$orig_url\nwanted to set to: $url\n";
- }
- }
- my ($xrepo_id, $xpath) = find_ref($self->refname);
- if (!$no_write && defined $xpath) {
- die "svn-remote.$xrepo_id.fetch already set to track ",
- "$xpath:", $self->refname, "\n";
- }
- unless ($no_write) {
- command_noisy('config',
- "svn-remote.$self->{repo_id}.url", $url);
- $self->{path} =~ s{^/}{};
- $self->{path} =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
- command_noisy('config', '--add',
- "svn-remote.$self->{repo_id}.fetch",
- "$self->{path}:".$self->refname);
- }
- $self->{url} = $url;
-}
-
-sub find_by_url { # repos_root and path are optional
- my ($class, $full_url, $repos_root, $path) = @_;
-
- return undef unless defined $full_url;
- remove_username($full_url);
- remove_username($repos_root) if defined $repos_root;
- my $remotes = read_all_remotes();
- if (defined $full_url && defined $repos_root && !defined $path) {
- $path = $full_url;
- $path =~ s#^\Q$repos_root\E(?:/|$)##;
- }
- foreach my $repo_id (keys %$remotes) {
- my $u = $remotes->{$repo_id}->{url} or next;
- remove_username($u);
- next if defined $repos_root && $repos_root ne $u;
-
- my $fetch = $remotes->{$repo_id}->{fetch} || {};
- foreach my $t (qw/branches tags/) {
- foreach my $globspec (@{$remotes->{$repo_id}->{$t}}) {
- resolve_local_globs($u, $fetch, $globspec);
- }
- }
- my $p = $path;
- my $rwr = rewrite_root({repo_id => $repo_id});
- my $svm = $remotes->{$repo_id}->{svm}
- if defined $remotes->{$repo_id}->{svm};
- unless (defined $p) {
- $p = $full_url;
- my $z = $u;
- my $prefix = '';
- if ($rwr) {
- $z = $rwr;
- remove_username($z);
- } elsif (defined $svm) {
- $z = $svm->{source};
- $prefix = $svm->{replace};
- $prefix =~ s#^\Q$u\E(?:/|$)##;
- $prefix =~ s#/$##;
- }
- $p =~ s#^\Q$z\E(?:/|$)#$prefix# or next;
- }
- foreach my $f (keys %$fetch) {
- next if $f ne $p;
- return Git::SVN->new($fetch->{$f}, $repo_id, $f);
- }
- }
- undef;
-}
-
-sub init {
- my ($class, $url, $path, $repo_id, $ref_id, $no_write) = @_;
- my $self = _new($class, $repo_id, $ref_id, $path);
- if (defined $url) {
- $self->init_remote_config($url, $no_write);
- }
- $self;
-}
-
-sub find_ref {
- my ($ref_id) = @_;
- foreach (command(qw/config -l/)) {
- next unless m!^svn-remote\.(.+)\.fetch=
- \s*(.*?)\s*:\s*(.+?)\s*$!x;
- my ($repo_id, $path, $ref) = ($1, $2, $3);
- if ($ref eq $ref_id) {
- $path = '' if ($path =~ m#^\./?#);
- return ($repo_id, $path);
- }
- }
- (undef, undef, undef);
-}
-
-sub new {
- my ($class, $ref_id, $repo_id, $path) = @_;
- if (defined $ref_id && !defined $repo_id && !defined $path) {
- ($repo_id, $path) = find_ref($ref_id);
- if (!defined $repo_id) {
- die "Could not find a \"svn-remote.*.fetch\" key ",
- "in the repository configuration matching: ",
- "$ref_id\n";
- }
- }
- my $self = _new($class, $repo_id, $ref_id, $path);
- if (!defined $self->{path} || !length $self->{path}) {
- my $fetch = command_oneline('config', '--get',
- "svn-remote.$repo_id.fetch",
- ":$ref_id\$") or
- die "Failed to read \"svn-remote.$repo_id.fetch\" ",
- "\":$ref_id\$\" in config\n";
- ($self->{path}, undef) = split(/\s*:\s*/, $fetch);
- }
- $self->{path} =~ s{/+}{/}g;
- $self->{path} =~ s{\A/}{};
- $self->{path} =~ s{/\z}{};
- $self->{url} = command_oneline('config', '--get',
- "svn-remote.$repo_id.url") or
- die "Failed to read \"svn-remote.$repo_id.url\" in config\n";
- $self->{pushurl} = eval { command_oneline('config', '--get',
- "svn-remote.$repo_id.pushurl") };
- $self->rebuild;
- $self;
-}
-
-sub refname {
- my ($refname) = $_[0]->{ref_id} ;
-
- # It cannot end with a slash /; we reject this because SVN
- # can't have directories with a slash in their name, either:
- if ($refname =~ m{/$}) {
- die "ref: '$refname' ends with a trailing slash, this is ",
- "not permitted by git nor Subversion\n";
- }
-
- # It cannot have an ASCII control character (such as tab), space,
- # tilde ~, caret ^, colon :, question-mark ?, asterisk *, or open
- # bracket [ anywhere.
- #
- # Additionally, % must be escaped because it is used for escaping
- # and we want our escaped refname to be reversible
- $refname =~ s{([ \%~\^:\?\*\[\t])}{uc sprintf('%%%02x',ord($1))}eg;
-
- # no slash-separated component can begin with a dot .
- # /.* becomes /%2E*
- $refname =~ s{/\.}{/%2E}g;
-
- # It cannot have two consecutive dots .. anywhere
- # .. becomes %2E%2E
- $refname =~ s{\.\.}{%2E%2E}g;
-
- # trailing dots and .lock are not allowed
- # .$ becomes %2E and .lock becomes %2Elock
- $refname =~ s{\.(?=$|lock$)}{%2E};
-
- # the sequence @{ is used to access the reflog
- # @{ becomes %40{
- $refname =~ s{\@\{}{%40\{}g;
-
- return $refname;
-}
-
-sub desanitize_refname {
- my ($refname) = @_;
- $refname =~ s{%(?:([0-9A-F]{2}))}{chr hex($1)}eg;
- return $refname;
-}
-
-sub svm_uuid {
- my ($self) = @_;
- return $self->{svm}->{uuid} if $self->svm;
- $self->ra;
- unless ($self->{svm}) {
- die "SVM UUID not cached, and reading remotely failed\n";
- }
- $self->{svm}->{uuid};
-}
-
-sub svm {
- my ($self) = @_;
- return $self->{svm} if $self->{svm};
- my $svm;
- # see if we have it in our config, first:
- eval {
- my $section = "svn-remote.$self->{repo_id}";
- $svm = {
- source => tmp_config('--get', "$section.svm-source"),
- uuid => tmp_config('--get', "$section.svm-uuid"),
- replace => tmp_config('--get', "$section.svm-replace"),
- }
- };
- if ($svm && $svm->{source} && $svm->{uuid} && $svm->{replace}) {
- $self->{svm} = $svm;
- }
- $self->{svm};
-}
-
-sub _set_svm_vars {
- my ($self, $ra) = @_;
- return $ra if $self->svm;
-
- my @err = ( "useSvmProps set, but failed to read SVM properties\n",
- "(svm:source, svm:uuid) ",
- "from the following URLs:\n" );
- sub read_svm_props {
- my ($self, $ra, $path, $r) = @_;
- my $props = ($ra->get_dir($path, $r))[2];
- my $src = $props->{'svm:source'};
- my $uuid = $props->{'svm:uuid'};
- return undef if (!$src || !$uuid);
-
- chomp($src, $uuid);
-
- $uuid =~ m{^[0-9a-f\-]{30,}$}i
- or die "doesn't look right - svm:uuid is '$uuid'\n";
-
- # the '!' is used to mark the repos_root!/relative/path
- $src =~ s{/?!/?}{/};
- $src =~ s{/+$}{}; # no trailing slashes please
- # username is of no interest
- $src =~ s{(^[a-z\+]*://)[^/@]*@}{$1};
-
- my $replace = $ra->{url};
- $replace .= "/$path" if length $path;
-
- my $section = "svn-remote.$self->{repo_id}";
- tmp_config("$section.svm-source", $src);
- tmp_config("$section.svm-replace", $replace);
- tmp_config("$section.svm-uuid", $uuid);
- $self->{svm} = {
- source => $src,
- uuid => $uuid,
- replace => $replace
- };
- }
-
- my $r = $ra->get_latest_revnum;
- my $path = $self->{path};
- my %tried;
- while (length $path) {
- unless ($tried{"$self->{url}/$path"}) {
- return $ra if $self->read_svm_props($ra, $path, $r);
- $tried{"$self->{url}/$path"} = 1;
- }
- $path =~ s#/?[^/]+$##;
- }
- die "Path: '$path' should be ''\n" if $path ne '';
- return $ra if $self->read_svm_props($ra, $path, $r);
- $tried{"$self->{url}/$path"} = 1;
-
- if ($ra->{repos_root} eq $self->{url}) {
- die @err, (map { " $_\n" } keys %tried), "\n";
- }
-
- # nope, make sure we're connected to the repository root:
- my $ok;
- my @tried_b;
- $path = $ra->{svn_path};
- $ra = Git::SVN::Ra->new($ra->{repos_root});
- while (length $path) {
- unless ($tried{"$ra->{url}/$path"}) {
- $ok = $self->read_svm_props($ra, $path, $r);
- last if $ok;
- $tried{"$ra->{url}/$path"} = 1;
- }
- $path =~ s#/?[^/]+$##;
- }
- die "Path: '$path' should be ''\n" if $path ne '';
- $ok ||= $self->read_svm_props($ra, $path, $r);
- $tried{"$ra->{url}/$path"} = 1;
- if (!$ok) {
- die @err, (map { " $_\n" } keys %tried), "\n";
- }
- Git::SVN::Ra->new($self->{url});
-}
-
-sub svnsync {
- my ($self) = @_;
- return $self->{svnsync} if $self->{svnsync};
-
- if ($self->no_metadata) {
- die "Can't have both 'noMetadata' and ",
- "'useSvnsyncProps' options set!\n";
- }
- if ($self->rewrite_root) {
- die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ",
- "options set!\n";
- }
- if ($self->rewrite_uuid) {
- die "Can't have both 'useSvnsyncProps' and 'rewriteUUID' ",
- "options set!\n";
- }
-
- my $svnsync;
- # see if we have it in our config, first:
- eval {
- my $section = "svn-remote.$self->{repo_id}";
-
- my $url = tmp_config('--get', "$section.svnsync-url");
- ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
- die "doesn't look right - svn:sync-from-url is '$url'\n";
-
- my $uuid = tmp_config('--get', "$section.svnsync-uuid");
- ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
- die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
-
- $svnsync = { url => $url, uuid => $uuid }
- };
- if ($svnsync && $svnsync->{url} && $svnsync->{uuid}) {
- return $self->{svnsync} = $svnsync;
- }
-
- my $err = "useSvnsyncProps set, but failed to read " .
- "svnsync property: svn:sync-from-";
- my $rp = $self->ra->rev_proplist(0);
-
- my $url = $rp->{'svn:sync-from-url'} or die $err . "url\n";
- ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
- die "doesn't look right - svn:sync-from-url is '$url'\n";
-
- my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n";
- ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
- die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
-
- my $section = "svn-remote.$self->{repo_id}";
- tmp_config('--add', "$section.svnsync-uuid", $uuid);
- tmp_config('--add', "$section.svnsync-url", $url);
- return $self->{svnsync} = { url => $url, uuid => $uuid };
-}
-
-# this allows us to memoize our SVN::Ra UUID locally and avoid a
-# remote lookup (useful for 'git svn log').
-sub ra_uuid {
- my ($self) = @_;
- unless ($self->{ra_uuid}) {
- my $key = "svn-remote.$self->{repo_id}.uuid";
- my $uuid = eval { tmp_config('--get', $key) };
- if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/i) {
- $self->{ra_uuid} = $uuid;
- } else {
- die "ra_uuid called without URL\n" unless $self->{url};
- $self->{ra_uuid} = $self->ra->get_uuid;
- tmp_config('--add', $key, $self->{ra_uuid});
- }
- }
- $self->{ra_uuid};
-}
-
-sub _set_repos_root {
- my ($self, $repos_root) = @_;
- my $k = "svn-remote.$self->{repo_id}.reposRoot";
- $repos_root ||= $self->ra->{repos_root};
- tmp_config($k, $repos_root);
- $repos_root;
-}
-
-sub repos_root {
- my ($self) = @_;
- my $k = "svn-remote.$self->{repo_id}.reposRoot";
- eval { tmp_config('--get', $k) } || $self->_set_repos_root;
-}
-
-sub ra {
- my ($self) = shift;
- my $ra = Git::SVN::Ra->new($self->{url});
- $self->_set_repos_root($ra->{repos_root});
- if ($self->use_svm_props && !$self->{svm}) {
- if ($self->no_metadata) {
- die "Can't have both 'noMetadata' and ",
- "'useSvmProps' options set!\n";
- } elsif ($self->use_svnsync_props) {
- die "Can't have both 'useSvnsyncProps' and ",
- "'useSvmProps' options set!\n";
- }
- $ra = $self->_set_svm_vars($ra);
- $self->{-want_revprops} = 1;
- }
- $ra;
-}
-
-# prop_walk(PATH, REV, SUB)
-# -------------------------
-# Recursively traverse PATH at revision REV and invoke SUB for each
-# directory that contains a SVN property. SUB will be invoked as
-# follows: &SUB(gs, path, props); where `gs' is this instance of
-# Git::SVN, `path' the path to the directory where the properties
-# `props' were found. The `path' will be relative to point of checkout,
-# that is, if url://repo/trunk is the current Git branch, and that
-# directory contains a sub-directory `d', SUB will be invoked with `/d/'
-# as `path' (note the trailing `/').
-sub prop_walk {
- my ($self, $path, $rev, $sub) = @_;
-
- $path =~ s#^/##;
- my ($dirent, undef, $props) = $self->ra->get_dir($path, $rev);
- $path =~ s#^/*#/#g;
- my $p = $path;
- # Strip the irrelevant part of the path.
- $p =~ s#^/+\Q$self->{path}\E(/|$)#/#;
- # Ensure the path is terminated by a `/'.
- $p =~ s#/*$#/#;
-
- # The properties contain all the internal SVN stuff nobody
- # (usually) cares about.
- my $interesting_props = 0;
- foreach (keys %{$props}) {
- # If it doesn't start with `svn:', it must be a
- # user-defined property.
- ++$interesting_props and next if $_ !~ /^svn:/;
- # FIXME: Fragile, if SVN adds new public properties,
- # this needs to be updated.
- ++$interesting_props if /^svn:(?:ignore|keywords|executable
- |eol-style|mime-type
- |externals|needs-lock)$/x;
- }
- &$sub($self, $p, $props) if $interesting_props;
-
- foreach (sort keys %$dirent) {
- next if $dirent->{$_}->{kind} != $SVN::Node::dir;
- $self->prop_walk($self->{path} . $p . $_, $rev, $sub);
- }
-}
-
-sub last_rev { ($_[0]->last_rev_commit)[0] }
-sub last_commit { ($_[0]->last_rev_commit)[1] }
-
-# returns the newest SVN revision number and newest commit SHA1
-sub last_rev_commit {
- my ($self) = @_;
- if (defined $self->{last_rev} && defined $self->{last_commit}) {
- return ($self->{last_rev}, $self->{last_commit});
- }
- my $c = ::verify_ref($self->refname.'^0');
- if ($c && !$self->use_svm_props && !$self->no_metadata) {
- my $rev = (::cmt_metadata($c))[1];
- if (defined $rev) {
- ($self->{last_rev}, $self->{last_commit}) = ($rev, $c);
- return ($rev, $c);
- }
- }
- my $map_path = $self->map_path;
- unless (-e $map_path) {
- ($self->{last_rev}, $self->{last_commit}) = (undef, undef);
- return (undef, undef);
- }
- my ($rev, $commit) = $self->rev_map_max(1);
- ($self->{last_rev}, $self->{last_commit}) = ($rev, $commit);
- return ($rev, $commit);
-}
-
-sub get_fetch_range {
- my ($self, $min, $max) = @_;
- $max ||= $self->ra->get_latest_revnum;
- $min ||= $self->rev_map_max;
- (++$min, $max);
-}
-
-sub tmp_config {
- my (@args) = @_;
- my $old_def_config = "$ENV{GIT_DIR}/svn/config";
- my $config = "$ENV{GIT_DIR}/svn/.metadata";
- if (! -f $config && -f $old_def_config) {
- rename $old_def_config, $config or
- die "Failed rename $old_def_config => $config: $!\n";
- }
- my $old_config = $ENV{GIT_CONFIG};
- $ENV{GIT_CONFIG} = $config;
- $@ = undef;
- my @ret = eval {
- unless (-f $config) {
- mkfile($config);
- open my $fh, '>', $config or
- die "Can't open $config: $!\n";
- print $fh "; This file is used internally by ",
- "git-svn\n" or die
- "Couldn't write to $config: $!\n";
- print $fh "; You should not have to edit it\n" or
- die "Couldn't write to $config: $!\n";
- close $fh or die "Couldn't close $config: $!\n";
- }
- command('config', @args);
- };
- my $err = $@;
- if (defined $old_config) {
- $ENV{GIT_CONFIG} = $old_config;
- } else {
- delete $ENV{GIT_CONFIG};
- }
- die $err if $err;
- wantarray ? @ret : $ret[0];
-}
-
-sub tmp_index_do {
- my ($self, $sub) = @_;
- my $old_index = $ENV{GIT_INDEX_FILE};
- $ENV{GIT_INDEX_FILE} = $self->{index};
- $@ = undef;
- my @ret = eval {
- my ($dir, $base) = ($self->{index} =~ m#^(.*?)/?([^/]+)$#);
- mkpath([$dir]) unless -d $dir;
- &$sub;
- };
- my $err = $@;
- if (defined $old_index) {
- $ENV{GIT_INDEX_FILE} = $old_index;
- } else {
- delete $ENV{GIT_INDEX_FILE};
- }
- die $err if $err;
- wantarray ? @ret : $ret[0];
-}
-
-sub assert_index_clean {
- my ($self, $treeish) = @_;
-
- $self->tmp_index_do(sub {
- command_noisy('read-tree', $treeish) unless -e $self->{index};
- my $x = command_oneline('write-tree');
- my ($y) = (command(qw/cat-file commit/, $treeish) =~
- /^tree ($::sha1)/mo);
- return if $y eq $x;
-
- warn "Index mismatch: $y != $x\nrereading $treeish\n";
- unlink $self->{index} or die "unlink $self->{index}: $!\n";
- command_noisy('read-tree', $treeish);
- $x = command_oneline('write-tree');
- if ($y ne $x) {
- ::fatal "trees ($treeish) $y != $x\n",
- "Something is seriously wrong...";
- }
- });
-}
-
-sub get_commit_parents {
- my ($self, $log_entry) = @_;
- my (%seen, @ret, @tmp);
- # legacy support for 'set-tree'; this is only used by set_tree_cb:
- if (my $ip = $self->{inject_parents}) {
- if (my $commit = delete $ip->{$log_entry->{revision}}) {
- push @tmp, $commit;
- }
- }
- if (my $cur = ::verify_ref($self->refname.'^0')) {
- push @tmp, $cur;
- }
- if (my $ipd = $self->{inject_parents_dcommit}) {
- if (my $commit = delete $ipd->{$log_entry->{revision}}) {
- push @tmp, @$commit;
- }
- }
- push @tmp, $_ foreach (@{$log_entry->{parents}}, @tmp);
- while (my $p = shift @tmp) {
- next if $seen{$p};
- $seen{$p} = 1;
- push @ret, $p;
- }
- @ret;
-}
-
-sub rewrite_root {
- my ($self) = @_;
- return $self->{-rewrite_root} if exists $self->{-rewrite_root};
- my $k = "svn-remote.$self->{repo_id}.rewriteRoot";
- my $rwr = eval { command_oneline(qw/config --get/, $k) };
- if ($rwr) {
- $rwr =~ s#/+$##;
- if ($rwr !~ m#^[a-z\+]+://#) {
- die "$rwr is not a valid URL (key: $k)\n";
- }
- }
- $self->{-rewrite_root} = $rwr;
-}
-
-sub rewrite_uuid {
- my ($self) = @_;
- return $self->{-rewrite_uuid} if exists $self->{-rewrite_uuid};
- my $k = "svn-remote.$self->{repo_id}.rewriteUUID";
- my $rwid = eval { command_oneline(qw/config --get/, $k) };
- if ($rwid) {
- $rwid =~ s#/+$##;
- if ($rwid !~ m#^[a-f0-9]{8}-(?:[a-f0-9]{4}-){3}[a-f0-9]{12}$#) {
- die "$rwid is not a valid UUID (key: $k)\n";
- }
- }
- $self->{-rewrite_uuid} = $rwid;
-}
-
-sub metadata_url {
- my ($self) = @_;
- ($self->rewrite_root || $self->{url}) .
- (length $self->{path} ? '/' . $self->{path} : '');
-}
-
-sub full_url {
- my ($self) = @_;
- $self->{url} . (length $self->{path} ? '/' . $self->{path} : '');
-}
-
-sub full_pushurl {
- my ($self) = @_;
- if ($self->{pushurl}) {
- return $self->{pushurl} . (length $self->{path} ? '/' .
- $self->{path} : '');
- } else {
- return $self->full_url;
- }
-}
-
-sub set_commit_header_env {
- my ($log_entry) = @_;
- my %env;
- foreach my $ned (qw/NAME EMAIL DATE/) {
- foreach my $ac (qw/AUTHOR COMMITTER/) {
- $env{"GIT_${ac}_${ned}"} = $ENV{"GIT_${ac}_${ned}"};
- }
- }
-
- $ENV{GIT_AUTHOR_NAME} = $log_entry->{name};
- $ENV{GIT_AUTHOR_EMAIL} = $log_entry->{email};
- $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_entry->{date};
-
- $ENV{GIT_COMMITTER_NAME} = (defined $log_entry->{commit_name})
- ? $log_entry->{commit_name}
- : $log_entry->{name};
- $ENV{GIT_COMMITTER_EMAIL} = (defined $log_entry->{commit_email})
- ? $log_entry->{commit_email}
- : $log_entry->{email};
- \%env;
-}
-
-sub restore_commit_header_env {
- my ($env) = @_;
- foreach my $ned (qw/NAME EMAIL DATE/) {
- foreach my $ac (qw/AUTHOR COMMITTER/) {
- my $k = "GIT_${ac}_${ned}";
- if (defined $env->{$k}) {
- $ENV{$k} = $env->{$k};
- } else {
- delete $ENV{$k};
- }
- }
- }
-}
-
-sub gc {
- command_noisy('gc', '--auto');
-};
-
-sub do_git_commit {
- my ($self, $log_entry) = @_;
- my $lr = $self->last_rev;
- if (defined $lr && $lr >= $log_entry->{revision}) {
- die "Last fetched revision of ", $self->refname,
- " was r$lr, but we are about to fetch: ",
- "r$log_entry->{revision}!\n";
- }
- if (my $c = $self->rev_map_get($log_entry->{revision})) {
- croak "$log_entry->{revision} = $c already exists! ",
- "Why are we refetching it?\n";
- }
- my $old_env = set_commit_header_env($log_entry);
- my $tree = $log_entry->{tree};
- if (!defined $tree) {
- $tree = $self->tmp_index_do(sub {
- command_oneline('write-tree') });
- }
- die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o;
-
- my @exec = ('git', 'commit-tree', $tree);
- foreach ($self->get_commit_parents($log_entry)) {
- push @exec, '-p', $_;
- }
- defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec))
- or croak $!;
- binmode $msg_fh;
-
- # we always get UTF-8 from SVN, but we may want our commits in
- # a different encoding.
- if (my $enc = Git::config('i18n.commitencoding')) {
- require Encode;
- Encode::from_to($log_entry->{log}, 'UTF-8', $enc);
- }
- print $msg_fh $log_entry->{log} or croak $!;
- restore_commit_header_env($old_env);
- unless ($self->no_metadata) {
- print $msg_fh "\ngit-svn-id: $log_entry->{metadata}\n"
- or croak $!;
- }
- $msg_fh->flush == 0 or croak $!;
- close $msg_fh or croak $!;
- chomp(my $commit = do { local $/; <$out_fh> });
- close $out_fh or croak $!;
- waitpid $pid, 0;
- croak $? if $?;
- if ($commit !~ /^$::sha1$/o) {
- die "Failed to commit, invalid sha1: $commit\n";
- }
-
- $self->rev_map_set($log_entry->{revision}, $commit, 1);
-
- $self->{last_rev} = $log_entry->{revision};
- $self->{last_commit} = $commit;
- print "r$log_entry->{revision}" unless $::_q > 1;
- if (defined $log_entry->{svm_revision}) {
- print " (\@$log_entry->{svm_revision})" unless $::_q > 1;
- $self->rev_map_set($log_entry->{svm_revision}, $commit,
- 0, $self->svm_uuid);
- }
- print " = $commit ($self->{ref_id})\n" unless $::_q > 1;
- if (--$_gc_nr == 0) {
- $_gc_nr = $_gc_period;
- gc();
- }
- return $commit;
-}
-
-sub match_paths {
- my ($self, $paths, $r) = @_;
- return 1 if $self->{path} eq '';
- if (my $path = $paths->{"/$self->{path}"}) {
- return ($path->{action} eq 'D') ? 0 : 1;
- }
- $self->{path_regex} ||= qr/^\/\Q$self->{path}\E\//;
- if (grep /$self->{path_regex}/, keys %$paths) {
- return 1;
- }
- my $c = '';
- foreach (split m#/#, $self->{path}) {
- $c .= "/$_";
- next unless ($paths->{$c} &&
- ($paths->{$c}->{action} =~ /^[AR]$/));
- if ($self->ra->check_path($self->{path}, $r) ==
- $SVN::Node::dir) {
- return 1;
- }
- }
- return 0;
-}
-
-sub find_parent_branch {
- my ($self, $paths, $rev) = @_;
- return undef unless $self->follow_parent;
- unless (defined $paths) {
- my $err_handler = $SVN::Error::handler;
- $SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs;
- $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1,
- sub { $paths = $_[0] });
- $SVN::Error::handler = $err_handler;
- }
- return undef unless defined $paths;
-
- # look for a parent from another branch:
- my @b_path_components = split m#/#, $self->{path};
- my @a_path_components;
- my $i;
- while (@b_path_components) {
- $i = $paths->{'/'.join('/', @b_path_components)};
- last if $i && defined $i->{copyfrom_path};
- unshift(@a_path_components, pop(@b_path_components));
- }
- return undef unless defined $i && defined $i->{copyfrom_path};
- my $branch_from = $i->{copyfrom_path};
- if (@a_path_components) {
- print STDERR "branch_from: $branch_from => ";
- $branch_from .= '/'.join('/', @a_path_components);
- print STDERR $branch_from, "\n";
- }
- my $r = $i->{copyfrom_rev};
- my $repos_root = $self->ra->{repos_root};
- my $url = $self->ra->{url};
- my $new_url = $url . $branch_from;
- print STDERR "Found possible branch point: ",
- "$new_url => ", $self->full_url, ", $r\n"
- unless $::_q > 1;
- $branch_from =~ s#^/##;
- my $gs = $self->other_gs($new_url, $url,
- $branch_from, $r, $self->{ref_id});
- my ($r0, $parent) = $gs->find_rev_before($r, 1);
- {
- my ($base, $head);
- if (!defined $r0 || !defined $parent) {
- ($base, $head) = parse_revision_argument(0, $r);
- } else {
- if ($r0 < $r) {
- $gs->ra->get_log([$gs->{path}], $r0 + 1, $r, 1,
- 0, 1, sub { $base = $_[1] - 1 });
- }
- }
- if (defined $base && $base <= $r) {
- $gs->fetch($base, $r);
- }
- ($r0, $parent) = $gs->find_rev_before($r, 1);
- }
- if (defined $r0 && defined $parent) {
- print STDERR "Found branch parent: ($self->{ref_id}) $parent\n"
- unless $::_q > 1;
- my $ed;
- if ($self->ra->can_do_switch) {
- $self->assert_index_clean($parent);
- print STDERR "Following parent with do_switch\n"
- unless $::_q > 1;
- # do_switch works with svn/trunk >= r22312, but that
- # is not included with SVN 1.4.3 (the latest version
- # at the moment), so we can't rely on it
- $self->{last_rev} = $r0;
- $self->{last_commit} = $parent;
- $ed = Git::SVN::Fetcher->new($self, $gs->{path});
- $gs->ra->gs_do_switch($r0, $rev, $gs,
- $self->full_url, $ed)
- or die "SVN connection failed somewhere...\n";
- } elsif ($self->ra->trees_match($new_url, $r0,
- $self->full_url, $rev)) {
- print STDERR "Trees match:\n",
- " $new_url\@$r0\n",
- " ${\$self->full_url}\@$rev\n",
- "Following parent with no changes\n"
- unless $::_q > 1;
- $self->tmp_index_do(sub {
- command_noisy('read-tree', $parent);
- });
- $self->{last_commit} = $parent;
- } else {
- print STDERR "Following parent with do_update\n"
- unless $::_q > 1;
- $ed = Git::SVN::Fetcher->new($self);
- $self->ra->gs_do_update($rev, $rev, $self, $ed)
- or die "SVN connection failed somewhere...\n";
- }
- print STDERR "Successfully followed parent\n" unless $::_q > 1;
- return $self->make_log_entry($rev, [$parent], $ed);
- }
- return undef;
-}
-
-sub do_fetch {
- my ($self, $paths, $rev) = @_;
- my $ed;
- my ($last_rev, @parents);
- if (my $lc = $self->last_commit) {
- # we can have a branch that was deleted, then re-added
- # under the same name but copied from another path, in
- # which case we'll have multiple parents (we don't
- # want to break the original ref, nor lose copypath info):
- if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
- push @{$log_entry->{parents}}, $lc;
- return $log_entry;
- }
- $ed = Git::SVN::Fetcher->new($self);
- $last_rev = $self->{last_rev};
- $ed->{c} = $lc;
- @parents = ($lc);
- } else {
- $last_rev = $rev;
- if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
- return $log_entry;
- }
- $ed = Git::SVN::Fetcher->new($self);
- }
- unless ($self->ra->gs_do_update($last_rev, $rev, $self, $ed)) {
- die "SVN connection failed somewhere...\n";
- }
- $self->make_log_entry($rev, \@parents, $ed);
-}
-
-sub mkemptydirs {
- my ($self, $r) = @_;
-
- sub scan {
- my ($r, $empty_dirs, $line) = @_;
- if (defined $r && $line =~ /^r(\d+)$/) {
- return 0 if $1 > $r;
- } elsif ($line =~ /^ \+empty_dir: (.+)$/) {
- $empty_dirs->{$1} = 1;
- } elsif ($line =~ /^ \-empty_dir: (.+)$/) {
- my @d = grep {m[^\Q$1\E(/|$)]} (keys %$empty_dirs);
- delete @$empty_dirs{@d};
- }
- 1; # continue
- };
-
- my %empty_dirs = ();
- my $gz_file = "$self->{dir}/unhandled.log.gz";
- if (-f $gz_file) {
- if (!$can_compress) {
- warn "Compress::Zlib could not be found; ",
- "empty directories in $gz_file will not be read\n";
- } else {
- my $gz = Compress::Zlib::gzopen($gz_file, "rb") or
- die "Unable to open $gz_file: $!\n";
- my $line;
- while ($gz->gzreadline($line) > 0) {
- scan($r, \%empty_dirs, $line) or last;
- }
- $gz->gzclose;
- }
- }
-
- if (open my $fh, '<', "$self->{dir}/unhandled.log") {
- binmode $fh or croak "binmode: $!";
- while (<$fh>) {
- scan($r, \%empty_dirs, $_) or last;
- }
- close $fh;
- }
-
- my $strip = qr/\A\Q$self->{path}\E(?:\/|$)/;
- foreach my $d (sort keys %empty_dirs) {
- $d = uri_decode($d);
- $d =~ s/$strip//;
- next unless length($d);
- next if -d $d;
- if (-e $d) {
- warn "$d exists but is not a directory\n";
- } else {
- print "creating empty directory: $d\n";
- mkpath([$d]);
- }
- }
-}
-
-sub get_untracked {
- my ($self, $ed) = @_;
- my @out;
- my $h = $ed->{empty};
- foreach (sort keys %$h) {
- my $act = $h->{$_} ? '+empty_dir' : '-empty_dir';
- push @out, " $act: " . uri_encode($_);
- warn "W: $act: $_\n";
- }
- foreach my $t (qw/dir_prop file_prop/) {
- $h = $ed->{$t} or next;
- foreach my $path (sort keys %$h) {
- my $ppath = $path eq '' ? '.' : $path;
- foreach my $prop (sort keys %{$h->{$path}}) {
- next if $SKIP_PROP{$prop};
- my $v = $h->{$path}->{$prop};
- my $t_ppath_prop = "$t: " .
- uri_encode($ppath) . ' ' .
- uri_encode($prop);
- if (defined $v) {
- push @out, " +$t_ppath_prop " .
- uri_encode($v);
- } else {
- push @out, " -$t_ppath_prop";
- }
- }
- }
- }
- foreach my $t (qw/absent_file absent_directory/) {
- $h = $ed->{$t} or next;
- foreach my $parent (sort keys %$h) {
- foreach my $path (sort @{$h->{$parent}}) {
- push @out, " $t: " .
- uri_encode("$parent/$path");
- warn "W: $t: $parent/$path ",
- "Insufficient permissions?\n";
- }
- }
- }
- \@out;
-}
-
-sub get_tz {
- # some systmes don't handle or mishandle %z, so be creative.
- my $t = shift || time;
- my $gm = timelocal(gmtime($t));
- my $sign = qw( + + - )[ $t <=> $gm ];
- return sprintf("%s%02d%02d", $sign, (gmtime(abs($t - $gm)))[2,1]);
-}
-
-# parse_svn_date(DATE)
-# --------------------
-# Given a date (in UTC) from Subversion, return a string in the format
-# "<TZ Offset> <local date/time>" that Git will use.
-#
-# By default the parsed date will be in UTC; if $Git::SVN::_localtime
-# is true we'll convert it to the local timezone instead.
-sub parse_svn_date {
- my $date = shift || return '+0000 1970-01-01 00:00:00';
- my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
- (\d\d)\:(\d\d)\:(\d\d)\.\d*Z$/x) or
- croak "Unable to parse date: $date\n";
- my $parsed_date; # Set next.
-
- if ($Git::SVN::_localtime) {
- # Translate the Subversion datetime to an epoch time.
- # Begin by switching ourselves to $date's timezone, UTC.
- my $old_env_TZ = $ENV{TZ};
- $ENV{TZ} = 'UTC';
-
- my $epoch_in_UTC =
- POSIX::strftime('%s', $S, $M, $H, $d, $m - 1, $Y - 1900);
-
- # Determine our local timezone (including DST) at the
- # time of $epoch_in_UTC. $Git::SVN::Log::TZ stored the
- # value of TZ, if any, at the time we were run.
- if (defined $Git::SVN::Log::TZ) {
- $ENV{TZ} = $Git::SVN::Log::TZ;
- } else {
- delete $ENV{TZ};
- }
-
- my $our_TZ = get_tz();
-
- # This converts $epoch_in_UTC into our local timezone.
- my ($sec, $min, $hour, $mday, $mon, $year,
- $wday, $yday, $isdst) = localtime($epoch_in_UTC);
-
- $parsed_date = sprintf('%s %04d-%02d-%02d %02d:%02d:%02d',
- $our_TZ, $year + 1900, $mon + 1,
- $mday, $hour, $min, $sec);
-
- # Reset us to the timezone in effect when we entered
- # this routine.
- if (defined $old_env_TZ) {
- $ENV{TZ} = $old_env_TZ;
- } else {
- delete $ENV{TZ};
- }
- } else {
- $parsed_date = "+0000 $Y-$m-$d $H:$M:$S";
- }
-
- return $parsed_date;
-}
-
-sub other_gs {
- my ($self, $new_url, $url,
- $branch_from, $r, $old_ref_id) = @_;
- my $gs = Git::SVN->find_by_url($new_url, $url, $branch_from);
- unless ($gs) {
- my $ref_id = $old_ref_id;
- $ref_id =~ s/\@\d+-*$//;
- $ref_id .= "\@$r";
- # just grow a tail if we're not unique enough :x
- $ref_id .= '-' while find_ref($ref_id);
- my ($u, $p, $repo_id) = ($new_url, '', $ref_id);
- if ($u =~ s#^\Q$url\E(/|$)##) {
- $p = $u;
- $u = $url;
- $repo_id = $self->{repo_id};
- }
- while (1) {
- # It is possible to tag two different subdirectories at
- # the same revision. If the url for an existing ref
- # does not match, we must either find a ref with a
- # matching url or create a new ref by growing a tail.
- $gs = Git::SVN->init($u, $p, $repo_id, $ref_id, 1);
- my (undef, $max_commit) = $gs->rev_map_max(1);
- last if (!$max_commit);
- my ($url) = ::cmt_metadata($max_commit);
- last if ($url eq $gs->metadata_url);
- $ref_id .= '-';
- }
- print STDERR "Initializing parent: $ref_id\n" unless $::_q > 1;
- }
- $gs
-}
-
-sub call_authors_prog {
- my ($orig_author) = @_;
- $orig_author = command_oneline('rev-parse', '--sq-quote', $orig_author);
- my $author = `$::_authors_prog $orig_author`;
- if ($? != 0) {
- die "$::_authors_prog failed with exit code $?\n"
- }
- if ($author =~ /^\s*(.+?)\s*<(.*)>\s*$/) {
- my ($name, $email) = ($1, $2);
- $email = undef if length $2 == 0;
- return [$name, $email];
- } else {
- die "Author: $orig_author: $::_authors_prog returned "
- . "invalid author format: $author\n";
- }
-}
-
-sub check_author {
- my ($author) = @_;
- if (!defined $author || length $author == 0) {
- $author = '(no author)';
- }
- if (!defined $::users{$author}) {
- if (defined $::_authors_prog) {
- $::users{$author} = call_authors_prog($author);
- } elsif (defined $::_authors) {
- die "Author: $author not defined in $::_authors file\n";
- }
- }
- $author;
-}
-
-sub find_extra_svk_parents {
- my ($self, $ed, $tickets, $parents) = @_;
- # aha! svk:merge property changed...
- my @tickets = split "\n", $tickets;
- my @known_parents;
- for my $ticket ( @tickets ) {
- my ($uuid, $path, $rev) = split /:/, $ticket;
- if ( $uuid eq $self->ra_uuid ) {
- my $url = $self->{url};
- my $repos_root = $url;
- my $branch_from = $path;
- $branch_from =~ s{^/}{};
- my $gs = $self->other_gs($repos_root."/".$branch_from,
- $url,
- $branch_from,
- $rev,
- $self->{ref_id});
- if ( my $commit = $gs->rev_map_get($rev, $uuid) ) {
- # wahey! we found it, but it might be
- # an old one (!)
- push @known_parents, [ $rev, $commit ];
- }
- }
- }
- # Ordering matters; highest-numbered commit merge tickets
- # first, as they may account for later merge ticket additions
- # or changes.
- @known_parents = map {$_->[1]} sort {$b->[0] <=> $a->[0]} @known_parents;
- for my $parent ( @known_parents ) {
- my @cmd = ('rev-list', $parent, map { "^$_" } @$parents );
- my ($msg_fh, $ctx) = command_output_pipe(@cmd);
- my $new;
- while ( <$msg_fh> ) {
- $new=1;last;
- }
- command_close_pipe($msg_fh, $ctx);
- if ( $new ) {
- print STDERR
- "Found merge parent (svk:merge ticket): $parent\n";
- push @$parents, $parent;
- }
- }
-}
-
-sub lookup_svn_merge {
- my $uuid = shift;
- my $url = shift;
- my $merge = shift;
-
- my ($source, $revs) = split ":", $merge;
- my $path = $source;
- $path =~ s{^/}{};
- my $gs = Git::SVN->find_by_url($url.$source, $url, $path);
- if ( !$gs ) {
- warn "Couldn't find revmap for $url$source\n";
- return;
- }
- my @ranges = split ",", $revs;
- my ($tip, $tip_commit);
- my @merged_commit_ranges;
- # find the tip
- for my $range ( @ranges ) {
- my ($bottom, $top) = split "-", $range;
- $top ||= $bottom;
- my $bottom_commit = $gs->find_rev_after( $bottom, 1, $top );
- my $top_commit = $gs->find_rev_before( $top, 1, $bottom );
-
- unless ($top_commit and $bottom_commit) {
- warn "W:unknown path/rev in svn:mergeinfo "
- ."dirprop: $source:$range\n";
- next;
- }
-
- if (scalar(command('rev-parse', "$bottom_commit^@"))) {
- push @merged_commit_ranges,
- "$bottom_commit^..$top_commit";
- } else {
- push @merged_commit_ranges, "$top_commit";
- }
-
- if ( !defined $tip or $top > $tip ) {
- $tip = $top;
- $tip_commit = $top_commit;
- }
- }
- return ($tip_commit, @merged_commit_ranges);
-}
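
The $merge argument handled above is one line of the svn:mergeinfo property, in "path:revision-ranges" form. A minimal sketch of just the parsing step, with an illustrative sample value:

	# Shape of an svn:mergeinfo entry as split by lookup_svn_merge.
	my $merge = '/branches/foo:1-5,7,9-12';   # sample value
	my ($source, $revs) = split ":", $merge;  # "/branches/foo", "1-5,7,9-12"
	for my $range (split ",", $revs) {
		my ($bottom, $top) = split "-", $range;
		$top ||= $bottom;                 # single revs like "7"
		print "$source: r$bottom..r$top\n";
	}
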
-
-sub _rev_list {
- my ($msg_fh, $ctx) = command_output_pipe(
- "rev-list", @_,
- );
- my @rv;
- while ( <$msg_fh> ) {
- chomp;
- push @rv, $_;
- }
- command_close_pipe($msg_fh, $ctx);
- @rv;
-}
-
-sub check_cherry_pick {
- my $base = shift;
- my $tip = shift;
- my $parents = shift;
- my @ranges = @_;
- my %commits = map { $_ => 1 }
- _rev_list("--no-merges", $tip, "--not", $base, @$parents, "--");
- for my $range ( @ranges ) {
- delete @commits{_rev_list($range, "--")};
- }
- for my $commit (keys %commits) {
- if (has_no_changes($commit)) {
- delete $commits{$commit};
- }
- }
- return (keys %commits);
-}
-
-sub has_no_changes {
- my $commit = shift;
-
- my @revs = split / /, command_oneline(
- qw(rev-list --parents -1 -m), $commit);
-
- # Commits with no parents, e.g. the start of a partial branch,
- # have changes by definition.
- return 1 if (@revs < 2);
-
-	# Commits with multiple parents, e.g. a merge, have no changes
- # by definition.
- return 0 if (@revs > 2);
-
- return (command_oneline("rev-parse", "$commit^{tree}") eq
- command_oneline("rev-parse", "$commit~1^{tree}"));
-}
-
-sub tie_for_persistent_memoization {
- my $hash = shift;
- my $path = shift;
-
- if ($can_use_yaml) {
- tie %$hash => 'Git::SVN::Memoize::YAML', "$path.yaml";
- } else {
- tie %$hash => 'Memoize::Storable', "$path.db", 'nstore';
- }
-}
-
-# The GIT_DIR environment variable is not always set until after the command
-# line arguments are processed, so we can't memoize in a BEGIN block.
-{
- my $memoized = 0;
-
- sub memoize_svn_mergeinfo_functions {
- return if $memoized;
- $memoized = 1;
-
- my $cache_path = "$ENV{GIT_DIR}/svn/.caches/";
- mkpath([$cache_path]) unless -d $cache_path;
-
- my %lookup_svn_merge_cache;
- my %check_cherry_pick_cache;
- my %has_no_changes_cache;
-
- tie_for_persistent_memoization(\%lookup_svn_merge_cache,
- "$cache_path/lookup_svn_merge");
- memoize 'lookup_svn_merge',
- SCALAR_CACHE => 'FAULT',
- LIST_CACHE => ['HASH' => \%lookup_svn_merge_cache],
- ;
-
- tie_for_persistent_memoization(\%check_cherry_pick_cache,
- "$cache_path/check_cherry_pick");
- memoize 'check_cherry_pick',
- SCALAR_CACHE => 'FAULT',
- LIST_CACHE => ['HASH' => \%check_cherry_pick_cache],
- ;
-
- tie_for_persistent_memoization(\%has_no_changes_cache,
- "$cache_path/has_no_changes");
- memoize 'has_no_changes',
- SCALAR_CACHE => ['HASH' => \%has_no_changes_cache],
- LIST_CACHE => 'FAULT',
- ;
- }
-
- sub unmemoize_svn_mergeinfo_functions {
- return if not $memoized;
- $memoized = 0;
-
- Memoize::unmemoize 'lookup_svn_merge';
- Memoize::unmemoize 'check_cherry_pick';
- Memoize::unmemoize 'has_no_changes';
- }
-
- Memoize::memoize 'Git::SVN::repos_root';
-}
-
-END {
- # Force cache writeout explicitly instead of waiting for
- # global destruction to avoid segfault in Storable:
- # http://rt.cpan.org/Public/Bug/Display.html?id=36087
- unmemoize_svn_mergeinfo_functions();
-}
-
-sub parents_exclude {
- my $parents = shift;
- my @commits = @_;
- return unless @commits;
-
- my @excluded;
- my $excluded;
- do {
- my @cmd = ('rev-list', "-1", @commits, "--not", @$parents );
- $excluded = command_oneline(@cmd);
- if ( $excluded ) {
- my @new;
- my $found;
- for my $commit ( @commits ) {
- if ( $commit eq $excluded ) {
- push @excluded, $commit;
- $found++;
- last;
- }
- else {
- push @new, $commit;
- }
- }
- die "saw commit '$excluded' in rev-list output, "
- ."but we didn't ask for that commit (wanted: @commits --not @$parents)"
- unless $found;
- @commits = @new;
- }
- }
- while ($excluded and @commits);
-
- return @excluded;
-}
-
-
-# note: this function should only be called if the various dirprops
-# have actually changed
-sub find_extra_svn_parents {
- my ($self, $ed, $mergeinfo, $parents) = @_;
- # aha! svk:merge property changed...
-
- memoize_svn_mergeinfo_functions();
-
- # We first search for merged tips which are not in our
- # history. Then, we figure out which git revisions are in
- # that tip, but not this revision. If all of those revisions
- # are now marked as merge, we can add the tip as a parent.
- my @merges = split "\n", $mergeinfo;
- my @merge_tips;
- my $url = $self->{url};
- my $uuid = $self->ra_uuid;
- my %ranges;
- for my $merge ( @merges ) {
- my ($tip_commit, @ranges) =
- lookup_svn_merge( $uuid, $url, $merge );
- unless (!$tip_commit or
- grep { $_ eq $tip_commit } @$parents ) {
- push @merge_tips, $tip_commit;
- $ranges{$tip_commit} = \@ranges;
- } else {
- push @merge_tips, undef;
- }
- }
-
- my %excluded = map { $_ => 1 }
- parents_exclude($parents, grep { defined } @merge_tips);
-
- # check merge tips for new parents
- my @new_parents;
- for my $merge_tip ( @merge_tips ) {
- my $spec = shift @merges;
- next unless $merge_tip and $excluded{$merge_tip};
-
- my $ranges = $ranges{$merge_tip};
-
- # check out 'new' tips
- my $merge_base;
- eval {
- $merge_base = command_oneline(
- "merge-base",
- @$parents, $merge_tip,
- );
- };
- if ($@) {
- die "An error occurred during merge-base"
- unless $@->isa("Git::Error::Command");
-
- warn "W: Cannot find common ancestor between ".
- "@$parents and $merge_tip. Ignoring merge info.\n";
- next;
- }
-
- # double check that there are no missing non-merge commits
- my (@incomplete) = check_cherry_pick(
- $merge_base, $merge_tip,
- $parents,
- @$ranges,
- );
-
- if ( @incomplete ) {
- warn "W:svn cherry-pick ignored ($spec) - missing "
- .@incomplete." commit(s) (eg $incomplete[0])\n";
- } else {
- warn
- "Found merge parent (svn:mergeinfo prop): ",
- $merge_tip, "\n";
- push @new_parents, $merge_tip;
- }
- }
-
- # cater for merges which merge commits from multiple branches
- if ( @new_parents > 1 ) {
- for ( my $i = 0; $i <= $#new_parents; $i++ ) {
- for ( my $j = 0; $j <= $#new_parents; $j++ ) {
- next if $i == $j;
- next unless $new_parents[$i];
- next unless $new_parents[$j];
- my $revs = command_oneline(
- "rev-list", "-1",
- "$new_parents[$i]..$new_parents[$j]",
- );
- if ( !$revs ) {
- undef($new_parents[$j]);
- }
- }
- }
- }
- push @$parents, grep { defined } @new_parents;
-}
-
-sub make_log_entry {
- my ($self, $rev, $parents, $ed) = @_;
- my $untracked = $self->get_untracked($ed);
-
- my @parents = @$parents;
- my $ps = $ed->{path_strip} || "";
- for my $path ( grep { m/$ps/ } %{$ed->{dir_prop}} ) {
- my $props = $ed->{dir_prop}{$path};
- if ( $props->{"svk:merge"} ) {
- $self->find_extra_svk_parents
- ($ed, $props->{"svk:merge"}, \@parents);
- }
- if ( $props->{"svn:mergeinfo"} ) {
- $self->find_extra_svn_parents
- ($ed,
- $props->{"svn:mergeinfo"},
- \@parents);
- }
- }
-
- open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!;
- print $un "r$rev\n" or croak $!;
- print $un $_, "\n" foreach @$untracked;
- my %log_entry = ( parents => \@parents, revision => $rev,
- log => '');
-
- my $headrev;
- my $logged = delete $self->{logged_rev_props};
- if (!$logged || $self->{-want_revprops}) {
- my $rp = $self->ra->rev_proplist($rev);
- foreach (sort keys %$rp) {
- my $v = $rp->{$_};
- if (/^svn:(author|date|log)$/) {
- $log_entry{$1} = $v;
- } elsif ($_ eq 'svm:headrev') {
- $headrev = $v;
- } else {
- print $un " rev_prop: ", uri_encode($_), ' ',
- uri_encode($v), "\n";
- }
- }
- } else {
- map { $log_entry{$_} = $logged->{$_} } keys %$logged;
- }
- close $un or croak $!;
-
- $log_entry{date} = parse_svn_date($log_entry{date});
- $log_entry{log} .= "\n";
- my $author = $log_entry{author} = check_author($log_entry{author});
- my ($name, $email) = defined $::users{$author} ? @{$::users{$author}}
- : ($author, undef);
-
- my ($commit_name, $commit_email) = ($name, $email);
- if ($_use_log_author) {
- my $name_field;
- if ($log_entry{log} =~ /From:\s+(.*\S)\s*\n/i) {
- $name_field = $1;
- } elsif ($log_entry{log} =~ /Signed-off-by:\s+(.*\S)\s*\n/i) {
- $name_field = $1;
- }
- if (!defined $name_field) {
- if (!defined $email) {
- $email = $name;
- }
- } elsif ($name_field =~ /(.*?)\s+<(.*)>/) {
- ($name, $email) = ($1, $2);
- } elsif ($name_field =~ /(.*)@/) {
- ($name, $email) = ($1, $name_field);
- } else {
- ($name, $email) = ($name_field, $name_field);
- }
- }
- if (defined $headrev && $self->use_svm_props) {
- if ($self->rewrite_root) {
- die "Can't have both 'useSvmProps' and 'rewriteRoot' ",
- "options set!\n";
- }
- if ($self->rewrite_uuid) {
- die "Can't have both 'useSvmProps' and 'rewriteUUID' ",
- "options set!\n";
- }
- my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}i;
- # we don't want "SVM: initializing mirror for junk" ...
- return undef if $r == 0;
- my $svm = $self->svm;
- if ($uuid ne $svm->{uuid}) {
- die "UUID mismatch on SVM path:\n",
- "expected: $svm->{uuid}\n",
- " got: $uuid\n";
- }
- my $full_url = $self->full_url;
- $full_url =~ s#^\Q$svm->{replace}\E(/|$)#$svm->{source}$1# or
- die "Failed to replace '$svm->{replace}' with ",
- "'$svm->{source}' in $full_url\n";
- # throw away username for storing in records
- remove_username($full_url);
- $log_entry{metadata} = "$full_url\@$r $uuid";
- $log_entry{svm_revision} = $r;
- $email ||= "$author\@$uuid";
- $commit_email ||= "$author\@$uuid";
- } elsif ($self->use_svnsync_props) {
- my $full_url = $self->svnsync->{url};
- $full_url .= "/$self->{path}" if length $self->{path};
- remove_username($full_url);
- my $uuid = $self->svnsync->{uuid};
- $log_entry{metadata} = "$full_url\@$rev $uuid";
- $email ||= "$author\@$uuid";
- $commit_email ||= "$author\@$uuid";
- } else {
- my $url = $self->metadata_url;
- remove_username($url);
- my $uuid = $self->rewrite_uuid || $self->ra->get_uuid;
- $log_entry{metadata} = "$url\@$rev " . $uuid;
- $email ||= "$author\@" . $uuid;
- $commit_email ||= "$author\@" . $uuid;
- }
- $log_entry{name} = $name;
- $log_entry{email} = $email;
- $log_entry{commit_name} = $commit_name;
- $log_entry{commit_email} = $commit_email;
- \%log_entry;
-}
-
-sub fetch {
- my ($self, $min_rev, $max_rev, @parents) = @_;
- my ($last_rev, $last_commit) = $self->last_rev_commit;
- my ($base, $head) = $self->get_fetch_range($min_rev, $max_rev);
- $self->ra->gs_fetch_loop_common($base, $head, [$self]);
-}
-
-sub set_tree_cb {
- my ($self, $log_entry, $tree, $rev, $date, $author) = @_;
- $self->{inject_parents} = { $rev => $tree };
- $self->fetch(undef, undef);
-}
-
-sub set_tree {
- my ($self, $tree) = (shift, shift);
- my $log_entry = ::get_commit_entry($tree);
- unless ($self->{last_rev}) {
- ::fatal("Must have an existing revision to commit");
- }
- my %ed_opts = ( r => $self->{last_rev},
- log => $log_entry->{log},
- ra => $self->ra,
- tree_a => $self->{last_commit},
- tree_b => $tree,
- editor_cb => sub {
- $self->set_tree_cb($log_entry, $tree, @_) },
- svn_path => $self->{path} );
- if (!Git::SVN::Editor->new(\%ed_opts)->apply_diff) {
- print "No changes\nr$self->{last_rev} = $tree\n";
- }
-}
-
-sub rebuild_from_rev_db {
- my ($self, $path) = @_;
- my $r = -1;
- open my $fh, '<', $path or croak "open: $!";
- binmode $fh or croak "binmode: $!";
- while (<$fh>) {
- length($_) == 41 or croak "inconsistent size in ($_) != 41";
- chomp($_);
- ++$r;
- next if $_ eq ('0' x 40);
- $self->rev_map_set($r, $_);
- print "r$r = $_\n";
- }
- close $fh or croak "close: $!";
- unlink $path or croak "unlink: $!";
-}
-
-sub rebuild {
- my ($self) = @_;
- my $map_path = $self->map_path;
- my $partial = (-e $map_path && ! -z $map_path);
- return unless ::verify_ref($self->refname.'^0');
- if (!$partial && ($self->use_svm_props || $self->no_metadata)) {
- my $rev_db = $self->rev_db_path;
- $self->rebuild_from_rev_db($rev_db);
- if ($self->use_svm_props) {
- my $svm_rev_db = $self->rev_db_path($self->svm_uuid);
- $self->rebuild_from_rev_db($svm_rev_db);
- }
- $self->unlink_rev_db_symlink;
- return;
- }
- print "Rebuilding $map_path ...\n" if (!$partial);
- my ($base_rev, $head) = ($partial ? $self->rev_map_max_norebuild(1) :
- (undef, undef));
- my ($log, $ctx) =
- command_output_pipe(qw/rev-list --pretty=raw --reverse/,
- ($head ? "$head.." : "") . $self->refname,
- '--');
- my $metadata_url = $self->metadata_url;
- remove_username($metadata_url);
- my $svn_uuid = $self->rewrite_uuid || $self->ra_uuid;
- my $c;
- while (<$log>) {
- if ( m{^commit ($::sha1)$} ) {
- $c = $1;
- next;
- }
- next unless s{^\s*(git-svn-id:)}{$1};
- my ($url, $rev, $uuid) = ::extract_metadata($_);
- remove_username($url);
-
- # ignore merges (from set-tree)
- next if (!defined $rev || !$uuid);
-
- # if we merged or otherwise started elsewhere, this is
- # how we break out of it
- if (($uuid ne $svn_uuid) ||
- ($metadata_url && $url && ($url ne $metadata_url))) {
- next;
- }
- if ($partial && $head) {
- print "Partial-rebuilding $map_path ...\n";
- print "Currently at $base_rev = $head\n";
- $head = undef;
- }
-
- $self->rev_map_set($rev, $c);
- print "r$rev = $c\n";
- }
- command_close_pipe($log, $ctx);
- print "Done rebuilding $map_path\n" if (!$partial || !$head);
- my $rev_db_path = $self->rev_db_path;
- if (-f $self->rev_db_path) {
- unlink $self->rev_db_path or croak "unlink: $!";
- }
- $self->unlink_rev_db_symlink;
-}
-
-# rev_map:
-# Tie::File seems to be prone to offset errors if revisions get sparse,
-# and it's not that fast either.  Tie::File is also not in Perl 5.6.  So
-# one of my favorite modules is out :< Next up would be one of the DBM
-# modules, but I'm not sure which is most portable...
-#
-# This is the replacement for the rev_db format, which was too big
-# and inefficient for large repositories with a lot of sparse history
-# (mainly tags)
-#
-# The format is this:
-# - 24 bytes for every record,
-# * 4 bytes for the integer representing an SVN revision number
-# * 20 bytes representing the sha1 of a git commit
-# - No empty padding records, unlike the old format
-# (except the last record, which can be overwritten)
-# - new records are written append-only since SVN revision numbers
-# increase monotonically
-# - lookups on SVN revision number are done via a binary search
-# - Piping the file to xxd -c24 is a good way of dumping it for
-# viewing or editing (piped back through xxd -r), should the need
-# ever arise.
-# - The last record can be a padding revision with an all-zero sha1.
-# This is used to optimize fetch performance when using multiple
-# "fetch" directives in .git/config
-#
-# These files are disposable unless noMetadata or useSvmProps is set
-
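A minimal, self-contained sketch of the record handling described above. It assumes the 'NH40' pack layout implied by the 4-byte big-endian revision plus 20-byte sha1 (the real code reads the format from the rev_map_fmt constant):

	#!/usr/bin/perl
	# Sketch of the .rev_map record format: append-only writes plus
	# binary-search lookup over fixed 24-byte records.
	use strict;
	use warnings;
	use Fcntl qw(SEEK_SET SEEK_END);

	use constant REC_FMT  => 'NH40'; # assumed equivalent of rev_map_fmt
	use constant REC_SIZE => 24;

	# Append one (rev, sha1) record; revs must increase monotonically.
	sub append_rec {
		my ($fh, $rev, $sha1) = @_;
		sysseek($fh, 0, SEEK_END) or die "seek: $!";
		syswrite($fh, pack(REC_FMT, $rev, $sha1), REC_SIZE) == REC_SIZE
			or die "write: $!";
	}

	# Binary search on the sorted, fixed-width records.
	sub lookup_rec {
		my ($fh, $rev) = @_;
		my $size = (stat($fh))[7];
		($size % REC_SIZE) == 0 or die "inconsistent size: $size";
		my ($l, $u) = (0, $size / REC_SIZE - 1);
		while ($l <= $u) {
			my $mid = int(($l + $u) / 2);
			sysseek($fh, $mid * REC_SIZE, SEEK_SET) or die "seek: $!";
			sysread($fh, my $buf, REC_SIZE) == REC_SIZE or die "read: $!";
			my ($r, $c) = unpack(REC_FMT, $buf);
			if    ($r < $rev) { $l = $mid + 1 }
			elsif ($r > $rev) { $u = $mid - 1 }
			else              { return $c }
		}
		return undef;
	}

The real _rev_map_set below additionally handles overwriting the trailing all-zero padding record; the sketch omits that.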
-sub _rev_map_set {
- my ($fh, $rev, $commit) = @_;
-
- binmode $fh or croak "binmode: $!";
- my $size = (stat($fh))[7];
- ($size % 24) == 0 or croak "inconsistent size: $size";
-
- my $wr_offset = 0;
- if ($size > 0) {
- sysseek($fh, -24, SEEK_END) or croak "seek: $!";
- my $read = sysread($fh, my $buf, 24) or croak "read: $!";
- $read == 24 or croak "read only $read bytes (!= 24)";
- my ($last_rev, $last_commit) = unpack(rev_map_fmt, $buf);
- if ($last_commit eq ('0' x40)) {
- if ($size >= 48) {
- sysseek($fh, -48, SEEK_END) or croak "seek: $!";
- $read = sysread($fh, $buf, 24) or
- croak "read: $!";
- $read == 24 or
- croak "read only $read bytes (!= 24)";
- ($last_rev, $last_commit) =
- unpack(rev_map_fmt, $buf);
- if ($last_commit eq ('0' x40)) {
- croak "inconsistent .rev_map\n";
- }
- }
- if ($last_rev >= $rev) {
- croak "last_rev is higher!: $last_rev >= $rev";
- }
- $wr_offset = -24;
- }
- }
- sysseek($fh, $wr_offset, SEEK_END) or croak "seek: $!";
- syswrite($fh, pack(rev_map_fmt, $rev, $commit), 24) == 24 or
- croak "write: $!";
-}
-
-sub _rev_map_reset {
- my ($fh, $rev, $commit) = @_;
- my $c = _rev_map_get($fh, $rev);
- $c eq $commit or die "_rev_map_reset(@_) commit $c does not match!\n";
- my $offset = sysseek($fh, 0, SEEK_CUR) or croak "seek: $!";
- truncate $fh, $offset or croak "truncate: $!";
-}
-
-sub mkfile {
- my ($path) = @_;
- unless (-e $path) {
- my ($dir, $base) = ($path =~ m#^(.*?)/?([^/]+)$#);
- mkpath([$dir]) unless -d $dir;
- open my $fh, '>>', $path or die "Couldn't create $path: $!\n";
- close $fh or die "Couldn't close (create) $path: $!\n";
- }
-}
-
-sub rev_map_set {
- my ($self, $rev, $commit, $update_ref, $uuid) = @_;
- defined $commit or die "missing arg3\n";
- length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n";
- my $db = $self->map_path($uuid);
- my $db_lock = "$db.lock";
- my $sigmask;
- $update_ref ||= 0;
- if ($update_ref) {
- $sigmask = POSIX::SigSet->new();
- my $signew = POSIX::SigSet->new(SIGINT, SIGHUP, SIGTERM,
- SIGALRM, SIGUSR1, SIGUSR2);
- sigprocmask(SIG_BLOCK, $signew, $sigmask) or
- croak "Can't block signals: $!";
- }
- mkfile($db);
-
- $LOCKFILES{$db_lock} = 1;
- my $sync;
- # both of these options make our .rev_db file very, very important
- # and we can't afford to lose it because rebuild() won't work
- if ($self->use_svm_props || $self->no_metadata) {
- $sync = 1;
- copy($db, $db_lock) or die "rev_map_set(@_): ",
- "Failed to copy: ",
- "$db => $db_lock ($!)\n";
- } else {
- rename $db, $db_lock or die "rev_map_set(@_): ",
- "Failed to rename: ",
- "$db => $db_lock ($!)\n";
- }
-
- sysopen(my $fh, $db_lock, O_RDWR | O_CREAT)
- or croak "Couldn't open $db_lock: $!\n";
- $update_ref eq 'reset' ? _rev_map_reset($fh, $rev, $commit) :
- _rev_map_set($fh, $rev, $commit);
- if ($sync) {
- $fh->flush or die "Couldn't flush $db_lock: $!\n";
- $fh->sync or die "Couldn't sync $db_lock: $!\n";
- }
- close $fh or croak $!;
- if ($update_ref) {
- $_head = $self;
- my $note = "";
- $note = " ($update_ref)" if ($update_ref !~ /^\d*$/);
- command_noisy('update-ref', '-m', "r$rev$note",
- $self->refname, $commit);
- }
- rename $db_lock, $db or die "rev_map_set(@_): ", "Failed to rename: ",
- "$db_lock => $db ($!)\n";
- delete $LOCKFILES{$db_lock};
- if ($update_ref) {
- sigprocmask(SIG_SETMASK, $sigmask) or
- croak "Can't restore signal mask: $!";
- }
-}
-
-# If want_commit, this will return an array of (rev, commit) where
-# commit _must_ be a valid commit in the archive.
-# Otherwise, it'll return the max revision (whether or not the
-# commit is valid or just an all-zero ('0' x 40) placeholder).
-sub rev_map_max {
- my ($self, $want_commit) = @_;
- $self->rebuild;
- my ($r, $c) = $self->rev_map_max_norebuild($want_commit);
- $want_commit ? ($r, $c) : $r;
-}
-
-sub rev_map_max_norebuild {
- my ($self, $want_commit) = @_;
- my $map_path = $self->map_path;
- stat $map_path or return $want_commit ? (0, undef) : 0;
- sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
- binmode $fh or croak "binmode: $!";
- my $size = (stat($fh))[7];
- ($size % 24) == 0 or croak "inconsistent size: $size";
-
- if ($size == 0) {
- close $fh or croak "close: $!";
- return $want_commit ? (0, undef) : 0;
- }
-
- sysseek($fh, -24, SEEK_END) or croak "seek: $!";
- sysread($fh, my $buf, 24) == 24 or croak "read: $!";
- my ($r, $c) = unpack(rev_map_fmt, $buf);
- if ($want_commit && $c eq ('0' x40)) {
- if ($size < 48) {
- return $want_commit ? (0, undef) : 0;
- }
- sysseek($fh, -48, SEEK_END) or croak "seek: $!";
- sysread($fh, $buf, 24) == 24 or croak "read: $!";
- ($r, $c) = unpack(rev_map_fmt, $buf);
- if ($c eq ('0'x40)) {
- croak "Penultimate record is all-zeroes in $map_path";
- }
- }
- close $fh or croak "close: $!";
- $want_commit ? ($r, $c) : $r;
-}
-
-sub rev_map_get {
- my ($self, $rev, $uuid) = @_;
- my $map_path = $self->map_path($uuid);
- return undef unless -e $map_path;
-
- sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
- my $c = _rev_map_get($fh, $rev);
- close($fh) or croak "close: $!";
- $c
-}
-
-sub _rev_map_get {
- my ($fh, $rev) = @_;
-
- binmode $fh or croak "binmode: $!";
- my $size = (stat($fh))[7];
- ($size % 24) == 0 or croak "inconsistent size: $size";
-
- if ($size == 0) {
- return undef;
- }
-
- my ($l, $u) = (0, $size - 24);
- my ($r, $c, $buf);
-
- while ($l <= $u) {
- my $i = int(($l/24 + $u/24) / 2) * 24;
- sysseek($fh, $i, SEEK_SET) or croak "seek: $!";
- sysread($fh, my $buf, 24) == 24 or croak "read: $!";
- my ($r, $c) = unpack(rev_map_fmt, $buf);
-
- if ($r < $rev) {
- $l = $i + 24;
- } elsif ($r > $rev) {
- $u = $i - 24;
- } else { # $r == $rev
- return $c eq ('0' x 40) ? undef : $c;
- }
- }
- undef;
-}
-
-# Finds the first svn revision that exists on (if $eq_ok is true) or
-# before $rev for the current branch. It will not search any lower
-# than $min_rev. Returns the git commit hash and svn revision number
-# if found, else (undef, undef).
-sub find_rev_before {
- my ($self, $rev, $eq_ok, $min_rev) = @_;
- --$rev unless $eq_ok;
- $min_rev ||= 1;
- my $max_rev = $self->rev_map_max;
- $rev = $max_rev if ($rev > $max_rev);
- while ($rev >= $min_rev) {
- if (my $c = $self->rev_map_get($rev)) {
- return ($rev, $c);
- }
- --$rev;
- }
- return (undef, undef);
-}
-
-# Finds the first svn revision that exists on (if $eq_ok is true) or
-# after $rev for the current branch. It will not search any higher
-# than $max_rev. Returns the git commit hash and svn revision number
-# if found, else (undef, undef).
-sub find_rev_after {
- my ($self, $rev, $eq_ok, $max_rev) = @_;
- ++$rev unless $eq_ok;
- $max_rev ||= $self->rev_map_max;
- while ($rev <= $max_rev) {
- if (my $c = $self->rev_map_get($rev)) {
- return ($rev, $c);
- }
- ++$rev;
- }
- return (undef, undef);
-}
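
Together these two routines bracket an arbitrary SVN revision range against the revisions actually present in the rev_map; git_svn_log_cmd below uses exactly this pattern. A hypothetical call, assuming $gs is an initialized Git::SVN object and the revision numbers are illustrative:

	# Hypothetical: clamp r100..r200 to the commits this branch imported.
	my ($r_min, $r_max) = (100, 200);
	my ($lo_rev, $lo_commit) = $gs->find_rev_after($r_min, 1, $r_max);
	my ($hi_rev, $hi_commit) = $gs->find_rev_before($r_max, 1, $r_min);
	if (defined $lo_commit && defined $hi_commit) {
		print "r$lo_rev..r$hi_rev => $lo_commit..$hi_commit\n";
	} else {
		print "nothing imported in r$r_min..r$r_max\n";
	}
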
-
-sub _new {
- my ($class, $repo_id, $ref_id, $path) = @_;
- unless (defined $repo_id && length $repo_id) {
- $repo_id = $Git::SVN::default_repo_id;
- }
- unless (defined $ref_id && length $ref_id) {
- $_prefix = '' unless defined($_prefix);
- $_[2] = $ref_id =
- "refs/remotes/$_prefix$Git::SVN::default_ref_id";
- }
- $_[1] = $repo_id;
- my $dir = "$ENV{GIT_DIR}/svn/$ref_id";
-
- # Older repos imported by us used $GIT_DIR/svn/foo instead of
- # $GIT_DIR/svn/refs/remotes/foo when tracking refs/remotes/foo
- if ($ref_id =~ m{^refs/remotes/(.*)}) {
- my $old_dir = "$ENV{GIT_DIR}/svn/$1";
- if (-d $old_dir && ! -d $dir) {
- $dir = $old_dir;
- }
- }
-
- $_[3] = $path = '' unless (defined $path);
- mkpath([$dir]);
- bless {
- ref_id => $ref_id, dir => $dir, index => "$dir/index",
- path => $path, config => "$ENV{GIT_DIR}/svn/config",
- map_root => "$dir/.rev_map", repo_id => $repo_id }, $class;
-}
-
-# for read-only access of old .rev_db formats
-sub unlink_rev_db_symlink {
- my ($self) = @_;
- my $link = $self->rev_db_path;
- $link =~ s/\.[\w-]+$// or croak "missing UUID at the end of $link";
- if (-l $link) {
- unlink $link or croak "unlink: $link failed!";
- }
-}
-
-sub rev_db_path {
- my ($self, $uuid) = @_;
- my $db_path = $self->map_path($uuid);
- $db_path =~ s{/\.rev_map\.}{/\.rev_db\.}
- or croak "map_path: $db_path does not contain '/.rev_map.' !";
- $db_path;
-}
-
-# the new replacement for .rev_db
-sub map_path {
- my ($self, $uuid) = @_;
- $uuid ||= $self->ra_uuid;
- "$self->{map_root}.$uuid";
-}
-
-sub uri_encode {
- my ($f) = @_;
- $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg;
- $f
-}
-
-sub uri_decode {
- my ($f) = @_;
- $f =~ s#%([0-9a-fA-F]{2})#chr(hex($1))#eg;
- $f
-}
-
-sub remove_username {
- $_[0] =~ s{^([^:]*://)[^@]+@}{$1};
-}
-
-package Git::SVN::Log;
-use strict;
-use warnings;
-use POSIX qw/strftime/;
-use constant commit_log_separator => ('-' x 72) . "\n";
-use vars qw/$TZ $limit $color $pager $non_recursive $verbose $oneline
- %rusers $show_commit $incremental/;
-my $l_fmt;
-
-sub cmt_showable {
- my ($c) = @_;
- return 1 if defined $c->{r};
-
- # big commit message got truncated by the 16k pretty buffer in rev-list
- if ($c->{l} && $c->{l}->[-1] eq "...\n" &&
- $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) {
- @{$c->{l}} = ();
- my @log = command(qw/cat-file commit/, $c->{c});
-
- # shift off the headers
- shift @log while ($log[0] ne '');
- shift @log;
-
- # TODO: make $c->{l} not have a trailing newline in the future
- @{$c->{l}} = map { "$_\n" } grep !/^git-svn-id: /, @log;
-
- (undef, $c->{r}, undef) = ::extract_metadata(
- (grep(/^git-svn-id: /, @log))[-1]);
- }
- return defined $c->{r};
-}
-
-sub log_use_color {
- return $color || Git->repository->get_colorbool('color.diff');
-}
-
-sub git_svn_log_cmd {
- my ($r_min, $r_max, @args) = @_;
- my $head = 'HEAD';
- my (@files, @log_opts);
- foreach my $x (@args) {
- if ($x eq '--' || @files) {
- push @files, $x;
- } else {
- if (::verify_ref("$x^0")) {
- $head = $x;
- } else {
- push @log_opts, $x;
- }
- }
- }
-
- my ($url, $rev, $uuid, $gs) = ::working_head_info($head);
- $gs ||= Git::SVN->_new;
- my @cmd = (qw/log --abbrev-commit --pretty=raw --default/,
- $gs->refname);
- push @cmd, '-r' unless $non_recursive;
- push @cmd, qw/--raw --name-status/ if $verbose;
- push @cmd, '--color' if log_use_color();
- push @cmd, @log_opts;
- if (defined $r_max && $r_max == $r_min) {
- push @cmd, '--max-count=1';
- if (my $c = $gs->rev_map_get($r_max)) {
- push @cmd, $c;
- }
- } elsif (defined $r_max) {
- if ($r_max < $r_min) {
- ($r_min, $r_max) = ($r_max, $r_min);
- }
- my (undef, $c_max) = $gs->find_rev_before($r_max, 1, $r_min);
- my (undef, $c_min) = $gs->find_rev_after($r_min, 1, $r_max);
- # If there are no commits in the range, both $c_max and $c_min
- # will be undefined. If there is at least 1 commit in the
- # range, both will be defined.
- return () if !defined $c_min || !defined $c_max;
- if ($c_min eq $c_max) {
- push @cmd, '--max-count=1', $c_min;
- } else {
- push @cmd, '--boundary', "$c_min..$c_max";
- }
- }
- return (@cmd, @files);
-}
-
-# adapted from pager.c
-sub config_pager {
- if (! -t *STDOUT) {
- $ENV{GIT_PAGER_IN_USE} = 'false';
- $pager = undef;
- return;
- }
- chomp($pager = command_oneline(qw(var GIT_PAGER)));
- if ($pager eq 'cat') {
- $pager = undef;
- }
- $ENV{GIT_PAGER_IN_USE} = defined($pager);
-}
-
-sub run_pager {
- return unless defined $pager;
- pipe my ($rfd, $wfd) or return;
- defined(my $pid = fork) or ::fatal "Can't fork: $!";
- if (!$pid) {
- open STDOUT, '>&', $wfd or
- ::fatal "Can't redirect to stdout: $!";
- return;
- }
- open STDIN, '<&', $rfd or ::fatal "Can't redirect stdin: $!";
- $ENV{LESS} ||= 'FRSX';
- exec $pager or ::fatal "Can't run pager: $! ($pager)";
-}
-
-sub format_svn_date {
- my $t = shift || time;
- my $gmoff = Git::SVN::get_tz($t);
- return strftime("%Y-%m-%d %H:%M:%S $gmoff (%a, %d %b %Y)", localtime($t));
-}
-
-sub parse_git_date {
- my ($t, $tz) = @_;
- # Date::Parse isn't in the standard Perl distro :(
- if ($tz =~ s/^\+//) {
- $t += tz_to_s_offset($tz);
- } elsif ($tz =~ s/^\-//) {
- $t -= tz_to_s_offset($tz);
- }
- return $t;
-}
-
-sub set_local_timezone {
- if (defined $TZ) {
- $ENV{TZ} = $TZ;
-			$new = 1; last;
- delete $ENV{TZ};
- }
-}
-
-sub tz_to_s_offset {
- my ($tz) = @_;
- $tz =~ s/(\d\d)$//;
- return ($1 * 60) + ($tz * 3600);
-}
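
Note the last two digits are minutes and the remainder hours; the sign is stripped by parse_git_date above before the call. Worked examples:

	# tz_to_s_offset('0530') => 30*60 + 5*3600
	print tz_to_s_offset('0530'), "\n";   # 19800  (5h30m)
	print tz_to_s_offset('0100'), "\n";   # 3600   (1h)
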
-
-sub get_author_info {
- my ($dest, $author, $t, $tz) = @_;
- $author =~ s/(?:^\s*|\s*$)//g;
- $dest->{a_raw} = $author;
- my $au;
- if ($::_authors) {
- $au = $rusers{$author} || undef;
- }
- if (!$au) {
- ($au) = ($author =~ /<([^>]+)\@[^>]+>$/);
- }
- $dest->{t} = $t;
- $dest->{tz} = $tz;
- $dest->{a} = $au;
- $dest->{t_utc} = parse_git_date($t, $tz);
-}
-
-sub process_commit {
- my ($c, $r_min, $r_max, $defer) = @_;
- if (defined $r_min && defined $r_max) {
- if ($r_min == $c->{r} && $r_min == $r_max) {
- show_commit($c);
- return 0;
- }
- return 1 if $r_min == $r_max;
- if ($r_min < $r_max) {
- # we need to reverse the print order
- return 0 if (defined $limit && --$limit < 0);
- push @$defer, $c;
- return 1;
- }
- if ($r_min != $r_max) {
- return 1 if ($r_min < $c->{r});
- return 1 if ($r_max > $c->{r});
- }
- }
- return 0 if (defined $limit && --$limit < 0);
- show_commit($c);
- return 1;
-}
-
-sub show_commit {
- my $c = shift;
- if ($oneline) {
- my $x = "\n";
- if (my $l = $c->{l}) {
- while ($l->[0] =~ /^\s*$/) { shift @$l }
- $x = $l->[0];
- }
- $l_fmt ||= 'A' . length($c->{r});
- print 'r',pack($l_fmt, $c->{r}),' | ';
- print "$c->{c} | " if $show_commit;
- print $x;
- } else {
- show_commit_normal($c);
- }
-}
-
-sub show_commit_changed_paths {
- my ($c) = @_;
- return unless $c->{changed};
- print "Changed paths:\n", @{$c->{changed}};
-}
-
-sub show_commit_normal {
- my ($c) = @_;
- print commit_log_separator, "r$c->{r} | ";
- print "$c->{c} | " if $show_commit;
- print "$c->{a} | ", format_svn_date($c->{t_utc}), ' | ';
- my $nr_line = 0;
-
- if (my $l = $c->{l}) {
- while ($l->[$#$l] eq "\n" && $#$l > 0
- && $l->[($#$l - 1)] eq "\n") {
- pop @$l;
- }
- $nr_line = scalar @$l;
- if (!$nr_line) {
- print "1 line\n\n\n";
- } else {
- if ($nr_line == 1) {
- $nr_line = '1 line';
- } else {
- $nr_line .= ' lines';
- }
- print $nr_line, "\n";
- show_commit_changed_paths($c);
- print "\n";
- print $_ foreach @$l;
- }
- } else {
- print "1 line\n";
- show_commit_changed_paths($c);
- print "\n";
-
- }
- foreach my $x (qw/raw stat diff/) {
- if ($c->{$x}) {
- print "\n";
- print $_ foreach @{$c->{$x}}
- }
- }
-}
-
-sub cmd_show_log {
- my (@args) = @_;
- my ($r_min, $r_max);
- my $r_last = -1; # prevent dupes
- set_local_timezone();
- if (defined $::_revision) {
- if ($::_revision =~ /^(\d+):(\d+)$/) {
- ($r_min, $r_max) = ($1, $2);
- } elsif ($::_revision =~ /^\d+$/) {
- $r_min = $r_max = $::_revision;
- } else {
- ::fatal "-r$::_revision is not supported, use ",
- "standard 'git log' arguments instead";
- }
- }
-
- config_pager();
- @args = git_svn_log_cmd($r_min, $r_max, @args);
- if (!@args) {
- print commit_log_separator unless $incremental || $oneline;
- return;
- }
- my $log = command_output_pipe(@args);
- run_pager();
- my (@k, $c, $d, $stat);
- my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/;
- while (<$log>) {
- if (/^${esc_color}commit (?:- )?($::sha1_short)/o) {
- my $cmt = $1;
- if ($c && cmt_showable($c) && $c->{r} != $r_last) {
- $r_last = $c->{r};
- process_commit($c, $r_min, $r_max, \@k) or
- goto out;
- }
- $d = undef;
- $c = { c => $cmt };
- } elsif (/^${esc_color}author (.+) (\d+) ([\-\+]?\d+)$/o) {
- get_author_info($c, $1, $2, $3);
- } elsif (/^${esc_color}(?:tree|parent|committer) /o) {
- # ignore
- } elsif (/^${esc_color}:\d{6} \d{6} $::sha1_short/o) {
- push @{$c->{raw}}, $_;
- } elsif (/^${esc_color}[ACRMDT]\t/) {
- # we could add $SVN->{svn_path} here, but that requires
- # remote access at the moment (repo_path_split)...
- s#^(${esc_color})([ACRMDT])\t#$1 $2 #o;
- push @{$c->{changed}}, $_;
- } elsif (/^${esc_color}diff /o) {
- $d = 1;
- push @{$c->{diff}}, $_;
- } elsif ($d) {
- push @{$c->{diff}}, $_;
- } elsif (/^\ .+\ \|\s*\d+\ $esc_color[\+\-]*
- $esc_color*[\+\-]*$esc_color$/x) {
- $stat = 1;
- push @{$c->{stat}}, $_;
- } elsif ($stat && /^ \d+ files changed, \d+ insertions/) {
- push @{$c->{stat}}, $_;
- $stat = undef;
- } elsif (/^${esc_color} (git-svn-id:.+)$/o) {
- ($c->{url}, $c->{r}, undef) = ::extract_metadata($1);
- } elsif (s/^${esc_color} //o) {
- push @{$c->{l}}, $_;
- }
- }
- if ($c && defined $c->{r} && $c->{r} != $r_last) {
- $r_last = $c->{r};
- process_commit($c, $r_min, $r_max, \@k);
- }
- if (@k) {
- ($r_min, $r_max) = ($r_max, $r_min);
- process_commit($_, $r_min, $r_max) foreach reverse @k;
- }
-out:
- close $log;
- print commit_log_separator unless $incremental || $oneline;
-}
-
-sub cmd_blame {
- my $path = pop;
-
- config_pager();
- run_pager();
-
- my ($fh, $ctx, $rev);
-
- if ($_git_format) {
- ($fh, $ctx) = command_output_pipe('blame', @_, $path);
- while (my $line = <$fh>) {
- if ($line =~ /^\^?([[:xdigit:]]+)\s/) {
- # Uncommitted edits show up as a rev ID of
- # all zeros, which we can't look up with
- # cmt_metadata
- if ($1 !~ /^0+$/) {
- (undef, $rev, undef) =
- ::cmt_metadata($1);
- $rev = '0' if (!$rev);
- } else {
- $rev = '0';
- }
- $rev = sprintf('%-10s', $rev);
- $line =~ s/^\^?[[:xdigit:]]+(\s)/$rev$1/;
- }
- print $line;
- }
- } else {
- ($fh, $ctx) = command_output_pipe('blame', '-p', @_, 'HEAD',
- '--', $path);
- my ($sha1);
- my %authors;
- my @buffer;
- my %dsha; #distinct sha keys
-
- while (my $line = <$fh>) {
- push @buffer, $line;
- if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
- $dsha{$1} = 1;
- }
- }
-
- my $s2r = ::cmt_sha2rev_batch([keys %dsha]);
-
- foreach my $line (@buffer) {
- if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
- $rev = $s2r->{$1};
- $rev = '0' if (!$rev)
- }
- elsif ($line =~ /^author (.*)/) {
- $authors{$rev} = $1;
- $authors{$rev} =~ s/\s/_/g;
- }
- elsif ($line =~ /^\t(.*)$/) {
- printf("%6s %10s %s\n", $rev, $authors{$rev}, $1);
- }
- }
- }
- command_close_pipe($fh, $ctx);
-}
-
-package Git::SVN::Migration;
-# these version numbers do NOT correspond to actual version numbers
-# of git or git-svn. They are just relative.
-#
-# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD
-#
-# v1 layout: .git/$id/info/url, refs/remotes/$id
-#
-# v2 layout: .git/svn/$id/info/url, refs/remotes/$id
-#
-# v3 layout: .git/svn/$id, refs/remotes/$id
-# - info/url may remain for backwards compatibility
-# - this is the layout we automatically migrate up to,
-# - this will be used by git svn init on single branches
-# v3.1 layout (auto migrated):
-# - .rev_db => .rev_db.$UUID; .rev_db will remain as a symlink
-# for backwards compatibility
-#
-# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id
-# - this is only created for newly multi-init-ed
-# repositories. Similar in spirit to the
-# --use-separate-remotes option in git-clone (now default)
-# - we do not automatically migrate to this (following
-# the example set by core git)
-#
-# v5 layout: .rev_db.$UUID => .rev_map.$UUID
-# - newer, more-efficient format that uses 24 bytes per record
-# with no filler space.
-# - use xxd -c24 < .rev_map.$UUID to view and debug
-# - This is a one-way migration; repositories updated to the
-# new format will not be able to use old git-svn without
-# rebuilding the .rev_db. Rebuilding the rev_db is not
-# possible if noMetadata or useSvmProps are set, but should
-# be no problem for users that use the (sensible) defaults.
-use strict;
-use warnings;
-use Carp qw/croak/;
-use File::Path qw/mkpath/;
-use File::Basename qw/dirname basename/;
-use vars qw/$_minimize/;
-
-sub migrate_from_v0 {
- my $git_dir = $ENV{GIT_DIR};
- return undef unless -d $git_dir;
- my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
- my $migrated = 0;
- while (<$fh>) {
- chomp;
- my ($id, $orig_ref) = ($_, $_);
- next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#;
- next unless -f "$git_dir/$id/info/url";
- my $new_ref = "refs/remotes/$id";
- if (::verify_ref("$new_ref^0")) {
- print STDERR "W: $orig_ref is probably an old ",
- "branch used by an ancient version of ",
- "git-svn.\n",
- "However, $new_ref also exists.\n",
- "We will not be able ",
- "to use this branch until this ",
- "ambiguity is resolved.\n";
- next;
- }
- print STDERR "Migrating from v0 layout...\n" if !$migrated;
- print STDERR "Renaming ref: $orig_ref => $new_ref\n";
- command_noisy('update-ref', $new_ref, $orig_ref);
- command_noisy('update-ref', '-d', $orig_ref, $orig_ref);
- $migrated++;
- }
- command_close_pipe($fh, $ctx);
- print STDERR "Done migrating from v0 layout...\n" if $migrated;
- $migrated;
-}
-
-sub migrate_from_v1 {
- my $git_dir = $ENV{GIT_DIR};
- my $migrated = 0;
- return $migrated unless -d $git_dir;
- my $svn_dir = "$git_dir/svn";
-
- # just in case somebody used 'svn' as their $id at some point...
- return $migrated if -d $svn_dir && ! -f "$svn_dir/info/url";
-
- print STDERR "Migrating from a git-svn v1 layout...\n";
- mkpath([$svn_dir]);
- print STDERR "Data from a previous version of git-svn exists, but\n\t",
- "$svn_dir\n\t(required for this version ",
- "($::VERSION) of git-svn) does not exist.\n";
- my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
- while (<$fh>) {
- my $x = $_;
- next unless $x =~ s#^refs/remotes/##;
- chomp $x;
- next unless -f "$git_dir/$x/info/url";
- my $u = eval { ::file_to_s("$git_dir/$x/info/url") };
- next unless $u;
- my $dn = dirname("$git_dir/svn/$x");
- mkpath([$dn]) unless -d $dn;
- if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID:
- mkpath(["$git_dir/svn/svn"]);
- print STDERR " - $git_dir/$x/info => ",
- "$git_dir/svn/$x/info\n";
- rename "$git_dir/$x/info", "$git_dir/svn/$x/info" or
- croak "$!: $x";
-			# don't worry too much about these; they probably
- # don't exist with repos this old (save for index,
- # and we can easily regenerate that)
- foreach my $f (qw/unhandled.log index .rev_db/) {
- rename "$git_dir/$x/$f", "$git_dir/svn/$x/$f";
- }
- } else {
- print STDERR " - $git_dir/$x => $git_dir/svn/$x\n";
- rename "$git_dir/$x", "$git_dir/svn/$x" or
- croak "$!: $x";
- }
- $migrated++;
- }
- command_close_pipe($fh, $ctx);
- print STDERR "Done migrating from a git-svn v1 layout\n";
- $migrated;
-}
-
-sub read_old_urls {
- my ($l_map, $pfx, $path) = @_;
- my @dir;
- foreach (<$path/*>) {
- if (-r "$_/info/url") {
- $pfx .= '/' if $pfx && $pfx !~ m!/$!;
- my $ref_id = $pfx . basename $_;
- my $url = ::file_to_s("$_/info/url");
- $l_map->{$ref_id} = $url;
- } elsif (-d $_) {
- push @dir, $_;
- }
- }
- foreach (@dir) {
- my $x = $_;
- $x =~ s!^\Q$ENV{GIT_DIR}\E/svn/!!o;
- read_old_urls($l_map, $x, $_);
- }
-}
-
-sub migrate_from_v2 {
- my @cfg = command(qw/config -l/);
- return if grep /^svn-remote\..+\.url=/, @cfg;
- my %l_map;
- read_old_urls(\%l_map, '', "$ENV{GIT_DIR}/svn");
- my $migrated = 0;
-
- foreach my $ref_id (sort keys %l_map) {
- eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) };
- if ($@) {
- Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id);
- }
- $migrated++;
- }
- $migrated;
-}
-
-sub minimize_connections {
- my $r = Git::SVN::read_all_remotes();
- my $new_urls = {};
- my $root_repos = {};
- foreach my $repo_id (keys %$r) {
- my $url = $r->{$repo_id}->{url} or next;
- my $fetch = $r->{$repo_id}->{fetch} or next;
- my $ra = Git::SVN::Ra->new($url);
-
- # skip existing cases where we already connect to the root
- if (($ra->{url} eq $ra->{repos_root}) ||
- ($ra->{repos_root} eq $repo_id)) {
- $root_repos->{$ra->{url}} = $repo_id;
- next;
- }
-
- my $root_ra = Git::SVN::Ra->new($ra->{repos_root});
- my $root_path = $ra->{url};
- $root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##;
- foreach my $path (keys %$fetch) {
- my $ref_id = $fetch->{$path};
- my $gs = Git::SVN->new($ref_id, $repo_id, $path);
-
- # make sure we can read when connecting to
- # a higher level of a repository
- my ($last_rev, undef) = $gs->last_rev_commit;
- if (!defined $last_rev) {
- $last_rev = eval {
- $root_ra->get_latest_revnum;
- };
- next if $@;
- }
- my $new = $root_path;
- $new .= length $path ? "/$path" : '';
- eval {
- $root_ra->get_log([$new], $last_rev, $last_rev,
- 0, 0, 1, sub { });
- };
- next if $@;
- $new_urls->{$ra->{repos_root}}->{$new} =
- { ref_id => $ref_id,
- old_repo_id => $repo_id,
- old_path => $path };
- }
- }
-
- my @emptied;
- foreach my $url (keys %$new_urls) {
- # see if we can re-use an existing [svn-remote "repo_id"]
- # instead of creating a(n ugly) new section:
- my $repo_id = $root_repos->{$url} || $url;
-
- my $fetch = $new_urls->{$url};
- foreach my $path (keys %$fetch) {
- my $x = $fetch->{$path};
- Git::SVN->init($url, $path, $repo_id, $x->{ref_id});
- my $pfx = "svn-remote.$x->{old_repo_id}";
-
- my $old_fetch = quotemeta("$x->{old_path}:".
- "$x->{ref_id}");
- command_noisy(qw/config --unset/,
- "$pfx.fetch", '^'. $old_fetch . '$');
- delete $r->{$x->{old_repo_id}}->
- {fetch}->{$x->{old_path}};
- if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) {
- command_noisy(qw/config --unset/,
- "$pfx.url");
- push @emptied, $x->{old_repo_id}
- }
- }
- }
- if (@emptied) {
- my $file = $ENV{GIT_CONFIG} || "$ENV{GIT_DIR}/config";
- print STDERR <<EOF;
-The following [svn-remote] sections in your config file ($file) are empty
-and can be safely removed:
-EOF
- print STDERR "[svn-remote \"$_\"]\n" foreach @emptied;
- }
-}
-
-sub migration_check {
- migrate_from_v0();
- migrate_from_v1();
- migrate_from_v2();
- minimize_connections() if $_minimize;
-}
-
-package Git::IndexInfo;
-use strict;
-use warnings;
-use Git qw/command_input_pipe command_close_pipe/;
-
-sub new {
- my ($class) = @_;
- my ($gui, $ctx) = command_input_pipe(qw/update-index -z --index-info/);
- bless { gui => $gui, ctx => $ctx, nr => 0}, $class;
-}
-
-sub remove {
- my ($self, $path) = @_;
- if (print { $self->{gui} } '0 ', 0 x 40, "\t", $path, "\0") {
- return ++$self->{nr};
- }
- undef;
-}
-
-sub update {
- my ($self, $mode, $hash, $path) = @_;
- if (print { $self->{gui} } $mode, ' ', $hash, "\t", $path, "\0") {
- return ++$self->{nr};
- }
- undef;
-}
-
-sub DESTROY {
- my ($self) = @_;
- command_close_pipe($self->{gui}, $self->{ctx});
-}
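
A hypothetical use of this helper: each call streams one "git update-index -z --index-info" record down the pipe and returns the running count of queued updates, so the calls are truthy on success. Mode, hash, and paths below are illustrative (the hash shown is the well-known empty-blob sha1):

	# Illustrative only: stage one blob and delete another path.
	my $ii = Git::IndexInfo->new;
	$ii->update('100644', 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391',
		    'docs/empty.txt') or die "update-index pipe closed\n";
	$ii->remove('old/path.txt') or die "update-index pipe closed\n";
	# the pipe is closed (and the index updated) when $ii is destroyed
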
-
-package Git::SVN::GlobSpec;
-use strict;
-use warnings;
-
-sub new {
- my ($class, $glob, $pattern_ok) = @_;
- my $re = $glob;
- $re =~ s!/+$!!g; # no need for trailing slashes
- my (@left, @right, @patterns);
- my $state = "left";
- my $die_msg = "Only one set of wildcard directories " .
- "(e.g. '*' or '*/*/*') is supported: '$glob'\n";
- for my $part (split(m|/|, $glob)) {
- if ($part =~ /\*/ && $part ne "*") {
- die "Invalid pattern in '$glob': $part\n";
- } elsif ($pattern_ok && $part =~ /[{}]/ &&
- $part !~ /^\{[^{}]+\}/) {
- die "Invalid pattern in '$glob': $part\n";
- }
- if ($part eq "*") {
- die $die_msg if $state eq "right";
- $state = "pattern";
- push(@patterns, "[^/]*");
- } elsif ($pattern_ok && $part =~ /^\{(.*)\}$/) {
- die $die_msg if $state eq "right";
- $state = "pattern";
- my $p = quotemeta($1);
- $p =~ s/\\,/|/g;
- push(@patterns, "(?:$p)");
- } else {
- if ($state eq "left") {
- push(@left, $part);
- } else {
- push(@right, $part);
- $state = "right";
- }
- }
- }
- my $depth = @patterns;
- if ($depth == 0) {
- die "One '*' is needed in glob: '$glob'\n";
- }
- my $left = join('/', @left);
- my $right = join('/', @right);
- $re = join('/', @patterns);
- $re = join('\/',
- grep(length, quotemeta($left), "($re)", quotemeta($right)));
- my $left_re = qr/^\/\Q$left\E(\/|$)/;
- bless { left => $left, right => $right, left_regex => $left_re,
- regex => qr/$re/, glob => $glob, depth => $depth }, $class;
-}
-
-sub full_path {
- my ($self, $path) = @_;
- return (length $self->{left} ? "$self->{left}/" : '') .
- $path . (length $self->{right} ? "/$self->{right}" : '');
-}
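
For illustration, the fields the constructor above derives from a typical refspec glob; the values are hand-traced from the code, not taken from a test suite:

	# Hand-traced example of Git::SVN::GlobSpec:
	my $spec = Git::SVN::GlobSpec->new('branches/*/project', 1);
	print $spec->{left},  "\n";           # branches
	print $spec->{right}, "\n";           # project
	print $spec->{depth}, "\n";           # 1
	print $spec->full_path('1.0'), "\n";  # branches/1.0/project
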
-
__END__
Data structures:
diff --git a/git.c b/git.c
index d232de9..8788b32 100644
--- a/git.c
+++ b/git.c
@@ -256,8 +256,6 @@ static int handle_alias(int *argcp, const char ***argv)
return ret;
}
-const char git_version_string[] = GIT_VERSION;
-
#define RUN_SETUP (1<<0)
#define RUN_SETUP_GENTLY (1<<1)
#define USE_PAGER (1<<2)
@@ -353,6 +351,7 @@ static void handle_internal_command(int argc, const char **argv)
{ "commit-tree", cmd_commit_tree, RUN_SETUP },
{ "config", cmd_config, RUN_SETUP_GENTLY },
{ "count-objects", cmd_count_objects, RUN_SETUP },
+ { "credential", cmd_credential, RUN_SETUP_GENTLY },
{ "describe", cmd_describe, RUN_SETUP },
{ "diff", cmd_diff },
{ "diff-files", cmd_diff_files, RUN_SETUP | NEED_WORK_TREE },
diff --git a/gitweb/gitweb.perl b/gitweb/gitweb.perl
index 55e0e9e..3d6a705 100755
--- a/gitweb/gitweb.perl
+++ b/gitweb/gitweb.perl
@@ -4484,30 +4484,33 @@ sub git_print_log {
}
# print log
- my $signoff = 0;
- my $empty = 0;
+ my $skip_blank_line = 0;
foreach my $line (@$log) {
- if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
- $signoff = 1;
- $empty = 0;
+ if ($line =~ m/^\s*([A-Z][-A-Za-z]*-[Bb]y|C[Cc]): /) {
if (! $opts{'-remove_signoff'}) {
print "<span class=\"signoff\">" . esc_html($line) . "</span><br/>\n";
- next;
- } else {
- # remove signoff lines
- next;
+ $skip_blank_line = 1;
}
- } else {
- $signoff = 0;
+ next;
+ }
+
+ if ($line =~ m,\s*([a-z]*link): (https?://\S+),i) {
+ if (! $opts{'-remove_signoff'}) {
+ print "<span class=\"signoff\">" . esc_html($1) . ": " .
+ "<a href=\"" . esc_html($2) . "\">" . esc_html($2) . "</a>" .
+ "</span><br/>\n";
+ $skip_blank_line = 1;
+ }
+ next;
}
# print only one empty line
# do not print empty line after signoff
if ($line eq "") {
- next if ($empty || $signoff);
- $empty = 1;
+ next if ($skip_blank_line);
+ $skip_blank_line = 1;
} else {
- $empty = 0;
+ $skip_blank_line = 0;
}
print format_log_line_html($line) . "<br/>\n";
@@ -4515,7 +4518,7 @@ sub git_print_log {
if ($opts{'-final_empty_line'}) {
# end with single empty line
- print "<br/>\n" unless $empty;
+ print "<br/>\n" unless $skip_blank_line;
}
}
diff --git a/help.c b/help.c
index 6012c07..2a42ec6 100644
--- a/help.c
+++ b/help.c
@@ -6,6 +6,7 @@
#include "common-cmds.h"
#include "string-list.h"
#include "column.h"
+#include "version.h"
void add_cmdname(struct cmdnames *cmds, const char *name, int len)
{
@@ -43,9 +44,12 @@ static void uniq(struct cmdnames *cmds)
if (!cmds->cnt)
return;
- for (i = j = 1; i < cmds->cnt; i++)
- if (strcmp(cmds->names[i]->name, cmds->names[i-1]->name))
+ for (i = j = 1; i < cmds->cnt; i++) {
+ if (!strcmp(cmds->names[i]->name, cmds->names[j-1]->name))
+ free(cmds->names[i]);
+ else
cmds->names[j++] = cmds->names[i];
+ }
cmds->cnt = j;
}
@@ -60,9 +64,10 @@ void exclude_cmds(struct cmdnames *cmds, struct cmdnames *excludes)
cmp = strcmp(cmds->names[ci]->name, excludes->names[ei]->name);
if (cmp < 0)
cmds->names[cj++] = cmds->names[ci++];
- else if (cmp == 0)
- ci++, ei++;
- else if (cmp > 0)
+ else if (cmp == 0) {
+ ei++;
+ free(cmds->names[ci++]);
+ } else if (cmp > 0)
ei++;
}
diff --git a/http.c b/http.c
index 5cb87f1..b61ac85 100644
--- a/http.c
+++ b/http.c
@@ -4,6 +4,7 @@
#include "run-command.h"
#include "url.h"
#include "credential.h"
+#include "version.h"
int active_requests;
int http_is_verbose;
@@ -299,7 +300,7 @@ static CURL *get_curl_handle(void)
curl_easy_setopt(result, CURLOPT_VERBOSE, 1);
curl_easy_setopt(result, CURLOPT_USERAGENT,
- user_agent ? user_agent : GIT_HTTP_USER_AGENT);
+ user_agent ? user_agent : git_user_agent());
if (curl_ftp_no_epsv)
curl_easy_setopt(result, CURLOPT_FTP_USE_EPSV, 0);
diff --git a/merge-recursive.c b/merge-recursive.c
index 680937c..39b2e16 100644
--- a/merge-recursive.c
+++ b/merge-recursive.c
@@ -187,7 +187,7 @@ static void output_commit_title(struct merge_options *o, struct commit *commit)
else {
printf("%s ", find_unique_abbrev(commit->object.sha1, DEFAULT_ABBREV));
if (parse_commit(commit) != 0)
- printf("(bad commit)\n");
+ printf(_("(bad commit)\n"));
else {
const char *title;
int len = find_commit_subject(commit->buffer, &title);
@@ -203,7 +203,7 @@ static int add_cacheinfo(unsigned int mode, const unsigned char *sha1,
struct cache_entry *ce;
ce = make_cache_entry(mode, sha1 ? sha1 : null_sha1, path, stage, refresh);
if (!ce)
- return error("addinfo_cache failed for path '%s'", path);
+ return error(_("addinfo_cache failed for path '%s'"), path);
return add_cache_entry(ce, options);
}
@@ -265,7 +265,7 @@ struct tree *write_tree_from_memory(struct merge_options *o)
if (!cache_tree_fully_valid(active_cache_tree) &&
cache_tree_update(active_cache_tree,
active_cache, active_nr, 0) < 0)
- die("error building trees");
+ die(_("error building trees"));
result = lookup_tree(active_cache_tree->sha1);
@@ -494,7 +494,7 @@ static struct string_list *get_renames(struct merge_options *o,
opts.show_rename_progress = o->show_rename_progress;
opts.output_format = DIFF_FORMAT_NO_OUTPUT;
if (diff_setup_done(&opts) < 0)
- die("diff setup failed");
+ die(_("diff setup failed"));
diff_tree_sha1(o_tree->object.sha1, tree->object.sha1, "", &opts);
diffcore_std(&opts);
if (opts.needed_rename_limit > o->needed_rename_limit)
@@ -624,7 +624,7 @@ static void flush_buffer(int fd, const char *buf, unsigned long size)
break;
die_errno("merge-recursive");
} else if (!ret) {
- die("merge-recursive: disk full?");
+ die(_("merge-recursive: disk full?"));
}
size -= ret;
buf += ret;
@@ -687,7 +687,7 @@ static int would_lose_untracked(const char *path)
static int make_room_for_path(struct merge_options *o, const char *path)
{
int status, i;
- const char *msg = "failed to create path '%s'%s";
+ const char *msg = _("failed to create path '%s'%s");
/* Unlink any D/F conflict files that are in the way */
for (i = 0; i < o->df_conflict_file_set.nr; i++) {
@@ -698,7 +698,7 @@ static int make_room_for_path(struct merge_options *o, const char *path)
path[df_pathlen] == '/' &&
strncmp(path, df_path, df_pathlen) == 0) {
output(o, 3,
- "Removing %s to make room for subdirectory\n",
+ _("Removing %s to make room for subdirectory\n"),
df_path);
unlink(df_path);
unsorted_string_list_delete_item(&o->df_conflict_file_set,
@@ -712,7 +712,7 @@ static int make_room_for_path(struct merge_options *o, const char *path)
if (status) {
if (status == -3) {
/* something else exists */
- error(msg, path, ": perhaps a D/F conflict?");
+ error(msg, path, _(": perhaps a D/F conflict?"));
return -1;
}
die(msg, path, "");
@@ -723,7 +723,7 @@ static int make_room_for_path(struct merge_options *o, const char *path)
* tracking it.
*/
if (would_lose_untracked(path))
- return error("refusing to lose untracked file at '%s'",
+ return error(_("refusing to lose untracked file at '%s'"),
path);
/* Successful unlink is good.. */
@@ -733,7 +733,7 @@ static int make_room_for_path(struct merge_options *o, const char *path)
if (errno == ENOENT)
return 0;
/* .. but not some other error (who really cares what?) */
- return error(msg, path, ": perhaps a D/F conflict?");
+ return error(msg, path, _(": perhaps a D/F conflict?"));
}
static void update_file_flags(struct merge_options *o,
@@ -763,9 +763,9 @@ static void update_file_flags(struct merge_options *o,
buf = read_sha1_file(sha, &type, &size);
if (!buf)
- die("cannot read object %s '%s'", sha1_to_hex(sha), path);
+ die(_("cannot read object %s '%s'"), sha1_to_hex(sha), path);
if (type != OBJ_BLOB)
- die("blob expected for %s '%s'", sha1_to_hex(sha), path);
+ die(_("blob expected for %s '%s'"), sha1_to_hex(sha), path);
if (S_ISREG(mode)) {
struct strbuf strbuf = STRBUF_INIT;
if (convert_to_working_tree(path, buf, size, &strbuf)) {
@@ -788,7 +788,7 @@ static void update_file_flags(struct merge_options *o,
mode = 0666;
fd = open(path, O_WRONLY | O_TRUNC | O_CREAT, mode);
if (fd < 0)
- die_errno("failed to open '%s'", path);
+ die_errno(_("failed to open '%s'"), path);
flush_buffer(fd, buf, size);
close(fd);
} else if (S_ISLNK(mode)) {
@@ -796,10 +796,10 @@ static void update_file_flags(struct merge_options *o,
safe_create_leading_directories_const(path);
unlink(path);
if (symlink(lnk, path))
- die_errno("failed to symlink '%s'", path);
+ die_errno(_("failed to symlink '%s'"), path);
free(lnk);
} else
- die("do not know what to do with %06o %s '%s'",
+ die(_("do not know what to do with %06o %s '%s'"),
mode, sha1_to_hex(sha), path);
free(buf);
}
@@ -936,11 +936,11 @@ static struct merge_file_info merge_file_1(struct merge_options *o,
branch1, branch2);
if ((merge_status < 0) || !result_buf.ptr)
- die("Failed to execute internal merge");
+ die(_("Failed to execute internal merge"));
if (write_sha1_file(result_buf.ptr, result_buf.size,
blob_type, result.sha))
- die("Unable to add %s to database",
+ die(_("Unable to add %s to database"),
a->path);
free(result_buf.ptr);
@@ -956,7 +956,7 @@ static struct merge_file_info merge_file_1(struct merge_options *o,
if (!sha_eq(a->sha1, b->sha1))
result.clean = 0;
} else {
- die("unsupported object type in the tree");
+ die(_("unsupported object type in the tree"));
}
}
@@ -1034,22 +1034,32 @@ static void handle_change_delete(struct merge_options *o,
remove_file_from_cache(path);
update_file(o, 0, o_sha, o_mode, renamed ? renamed : path);
} else if (!a_sha) {
- output(o, 1, "CONFLICT (%s/delete): %s deleted in %s "
- "and %s in %s. Version %s of %s left in tree%s%s.",
- change, path, o->branch1,
- change_past, o->branch2, o->branch2, path,
- NULL == renamed ? "" : " at ",
- NULL == renamed ? "" : renamed);
- update_file(o, 0, b_sha, b_mode, renamed ? renamed : path);
+ if (!renamed) {
+ output(o, 1, _("CONFLICT (%s/delete): %s deleted in %s "
+ "and %s in %s. Version %s of %s left in tree."),
+ change, path, o->branch1, change_past,
+ o->branch2, o->branch2, path);
+ update_file(o, 0, b_sha, b_mode, path);
+ } else {
+ output(o, 1, _("CONFLICT (%s/delete): %s deleted in %s "
+ "and %s in %s. Version %s of %s left in tree at %s."),
+ change, path, o->branch1, change_past,
+ o->branch2, o->branch2, path, renamed);
+ update_file(o, 0, b_sha, b_mode, renamed);
+ }
} else {
- output(o, 1, "CONFLICT (%s/delete): %s deleted in %s "
- "and %s in %s. Version %s of %s left in tree%s%s.",
- change, path, o->branch2,
- change_past, o->branch1, o->branch1, path,
- NULL == renamed ? "" : " at ",
- NULL == renamed ? "" : renamed);
- if (renamed)
+ if (!renamed) {
+ output(o, 1, _("CONFLICT (%s/delete): %s deleted in %s "
+ "and %s in %s. Version %s of %s left in tree."),
+ change, path, o->branch2, change_past,
+ o->branch1, o->branch1, path);
+ } else {
+ output(o, 1, _("CONFLICT (%s/delete): %s deleted in %s "
+ "and %s in %s. Version %s of %s left in tree at %s."),
+ change, path, o->branch2, change_past,
+ o->branch1, o->branch1, path, renamed);
update_file(o, 0, a_sha, a_mode, renamed);
+ }
/*
* No need to call update_file() on path when !renamed, since
* that would needlessly touch path. We could call
@@ -1085,7 +1095,7 @@ static void conflict_rename_delete(struct merge_options *o,
orig->sha1, orig->mode,
a_sha, a_mode,
b_sha, b_mode,
- "rename", "renamed");
+ _("rename"), _("renamed"));
if (o->call_depth) {
remove_file_from_cache(dest->path);
@@ -1141,7 +1151,7 @@ static void handle_file(struct merge_options *o,
} else {
if (dir_in_way(rename->path, !o->call_depth)) {
dst_name = unique_path(o, rename->path, cur_branch);
- output(o, 1, "%s is a directory in %s adding as %s instead",
+ output(o, 1, _("%s is a directory in %s adding as %s instead"),
rename->path, other_branch, dst_name);
}
}
@@ -1163,12 +1173,12 @@ static void conflict_rename_rename_1to2(struct merge_options *o,
struct diff_filespec *a = ci->pair1->two;
struct diff_filespec *b = ci->pair2->two;
- output(o, 1, "CONFLICT (rename/rename): "
+ output(o, 1, _("CONFLICT (rename/rename): "
"Rename \"%s\"->\"%s\" in branch \"%s\" "
- "rename \"%s\"->\"%s\" in \"%s\"%s",
+ "rename \"%s\"->\"%s\" in \"%s\"%s"),
one->path, a->path, ci->branch1,
one->path, b->path, ci->branch2,
- o->call_depth ? " (left unresolved)" : "");
+ o->call_depth ? _(" (left unresolved)") : "");
if (o->call_depth) {
struct merge_file_info mfi;
struct diff_filespec other;
@@ -1222,9 +1232,9 @@ static void conflict_rename_rename_2to1(struct merge_options *o,
struct merge_file_info mfi_c1;
struct merge_file_info mfi_c2;
- output(o, 1, "CONFLICT (rename/rename): "
+ output(o, 1, _("CONFLICT (rename/rename): "
"Rename %s->%s in %s. "
- "Rename %s->%s in %s",
+ "Rename %s->%s in %s"),
a->path, c1->path, ci->branch1,
b->path, c2->path, ci->branch2);
@@ -1252,7 +1262,7 @@ static void conflict_rename_rename_2to1(struct merge_options *o,
} else {
char *new_path1 = unique_path(o, path, ci->branch1);
char *new_path2 = unique_path(o, path, ci->branch2);
- output(o, 1, "Renaming %s to %s and %s to %s instead",
+ output(o, 1, _("Renaming %s to %s and %s to %s instead"),
a->path, new_path1, b->path, new_path2);
remove_file(o, 0, path, 0);
update_file(o, 0, mfi_c1.sha, mfi_c1.mode, new_path1);
@@ -1451,8 +1461,8 @@ static int process_renames(struct merge_options *o,
} else if (!sha_eq(dst_other.sha1, null_sha1)) {
clean_merge = 0;
try_merge = 1;
- output(o, 1, "CONFLICT (rename/add): Rename %s->%s in %s. "
- "%s added in %s",
+ output(o, 1, _("CONFLICT (rename/add): Rename %s->%s in %s. "
+ "%s added in %s"),
ren1_src, ren1_dst, branch1,
ren1_dst, branch2);
if (o->call_depth) {
@@ -1461,12 +1471,12 @@ static int process_renames(struct merge_options *o,
ren1->pair->two->sha1, ren1->pair->two->mode,
dst_other.sha1, dst_other.mode,
branch1, branch2);
- output(o, 1, "Adding merged %s", ren1_dst);
+ output(o, 1, _("Adding merged %s"), ren1_dst);
update_file(o, 0, mfi.sha, mfi.mode, ren1_dst);
try_merge = 0;
} else {
char *new_path = unique_path(o, ren1_dst, branch2);
- output(o, 1, "Adding as %s instead", new_path);
+ output(o, 1, _("Adding as %s instead"), new_path);
update_file(o, 0, dst_other.sha1, dst_other.mode, new_path);
free(new_path);
}
@@ -1517,10 +1527,10 @@ static int read_sha1_strbuf(const unsigned char *sha1, struct strbuf *dst)
unsigned long size;
buf = read_sha1_file(sha1, &type, &size);
if (!buf)
- return error("cannot read object %s", sha1_to_hex(sha1));
+ return error(_("cannot read object %s"), sha1_to_hex(sha1));
if (type != OBJ_BLOB) {
free(buf);
- return error("object %s is not a blob", sha1_to_hex(sha1));
+ return error(_("object %s is not a blob"), sha1_to_hex(sha1));
}
strbuf_attach(dst, buf, size, size + 1);
return 0;
@@ -1568,7 +1578,7 @@ static void handle_modify_delete(struct merge_options *o,
o_sha, o_mode,
a_sha, a_mode,
b_sha, b_mode,
- "modify", "modified");
+ _("modify"), _("modified"));
}
static int merge_content(struct merge_options *o,
@@ -1578,14 +1588,14 @@ static int merge_content(struct merge_options *o,
unsigned char *b_sha, int b_mode,
struct rename_conflict_info *rename_conflict_info)
{
- const char *reason = "content";
+ const char *reason = _("content");
const char *path1 = NULL, *path2 = NULL;
struct merge_file_info mfi;
struct diff_filespec one, a, b;
unsigned df_conflict_remains = 0;
if (!o_sha) {
- reason = "add/add";
+ reason = _("add/add");
o_sha = (unsigned char *)null_sha1;
}
one.path = a.path = b.path = (char *)path;
@@ -1619,7 +1629,7 @@ static int merge_content(struct merge_options *o,
if (mfi.clean && !df_conflict_remains &&
sha_eq(mfi.sha, a_sha) && mfi.mode == a_mode) {
int path_renamed_outside_HEAD;
- output(o, 3, "Skipped %s (merged same as existing)", path);
+ output(o, 3, _("Skipped %s (merged same as existing)"), path);
/*
* The content merge resulted in the same file contents we
* already had. We can return early if those file contents
@@ -1633,12 +1643,12 @@ static int merge_content(struct merge_options *o,
return mfi.clean;
}
} else
- output(o, 2, "Auto-merging %s", path);
+ output(o, 2, _("Auto-merging %s"), path);
if (!mfi.clean) {
if (S_ISGITLINK(mfi.mode))
- reason = "submodule";
- output(o, 1, "CONFLICT (%s): Merge conflict in %s",
+ reason = _("submodule");
+ output(o, 1, _("CONFLICT (%s): Merge conflict in %s"),
reason, path);
if (rename_conflict_info && !df_conflict_remains)
update_stages(path, &one, &a, &b);
@@ -1664,7 +1674,7 @@ static int merge_content(struct merge_options *o,
}
new_path = unique_path(o, path, rename_conflict_info->branch1);
- output(o, 1, "Adding as %s instead", new_path);
+ output(o, 1, _("Adding as %s instead"), new_path);
update_file(o, 0, mfi.sha, mfi.mode, new_path);
free(new_path);
mfi.clean = 0;
@@ -1728,7 +1738,7 @@ static int process_entry(struct merge_options *o,
/* Deleted in both or deleted in one and
* unchanged in the other */
if (a_sha)
- output(o, 2, "Removing %s", path);
+ output(o, 2, _("Removing %s"), path);
/* do not touch working file if it did not exist */
remove_file(o, 1, path, !a_sha);
} else {
@@ -1753,19 +1763,19 @@ static int process_entry(struct merge_options *o,
other_branch = o->branch2;
mode = a_mode;
sha = a_sha;
- conf = "file/directory";
+ conf = _("file/directory");
} else {
add_branch = o->branch2;
other_branch = o->branch1;
mode = b_mode;
sha = b_sha;
- conf = "directory/file";
+ conf = _("directory/file");
}
if (dir_in_way(path, !o->call_depth)) {
char *new_path = unique_path(o, path, add_branch);
clean_merge = 0;
- output(o, 1, "CONFLICT (%s): There is a directory with name %s in %s. "
- "Adding %s as %s",
+ output(o, 1, _("CONFLICT (%s): There is a directory with name %s in %s. "
+ "Adding %s as %s"),
conf, path, other_branch, path, new_path);
if (o->call_depth)
remove_file_from_cache(path);
@@ -1774,7 +1784,7 @@ static int process_entry(struct merge_options *o,
remove_file_from_cache(path);
free(new_path);
} else {
- output(o, 2, "Adding %s", path);
+ output(o, 2, _("Adding %s"), path);
/* do not overwrite file if already present */
update_file_flags(o, sha, mode, path, 1, !a_sha);
}
@@ -1791,7 +1801,7 @@ static int process_entry(struct merge_options *o,
*/
remove_file(o, 1, path, !a_mode);
} else
- die("Fatal merge failure, shouldn't happen.");
+ die(_("Fatal merge failure, shouldn't happen."));
return clean_merge;
}
@@ -1810,7 +1820,7 @@ int merge_trees(struct merge_options *o,
}
if (sha_eq(common->object.sha1, merge->object.sha1)) {
- output(o, 0, "Already up-to-date!");
+ output(o, 0, _("Already up-to-date!"));
*result = head;
return 1;
}
@@ -1819,7 +1829,7 @@ int merge_trees(struct merge_options *o,
if (code != 0) {
if (show(o, 4) || o->call_depth)
- die("merging of trees %s and %s failed",
+ die(_("merging of trees %s and %s failed"),
sha1_to_hex(head->object.sha1),
sha1_to_hex(merge->object.sha1));
else
@@ -1849,7 +1859,7 @@ int merge_trees(struct merge_options *o,
for (i = 0; i < entries->nr; i++) {
struct stage_data *e = entries->items[i].util;
if (!e->processed)
- die("Unprocessed path??? %s",
+ die(_("Unprocessed path??? %s"),
entries->items[i].string);
}
@@ -1894,7 +1904,7 @@ int merge_recursive(struct merge_options *o,
int clean;
if (show(o, 4)) {
- output(o, 4, "Merging:");
+ output(o, 4, _("Merging:"));
output_commit_title(o, h1);
output_commit_title(o, h2);
}
@@ -1905,7 +1915,10 @@ int merge_recursive(struct merge_options *o,
}
if (show(o, 5)) {
- output(o, 5, "found %u common ancestor(s):", commit_list_count(ca));
+ unsigned cnt = commit_list_count(ca);
+
+ output(o, 5, Q_("found %u common ancestor:",
+ "found %u common ancestors:", cnt), cnt);
for (iter = ca; iter; iter = iter->next)
output_commit_title(o, iter->item);
}
@@ -1941,7 +1954,7 @@ int merge_recursive(struct merge_options *o,
o->call_depth--;
if (!merged_common_ancestors)
- die("merge returned no commit");
+ die(_("merge returned no commit"));
}
discard_cache();
@@ -1998,7 +2011,7 @@ int merge_recursive_generic(struct merge_options *o,
for (i = 0; i < num_base_list; ++i) {
struct commit *base;
if (!(base = get_ref(base_list[i], sha1_to_hex(base_list[i]))))
- return error("Could not parse object '%s'",
+ return error(_("Could not parse object '%s'"),
sha1_to_hex(base_list[i]));
commit_list_insert(base, &ca);
}
@@ -2010,7 +2023,7 @@ int merge_recursive_generic(struct merge_options *o,
if (active_cache_changed &&
(write_cache(index_fd, active_cache, active_nr) ||
commit_locked_index(lock)))
- return error("Unable to write index.");
+ return error(_("Unable to write index."));
return clean ? 0 : 1;
}
diff --git a/notes-merge.c b/notes-merge.c
index 74aa77c..29c6411 100644
--- a/notes-merge.c
+++ b/notes-merge.c
@@ -524,8 +524,10 @@ static int merge_from_diffs(struct notes_merge_options *o,
free(changes);
if (o->verbosity >= 4)
- printf("Merge result: %i unmerged notes and a %s notes tree\n",
- conflicts, t->dirty ? "dirty" : "clean");
+ printf(t->dirty ?
+ "Merge result: %i unmerged notes and a dirty notes tree\n" :
+ "Merge result: %i unmerged notes and a clean notes tree\n",
+ conflicts);
return conflicts ? -1 : 1;
}
diff --git a/parse-options.c b/parse-options.c
index ab70c29..c1c66bd 100644
--- a/parse-options.c
+++ b/parse-options.c
@@ -476,6 +476,7 @@ int parse_options(int argc, const char **argv, const char *prefix,
usage_with_options(usagestr, options);
}
+ precompose_argv(argc, argv);
return parse_options_end(&ctx);
}
diff --git a/path.c b/path.c
index 6f2aa69..66acd24 100644
--- a/path.c
+++ b/path.c
@@ -87,6 +87,21 @@ char *git_pathdup(const char *fmt, ...)
return xstrdup(path);
}
+char *mkpathdup(const char *fmt, ...)
+{
+ char *path;
+ struct strbuf sb = STRBUF_INIT;
+ va_list args;
+
+ va_start(args, fmt);
+ strbuf_vaddf(&sb, fmt, args);
+ va_end(args);
+ path = xstrdup(cleanup_path(sb.buf));
+
+ strbuf_release(&sb);
+ return path;
+}
+
char *mkpath(const char *fmt, ...)
{
va_list args;
@@ -122,6 +137,32 @@ char *git_path(const char *fmt, ...)
return cleanup_path(pathname);
}
+void home_config_paths(char **global, char **xdg, char *file)
+{
+ char *xdg_home = getenv("XDG_CONFIG_HOME");
+ char *home = getenv("HOME");
+ char *to_free = NULL;
+
+ if (!home) {
+ if (global)
+ *global = NULL;
+ } else {
+ if (!xdg_home) {
+ to_free = mkpathdup("%s/.config", home);
+ xdg_home = to_free;
+ }
+ if (global)
+ *global = mkpathdup("%s/.gitconfig", home);
+ }
+
+ if (!xdg_home)
+ *xdg = NULL;
+ else
+ *xdg = mkpathdup("%s/git/%s", xdg_home, file);
+
+ free(to_free);
+}
+
char *git_path_submodule(const char *path, const char *fmt, ...)
{
char *pathname = get_pathname();
diff --git a/perl/.gitignore b/perl/.gitignore
index d5c6e22..0f1fc27 100644
--- a/perl/.gitignore
+++ b/perl/.gitignore
@@ -5,3 +5,4 @@ MYMETA.yml
blib
blibdirs
pm_to_blib
+PM.stamp
diff --git a/perl/Git/IndexInfo.pm b/perl/Git/IndexInfo.pm
new file mode 100644
index 0000000..a43108c
--- /dev/null
+++ b/perl/Git/IndexInfo.pm
@@ -0,0 +1,33 @@
+package Git::IndexInfo;
+use strict;
+use warnings;
+use Git qw/command_input_pipe command_close_pipe/;
+
+sub new {
+ my ($class) = @_;
+ my ($gui, $ctx) = command_input_pipe(qw/update-index -z --index-info/);
+ bless { gui => $gui, ctx => $ctx, nr => 0}, $class;
+}
+
+sub remove {
+ my ($self, $path) = @_;
+ if (print { $self->{gui} } '0 ', 0 x 40, "\t", $path, "\0") {
+ return ++$self->{nr};
+ }
+ undef;
+}
+
+sub update {
+ my ($self, $mode, $hash, $path) = @_;
+ if (print { $self->{gui} } $mode, ' ', $hash, "\t", $path, "\0") {
+ return ++$self->{nr};
+ }
+ undef;
+}
+
+sub DESTROY {
+ my ($self) = @_;
+ command_close_pipe($self->{gui}, $self->{ctx});
+}
+
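+# A minimal usage sketch (illustrative only; $blob_sha1 is a placeholder):
+#
+#   my $ii = Git::IndexInfo->new;
+#   $ii->update('100644', $blob_sha1, 'path/to/file')
+#           or die "update-index pipe closed early\n";
+#   $ii->remove('obsolete/path');
+#   # the update-index pipe is flushed and closed when $ii is destroyed
+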
+1;
diff --git a/perl/Git/SVN.pm b/perl/Git/SVN.pm
new file mode 100644
index 0000000..8478d0c
--- /dev/null
+++ b/perl/Git/SVN.pm
@@ -0,0 +1,2349 @@
+package Git::SVN;
+use strict;
+use warnings;
+use Fcntl qw/:DEFAULT :seek/;
+use constant rev_map_fmt => 'NH40';
+use vars qw/$_no_metadata
+ $_repack $_repack_flags $_use_svm_props $_head
+ $_use_svnsync_props $no_reuse_existing
+ $_use_log_author $_add_author_from $_localtime/;
+use Carp qw/croak/;
+use File::Path qw/mkpath/;
+use File::Copy qw/copy/;
+use IPC::Open3;
+use Time::Local;
+use Memoize; # core since 5.8.0, Jul 2002
+use Memoize::Storable;
+use POSIX qw(:signal_h);
+
+use Git qw(
+ command
+ command_oneline
+ command_noisy
+ command_output_pipe
+ command_close_pipe
+);
+use Git::SVN::Utils qw(fatal can_compress);
+
+my $can_use_yaml;
+BEGIN {
+ $can_use_yaml = eval { require Git::SVN::Memoize::YAML; 1};
+}
+
+our $_follow_parent = 1;
+our $_minimize_url = 'unset';
+our $default_repo_id = 'svn';
+our $default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn';
+
+my ($_gc_nr, $_gc_period);
+
+# properties that we do not log:
+my %SKIP_PROP;
+BEGIN {
+ %SKIP_PROP = map { $_ => 1 } qw/svn:wc:ra_dav:version-url
+ svn:special svn:executable
+ svn:entry:committed-rev
+ svn:entry:last-author
+ svn:entry:uuid
+ svn:entry:committed-date/;
+
+ # some options are read globally, but can be overridden locally
+ # per [svn-remote "..."] section. Command-line options will *NOT*
+ # override options set in an [svn-remote "..."] section
+ no strict 'refs';
+ for my $option (qw/follow_parent no_metadata use_svm_props
+ use_svnsync_props/) {
+ my $key = $option;
+ $key =~ tr/_//d;
+ my $prop = "-$option";
+ *$option = sub {
+ my ($self) = @_;
+ return $self->{$prop} if exists $self->{$prop};
+ my $k = "svn-remote.$self->{repo_id}.$key";
+ eval { command_oneline(qw/config --get/, $k) };
+ if ($@) {
+ $self->{$prop} = ${"Git::SVN::_$option"};
+ } else {
+ my $v = command_oneline(qw/config --bool/,$k);
+ $self->{$prop} = $v eq 'false' ? 0 : 1;
+ }
+ return $self->{$prop};
+ }
+ }
+}
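+
+# Illustration: the loop above generates memoizing accessors such as
+# $self->no_metadata, which consult svn-remote.<repo_id>.nometadata in
+# the config before falling back to the global $Git::SVN::_no_metadata
+# command-line setting.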
+
+
+my (%LOCKFILES, %INDEX_FILES);
+END {
+ unlink keys %LOCKFILES if %LOCKFILES;
+ unlink keys %INDEX_FILES if %INDEX_FILES;
+}
+
+sub resolve_local_globs {
+ my ($url, $fetch, $glob_spec) = @_;
+ return unless defined $glob_spec;
+ my $ref = $glob_spec->{ref};
+ my $path = $glob_spec->{path};
+ foreach (command(qw#for-each-ref --format=%(refname) refs/#)) {
+ next unless m#^$ref->{regex}$#;
+ my $p = $1;
+ my $pathname = desanitize_refname($path->full_path($p));
+ my $refname = desanitize_refname($ref->full_path($p));
+ if (my $existing = $fetch->{$pathname}) {
+ if ($existing ne $refname) {
+ die "Refspec conflict:\n",
+ "existing: $existing\n",
+ " globbed: $refname\n";
+ }
+ my $u = (::cmt_metadata("$refname"))[0];
+ $u =~ s!^\Q$url\E(/|$)!! or die
+ "$refname: '$url' not found in '$u'\n";
+ if ($pathname ne $u) {
+ warn "W: Refspec glob conflict ",
+ "(ref: $refname):\n",
+ "expected path: $pathname\n",
+ " real path: $u\n",
+ "Continuing ahead with $u\n";
+ next;
+ }
+ } else {
+ $fetch->{$pathname} = $refname;
+ }
+ }
+}
+
+sub parse_revision_argument {
+ my ($base, $head) = @_;
+ if (!defined $::_revision || $::_revision eq 'BASE:HEAD') {
+ return ($base, $head);
+ }
+ return ($1, $2) if ($::_revision =~ /^(\d+):(\d+)$/);
+ return ($::_revision, $::_revision) if ($::_revision =~ /^\d+$/);
+ return ($head, $head) if ($::_revision eq 'HEAD');
+ return ($base, $1) if ($::_revision =~ /^BASE:(\d+)$/);
+ return ($1, $head) if ($::_revision =~ /^(\d+):HEAD$/);
+ die "revision argument: $::_revision not understood by git-svn\n";
+}
+
+sub fetch_all {
+ my ($repo_id, $remotes) = @_;
+ if (ref $repo_id) {
+ my $gs = $repo_id;
+ $repo_id = undef;
+ $repo_id = $gs->{repo_id};
+ }
+ $remotes ||= read_all_remotes();
+ my $remote = $remotes->{$repo_id} or
+ die "[svn-remote \"$repo_id\"] unknown\n";
+ my $fetch = $remote->{fetch};
+ my $url = $remote->{url} or die "svn-remote.$repo_id.url not defined\n";
+ my (@gs, @globs);
+ my $ra = Git::SVN::Ra->new($url);
+ my $uuid = $ra->get_uuid;
+ my $head = $ra->get_latest_revnum;
+
+ # ignore errors; the $head revision may not even exist anymore
+ eval { $ra->get_log("", $head, 0, 1, 0, 1, sub { $head = $_[1] }) };
+ warn "W: $@\n" if $@;
+
+ my $base = defined $fetch ? $head : 0;
+
+ # read the max revs for wildcard expansion (branches/*, tags/*)
+ foreach my $t (qw/branches tags/) {
+ defined $remote->{$t} or next;
+ push @globs, @{$remote->{$t}};
+
+ my $max_rev = eval { tmp_config(qw/--int --get/,
+ "svn-remote.$repo_id.${t}-maxRev") };
+ if (defined $max_rev && ($max_rev < $base)) {
+ $base = $max_rev;
+ } elsif (!defined $max_rev) {
+ $base = 0;
+ }
+ }
+
+ if ($fetch) {
+ foreach my $p (sort keys %$fetch) {
+ my $gs = Git::SVN->new($fetch->{$p}, $repo_id, $p);
+ my $lr = $gs->rev_map_max;
+ if (defined $lr) {
+ $base = $lr if ($lr < $base);
+ }
+ push @gs, $gs;
+ }
+ }
+
+ ($base, $head) = parse_revision_argument($base, $head);
+ $ra->gs_fetch_loop_common($base, $head, \@gs, \@globs);
+}
+
+sub read_all_remotes {
+ my $r = {};
+ my $use_svm_props = eval { command_oneline(qw/config --bool
+ svn.useSvmProps/) };
+ $use_svm_props = $use_svm_props eq 'true' if $use_svm_props;
+ my $svn_refspec = qr{\s*(.*?)\s*:\s*(.+?)\s*};
+ foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) {
+ if (m!^(.+)\.fetch=$svn_refspec$!) {
+ my ($remote, $local_ref, $remote_ref) = ($1, $2, $3);
+ die("svn-remote.$remote: remote ref '$remote_ref' "
+ . "must start with 'refs/'\n")
+ unless $remote_ref =~ m{^refs/};
+ $local_ref = uri_decode($local_ref);
+ $r->{$remote}->{fetch}->{$local_ref} = $remote_ref;
+ $r->{$remote}->{svm} = {} if $use_svm_props;
+ } elsif (m!^(.+)\.usesvmprops=\s*(.*)\s*$!) {
+ $r->{$1}->{svm} = {};
+ } elsif (m!^(.+)\.url=\s*(.*)\s*$!) {
+ $r->{$1}->{url} = $2;
+ } elsif (m!^(.+)\.pushurl=\s*(.*)\s*$!) {
+ $r->{$1}->{pushurl} = $2;
+ } elsif (m!^(.+)\.ignore-refs=\s*(.*)\s*$!) {
+ $r->{$1}->{ignore_refs_regex} = $2;
+ } elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) {
+ my ($remote, $t, $local_ref, $remote_ref) =
+ ($1, $2, $3, $4);
+ die("svn-remote.$remote: remote ref '$remote_ref' ($t) "
+ . "must start with 'refs/'\n")
+ unless $remote_ref =~ m{^refs/};
+ $local_ref = uri_decode($local_ref);
+
+ require Git::SVN::GlobSpec;
+ my $rs = {
+ t => $t,
+ remote => $remote,
+ path => Git::SVN::GlobSpec->new($local_ref, 1),
+ ref => Git::SVN::GlobSpec->new($remote_ref, 0) };
+ if (length($rs->{ref}->{right}) != 0) {
+ die "The '*' glob character must be the last ",
+ "character of '$remote_ref'\n";
+ }
+ push @{ $r->{$remote}->{$t} }, $rs;
+ }
+ }
+
+ map {
+ if (defined $r->{$_}->{svm}) {
+ my $svm;
+ eval {
+ my $section = "svn-remote.$_";
+ $svm = {
+ source => tmp_config('--get',
+ "$section.svm-source"),
+ replace => tmp_config('--get',
+ "$section.svm-replace"),
+ }
+ };
+ $r->{$_}->{svm} = $svm;
+ }
+ } keys %$r;
+
+ foreach my $remote (keys %$r) {
+ foreach ( grep { defined $_ }
+ map { $r->{$remote}->{$_} } qw(branches tags) ) {
+ foreach my $rs ( @$_ ) {
+ $rs->{ignore_refs_regex} =
+ $r->{$remote}->{ignore_refs_regex};
+ }
+ }
+ }
+
+ $r;
+}
+
+sub init_vars {
+ $_gc_nr = $_gc_period = 1000;
+ if (defined $_repack || defined $_repack_flags) {
+ warn "Repack options are obsolete; they have no effect.\n";
+ }
+}
+
+sub verify_remotes_sanity {
+ return unless -d $ENV{GIT_DIR};
+ my %seen;
+ foreach (command(qw/config -l/)) {
+ if (m!^svn-remote\.(?:.+)\.fetch=.*:refs/remotes/(\S+)\s*$!) {
+ if ($seen{$1}) {
+ die "Remote ref refs/remote/$1 is tracked by",
+ "\n \"$_\"\nand\n \"$seen{$1}\"\n",
+ "Please resolve this ambiguity in ",
+ "your git configuration file before ",
+ "continuing\n";
+ }
+ $seen{$1} = $_;
+ }
+ }
+}
+
+sub find_existing_remote {
+ my ($url, $remotes) = @_;
+ return undef if $no_reuse_existing;
+ my $existing;
+ foreach my $repo_id (keys %$remotes) {
+ my $u = $remotes->{$repo_id}->{url} or next;
+ next if $u ne $url;
+ $existing = $repo_id;
+ last;
+ }
+ $existing;
+}
+
+sub init_remote_config {
+ my ($self, $url, $no_write) = @_;
+ $url =~ s!/+$!!; # strip trailing slash
+ my $r = read_all_remotes();
+ my $existing = find_existing_remote($url, $r);
+ if ($existing) {
+ unless ($no_write) {
+ print STDERR "Using existing ",
+ "[svn-remote \"$existing\"]\n";
+ }
+ $self->{repo_id} = $existing;
+ } elsif ($_minimize_url) {
+ my $min_url = Git::SVN::Ra->new($url)->minimize_url;
+ $existing = find_existing_remote($min_url, $r);
+ if ($existing) {
+ unless ($no_write) {
+ print STDERR "Using existing ",
+ "[svn-remote \"$existing\"]\n";
+ }
+ $self->{repo_id} = $existing;
+ }
+ if ($min_url ne $url) {
+ unless ($no_write) {
+ print STDERR "Using higher level of URL: ",
+ "$url => $min_url\n";
+ }
+ my $old_path = $self->{path};
+ $self->{path} = $url;
+ $self->{path} =~ s!^\Q$min_url\E(/|$)!!;
+ if (length $old_path) {
+ $self->{path} .= "/$old_path";
+ }
+ $url = $min_url;
+ }
+ }
+ my $orig_url;
+ if (!$existing) {
+ # verify that we aren't overwriting anything:
+ $orig_url = eval {
+ command_oneline('config', '--get',
+ "svn-remote.$self->{repo_id}.url")
+ };
+ if ($orig_url && ($orig_url ne $url)) {
+ die "svn-remote.$self->{repo_id}.url already set: ",
+ "$orig_url\nwanted to set to: $url\n";
+ }
+ }
+ my ($xrepo_id, $xpath) = find_ref($self->refname);
+ if (!$no_write && defined $xpath) {
+ die "svn-remote.$xrepo_id.fetch already set to track ",
+ "$xpath:", $self->refname, "\n";
+ }
+ unless ($no_write) {
+ command_noisy('config',
+ "svn-remote.$self->{repo_id}.url", $url);
+ $self->{path} =~ s{^/}{};
+ $self->{path} =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
+ command_noisy('config', '--add',
+ "svn-remote.$self->{repo_id}.fetch",
+ "$self->{path}:".$self->refname);
+ }
+ $self->{url} = $url;
+}
+
+sub find_by_url { # repos_root and path are optional
+ my ($class, $full_url, $repos_root, $path) = @_;
+
+ return undef unless defined $full_url;
+ remove_username($full_url);
+ remove_username($repos_root) if defined $repos_root;
+ my $remotes = read_all_remotes();
+ if (defined $full_url && defined $repos_root && !defined $path) {
+ $path = $full_url;
+ $path =~ s#^\Q$repos_root\E(?:/|$)##;
+ }
+ foreach my $repo_id (keys %$remotes) {
+ my $u = $remotes->{$repo_id}->{url} or next;
+ remove_username($u);
+ next if defined $repos_root && $repos_root ne $u;
+
+ my $fetch = $remotes->{$repo_id}->{fetch} || {};
+ foreach my $t (qw/branches tags/) {
+ foreach my $globspec (@{$remotes->{$repo_id}->{$t}}) {
+ resolve_local_globs($u, $fetch, $globspec);
+ }
+ }
+ my $p = $path;
+ my $rwr = rewrite_root({repo_id => $repo_id});
+ my $svm = $remotes->{$repo_id}->{svm}
+ if defined $remotes->{$repo_id}->{svm};
+ unless (defined $p) {
+ $p = $full_url;
+ my $z = $u;
+ my $prefix = '';
+ if ($rwr) {
+ $z = $rwr;
+ remove_username($z);
+ } elsif (defined $svm) {
+ $z = $svm->{source};
+ $prefix = $svm->{replace};
+ $prefix =~ s#^\Q$u\E(?:/|$)##;
+ $prefix =~ s#/$##;
+ }
+ $p =~ s#^\Q$z\E(?:/|$)#$prefix# or next;
+ }
+ foreach my $f (keys %$fetch) {
+ next if $f ne $p;
+ return Git::SVN->new($fetch->{$f}, $repo_id, $f);
+ }
+ }
+ undef;
+}
+
+sub init {
+ my ($class, $url, $path, $repo_id, $ref_id, $no_write) = @_;
+ my $self = _new($class, $repo_id, $ref_id, $path);
+ if (defined $url) {
+ $self->init_remote_config($url, $no_write);
+ }
+ $self;
+}
+
+sub find_ref {
+ my ($ref_id) = @_;
+ foreach (command(qw/config -l/)) {
+ next unless m!^svn-remote\.(.+)\.fetch=
+ \s*(.*?)\s*:\s*(.+?)\s*$!x;
+ my ($repo_id, $path, $ref) = ($1, $2, $3);
+ if ($ref eq $ref_id) {
+ $path = '' if ($path =~ m#^\./?#);
+ return ($repo_id, $path);
+ }
+ }
+ (undef, undef, undef);
+}
+
+sub new {
+ my ($class, $ref_id, $repo_id, $path) = @_;
+ if (defined $ref_id && !defined $repo_id && !defined $path) {
+ ($repo_id, $path) = find_ref($ref_id);
+ if (!defined $repo_id) {
+ die "Could not find a \"svn-remote.*.fetch\" key ",
+ "in the repository configuration matching: ",
+ "$ref_id\n";
+ }
+ }
+ my $self = _new($class, $repo_id, $ref_id, $path);
+ if (!defined $self->{path} || !length $self->{path}) {
+ my $fetch = command_oneline('config', '--get',
+ "svn-remote.$repo_id.fetch",
+ ":$ref_id\$") or
+ die "Failed to read \"svn-remote.$repo_id.fetch\" ",
+ "\":$ref_id\$\" in config\n";
+ ($self->{path}, undef) = split(/\s*:\s*/, $fetch);
+ }
+ $self->{path} =~ s{/+}{/}g;
+ $self->{path} =~ s{\A/}{};
+ $self->{path} =~ s{/\z}{};
+ $self->{url} = command_oneline('config', '--get',
+ "svn-remote.$repo_id.url") or
+ die "Failed to read \"svn-remote.$repo_id.url\" in config\n";
+ $self->{pushurl} = eval { command_oneline('config', '--get',
+ "svn-remote.$repo_id.pushurl") };
+ $self->rebuild;
+ $self;
+}
+
+sub refname {
+ my ($refname) = $_[0]->{ref_id} ;
+
+ # It cannot end with a slash /; we'll throw up on this because
+ # SVN can't have directories with a slash in their name, either:
+ if ($refname =~ m{/$}) {
+ die "ref: '$refname' ends with a trailing slash; this is ",
+ "not permitted by either git or Subversion\n";
+ }
+
+ # It cannot have ASCII control characters, space, tilde ~, caret ^,
+ # colon :, question-mark ?, asterisk *, or open bracket [
+ # anywhere.
+ #
+ # Additionally, % must be escaped because it is used for escaping
+ # and we want our escaped refname to be reversible
+ $refname =~ s{([ \%~\^:\?\*\[\t])}{uc sprintf('%%%02x',ord($1))}eg;
+
+ # no slash-separated component can begin with a dot .
+ # /.* becomes /%2E*
+ $refname =~ s{/\.}{/%2E}g;
+
+ # It cannot have two consecutive dots .. anywhere
+ # .. becomes %2E%2E
+ $refname =~ s{\.\.}{%2E%2E}g;
+
+ # trailing dots and .lock are not allowed
+ # .$ becomes %2E and .lock becomes %2Elock
+ $refname =~ s{\.(?=$|lock$)}{%2E};
+
+ # the sequence @{ is used to access the reflog
+ # @{ becomes %40{
+ $refname =~ s{\@\{}{%40\{}g;
+
+ return $refname;
+}
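+
+# For illustration: a hypothetical ref_id of 'tags/v1..2' comes out of
+# refname() as 'tags/v1%2E%2E2'; desanitize_refname() below reverses
+# the %XX escapes and recovers the original name.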
+
+sub desanitize_refname {
+ my ($refname) = @_;
+ $refname =~ s{%(?:([0-9A-F]{2}))}{chr hex($1)}eg;
+ return $refname;
+}
+
+sub svm_uuid {
+ my ($self) = @_;
+ return $self->{svm}->{uuid} if $self->svm;
+ $self->ra;
+ unless ($self->{svm}) {
+ die "SVM UUID not cached, and reading remotely failed\n";
+ }
+ $self->{svm}->{uuid};
+}
+
+sub svm {
+ my ($self) = @_;
+ return $self->{svm} if $self->{svm};
+ my $svm;
+ # see if we have it in our config, first:
+ eval {
+ my $section = "svn-remote.$self->{repo_id}";
+ $svm = {
+ source => tmp_config('--get', "$section.svm-source"),
+ uuid => tmp_config('--get', "$section.svm-uuid"),
+ replace => tmp_config('--get', "$section.svm-replace"),
+ }
+ };
+ if ($svm && $svm->{source} && $svm->{uuid} && $svm->{replace}) {
+ $self->{svm} = $svm;
+ }
+ $self->{svm};
+}
+
+sub _set_svm_vars {
+ my ($self, $ra) = @_;
+ return $ra if $self->svm;
+
+ my @err = ( "useSvmProps set, but failed to read SVM properties\n",
+ "(svm:source, svm:uuid) ",
+ "from the following URLs:\n" );
+ sub read_svm_props {
+ my ($self, $ra, $path, $r) = @_;
+ my $props = ($ra->get_dir($path, $r))[2];
+ my $src = $props->{'svm:source'};
+ my $uuid = $props->{'svm:uuid'};
+ return undef if (!$src || !$uuid);
+
+ chomp($src, $uuid);
+
+ $uuid =~ m{^[0-9a-f\-]{30,}$}i
+ or die "doesn't look right - svm:uuid is '$uuid'\n";
+
+ # the '!' is used to mark the repos_root!/relative/path
+ $src =~ s{/?!/?}{/};
+ $src =~ s{/+$}{}; # no trailing slashes please
+ # username is of no interest
+ $src =~ s{(^[a-z\+]*://)[^/@]*@}{$1};
+
+ my $replace = $ra->{url};
+ $replace .= "/$path" if length $path;
+
+ my $section = "svn-remote.$self->{repo_id}";
+ tmp_config("$section.svm-source", $src);
+ tmp_config("$section.svm-replace", $replace);
+ tmp_config("$section.svm-uuid", $uuid);
+ $self->{svm} = {
+ source => $src,
+ uuid => $uuid,
+ replace => $replace
+ };
+ }
+
+ my $r = $ra->get_latest_revnum;
+ my $path = $self->{path};
+ my %tried;
+ while (length $path) {
+ unless ($tried{"$self->{url}/$path"}) {
+ return $ra if $self->read_svm_props($ra, $path, $r);
+ $tried{"$self->{url}/$path"} = 1;
+ }
+ $path =~ s#/?[^/]+$##;
+ }
+ die "Path: '$path' should be ''\n" if $path ne '';
+ return $ra if $self->read_svm_props($ra, $path, $r);
+ $tried{"$self->{url}/$path"} = 1;
+
+ if ($ra->{repos_root} eq $self->{url}) {
+ die @err, (map { " $_\n" } keys %tried), "\n";
+ }
+
+ # nope, make sure we're connected to the repository root:
+ my $ok;
+ my @tried_b;
+ $path = $ra->{svn_path};
+ $ra = Git::SVN::Ra->new($ra->{repos_root});
+ while (length $path) {
+ unless ($tried{"$ra->{url}/$path"}) {
+ $ok = $self->read_svm_props($ra, $path, $r);
+ last if $ok;
+ $tried{"$ra->{url}/$path"} = 1;
+ }
+ $path =~ s#/?[^/]+$##;
+ }
+ die "Path: '$path' should be ''\n" if $path ne '';
+ $ok ||= $self->read_svm_props($ra, $path, $r);
+ $tried{"$ra->{url}/$path"} = 1;
+ if (!$ok) {
+ die @err, (map { " $_\n" } keys %tried), "\n";
+ }
+ Git::SVN::Ra->new($self->{url});
+}
+
+sub svnsync {
+ my ($self) = @_;
+ return $self->{svnsync} if $self->{svnsync};
+
+ if ($self->no_metadata) {
+ die "Can't have both 'noMetadata' and ",
+ "'useSvnsyncProps' options set!\n";
+ }
+ if ($self->rewrite_root) {
+ die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ",
+ "options set!\n";
+ }
+ if ($self->rewrite_uuid) {
+ die "Can't have both 'useSvnsyncProps' and 'rewriteUUID' ",
+ "options set!\n";
+ }
+
+ my $svnsync;
+ # see if we have it in our config, first:
+ eval {
+ my $section = "svn-remote.$self->{repo_id}";
+
+ my $url = tmp_config('--get', "$section.svnsync-url");
+ ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
+ die "doesn't look right - svn:sync-from-url is '$url'\n";
+
+ my $uuid = tmp_config('--get', "$section.svnsync-uuid");
+ ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
+ die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
+
+ $svnsync = { url => $url, uuid => $uuid }
+ };
+ if ($svnsync && $svnsync->{url} && $svnsync->{uuid}) {
+ return $self->{svnsync} = $svnsync;
+ }
+
+ my $err = "useSvnsyncProps set, but failed to read " .
+ "svnsync property: svn:sync-from-";
+ my $rp = $self->ra->rev_proplist(0);
+
+ my $url = $rp->{'svn:sync-from-url'} or die $err . "url\n";
+ ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
+ die "doesn't look right - svn:sync-from-url is '$url'\n";
+
+ my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n";
+ ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
+ die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
+
+ my $section = "svn-remote.$self->{repo_id}";
+ tmp_config('--add', "$section.svnsync-uuid", $uuid);
+ tmp_config('--add', "$section.svnsync-url", $url);
+ return $self->{svnsync} = { url => $url, uuid => $uuid };
+}
+
+# this allows us to memoize our SVN::Ra UUID locally and avoid a
+# remote lookup (useful for 'git svn log').
+sub ra_uuid {
+ my ($self) = @_;
+ unless ($self->{ra_uuid}) {
+ my $key = "svn-remote.$self->{repo_id}.uuid";
+ my $uuid = eval { tmp_config('--get', $key) };
+ if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/i) {
+ $self->{ra_uuid} = $uuid;
+ } else {
+ die "ra_uuid called without URL\n" unless $self->{url};
+ $self->{ra_uuid} = $self->ra->get_uuid;
+ tmp_config('--add', $key, $self->{ra_uuid});
+ }
+ }
+ $self->{ra_uuid};
+}
+
+sub _set_repos_root {
+ my ($self, $repos_root) = @_;
+ my $k = "svn-remote.$self->{repo_id}.reposRoot";
+ $repos_root ||= $self->ra->{repos_root};
+ tmp_config($k, $repos_root);
+ $repos_root;
+}
+
+sub repos_root {
+ my ($self) = @_;
+ my $k = "svn-remote.$self->{repo_id}.reposRoot";
+ eval { tmp_config('--get', $k) } || $self->_set_repos_root;
+}
+
+sub ra {
+ my ($self) = shift;
+ my $ra = Git::SVN::Ra->new($self->{url});
+ $self->_set_repos_root($ra->{repos_root});
+ if ($self->use_svm_props && !$self->{svm}) {
+ if ($self->no_metadata) {
+ die "Can't have both 'noMetadata' and ",
+ "'useSvmProps' options set!\n";
+ } elsif ($self->use_svnsync_props) {
+ die "Can't have both 'useSvnsyncProps' and ",
+ "'useSvmProps' options set!\n";
+ }
+ $ra = $self->_set_svm_vars($ra);
+ $self->{-want_revprops} = 1;
+ }
+ $ra;
+}
+
+# prop_walk(PATH, REV, SUB)
+# -------------------------
+# Recursively traverse PATH at revision REV and invoke SUB for each
+# directory that contains an SVN property. SUB will be invoked as
+# follows: &SUB(gs, path, props); where `gs' is this instance of
+# Git::SVN and `path' is the path to the directory where the properties
+# `props' were found. The `path' will be relative to the point of
+# checkout; that is, if url://repo/trunk is the current Git branch, and that
+# directory contains a sub-directory `d', SUB will be invoked with `/d/'
+# as `path' (note the trailing `/').
+sub prop_walk {
+ my ($self, $path, $rev, $sub) = @_;
+
+ $path =~ s#^/##;
+ my ($dirent, undef, $props) = $self->ra->get_dir($path, $rev);
+ $path =~ s#^/*#/#g;
+ my $p = $path;
+ # Strip the irrelevant part of the path.
+ $p =~ s#^/+\Q$self->{path}\E(/|$)#/#;
+ # Ensure the path is terminated by a `/'.
+ $p =~ s#/*$#/#;
+
+ # The properties contain all the internal SVN stuff nobody
+ # (usually) cares about.
+ my $interesting_props = 0;
+ foreach (keys %{$props}) {
+ # If it doesn't start with `svn:', it must be a
+ # user-defined property.
+ ++$interesting_props and next if $_ !~ /^svn:/;
+ # FIXME: Fragile, if SVN adds new public properties,
+ # this needs to be updated.
+ ++$interesting_props if /^svn:(?:ignore|keywords|executable
+ |eol-style|mime-type
+ |externals|needs-lock)$/x;
+ }
+ &$sub($self, $p, $props) if $interesting_props;
+
+ foreach (sort keys %$dirent) {
+ next if $dirent->{$_}->{kind} != $SVN::Node::dir;
+ $self->prop_walk($self->{path} . $p . $_, $rev, $sub);
+ }
+}
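+
+# An illustrative call (the property name is only an example):
+#
+#   $gs->prop_walk($gs->{path}, $rev, sub {
+#           my ($gs, $path, $props) = @_;
+#           print "$path sets svn:ignore\n" if defined $props->{'svn:ignore'};
+#   });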
+
+sub last_rev { ($_[0]->last_rev_commit)[0] }
+sub last_commit { ($_[0]->last_rev_commit)[1] }
+
+# returns the newest SVN revision number and newest commit SHA1
+sub last_rev_commit {
+ my ($self) = @_;
+ if (defined $self->{last_rev} && defined $self->{last_commit}) {
+ return ($self->{last_rev}, $self->{last_commit});
+ }
+ my $c = ::verify_ref($self->refname.'^0');
+ if ($c && !$self->use_svm_props && !$self->no_metadata) {
+ my $rev = (::cmt_metadata($c))[1];
+ if (defined $rev) {
+ ($self->{last_rev}, $self->{last_commit}) = ($rev, $c);
+ return ($rev, $c);
+ }
+ }
+ my $map_path = $self->map_path;
+ unless (-e $map_path) {
+ ($self->{last_rev}, $self->{last_commit}) = (undef, undef);
+ return (undef, undef);
+ }
+ my ($rev, $commit) = $self->rev_map_max(1);
+ ($self->{last_rev}, $self->{last_commit}) = ($rev, $commit);
+ return ($rev, $commit);
+}
+
+sub get_fetch_range {
+ my ($self, $min, $max) = @_;
+ $max ||= $self->ra->get_latest_revnum;
+ $min ||= $self->rev_map_max;
+ (++$min, $max);
+}
+
+sub tmp_config {
+ my (@args) = @_;
+ my $old_def_config = "$ENV{GIT_DIR}/svn/config";
+ my $config = "$ENV{GIT_DIR}/svn/.metadata";
+ if (! -f $config && -f $old_def_config) {
+ rename $old_def_config, $config or
+ die "Failed rename $old_def_config => $config: $!\n";
+ }
+ my $old_config = $ENV{GIT_CONFIG};
+ $ENV{GIT_CONFIG} = $config;
+ $@ = undef;
+ my @ret = eval {
+ unless (-f $config) {
+ mkfile($config);
+ open my $fh, '>', $config or
+ die "Can't open $config: $!\n";
+ print $fh "; This file is used internally by ",
+ "git-svn\n" or die
+ "Couldn't write to $config: $!\n";
+ print $fh "; You should not have to edit it\n" or
+ die "Couldn't write to $config: $!\n";
+ close $fh or die "Couldn't close $config: $!\n";
+ }
+ command('config', @args);
+ };
+ my $err = $@;
+ if (defined $old_config) {
+ $ENV{GIT_CONFIG} = $old_config;
+ } else {
+ delete $ENV{GIT_CONFIG};
+ }
+ die $err if $err;
+ wantarray ? @ret : $ret[0];
+}
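+
+# e.g. tmp_config('--get', 'svn-remote.svn.url') reads the key from
+# $GIT_DIR/svn/.metadata rather than the repository config, since
+# GIT_CONFIG is temporarily pointed at that file above.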
+
+sub tmp_index_do {
+ my ($self, $sub) = @_;
+ my $old_index = $ENV{GIT_INDEX_FILE};
+ $ENV{GIT_INDEX_FILE} = $self->{index};
+ $@ = undef;
+ my @ret = eval {
+ my ($dir, $base) = ($self->{index} =~ m#^(.*?)/?([^/]+)$#);
+ mkpath([$dir]) unless -d $dir;
+ &$sub;
+ };
+ my $err = $@;
+ if (defined $old_index) {
+ $ENV{GIT_INDEX_FILE} = $old_index;
+ } else {
+ delete $ENV{GIT_INDEX_FILE};
+ }
+ die $err if $err;
+ wantarray ? @ret : $ret[0];
+}
+
+sub assert_index_clean {
+ my ($self, $treeish) = @_;
+
+ $self->tmp_index_do(sub {
+ command_noisy('read-tree', $treeish) unless -e $self->{index};
+ my $x = command_oneline('write-tree');
+ my ($y) = (command(qw/cat-file commit/, $treeish) =~
+ /^tree ($::sha1)/mo);
+ return if $y eq $x;
+
+ warn "Index mismatch: $y != $x\nrereading $treeish\n";
+ unlink $self->{index} or die "unlink $self->{index}: $!\n";
+ command_noisy('read-tree', $treeish);
+ $x = command_oneline('write-tree');
+ if ($y ne $x) {
+ fatal "trees ($treeish) $y != $x\n",
+ "Something is seriously wrong...";
+ }
+ });
+}
+
+sub get_commit_parents {
+ my ($self, $log_entry) = @_;
+ my (%seen, @ret, @tmp);
+ # legacy support for 'set-tree'; this is only used by set_tree_cb:
+ if (my $ip = $self->{inject_parents}) {
+ if (my $commit = delete $ip->{$log_entry->{revision}}) {
+ push @tmp, $commit;
+ }
+ }
+ if (my $cur = ::verify_ref($self->refname.'^0')) {
+ push @tmp, $cur;
+ }
+ if (my $ipd = $self->{inject_parents_dcommit}) {
+ if (my $commit = delete $ipd->{$log_entry->{revision}}) {
+ push @tmp, @$commit;
+ }
+ }
+ push @tmp, $_ foreach (@{$log_entry->{parents}}, @tmp);
+ while (my $p = shift @tmp) {
+ next if $seen{$p};
+ $seen{$p} = 1;
+ push @ret, $p;
+ }
+ @ret;
+}
+
+sub rewrite_root {
+ my ($self) = @_;
+ return $self->{-rewrite_root} if exists $self->{-rewrite_root};
+ my $k = "svn-remote.$self->{repo_id}.rewriteRoot";
+ my $rwr = eval { command_oneline(qw/config --get/, $k) };
+ if ($rwr) {
+ $rwr =~ s#/+$##;
+ if ($rwr !~ m#^[a-z\+]+://#) {
+ die "$rwr is not a valid URL (key: $k)\n";
+ }
+ }
+ $self->{-rewrite_root} = $rwr;
+}
+
+sub rewrite_uuid {
+ my ($self) = @_;
+ return $self->{-rewrite_uuid} if exists $self->{-rewrite_uuid};
+ my $k = "svn-remote.$self->{repo_id}.rewriteUUID";
+ my $rwid = eval { command_oneline(qw/config --get/, $k) };
+ if ($rwid) {
+ $rwid =~ s#/+$##;
+ if ($rwid !~ m#^[a-f0-9]{8}-(?:[a-f0-9]{4}-){3}[a-f0-9]{12}$#) {
+ die "$rwid is not a valid UUID (key: $k)\n";
+ }
+ }
+ $self->{-rewrite_uuid} = $rwid;
+}
+
+sub metadata_url {
+ my ($self) = @_;
+ ($self->rewrite_root || $self->{url}) .
+ (length $self->{path} ? '/' . $self->{path} : '');
+}
+
+sub full_url {
+ my ($self) = @_;
+ $self->{url} . (length $self->{path} ? '/' . $self->{path} : '');
+}
+
+sub full_pushurl {
+ my ($self) = @_;
+ if ($self->{pushurl}) {
+ return $self->{pushurl} . (length $self->{path} ? '/' .
+ $self->{path} : '');
+ } else {
+ return $self->full_url;
+ }
+}
+
+sub set_commit_header_env {
+ my ($log_entry) = @_;
+ my %env;
+ foreach my $ned (qw/NAME EMAIL DATE/) {
+ foreach my $ac (qw/AUTHOR COMMITTER/) {
+ $env{"GIT_${ac}_${ned}"} = $ENV{"GIT_${ac}_${ned}"};
+ }
+ }
+
+ $ENV{GIT_AUTHOR_NAME} = $log_entry->{name};
+ $ENV{GIT_AUTHOR_EMAIL} = $log_entry->{email};
+ $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_entry->{date};
+
+ $ENV{GIT_COMMITTER_NAME} = (defined $log_entry->{commit_name})
+ ? $log_entry->{commit_name}
+ : $log_entry->{name};
+ $ENV{GIT_COMMITTER_EMAIL} = (defined $log_entry->{commit_email})
+ ? $log_entry->{commit_email}
+ : $log_entry->{email};
+ \%env;
+}
+
+sub restore_commit_header_env {
+ my ($env) = @_;
+ foreach my $ned (qw/NAME EMAIL DATE/) {
+ foreach my $ac (qw/AUTHOR COMMITTER/) {
+ my $k = "GIT_${ac}_${ned}";
+ if (defined $env->{$k}) {
+ $ENV{$k} = $env->{$k};
+ } else {
+ delete $ENV{$k};
+ }
+ }
+ }
+}
+
+sub gc {
+ command_noisy('gc', '--auto');
+};
+
+sub do_git_commit {
+ my ($self, $log_entry) = @_;
+ my $lr = $self->last_rev;
+ if (defined $lr && $lr >= $log_entry->{revision}) {
+ die "Last fetched revision of ", $self->refname,
+ " was r$lr, but we are about to fetch: ",
+ "r$log_entry->{revision}!\n";
+ }
+ if (my $c = $self->rev_map_get($log_entry->{revision})) {
+ croak "$log_entry->{revision} = $c already exists! ",
+ "Why are we refetching it?\n";
+ }
+ my $old_env = set_commit_header_env($log_entry);
+ my $tree = $log_entry->{tree};
+ if (!defined $tree) {
+ $tree = $self->tmp_index_do(sub {
+ command_oneline('write-tree') });
+ }
+ die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o;
+
+ my @exec = ('git', 'commit-tree', $tree);
+ foreach ($self->get_commit_parents($log_entry)) {
+ push @exec, '-p', $_;
+ }
+ defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec))
+ or croak $!;
+ binmode $msg_fh;
+
+ # we always get UTF-8 from SVN, but we may want our commits in
+ # a different encoding.
+ if (my $enc = Git::config('i18n.commitencoding')) {
+ require Encode;
+ Encode::from_to($log_entry->{log}, 'UTF-8', $enc);
+ }
+ print $msg_fh $log_entry->{log} or croak $!;
+ restore_commit_header_env($old_env);
+ unless ($self->no_metadata) {
+ print $msg_fh "\ngit-svn-id: $log_entry->{metadata}\n"
+ or croak $!;
+ }
+ $msg_fh->flush == 0 or croak $!;
+ close $msg_fh or croak $!;
+ chomp(my $commit = do { local $/; <$out_fh> });
+ close $out_fh or croak $!;
+ waitpid $pid, 0;
+ croak $? if $?;
+ if ($commit !~ /^$::sha1$/o) {
+ die "Failed to commit, invalid sha1: $commit\n";
+ }
+
+ $self->rev_map_set($log_entry->{revision}, $commit, 1);
+
+ $self->{last_rev} = $log_entry->{revision};
+ $self->{last_commit} = $commit;
+ print "r$log_entry->{revision}" unless $::_q > 1;
+ if (defined $log_entry->{svm_revision}) {
+ print " (\@$log_entry->{svm_revision})" unless $::_q > 1;
+ $self->rev_map_set($log_entry->{svm_revision}, $commit,
+ 0, $self->svm_uuid);
+ }
+ print " = $commit ($self->{ref_id})\n" unless $::_q > 1;
+ if (--$_gc_nr == 0) {
+ $_gc_nr = $_gc_period;
+ gc();
+ }
+ return $commit;
+}
+
+sub match_paths {
+ my ($self, $paths, $r) = @_;
+ return 1 if $self->{path} eq '';
+ if (my $path = $paths->{"/$self->{path}"}) {
+ return ($path->{action} eq 'D') ? 0 : 1;
+ }
+ $self->{path_regex} ||= qr/^\/\Q$self->{path}\E\//;
+ if (grep /$self->{path_regex}/, keys %$paths) {
+ return 1;
+ }
+ my $c = '';
+ foreach (split m#/#, $self->{path}) {
+ $c .= "/$_";
+ next unless ($paths->{$c} &&
+ ($paths->{$c}->{action} =~ /^[AR]$/));
+ if ($self->ra->check_path($self->{path}, $r) ==
+ $SVN::Node::dir) {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+sub find_parent_branch {
+ my ($self, $paths, $rev) = @_;
+ return undef unless $self->follow_parent;
+ unless (defined $paths) {
+ my $err_handler = $SVN::Error::handler;
+ $SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs;
+ $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1,
+ sub { $paths = $_[0] });
+ $SVN::Error::handler = $err_handler;
+ }
+ return undef unless defined $paths;
+
+ # look for a parent from another branch:
+ my @b_path_components = split m#/#, $self->{path};
+ my @a_path_components;
+ my $i;
+ while (@b_path_components) {
+ $i = $paths->{'/'.join('/', @b_path_components)};
+ last if $i && defined $i->{copyfrom_path};
+ unshift(@a_path_components, pop(@b_path_components));
+ }
+ return undef unless defined $i && defined $i->{copyfrom_path};
+ my $branch_from = $i->{copyfrom_path};
+ if (@a_path_components) {
+ print STDERR "branch_from: $branch_from => ";
+ $branch_from .= '/'.join('/', @a_path_components);
+ print STDERR $branch_from, "\n";
+ }
+ my $r = $i->{copyfrom_rev};
+ my $repos_root = $self->ra->{repos_root};
+ my $url = $self->ra->{url};
+ my $new_url = $url . $branch_from;
+ print STDERR "Found possible branch point: ",
+ "$new_url => ", $self->full_url, ", $r\n"
+ unless $::_q > 1;
+ $branch_from =~ s#^/##;
+ my $gs = $self->other_gs($new_url, $url,
+ $branch_from, $r, $self->{ref_id});
+ my ($r0, $parent) = $gs->find_rev_before($r, 1);
+ {
+ my ($base, $head);
+ if (!defined $r0 || !defined $parent) {
+ ($base, $head) = parse_revision_argument(0, $r);
+ } else {
+ if ($r0 < $r) {
+ $gs->ra->get_log([$gs->{path}], $r0 + 1, $r, 1,
+ 0, 1, sub { $base = $_[1] - 1 });
+ }
+ }
+ if (defined $base && $base <= $r) {
+ $gs->fetch($base, $r);
+ }
+ ($r0, $parent) = $gs->find_rev_before($r, 1);
+ }
+ if (defined $r0 && defined $parent) {
+ print STDERR "Found branch parent: ($self->{ref_id}) $parent\n"
+ unless $::_q > 1;
+ my $ed;
+ if ($self->ra->can_do_switch) {
+ $self->assert_index_clean($parent);
+ print STDERR "Following parent with do_switch\n"
+ unless $::_q > 1;
+ # do_switch works with svn/trunk >= r22312, but that
+ # is not included with SVN 1.4.3 (the latest version
+ # at the moment), so we can't rely on it
+ $self->{last_rev} = $r0;
+ $self->{last_commit} = $parent;
+ $ed = Git::SVN::Fetcher->new($self, $gs->{path});
+ $gs->ra->gs_do_switch($r0, $rev, $gs,
+ $self->full_url, $ed)
+ or die "SVN connection failed somewhere...\n";
+ } elsif ($self->ra->trees_match($new_url, $r0,
+ $self->full_url, $rev)) {
+ print STDERR "Trees match:\n",
+ " $new_url\@$r0\n",
+ " ${\$self->full_url}\@$rev\n",
+ "Following parent with no changes\n"
+ unless $::_q > 1;
+ $self->tmp_index_do(sub {
+ command_noisy('read-tree', $parent);
+ });
+ $self->{last_commit} = $parent;
+ } else {
+ print STDERR "Following parent with do_update\n"
+ unless $::_q > 1;
+ $ed = Git::SVN::Fetcher->new($self);
+ $self->ra->gs_do_update($rev, $rev, $self, $ed)
+ or die "SVN connection failed somewhere...\n";
+ }
+ print STDERR "Successfully followed parent\n" unless $::_q > 1;
+ return $self->make_log_entry($rev, [$parent], $ed);
+ }
+ return undef;
+}
+
+sub do_fetch {
+ my ($self, $paths, $rev) = @_;
+ my $ed;
+ my ($last_rev, @parents);
+ if (my $lc = $self->last_commit) {
+ # we can have a branch that was deleted, then re-added
+ # under the same name but copied from another path, in
+ # which case we'll have multiple parents (we don't
+ # want to break the original ref, nor lose copypath info):
+ if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
+ push @{$log_entry->{parents}}, $lc;
+ return $log_entry;
+ }
+ $ed = Git::SVN::Fetcher->new($self);
+ $last_rev = $self->{last_rev};
+ $ed->{c} = $lc;
+ @parents = ($lc);
+ } else {
+ $last_rev = $rev;
+ if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
+ return $log_entry;
+ }
+ $ed = Git::SVN::Fetcher->new($self);
+ }
+ unless ($self->ra->gs_do_update($last_rev, $rev, $self, $ed)) {
+ die "SVN connection failed somewhere...\n";
+ }
+ $self->make_log_entry($rev, \@parents, $ed);
+}
+
+sub mkemptydirs {
+ my ($self, $r) = @_;
+
+ sub scan {
+ my ($r, $empty_dirs, $line) = @_;
+ if (defined $r && $line =~ /^r(\d+)$/) {
+ return 0 if $1 > $r;
+ } elsif ($line =~ /^ \+empty_dir: (.+)$/) {
+ $empty_dirs->{$1} = 1;
+ } elsif ($line =~ /^ \-empty_dir: (.+)$/) {
+ my @d = grep {m[^\Q$1\E(/|$)]} (keys %$empty_dirs);
+ delete @$empty_dirs{@d};
+ }
+ 1; # continue
+ };
+
+ my %empty_dirs = ();
+ my $gz_file = "$self->{dir}/unhandled.log.gz";
+ if (-f $gz_file) {
+ if (!can_compress()) {
+ warn "Compress::Zlib could not be found; ",
+ "empty directories in $gz_file will not be read\n";
+ } else {
+ my $gz = Compress::Zlib::gzopen($gz_file, "rb") or
+ die "Unable to open $gz_file: $!\n";
+ my $line;
+ while ($gz->gzreadline($line) > 0) {
+ scan($r, \%empty_dirs, $line) or last;
+ }
+ $gz->gzclose;
+ }
+ }
+
+ if (open my $fh, '<', "$self->{dir}/unhandled.log") {
+ binmode $fh or croak "binmode: $!";
+ while (<$fh>) {
+ scan($r, \%empty_dirs, $_) or last;
+ }
+ close $fh;
+ }
+
+ my $strip = qr/\A\Q$self->{path}\E(?:\/|$)/;
+ foreach my $d (sort keys %empty_dirs) {
+ $d = uri_decode($d);
+ $d =~ s/$strip//;
+ next unless length($d);
+ next if -d $d;
+ if (-e $d) {
+ warn "$d exists but is not a directory\n";
+ } else {
+ print "creating empty directory: $d\n";
+ mkpath([$d]);
+ }
+ }
+}
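+
+# unhandled.log entries scanned above look like a revision marker line
+# "r1234" followed by lines such as "  +empty_dir: trunk/empty" (paths
+# URI-encoded); they are written by make_log_entry() and get_untracked()
+# below.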
+
+sub get_untracked {
+ my ($self, $ed) = @_;
+ my @out;
+ my $h = $ed->{empty};
+ foreach (sort keys %$h) {
+ my $act = $h->{$_} ? '+empty_dir' : '-empty_dir';
+ push @out, " $act: " . uri_encode($_);
+ warn "W: $act: $_\n";
+ }
+ foreach my $t (qw/dir_prop file_prop/) {
+ $h = $ed->{$t} or next;
+ foreach my $path (sort keys %$h) {
+ my $ppath = $path eq '' ? '.' : $path;
+ foreach my $prop (sort keys %{$h->{$path}}) {
+ next if $SKIP_PROP{$prop};
+ my $v = $h->{$path}->{$prop};
+ my $t_ppath_prop = "$t: " .
+ uri_encode($ppath) . ' ' .
+ uri_encode($prop);
+ if (defined $v) {
+ push @out, " +$t_ppath_prop " .
+ uri_encode($v);
+ } else {
+ push @out, " -$t_ppath_prop";
+ }
+ }
+ }
+ }
+ foreach my $t (qw/absent_file absent_directory/) {
+ $h = $ed->{$t} or next;
+ foreach my $parent (sort keys %$h) {
+ foreach my $path (sort @{$h->{$parent}}) {
+ push @out, " $t: " .
+ uri_encode("$parent/$path");
+ warn "W: $t: $parent/$path ",
+ "Insufficient permissions?\n";
+ }
+ }
+ }
+ \@out;
+}
+
+sub get_tz {
+ # some systems don't handle %z, or mishandle it, so be creative.
+ my $t = shift || time;
+ my $gm = timelocal(gmtime($t));
+ my $sign = qw( + + - )[ $t <=> $gm ];
+ return sprintf("%s%02d%02d", $sign, (gmtime(abs($t - $gm)))[2,1]);
+}
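+
+# (timelocal(gmtime($t)) re-interprets the UTC broken-down time as local
+# time, so $t - $gm is the UTC offset in seconds; e.g. at UTC-0500 this
+# returns '-0500'.)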
+
+# parse_svn_date(DATE)
+# --------------------
+# Given a date (in UTC) from Subversion, return a string in the format
+# "<TZ Offset> <local date/time>" that Git will use.
+#
+# By default the parsed date will be in UTC; if $Git::SVN::_localtime
+# is true we'll convert it to the local timezone instead.
+sub parse_svn_date {
+ my $date = shift || return '+0000 1970-01-01 00:00:00';
+ my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
+ (\d\d)\:(\d\d)\:(\d\d)\.\d*Z$/x) or
+ croak "Unable to parse date: $date\n";
+ my $parsed_date; # Set next.
+
+ if ($Git::SVN::_localtime) {
+ # Translate the Subversion datetime to an epoch time.
+ # Begin by switching ourselves to $date's timezone, UTC.
+ my $old_env_TZ = $ENV{TZ};
+ $ENV{TZ} = 'UTC';
+
+ my $epoch_in_UTC =
+ POSIX::strftime('%s', $S, $M, $H, $d, $m - 1, $Y - 1900);
+
+ # Determine our local timezone (including DST) at the
+ # time of $epoch_in_UTC. $Git::SVN::Log::TZ stored the
+ # value of TZ, if any, at the time we were run.
+ if (defined $Git::SVN::Log::TZ) {
+ $ENV{TZ} = $Git::SVN::Log::TZ;
+ } else {
+ delete $ENV{TZ};
+ }
+
+ my $our_TZ = get_tz();
+
+ # This converts $epoch_in_UTC into our local timezone.
+ my ($sec, $min, $hour, $mday, $mon, $year,
+ $wday, $yday, $isdst) = localtime($epoch_in_UTC);
+
+ $parsed_date = sprintf('%s %04d-%02d-%02d %02d:%02d:%02d',
+ $our_TZ, $year + 1900, $mon + 1,
+ $mday, $hour, $min, $sec);
+
+ # Reset us to the timezone in effect when we entered
+ # this routine.
+ if (defined $old_env_TZ) {
+ $ENV{TZ} = $old_env_TZ;
+ } else {
+ delete $ENV{TZ};
+ }
+ } else {
+ $parsed_date = "+0000 $Y-$m-$d $H:$M:$S";
+ }
+
+ return $parsed_date;
+}
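+
+# e.g. parse_svn_date('2012-07-30T12:34:56.000000Z') yields
+# '+0000 2012-07-30 12:34:56' unless $Git::SVN::_localtime is set.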
+
+sub other_gs {
+ my ($self, $new_url, $url,
+ $branch_from, $r, $old_ref_id) = @_;
+ my $gs = Git::SVN->find_by_url($new_url, $url, $branch_from);
+ unless ($gs) {
+ my $ref_id = $old_ref_id;
+ $ref_id =~ s/\@\d+-*$//;
+ $ref_id .= "\@$r";
+ # just grow a tail if we're not unique enough :x
+ $ref_id .= '-' while find_ref($ref_id);
+ my ($u, $p, $repo_id) = ($new_url, '', $ref_id);
+ if ($u =~ s#^\Q$url\E(/|$)##) {
+ $p = $u;
+ $u = $url;
+ $repo_id = $self->{repo_id};
+ }
+ while (1) {
+ # It is possible to tag two different subdirectories at
+ # the same revision. If the url for an existing ref
+ # does not match, we must either find a ref with a
+ # matching url or create a new ref by growing a tail.
+ $gs = Git::SVN->init($u, $p, $repo_id, $ref_id, 1);
+ my (undef, $max_commit) = $gs->rev_map_max(1);
+ last if (!$max_commit);
+ my ($url) = ::cmt_metadata($max_commit);
+ last if ($url eq $gs->metadata_url);
+ $ref_id .= '-';
+ }
+ print STDERR "Initializing parent: $ref_id\n" unless $::_q > 1;
+ }
+ $gs
+}
+
+sub call_authors_prog {
+ my ($orig_author) = @_;
+ $orig_author = command_oneline('rev-parse', '--sq-quote', $orig_author);
+ my $author = `$::_authors_prog $orig_author`;
+ if ($? != 0) {
+ die "$::_authors_prog failed with exit code $?\n"
+ }
+ if ($author =~ /^\s*(.+?)\s*<(.*)>\s*$/) {
+ my ($name, $email) = ($1, $2);
+ $email = undef if length $2 == 0;
+ return [$name, $email];
+ } else {
+ die "Author: $orig_author: $::_authors_prog returned "
+ . "invalid author format: $author\n";
+ }
+}
+
+sub check_author {
+ my ($author) = @_;
+ if (!defined $author || length $author == 0) {
+ $author = '(no author)';
+ }
+ if (!defined $::users{$author}) {
+ if (defined $::_authors_prog) {
+ $::users{$author} = call_authors_prog($author);
+ } elsif (defined $::_authors) {
+ die "Author: $author not defined in $::_authors file\n";
+ }
+ }
+ $author;
+}
+
+sub find_extra_svk_parents {
+ my ($self, $ed, $tickets, $parents) = @_;
+ # aha! svk:merge property changed...
+ my @tickets = split "\n", $tickets;
+ my @known_parents;
+ for my $ticket ( @tickets ) {
+ my ($uuid, $path, $rev) = split /:/, $ticket;
+ if ( $uuid eq $self->ra_uuid ) {
+ my $url = $self->{url};
+ my $repos_root = $url;
+ my $branch_from = $path;
+ $branch_from =~ s{^/}{};
+ my $gs = $self->other_gs($repos_root."/".$branch_from,
+ $url,
+ $branch_from,
+ $rev,
+ $self->{ref_id});
+ if ( my $commit = $gs->rev_map_get($rev, $uuid) ) {
+ # wahey! we found it, but it might be
+ # an old one (!)
+ push @known_parents, [ $rev, $commit ];
+ }
+ }
+ }
+ # Ordering matters; highest-numbered commit merge tickets
+ # first, as they may account for later merge ticket additions
+ # or changes.
+ @known_parents = map {$_->[1]} sort {$b->[0] <=> $a->[0]} @known_parents;
+ for my $parent ( @known_parents ) {
+ my @cmd = ('rev-list', $parent, map { "^$_" } @$parents );
+ my ($msg_fh, $ctx) = command_output_pipe(@cmd);
+ my $new;
+ while ( <$msg_fh> ) {
+ $new = 1;
+ last;
+ }
+ command_close_pipe($msg_fh, $ctx);
+ if ( $new ) {
+ print STDERR
+ "Found merge parent (svk:merge ticket): $parent\n";
+ push @$parents, $parent;
+ }
+ }
+}
+
+sub lookup_svn_merge {
+ my $uuid = shift;
+ my $url = shift;
+ my $merge = shift;
+
+ my ($source, $revs) = split ":", $merge;
+ my $path = $source;
+ $path =~ s{^/}{};
+ my $gs = Git::SVN->find_by_url($url.$source, $url, $path);
+ if ( !$gs ) {
+ warn "Couldn't find revmap for $url$source\n";
+ return;
+ }
+ my @ranges = split ",", $revs;
+ my ($tip, $tip_commit);
+ my @merged_commit_ranges;
+ # find the tip
+ for my $range ( @ranges ) {
+ my ($bottom, $top) = split "-", $range;
+ $top ||= $bottom;
+ my $bottom_commit = $gs->find_rev_after( $bottom, 1, $top );
+ my $top_commit = $gs->find_rev_before( $top, 1, $bottom );
+
+ unless ($top_commit and $bottom_commit) {
+ warn "W:unknown path/rev in svn:mergeinfo "
+ ."dirprop: $source:$range\n";
+ next;
+ }
+
+ if (scalar(command('rev-parse', "$bottom_commit^@"))) {
+ push @merged_commit_ranges,
+ "$bottom_commit^..$top_commit";
+ } else {
+ push @merged_commit_ranges, "$top_commit";
+ }
+
+ if ( !defined $tip or $top > $tip ) {
+ $tip = $top;
+ $tip_commit = $top_commit;
+ }
+ }
+ return ($tip_commit, @merged_commit_ranges);
+}
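+
+# For illustration, an svn:mergeinfo entry such as '/branches/topic:1-3,5'
+# (hypothetical path) splits into $source '/branches/topic' and the ranges
+# ('1-3', '5'); each range is then mapped onto a git commit range via the
+# rev_map lookups above.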
+
+sub _rev_list {
+ my ($msg_fh, $ctx) = command_output_pipe(
+ "rev-list", @_,
+ );
+ my @rv;
+ while ( <$msg_fh> ) {
+ chomp;
+ push @rv, $_;
+ }
+ command_close_pipe($msg_fh, $ctx);
+ @rv;
+}
+
+sub check_cherry_pick {
+ my $base = shift;
+ my $tip = shift;
+ my $parents = shift;
+ my @ranges = @_;
+ my %commits = map { $_ => 1 }
+ _rev_list("--no-merges", $tip, "--not", $base, @$parents, "--");
+ for my $range ( @ranges ) {
+ delete @commits{_rev_list($range, "--")};
+ }
+ for my $commit (keys %commits) {
+ if (has_no_changes($commit)) {
+ delete $commits{$commit};
+ }
+ }
+ return (keys %commits);
+}
+
+sub has_no_changes {
+ my $commit = shift;
+
+ my @revs = split / /, command_oneline(
+ qw(rev-list --parents -1 -m), $commit);
+
+ # Commits with no parents, e.g. the start of a partial branch,
+ # have changes by definition.
+ return 1 if (@revs < 2);
+
+ # Commits with multiple parents, e.g. a merge, have no changes
+ # by definition.
+ return 0 if (@revs > 2);
+
+ return (command_oneline("rev-parse", "$commit^{tree}") eq
+ command_oneline("rev-parse", "$commit~1^{tree}"));
+}
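+
+# (`git rev-list --parents -1 -m $commit` prints the commit followed by
+# its parents on one line, so @revs < 2 means a root commit and
+# @revs > 2 means a merge.)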
+
+sub tie_for_persistent_memoization {
+ my $hash = shift;
+ my $path = shift;
+
+ if ($can_use_yaml) {
+ tie %$hash => 'Git::SVN::Memoize::YAML', "$path.yaml";
+ } else {
+ tie %$hash => 'Memoize::Storable', "$path.db", 'nstore';
+ }
+}
+
+# The GIT_DIR environment variable is not always set until after the command
+# line arguments are processed, so we can't memoize in a BEGIN block.
+{
+ my $memoized = 0;
+
+ sub memoize_svn_mergeinfo_functions {
+ return if $memoized;
+ $memoized = 1;
+
+ my $cache_path = "$ENV{GIT_DIR}/svn/.caches/";
+ mkpath([$cache_path]) unless -d $cache_path;
+
+ my %lookup_svn_merge_cache;
+ my %check_cherry_pick_cache;
+ my %has_no_changes_cache;
+
+ tie_for_persistent_memoization(\%lookup_svn_merge_cache,
+ "$cache_path/lookup_svn_merge");
+ memoize 'lookup_svn_merge',
+ SCALAR_CACHE => 'FAULT',
+ LIST_CACHE => ['HASH' => \%lookup_svn_merge_cache],
+ ;
+
+ tie_for_persistent_memoization(\%check_cherry_pick_cache,
+ "$cache_path/check_cherry_pick");
+ memoize 'check_cherry_pick',
+ SCALAR_CACHE => 'FAULT',
+ LIST_CACHE => ['HASH' => \%check_cherry_pick_cache],
+ ;
+
+ tie_for_persistent_memoization(\%has_no_changes_cache,
+ "$cache_path/has_no_changes");
+ memoize 'has_no_changes',
+ SCALAR_CACHE => ['HASH' => \%has_no_changes_cache],
+ LIST_CACHE => 'FAULT',
+ ;
+ }
+
+ sub unmemoize_svn_mergeinfo_functions {
+ return if not $memoized;
+ $memoized = 0;
+
+ Memoize::unmemoize 'lookup_svn_merge';
+ Memoize::unmemoize 'check_cherry_pick';
+ Memoize::unmemoize 'has_no_changes';
+ }
+
+ sub clear_memoized_mergeinfo_caches {
+ die "Only call this method in non-memoized context" if ($memoized);
+
+ my $cache_path = "$ENV{GIT_DIR}/svn/.caches/";
+ return unless -d $cache_path;
+
+ for my $cache_file (("$cache_path/lookup_svn_merge",
+ "$cache_path/check_cherry_pick",
+ "$cache_path/has_no_changes")) {
+ for my $suffix (qw(yaml db)) {
+ my $file = "$cache_file.$suffix";
+ next unless -e $file;
+ unlink($file) or die "unlink($file) failed: $!\n";
+ }
+ }
+ }
+
+
+ Memoize::memoize 'Git::SVN::repos_root';
+}
+
+END {
+ # Force cache writeout explicitly instead of waiting for
+ # global destruction to avoid segfault in Storable:
+ # http://rt.cpan.org/Public/Bug/Display.html?id=36087
+ unmemoize_svn_mergeinfo_functions();
+}
+
+sub parents_exclude {
+ my $parents = shift;
+ my @commits = @_;
+ return unless @commits;
+
+ my @excluded;
+ my $excluded;
+ do {
+ my @cmd = ('rev-list', "-1", @commits, "--not", @$parents );
+ $excluded = command_oneline(@cmd);
+ if ( $excluded ) {
+ my @new;
+ my $found;
+ for my $commit ( @commits ) {
+ if ( $commit eq $excluded ) {
+ push @excluded, $commit;
+ $found++;
+ last;
+ }
+ else {
+ push @new, $commit;
+ }
+ }
+ die "saw commit '$excluded' in rev-list output, "
+ ."but we didn't ask for that commit (wanted: @commits --not @$parents)"
+ unless $found;
+ @commits = @new;
+ }
+ }
+ while ($excluded and @commits);
+
+ return @excluded;
+}
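+#
+# e.g. with hypothetical merge tips $tip_a and $tip_b: each rev-list -1
+# call returns at most one commit from the remaining @commits that is
+# not reachable from @$parents; it is moved to @excluded and the loop
+# repeats until everything left is reachable:
+#
+#   my @extra = parents_exclude(\@parents, $tip_a, $tip_b);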
+
+
+# note: this function should only be called if the various dirprops
+# have actually changed
+sub find_extra_svn_parents {
+ my ($self, $ed, $mergeinfo, $parents) = @_;
+ # aha! svn:mergeinfo property changed...
+
+ memoize_svn_mergeinfo_functions();
+
+ # We first search for merged tips which are not in our
+ # history. Then we figure out which git revisions are in
+ # that tip but not in this revision. If every one of those
+ # revisions is accounted for by the svn:mergeinfo ranges, we
+ # can add the tip as a new parent.
+ my @merges = split "\n", $mergeinfo;
+ my @merge_tips;
+ my $url = $self->{url};
+ my $uuid = $self->ra_uuid;
+ my %ranges;
+ for my $merge ( @merges ) {
+ my ($tip_commit, @ranges) =
+ lookup_svn_merge( $uuid, $url, $merge );
+ if ($tip_commit &&
+ !grep { $_ eq $tip_commit } @$parents ) {
+ push @merge_tips, $tip_commit;
+ $ranges{$tip_commit} = \@ranges;
+ } else {
+ push @merge_tips, undef;
+ }
+ }
+
+ my %excluded = map { $_ => 1 }
+ parents_exclude($parents, grep { defined } @merge_tips);
+
+ # check merge tips for new parents
+ my @new_parents;
+ for my $merge_tip ( @merge_tips ) {
+ my $spec = shift @merges;
+ next unless $merge_tip and $excluded{$merge_tip};
+
+ my $ranges = $ranges{$merge_tip};
+
+ # check out 'new' tips
+ my $merge_base;
+ eval {
+ $merge_base = command_oneline(
+ "merge-base",
+ @$parents, $merge_tip,
+ );
+ };
+ if ($@) {
+ die "An error occurred during merge-base"
+ unless $@->isa("Git::Error::Command");
+
+ warn "W: Cannot find common ancestor between ".
+ "@$parents and $merge_tip. Ignoring merge info.\n";
+ next;
+ }
+
+ # double check that there are no missing non-merge commits
+ my (@incomplete) = check_cherry_pick(
+ $merge_base, $merge_tip,
+ $parents,
+ @$ranges,
+ );
+
+ if ( @incomplete ) {
+ warn "W:svn cherry-pick ignored ($spec) - missing "
+ .@incomplete." commit(s) (eg $incomplete[0])\n";
+ } else {
+ warn
+ "Found merge parent (svn:mergeinfo prop): ",
+ $merge_tip, "\n";
+ push @new_parents, $merge_tip;
+ }
+ }
+
+ # cater for merges which merge commits from multiple branches
+ if ( @new_parents > 1 ) {
+ for ( my $i = 0; $i <= $#new_parents; $i++ ) {
+ for ( my $j = 0; $j <= $#new_parents; $j++ ) {
+ next if $i == $j;
+ next unless $new_parents[$i];
+ next unless $new_parents[$j];
+ my $revs = command_oneline(
+ "rev-list", "-1",
+ "$new_parents[$i]..$new_parents[$j]",
+ );
+ if ( !$revs ) {
+ undef($new_parents[$j]);
+ }
+ }
+ }
+ }
+ push @$parents, grep { defined } @new_parents;
+}
+
+sub make_log_entry {
+ my ($self, $rev, $parents, $ed) = @_;
+ my $untracked = $self->get_untracked($ed);
+
+ my @parents = @$parents;
+ my $ps = $ed->{path_strip} || "";
+ for my $path ( grep { m/$ps/ } keys %{$ed->{dir_prop}} ) {
+ my $props = $ed->{dir_prop}{$path};
+ if ( $props->{"svk:merge"} ) {
+ $self->find_extra_svk_parents
+ ($ed, $props->{"svk:merge"}, \@parents);
+ }
+ if ( $props->{"svn:mergeinfo"} ) {
+ $self->find_extra_svn_parents
+ ($ed,
+ $props->{"svn:mergeinfo"},
+ \@parents);
+ }
+ }
+
+ open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!;
+ print $un "r$rev\n" or croak $!;
+ print $un $_, "\n" foreach @$untracked;
+ my %log_entry = ( parents => \@parents, revision => $rev,
+ log => '');
+
+ my $headrev;
+ my $logged = delete $self->{logged_rev_props};
+ if (!$logged || $self->{-want_revprops}) {
+ my $rp = $self->ra->rev_proplist($rev);
+ foreach (sort keys %$rp) {
+ my $v = $rp->{$_};
+ if (/^svn:(author|date|log)$/) {
+ $log_entry{$1} = $v;
+ } elsif ($_ eq 'svm:headrev') {
+ $headrev = $v;
+ } else {
+ print $un " rev_prop: ", uri_encode($_), ' ',
+ uri_encode($v), "\n";
+ }
+ }
+ } else {
+ map { $log_entry{$_} = $logged->{$_} } keys %$logged;
+ }
+ close $un or croak $!;
+
+ $log_entry{date} = parse_svn_date($log_entry{date});
+ $log_entry{log} .= "\n";
+ my $author = $log_entry{author} = check_author($log_entry{author});
+ my ($name, $email) = defined $::users{$author} ? @{$::users{$author}}
+ : ($author, undef);
+
+ my ($commit_name, $commit_email) = ($name, $email);
+ if ($_use_log_author) {
+ my $name_field;
+ if ($log_entry{log} =~ /From:\s+(.*\S)\s*\n/i) {
+ $name_field = $1;
+ } elsif ($log_entry{log} =~ /Signed-off-by:\s+(.*\S)\s*\n/i) {
+ $name_field = $1;
+ }
+ if (!defined $name_field) {
+ if (!defined $email) {
+ $email = $name;
+ }
+ } elsif ($name_field =~ /(.*?)\s+<(.*)>/) {
+ ($name, $email) = ($1, $2);
+ } elsif ($name_field =~ /(.*)@/) {
+ ($name, $email) = ($1, $name_field);
+ } else {
+ ($name, $email) = ($name_field, $name_field);
+ }
+ }
+ if (defined $headrev && $self->use_svm_props) {
+ if ($self->rewrite_root) {
+ die "Can't have both 'useSvmProps' and 'rewriteRoot' ",
+ "options set!\n";
+ }
+ if ($self->rewrite_uuid) {
+ die "Can't have both 'useSvmProps' and 'rewriteUUID' ",
+ "options set!\n";
+ }
+ my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}i;
+ # we don't want "SVM: initializing mirror for junk" ...
+ return undef if $r == 0;
+ my $svm = $self->svm;
+ if ($uuid ne $svm->{uuid}) {
+ die "UUID mismatch on SVM path:\n",
+ "expected: $svm->{uuid}\n",
+ " got: $uuid\n";
+ }
+ my $full_url = $self->full_url;
+ $full_url =~ s#^\Q$svm->{replace}\E(/|$)#$svm->{source}$1# or
+ die "Failed to replace '$svm->{replace}' with ",
+ "'$svm->{source}' in $full_url\n";
+ # throw away username for storing in records
+ remove_username($full_url);
+ $log_entry{metadata} = "$full_url\@$r $uuid";
+ $log_entry{svm_revision} = $r;
+ $email ||= "$author\@$uuid";
+ $commit_email ||= "$author\@$uuid";
+ } elsif ($self->use_svnsync_props) {
+ my $full_url = $self->svnsync->{url};
+ $full_url .= "/$self->{path}" if length $self->{path};
+ remove_username($full_url);
+ my $uuid = $self->svnsync->{uuid};
+ $log_entry{metadata} = "$full_url\@$rev $uuid";
+ $email ||= "$author\@$uuid";
+ $commit_email ||= "$author\@$uuid";
+ } else {
+ my $url = $self->metadata_url;
+ remove_username($url);
+ my $uuid = $self->rewrite_uuid || $self->ra->get_uuid;
+ $log_entry{metadata} = "$url\@$rev " . $uuid;
+ $email ||= "$author\@" . $uuid;
+ $commit_email ||= "$author\@" . $uuid;
+ }
+ $log_entry{name} = $name;
+ $log_entry{email} = $email;
+ $log_entry{commit_name} = $commit_name;
+ $log_entry{commit_email} = $commit_email;
+ \%log_entry;
+}
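+#
+# The metadata value assembled above is what later shows up in commit
+# messages as the git-svn-id line, e.g. (hypothetical URL and revision;
+# the trailing field is the repository UUID):
+#
+#   git-svn-id: http://svn.example.com/repo/trunk@1234 <repository-uuid>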
+
+sub fetch {
+ my ($self, $min_rev, $max_rev, @parents) = @_;
+ my ($last_rev, $last_commit) = $self->last_rev_commit;
+ my ($base, $head) = $self->get_fetch_range($min_rev, $max_rev);
+ $self->ra->gs_fetch_loop_common($base, $head, [$self]);
+}
+
+sub set_tree_cb {
+ my ($self, $log_entry, $tree, $rev, $date, $author) = @_;
+ $self->{inject_parents} = { $rev => $tree };
+ $self->fetch(undef, undef);
+}
+
+sub set_tree {
+ my ($self, $tree) = (shift, shift);
+ my $log_entry = ::get_commit_entry($tree);
+ unless ($self->{last_rev}) {
+ fatal("Must have an existing revision to commit");
+ }
+ my %ed_opts = ( r => $self->{last_rev},
+ log => $log_entry->{log},
+ ra => $self->ra,
+ tree_a => $self->{last_commit},
+ tree_b => $tree,
+ editor_cb => sub {
+ $self->set_tree_cb($log_entry, $tree, @_) },
+ svn_path => $self->{path} );
+ if (!Git::SVN::Editor->new(\%ed_opts)->apply_diff) {
+ print "No changes\nr$self->{last_rev} = $tree\n";
+ }
+}
+
+sub rebuild_from_rev_db {
+ my ($self, $path) = @_;
+ my $r = -1;
+ open my $fh, '<', $path or croak "open: $!";
+ binmode $fh or croak "binmode: $!";
+ while (<$fh>) {
+ length($_) == 41 or croak "inconsistent size in ($_) != 41";
+ chomp($_);
+ ++$r;
+ next if $_ eq ('0' x 40);
+ $self->rev_map_set($r, $_);
+ print "r$r = $_\n";
+ }
+ close $fh or croak "close: $!";
+ unlink $path or croak "unlink: $!";
+}
+
+sub rebuild {
+ my ($self) = @_;
+ my $map_path = $self->map_path;
+ my $partial = (-e $map_path && ! -z $map_path);
+ return unless ::verify_ref($self->refname.'^0');
+ if (!$partial && ($self->use_svm_props || $self->no_metadata)) {
+ my $rev_db = $self->rev_db_path;
+ $self->rebuild_from_rev_db($rev_db);
+ if ($self->use_svm_props) {
+ my $svm_rev_db = $self->rev_db_path($self->svm_uuid);
+ $self->rebuild_from_rev_db($svm_rev_db);
+ }
+ $self->unlink_rev_db_symlink;
+ return;
+ }
+ print "Rebuilding $map_path ...\n" if (!$partial);
+ my ($base_rev, $head) = ($partial ? $self->rev_map_max_norebuild(1) :
+ (undef, undef));
+ my ($log, $ctx) =
+ command_output_pipe(qw/rev-list --pretty=raw --reverse/,
+ ($head ? "$head.." : "") . $self->refname,
+ '--');
+ my $metadata_url = $self->metadata_url;
+ remove_username($metadata_url);
+ my $svn_uuid = $self->rewrite_uuid || $self->ra_uuid;
+ my $c;
+ while (<$log>) {
+ if ( m{^commit ($::sha1)$} ) {
+ $c = $1;
+ next;
+ }
+ next unless s{^\s*(git-svn-id:)}{$1};
+ my ($url, $rev, $uuid) = ::extract_metadata($_);
+ remove_username($url);
+
+ # ignore merges (from set-tree)
+ next if (!defined $rev || !$uuid);
+
+ # if we merged or otherwise started elsewhere, this is
+ # how we break out of it
+ if (($uuid ne $svn_uuid) ||
+ ($metadata_url && $url && ($url ne $metadata_url))) {
+ next;
+ }
+ if ($partial && $head) {
+ print "Partial-rebuilding $map_path ...\n";
+ print "Currently at $base_rev = $head\n";
+ $head = undef;
+ }
+
+ $self->rev_map_set($rev, $c);
+ print "r$rev = $c\n";
+ }
+ command_close_pipe($log, $ctx);
+ print "Done rebuilding $map_path\n" if (!$partial || !$head);
+ my $rev_db_path = $self->rev_db_path;
+ if (-f $rev_db_path) {
+ unlink $rev_db_path or croak "unlink: $!";
+ }
+ $self->unlink_rev_db_symlink;
+}
+
+# rev_map:
+# Tie::File seems to be prone to offset errors if revisions get sparse,
+# and it's not that fast, either. Tie::File is also not in Perl 5.6,
+# so one of my favorite modules is out :< Next up would be one of the
+# DBM modules, but I'm not sure which is most portable...
+#
+# This is the replacement for the rev_db format, which was too big
+# and inefficient for large repositories with a lot of sparse history
+# (mainly tags)
+#
+# The format is this:
+# - 24 bytes for every record,
+# * 4 bytes for the integer representing an SVN revision number
+# * 20 bytes representing the sha1 of a git commit
+# - No empty padding records like the old format
+# (except the last record, which can be overwritten)
+# - new records are written append-only since SVN revision numbers
+# increase monotonically
+# - lookups on SVN revision number are done via a binary search
+# - Piping the file to xxd -c24 is a good way of dumping it for
+# viewing or editing (piped back through xxd -r), should the need
+# ever arise.
+# - The last record can be a padding record with an all-zero sha1.
+# This is used to optimize fetch performance when using multiple
+# "fetch" directives in .git/config
+#
+# These files are disposable unless noMetadata or useSvmProps is set
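+#
+# A record can be built and taken apart with pack/unpack using the
+# rev_map_fmt constant defined earlier in this file (assumed to be
+# 'NH40': a 32-bit big-endian revision plus 40 sha1 hex digits):
+#
+#   my $record = pack('NH40', 1234, 'deadbeef' x 5);   # exactly 24 bytes
+#   my ($rev, $hex_sha1) = unpack('NH40', $record);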
+
+sub _rev_map_set {
+ my ($fh, $rev, $commit) = @_;
+
+ binmode $fh or croak "binmode: $!";
+ my $size = (stat($fh))[7];
+ ($size % 24) == 0 or croak "inconsistent size: $size";
+
+ my $wr_offset = 0;
+ if ($size > 0) {
+ sysseek($fh, -24, SEEK_END) or croak "seek: $!";
+ my $read = sysread($fh, my $buf, 24) or croak "read: $!";
+ $read == 24 or croak "read only $read bytes (!= 24)";
+ my ($last_rev, $last_commit) = unpack(rev_map_fmt, $buf);
+ if ($last_commit eq ('0' x40)) {
+ if ($size >= 48) {
+ sysseek($fh, -48, SEEK_END) or croak "seek: $!";
+ $read = sysread($fh, $buf, 24) or
+ croak "read: $!";
+ $read == 24 or
+ croak "read only $read bytes (!= 24)";
+ ($last_rev, $last_commit) =
+ unpack(rev_map_fmt, $buf);
+ if ($last_commit eq ('0' x40)) {
+ croak "inconsistent .rev_map\n";
+ }
+ }
+ if ($last_rev >= $rev) {
+ croak "last_rev is higher!: $last_rev >= $rev";
+ }
+ $wr_offset = -24;
+ }
+ }
+ sysseek($fh, $wr_offset, SEEK_END) or croak "seek: $!";
+ syswrite($fh, pack(rev_map_fmt, $rev, $commit), 24) == 24 or
+ croak "write: $!";
+}
+
+sub _rev_map_reset {
+ my ($fh, $rev, $commit) = @_;
+ my $c = _rev_map_get($fh, $rev);
+ $c eq $commit or die "_rev_map_reset(@_) commit $c does not match!\n";
+ my $offset = sysseek($fh, 0, SEEK_CUR) or croak "seek: $!";
+ truncate $fh, $offset or croak "truncate: $!";
+}
+
+sub mkfile {
+ my ($path) = @_;
+ unless (-e $path) {
+ my ($dir, $base) = ($path =~ m#^(.*?)/?([^/]+)$#);
+ mkpath([$dir]) unless -d $dir;
+ open my $fh, '>>', $path or die "Couldn't create $path: $!\n";
+ close $fh or die "Couldn't close (create) $path: $!\n";
+ }
+}
+
+sub rev_map_set {
+ my ($self, $rev, $commit, $update_ref, $uuid) = @_;
+ defined $commit or die "missing arg3\n";
+ length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n";
+ my $db = $self->map_path($uuid);
+ my $db_lock = "$db.lock";
+ my $sigmask;
+ $update_ref ||= 0;
+ if ($update_ref) {
+ $sigmask = POSIX::SigSet->new();
+ my $signew = POSIX::SigSet->new(SIGINT, SIGHUP, SIGTERM,
+ SIGALRM, SIGUSR1, SIGUSR2);
+ sigprocmask(SIG_BLOCK, $signew, $sigmask) or
+ croak "Can't block signals: $!";
+ }
+ mkfile($db);
+
+ $LOCKFILES{$db_lock} = 1;
+ my $sync;
+ # both of these options make our .rev_db file very, very important
+ # and we can't afford to lose it because rebuild() won't work
+ if ($self->use_svm_props || $self->no_metadata) {
+ $sync = 1;
+ copy($db, $db_lock) or die "rev_map_set(@_): ",
+ "Failed to copy: ",
+ "$db => $db_lock ($!)\n";
+ } else {
+ rename $db, $db_lock or die "rev_map_set(@_): ",
+ "Failed to rename: ",
+ "$db => $db_lock ($!)\n";
+ }
+
+ sysopen(my $fh, $db_lock, O_RDWR | O_CREAT)
+ or croak "Couldn't open $db_lock: $!\n";
+ if ($update_ref eq 'reset') {
+ clear_memoized_mergeinfo_caches();
+ _rev_map_reset($fh, $rev, $commit);
+ } else {
+ _rev_map_set($fh, $rev, $commit);
+ }
+
+ if ($sync) {
+ $fh->flush or die "Couldn't flush $db_lock: $!\n";
+ $fh->sync or die "Couldn't sync $db_lock: $!\n";
+ }
+ close $fh or croak $!;
+ if ($update_ref) {
+ $_head = $self;
+ my $note = "";
+ $note = " ($update_ref)" if ($update_ref !~ /^\d*$/);
+ command_noisy('update-ref', '-m', "r$rev$note",
+ $self->refname, $commit);
+ }
+ rename $db_lock, $db or die "rev_map_set(@_): ", "Failed to rename: ",
+ "$db_lock => $db ($!)\n";
+ delete $LOCKFILES{$db_lock};
+ if ($update_ref) {
+ sigprocmask(SIG_SETMASK, $sigmask) or
+ croak "Can't restore signal mask: $!";
+ }
+}
+
+# If $want_commit is set, this will return an array of (rev, commit)
+# where commit _must_ be a valid commit in the archive.
+# Otherwise, it'll return the max revision (whether the commit there
+# is valid or just an all-zero ('0' x 40) placeholder).
+sub rev_map_max {
+ my ($self, $want_commit) = @_;
+ $self->rebuild;
+ my ($r, $c) = $self->rev_map_max_norebuild($want_commit);
+ $want_commit ? ($r, $c) : $r;
+}
+
+sub rev_map_max_norebuild {
+ my ($self, $want_commit) = @_;
+ my $map_path = $self->map_path;
+ stat $map_path or return $want_commit ? (0, undef) : 0;
+ sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
+ binmode $fh or croak "binmode: $!";
+ my $size = (stat($fh))[7];
+ ($size % 24) == 0 or croak "inconsistent size: $size";
+
+ if ($size == 0) {
+ close $fh or croak "close: $!";
+ return $want_commit ? (0, undef) : 0;
+ }
+
+ sysseek($fh, -24, SEEK_END) or croak "seek: $!";
+ sysread($fh, my $buf, 24) == 24 or croak "read: $!";
+ my ($r, $c) = unpack(rev_map_fmt, $buf);
+ if ($want_commit && $c eq ('0' x40)) {
+ if ($size < 48) {
+ return $want_commit ? (0, undef) : 0;
+ }
+ sysseek($fh, -48, SEEK_END) or croak "seek: $!";
+ sysread($fh, $buf, 24) == 24 or croak "read: $!";
+ ($r, $c) = unpack(rev_map_fmt, $buf);
+ if ($c eq ('0'x40)) {
+ croak "Penultimate record is all-zeroes in $map_path";
+ }
+ }
+ close $fh or croak "close: $!";
+ $want_commit ? ($r, $c) : $r;
+}
+
+sub rev_map_get {
+ my ($self, $rev, $uuid) = @_;
+ my $map_path = $self->map_path($uuid);
+ return undef unless -e $map_path;
+
+ sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
+ my $c = _rev_map_get($fh, $rev);
+ close($fh) or croak "close: $!";
+ $c
+}
+
+sub _rev_map_get {
+ my ($fh, $rev) = @_;
+
+ binmode $fh or croak "binmode: $!";
+ my $size = (stat($fh))[7];
+ ($size % 24) == 0 or croak "inconsistent size: $size";
+
+ if ($size == 0) {
+ return undef;
+ }
+
+ my ($l, $u) = (0, $size - 24);
+
+ while ($l <= $u) {
+ my $i = int(($l/24 + $u/24) / 2) * 24;
+ sysseek($fh, $i, SEEK_SET) or croak "seek: $!";
+ sysread($fh, my $buf, 24) == 24 or croak "read: $!";
+ my ($r, $c) = unpack(rev_map_fmt, $buf);
+
+ if ($r < $rev) {
+ $l = $i + 24;
+ } elsif ($r > $rev) {
+ $u = $i - 24;
+ } else { # $r == $rev
+ return $c eq ('0' x 40) ? undef : $c;
+ }
+ }
+ undef;
+}
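+#
+# e.g. a lookup through the public wrapper above (hypothetical $gs
+# object and revision number):
+#
+#   my $sha1 = $gs->rev_map_get(1234);   # undef if r1234 was never mapped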
+
+# Finds the newest svn revision at or before $rev (at $rev itself
+# only if $eq_ok is true) for the current branch. It will not search
+# any lower than $min_rev. Returns the git commit hash and svn
+# revision number if found, else (undef, undef).
+sub find_rev_before {
+ my ($self, $rev, $eq_ok, $min_rev) = @_;
+ --$rev unless $eq_ok;
+ $min_rev ||= 1;
+ my $max_rev = $self->rev_map_max;
+ $rev = $max_rev if ($rev > $max_rev);
+ while ($rev >= $min_rev) {
+ if (my $c = $self->rev_map_get($rev)) {
+ return ($rev, $c);
+ }
+ --$rev;
+ }
+ return (undef, undef);
+}
+
+# Finds the oldest svn revision at or after $rev (at $rev itself
+# only if $eq_ok is true) for the current branch. It will not search
+# any higher than $max_rev. Returns the git commit hash and svn
+# revision number if found, else (undef, undef).
+sub find_rev_after {
+ my ($self, $rev, $eq_ok, $max_rev) = @_;
+ ++$rev unless $eq_ok;
+ $max_rev ||= $self->rev_map_max;
+ while ($rev <= $max_rev) {
+ if (my $c = $self->rev_map_get($rev)) {
+ return ($rev, $c);
+ }
+ ++$rev;
+ }
+ return (undef, undef);
+}
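+#
+# e.g. (hypothetical numbers) the nearest fetched revision at or below
+# r100, never looking below r50:
+#
+#   my ($rev, $commit) = $gs->find_rev_before(100, 1, 50);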
+
+sub _new {
+ my ($class, $repo_id, $ref_id, $path) = @_;
+ unless (defined $repo_id && length $repo_id) {
+ $repo_id = $default_repo_id;
+ }
+ unless (defined $ref_id && length $ref_id) {
+ # Access the prefix option from the git-svn main program if it's loaded.
+ my $prefix = defined &::opt_prefix ? ::opt_prefix() : "";
+ $_[2] = $ref_id =
+ "refs/remotes/$prefix$default_ref_id";
+ }
+ $_[1] = $repo_id;
+ my $dir = "$ENV{GIT_DIR}/svn/$ref_id";
+
+ # Older repos imported by us used $GIT_DIR/svn/foo instead of
+ # $GIT_DIR/svn/refs/remotes/foo when tracking refs/remotes/foo
+ if ($ref_id =~ m{^refs/remotes/(.*)}) {
+ my $old_dir = "$ENV{GIT_DIR}/svn/$1";
+ if (-d $old_dir && ! -d $dir) {
+ $dir = $old_dir;
+ }
+ }
+
+ $_[3] = $path = '' unless (defined $path);
+ mkpath([$dir]);
+ bless {
+ ref_id => $ref_id, dir => $dir, index => "$dir/index",
+ path => $path, config => "$ENV{GIT_DIR}/svn/config",
+ map_root => "$dir/.rev_map", repo_id => $repo_id }, $class;
+}
+
+# for read-only access of old .rev_db formats
+sub unlink_rev_db_symlink {
+ my ($self) = @_;
+ my $link = $self->rev_db_path;
+ $link =~ s/\.[\w-]+$// or croak "missing UUID at the end of $link";
+ if (-l $link) {
+ unlink $link or croak "unlink($link) failed: $!";
+ }
+}
+
+sub rev_db_path {
+ my ($self, $uuid) = @_;
+ my $db_path = $self->map_path($uuid);
+ $db_path =~ s{/\.rev_map\.}{/\.rev_db\.}
+ or croak "map_path: $db_path does not contain '/.rev_map.' !";
+ $db_path;
+}
+
+# the new replacement for .rev_db
+sub map_path {
+ my ($self, $uuid) = @_;
+ $uuid ||= $self->ra_uuid;
+ "$self->{map_root}.$uuid";
+}
+
+sub uri_encode {
+ my ($f) = @_;
+ $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg;
+ $f
+}
+
+sub uri_decode {
+ my ($f) = @_;
+ $f =~ s#%([0-9a-fA-F]{2})#chr(hex($1))#eg;
+ $f
+}
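+#
+# e.g. uri_encode("foo bar") yields "foo%20bar" and uri_decode() maps
+# it back; '*', '!', ':', '_', '.', '/' and '-' pass through unescaped.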
+
+sub remove_username {
+ $_[0] =~ s{^([^:]*://)[^@]+@}{$1};
+}
+
+1;
diff --git a/perl/Git/SVN/Fetcher.pm b/perl/Git/SVN/Fetcher.pm
index ef8e9ed..76fae9b 100644
--- a/perl/Git/SVN/Fetcher.pm
+++ b/perl/Git/SVN/Fetcher.pm
@@ -57,6 +57,7 @@ sub new {
$self->{file_prop} = {};
$self->{absent_dir} = {};
$self->{absent_file} = {};
+ require Git::IndexInfo;
$self->{gii} = $git_svn->tmp_index_do(sub { Git::IndexInfo->new });
$self->{pathnameencoding} = Git::config('svn.pathnameencoding');
$self;
diff --git a/perl/Git/SVN/GlobSpec.pm b/perl/Git/SVN/GlobSpec.pm
new file mode 100644
index 0000000..96cfd98
--- /dev/null
+++ b/perl/Git/SVN/GlobSpec.pm
@@ -0,0 +1,59 @@
+package Git::SVN::GlobSpec;
+use strict;
+use warnings;
+
+sub new {
+ my ($class, $glob, $pattern_ok) = @_;
+ my $re = $glob;
+ $re =~ s!/+$!!g; # no need for trailing slashes
+ my (@left, @right, @patterns);
+ my $state = "left";
+ my $die_msg = "Only one set of wildcard directories " .
+ "(e.g. '*' or '*/*/*') is supported: '$glob'\n";
+ for my $part (split(m|/|, $glob)) {
+ if ($part =~ /\*/ && $part ne "*") {
+ die "Invalid pattern in '$glob': $part\n";
+ } elsif ($pattern_ok && $part =~ /[{}]/ &&
+ $part !~ /^\{[^{}]+\}/) {
+ die "Invalid pattern in '$glob': $part\n";
+ }
+ if ($part eq "*") {
+ die $die_msg if $state eq "right";
+ $state = "pattern";
+ push(@patterns, "[^/]*");
+ } elsif ($pattern_ok && $part =~ /^\{(.*)\}$/) {
+ die $die_msg if $state eq "right";
+ $state = "pattern";
+ my $p = quotemeta($1);
+ $p =~ s/\\,/|/g;
+ push(@patterns, "(?:$p)");
+ } else {
+ if ($state eq "left") {
+ push(@left, $part);
+ } else {
+ push(@right, $part);
+ $state = "right";
+ }
+ }
+ }
+ my $depth = @patterns;
+ if ($depth == 0) {
+ die "One '*' is needed in glob: '$glob'\n";
+ }
+ my $left = join('/', @left);
+ my $right = join('/', @right);
+ $re = join('/', @patterns);
+ $re = join('\/',
+ grep(length, quotemeta($left), "($re)", quotemeta($right)));
+ my $left_re = qr/^\/\Q$left\E(\/|$)/;
+ bless { left => $left, right => $right, left_regex => $left_re,
+ regex => qr/$re/, glob => $glob, depth => $depth }, $class;
+}
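+
+# A worked example (hypothetical glob): "branches/*/src" splits into
+# left "branches", a single wildcard level, and right "src", so:
+#
+#   my $spec = Git::SVN::GlobSpec->new("branches/*/src", 1);
+#   $spec->full_path("foo");   # "branches/foo/src"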
+
+sub full_path {
+ my ($self, $path) = @_;
+ return (length $self->{left} ? "$self->{left}/" : '') .
+ $path . (length $self->{right} ? "/$self->{right}" : '');
+}
+
+1;
diff --git a/perl/Git/SVN/Log.pm b/perl/Git/SVN/Log.pm
new file mode 100644
index 0000000..3cc1c6f
--- /dev/null
+++ b/perl/Git/SVN/Log.pm
@@ -0,0 +1,395 @@
+package Git::SVN::Log;
+use strict;
+use warnings;
+use Git::SVN::Utils qw(fatal);
+use Git qw(command command_oneline command_output_pipe command_close_pipe);
+use POSIX qw/strftime/;
+use constant commit_log_separator => ('-' x 72) . "\n";
+use vars qw/$TZ $limit $color $pager $non_recursive $verbose $oneline
+ %rusers $show_commit $incremental/;
+
+# Option set in git-svn
+our $_git_format;
+
+sub cmt_showable {
+ my ($c) = @_;
+ return 1 if defined $c->{r};
+
+ # big commit message got truncated by the 16k pretty buffer in rev-list
+ if ($c->{l} && $c->{l}->[-1] eq "...\n" &&
+ $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) {
+ @{$c->{l}} = ();
+ my @log = command(qw/cat-file commit/, $c->{c});
+
+ # shift off the headers
+ shift @log while ($log[0] ne '');
+ shift @log;
+
+ # TODO: make $c->{l} not have a trailing newline in the future
+ @{$c->{l}} = map { "$_\n" } grep !/^git-svn-id: /, @log;
+
+ (undef, $c->{r}, undef) = ::extract_metadata(
+ (grep(/^git-svn-id: /, @log))[-1]);
+ }
+ return defined $c->{r};
+}
+
+sub log_use_color {
+ return $color || Git->repository->get_colorbool('color.diff');
+}
+
+sub git_svn_log_cmd {
+ my ($r_min, $r_max, @args) = @_;
+ my $head = 'HEAD';
+ my (@files, @log_opts);
+ foreach my $x (@args) {
+ if ($x eq '--' || @files) {
+ push @files, $x;
+ } else {
+ if (::verify_ref("$x^0")) {
+ $head = $x;
+ } else {
+ push @log_opts, $x;
+ }
+ }
+ }
+
+ my ($url, $rev, $uuid, $gs) = ::working_head_info($head);
+
+ require Git::SVN;
+ $gs ||= Git::SVN->_new;
+ my @cmd = (qw/log --abbrev-commit --pretty=raw --default/,
+ $gs->refname);
+ push @cmd, '-r' unless $non_recursive;
+ push @cmd, qw/--raw --name-status/ if $verbose;
+ push @cmd, '--color' if log_use_color();
+ push @cmd, @log_opts;
+ if (defined $r_max && $r_max == $r_min) {
+ push @cmd, '--max-count=1';
+ if (my $c = $gs->rev_map_get($r_max)) {
+ push @cmd, $c;
+ }
+ } elsif (defined $r_max) {
+ if ($r_max < $r_min) {
+ ($r_min, $r_max) = ($r_max, $r_min);
+ }
+ my (undef, $c_max) = $gs->find_rev_before($r_max, 1, $r_min);
+ my (undef, $c_min) = $gs->find_rev_after($r_min, 1, $r_max);
+ # If there are no commits in the range, both $c_max and $c_min
+ # will be undefined. If there is at least 1 commit in the
+ # range, both will be defined.
+ return () if !defined $c_min || !defined $c_max;
+ if ($c_min eq $c_max) {
+ push @cmd, '--max-count=1', $c_min;
+ } else {
+ push @cmd, '--boundary', "$c_min..$c_max";
+ }
+ }
+ return (@cmd, @files);
+}
+
+# adapted from pager.c
+sub config_pager {
+ if (! -t *STDOUT) {
+ $ENV{GIT_PAGER_IN_USE} = 'false';
+ $pager = undef;
+ return;
+ }
+ chomp($pager = command_oneline(qw(var GIT_PAGER)));
+ if ($pager eq 'cat') {
+ $pager = undef;
+ }
+ $ENV{GIT_PAGER_IN_USE} = defined($pager);
+}
+
+sub run_pager {
+ return unless defined $pager;
+ pipe my ($rfd, $wfd) or return;
+ defined(my $pid = fork) or fatal "Can't fork: $!";
+ if (!$pid) {
+ open STDOUT, '>&', $wfd or
+ fatal "Can't redirect to stdout: $!";
+ return;
+ }
+ open STDIN, '<&', $rfd or fatal "Can't redirect stdin: $!";
+ $ENV{LESS} ||= 'FRSX';
+ exec $pager or fatal "Can't run pager: $! ($pager)";
+}
+
+sub format_svn_date {
+ my $t = shift || time;
+ require Git::SVN;
+ my $gmoff = Git::SVN::get_tz($t);
+ return strftime("%Y-%m-%d %H:%M:%S $gmoff (%a, %d %b %Y)", localtime($t));
+}
+
+sub parse_git_date {
+ my ($t, $tz) = @_;
+ # Date::Parse isn't in the standard Perl distro :(
+ if ($tz =~ s/^\+//) {
+ $t += tz_to_s_offset($tz);
+ } elsif ($tz =~ s/^\-//) {
+ $t -= tz_to_s_offset($tz);
+ }
+ return $t;
+}
+
+sub set_local_timezone {
+ if (defined $TZ) {
+ $ENV{TZ} = $TZ;
+ } else {
+ delete $ENV{TZ};
+ }
+}
+
+sub tz_to_s_offset {
+ my ($tz) = @_;
+ $tz =~ s/(\d\d)$//;
+ return ($1 * 60) + ($tz * 3600);
+}
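+
+# e.g. tz_to_s_offset("0530") splits the offset into 5 hours and 30
+# minutes and returns 5 * 3600 + 30 * 60 = 19800 seconds.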
+
+sub get_author_info {
+ my ($dest, $author, $t, $tz) = @_;
+ $author =~ s/(?:^\s*|\s*$)//g;
+ $dest->{a_raw} = $author;
+ my $au;
+ if ($::_authors) {
+ $au = $rusers{$author} || undef;
+ }
+ if (!$au) {
+ ($au) = ($author =~ /<([^>]+)\@[^>]+>$/);
+ }
+ $dest->{t} = $t;
+ $dest->{tz} = $tz;
+ $dest->{a} = $au;
+ $dest->{t_utc} = parse_git_date($t, $tz);
+}
+
+sub process_commit {
+ my ($c, $r_min, $r_max, $defer) = @_;
+ if (defined $r_min && defined $r_max) {
+ if ($r_min == $c->{r} && $r_min == $r_max) {
+ show_commit($c);
+ return 0;
+ }
+ return 1 if $r_min == $r_max;
+ if ($r_min < $r_max) {
+ # we need to reverse the print order
+ return 0 if (defined $limit && --$limit < 0);
+ push @$defer, $c;
+ return 1;
+ }
+ if ($r_min != $r_max) {
+ return 1 if ($r_min < $c->{r});
+ return 1 if ($r_max > $c->{r});
+ }
+ }
+ return 0 if (defined $limit && --$limit < 0);
+ show_commit($c);
+ return 1;
+}
+
+my $l_fmt;
+sub show_commit {
+ my $c = shift;
+ if ($oneline) {
+ my $x = "\n";
+ if (my $l = $c->{l}) {
+ while ($l->[0] =~ /^\s*$/) { shift @$l }
+ $x = $l->[0];
+ }
+ $l_fmt ||= 'A' . length($c->{r});
+ print 'r',pack($l_fmt, $c->{r}),' | ';
+ print "$c->{c} | " if $show_commit;
+ print $x;
+ } else {
+ show_commit_normal($c);
+ }
+}
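+
+# $l_fmt is fixed by the first revision printed: a five-digit r12345
+# yields the pack template 'A5', so later, shorter revision numbers
+# are padded with trailing spaces and the columns stay aligned.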
+
+sub show_commit_changed_paths {
+ my ($c) = @_;
+ return unless $c->{changed};
+ print "Changed paths:\n", @{$c->{changed}};
+}
+
+sub show_commit_normal {
+ my ($c) = @_;
+ print commit_log_separator, "r$c->{r} | ";
+ print "$c->{c} | " if $show_commit;
+ print "$c->{a} | ", format_svn_date($c->{t_utc}), ' | ';
+ my $nr_line = 0;
+
+ if (my $l = $c->{l}) {
+ while ($l->[$#$l] eq "\n" && $#$l > 0
+ && $l->[($#$l - 1)] eq "\n") {
+ pop @$l;
+ }
+ $nr_line = scalar @$l;
+ if (!$nr_line) {
+ print "1 line\n\n\n";
+ } else {
+ if ($nr_line == 1) {
+ $nr_line = '1 line';
+ } else {
+ $nr_line .= ' lines';
+ }
+ print $nr_line, "\n";
+ show_commit_changed_paths($c);
+ print "\n";
+ print $_ foreach @$l;
+ }
+ } else {
+ print "1 line\n";
+ show_commit_changed_paths($c);
+ print "\n";
+
+ }
+ foreach my $x (qw/raw stat diff/) {
+ if ($c->{$x}) {
+ print "\n";
+ print $_ foreach @{$c->{$x}}
+ }
+ }
+}
+
+sub cmd_show_log {
+ my (@args) = @_;
+ my ($r_min, $r_max);
+ my $r_last = -1; # prevent dupes
+ set_local_timezone();
+ if (defined $::_revision) {
+ if ($::_revision =~ /^(\d+):(\d+)$/) {
+ ($r_min, $r_max) = ($1, $2);
+ } elsif ($::_revision =~ /^\d+$/) {
+ $r_min = $r_max = $::_revision;
+ } else {
+ fatal "-r$::_revision is not supported, use ",
+ "standard 'git log' arguments instead";
+ }
+ }
+
+ config_pager();
+ @args = git_svn_log_cmd($r_min, $r_max, @args);
+ if (!@args) {
+ print commit_log_separator unless $incremental || $oneline;
+ return;
+ }
+ my $log = command_output_pipe(@args);
+ run_pager();
+ my (@k, $c, $d, $stat);
+ my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/;
+ while (<$log>) {
+ if (/^${esc_color}commit (?:- )?($::sha1_short)/o) {
+ my $cmt = $1;
+ if ($c && cmt_showable($c) && $c->{r} != $r_last) {
+ $r_last = $c->{r};
+ process_commit($c, $r_min, $r_max, \@k) or
+ goto out;
+ }
+ $d = undef;
+ $c = { c => $cmt };
+ } elsif (/^${esc_color}author (.+) (\d+) ([\-\+]?\d+)$/o) {
+ get_author_info($c, $1, $2, $3);
+ } elsif (/^${esc_color}(?:tree|parent|committer) /o) {
+ # ignore
+ } elsif (/^${esc_color}:\d{6} \d{6} $::sha1_short/o) {
+ push @{$c->{raw}}, $_;
+ } elsif (/^${esc_color}[ACRMDT]\t/) {
+ # we could add $SVN->{svn_path} here, but that requires
+ # remote access at the moment (repo_path_split)...
+ s#^(${esc_color})([ACRMDT])\t#$1 $2 #o;
+ push @{$c->{changed}}, $_;
+ } elsif (/^${esc_color}diff /o) {
+ $d = 1;
+ push @{$c->{diff}}, $_;
+ } elsif ($d) {
+ push @{$c->{diff}}, $_;
+ } elsif (/^\ .+\ \|\s*\d+\ $esc_color[\+\-]*
+ $esc_color*[\+\-]*$esc_color$/x) {
+ $stat = 1;
+ push @{$c->{stat}}, $_;
+ } elsif ($stat && /^ \d+ files changed, \d+ insertions/) {
+ push @{$c->{stat}}, $_;
+ $stat = undef;
+ } elsif (/^${esc_color} (git-svn-id:.+)$/o) {
+ ($c->{url}, $c->{r}, undef) = ::extract_metadata($1);
+ } elsif (s/^${esc_color} //o) {
+ push @{$c->{l}}, $_;
+ }
+ }
+ if ($c && defined $c->{r} && $c->{r} != $r_last) {
+ $r_last = $c->{r};
+ process_commit($c, $r_min, $r_max, \@k);
+ }
+ if (@k) {
+ ($r_min, $r_max) = ($r_max, $r_min);
+ process_commit($_, $r_min, $r_max) foreach reverse @k;
+ }
+out:
+ close $log;
+ print commit_log_separator unless $incremental || $oneline;
+}
+
+sub cmd_blame {
+ my $path = pop;
+
+ config_pager();
+ run_pager();
+
+ my ($fh, $ctx, $rev);
+
+ if ($_git_format) {
+ ($fh, $ctx) = command_output_pipe('blame', @_, $path);
+ while (my $line = <$fh>) {
+ if ($line =~ /^\^?([[:xdigit:]]+)\s/) {
+ # Uncommitted edits show up as a rev ID of
+ # all zeros, which we can't look up with
+ # cmt_metadata
+ if ($1 !~ /^0+$/) {
+ (undef, $rev, undef) =
+ ::cmt_metadata($1);
+ $rev = '0' if (!$rev);
+ } else {
+ $rev = '0';
+ }
+ $rev = sprintf('%-10s', $rev);
+ $line =~ s/^\^?[[:xdigit:]]+(\s)/$rev$1/;
+ }
+ print $line;
+ }
+ } else {
+ ($fh, $ctx) = command_output_pipe('blame', '-p', @_, 'HEAD',
+ '--', $path);
+ my ($sha1);
+ my %authors;
+ my @buffer;
+ my %dsha; # distinct sha1 keys
+
+ while (my $line = <$fh>) {
+ push @buffer, $line;
+ if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
+ $dsha{$1} = 1;
+ }
+ }
+
+ my $s2r = ::cmt_sha2rev_batch([keys %dsha]);
+
+ foreach my $line (@buffer) {
+ if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
+ $rev = $s2r->{$1};
+ $rev = '0' if (!$rev)
+ }
+ elsif ($line =~ /^author (.*)/) {
+ $authors{$rev} = $1;
+ $authors{$rev} =~ s/\s/_/g;
+ }
+ elsif ($line =~ /^\t(.*)$/) {
+ printf("%6s %10s %s\n", $rev, $authors{$rev}, $1);
+ }
+ }
+ }
+ command_close_pipe($fh, $ctx);
+}
+
+1;
diff --git a/perl/Git/SVN/Migration.pm b/perl/Git/SVN/Migration.pm
new file mode 100644
index 0000000..75d7429
--- /dev/null
+++ b/perl/Git/SVN/Migration.pm
@@ -0,0 +1,258 @@
+package Git::SVN::Migration;
+# These version numbers do NOT correspond to actual version numbers
+# of git or git-svn; they are only relative to one another.
+#
+# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD
+#
+# v1 layout: .git/$id/info/url, refs/remotes/$id
+#
+# v2 layout: .git/svn/$id/info/url, refs/remotes/$id
+#
+# v3 layout: .git/svn/$id, refs/remotes/$id
+# - info/url may remain for backwards compatibility
+# - this is the layout we migrate up to automatically,
+# - this will be used by git svn init on single branches
+# v3.1 layout (auto migrated):
+# - .rev_db => .rev_db.$UUID, .rev_db will remain as a symlink
+# for backwards compatibility
+#
+# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id
+# - this is only created for newly multi-init-ed
+# repositories. Similar in spirit to the
+# --use-separate-remotes option in git-clone (now default)
+# - we do not automatically migrate to this (following
+# the example set by core git)
+#
+# v5 layout: .rev_db.$UUID => .rev_map.$UUID
+# - newer, more-efficient format that uses 24 bytes per record
+# with no filler space.
+# - use xxd -c24 < .rev_map.$UUID to view and debug
+# - This is a one-way migration; repositories updated to the
+# new format will not be able to use old git-svn without
+# rebuilding the .rev_db. Rebuilding the rev_db is not
+# possible if noMetadata or useSvmProps is set, but that
+# should be no problem for users who use the (sensible) defaults.
+use strict;
+use warnings;
+use Carp qw/croak/;
+use File::Path qw/mkpath/;
+use File::Basename qw/dirname basename/;
+
+our $_minimize;
+use Git qw(
+ command
+ command_noisy
+ command_output_pipe
+ command_close_pipe
+);
+
+sub migrate_from_v0 {
+ my $git_dir = $ENV{GIT_DIR};
+ return undef unless -d $git_dir;
+ my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
+ my $migrated = 0;
+ while (<$fh>) {
+ chomp;
+ my ($id, $orig_ref) = ($_, $_);
+ next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#;
+ next unless -f "$git_dir/$id/info/url";
+ my $new_ref = "refs/remotes/$id";
+ if (::verify_ref("$new_ref^0")) {
+ print STDERR "W: $orig_ref is probably an old ",
+ "branch used by an ancient version of ",
+ "git-svn.\n",
+ "However, $new_ref also exists.\n",
+ "We will not be able ",
+ "to use this branch until this ",
+ "ambiguity is resolved.\n";
+ next;
+ }
+ print STDERR "Migrating from v0 layout...\n" if !$migrated;
+ print STDERR "Renaming ref: $orig_ref => $new_ref\n";
+ command_noisy('update-ref', $new_ref, $orig_ref);
+ command_noisy('update-ref', '-d', $orig_ref, $orig_ref);
+ $migrated++;
+ }
+ command_close_pipe($fh, $ctx);
+ print STDERR "Done migrating from v0 layout...\n" if $migrated;
+ $migrated;
+}
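+
+# e.g. a hypothetical v0 ref "refs/heads/git-svn-HEAD" whose
+# .git/git-svn/info/url file exists is renamed to
+# "refs/remotes/git-svn".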
+
+sub migrate_from_v1 {
+ my $git_dir = $ENV{GIT_DIR};
+ my $migrated = 0;
+ return $migrated unless -d $git_dir;
+ my $svn_dir = "$git_dir/svn";
+
+ # just in case somebody used 'svn' as their $id at some point...
+ return $migrated if -d $svn_dir && ! -f "$svn_dir/info/url";
+
+ print STDERR "Migrating from a git-svn v1 layout...\n";
+ mkpath([$svn_dir]);
+ print STDERR "Data from a previous version of git-svn exists, but\n\t",
+ "$svn_dir\n\t(required for this version ",
+ "($::VERSION) of git-svn) does not exist.\n";
+ my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
+ while (<$fh>) {
+ my $x = $_;
+ next unless $x =~ s#^refs/remotes/##;
+ chomp $x;
+ next unless -f "$git_dir/$x/info/url";
+ my $u = eval { ::file_to_s("$git_dir/$x/info/url") };
+ next unless $u;
+ my $dn = dirname("$git_dir/svn/$x");
+ mkpath([$dn]) unless -d $dn;
+ if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID:
+ mkpath(["$git_dir/svn/svn"]);
+ print STDERR " - $git_dir/$x/info => ",
+ "$git_dir/svn/$x/info\n";
+ rename "$git_dir/$x/info", "$git_dir/svn/$x/info" or
+ croak "$!: $x";
+ # don't worry too much about these, they probably
+ # don't exist with repos this old (save for index,
+ # and we can easily regenerate that)
+ foreach my $f (qw/unhandled.log index .rev_db/) {
+ rename "$git_dir/$x/$f", "$git_dir/svn/$x/$f";
+ }
+ } else {
+ print STDERR " - $git_dir/$x => $git_dir/svn/$x\n";
+ rename "$git_dir/$x", "$git_dir/svn/$x" or
+ croak "$!: $x";
+ }
+ $migrated++;
+ }
+ command_close_pipe($fh, $ctx);
+ print STDERR "Done migrating from a git-svn v1 layout\n";
+ $migrated;
+}
+
+sub read_old_urls {
+ my ($l_map, $pfx, $path) = @_;
+ my @dir;
+ foreach (<$path/*>) {
+ if (-r "$_/info/url") {
+ $pfx .= '/' if $pfx && $pfx !~ m!/$!;
+ my $ref_id = $pfx . basename $_;
+ my $url = ::file_to_s("$_/info/url");
+ $l_map->{$ref_id} = $url;
+ } elsif (-d $_) {
+ push @dir, $_;
+ }
+ }
+ foreach (@dir) {
+ my $x = $_;
+ $x =~ s!^\Q$ENV{GIT_DIR}\E/svn/!!o;
+ read_old_urls($l_map, $x, $_);
+ }
+}
+
+sub migrate_from_v2 {
+ my @cfg = command(qw/config -l/);
+ return if grep /^svn-remote\..+\.url=/, @cfg;
+ my %l_map;
+ read_old_urls(\%l_map, '', "$ENV{GIT_DIR}/svn");
+ my $migrated = 0;
+
+ require Git::SVN;
+ foreach my $ref_id (sort keys %l_map) {
+ eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) };
+ if ($@) {
+ Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id);
+ }
+ $migrated++;
+ }
+ $migrated;
+}
+
+sub minimize_connections {
+ require Git::SVN;
+ require Git::SVN::Ra;
+
+ my $r = Git::SVN::read_all_remotes();
+ my $new_urls = {};
+ my $root_repos = {};
+ foreach my $repo_id (keys %$r) {
+ my $url = $r->{$repo_id}->{url} or next;
+ my $fetch = $r->{$repo_id}->{fetch} or next;
+ my $ra = Git::SVN::Ra->new($url);
+
+ # skip existing cases where we already connect to the root
+ if (($ra->{url} eq $ra->{repos_root}) ||
+ ($ra->{repos_root} eq $repo_id)) {
+ $root_repos->{$ra->{url}} = $repo_id;
+ next;
+ }
+
+ my $root_ra = Git::SVN::Ra->new($ra->{repos_root});
+ my $root_path = $ra->{url};
+ $root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##;
+ foreach my $path (keys %$fetch) {
+ my $ref_id = $fetch->{$path};
+ my $gs = Git::SVN->new($ref_id, $repo_id, $path);
+
+ # make sure we can read when connecting to
+ # a higher level of a repository
+ my ($last_rev, undef) = $gs->last_rev_commit;
+ if (!defined $last_rev) {
+ $last_rev = eval {
+ $root_ra->get_latest_revnum;
+ };
+ next if $@;
+ }
+ my $new = $root_path;
+ $new .= length $path ? "/$path" : '';
+ eval {
+ $root_ra->get_log([$new], $last_rev, $last_rev,
+ 0, 0, 1, sub { });
+ };
+ next if $@;
+ $new_urls->{$ra->{repos_root}}->{$new} =
+ { ref_id => $ref_id,
+ old_repo_id => $repo_id,
+ old_path => $path };
+ }
+ }
+
+ my @emptied;
+ foreach my $url (keys %$new_urls) {
+ # see if we can re-use an existing [svn-remote "repo_id"]
+ # instead of creating a(n ugly) new section:
+ my $repo_id = $root_repos->{$url} || $url;
+
+ my $fetch = $new_urls->{$url};
+ foreach my $path (keys %$fetch) {
+ my $x = $fetch->{$path};
+ Git::SVN->init($url, $path, $repo_id, $x->{ref_id});
+ my $pfx = "svn-remote.$x->{old_repo_id}";
+
+ my $old_fetch = quotemeta("$x->{old_path}:".
+ "$x->{ref_id}");
+ command_noisy(qw/config --unset/,
+ "$pfx.fetch", '^'. $old_fetch . '$');
+ delete $r->{$x->{old_repo_id}}->
+ {fetch}->{$x->{old_path}};
+ if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) {
+ command_noisy(qw/config --unset/,
+ "$pfx.url");
+ push @emptied, $x->{old_repo_id}
+ }
+ }
+ }
+ if (@emptied) {
+ my $file = $ENV{GIT_CONFIG} || "$ENV{GIT_DIR}/config";
+ print STDERR <<EOF;
+The following [svn-remote] sections in your config file ($file) are empty
+and can be safely removed:
+EOF
+ print STDERR "[svn-remote \"$_\"]\n" foreach @emptied;
+ }
+}
+
+sub migration_check {
+ migrate_from_v0();
+ migrate_from_v1();
+ migrate_from_v2();
+ minimize_connections() if $_minimize;
+}
+
+1;
diff --git a/perl/Git/SVN/Utils.pm b/perl/Git/SVN/Utils.pm
new file mode 100644
index 0000000..496006b
--- /dev/null
+++ b/perl/Git/SVN/Utils.pm
@@ -0,0 +1,59 @@
+package Git::SVN::Utils;
+
+use strict;
+use warnings;
+
+use base qw(Exporter);
+
+our @EXPORT_OK = qw(fatal can_compress);
+
+
+=head1 NAME
+
+Git::SVN::Utils - utility functions used across Git::SVN
+
+=head1 SYNOPSIS
+
+ use Git::SVN::Utils qw(functions to import);
+
+=head1 DESCRIPTION
+
+This module contains functions which are useful across many different
+parts of Git::SVN. Mostly it's a place to put utility functions
+rather than duplicate the code or have classes grabbing at other
+classes.
+
+=head1 FUNCTIONS
+
+No functions are exported by default; import them explicitly on request.
+
+=head3 fatal
+
+ fatal(@message);
+
+Display a message and exit with a fatal error code.
+
+=cut
+
+# Note: not certain why this is in use instead of die. Probably because
+# the exit code of die is 255? Doesn't appear to be used consistently.
+sub fatal (@) { print STDERR "@_\n"; exit 1 }
+
+
+=head3 can_compress
+
+ my $can_compress = can_compress;
+
+Returns true if Compress::Zlib is available, false otherwise.
+
+=cut
+
+my $can_compress;
+sub can_compress {
+ return $can_compress if defined $can_compress;
+
+ return $can_compress = eval { require Compress::Zlib; };
+}
+
+
+1;
diff --git a/perl/Makefile b/perl/Makefile
index fe7a486..15d96fc 100644
--- a/perl/Makefile
+++ b/perl/Makefile
@@ -20,36 +20,57 @@ clean:
$(RM) ppport.h
$(RM) $(makfile)
$(RM) $(makfile).old
+ $(RM) PM.stamp
+
+$(makfile): PM.stamp
ifdef NO_PERL_MAKEMAKER
instdir_SQ = $(subst ','\'',$(prefix)/lib)
modules += Git
modules += Git/I18N
+modules += Git/IndexInfo
+modules += Git/SVN
modules += Git/SVN/Memoize/YAML
modules += Git/SVN/Fetcher
modules += Git/SVN/Editor
+modules += Git/SVN/GlobSpec
+modules += Git/SVN/Log
+modules += Git/SVN/Migration
modules += Git/SVN/Prompt
modules += Git/SVN/Ra
+modules += Git/SVN/Utils
$(makfile): ../GIT-CFLAGS Makefile
echo all: private-Error.pm Git.pm Git/I18N.pm > $@
- echo ' mkdir -p blib/lib/Git/SVN/Memoize' >> $@
set -e; \
for i in $(modules); \
do \
+ if test $$i = $${i%/*}; \
+ then \
+ subdir=; \
+ else \
+ subdir=/$${i%/*}; \
+ fi; \
echo ' $(RM) blib/lib/'$$i'.pm' >> $@; \
+ echo ' mkdir -p blib/lib'$$subdir >> $@; \
echo ' cp '$$i'.pm blib/lib/'$$i'.pm' >> $@; \
done
echo ' $(RM) blib/lib/Error.pm' >> $@
'$(PERL_PATH_SQ)' -MError -e 'exit($$Error::VERSION < 0.15009)' || \
echo ' cp private-Error.pm blib/lib/Error.pm' >> $@
echo install: >> $@
- echo ' mkdir -p "$$(DESTDIR)$(instdir_SQ)/Git/SVN/Memoize"' >> $@
set -e; \
for i in $(modules); \
do \
+ if test $$i = $${i%/*}; \
+ then \
+ subdir=; \
+ else \
+ subdir=/$${i%/*}; \
+ fi; \
echo ' $(RM) "$$(DESTDIR)$(instdir_SQ)/'$$i'.pm"' >> $@; \
+ echo ' mkdir -p "$$(DESTDIR)$(instdir_SQ)'$$subdir'"' >> $@; \
echo ' cp '$$i'.pm "$$(DESTDIR)$(instdir_SQ)/'$$i'.pm"' >> $@; \
done
echo ' $(RM) "$$(DESTDIR)$(instdir_SQ)/Error.pm"' >> $@
diff --git a/perl/Makefile.PL b/perl/Makefile.PL
index b54b04a..3f29ba9 100644
--- a/perl/Makefile.PL
+++ b/perl/Makefile.PL
@@ -2,11 +2,16 @@ use strict;
use warnings;
use ExtUtils::MakeMaker;
use Getopt::Long;
+use File::Find;
+
+# Don't forget to update the perl/Makefile, too.
+# Don't forget to test with NO_PERL_MAKEMAKER=YesPlease
# Sanity: die at first unknown option
Getopt::Long::Configure qw/ pass_through /;
-GetOptions("localedir=s" => \my $localedir);
+my $localedir = '';
+GetOptions("localedir=s" => \$localedir);
sub MY::postamble {
return <<'MAKE_FRAG';
@@ -24,24 +29,22 @@ endif
MAKE_FRAG
}
-# XXX. When editing this list:
-#
-# * Please update perl/Makefile, too.
-# * Don't forget to test with NO_PERL_MAKEMAKER=YesPlease
-my %pm = (
- 'Git.pm' => '$(INST_LIBDIR)/Git.pm',
- 'Git/I18N.pm' => '$(INST_LIBDIR)/Git/I18N.pm',
- 'Git/SVN/Memoize/YAML.pm' => '$(INST_LIBDIR)/Git/SVN/Memoize/YAML.pm',
- 'Git/SVN/Fetcher.pm' => '$(INST_LIBDIR)/Git/SVN/Fetcher.pm',
- 'Git/SVN/Editor.pm' => '$(INST_LIBDIR)/Git/SVN/Editor.pm',
- 'Git/SVN/Prompt.pm' => '$(INST_LIBDIR)/Git/SVN/Prompt.pm',
- 'Git/SVN/Ra.pm' => '$(INST_LIBDIR)/Git/SVN/Ra.pm',
-);
+# Find all the .pm files in "Git/" and Git.pm
+my %pm;
+find sub {
+ return unless /\.pm$/;
+
+ # File::Find sometimes prepends a "./"; strip it.
+ my $pm_path = $File::Find::name;
+ $pm_path =~ s{^\./}{};
+
+ $pm{$pm_path} = '$(INST_LIBDIR)/'.$pm_path;
+}, "Git", "Git.pm";
+
# We come with our own bundled Error.pm. It's not in the set of default
# Perl modules so install it if it's not available on the system yet.
-eval { require Error };
-if ($@ || $Error::VERSION < 0.15009) {
+if ( !eval { require Error } || $Error::VERSION < 0.15009) {
$pm{'private-Error.pm'} = '$(INST_LIBDIR)/Error.pm';
}
diff --git a/pkt-line.c b/pkt-line.c
index 5a04984..eaba15f 100644
--- a/pkt-line.c
+++ b/pkt-line.c
@@ -135,13 +135,19 @@ void packet_buf_write(struct strbuf *buf, const char *fmt, ...)
strbuf_add(buf, buffer, n);
}
-static void safe_read(int fd, void *buffer, unsigned size)
+static int safe_read(int fd, void *buffer, unsigned size, int return_line_fail)
{
ssize_t ret = read_in_full(fd, buffer, size);
if (ret < 0)
die_errno("read error");
- else if (ret < size)
+ else if (ret < size) {
+ if (return_line_fail)
+ return -1;
+
die("The remote end hung up unexpectedly");
+ }
+
+ return ret;
}
static int packet_length(const char *linelen)
@@ -169,12 +175,14 @@ static int packet_length(const char *linelen)
return len;
}
-int packet_read_line(int fd, char *buffer, unsigned size)
+static int packet_read_internal(int fd, char *buffer, unsigned size, int return_line_fail)
{
- int len;
+ int len, ret;
char linelen[4];
- safe_read(fd, linelen, 4);
+ ret = safe_read(fd, linelen, 4, return_line_fail);
+ if (return_line_fail && ret < 0)
+ return ret;
len = packet_length(linelen);
if (len < 0)
die("protocol error: bad line length character: %.4s", linelen);
@@ -185,12 +193,24 @@ int packet_read_line(int fd, char *buffer, unsigned size)
len -= 4;
if (len >= size)
die("protocol error: bad line length %d", len);
- safe_read(fd, buffer, len);
+ ret = safe_read(fd, buffer, len, return_line_fail);
+ if (return_line_fail && ret < 0)
+ return ret;
buffer[len] = 0;
packet_trace(buffer, len, 0);
return len;
}
+int packet_read(int fd, char *buffer, unsigned size)
+{
+ return packet_read_internal(fd, buffer, size, 1);
+}
+
+int packet_read_line(int fd, char *buffer, unsigned size)
+{
+ return packet_read_internal(fd, buffer, size, 0);
+}
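+/*
+ * A sketch of the difference for callers (hypothetical caller code):
+ * packet_read() reports a truncated stream by returning -1 so the
+ * caller can recover, while packet_read_line() keeps the historical
+ * die-on-EOF behaviour:
+ *
+ *	char buf[1000];
+ *	int len = packet_read(fd, buf, sizeof(buf));
+ *	if (len < 0)
+ *		return error("the remote end hung up unexpectedly");
+ */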
+
int packet_get_line(struct strbuf *out,
char **src_buf, size_t *src_len)
{
diff --git a/pkt-line.h b/pkt-line.h
index 1e5dcfe..8cfeb0c 100644
--- a/pkt-line.h
+++ b/pkt-line.h
@@ -13,6 +13,7 @@ void packet_buf_flush(struct strbuf *buf);
void packet_buf_write(struct strbuf *buf, const char *fmt, ...) __attribute__((format (printf, 2, 3)));
int packet_read_line(int fd, char *buffer, unsigned size);
+int packet_read(int fd, char *buffer, unsigned size);
int packet_get_line(struct strbuf *out, char **src_buf, size_t *src_len);
ssize_t safe_write(int, const void *, ssize_t);
diff --git a/po/de.po b/po/de.po
index 70d8418..2739bc0 100644
--- a/po/de.po
+++ b/po/de.po
@@ -5,9 +5,9 @@
#
msgid ""
msgstr ""
-"Project-Id-Version: git 1.7.11\n"
+"Project-Id-Version: git 1.7.12\n"
"Report-Msgid-Bugs-To: Git Mailing List <git@vger.kernel.org>\n"
-"POT-Creation-Date: 2012-06-08 10:20+0800\n"
+"POT-Creation-Date: 2012-08-06 23:47+0800\n"
"PO-Revision-Date: 2012-03-28 18:46+0200\n"
"Last-Translator: Ralf Thielow <ralf.thielow@googlemail.com>\n"
"Language-Team: German\n"
@@ -48,7 +48,7 @@ msgstr "'%s' sieht nicht wie eine v2 Paketdatei aus"
msgid "unrecognized header: %s%s (%d)"
msgstr "nicht erkannter Kopfbereich: %s%s (%d)"
-#: bundle.c:89 builtin/commit.c:696
+#: bundle.c:89 builtin/commit.c:699
#, c-format
msgid "could not open '%s'"
msgstr "Konnte '%s' nicht öffnen"
@@ -57,8 +57,8 @@ msgstr "Konnte '%s' nicht öffnen"
msgid "Repository lacks these prerequisite commits:"
msgstr "Dem Projektarchiv fehlen folgende vorrausgesetzte Versionen:"
-#: bundle.c:164 sequencer.c:550 sequencer.c:982 builtin/log.c:289
-#: builtin/log.c:720 builtin/log.c:1309 builtin/log.c:1528 builtin/merge.c:347
+#: bundle.c:164 sequencer.c:550 sequencer.c:982 builtin/log.c:290
+#: builtin/log.c:726 builtin/log.c:1316 builtin/log.c:1535 builtin/merge.c:347
#: builtin/shortlog.c:181
msgid "revision walk setup failed"
msgstr "Einrichtung des Revisionsgangs fehlgeschlagen"
@@ -71,44 +71,48 @@ msgstr[0] "Das Paket enthält %d Referenz"
msgstr[1] "Das Paket enthält %d Referenzen"
#: bundle.c:192
+msgid "The bundle records a complete history."
+msgstr "Das Paket speichert eine komplette Historie."
+
+#: bundle.c:195
#, c-format
msgid "The bundle requires this ref"
msgid_plural "The bundle requires these %d refs"
msgstr[0] "Das Paket benötigt diese Referenz"
msgstr[1] "Das Paket benötigt diese %d Referenzen"
-#: bundle.c:290
+#: bundle.c:294
msgid "rev-list died"
msgstr "\"rev-list\" abgebrochen"
-#: bundle.c:296 builtin/log.c:1205 builtin/shortlog.c:284
+#: bundle.c:300 builtin/log.c:1212 builtin/shortlog.c:284
#, c-format
msgid "unrecognized argument: %s"
msgstr "nicht erkanntes Argument: %s"
-#: bundle.c:331
+#: bundle.c:335
#, c-format
msgid "ref '%s' is excluded by the rev-list options"
msgstr "Referenz '%s' wird durch \"rev-list\" Optionen ausgeschlossen"
-#: bundle.c:376
+#: bundle.c:380
msgid "Refusing to create empty bundle."
msgstr "Erstellung eines leeren Pakets zurückgewiesen."
-#: bundle.c:394
+#: bundle.c:398
msgid "Could not spawn pack-objects"
msgstr "Konnte Paketobjekte nicht erstellen"
-#: bundle.c:412
+#: bundle.c:416
msgid "pack-objects died"
msgstr "Erstellung der Paketobjekte abgebrochen"
-#: bundle.c:415
+#: bundle.c:419
#, c-format
msgid "cannot create '%s'"
msgstr "kann '%s' nicht erstellen"
-#: bundle.c:437
+#: bundle.c:441
msgid "index-pack died"
msgstr "Erstellung der Paketindexdatei abgebrochen"
@@ -228,8 +232,8 @@ msgstr ""
"%s"
#: diff.c:1400
-msgid " 0 files changed\n"
-msgstr " 0 Dateien geändert\n"
+msgid " 0 files changed"
+msgstr " 0 Dateien geändert"
#: diff.c:1404
#, c-format
@@ -252,7 +256,7 @@ msgid_plural ", %d deletions(-)"
msgstr[0] ", %d Zeile entfernt(-)"
msgstr[1] ", %d Zeilen entfernt(-)"
-#: diff.c:3478
+#: diff.c:3461
#, c-format
msgid ""
"Failed to parse --dirstat/-X option parameter:\n"
@@ -288,16 +292,16 @@ msgstr "'%s': %s"
msgid "'%s': short read %s"
msgstr "'%s': read() zu kurz %s"
-#: help.c:207
+#: help.c:212
#, c-format
msgid "available git commands in '%s'"
msgstr "Vorhandene Git-Kommandos in '%s'"
-#: help.c:214
+#: help.c:219
msgid "git commands available from elsewhere on your $PATH"
msgstr "Vorhandene Git-Kommandos irgendwo in deinem $PATH"
-#: help.c:270
+#: help.c:275
#, c-format
msgid ""
"'%s' appears to be a git command, but we were not\n"
@@ -306,11 +310,11 @@ msgstr ""
"'%s' scheint ein git-Kommando zu sein, konnte aber\n"
"nicht ausgeführt werden. Vielleicht ist git-%s fehlerhaft?"
-#: help.c:327
+#: help.c:332
msgid "Uh oh. Your system reports no Git commands at all."
msgstr "Uh oh. Keine Git-Kommandos auf deinem System vorhanden."
-#: help.c:349
+#: help.c:354
#, c-format
msgid ""
"WARNING: You called a Git command named '%s', which does not exist.\n"
@@ -319,17 +323,17 @@ msgstr ""
"Warnung: Du hast das nicht existierende Git-Kommando '%s' ausgeführt.\n"
"Setze fort unter der Annahme das du '%s' gemeint hast"
-#: help.c:354
+#: help.c:359
#, c-format
msgid "in %0.1f seconds automatically..."
msgstr "automatisch in %0.1f Sekunden..."
-#: help.c:361
+#: help.c:366
#, c-format
msgid "git: '%s' is not a git command. See 'git --help'."
msgstr "git: '%s' ist kein Git-Kommando. Siehe 'git --help'."
-#: help.c:365
+#: help.c:370
msgid ""
"\n"
"Did you mean this?"
@@ -343,35 +347,301 @@ msgstr[1] ""
"\n"
"Hast du eines von diesen gemeint?"
-#: parse-options.c:493
+#: merge-recursive.c:190
+#, c-format
+msgid "(bad commit)\n"
+msgstr "(ungültige Version)\n"
+
+#: merge-recursive.c:206
+#, c-format
+msgid "addinfo_cache failed for path '%s'"
+msgstr "addinfo_cache für Pfad '%s' fehlgeschlagen"
+
+#: merge-recursive.c:268
+msgid "error building trees"
+msgstr "Fehler beim Erstellen der Bäume"
+
+#: merge-recursive.c:497
+msgid "diff setup failed"
+msgstr "diff_setup_done fehlgeschlagen"
+
+#: merge-recursive.c:627
+msgid "merge-recursive: disk full?"
+msgstr "merge-recursive: Festplatte voll?"
+
+#: merge-recursive.c:690
+#, c-format
+msgid "failed to create path '%s'%s"
+msgstr "Fehler beim Erstellen des Pfades '%s'%s"
+
+#: merge-recursive.c:701
+#, c-format
+msgid "Removing %s to make room for subdirectory\n"
+msgstr "Entferne %s um Platz für Unterverzeichnis zu schaffen\n"
+
+#. something else exists
+#. .. but not some other error (who really cares what?)
+#: merge-recursive.c:715 merge-recursive.c:736
+msgid ": perhaps a D/F conflict?"
+msgstr ": vielleicht ein Verzeichnis/Datei-Konflikt?"
+
+#: merge-recursive.c:726
+#, c-format
+msgid "refusing to lose untracked file at '%s'"
+msgstr "verweigere, da unbeobachtete Dateien in '%s' verloren gehen würden"
+
+#: merge-recursive.c:766
+#, c-format
+msgid "cannot read object %s '%s'"
+msgstr "kann Objekt %s '%s' nicht lesen"
+
+#: merge-recursive.c:768
+#, c-format
+msgid "blob expected for %s '%s'"
+msgstr "Blob erwartet für %s '%s'"
+
+#: merge-recursive.c:791 builtin/clone.c:302
+#, c-format
+msgid "failed to open '%s'"
+msgstr "Fehler beim Öffnen von '%s'"
+
+#: merge-recursive.c:799
+#, c-format
+msgid "failed to symlink '%s'"
+msgstr "Fehler beim Erstellen einer symbolischen Verknüpfung für '%s'"
+
+#: merge-recursive.c:802
+#, c-format
+msgid "do not know what to do with %06o %s '%s'"
+msgstr "weiß nicht was mit %06o %s '%s' zu machen ist"
+
+#: merge-recursive.c:939
+msgid "Failed to execute internal merge"
+msgstr "Fehler bei Ausführung der internen Zusammenführung"
+
+#: merge-recursive.c:943
+#, c-format
+msgid "Unable to add %s to database"
+msgstr "Konnte %s nicht zur Datenbank hinzufügen"
+
+#: merge-recursive.c:959
+msgid "unsupported object type in the tree"
+msgstr "nicht unterstützter Objekttyp im Baum"
+
+#: merge-recursive.c:1038 merge-recursive.c:1052
+#, c-format
+msgid ""
+"CONFLICT (%s/delete): %s deleted in %s and %s in %s. Version %s of %s left "
+"in tree."
+msgstr ""
+"KONFLIKT (%s/löschen): %s gelöscht in %s und %s in %s. Stand %s von %s wurde "
+"im Arbeitsbereich gelassen."
+
+#: merge-recursive.c:1044 merge-recursive.c:1057
+#, c-format
+msgid ""
+"CONFLICT (%s/delete): %s deleted in %s and %s in %s. Version %s of %s left "
+"in tree at %s."
+msgstr ""
+"KONFLIKT (%s/löschen): %s gelöscht in %s und %s in %s. Stand %s von %s wurde "
+"im Arbeitsbereich bei %s gelassen."
+
+#: merge-recursive.c:1098
+msgid "rename"
+msgstr "umbenennen"
+
+#: merge-recursive.c:1098
+msgid "renamed"
+msgstr "umbenannt"
+
+#: merge-recursive.c:1154
+#, c-format
+msgid "%s is a directory in %s adding as %s instead"
+msgstr "%s ist ein Verzeichnis in %s, füge es stattdessen als %s hinzu"
+
+#: merge-recursive.c:1176
+#, c-format
+msgid ""
+"CONFLICT (rename/rename): Rename \"%s\"->\"%s\" in branch \"%s\" rename \"%s"
+"\"->\"%s\" in \"%s\"%s"
+msgstr ""
+"KONFLIKT (umbenennen/umbenennen): Benenne um \"%s\"->\"%s\" in Zweig \"%s\" "
+"und \"%s\"->\"%s\" in Zweig \"%s\"%s"
+
+#: merge-recursive.c:1181
+msgid " (left unresolved)"
+msgstr " (bleibt unaufgelöst)"
+
+#: merge-recursive.c:1235
+#, c-format
+msgid "CONFLICT (rename/rename): Rename %s->%s in %s. Rename %s->%s in %s"
+msgstr ""
+"KONFLIKT (umbenennen/umbenennen): Benenne um %s->%s in %s. Benenne um %s->%s "
+"in %s"
+
+#: merge-recursive.c:1265
+#, c-format
+msgid "Renaming %s to %s and %s to %s instead"
+msgstr "Benenne stattdessen %s nach %s und %s nach %s um"
+
+#: merge-recursive.c:1464
+#, c-format
+msgid "CONFLICT (rename/add): Rename %s->%s in %s. %s added in %s"
+msgstr ""
+"KONFLIKT (umbenennen/hinzufügen): Benenne um %s->%s in %s. %s hinzugefügt in "
+"%s"
+
+#: merge-recursive.c:1474
+#, c-format
+msgid "Adding merged %s"
+msgstr "Füge zusammengeführte Datei %s hinzu"
+
+#: merge-recursive.c:1479 merge-recursive.c:1677
+#, c-format
+msgid "Adding as %s instead"
+msgstr "Füge stattdessen als %s hinzu"
+
+#: merge-recursive.c:1530
+#, c-format
+msgid "cannot read object %s"
+msgstr "kann Objekt %s nicht lesen"
+
+#: merge-recursive.c:1533
+#, c-format
+msgid "object %s is not a blob"
+msgstr "Objekt %s ist kein Blob"
+
+#: merge-recursive.c:1581
+msgid "modify"
+msgstr "ändern"
+
+#: merge-recursive.c:1581
+msgid "modified"
+msgstr "geändert"
+
+#: merge-recursive.c:1591
+msgid "content"
+msgstr "Inhalt"
+
+#: merge-recursive.c:1598
+msgid "add/add"
+msgstr "hinzufügen/hinzufügen"
+
+#: merge-recursive.c:1632
+#, c-format
+msgid "Skipped %s (merged same as existing)"
+msgstr "%s ausgelassen (Ergebnis der Zusammenführung existiert bereits)"
+
+#: merge-recursive.c:1646
+#, c-format
+msgid "Auto-merging %s"
+msgstr "automatische Zusammenführung von %s"
+
+#: merge-recursive.c:1650 git-submodule.sh:844
+msgid "submodule"
+msgstr "Unterprojekt"
+
+#: merge-recursive.c:1651
+#, c-format
+msgid "CONFLICT (%s): Merge conflict in %s"
+msgstr "KONFLIKT (%s): Zusammenführungskonflikt in %s"
+
+#: merge-recursive.c:1741
+#, c-format
+msgid "Removing %s"
+msgstr "Entferne %s"
+
+#: merge-recursive.c:1766
+msgid "file/directory"
+msgstr "Datei/Verzeichnis"
+
+#: merge-recursive.c:1772
+msgid "directory/file"
+msgstr "Verzeichnis/Datei"
+
+#: merge-recursive.c:1777
+#, c-format
+msgid "CONFLICT (%s): There is a directory with name %s in %s. Adding %s as %s"
+msgstr ""
+"KONFLIKT (%s): Es existiert bereits ein Verzeichnis %s in %s. Füge %s als %s "
+"hinzu."
+
+#: merge-recursive.c:1787
+#, c-format
+msgid "Adding %s"
+msgstr "Füge %s hinzu"
+
+#: merge-recursive.c:1804
+msgid "Fatal merge failure, shouldn't happen."
+msgstr "Fataler Fehler bei der Zusammenführung. Sollte nicht passieren."
+
+#: merge-recursive.c:1823
+msgid "Already up-to-date!"
+msgstr "Bereits aktuell!"
+
+#: merge-recursive.c:1832
+#, c-format
+msgid "merging of trees %s and %s failed"
+msgstr "Zusammenführen der Bäume %s und %s fehlgeschlagen"
+
+#: merge-recursive.c:1862
+#, c-format
+msgid "Unprocessed path??? %s"
+msgstr "unverarbeiteter Pfad??? %s"
+
+#: merge-recursive.c:1907
+msgid "Merging:"
+msgstr "Zusammenführung:"
+
+#: merge-recursive.c:1920
+#, c-format
+msgid "found %u common ancestor:"
+msgid_plural "found %u common ancestors:"
+msgstr[0] "%u gemeinsamen Vorfahren gefunden"
+msgstr[1] "%u gemeinsame Vorfahren gefunden"
+
+#: merge-recursive.c:1957
+msgid "merge returned no commit"
+msgstr "Zusammenführung hat keine Version zurückgegeben"
+
+#: merge-recursive.c:2014
+#, c-format
+msgid "Could not parse object '%s'"
+msgstr "Konnte Objekt '%s' nicht parsen."
+
+#: merge-recursive.c:2026 builtin/merge.c:697
+msgid "Unable to write index."
+msgstr "Konnte Bereitstellung nicht schreiben."
+
+#: parse-options.c:494
msgid "..."
msgstr "..."
-#: parse-options.c:511
+#: parse-options.c:512
#, c-format
msgid "usage: %s"
msgstr "Verwendung: %s"
#. TRANSLATORS: the colon here should align with the
#. one in "usage: %s" translation
-#: parse-options.c:515
+#: parse-options.c:516
#, c-format
msgid " or: %s"
msgstr " oder: %s"
-#: parse-options.c:518
+#: parse-options.c:519
#, c-format
msgid " %s"
msgstr " %s"
-#: remote.c:1629
+#: remote.c:1632
#, c-format
msgid "Your branch is ahead of '%s' by %d commit.\n"
msgid_plural "Your branch is ahead of '%s' by %d commits.\n"
msgstr[0] "Dein Zweig ist vor '%s' um %d Version.\n"
msgstr[1] "Dein Zweig ist vor '%s' um %d Versionen.\n"
-#: remote.c:1635
+#: remote.c:1638
#, c-format
msgid "Your branch is behind '%s' by %d commit, and can be fast-forwarded.\n"
msgid_plural ""
@@ -382,7 +652,7 @@ msgstr[1] ""
"Dein Zweig ist zu '%s' um %d Versionen hinterher, und kann vorgespult "
"werden.\n"
-#: remote.c:1643
+#: remote.c:1646
#, c-format
msgid ""
"Your branch and '%s' have diverged,\n"
@@ -610,7 +880,7 @@ msgstr "kann Zweigspitze (HEAD) nicht auflösen"
msgid "cannot abort from a branch yet to be born"
msgstr "kann nicht abbrechen: bin auf einem Zweig, der noch geboren wird"
-#: sequencer.c:805 builtin/apply.c:3697
+#: sequencer.c:805 builtin/apply.c:3988
#, c-format
msgid "cannot open %s: %s"
msgstr "Kann %s nicht öffnen: %s"
@@ -644,21 +914,21 @@ msgstr "Kann nicht zu initialer Version zurücksetzen."
msgid "Can't cherry-pick into empty head"
msgstr "Kann \"cherry-pick\" nicht in einem leerem Kopf ausführen."
-#: sha1_name.c:864
+#: sha1_name.c:1044
msgid "HEAD does not point to a branch"
msgstr "Zweigspitze (HEAD) zeigt auf keinen Zweig"
-#: sha1_name.c:867
+#: sha1_name.c:1047
#, c-format
msgid "No such branch: '%s'"
msgstr "Kein solcher Zweig '%s'"
-#: sha1_name.c:869
+#: sha1_name.c:1049
#, c-format
msgid "No upstream configured for branch '%s'"
msgstr "Kein entferntes Projektarchiv für Zweig '%s' konfiguriert."
-#: sha1_name.c:872
+#: sha1_name.c:1052
#, c-format
msgid "Upstream branch '%s' not stored as a remote-tracking branch"
msgstr ""
@@ -673,243 +943,353 @@ msgstr "konnte aktuellen Benutzer nicht in Passwort-Datei finden: %s"
msgid "no such user"
msgstr "kein solcher Benutzer"
-#: wt-status.c:135
+#: wt-status.c:140
msgid "Unmerged paths:"
msgstr "Nicht zusammengeführte Pfade:"
-#: wt-status.c:141 wt-status.c:158
+#: wt-status.c:167 wt-status.c:194
#, c-format
msgid " (use \"git reset %s <file>...\" to unstage)"
msgstr ""
" (benutze \"git reset %s <Datei>...\" zum Herausnehmen aus der "
"Bereitstellung)"
-#: wt-status.c:143 wt-status.c:160
+#: wt-status.c:169 wt-status.c:196
msgid " (use \"git rm --cached <file>...\" to unstage)"
msgstr ""
" (benutze \"git rm --cached <Datei>...\" zum Herausnehmen aus der "
"Bereitstellung)"
-#: wt-status.c:144
+#: wt-status.c:173
+msgid " (use \"git add <file>...\" to mark resolution)"
+msgstr " (benutze \"git add/rm <Datei>...\" um die Auflösung zu markieren)"
+
+#: wt-status.c:175 wt-status.c:179
msgid " (use \"git add/rm <file>...\" as appropriate to mark resolution)"
msgstr ""
" (benutze \"git add/rm <Datei>...\" um die Auflösung entsprechend zu "
"markieren)"
-#: wt-status.c:152
+#: wt-status.c:177
+msgid " (use \"git rm <file>...\" to mark resolution)"
+msgstr " (benutze \"git add/rm <Datei>...\" um die Auflösung zu markieren)"
+
+#: wt-status.c:188
msgid "Changes to be committed:"
msgstr "zum Eintragen bereitgestellte Änderungen:"
-#: wt-status.c:170
+#: wt-status.c:206
msgid "Changes not staged for commit:"
msgstr "Änderungen, die nicht zum Eintragen bereitgestellt sind:"
-#: wt-status.c:174
+#: wt-status.c:210
msgid " (use \"git add <file>...\" to update what will be committed)"
msgstr " (benutze \"git add <Datei>...\" zum Bereitstellen)"
-#: wt-status.c:176
+#: wt-status.c:212
msgid " (use \"git add/rm <file>...\" to update what will be committed)"
msgstr " (benutze \"git add/rm <Datei>...\" zum Bereitstellen)"
-#: wt-status.c:177
+#: wt-status.c:213
msgid ""
" (use \"git checkout -- <file>...\" to discard changes in working directory)"
msgstr ""
" (benutze \"git checkout -- <Datei>...\" um die Änderungen im "
"Arbeitsverzeichnis zu verwerfen)"
-#: wt-status.c:179
+#: wt-status.c:215
msgid " (commit or discard the untracked or modified content in submodules)"
msgstr ""
" (trage ein oder verwerfe den unbeobachteten oder geänderten Inhalt in den "
"Unterprojekten)"
-#: wt-status.c:188
+#: wt-status.c:224
#, c-format
msgid "%s files:"
msgstr "%s Dateien:"
-#: wt-status.c:191
+#: wt-status.c:227
#, c-format
msgid " (use \"git %s <file>...\" to include in what will be committed)"
msgstr " (benutze \"git %s <Datei>...\" zum Einfügen in die Eintragung)"
-#: wt-status.c:208
+#: wt-status.c:244
msgid "bug"
msgstr "Fehler"
-#: wt-status.c:213
+#: wt-status.c:249
msgid "both deleted:"
msgstr "beide gelöscht:"
-#: wt-status.c:214
+#: wt-status.c:250
msgid "added by us:"
msgstr "von uns hinzugefügt:"
-#: wt-status.c:215
+#: wt-status.c:251
msgid "deleted by them:"
msgstr "von denen gelöscht:"
-#: wt-status.c:216
+#: wt-status.c:252
msgid "added by them:"
msgstr "von denen hinzugefügt:"
-#: wt-status.c:217
+#: wt-status.c:253
msgid "deleted by us:"
msgstr "von uns gelöscht:"
-#: wt-status.c:218
+#: wt-status.c:254
msgid "both added:"
msgstr "von beiden hinzugefügt:"
-#: wt-status.c:219
+#: wt-status.c:255
msgid "both modified:"
msgstr "von beiden geändert:"
-#: wt-status.c:249
+#: wt-status.c:285
msgid "new commits, "
msgstr "neue Versionen, "
-#: wt-status.c:251
+#: wt-status.c:287
msgid "modified content, "
msgstr "geänderter Inhalt, "
-#: wt-status.c:253
+#: wt-status.c:289
msgid "untracked content, "
msgstr "unbeobachteter Inhalt, "
-#: wt-status.c:267
+#: wt-status.c:303
#, c-format
msgid "new file: %s"
msgstr "neue Datei: %s"
-#: wt-status.c:270
+#: wt-status.c:306
#, c-format
msgid "copied: %s -> %s"
msgstr "kopiert: %s -> %s"
-#: wt-status.c:273
+#: wt-status.c:309
#, c-format
msgid "deleted: %s"
msgstr "gelöscht: %s"
-#: wt-status.c:276
+#: wt-status.c:312
#, c-format
msgid "modified: %s"
msgstr "geändert: %s"
-#: wt-status.c:279
+#: wt-status.c:315
#, c-format
msgid "renamed: %s -> %s"
msgstr "umbenannt: %s -> %s"
-#: wt-status.c:282
+#: wt-status.c:318
#, c-format
msgid "typechange: %s"
msgstr "Typänderung: %s"
-#: wt-status.c:285
+#: wt-status.c:321
#, c-format
msgid "unknown: %s"
msgstr "unbekannt: %s"
-#: wt-status.c:288
+#: wt-status.c:324
#, c-format
msgid "unmerged: %s"
msgstr "nicht zusammengeführt: %s"
-#: wt-status.c:291
+#: wt-status.c:327
#, c-format
msgid "bug: unhandled diff status %c"
msgstr "Fehler: unbehandelter Differenz-Status %c"
-#: wt-status.c:737
+#: wt-status.c:785
+msgid "You have unmerged paths."
+msgstr "Du hast nicht zusammengeführte Pfade."
+
+#: wt-status.c:788 wt-status.c:912
+msgid " (fix conflicts and run \"git commit\")"
+msgstr " (behebe die Konflikte und führe \"git commit\" aus)"
+
+#: wt-status.c:791
+msgid "All conflicts fixed but you are still merging."
+msgstr ""
+"Alle Konflikte sind behoben, aber du bist immer noch beim Zusammenführen."
+
+#: wt-status.c:794
+msgid " (use \"git commit\" to conclude merge)"
+msgstr " (benutze \"git commit\" um die Zusammenführung abzuschließen)"
+
+#: wt-status.c:804
+msgid "You are in the middle of an am session."
+msgstr "Eine \"am\"-Sitzung ist im Gange."
+
+#: wt-status.c:807
+msgid "The current patch is empty."
+msgstr "Der aktuelle Patch ist leer."
+
+#: wt-status.c:811
+msgid " (fix conflicts and then run \"git am --resolved\")"
+msgstr " (behebe die Konflikte und führe dann \"git am --resolved\" aus)"
+
+#: wt-status.c:813
+msgid " (use \"git am --skip\" to skip this patch)"
+msgstr " (benutze \"git am --skip\" um diesen Patch auszulassen)"
+
+#: wt-status.c:815
+msgid " (use \"git am --abort\" to restore the original branch)"
+msgstr ""
+" (benutze \"git am --abort\" um den ursprünglichen Zweig wiederherzustellen)"
+
+#: wt-status.c:873 wt-status.c:883
+msgid "You are currently rebasing."
+msgstr "Du bist gerade beim Neuaufbau."
+
+#: wt-status.c:876
+msgid " (fix conflicts and then run \"git rebase --continue\")"
+msgstr " (behebe die Konflikte und führe dann \"git rebase --continue\" aus)"
+
+#: wt-status.c:878
+msgid " (use \"git rebase --skip\" to skip this patch)"
+msgstr " (benutze \"git rebase --skip\" um diesen Patch auszulassen)"
+
+#: wt-status.c:880
+msgid " (use \"git rebase --abort\" to check out the original branch)"
+msgstr ""
+" (benutze \"git rebase --abort\" um den ursprünglichen Zweig auszuchecken)"
+
+#: wt-status.c:886
+msgid " (all conflicts fixed: run \"git rebase --continue\")"
+msgstr " (alle Konflikte behoben: führe \"git rebase --continue\" aus)"
+
+#: wt-status.c:888
+msgid "You are currently splitting a commit during a rebase."
+msgstr "Du teilst gerade eine Version während eines Neuaufbaus auf."
+
+#: wt-status.c:891
+msgid " (Once your working directory is clean, run \"git rebase --continue\")"
+msgstr ""
+" (Sobald dein Arbeitsverzeichnis sauber ist, führe \"git rebase --continue"
+"\" aus)"
+
+#: wt-status.c:893
+msgid "You are currently editing a commit during a rebase."
+msgstr "Du editierst gerade eine Version während eines Neuaufbaus."
+
+#: wt-status.c:896
+msgid " (use \"git commit --amend\" to amend the current commit)"
+msgstr ""
+" (benutze \"git commit --amend\" um die aktuelle Version nachzubessern)"
+
+#: wt-status.c:898
+msgid ""
+" (use \"git rebase --continue\" once you are satisfied with your changes)"
+msgstr ""
+" (benutze \"git rebase --continue\" sobald deine Änderungen abgeschlossen "
+"sind)"
+
+#: wt-status.c:908
+msgid "You are currently cherry-picking."
+msgstr "Du führst gerade \"cherry-pick\" aus."
+
+#: wt-status.c:915
+msgid " (all conflicts fixed: run \"git commit\")"
+msgstr " (alle Konflikte behoben: führe \"git commit\" aus)"
+
+#: wt-status.c:924
+msgid "You are currently bisecting."
+msgstr "Du bist gerade beim Halbieren."
+
+#: wt-status.c:927
+msgid " (use \"git bisect reset\" to get back to the original branch)"
+msgstr ""
+" (benutze \"git bisect reset\" um zum ursprünglichen Zweig zurückzukehren)"
+
+#: wt-status.c:978
msgid "On branch "
msgstr "Auf Zweig "
-#: wt-status.c:744
+#: wt-status.c:985
msgid "Not currently on any branch."
msgstr "Im Moment auf keinem Zweig."
-#: wt-status.c:755
+#: wt-status.c:997
msgid "Initial commit"
msgstr "Initiale Version"
-#: wt-status.c:769
+#: wt-status.c:1011
msgid "Untracked"
msgstr "Unbeobachtete"
-#: wt-status.c:771
+#: wt-status.c:1013
msgid "Ignored"
msgstr "Ignorierte"