author    Jonathan Tan <jonathantanmy@google.com>    2020-06-10 20:57:18 (GMT)
committer Junio C Hamano <gitster@pobox.com>         2020-06-11 01:06:34 (GMT)
commit    8d5d2a34df4f82cd9cce913fa25f3a3c2c07d126 (patch)
tree      5bed1dd7872cbc4bf914e4d38008f602d6654032 /http-fetch.c
parent    8e6adb69e18b18de72f0114d153b47bed4560560 (diff)
http-fetch: support fetching packfiles by URL
Teach http-fetch the ability to download packfiles directly, given a URL, and
to verify them.

The http_pack_request suite has been augmented with a function that takes a
URL directly. With this function, the hash is only used to determine the name
of the temporary file.

Signed-off-by: Jonathan Tan <jonathantanmy@google.com>
Signed-off-by: Junio C Hamano <gitster@pobox.com>
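For context, a minimal sketch of how the new mode might be invoked; the hash
and URL below are placeholders, not values taken from this patch. Since
cmd_main() calls setup_git_directory() before the packfile path, the command
is run from inside a repository:

    # hypothetical hash and packfile URL, for illustration only
    $ cd my-repo
    $ git http-fetch \
          --packfile=1234567890abcdef1234567890abcdef12345678 \
          https://example.com/packs/pack-1234abcd.pack

Per the message above, the hash given to --packfile only determines the name
of the temporary download file; the data itself comes from the URL argument.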
Diffstat (limited to 'http-fetch.c')
-rw-r--r--   http-fetch.c   63
1 file changed, 53 insertions(+), 10 deletions(-)
diff --git a/http-fetch.c b/http-fetch.c
index e538174..1df376e 100644
--- a/http-fetch.c
+++ b/http-fetch.c
@@ -5,7 +5,7 @@
#include "walker.h"
static const char http_fetch_usage[] = "git http-fetch "
-"[-c] [-t] [-a] [-v] [--recover] [-w ref] [--stdin] commit-id url";
+"[-c] [-t] [-a] [-v] [--recover] [-w ref] [--stdin | --packfile=hash | commit-id] url";
static int fetch_using_walker(const char *raw_url, int get_verbosely,
			      int get_recover, int commits, char **commit_id,
@@ -43,6 +43,37 @@ static int fetch_using_walker(const char *raw_url, int get_verbosely,
	return rc;
}
+static void fetch_single_packfile(struct object_id *packfile_hash,
+				  const char *url) {
+	struct http_pack_request *preq;
+	struct slot_results results;
+	int ret;
+
+	http_init(NULL, url, 0);
+
+	preq = new_direct_http_pack_request(packfile_hash->hash, xstrdup(url));
+	if (preq == NULL)
+		die("couldn't create http pack request");
+	preq->slot->results = &results;
+	preq->generate_keep = 1;
+
+	if (start_active_slot(preq->slot)) {
+		run_active_slot(preq->slot);
+		if (results.curl_result != CURLE_OK) {
+			die("Unable to get pack file %s\n%s", preq->url,
+			    curl_errorstr);
+		}
+	} else {
+		die("Unable to start request");
+	}
+
+	if ((ret = finish_http_pack_request(preq)))
+		die("finish_http_pack_request gave result %d", ret);
+
+	release_http_pack_request(preq);
+	http_cleanup();
+}
+
int cmd_main(int argc, const char **argv)
{
	int commits_on_stdin = 0;
@@ -52,8 +83,12 @@ int cmd_main(int argc, const char **argv)
	int arg = 1;
	int get_verbosely = 0;
	int get_recover = 0;
+	int packfile = 0;
+	struct object_id packfile_hash;
	while (arg < argc && argv[arg][0] == '-') {
+		const char *p;
+
		if (argv[arg][1] == 't') {
		} else if (argv[arg][1] == 'c') {
		} else if (argv[arg][1] == 'a') {
@@ -68,25 +103,33 @@ int cmd_main(int argc, const char **argv)
			get_recover = 1;
		} else if (!strcmp(argv[arg], "--stdin")) {
			commits_on_stdin = 1;
+		} else if (skip_prefix(argv[arg], "--packfile=", &p)) {
+			const char *end;
+
+			packfile = 1;
+			if (parse_oid_hex(p, &packfile_hash, &end) || *end)
+				die(_("argument to --packfile must be a valid hash (got '%s')"), p);
		}
		arg++;
	}
-	if (argc != arg + 2 - commits_on_stdin)
+	if (argc != arg + 2 - (commits_on_stdin || packfile))
		usage(http_fetch_usage);
-	if (commits_on_stdin) {
-		commits = walker_targets_stdin(&commit_id, &write_ref);
-	} else {
-		commit_id = (char **) &argv[arg++];
-		commits = 1;
-	}
	setup_git_directory();
	git_config(git_default_config, NULL);
-	if (!argv[arg])
-		BUG("must have one arg remaining");
+	if (packfile) {
+		fetch_single_packfile(&packfile_hash, argv[arg]);
+		return 0;
+	}
+	if (commits_on_stdin) {
+		commits = walker_targets_stdin(&commit_id, &write_ref);
+	} else {
+		commit_id = (char **) &argv[arg++];
+		commits = 1;
+	}
	return fetch_using_walker(argv[arg], get_verbosely, get_recover,
				  commits, commit_id, write_ref,
				  commits_on_stdin);
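
Once fetch_single_packfile() returns, the pack has been downloaded and run
through finish_http_pack_request(). Assuming the indexed pack ends up under
.git/objects/pack (an assumption about where the result lands, not something
this patch spells out), it can be spot-checked afterwards with verify-pack;
the pack name below is a placeholder:

    # hypothetical pack name; substitute whatever pack-*.idx actually appeared
    $ git verify-pack -v .git/objects/pack/pack-1234abcd.idx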