Merge branch 'eb/hash-transition'
Work to support a repository that works with both SHA-1 and SHA-256 hash algorithms has started.

* eb/hash-transition: (30 commits)
  t1016-compatObjectFormat: add tests to verify the conversion between objects
  t1006: test oid compatibility with cat-file
  t1006: rename sha1 to oid
  test-lib: compute the compatibility hash so tests may use it
  builtin/ls-tree: let the oid determine the output algorithm
  object-file: handle compat objects in check_object_signature
  tree-walk: init_tree_desc take an oid to get the hash algorithm
  builtin/cat-file: let the oid determine the output algorithm
  rev-parse: add an --output-object-format parameter
  repository: implement extensions.compatObjectFormat
  object-file: update object_info_extended to reencode objects
  object-file-convert: convert commits that embed signed tags
  object-file-convert: convert commit objects when writing
  object-file-convert: don't leak when converting tag objects
  object-file-convert: convert tag objects when writing
  object-file-convert: add a function to convert trees between algorithms
  object: factor out parse_mode out of fast-import and tree-walk into in object.h
  cache: add a function to read an OID of a specific algorithm
  tag: sign both hashes
  commit: export add_header_signature to support handling signatures on tags
  ...
commit 1002f28a52
62 changed files with 1880 additions and 385 deletions
@ -7,6 +7,18 @@ Note that this setting should only be set by linkgit:git-init[1] or
linkgit:git-clone[1]. Trying to change it after initialization will not
work and will produce hard-to-diagnose issues.

extensions.compatObjectFormat::

	Specify a compatibility hash algorithm to use. The acceptable values
	are `sha1` and `sha256`. The value specified must be different from
	the value of extensions.objectFormat. This allows client-level
	interoperability between git repositories whose objectFormat matches
	this compatObjectFormat: in particular, when fully implemented, it
	allows pushes to and pulls from a repository whose objectFormat
	matches this compatObjectFormat, as well as the use of oids encoded
	in compatObjectFormat, in addition to oids encoded with objectFormat,
	to specify objects locally.

extensions.refStorage::
	Specify the ref storage format to use. The acceptable values are:
+
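For orientation only (not part of the diff; the values are illustrative): with
the new extension, a repository whose storage format is SHA-256 but which also
maintains SHA-1 compatibility would carry a `.git/config` along these lines:

	[core]
		repositoryformatversion = 1
	[extensions]
		objectFormat = sha256
		compatObjectFormat = sha1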
@ -159,6 +159,18 @@ for another option.
	unfortunately named tag "master"), and shows them as full
	refnames (e.g. "refs/heads/master").

--output-object-format=(sha1|sha256|storage)::

	Allow oids to be input from any object format that the current
	repository supports.

	Specifying "sha1" translates if necessary and returns a sha1 oid.

	Specifying "sha256" translates if necessary and returns a sha256 oid.

	Specifying "storage" translates if necessary and returns an oid
	encoded in the storage hash algorithm.

Options for Objects
~~~~~~~~~~~~~~~~~~~
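A usage sketch of the new option (the oids shown are placeholders, not
captured output): in a repository whose storage format is sha256 and whose
extensions.compatObjectFormat is sha1,

	$ git rev-parse HEAD
	<64-hex-character sha256 oid>
	$ git rev-parse --output-object-format=sha1 HEAD
	<40-hex-character sha1 oid of the same commit>

Per the option handling added in builtin/rev-parse.c later in this diff,
passing the storage algorithm's own name (or "storage") is also accepted and
returns the oid unchanged.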
Makefile | 3
@ -797,6 +797,7 @@ TEST_BUILTINS_OBJS += test-config.o
TEST_BUILTINS_OBJS += test-crontab.o
TEST_BUILTINS_OBJS += test-csprng.o
TEST_BUILTINS_OBJS += test-date.o
TEST_BUILTINS_OBJS += test-delete-gpgsig.o
TEST_BUILTINS_OBJS += test-delta.o
TEST_BUILTINS_OBJS += test-dir-iterator.o
TEST_BUILTINS_OBJS += test-drop-caches.o

@ -1060,6 +1061,7 @@ LIB_OBJS += list-objects-filter.o
LIB_OBJS += list-objects.o
LIB_OBJS += lockfile.o
LIB_OBJS += log-tree.o
LIB_OBJS += loose.o
LIB_OBJS += ls-refs.o
LIB_OBJS += mailinfo.o
LIB_OBJS += mailmap.o

@ -1080,6 +1082,7 @@ LIB_OBJS += notes-cache.o
LIB_OBJS += notes-merge.o
LIB_OBJS += notes-utils.o
LIB_OBJS += notes.o
LIB_OBJS += object-file-convert.o
LIB_OBJS += object-file.o
LIB_OBJS += object-name.o
LIB_OBJS += object.o
@ -339,7 +339,8 @@ int write_archive_entries(struct archiver_args *args,
	opts.src_index = args->repo->index;
	opts.dst_index = args->repo->index;
	opts.fn = oneway_merge;
	init_tree_desc(&t, args->tree->buffer, args->tree->size);
	init_tree_desc(&t, &args->tree->object.oid,
		       args->tree->buffer, args->tree->size);
	if (unpack_trees(1, &t, &opts))
		return -1;
	git_attr_set_direction(GIT_ATTR_INDEX);
@ -1994,8 +1994,8 @@ static int fast_forward_to(struct tree *head, struct tree *remote, int reset)
|
|||
opts.reset = reset ? UNPACK_RESET_PROTECT_UNTRACKED : 0;
|
||||
opts.preserve_ignored = 0; /* FIXME: !overwrite_ignore */
|
||||
opts.fn = twoway_merge;
|
||||
init_tree_desc(&t[0], head->buffer, head->size);
|
||||
init_tree_desc(&t[1], remote->buffer, remote->size);
|
||||
init_tree_desc(&t[0], &head->object.oid, head->buffer, head->size);
|
||||
init_tree_desc(&t[1], &remote->object.oid, remote->buffer, remote->size);
|
||||
|
||||
if (unpack_trees(2, t, &opts)) {
|
||||
rollback_lock_file(&lock_file);
|
||||
|
@ -2029,7 +2029,7 @@ static int merge_tree(struct tree *tree)
|
|||
opts.dst_index = &the_index;
|
||||
opts.merge = 1;
|
||||
opts.fn = oneway_merge;
|
||||
init_tree_desc(&t[0], tree->buffer, tree->size);
|
||||
init_tree_desc(&t[0], &tree->object.oid, tree->buffer, tree->size);
|
||||
|
||||
if (unpack_trees(1, t, &opts)) {
|
||||
rollback_lock_file(&lock_file);
|
||||
|
|
|
@ -106,7 +106,10 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
|
|||
struct object_info oi = OBJECT_INFO_INIT;
|
||||
struct strbuf sb = STRBUF_INIT;
|
||||
unsigned flags = OBJECT_INFO_LOOKUP_REPLACE;
|
||||
unsigned get_oid_flags = GET_OID_RECORD_PATH | GET_OID_ONLY_TO_DIE;
|
||||
unsigned get_oid_flags =
|
||||
GET_OID_RECORD_PATH |
|
||||
GET_OID_ONLY_TO_DIE |
|
||||
GET_OID_HASH_ANY;
|
||||
const char *path = force_path;
|
||||
const int opt_cw = (opt == 'c' || opt == 'w');
|
||||
if (!path && opt_cw)
|
||||
|
@ -226,7 +229,8 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
|
|||
die(_("unable to read %s"), oid_to_hex(&oid));
|
||||
|
||||
if (!skip_prefix(buffer, "object ", &target) ||
|
||||
get_oid_hex(target, &blob_oid))
|
||||
get_oid_hex_algop(target, &blob_oid,
|
||||
&hash_algos[oid.algo]))
|
||||
die("%s not a valid tag", oid_to_hex(&oid));
|
||||
free(buffer);
|
||||
} else
|
||||
|
@ -517,7 +521,9 @@ static void batch_one_object(const char *obj_name,
|
|||
struct expand_data *data)
|
||||
{
|
||||
struct object_context ctx;
|
||||
int flags = opt->follow_symlinks ? GET_OID_FOLLOW_SYMLINKS : 0;
|
||||
int flags =
|
||||
GET_OID_HASH_ANY |
|
||||
(opt->follow_symlinks ? GET_OID_FOLLOW_SYMLINKS : 0);
|
||||
enum get_oid_result result;
|
||||
|
||||
result = get_oid_with_context(the_repository, obj_name,
|
||||
|
|
|
@ -706,7 +706,7 @@ static int reset_tree(struct tree *tree, const struct checkout_opts *o,
|
|||
NULL);
|
||||
if (parse_tree(tree) < 0)
|
||||
return 128;
|
||||
init_tree_desc(&tree_desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&tree_desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
switch (unpack_trees(1, &tree_desc, &opts)) {
|
||||
case -2:
|
||||
*writeout_error = 1;
|
||||
|
@ -826,11 +826,13 @@ static int merge_working_tree(const struct checkout_opts *opts,
|
|||
die(_("unable to parse commit %s"),
|
||||
oid_to_hex(old_commit_oid));
|
||||
|
||||
init_tree_desc(&trees[0], tree->buffer, tree->size);
|
||||
init_tree_desc(&trees[0], &tree->object.oid,
|
||||
tree->buffer, tree->size);
|
||||
if (parse_tree(new_tree) < 0)
|
||||
exit(128);
|
||||
tree = new_tree;
|
||||
init_tree_desc(&trees[1], tree->buffer, tree->size);
|
||||
init_tree_desc(&trees[1], &tree->object.oid,
|
||||
tree->buffer, tree->size);
|
||||
|
||||
ret = unpack_trees(2, trees, &topts);
|
||||
clear_unpack_trees_porcelain(&topts);
|
||||
|
|
|
@ -740,7 +740,7 @@ static int checkout(int submodule_progress, int filter_submodules)
|
|||
die(_("unable to parse commit %s"), oid_to_hex(&oid));
|
||||
if (parse_tree(tree) < 0)
|
||||
exit(128);
|
||||
init_tree_desc(&t, tree->buffer, tree->size);
|
||||
init_tree_desc(&t, &tree->object.oid, tree->buffer, tree->size);
|
||||
if (unpack_trees(1, &t, &opts) < 0)
|
||||
die(_("unable to checkout working tree"));
|
||||
|
||||
|
|
|
@ -333,7 +333,7 @@ static void create_base_index(const struct commit *current_head)
|
|||
die(_("failed to unpack HEAD tree object"));
|
||||
if (parse_tree(tree) < 0)
|
||||
exit(128);
|
||||
init_tree_desc(&t, tree->buffer, tree->size);
|
||||
init_tree_desc(&t, &tree->object.oid, tree->buffer, tree->size);
|
||||
if (unpack_trees(1, &t, &opts))
|
||||
exit(128); /* We've already reported the error, finish dying */
|
||||
}
|
||||
|
|
|
@ -1236,20 +1236,6 @@ static void *gfi_unpack_entry(
|
|||
return unpack_entry(the_repository, p, oe->idx.offset, &type, sizep);
|
||||
}
|
||||
|
||||
static const char *get_mode(const char *str, uint16_t *modep)
|
||||
{
|
||||
unsigned char c;
|
||||
uint16_t mode = 0;
|
||||
|
||||
while ((c = *str++) != ' ') {
|
||||
if (c < '0' || c > '7')
|
||||
return NULL;
|
||||
mode = (mode << 3) + (c - '0');
|
||||
}
|
||||
*modep = mode;
|
||||
return str;
|
||||
}
|
||||
|
||||
static void load_tree(struct tree_entry *root)
|
||||
{
|
||||
struct object_id *oid = &root->versions[1].oid;
|
||||
|
@ -1287,7 +1273,7 @@ static void load_tree(struct tree_entry *root)
|
|||
t->entries[t->entry_count++] = e;
|
||||
|
||||
e->tree = NULL;
|
||||
c = get_mode(c, &e->versions[1].mode);
|
||||
c = parse_mode(c, &e->versions[1].mode);
|
||||
if (!c)
|
||||
die("Corrupt mode in %s", oid_to_hex(oid));
|
||||
e->versions[0].mode = e->versions[1].mode;
|
||||
|
@ -2280,7 +2266,7 @@ static void file_change_m(const char *p, struct branch *b)
|
|||
struct object_id oid;
|
||||
uint16_t mode, inline_data = 0;
|
||||
|
||||
p = get_mode(p, &mode);
|
||||
p = parse_mode(p, &mode);
|
||||
if (!p)
|
||||
die("Corrupt mode: %s", command_buf.buf);
|
||||
switch (mode) {
|
||||
|
|
|
@ -527,7 +527,7 @@ static int grep_submodule(struct grep_opt *opt,
|
|||
strbuf_addstr(&base, filename);
|
||||
strbuf_addch(&base, '/');
|
||||
|
||||
init_tree_desc(&tree, data, size);
|
||||
init_tree_desc(&tree, oid, data, size);
|
||||
hit = grep_tree(&subopt, pathspec, &tree, &base, base.len,
|
||||
object_type == OBJ_COMMIT);
|
||||
strbuf_release(&base);
|
||||
|
@ -573,7 +573,7 @@ static int grep_cache(struct grep_opt *opt,
|
|||
&type, &size);
|
||||
if (!data)
|
||||
die(_("unable to read tree %s"), oid_to_hex(&ce->oid));
|
||||
init_tree_desc(&tree, data, size);
|
||||
init_tree_desc(&tree, &ce->oid, data, size);
|
||||
|
||||
hit |= grep_tree(opt, pathspec, &tree, &name, 0, 0);
|
||||
strbuf_setlen(&name, name_base_len);
|
||||
|
@ -669,7 +669,7 @@ static int grep_tree(struct grep_opt *opt, const struct pathspec *pathspec,
|
|||
oid_to_hex(&entry.oid));
|
||||
|
||||
strbuf_addch(base, '/');
|
||||
init_tree_desc(&sub, data, size);
|
||||
init_tree_desc(&sub, &entry.oid, data, size);
|
||||
hit |= grep_tree(opt, pathspec, &sub, base, tn_len,
|
||||
check_attr);
|
||||
free(data);
|
||||
|
@ -713,7 +713,7 @@ static int grep_object(struct grep_opt *opt, const struct pathspec *pathspec,
|
|||
strbuf_add(&base, name, len);
|
||||
strbuf_addch(&base, ':');
|
||||
}
|
||||
init_tree_desc(&tree, data, size);
|
||||
init_tree_desc(&tree, &obj->oid, data, size);
|
||||
hit = grep_tree(opt, pathspec, &tree, &base, base.len,
|
||||
obj->type == OBJ_COMMIT);
|
||||
strbuf_release(&base);
|
||||
|
|
|
@ -375,6 +375,7 @@ int cmd_ls_tree(int argc, const char **argv, const char *prefix)
|
|||
OPT_END()
|
||||
};
|
||||
struct ls_tree_cmdmode_to_fmt *m2f = ls_tree_cmdmode_format;
|
||||
struct object_context obj_context;
|
||||
int ret;
|
||||
|
||||
git_config(git_default_config, NULL);
|
||||
|
@ -406,7 +407,9 @@ int cmd_ls_tree(int argc, const char **argv, const char *prefix)
|
|||
ls_tree_usage, ls_tree_options);
|
||||
if (argc < 1)
|
||||
usage_with_options(ls_tree_usage, ls_tree_options);
|
||||
if (repo_get_oid(the_repository, argv[0], &oid))
|
||||
if (get_oid_with_context(the_repository, argv[0],
|
||||
GET_OID_HASH_ANY, &oid,
|
||||
&obj_context))
|
||||
die("Not a valid object name %s", argv[0]);
|
||||
|
||||
/*
|
||||
|
|
|
@ -677,7 +677,8 @@ static int read_tree_trivial(struct object_id *common, struct object_id *head,
|
|||
cache_tree_free(&the_index.cache_tree);
|
||||
for (i = 0; i < nr_trees; i++) {
|
||||
parse_tree(trees[i]);
|
||||
init_tree_desc(t+i, trees[i]->buffer, trees[i]->size);
|
||||
init_tree_desc(t+i, &trees[i]->object.oid,
|
||||
trees[i]->buffer, trees[i]->size);
|
||||
}
|
||||
if (unpack_trees(nr_trees, t, &opts))
|
||||
return -1;
|
||||
|
|
|
@ -1826,7 +1826,8 @@ static void add_pbase_object(struct tree_desc *tree,
|
|||
tree = pbase_tree_get(&entry.oid);
|
||||
if (!tree)
|
||||
return;
|
||||
init_tree_desc(&sub, tree->tree_data, tree->tree_size);
|
||||
init_tree_desc(&sub, &tree->oid,
|
||||
tree->tree_data, tree->tree_size);
|
||||
|
||||
add_pbase_object(&sub, down, downlen, fullname);
|
||||
pbase_tree_put(tree);
|
||||
|
@ -1886,7 +1887,8 @@ static void add_preferred_base_object(const char *name)
|
|||
}
|
||||
else {
|
||||
struct tree_desc tree;
|
||||
init_tree_desc(&tree, it->pcache.tree_data, it->pcache.tree_size);
|
||||
init_tree_desc(&tree, &it->pcache.oid,
|
||||
it->pcache.tree_data, it->pcache.tree_size);
|
||||
add_pbase_object(&tree, name, cmplen, name);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -263,7 +263,7 @@ int cmd_read_tree(int argc, const char **argv, const char *cmd_prefix)
|
|||
struct tree *tree = trees[i];
|
||||
if (parse_tree(tree) < 0)
|
||||
return 128;
|
||||
init_tree_desc(t+i, tree->buffer, tree->size);
|
||||
init_tree_desc(t+i, &tree->object.oid, tree->buffer, tree->size);
|
||||
}
|
||||
if (unpack_trees(nr_trees, t, &opts))
|
||||
return 128;
|
||||
|
|
|
@ -25,6 +25,7 @@
|
|||
#include "submodule.h"
|
||||
#include "commit-reach.h"
|
||||
#include "shallow.h"
|
||||
#include "object-file-convert.h"
|
||||
|
||||
#define DO_REVS 1
|
||||
#define DO_NOREV 2
|
||||
|
@ -676,6 +677,8 @@ static void print_path(const char *path, const char *prefix, enum format_type fo
|
|||
int cmd_rev_parse(int argc, const char **argv, const char *prefix)
|
||||
{
|
||||
int i, as_is = 0, verify = 0, quiet = 0, revs_count = 0, type = 0;
|
||||
const struct git_hash_algo *output_algo = NULL;
|
||||
const struct git_hash_algo *compat = NULL;
|
||||
int did_repo_setup = 0;
|
||||
int has_dashdash = 0;
|
||||
int output_prefix = 0;
|
||||
|
@ -747,6 +750,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
|
|||
|
||||
prepare_repo_settings(the_repository);
|
||||
the_repository->settings.command_requires_full_index = 0;
|
||||
compat = the_repository->compat_hash_algo;
|
||||
}
|
||||
|
||||
if (!strcmp(arg, "--")) {
|
||||
|
@ -834,6 +838,22 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
|
|||
flags |= GET_OID_QUIETLY;
|
||||
continue;
|
||||
}
|
||||
if (opt_with_value(arg, "--output-object-format", &arg)) {
|
||||
if (!arg)
|
||||
die(_("no object format specified"));
|
||||
if (!strcmp(arg, the_hash_algo->name) ||
|
||||
!strcmp(arg, "storage")) {
|
||||
flags |= GET_OID_HASH_ANY;
|
||||
output_algo = the_hash_algo;
|
||||
continue;
|
||||
}
|
||||
else if (compat && !strcmp(arg, compat->name)) {
|
||||
flags |= GET_OID_HASH_ANY;
|
||||
output_algo = compat;
|
||||
continue;
|
||||
}
|
||||
else die(_("unsupported object format: %s"), arg);
|
||||
}
|
||||
if (opt_with_value(arg, "--short", &arg)) {
|
||||
filter &= ~(DO_FLAGS|DO_NOREV);
|
||||
verify = 1;
|
||||
|
@ -883,7 +903,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
|
|||
continue;
|
||||
}
|
||||
if (skip_prefix(arg, "--disambiguate=", &arg)) {
|
||||
repo_for_each_abbrev(the_repository, arg,
|
||||
repo_for_each_abbrev(the_repository, arg, the_hash_algo,
|
||||
show_abbrev, NULL);
|
||||
continue;
|
||||
}
|
||||
|
@ -1091,6 +1111,9 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
|
|||
}
|
||||
if (!get_oid_with_context(the_repository, name,
|
||||
flags, &oid, &unused)) {
|
||||
if (output_algo)
|
||||
repo_oid_to_algop(the_repository, &oid,
|
||||
output_algo, &oid);
|
||||
if (verify)
|
||||
revs_count++;
|
||||
else
|
||||
|
|
|
@ -284,7 +284,7 @@ static int reset_tree(struct object_id *i_tree, int update, int reset)
|
|||
if (parse_tree(tree))
|
||||
return -1;
|
||||
|
||||
init_tree_desc(t, tree->buffer, tree->size);
|
||||
init_tree_desc(t, &tree->object.oid, tree->buffer, tree->size);
|
||||
|
||||
opts.head_idx = 1;
|
||||
opts.src_index = &the_index;
|
||||
|
@ -870,7 +870,8 @@ static void diff_include_untracked(const struct stash_info *info, struct diff_op
|
|||
tree[i] = parse_tree_indirect(oid[i]);
|
||||
if (parse_tree(tree[i]) < 0)
|
||||
die(_("failed to parse tree"));
|
||||
init_tree_desc(&tree_desc[i], tree[i]->buffer, tree[i]->size);
|
||||
init_tree_desc(&tree_desc[i], &tree[i]->object.oid,
|
||||
tree[i]->buffer, tree[i]->size);
|
||||
}
|
||||
|
||||
unpack_tree_opt.head_idx = -1;
|
||||
|
|
|
@ -27,6 +27,7 @@
|
|||
#include "ref-filter.h"
|
||||
#include "date.h"
|
||||
#include "write-or-die.h"
|
||||
#include "object-file-convert.h"
|
||||
|
||||
static const char * const git_tag_usage[] = {
|
||||
N_("git tag [-a | -s | -u <key-id>] [-f] [-m <msg> | -F <file>] [-e]\n"
|
||||
|
@ -151,9 +152,43 @@ static int verify_tag(const char *name, const char *ref UNUSED,
|
|||
return 0;
|
||||
}
|
||||
|
||||
static int do_sign(struct strbuf *buffer)
|
||||
static int do_sign(struct strbuf *buffer, struct object_id **compat_oid,
|
||||
struct object_id *compat_oid_buf)
|
||||
{
|
||||
return sign_buffer(buffer, buffer, get_signing_key()) ? -1 : 0;
|
||||
const struct git_hash_algo *compat = the_repository->compat_hash_algo;
|
||||
struct strbuf sig = STRBUF_INIT, compat_sig = STRBUF_INIT;
|
||||
struct strbuf compat_buf = STRBUF_INIT;
|
||||
const char *keyid = get_signing_key();
|
||||
int ret = -1;
|
||||
|
||||
if (sign_buffer(buffer, &sig, keyid))
|
||||
return -1;
|
||||
|
||||
if (compat) {
|
||||
const struct git_hash_algo *algo = the_repository->hash_algo;
|
||||
|
||||
if (convert_object_file(&compat_buf, algo, compat,
|
||||
buffer->buf, buffer->len, OBJ_TAG, 1))
|
||||
goto out;
|
||||
if (sign_buffer(&compat_buf, &compat_sig, keyid))
|
||||
goto out;
|
||||
add_header_signature(&compat_buf, &sig, algo);
|
||||
strbuf_addbuf(&compat_buf, &compat_sig);
|
||||
hash_object_file(compat, compat_buf.buf, compat_buf.len,
|
||||
OBJ_TAG, compat_oid_buf);
|
||||
*compat_oid = compat_oid_buf;
|
||||
}
|
||||
|
||||
if (compat_sig.len)
|
||||
add_header_signature(buffer, &compat_sig, compat);
|
||||
|
||||
strbuf_addbuf(buffer, &sig);
|
||||
ret = 0;
|
||||
out:
|
||||
strbuf_release(&sig);
|
||||
strbuf_release(&compat_sig);
|
||||
strbuf_release(&compat_buf);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static const char tag_template[] =
|
||||
|
@ -226,9 +261,11 @@ static void write_tag_body(int fd, const struct object_id *oid)
|
|||
|
||||
static int build_tag_object(struct strbuf *buf, int sign, struct object_id *result)
|
||||
{
|
||||
if (sign && do_sign(buf) < 0)
|
||||
struct object_id *compat_oid = NULL, compat_oid_buf;
|
||||
if (sign && do_sign(buf, &compat_oid, &compat_oid_buf) < 0)
|
||||
return error(_("unable to sign the tag"));
|
||||
if (write_object_file(buf->buf, buf->len, OBJ_TAG, result) < 0)
|
||||
if (write_object_file_flags(buf->buf, buf->len, OBJ_TAG, result,
|
||||
compat_oid, 0) < 0)
|
||||
return error(_("unable to write tag file"));
|
||||
return 0;
|
||||
}
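Sketch of the resulting on-disk layout (an inference from do_sign() and
add_header_signature() above, not output captured from this commit; the header
name and ordering are assumptions): in a SHA-256 repository whose
compatObjectFormat is sha1, a signed tag object stored in SHA-256 form would
look roughly like

	object <sha256 oid of the tagged object>
	type commit
	tag v1.2.3
	tagger A U Thor <author@example.com> 1700000000 +0000
	gpgsig -----BEGIN PGP SIGNATURE-----
	 <signature computed over the SHA-1 encoding of this tag>
	 -----END PGP SIGNATURE-----

	<tag message>

	-----BEGIN PGP SIGNATURE-----
	<signature computed over this SHA-256 encoding, appended by do_sign()>
	-----END PGP SIGNATURE-----

while the SHA-1 compatibility encoding carries the mirror image: a
gpgsig-sha256 header holding the SHA-256-form signature, with the SHA-1-form
signature appended after the message.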
|
||||
|
|
|
@ -447,7 +447,7 @@ static int update_one(struct cache_tree *it,
|
|||
hash_object_file(the_hash_algo, buffer.buf, buffer.len,
|
||||
OBJ_TREE, &it->oid);
|
||||
} else if (write_object_file_flags(buffer.buf, buffer.len, OBJ_TREE,
|
||||
&it->oid, flags & WRITE_TREE_SILENT
|
||||
&it->oid, NULL, flags & WRITE_TREE_SILENT
|
||||
? HASH_SILENT : 0)) {
|
||||
strbuf_release(&buffer);
|
||||
return -1;
|
||||
|
@ -769,7 +769,7 @@ static void prime_cache_tree_rec(struct repository *r,
|
|||
|
||||
oidcpy(&it->oid, &tree->object.oid);
|
||||
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
cnt = 0;
|
||||
while (tree_entry(&desc, &entry)) {
|
||||
if (!S_ISDIR(entry.mode))
|
||||
|
|
commit.c | 239
@ -27,6 +27,7 @@
|
|||
#include "tree.h"
|
||||
#include "hook.h"
|
||||
#include "parse.h"
|
||||
#include "object-file-convert.h"
|
||||
|
||||
static struct commit_extra_header *read_commit_extra_header_lines(const char *buf, size_t len, const char **);
|
||||
|
||||
|
@ -1113,12 +1114,11 @@ static const char *gpg_sig_headers[] = {
|
|||
"gpgsig-sha256",
|
||||
};
|
||||
|
||||
int sign_with_header(struct strbuf *buf, const char *keyid)
|
||||
int add_header_signature(struct strbuf *buf, struct strbuf *sig, const struct git_hash_algo *algo)
|
||||
{
|
||||
struct strbuf sig = STRBUF_INIT;
|
||||
int inspos, copypos;
|
||||
const char *eoh;
|
||||
const char *gpg_sig_header = gpg_sig_headers[hash_algo_by_ptr(the_hash_algo)];
|
||||
const char *gpg_sig_header = gpg_sig_headers[hash_algo_by_ptr(algo)];
|
||||
int gpg_sig_header_len = strlen(gpg_sig_header);
|
||||
|
||||
/* find the end of the header */
|
||||
|
@ -1128,15 +1128,8 @@ int sign_with_header(struct strbuf *buf, const char *keyid)
|
|||
else
|
||||
inspos = eoh - buf->buf + 1;
|
||||
|
||||
if (!keyid || !*keyid)
|
||||
keyid = get_signing_key();
|
||||
if (sign_buffer(buf, &sig, keyid)) {
|
||||
strbuf_release(&sig);
|
||||
return -1;
|
||||
}
|
||||
|
||||
for (copypos = 0; sig.buf[copypos]; ) {
|
||||
const char *bol = sig.buf + copypos;
|
||||
for (copypos = 0; sig->buf[copypos]; ) {
|
||||
const char *bol = sig->buf + copypos;
|
||||
const char *eol = strchrnul(bol, '\n');
|
||||
int len = (eol - bol) + !!*eol;
|
||||
|
||||
|
@ -1149,11 +1142,17 @@ int sign_with_header(struct strbuf *buf, const char *keyid)
|
|||
inspos += len;
|
||||
copypos += len;
|
||||
}
|
||||
strbuf_release(&sig);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int sign_commit_to_strbuf(struct strbuf *sig, struct strbuf *buf, const char *keyid)
|
||||
{
|
||||
if (!keyid || !*keyid)
|
||||
keyid = get_signing_key();
|
||||
if (sign_buffer(buf, sig, keyid))
|
||||
return -1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
int parse_signed_commit(const struct commit *commit,
|
||||
struct strbuf *payload, struct strbuf *signature,
|
||||
|
@ -1369,6 +1368,39 @@ void append_merge_tag_headers(struct commit_list *parents,
|
|||
}
|
||||
}
|
||||
|
||||
static int convert_commit_extra_headers(struct commit_extra_header *orig,
|
||||
struct commit_extra_header **result)
|
||||
{
|
||||
const struct git_hash_algo *compat = the_repository->compat_hash_algo;
|
||||
const struct git_hash_algo *algo = the_repository->hash_algo;
|
||||
struct commit_extra_header *extra = NULL, **tail = &extra;
|
||||
struct strbuf out = STRBUF_INIT;
|
||||
while (orig) {
|
||||
struct commit_extra_header *new;
|
||||
CALLOC_ARRAY(new, 1);
|
||||
if (!strcmp(orig->key, "mergetag")) {
|
||||
if (convert_object_file(&out, algo, compat,
|
||||
orig->value, orig->len,
|
||||
OBJ_TAG, 1)) {
|
||||
free(new);
|
||||
free_commit_extra_headers(extra);
|
||||
return -1;
|
||||
}
|
||||
new->key = xstrdup("mergetag");
|
||||
new->value = strbuf_detach(&out, &new->len);
|
||||
} else {
|
||||
new->key = xstrdup(orig->key);
|
||||
new->len = orig->len;
|
||||
new->value = xmemdupz(orig->value, orig->len);
|
||||
}
|
||||
*tail = new;
|
||||
tail = &new->next;
|
||||
orig = orig->next;
|
||||
}
|
||||
*result = extra;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void add_extra_header(struct strbuf *buffer,
|
||||
struct commit_extra_header *extra)
|
||||
{
|
||||
|
@ -1612,6 +1644,49 @@ N_("Warning: commit message did not conform to UTF-8.\n"
|
|||
"You may want to amend it after fixing the message, or set the config\n"
|
||||
"variable i18n.commitEncoding to the encoding your project uses.\n");
|
||||
|
||||
static void write_commit_tree(struct strbuf *buffer, const char *msg, size_t msg_len,
|
||||
const struct object_id *tree,
|
||||
const struct object_id *parents, size_t parents_len,
|
||||
const char *author, const char *committer,
|
||||
struct commit_extra_header *extra)
|
||||
{
|
||||
int encoding_is_utf8;
|
||||
size_t i;
|
||||
|
||||
/* Not having i18n.commitencoding is the same as having utf-8 */
|
||||
encoding_is_utf8 = is_encoding_utf8(git_commit_encoding);
|
||||
|
||||
strbuf_grow(buffer, 8192); /* should avoid reallocs for the headers */
|
||||
strbuf_addf(buffer, "tree %s\n", oid_to_hex(tree));
|
||||
|
||||
/*
|
||||
* NOTE! This ordering means that the same exact tree merged with a
|
||||
* different order of parents will be a _different_ changeset even
|
||||
* if everything else stays the same.
|
||||
*/
|
||||
for (i = 0; i < parents_len; i++)
|
||||
strbuf_addf(buffer, "parent %s\n", oid_to_hex(&parents[i]));
|
||||
|
||||
/* Person/date information */
|
||||
if (!author)
|
||||
author = git_author_info(IDENT_STRICT);
|
||||
strbuf_addf(buffer, "author %s\n", author);
|
||||
if (!committer)
|
||||
committer = git_committer_info(IDENT_STRICT);
|
||||
strbuf_addf(buffer, "committer %s\n", committer);
|
||||
if (!encoding_is_utf8)
|
||||
strbuf_addf(buffer, "encoding %s\n", git_commit_encoding);
|
||||
|
||||
while (extra) {
|
||||
add_extra_header(buffer, extra);
|
||||
extra = extra->next;
|
||||
}
|
||||
strbuf_addch(buffer, '\n');
|
||||
|
||||
/* And add the comment */
|
||||
strbuf_add(buffer, msg, msg_len);
|
||||
}
|
||||
|
||||
int commit_tree_extended(const char *msg, size_t msg_len,
|
||||
const struct object_id *tree,
|
||||
struct commit_list *parents, struct object_id *ret,
|
||||
|
@ -1619,63 +1694,119 @@ int commit_tree_extended(const char *msg, size_t msg_len,
|
|||
const char *sign_commit,
|
||||
struct commit_extra_header *extra)
|
||||
{
|
||||
int result;
|
||||
struct repository *r = the_repository;
|
||||
int result = 0;
|
||||
int encoding_is_utf8;
|
||||
struct strbuf buffer;
|
||||
struct strbuf buffer = STRBUF_INIT, compat_buffer = STRBUF_INIT;
|
||||
struct strbuf sig = STRBUF_INIT, compat_sig = STRBUF_INIT;
|
||||
struct object_id *parent_buf = NULL, *compat_oid = NULL;
|
||||
struct object_id compat_oid_buf;
|
||||
size_t i, nparents;
|
||||
|
||||
/* Not having i18n.commitencoding is the same as having utf-8 */
|
||||
encoding_is_utf8 = is_encoding_utf8(git_commit_encoding);
|
||||
|
||||
assert_oid_type(tree, OBJ_TREE);
|
||||
|
||||
if (memchr(msg, '\0', msg_len))
|
||||
return error("a NUL byte in commit log message not allowed.");
|
||||
|
||||
/* Not having i18n.commitencoding is the same as having utf-8 */
|
||||
encoding_is_utf8 = is_encoding_utf8(git_commit_encoding);
|
||||
|
||||
strbuf_init(&buffer, 8192); /* should avoid reallocs for the headers */
|
||||
strbuf_addf(&buffer, "tree %s\n", oid_to_hex(tree));
|
||||
|
||||
/*
|
||||
* NOTE! This ordering means that the same exact tree merged with a
|
||||
* different order of parents will be a _different_ changeset even
|
||||
* if everything else stays the same.
|
||||
*/
|
||||
nparents = commit_list_count(parents);
|
||||
CALLOC_ARRAY(parent_buf, nparents);
|
||||
i = 0;
|
||||
while (parents) {
|
||||
struct commit *parent = pop_commit(&parents);
|
||||
strbuf_addf(&buffer, "parent %s\n",
|
||||
oid_to_hex(&parent->object.oid));
|
||||
oidcpy(&parent_buf[i++], &parent->object.oid);
|
||||
}
|
||||
|
||||
/* Person/date information */
|
||||
if (!author)
|
||||
author = git_author_info(IDENT_STRICT);
|
||||
strbuf_addf(&buffer, "author %s\n", author);
|
||||
if (!committer)
|
||||
committer = git_committer_info(IDENT_STRICT);
|
||||
strbuf_addf(&buffer, "committer %s\n", committer);
|
||||
if (!encoding_is_utf8)
|
||||
strbuf_addf(&buffer, "encoding %s\n", git_commit_encoding);
|
||||
|
||||
while (extra) {
|
||||
add_extra_header(&buffer, extra);
|
||||
extra = extra->next;
|
||||
}
|
||||
strbuf_addch(&buffer, '\n');
|
||||
|
||||
/* And add the comment */
|
||||
strbuf_add(&buffer, msg, msg_len);
|
||||
|
||||
/* And check the encoding */
|
||||
if (encoding_is_utf8 && !verify_utf8(&buffer))
|
||||
fprintf(stderr, _(commit_utf8_warn));
|
||||
|
||||
if (sign_commit && sign_with_header(&buffer, sign_commit)) {
|
||||
write_commit_tree(&buffer, msg, msg_len, tree, parent_buf, nparents, author, committer, extra);
|
||||
if (sign_commit && sign_commit_to_strbuf(&sig, &buffer, sign_commit)) {
|
||||
result = -1;
|
||||
goto out;
|
||||
}
|
||||
if (r->compat_hash_algo) {
|
||||
struct commit_extra_header *compat_extra = NULL;
|
||||
struct object_id mapped_tree;
|
||||
struct object_id *mapped_parents;
|
||||
|
||||
result = write_object_file(buffer.buf, buffer.len, OBJ_COMMIT, ret);
|
||||
CALLOC_ARRAY(mapped_parents, nparents);
|
||||
|
||||
if (repo_oid_to_algop(r, tree, r->compat_hash_algo, &mapped_tree)) {
|
||||
result = -1;
|
||||
free(mapped_parents);
|
||||
goto out;
|
||||
}
|
||||
for (i = 0; i < nparents; i++)
|
||||
if (repo_oid_to_algop(r, &parent_buf[i], r->compat_hash_algo, &mapped_parents[i])) {
|
||||
result = -1;
|
||||
free(mapped_parents);
|
||||
goto out;
|
||||
}
|
||||
if (convert_commit_extra_headers(extra, &compat_extra)) {
|
||||
result = -1;
|
||||
free(mapped_parents);
|
||||
goto out;
|
||||
}
|
||||
write_commit_tree(&compat_buffer, msg, msg_len, &mapped_tree,
|
||||
mapped_parents, nparents, author, committer, compat_extra);
|
||||
free_commit_extra_headers(compat_extra);
|
||||
free(mapped_parents);
|
||||
|
||||
if (sign_commit && sign_commit_to_strbuf(&compat_sig, &compat_buffer, sign_commit)) {
|
||||
result = -1;
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
if (sign_commit) {
|
||||
struct sig_pairs {
|
||||
struct strbuf *sig;
|
||||
const struct git_hash_algo *algo;
|
||||
} bufs [2] = {
|
||||
{ &compat_sig, r->compat_hash_algo },
|
||||
{ &sig, r->hash_algo },
|
||||
};
|
||||
int i;
|
||||
|
||||
/*
|
||||
* We write algorithms in the order they were implemented in
|
||||
* Git to produce a stable hash when multiple algorithms are
|
||||
* used.
|
||||
*/
|
||||
if (r->compat_hash_algo && hash_algo_by_ptr(bufs[0].algo) > hash_algo_by_ptr(bufs[1].algo))
|
||||
SWAP(bufs[0], bufs[1]);
|
||||
|
||||
/*
|
||||
* We traverse each algorithm in order, and apply the signature
|
||||
* to each buffer.
|
||||
*/
|
||||
for (i = 0; i < ARRAY_SIZE(bufs); i++) {
|
||||
if (!bufs[i].algo)
|
||||
continue;
|
||||
add_header_signature(&buffer, bufs[i].sig, bufs[i].algo);
|
||||
if (r->compat_hash_algo)
|
||||
add_header_signature(&compat_buffer, bufs[i].sig, bufs[i].algo);
|
||||
}
|
||||
}
|
||||
|
||||
/* And check the encoding. */
|
||||
if (encoding_is_utf8 && (!verify_utf8(&buffer) || !verify_utf8(&compat_buffer)))
|
||||
fprintf(stderr, _(commit_utf8_warn));
|
||||
|
||||
if (r->compat_hash_algo) {
|
||||
hash_object_file(r->compat_hash_algo, compat_buffer.buf, compat_buffer.len,
|
||||
OBJ_COMMIT, &compat_oid_buf);
|
||||
compat_oid = &compat_oid_buf;
|
||||
}
|
||||
|
||||
result = write_object_file_flags(buffer.buf, buffer.len, OBJ_COMMIT,
|
||||
ret, compat_oid, 0);
|
||||
out:
|
||||
free(parent_buf);
|
||||
strbuf_release(&buffer);
|
||||
strbuf_release(&compat_buffer);
|
||||
strbuf_release(&sig);
|
||||
strbuf_release(&compat_sig);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
|
commit.h | 1
@ -370,5 +370,6 @@ int parse_buffer_signed_by_header(const char *buffer,
				  struct strbuf *payload,
				  struct strbuf *signature,
				  const struct git_hash_algo *algop);
int add_header_signature(struct strbuf *buf, struct strbuf *sig, const struct git_hash_algo *algo);

#endif /* COMMIT_H */
@ -284,7 +284,7 @@ void resolve_tree_islands(struct repository *r,
|
|||
if (!tree || parse_tree(tree) < 0)
|
||||
die(_("bad tree object %s"), oid_to_hex(&ent->idx.oid));
|
||||
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
while (tree_entry(&desc, &entry)) {
|
||||
struct object *obj;
|
||||
|
||||
|
|
|
@ -562,7 +562,7 @@ static int diff_cache(struct rev_info *revs,
|
|||
opts.pathspec = &revs->diffopt.pathspec;
|
||||
opts.pathspec->recursive = 1;
|
||||
|
||||
init_tree_desc(&t, tree->buffer, tree->size);
|
||||
init_tree_desc(&t, &tree->object.oid, tree->buffer, tree->size);
|
||||
return unpack_trees(1, &t, &opts);
|
||||
}
|
||||
|
||||
|
|
fsck.c | 6
@ -327,7 +327,8 @@ static int fsck_walk_tree(struct tree *tree, void *data, struct fsck_options *op
|
|||
return -1;
|
||||
|
||||
name = fsck_get_object_name(options, &tree->object.oid);
|
||||
if (init_tree_desc_gently(&desc, tree->buffer, tree->size, 0))
|
||||
if (init_tree_desc_gently(&desc, &tree->object.oid,
|
||||
tree->buffer, tree->size, 0))
|
||||
return -1;
|
||||
while (tree_entry_gently(&desc, &entry)) {
|
||||
struct object *obj;
|
||||
|
@ -598,7 +599,8 @@ static int fsck_tree(const struct object_id *tree_oid,
|
|||
const char *o_name;
|
||||
struct name_stack df_dup_candidates = { NULL };
|
||||
|
||||
if (init_tree_desc_gently(&desc, buffer, size, TREE_DESC_RAW_MODES)) {
|
||||
if (init_tree_desc_gently(&desc, tree_oid, buffer, size,
|
||||
TREE_DESC_RAW_MODES)) {
|
||||
retval += report(options, tree_oid, OBJ_TREE,
|
||||
FSCK_MSG_BAD_TREE,
|
||||
"cannot be parsed as a tree");
|
||||
|
|
|
@ -145,6 +145,7 @@ struct object_id {
|
|||
#define GET_OID_RECORD_PATH 0200
|
||||
#define GET_OID_ONLY_TO_DIE 04000
|
||||
#define GET_OID_REQUIRE_PATH 010000
|
||||
#define GET_OID_HASH_ANY 020000
|
||||
|
||||
#define GET_OID_DISAMBIGUATORS \
|
||||
(GET_OID_COMMIT | GET_OID_COMMITTISH | \
|
||||
|
|
hash.h | 9
@ -73,10 +73,15 @@ static inline void oidclr(struct object_id *oid)
	oid->algo = hash_algo_by_ptr(the_hash_algo);
}

static inline void oidread_algop(struct object_id *oid, const unsigned char *hash, const struct git_hash_algo *algop)
{
	memcpy(oid->hash, hash, algop->rawsz);
	oid->algo = hash_algo_by_ptr(algop);
}

static inline void oidread(struct object_id *oid, const unsigned char *hash)
{
	memcpy(oid->hash, hash, the_hash_algo->rawsz);
	oid->algo = hash_algo_by_ptr(the_hash_algo);
	oidread_algop(oid, hash, the_hash_algo);
}

static inline int is_empty_blob_sha1(const unsigned char *sha1)
@ -1307,7 +1307,7 @@ static struct object_list **process_tree(struct tree *tree,
|
|||
obj->flags |= SEEN;
|
||||
p = add_one_object(obj, p);
|
||||
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
|
||||
while (tree_entry(&desc, &entry))
|
||||
switch (object_type(entry.mode)) {
|
||||
|
|
|
@ -102,7 +102,7 @@ static void process_tree_contents(struct traversal_context *ctx,
|
|||
enum interesting match = ctx->revs->diffopt.pathspec.nr == 0 ?
|
||||
all_entries_interesting : entry_not_interesting;
|
||||
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
|
||||
while (tree_entry(&desc, &entry)) {
|
||||
if (match != all_entries_interesting) {
|
||||
|
|
loose.c (new file) | 259
@ -0,0 +1,259 @@
|
|||
#include "git-compat-util.h"
|
||||
#include "hash.h"
|
||||
#include "path.h"
|
||||
#include "object-store.h"
|
||||
#include "hex.h"
|
||||
#include "wrapper.h"
|
||||
#include "gettext.h"
|
||||
#include "loose.h"
|
||||
#include "lockfile.h"
|
||||
#include "oidtree.h"
|
||||
|
||||
static const char *loose_object_header = "# loose-object-idx\n";
|
||||
|
||||
static inline int should_use_loose_object_map(struct repository *repo)
|
||||
{
|
||||
return repo->compat_hash_algo && repo->gitdir;
|
||||
}
|
||||
|
||||
void loose_object_map_init(struct loose_object_map **map)
|
||||
{
|
||||
struct loose_object_map *m;
|
||||
m = xmalloc(sizeof(**map));
|
||||
m->to_compat = kh_init_oid_map();
|
||||
m->to_storage = kh_init_oid_map();
|
||||
*map = m;
|
||||
}
|
||||
|
||||
static int insert_oid_pair(kh_oid_map_t *map, const struct object_id *key, const struct object_id *value)
|
||||
{
|
||||
khiter_t pos;
|
||||
int ret;
|
||||
struct object_id *stored;
|
||||
|
||||
pos = kh_put_oid_map(map, *key, &ret);
|
||||
|
||||
/* This item already exists in the map. */
|
||||
if (ret == 0)
|
||||
return 0;
|
||||
|
||||
stored = xmalloc(sizeof(*stored));
|
||||
oidcpy(stored, value);
|
||||
kh_value(map, pos) = stored;
|
||||
return 1;
|
||||
}
|
||||
|
||||
static int insert_loose_map(struct object_directory *odb,
|
||||
const struct object_id *oid,
|
||||
const struct object_id *compat_oid)
|
||||
{
|
||||
struct loose_object_map *map = odb->loose_map;
|
||||
int inserted = 0;
|
||||
|
||||
inserted |= insert_oid_pair(map->to_compat, oid, compat_oid);
|
||||
inserted |= insert_oid_pair(map->to_storage, compat_oid, oid);
|
||||
if (inserted)
|
||||
oidtree_insert(odb->loose_objects_cache, compat_oid);
|
||||
|
||||
return inserted;
|
||||
}
|
||||
|
||||
static int load_one_loose_object_map(struct repository *repo, struct object_directory *dir)
|
||||
{
|
||||
struct strbuf buf = STRBUF_INIT, path = STRBUF_INIT;
|
||||
FILE *fp;
|
||||
|
||||
if (!dir->loose_map)
|
||||
loose_object_map_init(&dir->loose_map);
|
||||
if (!dir->loose_objects_cache) {
|
||||
ALLOC_ARRAY(dir->loose_objects_cache, 1);
|
||||
oidtree_init(dir->loose_objects_cache);
|
||||
}
|
||||
|
||||
insert_loose_map(dir, repo->hash_algo->empty_tree, repo->compat_hash_algo->empty_tree);
|
||||
insert_loose_map(dir, repo->hash_algo->empty_blob, repo->compat_hash_algo->empty_blob);
|
||||
insert_loose_map(dir, repo->hash_algo->null_oid, repo->compat_hash_algo->null_oid);
|
||||
|
||||
strbuf_git_common_path(&path, repo, "objects/loose-object-idx");
|
||||
fp = fopen(path.buf, "rb");
|
||||
if (!fp) {
|
||||
strbuf_release(&path);
|
||||
return 0;
|
||||
}
|
||||
|
||||
errno = 0;
|
||||
if (strbuf_getwholeline(&buf, fp, '\n') || strcmp(buf.buf, loose_object_header))
|
||||
goto err;
|
||||
while (!strbuf_getline_lf(&buf, fp)) {
|
||||
const char *p;
|
||||
struct object_id oid, compat_oid;
|
||||
if (parse_oid_hex_algop(buf.buf, &oid, &p, repo->hash_algo) ||
|
||||
*p++ != ' ' ||
|
||||
parse_oid_hex_algop(p, &compat_oid, &p, repo->compat_hash_algo) ||
|
||||
p != buf.buf + buf.len)
|
||||
goto err;
|
||||
insert_loose_map(dir, &oid, &compat_oid);
|
||||
}
|
||||
|
||||
strbuf_release(&buf);
|
||||
strbuf_release(&path);
|
||||
return errno ? -1 : 0;
|
||||
err:
|
||||
strbuf_release(&buf);
|
||||
strbuf_release(&path);
|
||||
return -1;
|
||||
}
|
||||
|
||||
int repo_read_loose_object_map(struct repository *repo)
|
||||
{
|
||||
struct object_directory *dir;
|
||||
|
||||
if (!should_use_loose_object_map(repo))
|
||||
return 0;
|
||||
|
||||
prepare_alt_odb(repo);
|
||||
|
||||
for (dir = repo->objects->odb; dir; dir = dir->next) {
|
||||
if (load_one_loose_object_map(repo, dir) < 0) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int repo_write_loose_object_map(struct repository *repo)
|
||||
{
|
||||
kh_oid_map_t *map = repo->objects->odb->loose_map->to_compat;
|
||||
struct lock_file lock;
|
||||
int fd;
|
||||
khiter_t iter;
|
||||
struct strbuf buf = STRBUF_INIT, path = STRBUF_INIT;
|
||||
|
||||
if (!should_use_loose_object_map(repo))
|
||||
return 0;
|
||||
|
||||
strbuf_git_common_path(&path, repo, "objects/loose-object-idx");
|
||||
fd = hold_lock_file_for_update_timeout(&lock, path.buf, LOCK_DIE_ON_ERROR, -1);
|
||||
iter = kh_begin(map);
|
||||
if (write_in_full(fd, loose_object_header, strlen(loose_object_header)) < 0)
|
||||
goto errout;
|
||||
|
||||
for (; iter != kh_end(map); iter++) {
|
||||
if (kh_exist(map, iter)) {
|
||||
if (oideq(&kh_key(map, iter), the_hash_algo->empty_tree) ||
|
||||
oideq(&kh_key(map, iter), the_hash_algo->empty_blob))
|
||||
continue;
|
||||
strbuf_addf(&buf, "%s %s\n", oid_to_hex(&kh_key(map, iter)), oid_to_hex(kh_value(map, iter)));
|
||||
if (write_in_full(fd, buf.buf, buf.len) < 0)
|
||||
goto errout;
|
||||
strbuf_reset(&buf);
|
||||
}
|
||||
}
|
||||
strbuf_release(&buf);
|
||||
if (commit_lock_file(&lock) < 0) {
|
||||
error_errno(_("could not write loose object index %s"), path.buf);
|
||||
strbuf_release(&path);
|
||||
return -1;
|
||||
}
|
||||
strbuf_release(&path);
|
||||
return 0;
|
||||
errout:
|
||||
rollback_lock_file(&lock);
|
||||
strbuf_release(&buf);
|
||||
error_errno(_("failed to write loose object index %s\n"), path.buf);
|
||||
strbuf_release(&path);
|
||||
return -1;
|
||||
}
|
||||
|
||||
static int write_one_object(struct repository *repo, const struct object_id *oid,
|
||||
const struct object_id *compat_oid)
|
||||
{
|
||||
struct lock_file lock;
|
||||
int fd;
|
||||
struct stat st;
|
||||
struct strbuf buf = STRBUF_INIT, path = STRBUF_INIT;
|
||||
|
||||
strbuf_git_common_path(&path, repo, "objects/loose-object-idx");
|
||||
hold_lock_file_for_update_timeout(&lock, path.buf, LOCK_DIE_ON_ERROR, -1);
|
||||
|
||||
fd = open(path.buf, O_WRONLY | O_CREAT | O_APPEND, 0666);
|
||||
if (fd < 0)
|
||||
goto errout;
|
||||
if (fstat(fd, &st) < 0)
|
||||
goto errout;
|
||||
if (!st.st_size && write_in_full(fd, loose_object_header, strlen(loose_object_header)) < 0)
|
||||
goto errout;
|
||||
|
||||
strbuf_addf(&buf, "%s %s\n", oid_to_hex(oid), oid_to_hex(compat_oid));
|
||||
if (write_in_full(fd, buf.buf, buf.len) < 0)
|
||||
goto errout;
|
||||
if (close(fd))
|
||||
goto errout;
|
||||
adjust_shared_perm(path.buf);
|
||||
rollback_lock_file(&lock);
|
||||
strbuf_release(&buf);
|
||||
strbuf_release(&path);
|
||||
return 0;
|
||||
errout:
|
||||
error_errno(_("failed to write loose object index %s\n"), path.buf);
|
||||
close(fd);
|
||||
rollback_lock_file(&lock);
|
||||
strbuf_release(&buf);
|
||||
strbuf_release(&path);
|
||||
return -1;
|
||||
}
|
||||
|
||||
int repo_add_loose_object_map(struct repository *repo, const struct object_id *oid,
|
||||
const struct object_id *compat_oid)
|
||||
{
|
||||
int inserted = 0;
|
||||
|
||||
if (!should_use_loose_object_map(repo))
|
||||
return 0;
|
||||
|
||||
inserted = insert_loose_map(repo->objects->odb, oid, compat_oid);
|
||||
if (inserted)
|
||||
return write_one_object(repo, oid, compat_oid);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int repo_loose_object_map_oid(struct repository *repo,
|
||||
const struct object_id *src,
|
||||
const struct git_hash_algo *to,
|
||||
struct object_id *dest)
|
||||
{
|
||||
struct object_directory *dir;
|
||||
kh_oid_map_t *map;
|
||||
khiter_t pos;
|
||||
|
||||
for (dir = repo->objects->odb; dir; dir = dir->next) {
|
||||
struct loose_object_map *loose_map = dir->loose_map;
|
||||
if (!loose_map)
|
||||
continue;
|
||||
map = (to == repo->compat_hash_algo) ?
|
||||
loose_map->to_compat :
|
||||
loose_map->to_storage;
|
||||
pos = kh_get_oid_map(map, *src);
|
||||
if (pos < kh_end(map)) {
|
||||
oidcpy(dest, kh_value(map, pos));
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
void loose_object_map_clear(struct loose_object_map **map)
|
||||
{
|
||||
struct loose_object_map *m = *map;
|
||||
struct object_id *oid;
|
||||
|
||||
if (!m)
|
||||
return;
|
||||
|
||||
kh_foreach_value(m->to_compat, oid, free(oid));
|
||||
kh_foreach_value(m->to_storage, oid, free(oid));
|
||||
kh_destroy_oid_map(m->to_compat);
|
||||
kh_destroy_oid_map(m->to_storage);
|
||||
free(m);
|
||||
*map = NULL;
|
||||
}
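For orientation (an illustration inferred from the reading and writing code
above, not a file captured from this commit): objects/loose-object-idx is a
plain-text map, one line per loose object, pairing the storage-algorithm oid
with its compat-algorithm oid:

	# loose-object-idx
	<storage oid, e.g. 64 hex digits of sha256> <compat oid, e.g. 40 hex digits of sha1>
	<storage oid> <compat oid>

The empty-tree, empty-blob and null oids are seeded into the in-memory map at
load time, and the write loop above deliberately skips the empty tree and
empty blob when writing the file back out.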
loose.h (new file) | 22
@ -0,0 +1,22 @@
#ifndef LOOSE_H
#define LOOSE_H

#include "khash.h"

struct loose_object_map {
	kh_oid_map_t *to_compat;
	kh_oid_map_t *to_storage;
};

void loose_object_map_init(struct loose_object_map **map);
void loose_object_map_clear(struct loose_object_map **map);
int repo_loose_object_map_oid(struct repository *repo,
			      const struct object_id *src,
			      const struct git_hash_algo *dest_algo,
			      struct object_id *dest);
int repo_add_loose_object_map(struct repository *repo, const struct object_id *oid,
			      const struct object_id *compat_oid);
int repo_read_loose_object_map(struct repository *repo);
int repo_write_loose_object_map(struct repository *repo);

#endif
@ -63,7 +63,7 @@ static void *fill_tree_desc_strict(struct tree_desc *desc,
|
|||
die("unable to read tree (%s)", oid_to_hex(hash));
|
||||
if (type != OBJ_TREE)
|
||||
die("%s is not a tree", oid_to_hex(hash));
|
||||
init_tree_desc(desc, buffer, size);
|
||||
init_tree_desc(desc, hash, buffer, size);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
|
@ -194,7 +194,7 @@ static int splice_tree(const struct object_id *oid1, const char *prefix,
|
|||
buf = repo_read_object_file(the_repository, oid1, &type, &sz);
|
||||
if (!buf)
|
||||
die("cannot read tree %s", oid_to_hex(oid1));
|
||||
init_tree_desc(&desc, buf, sz);
|
||||
init_tree_desc(&desc, oid1, buf, sz);
|
||||
|
||||
rewrite_here = NULL;
|
||||
while (desc.size) {
|
||||
|
|
merge-ort.c | 11
@ -1665,9 +1665,10 @@ static int collect_merge_info(struct merge_options *opt,
|
|||
parse_tree(side1) < 0 ||
|
||||
parse_tree(side2) < 0)
|
||||
return -1;
|
||||
init_tree_desc(t + 0, merge_base->buffer, merge_base->size);
|
||||
init_tree_desc(t + 1, side1->buffer, side1->size);
|
||||
init_tree_desc(t + 2, side2->buffer, side2->size);
|
||||
init_tree_desc(t + 0, &merge_base->object.oid,
|
||||
merge_base->buffer, merge_base->size);
|
||||
init_tree_desc(t + 1, &side1->object.oid, side1->buffer, side1->size);
|
||||
init_tree_desc(t + 2, &side2->object.oid, side2->buffer, side2->size);
|
||||
|
||||
trace2_region_enter("merge", "traverse_trees", opt->repo);
|
||||
ret = traverse_trees(NULL, 3, t, &info);
|
||||
|
@ -4446,10 +4447,10 @@ static int checkout(struct merge_options *opt,
|
|||
unpack_opts.preserve_ignored = 0; /* FIXME: !opts->overwrite_ignore */
|
||||
if (parse_tree(prev) < 0)
|
||||
return -1;
|
||||
init_tree_desc(&trees[0], prev->buffer, prev->size);
|
||||
init_tree_desc(&trees[0], &prev->object.oid, prev->buffer, prev->size);
|
||||
if (parse_tree(next) < 0)
|
||||
return -1;
|
||||
init_tree_desc(&trees[1], next->buffer, next->size);
|
||||
init_tree_desc(&trees[1], &next->object.oid, next->buffer, next->size);
|
||||
|
||||
ret = unpack_trees(2, trees, &unpack_opts);
|
||||
clear_unpack_trees_porcelain(&unpack_opts);
|
||||
|
|
|
@ -407,7 +407,7 @@ static void init_tree_desc_from_tree(struct tree_desc *desc, struct tree *tree)
|
|||
{
|
||||
if (parse_tree(tree) < 0)
|
||||
exit(128);
|
||||
init_tree_desc(desc, tree->buffer, tree->size);
|
||||
init_tree_desc(desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
}
|
||||
|
||||
static int unpack_trees_start(struct merge_options *opt,
|
||||
|
|
merge.c | 3
@ -81,7 +81,8 @@ int checkout_fast_forward(struct repository *r,
|
|||
rollback_lock_file(&lock_file);
|
||||
return -1;
|
||||
}
|
||||
init_tree_desc(t+i, trees[i]->buffer, trees[i]->size);
|
||||
init_tree_desc(t+i, &trees[i]->object.oid,
|
||||
trees[i]->buffer, trees[i]->size);
|
||||
}
|
||||
|
||||
memset(&opts, 0, sizeof(opts));
|
||||
|
|
object-file-convert.c (new file) | 277
@ -0,0 +1,277 @@
|
|||
#include "git-compat-util.h"
|
||||
#include "gettext.h"
|
||||
#include "strbuf.h"
|
||||
#include "hex.h"
|
||||
#include "repository.h"
|
||||
#include "hash-ll.h"
|
||||
#include "hash.h"
|
||||
#include "object.h"
|
||||
#include "loose.h"
|
||||
#include "commit.h"
|
||||
#include "gpg-interface.h"
|
||||
#include "object-file-convert.h"
|
||||
|
||||
int repo_oid_to_algop(struct repository *repo, const struct object_id *src,
|
||||
const struct git_hash_algo *to, struct object_id *dest)
|
||||
{
|
||||
/*
|
||||
* If the source algorithm is not set, then we're using the
|
||||
* default hash algorithm for that object.
|
||||
*/
|
||||
const struct git_hash_algo *from =
|
||||
src->algo ? &hash_algos[src->algo] : repo->hash_algo;
|
||||
|
||||
if (from == to) {
|
||||
if (src != dest)
|
||||
oidcpy(dest, src);
|
||||
return 0;
|
||||
}
|
||||
if (repo_loose_object_map_oid(repo, src, to, dest)) {
|
||||
/*
|
||||
* We may have loaded the object map at repo initialization but
|
||||
* another process (perhaps upstream of a pipe from us) may have
|
||||
* written a new object into the map. If the object is missing,
|
||||
* let's reload the map to see if the object has appeared.
|
||||
*/
|
||||
repo_read_loose_object_map(repo);
|
||||
if (repo_loose_object_map_oid(repo, src, to, dest))
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int decode_tree_entry_raw(struct object_id *oid, const char **path,
|
||||
size_t *len, const struct git_hash_algo *algo,
|
||||
const char *buf, unsigned long size)
|
||||
{
|
||||
uint16_t mode;
|
||||
const unsigned hashsz = algo->rawsz;
|
||||
|
||||
if (size < hashsz + 3 || buf[size - (hashsz + 1)]) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
*path = parse_mode(buf, &mode);
|
||||
if (!*path || !**path)
|
||||
return -1;
|
||||
*len = strlen(*path) + 1;
|
||||
|
||||
oidread_algop(oid, (const unsigned char *)*path + *len, algo);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int convert_tree_object(struct strbuf *out,
|
||||
const struct git_hash_algo *from,
|
||||
const struct git_hash_algo *to,
|
||||
const char *buffer, size_t size)
|
||||
{
|
||||
const char *p = buffer, *end = buffer + size;
|
||||
|
||||
while (p < end) {
|
||||
struct object_id entry_oid, mapped_oid;
|
||||
const char *path = NULL;
|
||||
size_t pathlen;
|
||||
|
||||
if (decode_tree_entry_raw(&entry_oid, &path, &pathlen, from, p,
|
||||
end - p))
|
||||
return error(_("failed to decode tree entry"));
|
||||
if (repo_oid_to_algop(the_repository, &entry_oid, to, &mapped_oid))
|
||||
return error(_("failed to map tree entry for %s"), oid_to_hex(&entry_oid));
|
||||
strbuf_add(out, p, path - p);
|
||||
strbuf_add(out, path, pathlen);
|
||||
strbuf_add(out, mapped_oid.hash, to->rawsz);
|
||||
p = path + pathlen + from->rawsz;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int convert_tag_object(struct strbuf *out,
|
||||
const struct git_hash_algo *from,
|
||||
const struct git_hash_algo *to,
|
||||
const char *buffer, size_t size)
|
||||
{
|
||||
struct strbuf payload = STRBUF_INIT, oursig = STRBUF_INIT, othersig = STRBUF_INIT;
|
||||
const int entry_len = from->hexsz + 7;
|
||||
size_t payload_size;
|
||||
struct object_id oid, mapped_oid;
|
||||
const char *p;
|
||||
|
||||
/* Consume the object line */
|
||||
if ((entry_len >= size) ||
|
||||
memcmp(buffer, "object ", 7) || buffer[entry_len] != '\n')
|
||||
return error("bogus tag object");
|
||||
if (parse_oid_hex_algop(buffer + 7, &oid, &p, from) < 0)
|
||||
return error("bad tag object ID");
|
||||
if (repo_oid_to_algop(the_repository, &oid, to, &mapped_oid))
|
||||
return error("unable to map tree %s in tag object",
|
||||
oid_to_hex(&oid));
|
||||
size -= ((p + 1) - buffer);
|
||||
buffer = p + 1;
|
||||
|
||||
/* Is there a signature for our algorithm? */
|
||||
payload_size = parse_signed_buffer(buffer, size);
|
||||
if (payload_size != size) {
|
||||
/* Yes, there is. */
|
||||
strbuf_add(&oursig, buffer + payload_size, size - payload_size);
|
||||
}
|
||||
|
||||
/* Now, is there a signature for the other algorithm? */
|
||||
parse_buffer_signed_by_header(buffer, payload_size, &payload, &othersig, to);
|
||||
/*
|
||||
* Our payload is now in payload and we may have up to two signatures
|
||||
* in oursig and othersig.
|
||||
*/
|
||||
|
||||
/* Add some slop for longer signature header in the new algorithm. */
|
||||
strbuf_grow(out, (7 + to->hexsz + 1) + size + 7);
|
||||
strbuf_addf(out, "object %s\n", oid_to_hex(&mapped_oid));
|
||||
strbuf_addbuf(out, &payload);
|
||||
if (oursig.len)
|
||||
add_header_signature(out, &oursig, from);
|
||||
strbuf_addbuf(out, &othersig);
|
||||
|
||||
strbuf_release(&payload);
|
||||
strbuf_release(&othersig);
|
||||
strbuf_release(&oursig);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int convert_commit_object(struct strbuf *out,
|
||||
const struct git_hash_algo *from,
|
||||
const struct git_hash_algo *to,
|
||||
const char *buffer, size_t size)
|
||||
{
|
||||
const char *tail = buffer;
|
||||
const char *bufptr = buffer;
|
||||
const int tree_entry_len = from->hexsz + 5;
|
||||
const int parent_entry_len = from->hexsz + 7;
|
||||
struct object_id oid, mapped_oid;
|
||||
const char *p, *eol;
|
||||
|
||||
tail += size;
|
||||
|
||||
while ((bufptr < tail) && (*bufptr != '\n')) {
|
||||
eol = memchr(bufptr, '\n', tail - bufptr);
|
||||
if (!eol)
|
||||
return error(_("bad %s in commit"), "line");
|
||||
|
||||
if (((bufptr + 5) < eol) && !memcmp(bufptr, "tree ", 5))
|
||||
{
|
||||
if (((bufptr + tree_entry_len) != eol) ||
|
||||
parse_oid_hex_algop(bufptr + 5, &oid, &p, from) ||
|
||||
(p != eol))
|
||||
return error(_("bad %s in commit"), "tree");
|
||||
|
||||
if (repo_oid_to_algop(the_repository, &oid, to, &mapped_oid))
|
||||
return error(_("unable to map %s %s in commit object"),
|
||||
"tree", oid_to_hex(&oid));
|
||||
strbuf_addf(out, "tree %s\n", oid_to_hex(&mapped_oid));
|
||||
}
|
||||
else if (((bufptr + 7) < eol) && !memcmp(bufptr, "parent ", 7))
|
||||
{
|
||||
if (((bufptr + parent_entry_len) != eol) ||
|
||||
parse_oid_hex_algop(bufptr + 7, &oid, &p, from) ||
|
||||
(p != eol))
|
||||
return error(_("bad %s in commit"), "parent");
|
||||
|
||||
if (repo_oid_to_algop(the_repository, &oid, to, &mapped_oid))
|
||||
return error(_("unable to map %s %s in commit object"),
|
||||
"parent", oid_to_hex(&oid));
|
||||
|
||||
strbuf_addf(out, "parent %s\n", oid_to_hex(&mapped_oid));
|
||||
}
|
||||
else if (((bufptr + 9) < eol) && !memcmp(bufptr, "mergetag ", 9))
|
||||
{
|
||||
struct strbuf tag = STRBUF_INIT, new_tag = STRBUF_INIT;
|
||||
|
||||
/* Recover the tag object from the mergetag */
|
||||
strbuf_add(&tag, bufptr + 9, (eol - (bufptr + 9)) + 1);
|
||||
|
||||
bufptr = eol + 1;
|
||||
while ((bufptr < tail) && (*bufptr == ' ')) {
|
||||
eol = memchr(bufptr, '\n', tail - bufptr);
|
||||
if (!eol) {
|
||||
strbuf_release(&tag);
|
||||
return error(_("bad %s in commit"), "mergetag continuation");
|
||||
}
|
||||
strbuf_add(&tag, bufptr + 1, (eol - (bufptr + 1)) + 1);
|
||||
bufptr = eol + 1;
|
||||
}
|
||||
|
||||
/* Compute the new tag object */
|
||||
if (convert_tag_object(&new_tag, from, to, tag.buf, tag.len)) {
|
||||
strbuf_release(&tag);
|
||||
strbuf_release(&new_tag);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/* Write the new mergetag */
|
||||
strbuf_addstr(out, "mergetag");
|
||||
strbuf_add_lines(out, " ", new_tag.buf, new_tag.len);
|
||||
strbuf_release(&tag);
|
||||
strbuf_release(&new_tag);
|
||||
}
|
||||
else if (((bufptr + 7) < tail) && !memcmp(bufptr, "author ", 7))
|
||||
strbuf_add(out, bufptr, (eol - bufptr) + 1);
|
||||
else if (((bufptr + 10) < tail) && !memcmp(bufptr, "committer ", 10))
|
||||
strbuf_add(out, bufptr, (eol - bufptr) + 1);
|
||||
else if (((bufptr + 9) < tail) && !memcmp(bufptr, "encoding ", 9))
|
||||
strbuf_add(out, bufptr, (eol - bufptr) + 1);
|
||||
else if (((bufptr + 6) < tail) && !memcmp(bufptr, "gpgsig", 6))
|
||||
strbuf_add(out, bufptr, (eol - bufptr) + 1);
|
||||
else {
|
||||
/* Unknown line: fail, since it might embed an oid */
|
||||
return -1;
|
||||
}
|
||||
/* Consume any trailing continuation lines */
|
||||
bufptr = eol + 1;
|
||||
while ((bufptr < tail) && (*bufptr == ' ')) {
|
||||
eol = memchr(bufptr, '\n', tail - bufptr);
|
||||
if (!eol)
|
||||
return error(_("bad %s in commit"), "continuation");
|
||||
strbuf_add(out, bufptr, (eol - bufptr) + 1);
|
||||
bufptr = eol + 1;
|
||||
}
|
||||
}
|
||||
if (bufptr < tail)
|
||||
strbuf_add(out, bufptr, tail - bufptr);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int convert_object_file(struct strbuf *outbuf,
|
||||
const struct git_hash_algo *from,
|
||||
const struct git_hash_algo *to,
|
||||
const void *buf, size_t len,
|
||||
enum object_type type,
|
||||
int gentle)
|
||||
{
|
||||
int ret;
|
||||
|
||||
/* Don't call this function when no conversion is necessary */
|
||||
if ((from == to) || (type == OBJ_BLOB))
|
||||
BUG("Refusing noop object file conversion");
|
||||
|
||||
switch (type) {
|
||||
case OBJ_COMMIT:
|
||||
ret = convert_commit_object(outbuf, from, to, buf, len);
|
||||
break;
|
||||
case OBJ_TREE:
|
||||
ret = convert_tree_object(outbuf, from, to, buf, len);
|
||||
break;
|
||||
case OBJ_TAG:
|
||||
ret = convert_tag_object(outbuf, from, to, buf, len);
|
||||
break;
|
||||
default:
|
||||
/* Not implemented yet, so fail. */
|
||||
ret = -1;
|
||||
break;
|
||||
}
|
||||
if (!ret)
|
||||
return 0;
|
||||
if (gentle) {
|
||||
strbuf_release(outbuf);
|
||||
return ret;
|
||||
}
|
||||
die(_("Failed to convert object from %s to %s"),
|
||||
from->name, to->name);
|
||||
}
object-file-convert.h (new file, 24 lines)

@@ -0,0 +1,24 @@
#ifndef OBJECT_CONVERT_H
#define OBJECT_CONVERT_H

struct repository;
struct object_id;
struct git_hash_algo;
struct strbuf;
#include "object.h"

int repo_oid_to_algop(struct repository *repo, const struct object_id *src,
                      const struct git_hash_algo *to, struct object_id *dest);

/*
 * Convert an object file from one hash algorithm to another algorithm.
 * Return -1 on failure, 0 on success.
 */
int convert_object_file(struct strbuf *outbuf,
                        const struct git_hash_algo *from,
                        const struct git_hash_algo *to,
                        const void *buf, size_t len,
                        enum object_type type,
                        int gentle);

#endif /* OBJECT_CONVERT_H */
|
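A minimal usage sketch of this API (hypothetical caller, not part of the patch; include paths are approximate): re-encode a commit buffer from the repository's storage algorithm into its configured compat algorithm.

#include "git-compat-util.h"
#include "object.h"
#include "object-file-convert.h"
#include "repository.h"
#include "strbuf.h"

/* Sketch only: re-encode a commit buffer into the compat algorithm. */
static int reencode_commit_for_compat(struct repository *repo,
                                      const void *buf, size_t len,
                                      struct strbuf *out)
{
    const struct git_hash_algo *from = repo->hash_algo;
    const struct git_hash_algo *to = repo->compat_hash_algo;

    if (!to)
        return 0; /* no extensions.compatObjectFormat configured */
    /* gentle=1: report failure via the return value instead of dying */
    return convert_object_file(out, from, to, buf, len, OBJ_COMMIT, 1);
}

The gentle flag mirrors how the callers below use it: readers that can recover pass 1 and get an error return, while write paths that must not proceed with a bad conversion pass 0 and let the function die. repo_oid_to_algop() covers the complementary case of translating a single object id between the two algorithms.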
object-file.c (212 lines changed)
|
@ -35,6 +35,8 @@
|
|||
#include "setup.h"
|
||||
#include "submodule.h"
|
||||
#include "fsck.h"
|
||||
#include "loose.h"
|
||||
#include "object-file-convert.h"
|
||||
|
||||
/* The maximum size for an object header. */
|
||||
#define MAX_HEADER_LEN 32
|
||||
|
@ -1084,9 +1086,11 @@ int check_object_signature(struct repository *r, const struct object_id *oid,
|
|||
void *buf, unsigned long size,
|
||||
enum object_type type)
|
||||
{
|
||||
const struct git_hash_algo *algo =
|
||||
oid->algo ? &hash_algos[oid->algo] : r->hash_algo;
|
||||
struct object_id real_oid;
|
||||
|
||||
hash_object_file(r->hash_algo, buf, size, type, &real_oid);
|
||||
hash_object_file(algo, buf, size, type, &real_oid);
|
||||
|
||||
return !oideq(oid, &real_oid) ? -1 : 0;
|
||||
}
|
||||
|
@ -1652,10 +1656,101 @@ static int do_oid_object_info_extended(struct repository *r,
|
|||
return 0;
|
||||
}
|
||||
|
||||
static int oid_object_info_convert(struct repository *r,
|
||||
const struct object_id *input_oid,
|
||||
struct object_info *input_oi, unsigned flags)
|
||||
{
|
||||
const struct git_hash_algo *input_algo = &hash_algos[input_oid->algo];
|
||||
int do_die = flags & OBJECT_INFO_DIE_IF_CORRUPT;
|
||||
struct strbuf type_name = STRBUF_INIT;
|
||||
struct object_id oid, delta_base_oid;
|
||||
struct object_info new_oi, *oi;
|
||||
unsigned long size;
|
||||
void *content;
|
||||
int ret;
|
||||
|
||||
if (repo_oid_to_algop(r, input_oid, the_hash_algo, &oid)) {
|
||||
if (do_die)
|
||||
die(_("missing mapping of %s to %s"),
|
||||
oid_to_hex(input_oid), the_hash_algo->name);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/* Is new_oi needed? */
|
||||
oi = input_oi;
|
||||
if (input_oi && (input_oi->delta_base_oid || input_oi->sizep ||
|
||||
input_oi->contentp)) {
|
||||
new_oi = *input_oi;
|
||||
/* Does delta_base_oid need to be converted? */
|
||||
if (input_oi->delta_base_oid)
|
||||
new_oi.delta_base_oid = &delta_base_oid;
|
||||
/* Will the attributes differ when converted? */
|
||||
if (input_oi->sizep || input_oi->contentp) {
|
||||
new_oi.contentp = &content;
|
||||
new_oi.sizep = &size;
|
||||
new_oi.type_name = &type_name;
|
||||
}
|
||||
oi = &new_oi;
|
||||
}
|
||||
|
||||
ret = oid_object_info_extended(r, &oid, oi, flags);
|
||||
if (ret)
|
||||
return -1;
|
||||
if (oi == input_oi)
|
||||
return ret;
|
||||
|
||||
if (new_oi.contentp) {
|
||||
struct strbuf outbuf = STRBUF_INIT;
|
||||
enum object_type type;
|
||||
|
||||
type = type_from_string_gently(type_name.buf, type_name.len,
|
||||
!do_die);
|
||||
if (type == -1)
|
||||
return -1;
|
||||
if (type != OBJ_BLOB) {
|
||||
ret = convert_object_file(&outbuf,
|
||||
the_hash_algo, input_algo,
|
||||
content, size, type, !do_die);
|
||||
if (ret == -1)
|
||||
return -1;
|
||||
free(content);
|
||||
size = outbuf.len;
|
||||
content = strbuf_detach(&outbuf, NULL);
|
||||
}
|
||||
if (input_oi->sizep)
|
||||
*input_oi->sizep = size;
|
||||
if (input_oi->contentp)
|
||||
*input_oi->contentp = content;
|
||||
else
|
||||
free(content);
|
||||
if (input_oi->type_name)
|
||||
*input_oi->type_name = type_name;
|
||||
else
|
||||
strbuf_release(&type_name);
|
||||
}
|
||||
if (new_oi.delta_base_oid == &delta_base_oid) {
|
||||
if (repo_oid_to_algop(r, &delta_base_oid, input_algo,
|
||||
input_oi->delta_base_oid)) {
|
||||
if (do_die)
|
||||
die(_("missing mapping of %s to %s"),
|
||||
oid_to_hex(&delta_base_oid),
|
||||
input_algo->name);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
input_oi->whence = new_oi.whence;
|
||||
input_oi->u = new_oi.u;
|
||||
return ret;
|
||||
}
|
||||
|
||||
int oid_object_info_extended(struct repository *r, const struct object_id *oid,
|
||||
struct object_info *oi, unsigned flags)
|
||||
{
|
||||
int ret;
|
||||
|
||||
if (oid->algo && (hash_algo_by_ptr(r->hash_algo) != oid->algo))
|
||||
return oid_object_info_convert(r, oid, oi, flags);
|
||||
|
||||
obj_read_lock();
|
||||
ret = do_oid_object_info_extended(r, oid, oi, flags);
|
||||
obj_read_unlock();
|
||||
|
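A short sketch of how a caller might exercise this compat path (hypothetical code, not from the patch; header names are approximate): when the incoming oid's algorithm differs from the repository's storage algorithm, oid_object_info_extended() routes through oid_object_info_convert() above.

#include "git-compat-util.h"
#include "hex.h"
#include "object-store-ll.h"
#include "repository.h"

/* Sketch: query an object's type and size through its compat (e.g. SHA-1) oid. */
static int peek_compat_object(struct repository *repo,
                              const struct object_id *compat_oid)
{
    struct object_info oi = OBJECT_INFO_INIT;
    unsigned long size;
    enum object_type type;

    oi.typep = &type;
    oi.sizep = &size;
    if (oid_object_info_extended(repo, compat_oid, &oi, 0) < 0)
        return error("no object for %s", oid_to_hex(compat_oid));
    /* size now reflects the re-encoded object when a compat oid was given */
    return 0;
}

Note that blobs are never re-encoded; for them only the oid mapping is consulted, since their content is identical under both algorithms.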
@ -1944,9 +2039,12 @@ static int start_loose_object_common(struct strbuf *tmp_file,
|
|||
const char *filename, unsigned flags,
|
||||
git_zstream *stream,
|
||||
unsigned char *buf, size_t buflen,
|
||||
git_hash_ctx *c,
|
||||
git_hash_ctx *c, git_hash_ctx *compat_c,
|
||||
char *hdr, int hdrlen)
|
||||
{
|
||||
struct repository *repo = the_repository;
|
||||
const struct git_hash_algo *algo = repo->hash_algo;
|
||||
const struct git_hash_algo *compat = repo->compat_hash_algo;
|
||||
int fd;
|
||||
|
||||
fd = create_tmpfile(tmp_file, filename);
|
||||
|
@ -1966,14 +2064,18 @@ static int start_loose_object_common(struct strbuf *tmp_file,
|
|||
git_deflate_init(stream, zlib_compression_level);
|
||||
stream->next_out = buf;
|
||||
stream->avail_out = buflen;
|
||||
the_hash_algo->init_fn(c);
|
||||
algo->init_fn(c);
|
||||
if (compat && compat_c)
|
||||
compat->init_fn(compat_c);
|
||||
|
||||
/* Start to feed header to zlib stream */
|
||||
stream->next_in = (unsigned char *)hdr;
|
||||
stream->avail_in = hdrlen;
|
||||
while (git_deflate(stream, 0) == Z_OK)
|
||||
; /* nothing */
|
||||
the_hash_algo->update_fn(c, hdr, hdrlen);
|
||||
algo->update_fn(c, hdr, hdrlen);
|
||||
if (compat && compat_c)
|
||||
compat->update_fn(compat_c, hdr, hdrlen);
|
||||
|
||||
return fd;
|
||||
}
|
||||
|
@ -1982,16 +2084,21 @@ static int start_loose_object_common(struct strbuf *tmp_file,
|
|||
* Common steps for the inner git_deflate() loop for writing loose
|
||||
* objects. Returns what git_deflate() returns.
|
||||
*/
|
||||
static int write_loose_object_common(git_hash_ctx *c,
|
||||
static int write_loose_object_common(git_hash_ctx *c, git_hash_ctx *compat_c,
|
||||
git_zstream *stream, const int flush,
|
||||
unsigned char *in0, const int fd,
|
||||
unsigned char *compressed,
|
||||
const size_t compressed_len)
|
||||
{
|
||||
struct repository *repo = the_repository;
|
||||
const struct git_hash_algo *algo = repo->hash_algo;
|
||||
const struct git_hash_algo *compat = repo->compat_hash_algo;
|
||||
int ret;
|
||||
|
||||
ret = git_deflate(stream, flush ? Z_FINISH : 0);
|
||||
the_hash_algo->update_fn(c, in0, stream->next_in - in0);
|
||||
algo->update_fn(c, in0, stream->next_in - in0);
|
||||
if (compat && compat_c)
|
||||
compat->update_fn(compat_c, in0, stream->next_in - in0);
|
||||
if (write_in_full(fd, compressed, stream->next_out - compressed) < 0)
|
||||
die_errno(_("unable to write loose object file"));
|
||||
stream->next_out = compressed;
|
||||
|
@ -2006,15 +2113,21 @@ static int write_loose_object_common(git_hash_ctx *c,
|
|||
* - End the compression of zlib stream.
|
||||
* - Get the calculated oid to "oid".
|
||||
*/
|
||||
static int end_loose_object_common(git_hash_ctx *c, git_zstream *stream,
|
||||
struct object_id *oid)
|
||||
static int end_loose_object_common(git_hash_ctx *c, git_hash_ctx *compat_c,
|
||||
git_zstream *stream, struct object_id *oid,
|
||||
struct object_id *compat_oid)
|
||||
{
|
||||
struct repository *repo = the_repository;
|
||||
const struct git_hash_algo *algo = repo->hash_algo;
|
||||
const struct git_hash_algo *compat = repo->compat_hash_algo;
|
||||
int ret;
|
||||
|
||||
ret = git_deflate_end_gently(stream);
|
||||
if (ret != Z_OK)
|
||||
return ret;
|
||||
the_hash_algo->final_oid_fn(oid, c);
|
||||
algo->final_oid_fn(oid, c);
|
||||
if (compat && compat_c)
|
||||
compat->final_oid_fn(compat_oid, compat_c);
|
||||
|
||||
return Z_OK;
|
||||
}
|
||||
|
@ -2038,7 +2151,7 @@ static int write_loose_object(const struct object_id *oid, char *hdr,
|
|||
|
||||
fd = start_loose_object_common(&tmp_file, filename.buf, flags,
|
||||
&stream, compressed, sizeof(compressed),
|
||||
&c, hdr, hdrlen);
|
||||
&c, NULL, hdr, hdrlen);
|
||||
if (fd < 0)
|
||||
return -1;
|
||||
|
||||
|
@ -2048,14 +2161,14 @@ static int write_loose_object(const struct object_id *oid, char *hdr,
|
|||
do {
|
||||
unsigned char *in0 = stream.next_in;
|
||||
|
||||
ret = write_loose_object_common(&c, &stream, 1, in0, fd,
|
||||
ret = write_loose_object_common(&c, NULL, &stream, 1, in0, fd,
|
||||
compressed, sizeof(compressed));
|
||||
} while (ret == Z_OK);
|
||||
|
||||
if (ret != Z_STREAM_END)
|
||||
die(_("unable to deflate new object %s (%d)"), oid_to_hex(oid),
|
||||
ret);
|
||||
ret = end_loose_object_common(&c, &stream, ¶no_oid);
|
||||
ret = end_loose_object_common(&c, NULL, &stream, ¶no_oid, NULL);
|
||||
if (ret != Z_OK)
|
||||
die(_("deflateEnd on object %s failed (%d)"), oid_to_hex(oid),
|
||||
ret);
|
||||
|
@ -2100,10 +2213,12 @@ static int freshen_packed_object(const struct object_id *oid)
|
|||
int stream_loose_object(struct input_stream *in_stream, size_t len,
|
||||
struct object_id *oid)
|
||||
{
|
||||
const struct git_hash_algo *compat = the_repository->compat_hash_algo;
|
||||
struct object_id compat_oid;
|
||||
int fd, ret, err = 0, flush = 0;
|
||||
unsigned char compressed[4096];
|
||||
git_zstream stream;
|
||||
git_hash_ctx c;
|
||||
git_hash_ctx c, compat_c;
|
||||
struct strbuf tmp_file = STRBUF_INIT;
|
||||
struct strbuf filename = STRBUF_INIT;
|
||||
int dirlen;
|
||||
|
@ -2127,7 +2242,7 @@ int stream_loose_object(struct input_stream *in_stream, size_t len,
|
|||
*/
|
||||
fd = start_loose_object_common(&tmp_file, filename.buf, 0,
|
||||
&stream, compressed, sizeof(compressed),
|
||||
&c, hdr, hdrlen);
|
||||
&c, &compat_c, hdr, hdrlen);
|
||||
if (fd < 0) {
|
||||
err = -1;
|
||||
goto cleanup;
|
||||
|
@ -2145,7 +2260,7 @@ int stream_loose_object(struct input_stream *in_stream, size_t len,
|
|||
if (in_stream->is_finished)
|
||||
flush = 1;
|
||||
}
|
||||
ret = write_loose_object_common(&c, &stream, flush, in0, fd,
|
||||
ret = write_loose_object_common(&c, &compat_c, &stream, flush, in0, fd,
|
||||
compressed, sizeof(compressed));
|
||||
/*
|
||||
* Unlike write_loose_object(), we do not have the entire
|
||||
|
@ -2168,7 +2283,7 @@ int stream_loose_object(struct input_stream *in_stream, size_t len,
|
|||
*/
|
||||
if (ret != Z_STREAM_END)
|
||||
die(_("unable to stream deflate new object (%d)"), ret);
|
||||
ret = end_loose_object_common(&c, &stream, oid);
|
||||
ret = end_loose_object_common(&c, &compat_c, &stream, oid, &compat_oid);
|
||||
if (ret != Z_OK)
|
||||
die(_("deflateEnd on stream object failed (%d)"), ret);
|
||||
close_loose_object(fd, tmp_file.buf);
|
||||
|
@ -2195,6 +2310,8 @@ int stream_loose_object(struct input_stream *in_stream, size_t len,
|
|||
}
|
||||
|
||||
err = finalize_object_file(tmp_file.buf, filename.buf);
|
||||
if (!err && compat)
|
||||
err = repo_add_loose_object_map(the_repository, oid, &compat_oid);
|
||||
cleanup:
|
||||
strbuf_release(&tmp_file);
|
||||
strbuf_release(&filename);
|
||||
|
@ -2203,19 +2320,42 @@ int stream_loose_object(struct input_stream *in_stream, size_t len,
|
|||
|
||||
int write_object_file_flags(const void *buf, unsigned long len,
|
||||
enum object_type type, struct object_id *oid,
|
||||
unsigned flags)
|
||||
struct object_id *compat_oid_in, unsigned flags)
|
||||
{
|
||||
struct repository *repo = the_repository;
|
||||
const struct git_hash_algo *algo = repo->hash_algo;
|
||||
const struct git_hash_algo *compat = repo->compat_hash_algo;
|
||||
struct object_id compat_oid;
|
||||
char hdr[MAX_HEADER_LEN];
|
||||
int hdrlen = sizeof(hdr);
|
||||
|
||||
/* Generate compat_oid */
|
||||
if (compat) {
|
||||
if (compat_oid_in)
|
||||
oidcpy(&compat_oid, compat_oid_in);
|
||||
else if (type == OBJ_BLOB)
|
||||
hash_object_file(compat, buf, len, type, &compat_oid);
|
||||
else {
|
||||
struct strbuf converted = STRBUF_INIT;
|
||||
convert_object_file(&converted, algo, compat,
|
||||
buf, len, type, 0);
|
||||
hash_object_file(compat, converted.buf, converted.len,
|
||||
type, &compat_oid);
|
||||
strbuf_release(&converted);
|
||||
}
|
||||
}
|
||||
|
||||
/* Normally if we have it in the pack then we do not bother writing
|
||||
* it out into .git/objects/??/?{38} file.
|
||||
*/
|
||||
write_object_file_prepare(the_hash_algo, buf, len, type, oid, hdr,
|
||||
&hdrlen);
|
||||
write_object_file_prepare(algo, buf, len, type, oid, hdr, &hdrlen);
|
||||
if (freshen_packed_object(oid) || freshen_loose_object(oid))
|
||||
return 0;
|
||||
return write_loose_object(oid, hdr, hdrlen, buf, len, 0, flags);
|
||||
if (write_loose_object(oid, hdr, hdrlen, buf, len, 0, flags))
|
||||
return -1;
|
||||
if (compat)
|
||||
return repo_add_loose_object_map(repo, oid, &compat_oid);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int write_object_file_literally(const void *buf, unsigned long len,
|
||||
|
@ -2223,7 +2363,27 @@ int write_object_file_literally(const void *buf, unsigned long len,
|
|||
unsigned flags)
|
||||
{
|
||||
char *header;
|
||||
struct repository *repo = the_repository;
|
||||
const struct git_hash_algo *algo = repo->hash_algo;
|
||||
const struct git_hash_algo *compat = repo->compat_hash_algo;
|
||||
struct object_id compat_oid;
|
||||
int hdrlen, status = 0;
|
||||
int compat_type = -1;
|
||||
|
||||
if (compat) {
|
||||
compat_type = type_from_string_gently(type, -1, 1);
|
||||
if (compat_type == OBJ_BLOB)
|
||||
hash_object_file(compat, buf, len, compat_type,
|
||||
&compat_oid);
|
||||
else if (compat_type != -1) {
|
||||
struct strbuf converted = STRBUF_INIT;
|
||||
convert_object_file(&converted, algo, compat,
|
||||
buf, len, compat_type, 0);
|
||||
hash_object_file(compat, converted.buf, converted.len,
|
||||
compat_type, &compat_oid);
|
||||
strbuf_release(&converted);
|
||||
}
|
||||
}
|
||||
|
||||
/* type string, SP, %lu of the length plus NUL must fit this */
|
||||
hdrlen = strlen(type) + MAX_HEADER_LEN;
|
||||
|
@ -2236,6 +2396,8 @@ int write_object_file_literally(const void *buf, unsigned long len,
|
|||
if (freshen_packed_object(oid) || freshen_loose_object(oid))
|
||||
goto cleanup;
|
||||
status = write_loose_object(oid, header, hdrlen, buf, len, 0, 0);
|
||||
if (compat_type != -1)
|
||||
return repo_add_loose_object_map(repo, oid, &compat_oid);
|
||||
|
||||
cleanup:
|
||||
free(header);
|
||||
|
@ -2244,9 +2406,12 @@ int write_object_file_literally(const void *buf, unsigned long len,
|
|||
|
||||
int force_object_loose(const struct object_id *oid, time_t mtime)
|
||||
{
|
||||
struct repository *repo = the_repository;
|
||||
const struct git_hash_algo *compat = repo->compat_hash_algo;
|
||||
void *buf;
|
||||
unsigned long len;
|
||||
struct object_info oi = OBJECT_INFO_INIT;
|
||||
struct object_id compat_oid;
|
||||
enum object_type type;
|
||||
char hdr[MAX_HEADER_LEN];
|
||||
int hdrlen;
|
||||
|
@ -2259,8 +2424,15 @@ int force_object_loose(const struct object_id *oid, time_t mtime)
|
|||
oi.contentp = &buf;
|
||||
if (oid_object_info_extended(the_repository, oid, &oi, 0))
|
||||
return error(_("cannot read object for %s"), oid_to_hex(oid));
|
||||
if (compat) {
|
||||
if (repo_oid_to_algop(repo, oid, compat, &compat_oid))
|
||||
return error(_("cannot map object %s to %s"),
|
||||
oid_to_hex(oid), compat->name);
|
||||
}
|
||||
hdrlen = format_object_header(hdr, sizeof(hdr), type, len);
|
||||
ret = write_loose_object(oid, hdr, hdrlen, buf, len, mtime, 0);
|
||||
if (!ret && compat)
|
||||
ret = repo_add_loose_object_map(the_repository, oid, &compat_oid);
|
||||
free(buf);
|
||||
|
||||
return ret;
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
#include "midx.h"
|
||||
#include "commit-reach.h"
|
||||
#include "date.h"
|
||||
#include "object-file-convert.h"
|
||||
|
||||
static int get_oid_oneline(struct repository *r, const char *, struct object_id *, struct commit_list *);
|
||||
|
||||
|
@ -47,6 +48,7 @@ struct disambiguate_state {
|
|||
|
||||
static void update_candidates(struct disambiguate_state *ds, const struct object_id *current)
|
||||
{
|
||||
/* The hash algorithm of current has already been filtered */
|
||||
if (ds->always_call_fn) {
|
||||
ds->ambiguous = ds->fn(ds->repo, current, ds->cb_data) ? 1 : 0;
|
||||
return;
|
||||
|
@ -132,6 +134,8 @@ static void unique_in_midx(struct multi_pack_index *m,
|
|||
{
|
||||
uint32_t num, i, first = 0;
|
||||
const struct object_id *current = NULL;
|
||||
int len = ds->len > ds->repo->hash_algo->hexsz ?
|
||||
ds->repo->hash_algo->hexsz : ds->len;
|
||||
num = m->num_objects;
|
||||
|
||||
if (!num)
|
||||
|
@ -147,7 +151,7 @@ static void unique_in_midx(struct multi_pack_index *m,
|
|||
for (i = first; i < num && !ds->ambiguous; i++) {
|
||||
struct object_id oid;
|
||||
current = nth_midxed_object_oid(&oid, m, i);
|
||||
if (!match_hash(ds->len, ds->bin_pfx.hash, current->hash))
|
||||
if (!match_hash(len, ds->bin_pfx.hash, current->hash))
|
||||
break;
|
||||
update_candidates(ds, current);
|
||||
}
|
||||
|
@ -157,6 +161,8 @@ static void unique_in_pack(struct packed_git *p,
|
|||
struct disambiguate_state *ds)
|
||||
{
|
||||
uint32_t num, i, first = 0;
|
||||
int len = ds->len > ds->repo->hash_algo->hexsz ?
|
||||
ds->repo->hash_algo->hexsz : ds->len;
|
||||
|
||||
if (p->multi_pack_index)
|
||||
return;
|
||||
|
@ -175,7 +181,7 @@ static void unique_in_pack(struct packed_git *p,
|
|||
for (i = first; i < num && !ds->ambiguous; i++) {
|
||||
struct object_id oid;
|
||||
nth_packed_object_id(&oid, p, i);
|
||||
if (!match_hash(ds->len, ds->bin_pfx.hash, oid.hash))
|
||||
if (!match_hash(len, ds->bin_pfx.hash, oid.hash))
|
||||
break;
|
||||
update_candidates(ds, &oid);
|
||||
}
|
||||
|
@ -186,6 +192,10 @@ static void find_short_packed_object(struct disambiguate_state *ds)
|
|||
struct multi_pack_index *m;
|
||||
struct packed_git *p;
|
||||
|
||||
/* Skip, unless oids from the storage hash algorithm are wanted */
|
||||
if (ds->bin_pfx.algo && (&hash_algos[ds->bin_pfx.algo] != ds->repo->hash_algo))
|
||||
return;
|
||||
|
||||
for (m = get_multi_pack_index(ds->repo); m && !ds->ambiguous;
|
||||
m = m->next)
|
||||
unique_in_midx(m, ds);
|
||||
|
@ -324,11 +334,12 @@ int set_disambiguate_hint_config(const char *var, const char *value)
|
|||
|
||||
static int init_object_disambiguation(struct repository *r,
|
||||
const char *name, int len,
|
||||
const struct git_hash_algo *algo,
|
||||
struct disambiguate_state *ds)
|
||||
{
|
||||
int i;
|
||||
|
||||
if (len < MINIMUM_ABBREV || len > the_hash_algo->hexsz)
|
||||
if (len < MINIMUM_ABBREV || len > GIT_MAX_HEXSZ)
|
||||
return -1;
|
||||
|
||||
memset(ds, 0, sizeof(*ds));
|
||||
|
@ -355,6 +366,7 @@ static int init_object_disambiguation(struct repository *r,
|
|||
ds->len = len;
|
||||
ds->hex_pfx[len] = '\0';
|
||||
ds->repo = r;
|
||||
ds->bin_pfx.algo = algo ? hash_algo_by_ptr(algo) : GIT_HASH_UNKNOWN;
|
||||
prepare_alt_odb(r);
|
||||
return 0;
|
||||
}
|
||||
|
@ -489,9 +501,10 @@ static int repo_collect_ambiguous(struct repository *r UNUSED,
|
|||
return collect_ambiguous(oid, data);
|
||||
}
|
||||
|
||||
static int sort_ambiguous(const void *a, const void *b, void *ctx)
|
||||
static int sort_ambiguous(const void *va, const void *vb, void *ctx)
|
||||
{
|
||||
struct repository *sort_ambiguous_repo = ctx;
|
||||
const struct object_id *a = va, *b = vb;
|
||||
int a_type = oid_object_info(sort_ambiguous_repo, a, NULL);
|
||||
int b_type = oid_object_info(sort_ambiguous_repo, b, NULL);
|
||||
int a_type_sort;
|
||||
|
@ -501,8 +514,12 @@ static int sort_ambiguous(const void *a, const void *b, void *ctx)
|
|||
* Sorts by hash within the same object type, just as
|
||||
* oid_array_for_each_unique() would do.
|
||||
*/
|
||||
if (a_type == b_type)
|
||||
return oidcmp(a, b);
|
||||
if (a_type == b_type) {
|
||||
if (a->algo == b->algo)
|
||||
return oidcmp(a, b);
|
||||
else
|
||||
return a->algo > b->algo ? 1 : -1;
|
||||
}
|
||||
|
||||
/*
|
||||
* Between object types show tags, then commits, and finally
|
||||
|
@ -531,8 +548,12 @@ static enum get_oid_result get_short_oid(struct repository *r,
|
|||
int status;
|
||||
struct disambiguate_state ds;
|
||||
int quietly = !!(flags & GET_OID_QUIETLY);
|
||||
const struct git_hash_algo *algo = r->hash_algo;
|
||||
|
||||
if (init_object_disambiguation(r, name, len, &ds) < 0)
|
||||
if (flags & GET_OID_HASH_ANY)
|
||||
algo = NULL;
|
||||
|
||||
if (init_object_disambiguation(r, name, len, algo, &ds) < 0)
|
||||
return -1;
|
||||
|
||||
if (HAS_MULTI_BITS(flags & GET_OID_DISAMBIGUATORS))
|
||||
|
@ -586,7 +607,7 @@ static enum get_oid_result get_short_oid(struct repository *r,
|
|||
if (!ds.ambiguous)
|
||||
ds.fn = NULL;
|
||||
|
||||
repo_for_each_abbrev(r, ds.hex_pfx, collect_ambiguous, &collect);
|
||||
repo_for_each_abbrev(r, ds.hex_pfx, algo, collect_ambiguous, &collect);
|
||||
sort_ambiguous_oid_array(r, &collect);
|
||||
|
||||
if (oid_array_for_each(&collect, show_ambiguous_object, &out))
|
||||
|
@ -608,13 +629,14 @@ static enum get_oid_result get_short_oid(struct repository *r,
|
|||
}
|
||||
|
||||
int repo_for_each_abbrev(struct repository *r, const char *prefix,
|
||||
const struct git_hash_algo *algo,
|
||||
each_abbrev_fn fn, void *cb_data)
|
||||
{
|
||||
struct oid_array collect = OID_ARRAY_INIT;
|
||||
struct disambiguate_state ds;
|
||||
int ret;
|
||||
|
||||
if (init_object_disambiguation(r, prefix, strlen(prefix), &ds) < 0)
|
||||
if (init_object_disambiguation(r, prefix, strlen(prefix), algo, &ds) < 0)
|
||||
return -1;
|
||||
|
||||
ds.always_call_fn = 1;
|
||||
|
@ -785,10 +807,12 @@ void strbuf_add_unique_abbrev(struct strbuf *sb, const struct object_id *oid,
|
|||
int repo_find_unique_abbrev_r(struct repository *r, char *hex,
|
||||
const struct object_id *oid, int len)
|
||||
{
|
||||
const struct git_hash_algo *algo =
|
||||
oid->algo ? &hash_algos[oid->algo] : r->hash_algo;
|
||||
struct disambiguate_state ds;
|
||||
struct min_abbrev_data mad;
|
||||
struct object_id oid_ret;
|
||||
const unsigned hexsz = r->hash_algo->hexsz;
|
||||
const unsigned hexsz = algo->hexsz;
|
||||
|
||||
if (len < 0) {
|
||||
unsigned long count = repo_approximate_object_count(r);
|
||||
|
@ -824,7 +848,7 @@ int repo_find_unique_abbrev_r(struct repository *r, char *hex,
|
|||
|
||||
find_abbrev_len_packed(&mad);
|
||||
|
||||
if (init_object_disambiguation(r, hex, mad.cur_len, &ds) < 0)
|
||||
if (init_object_disambiguation(r, hex, mad.cur_len, algo, &ds) < 0)
|
||||
return -1;
|
||||
|
||||
ds.fn = repo_extend_abbrev_len;
|
||||
|
|
|
@ -67,7 +67,8 @@ enum get_oid_result get_oid_with_context(struct repository *repo, const char *st
|
|||
|
||||
|
||||
typedef int each_abbrev_fn(const struct object_id *oid, void *);
|
||||
int repo_for_each_abbrev(struct repository *r, const char *prefix, each_abbrev_fn, void *);
|
||||
int repo_for_each_abbrev(struct repository *r, const char *prefix,
|
||||
const struct git_hash_algo *algo, each_abbrev_fn, void *);
|
||||
|
||||
int set_disambiguate_hint_config(const char *var, const char *value);
|
||||
|
||||
|
|
|
@ -26,6 +26,9 @@ struct object_directory {
|
|||
uint32_t loose_objects_subdir_seen[8]; /* 256 bits */
|
||||
struct oidtree *loose_objects_cache;
|
||||
|
||||
/* Map between object IDs for loose objects. */
|
||||
struct loose_object_map *loose_map;
|
||||
|
||||
/*
|
||||
* This is a temporary object store created by the tmp_objdir
|
||||
* facility. Disable ref updates since the objects in the store
|
||||
|
@ -252,11 +255,11 @@ void hash_object_file(const struct git_hash_algo *algo, const void *buf,
|
|||
|
||||
int write_object_file_flags(const void *buf, unsigned long len,
|
||||
enum object_type type, struct object_id *oid,
|
||||
unsigned flags);
|
||||
struct object_id *compat_oid_in, unsigned flags);
|
||||
static inline int write_object_file(const void *buf, unsigned long len,
|
||||
enum object_type type, struct object_id *oid)
|
||||
{
|
||||
return write_object_file_flags(buf, len, type, oid, 0);
|
||||
return write_object_file_flags(buf, len, type, oid, NULL, 0);
|
||||
}
|
||||
|
||||
int write_object_file_literally(const void *buf, unsigned long len,
|
||||
|
|
object.c (2 lines changed)
|
@ -13,6 +13,7 @@
|
|||
#include "alloc.h"
|
||||
#include "packfile.h"
|
||||
#include "commit-graph.h"
|
||||
#include "loose.h"
|
||||
|
||||
unsigned int get_max_object_index(void)
|
||||
{
|
||||
|
@ -553,6 +554,7 @@ void free_object_directory(struct object_directory *odb)
|
|||
{
|
||||
free(odb->path);
|
||||
odb_clear_loose_cache(odb);
|
||||
loose_object_map_clear(&odb->loose_map);
|
||||
free(odb);
|
||||
}
|
||||
|
||||
|
|
object.h (18 lines changed)

@@ -190,6 +190,24 @@ void *create_object(struct repository *r, const struct object_id *oid, void *obj
void *object_as_type(struct object *obj, enum object_type type, int quiet);


static inline const char *parse_mode(const char *str, uint16_t *modep)
{
    unsigned char c;
    unsigned int mode = 0;

    if (*str == ' ')
        return NULL;

    while ((c = *str++) != ' ') {
        if (c < '0' || c > '7')
            return NULL;
        mode = (mode << 3) + (c - '0');
    }
    *modep = mode;
    return str;
}

/*
 * Returns the object, having parsed it to find out what it is.
 *
|
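A small illustration of the parse_mode() helper added above (hypothetical snippet; the literal string stands in for a raw tree-entry prefix):

#include "git-compat-util.h"
#include "object.h"

/* Sketch: parse the octal mode at the front of a "100644 path" entry. */
static void demo_parse_mode(void)
{
    uint16_t mode;
    const char *path = parse_mode("100644 COPYING", &mode);

    if (!path)
        die("malformed mode");
    /* mode is now 0100644 (octal) and path points at "COPYING" */
    printf("%o %s\n", mode, path);
}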
oid-array.c (12 lines changed)
|
@ -6,12 +6,20 @@ void oid_array_append(struct oid_array *array, const struct object_id *oid)
|
|||
{
|
||||
ALLOC_GROW(array->oid, array->nr + 1, array->alloc);
|
||||
oidcpy(&array->oid[array->nr++], oid);
|
||||
if (!oid->algo)
|
||||
oid_set_algo(&array->oid[array->nr - 1], the_hash_algo);
|
||||
array->sorted = 0;
|
||||
}
|
||||
|
||||
static int void_hashcmp(const void *a, const void *b)
|
||||
static int void_hashcmp(const void *va, const void *vb)
|
||||
{
|
||||
return oidcmp(a, b);
|
||||
const struct object_id *a = va, *b = vb;
|
||||
int ret;
|
||||
if (a->algo == b->algo)
|
||||
ret = oidcmp(a, b);
|
||||
else
|
||||
ret = a->algo > b->algo ? 1 : -1;
|
||||
return ret;
|
||||
}
|
||||
|
||||
void oid_array_sort(struct oid_array *array)
|
||||
|
|
|
@ -370,7 +370,7 @@ static int fill_bitmap_tree(struct bitmap *bitmap,
|
|||
if (parse_tree(tree) < 0)
|
||||
die("unable to load tree object %s",
|
||||
oid_to_hex(&tree->object.oid));
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
|
||||
while (tree_entry(&desc, &entry)) {
|
||||
switch (object_type(entry.mode)) {
|
||||
|
|
|
@ -2249,7 +2249,8 @@ static int add_promisor_object(const struct object_id *oid,
|
|||
struct tree *tree = (struct tree *)obj;
|
||||
struct tree_desc desc;
|
||||
struct name_entry entry;
|
||||
if (init_tree_desc_gently(&desc, tree->buffer, tree->size, 0))
|
||||
if (init_tree_desc_gently(&desc, &tree->object.oid,
|
||||
tree->buffer, tree->size, 0))
|
||||
/*
|
||||
* Error messages are given when packs are
|
||||
* verified, so do not print any here.
|
||||
|
|
reflog.c (2 lines changed)
|
@ -39,7 +39,7 @@ static int tree_is_complete(const struct object_id *oid)
|
|||
tree->buffer = data;
|
||||
tree->size = size;
|
||||
}
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
complete = 1;
|
||||
while (tree_entry(&desc, &entry)) {
|
||||
if (!repo_has_object_file(the_repository, &entry.oid) ||
|
||||
|
|
repository.c (14 lines changed)
|
@ -14,6 +14,7 @@
|
|||
#include "read-cache-ll.h"
|
||||
#include "remote.h"
|
||||
#include "setup.h"
|
||||
#include "loose.h"
|
||||
#include "submodule-config.h"
|
||||
#include "sparse-index.h"
|
||||
#include "trace2.h"
|
||||
|
@ -104,6 +105,15 @@ void repo_set_hash_algo(struct repository *repo, int hash_algo)
|
|||
repo->hash_algo = &hash_algos[hash_algo];
|
||||
}
|
||||
|
||||
void repo_set_compat_hash_algo(struct repository *repo, int algo)
|
||||
{
|
||||
if (hash_algo_by_ptr(repo->hash_algo) == algo)
|
||||
BUG("hash_algo and compat_hash_algo match");
|
||||
repo->compat_hash_algo = algo ? &hash_algos[algo] : NULL;
|
||||
if (repo->compat_hash_algo)
|
||||
repo_read_loose_object_map(repo);
|
||||
}
|
||||
|
||||
void repo_set_ref_storage_format(struct repository *repo, unsigned int format)
|
||||
{
|
||||
repo->ref_storage_format = format;
|
||||
|
@ -189,6 +199,7 @@ int repo_init(struct repository *repo,
|
|||
goto error;
|
||||
|
||||
repo_set_hash_algo(repo, format.hash_algo);
|
||||
repo_set_compat_hash_algo(repo, format.compat_hash_algo);
|
||||
repo_set_ref_storage_format(repo, format.ref_storage_format);
|
||||
repo->repository_format_worktree_config = format.worktree_config;
|
||||
|
||||
|
@ -199,6 +210,9 @@ int repo_init(struct repository *repo,
|
|||
if (worktree)
|
||||
repo_set_worktree(repo, worktree);
|
||||
|
||||
if (repo->compat_hash_algo)
|
||||
repo_read_loose_object_map(repo);
|
||||
|
||||
clear_repository_format(&format);
|
||||
return 0;
|
||||
|
||||
|
|
|
@ -163,6 +163,9 @@ struct repository {
|
|||
/* Repository's current hash algorithm, as serialized on disk. */
|
||||
const struct git_hash_algo *hash_algo;
|
||||
|
||||
/* Repository's compatibility hash algorithm. */
|
||||
const struct git_hash_algo *compat_hash_algo;
|
||||
|
||||
/* Repository's reference storage format, as serialized on disk. */
|
||||
unsigned int ref_storage_format;
|
||||
|
||||
|
@ -205,6 +208,7 @@ void repo_set_gitdir(struct repository *repo, const char *root,
|
|||
const struct set_gitdir_args *extra_args);
|
||||
void repo_set_worktree(struct repository *repo, const char *path);
|
||||
void repo_set_hash_algo(struct repository *repo, int algo);
|
||||
void repo_set_compat_hash_algo(struct repository *repo, int compat_algo);
|
||||
void repo_set_ref_storage_format(struct repository *repo, unsigned int format);
|
||||
void initialize_the_repository(void);
|
||||
RESULT_MUST_BE_USED
|
||||
|
|
|
@ -81,7 +81,7 @@ static void mark_tree_contents_uninteresting(struct repository *r,
|
|||
if (parse_tree_gently(tree, 1) < 0)
|
||||
return;
|
||||
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
while (tree_entry(&desc, &entry)) {
|
||||
switch (object_type(entry.mode)) {
|
||||
case OBJ_TREE:
|
||||
|
@ -188,7 +188,7 @@ static void add_children_by_path(struct repository *r,
|
|||
if (parse_tree_gently(tree, 1) < 0)
|
||||
return;
|
||||
|
||||
init_tree_desc(&desc, tree->buffer, tree->size);
|
||||
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
|
||||
while (tree_entry(&desc, &entry)) {
|
||||
switch (object_type(entry.mode)) {
|
||||
case OBJ_TREE:
|
||||
|
|
setup.c (22 lines changed)
|
@ -591,6 +591,25 @@ static enum extension_result handle_extension(const char *var,
|
|||
"extensions.objectformat", value);
|
||||
data->hash_algo = format;
|
||||
return EXTENSION_OK;
|
||||
} else if (!strcmp(ext, "compatobjectformat")) {
|
||||
struct string_list_item *item;
|
||||
int format;
|
||||
|
||||
if (!value)
|
||||
return config_error_nonbool(var);
|
||||
format = hash_algo_by_name(value);
|
||||
if (format == GIT_HASH_UNKNOWN)
|
||||
return error(_("invalid value for '%s': '%s'"),
|
||||
"extensions.compatobjectformat", value);
|
||||
/* For now only support compatObjectFormat being specified once. */
|
||||
for_each_string_list_item(item, &data->v1_only_extensions) {
|
||||
if (!strcmp(item->string, "compatobjectformat"))
|
||||
return error(_("'%s' already specified as '%s'"),
|
||||
"extensions.compatobjectformat",
|
||||
hash_algos[data->compat_hash_algo].name);
|
||||
}
|
||||
data->compat_hash_algo = format;
|
||||
return EXTENSION_OK;
|
||||
} else if (!strcmp(ext, "refstorage")) {
|
||||
unsigned int format;
|
||||
|
||||
|
@ -1603,6 +1622,8 @@ const char *setup_git_directory_gently(int *nongit_ok)
|
|||
}
|
||||
if (startup_info->have_repository) {
|
||||
repo_set_hash_algo(the_repository, repo_fmt.hash_algo);
|
||||
repo_set_compat_hash_algo(the_repository,
|
||||
repo_fmt.compat_hash_algo);
|
||||
repo_set_ref_storage_format(the_repository,
|
||||
repo_fmt.ref_storage_format);
|
||||
the_repository->repository_format_worktree_config =
|
||||
|
@ -1698,6 +1719,7 @@ void check_repository_format(struct repository_format *fmt)
|
|||
check_repository_format_gently(get_git_dir(), fmt, NULL);
|
||||
startup_info->have_repository = 1;
|
||||
repo_set_hash_algo(the_repository, fmt->hash_algo);
|
||||
repo_set_compat_hash_algo(the_repository, fmt->compat_hash_algo);
|
||||
repo_set_ref_storage_format(the_repository,
|
||||
fmt->ref_storage_format);
|
||||
the_repository->repository_format_worktree_config =
|
||||
|
|
setup.h (1 line changed)
|
@ -115,6 +115,7 @@ struct repository_format {
|
|||
int worktree_config;
|
||||
int is_bare;
|
||||
int hash_algo;
|
||||
int compat_hash_algo;
|
||||
unsigned int ref_storage_format;
|
||||
int sparse_index;
|
||||
char *work_tree;
|
||||
|
|
t/helper/test-delete-gpgsig.c (new file, 62 lines)

@@ -0,0 +1,62 @@
#include "test-tool.h"
#include "gpg-interface.h"
#include "strbuf.h"


int cmd__delete_gpgsig(int argc, const char **argv)
{
    struct strbuf buf = STRBUF_INIT;
    const char *pattern = "gpgsig";
    const char *bufptr, *tail, *eol;
    int deleting = 0;
    size_t plen;

    if (argc >= 2) {
        pattern = argv[1];
        argv++;
        argc--;
    }

    plen = strlen(pattern);
    strbuf_read(&buf, 0, 0);

    if (!strcmp(pattern, "trailer")) {
        size_t payload_size = parse_signed_buffer(buf.buf, buf.len);
        fwrite(buf.buf, 1, payload_size, stdout);
        fflush(stdout);
        return 0;
    }

    bufptr = buf.buf;
    tail = bufptr + buf.len;

    while (bufptr < tail) {
        /* Find the end of the line */
        eol = memchr(bufptr, '\n', tail - bufptr);
        if (!eol)
            eol = tail;

        /* Drop continuation lines */
        if (deleting && (bufptr < eol) && (bufptr[0] == ' ')) {
            bufptr = eol + 1;
            continue;
        }
        deleting = 0;

        /* Does the line match the prefix? */
        if (((bufptr + plen) < eol) &&
            !memcmp(bufptr, pattern, plen) &&
            (bufptr[plen] == ' ')) {
            deleting = 1;
            bufptr = eol + 1;
            continue;
        }

        /* Print all other lines */
        fwrite(bufptr, 1, (eol - bufptr) + 1, stdout);
        bufptr = eol + 1;
    }
    fflush(stdout);

    return 0;
}
|
|
@ -20,6 +20,7 @@ static struct test_cmd cmds[] = {
|
|||
{ "crontab", cmd__crontab },
|
||||
{ "csprng", cmd__csprng },
|
||||
{ "date", cmd__date },
|
||||
{ "delete-gpgsig", cmd__delete_gpgsig },
|
||||
{ "delta", cmd__delta },
|
||||
{ "dir-iterator", cmd__dir_iterator },
|
||||
{ "drop-caches", cmd__drop_caches },
|
||||
|
|
|
@ -14,6 +14,7 @@ int cmd__crontab(int argc, const char **argv);
|
|||
int cmd__csprng(int argc, const char **argv);
|
||||
int cmd__date(int argc, const char **argv);
|
||||
int cmd__delta(int argc, const char **argv);
|
||||
int cmd__delete_gpgsig(int argc, const char **argv);
|
||||
int cmd__dir_iterator(int argc, const char **argv);
|
||||
int cmd__drop_caches(int argc, const char **argv);
|
||||
int cmd__dump_cache_tree(int argc, const char **argv);
|
||||
|
|
|
@ -112,65 +112,65 @@ strlen () {
|
|||
|
||||
run_tests () {
|
||||
type=$1
|
||||
sha1=$2
|
||||
oid=$2
|
||||
size=$3
|
||||
content=$4
|
||||
pretty_content=$5
|
||||
|
||||
batch_output="$sha1 $type $size
|
||||
batch_output="$oid $type $size
|
||||
$content"
|
||||
|
||||
test_expect_success "$type exists" '
|
||||
git cat-file -e $sha1
|
||||
git cat-file -e $oid
|
||||
'
|
||||
|
||||
test_expect_success "Type of $type is correct" '
|
||||
echo $type >expect &&
|
||||
git cat-file -t $sha1 >actual &&
|
||||
git cat-file -t $oid >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success "Size of $type is correct" '
|
||||
echo $size >expect &&
|
||||
git cat-file -s $sha1 >actual &&
|
||||
git cat-file -s $oid >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success "Type of $type is correct using --allow-unknown-type" '
|
||||
echo $type >expect &&
|
||||
git cat-file -t --allow-unknown-type $sha1 >actual &&
|
||||
git cat-file -t --allow-unknown-type $oid >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success "Size of $type is correct using --allow-unknown-type" '
|
||||
echo $size >expect &&
|
||||
git cat-file -s --allow-unknown-type $sha1 >actual &&
|
||||
git cat-file -s --allow-unknown-type $oid >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test -z "$content" ||
|
||||
test_expect_success "Content of $type is correct" '
|
||||
echo_without_newline "$content" >expect &&
|
||||
git cat-file $type $sha1 >actual &&
|
||||
git cat-file $type $oid >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success "Pretty content of $type is correct" '
|
||||
echo_without_newline "$pretty_content" >expect &&
|
||||
git cat-file -p $sha1 >actual &&
|
||||
git cat-file -p $oid >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test -z "$content" ||
|
||||
test_expect_success "--batch output of $type is correct" '
|
||||
echo "$batch_output" >expect &&
|
||||
echo $sha1 | git cat-file --batch >actual &&
|
||||
echo $oid | git cat-file --batch >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success "--batch-check output of $type is correct" '
|
||||
echo "$sha1 $type $size" >expect &&
|
||||
echo_without_newline $sha1 | git cat-file --batch-check >actual &&
|
||||
echo "$oid $type $size" >expect &&
|
||||
echo_without_newline $oid | git cat-file --batch-check >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
|
@ -179,33 +179,33 @@ $content"
|
|||
test -z "$content" ||
|
||||
test_expect_success "--batch-command $opt output of $type content is correct" '
|
||||
echo "$batch_output" >expect &&
|
||||
test_write_lines "contents $sha1" | git cat-file --batch-command $opt >actual &&
|
||||
test_write_lines "contents $oid" | git cat-file --batch-command $opt >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success "--batch-command $opt output of $type info is correct" '
|
||||
echo "$sha1 $type $size" >expect &&
|
||||
test_write_lines "info $sha1" |
|
||||
echo "$oid $type $size" >expect &&
|
||||
test_write_lines "info $oid" |
|
||||
git cat-file --batch-command $opt >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
done
|
||||
|
||||
test_expect_success "custom --batch-check format" '
|
||||
echo "$type $sha1" >expect &&
|
||||
echo $sha1 | git cat-file --batch-check="%(objecttype) %(objectname)" >actual &&
|
||||
echo "$type $oid" >expect &&
|
||||
echo $oid | git cat-file --batch-check="%(objecttype) %(objectname)" >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success "custom --batch-command format" '
|
||||
echo "$type $sha1" >expect &&
|
||||
echo "info $sha1" | git cat-file --batch-command="%(objecttype) %(objectname)" >actual &&
|
||||
echo "$type $oid" >expect &&
|
||||
echo "info $oid" | git cat-file --batch-command="%(objecttype) %(objectname)" >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
test_expect_success '--batch-check with %(rest)' '
|
||||
echo "$type this is some extra content" >expect &&
|
||||
echo "$sha1 this is some extra content" |
|
||||
echo "$oid this is some extra content" |
|
||||
git cat-file --batch-check="%(objecttype) %(rest)" >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
@ -216,7 +216,7 @@ $content"
|
|||
echo "$size" &&
|
||||
echo "$content"
|
||||
} >expect &&
|
||||
echo $sha1 | git cat-file --batch="%(objectsize)" >actual &&
|
||||
echo $oid | git cat-file --batch="%(objectsize)" >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
|
@ -226,114 +226,154 @@ $content"
|
|||
echo "$type" &&
|
||||
echo "$content"
|
||||
} >expect &&
|
||||
echo $sha1 | git cat-file --batch="%(objecttype)" >actual &&
|
||||
echo $oid | git cat-file --batch="%(objecttype)" >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
}
|
||||
|
||||
hello_content="Hello World"
|
||||
hello_size=$(strlen "$hello_content")
|
||||
hello_sha1=$(echo_without_newline "$hello_content" | git hash-object --stdin)
|
||||
hello_oid=$(echo_without_newline "$hello_content" | git hash-object --stdin)
|
||||
|
||||
test_expect_success "setup" '
|
||||
git config core.repositoryformatversion 1 &&
|
||||
git config extensions.objectformat $test_hash_algo &&
|
||||
git config extensions.compatobjectformat $test_compat_hash_algo &&
|
||||
echo_without_newline "$hello_content" > hello &&
|
||||
git update-index --add hello
|
||||
'
|
||||
|
||||
run_tests 'blob' $hello_sha1 $hello_size "$hello_content" "$hello_content"
|
||||
run_blob_tests () {
|
||||
oid=$1
|
||||
|
||||
test_expect_success '--batch-command --buffer with flush for blob info' '
|
||||
echo "$hello_sha1 blob $hello_size" >expect &&
|
||||
test_write_lines "info $hello_sha1" "flush" |
|
||||
run_tests 'blob' $oid $hello_size "$hello_content" "$hello_content"
|
||||
|
||||
test_expect_success '--batch-command --buffer with flush for blob info' '
|
||||
echo "$oid blob $hello_size" >expect &&
|
||||
test_write_lines "info $oid" "flush" |
|
||||
GIT_TEST_CAT_FILE_NO_FLUSH_ON_EXIT=1 \
|
||||
git cat-file --batch-command --buffer >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
'
|
||||
|
||||
test_expect_success '--batch-command --buffer without flush for blob info' '
|
||||
test_expect_success '--batch-command --buffer without flush for blob info' '
|
||||
touch output &&
|
||||
test_write_lines "info $hello_sha1" |
|
||||
test_write_lines "info $oid" |
|
||||
GIT_TEST_CAT_FILE_NO_FLUSH_ON_EXIT=1 \
|
||||
git cat-file --batch-command --buffer >>output &&
|
||||
test_must_be_empty output
|
||||
'
|
||||
'
|
||||
}
|
||||
|
||||
hello_compat_oid=$(git rev-parse --output-object-format=$test_compat_hash_algo $hello_oid)
|
||||
run_blob_tests $hello_oid
|
||||
run_blob_tests $hello_compat_oid
|
||||
|
||||
test_expect_success '--batch-check without %(rest) considers whole line' '
|
||||
echo "$hello_sha1 blob $hello_size" >expect &&
|
||||
git update-index --add --cacheinfo 100644 $hello_sha1 "white space" &&
|
||||
echo "$hello_oid blob $hello_size" >expect &&
|
||||
git update-index --add --cacheinfo 100644 $hello_oid "white space" &&
|
||||
test_when_finished "git update-index --remove \"white space\"" &&
|
||||
echo ":white space" | git cat-file --batch-check >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
tree_sha1=$(git write-tree)
|
||||
tree_oid=$(git write-tree)
|
||||
tree_compat_oid=$(git rev-parse --output-object-format=$test_compat_hash_algo $tree_oid)
|
||||
tree_size=$(($(test_oid rawsz) + 13))
|
||||
tree_pretty_content="100644 blob $hello_sha1 hello${LF}"
|
||||
tree_compat_size=$(($(test_oid --hash=compat rawsz) + 13))
|
||||
tree_pretty_content="100644 blob $hello_oid hello${LF}"
|
||||
tree_compat_pretty_content="100644 blob $hello_compat_oid hello${LF}"
|
||||
|
||||
run_tests 'tree' $tree_sha1 $tree_size "" "$tree_pretty_content"
|
||||
run_tests 'tree' $tree_oid $tree_size "" "$tree_pretty_content"
|
||||
run_tests 'tree' $tree_compat_oid $tree_compat_size "" "$tree_compat_pretty_content"
|
||||
|
||||
commit_message="Initial commit"
|
||||
commit_sha1=$(echo_without_newline "$commit_message" | git commit-tree $tree_sha1)
|
||||
commit_oid=$(echo_without_newline "$commit_message" | git commit-tree $tree_oid)
|
||||
commit_compat_oid=$(git rev-parse --output-object-format=$test_compat_hash_algo $commit_oid)
|
||||
commit_size=$(($(test_oid hexsz) + 137))
|
||||
commit_content="tree $tree_sha1
|
||||
commit_compat_size=$(($(test_oid --hash=compat hexsz) + 137))
|
||||
commit_content="tree $tree_oid
|
||||
author $GIT_AUTHOR_NAME <$GIT_AUTHOR_EMAIL> $GIT_AUTHOR_DATE
|
||||
committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
|
||||
|
||||
$commit_message"
|
||||
|
||||
run_tests 'commit' $commit_sha1 $commit_size "$commit_content" "$commit_content"
|
||||
commit_compat_content="tree $tree_compat_oid
|
||||
author $GIT_AUTHOR_NAME <$GIT_AUTHOR_EMAIL> $GIT_AUTHOR_DATE
|
||||
committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
|
||||
|
||||
tag_header_without_timestamp="object $hello_sha1
|
||||
type blob
|
||||
$commit_message"
|
||||
|
||||
run_tests 'commit' $commit_oid $commit_size "$commit_content" "$commit_content"
|
||||
run_tests 'commit' $commit_compat_oid $commit_compat_size "$commit_compat_content" "$commit_compat_content"
|
||||
|
||||
tag_header_without_oid="type blob
|
||||
tag hellotag
|
||||
tagger $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
|
||||
tag_header_without_timestamp="object $hello_oid
|
||||
$tag_header_without_oid"
|
||||
tag_compat_header_without_timestamp="object $hello_compat_oid
|
||||
$tag_header_without_oid"
|
||||
tag_description="This is a tag"
|
||||
tag_content="$tag_header_without_timestamp 0 +0000
|
||||
|
||||
$tag_description"
|
||||
tag_compat_content="$tag_compat_header_without_timestamp 0 +0000
|
||||
|
||||
tag_sha1=$(echo_without_newline "$tag_content" | git hash-object -t tag --stdin -w)
|
||||
$tag_description"
|
||||
|
||||
tag_oid=$(echo_without_newline "$tag_content" | git hash-object -t tag --stdin -w)
|
||||
tag_size=$(strlen "$tag_content")
|
||||
|
||||
run_tests 'tag' $tag_sha1 $tag_size "$tag_content" "$tag_content"
|
||||
tag_compat_oid=$(git rev-parse --output-object-format=$test_compat_hash_algo $tag_oid)
|
||||
tag_compat_size=$(strlen "$tag_compat_content")
|
||||
|
||||
run_tests 'tag' $tag_oid $tag_size "$tag_content" "$tag_content"
|
||||
run_tests 'tag' $tag_compat_oid $tag_compat_size "$tag_compat_content" "$tag_compat_content"
|
||||
|
||||
test_expect_success "Reach a blob from a tag pointing to it" '
|
||||
echo_without_newline "$hello_content" >expect &&
|
||||
git cat-file blob $tag_sha1 >actual &&
|
||||
git cat-file blob $tag_oid >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
||||
for batch in batch batch-check batch-command
|
||||
for oid in $hello_oid $hello_compat_oid
|
||||
do
|
||||
for opt in t s e p
|
||||
for batch in batch batch-check batch-command
|
||||
do
|
||||
for opt in t s e p
|
||||
do
|
||||
test_expect_success "Passing -$opt with --$batch fails" '
|
||||
test_must_fail git cat-file --$batch -$opt $hello_sha1
|
||||
test_must_fail git cat-file --$batch -$opt $oid
|
||||
'
|
||||
|
||||
test_expect_success "Passing --$batch with -$opt fails" '
|
||||
test_must_fail git cat-file -$opt --$batch $hello_sha1
|
||||
test_must_fail git cat-file -$opt --$batch $oid
|
||||
'
|
||||
done
|
||||
|
||||
test_expect_success "Passing <type> with --$batch fails" '
|
||||
test_must_fail git cat-file --$batch blob $oid
|
||||
'
|
||||
|
||||
test_expect_success "Passing --$batch with <type> fails" '
|
||||
test_must_fail git cat-file blob --$batch $oid
|
||||
'
|
||||
|
||||
test_expect_success "Passing oid with --$batch fails" '
|
||||
test_must_fail git cat-file --$batch $oid
|
||||
'
|
||||
done
|
||||
|
||||
test_expect_success "Passing <type> with --$batch fails" '
|
||||
test_must_fail git cat-file --$batch blob $hello_sha1
|
||||
'
|
||||
|
||||
test_expect_success "Passing --$batch with <type> fails" '
|
||||
test_must_fail git cat-file blob --$batch $hello_sha1
|
||||
'
|
||||
|
||||
test_expect_success "Passing sha1 with --$batch fails" '
|
||||
test_must_fail git cat-file --$batch $hello_sha1
|
||||
'
|
||||
done
|
||||
|
||||
for opt in t s e p
|
||||
for oid in $hello_oid $hello_compat_oid
|
||||
do
|
||||
test_expect_success "Passing -$opt with --follow-symlinks fails" '
|
||||
test_must_fail git cat-file --follow-symlinks -$opt $hello_sha1
|
||||
for opt in t s e p
|
||||
do
|
||||
test_expect_success "Passing -$opt with --follow-symlinks fails" '
|
||||
test_must_fail git cat-file --follow-symlinks -$opt $oid
|
||||
'
|
||||
done
|
||||
done
|
||||
|
||||
test_expect_success "--batch-check for a non-existent named object" '
|
||||
|
@ -360,12 +400,12 @@ test_expect_success "--batch-check for a non-existent hash" '
|
|||
|
||||
test_expect_success "--batch for an existent and a non-existent hash" '
|
||||
cat >expect <<-EOF &&
|
||||
$tag_sha1 tag $tag_size
|
||||
$tag_oid tag $tag_size
|
||||
$tag_content
|
||||
0000000000000000000000000000000000000000 missing
|
||||
EOF
|
||||
|
||||
printf "$tag_sha1\n0000000000000000000000000000000000000000" >in &&
|
||||
printf "$tag_oid\n0000000000000000000000000000000000000000" >in &&
|
||||
git cat-file --batch <in >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
|
@ -386,78 +426,160 @@ test_expect_success 'empty --batch-check notices missing object' '
|
|||
test_cmp expect actual
|
||||
'
|
||||
|
||||
batch_input="$hello_sha1
|
||||
$commit_sha1
|
||||
$tag_sha1
|
||||
batch_tests () {
|
||||
boid=$1
|
||||
loid=$2
|
||||
lsize=$3
|
||||
coid=$4
|
||||
csize=$5
|
||||
ccontent=$6
|
||||
toid=$7
|
||||
tsize=$8
|
||||
tcontent=$9
|
||||
|
||||
batch_input="$boid
|
||||
$coid
|
||||
$toid
|
||||
deadbeef
|
||||
|
||||
"
|
||||
|
||||
printf "%s\0" \
|
||||
"$hello_sha1 blob $hello_size" \
|
||||
printf "%s\0" \
|
||||
"$boid blob $hello_size" \
|
||||
"$hello_content" \
|
||||
"$commit_sha1 commit $commit_size" \
|
||||
"$commit_content" \
|
||||
"$tag_sha1 tag $tag_size" \
|
||||
"$tag_content" \
|
||||
"$coid commit $csize" \
|
||||
"$ccontent" \
|
||||
"$toid tag $tsize" \
|
||||
"$tcontent" \
|
||||
"deadbeef missing" \
|
||||
" missing" >batch_output
|
||||
|
||||
test_expect_success '--batch with multiple sha1s gives correct format' '
|
||||
test_expect_success '--batch with multiple oids gives correct format' '
|
||||
tr "\0" "\n" <batch_output >expect &&
|
||||
echo_without_newline "$batch_input" >in &&
|
||||
git cat-file --batch <in >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
'
|
||||
|
||||
test_expect_success '--batch, -z with multiple sha1s gives correct format' '
|
||||
test_expect_success '--batch, -z with multiple oids gives correct format' '
|
||||
echo_without_newline_nul "$batch_input" >in &&
|
||||
tr "\0" "\n" <batch_output >expect &&
|
||||
git cat-file --batch -z <in >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
'
|
||||
|
||||
test_expect_success '--batch, -Z with multiple sha1s gives correct format' '
|
||||
test_expect_success '--batch, -Z with multiple oids gives correct format' '
|
||||
echo_without_newline_nul "$batch_input" >in &&
|
||||
git cat-file --batch -Z <in >actual &&
|
||||
test_cmp batch_output actual
|
||||
'
|
||||
'
|
||||
|
||||
batch_check_input="$hello_sha1
|
||||
$tree_sha1
|
||||
$commit_sha1
|
||||
$tag_sha1
|
||||
batch_check_input="$boid
|
||||
$loid
|
||||
$coid
|
||||
$toid
|
||||
deadbeef
|
||||
|
||||
"
|
||||
|
||||
printf "%s\0" \
|
||||
"$hello_sha1 blob $hello_size" \
|
||||
"$tree_sha1 tree $tree_size" \
|
||||
"$commit_sha1 commit $commit_size" \
|
||||
"$tag_sha1 tag $tag_size" \
|
||||
printf "%s\0" \
|
||||
"$boid blob $hello_size" \
|
||||
"$loid tree $lsize" \
|
||||
"$coid commit $csize" \
|
||||
"$toid tag $tsize" \
|
||||
"deadbeef missing" \
|
||||
" missing" >batch_check_output
|
||||
|
||||
test_expect_success "--batch-check with multiple sha1s gives correct format" '
|
||||
test_expect_success "--batch-check with multiple oids gives correct format" '
|
||||
tr "\0" "\n" <batch_check_output >expect &&
|
||||
echo_without_newline "$batch_check_input" >in &&
|
||||
git cat-file --batch-check <in >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
'
|
||||
|
||||
test_expect_success "--batch-check, -z with multiple sha1s gives correct format" '
|
||||
test_expect_success "--batch-check, -z with multiple oids gives correct format" '
|
||||
tr "\0" "\n" <batch_check_output >expect &&
|
||||
echo_without_newline_nul "$batch_check_input" >in &&
|
||||
git cat-file --batch-check -z <in >actual &&
|
||||
test_cmp expect actual
|
||||
'
|
||||
'
|
||||
|
||||
test_expect_success "--batch-check, -Z with multiple sha1s gives correct format" '
|
||||
test_expect_success "--batch-check, -Z with multiple oids gives correct format" '
|
||||
echo_without_newline_nul "$batch_check_input" >in &&
|
||||
git cat-file --batch-check -Z <in >actual &&
|
||||
test_cmp batch_check_output actual
|
||||
'
|
||||
'
|
||||
|
||||
batch_command_multiple_info="info $boid
|
||||
info $loid
|
||||
info $coid
|
||||
info $toid
|
||||
info deadbeef"
|
||||
|
||||
test_expect_success '--batch-command with multiple info calls gives correct format' '
|
||||
cat >expect <<-EOF &&
|
||||
$boid blob $hello_size
|
||||
$loid tree $lsize
|
||||
$coid commit $csize
|
||||
$toid tag $tsize
|
||||
deadbeef missing
|
||||
EOF
|
||||
|
||||
echo "$batch_command_multiple_info" >in &&
|
||||
git cat-file --batch-command --buffer <in >actual &&
|
||||
|
||||
test_cmp expect actual &&
|
||||
|
||||
echo "$batch_command_multiple_info" | tr "\n" "\0" >in &&
|
||||
git cat-file --batch-command --buffer -z <in >actual &&
|
||||
|
||||
test_cmp expect actual &&
|
||||
|
||||
echo "$batch_command_multiple_info" | tr "\n" "\0" >in &&
|
||||
tr "\n" "\0" <expect >expect_nul &&
|
||||
git cat-file --batch-command --buffer -Z <in >actual &&
|
||||
|
||||
test_cmp expect_nul actual
|
||||
'
|
||||
|
||||
batch_command_multiple_contents="contents $boid
|
||||
contents $coid
|
||||
contents $toid
|
||||
contents deadbeef
|
||||
flush"
|
||||
|
||||
test_expect_success '--batch-command with multiple command calls gives correct format' '
|
||||
printf "%s\0" \
|
||||
"$boid blob $hello_size" \
|
||||
"$hello_content" \
|
||||
"$coid commit $csize" \
|
||||
"$ccontent" \
|
||||
"$toid tag $tsize" \
|
||||
"$tcontent" \
|
||||
"deadbeef missing" >expect_nul &&
|
||||
tr "\0" "\n" <expect_nul >expect &&
|
||||
|
||||
echo "$batch_command_multiple_contents" >in &&
|
||||
git cat-file --batch-command --buffer <in >actual &&
|
||||
|
||||
test_cmp expect actual &&
|
||||
|
||||
echo "$batch_command_multiple_contents" | tr "\n" "\0" >in &&
|
||||
git cat-file --batch-command --buffer -z <in >actual &&
|
||||
|
||||
test_cmp expect actual &&
|
||||
|
||||
echo "$batch_command_multiple_contents" | tr "\n" "\0" >in &&
|
||||
git cat-file --batch-command --buffer -Z <in >actual &&
|
||||
|
||||
test_cmp expect_nul actual
|
||||
'
|
||||
|
||||
}
|
||||
|
||||
batch_tests $hello_oid $tree_oid $tree_size $commit_oid $commit_size "$commit_content" $tag_oid $tag_size "$tag_content"
|
||||
batch_tests $hello_compat_oid $tree_compat_oid $tree_compat_size $commit_compat_oid $commit_compat_size "$commit_compat_content" $tag_compat_oid $tag_compat_size "$tag_compat_content"
|
||||
|
||||
|
||||
test_expect_success FUNNYNAMES 'setup with newline in input' '
|
||||
touch -- "newline${LF}embedded" &&
|
||||
|
@ -480,71 +602,6 @@ test_expect_success FUNNYNAMES '--batch-check, -Z with newline in input' '
test_cmp expect actual
'

batch_command_multiple_info="info $hello_sha1
info $tree_sha1
info $commit_sha1
info $tag_sha1
info deadbeef"

test_expect_success '--batch-command with multiple info calls gives correct format' '
cat >expect <<-EOF &&
$hello_sha1 blob $hello_size
$tree_sha1 tree $tree_size
$commit_sha1 commit $commit_size
$tag_sha1 tag $tag_size
deadbeef missing
EOF

echo "$batch_command_multiple_info" >in &&
git cat-file --batch-command --buffer <in >actual &&

test_cmp expect actual &&

echo "$batch_command_multiple_info" | tr "\n" "\0" >in &&
git cat-file --batch-command --buffer -z <in >actual &&

test_cmp expect actual &&

echo "$batch_command_multiple_info" | tr "\n" "\0" >in &&
tr "\n" "\0" <expect >expect_nul &&
git cat-file --batch-command --buffer -Z <in >actual &&

test_cmp expect_nul actual
'

batch_command_multiple_contents="contents $hello_sha1
contents $commit_sha1
contents $tag_sha1
contents deadbeef
flush"

test_expect_success '--batch-command with multiple command calls gives correct format' '
printf "%s\0" \
"$hello_sha1 blob $hello_size" \
"$hello_content" \
"$commit_sha1 commit $commit_size" \
"$commit_content" \
"$tag_sha1 tag $tag_size" \
"$tag_content" \
"deadbeef missing" >expect_nul &&
tr "\0" "\n" <expect_nul >expect &&

echo "$batch_command_multiple_contents" >in &&
git cat-file --batch-command --buffer <in >actual &&

test_cmp expect actual &&

echo "$batch_command_multiple_contents" | tr "\n" "\0" >in &&
git cat-file --batch-command --buffer -z <in >actual &&

test_cmp expect actual &&

echo "$batch_command_multiple_contents" | tr "\n" "\0" >in &&
git cat-file --batch-command --buffer -Z <in >actual &&

test_cmp expect_nul actual
'

test_expect_success 'setup blobs which are likely to delta' '
test-tool genrandom foo 10240 >foo &&
{ cat foo && echo plus; } >foo-plus &&

@ -569,7 +626,7 @@ test_expect_success 'confirm that neither loose blob is a delta' '
# we will check only that one of the two objects is a delta
# against the other, but not the order. We can do so by just
# asking for the base of both, and checking whether either
# sha1 appears in the output.
# oid appears in the output.
test_expect_success '%(deltabase) reports packed delta bases' '
git repack -ad &&
git cat-file --batch-check="%(deltabase)" <blobs >actual &&

@ -583,12 +640,12 @@ test_expect_success 'setup bogus data' '
bogus_short_type="bogus" &&
bogus_short_content="bogus" &&
bogus_short_size=$(strlen "$bogus_short_content") &&
bogus_short_sha1=$(echo_without_newline "$bogus_short_content" | git hash-object -t $bogus_short_type --literally -w --stdin) &&
bogus_short_oid=$(echo_without_newline "$bogus_short_content" | git hash-object -t $bogus_short_type --literally -w --stdin) &&

bogus_long_type="abcdefghijklmnopqrstuvwxyz1234679" &&
bogus_long_content="bogus" &&
bogus_long_size=$(strlen "$bogus_long_content") &&
bogus_long_sha1=$(echo_without_newline "$bogus_long_content" | git hash-object -t $bogus_long_type --literally -w --stdin)
bogus_long_oid=$(echo_without_newline "$bogus_long_content" | git hash-object -t $bogus_long_type --literally -w --stdin)
'

for arg1 in '' --allow-unknown-type

@ -608,9 +665,9 @@ do

if test "$arg1" = "--allow-unknown-type"
then
git cat-file $arg1 $arg2 $bogus_short_sha1
git cat-file $arg1 $arg2 $bogus_short_oid
else
test_must_fail git cat-file $arg1 $arg2 $bogus_short_sha1 >out 2>actual &&
test_must_fail git cat-file $arg1 $arg2 $bogus_short_oid >out 2>actual &&
test_must_be_empty out &&
test_cmp expect actual
fi

@ -620,21 +677,21 @@ do
if test "$arg2" = "-p"
then
cat >expect <<-EOF
error: header for $bogus_long_sha1 too long, exceeds 32 bytes
fatal: Not a valid object name $bogus_long_sha1
error: header for $bogus_long_oid too long, exceeds 32 bytes
fatal: Not a valid object name $bogus_long_oid
EOF
else
cat >expect <<-EOF
error: header for $bogus_long_sha1 too long, exceeds 32 bytes
error: header for $bogus_long_oid too long, exceeds 32 bytes
fatal: git cat-file: could not get object info
EOF
fi &&

if test "$arg1" = "--allow-unknown-type"
then
git cat-file $arg1 $arg2 $bogus_short_sha1
git cat-file $arg1 $arg2 $bogus_short_oid
else
test_must_fail git cat-file $arg1 $arg2 $bogus_long_sha1 >out 2>actual &&
test_must_fail git cat-file $arg1 $arg2 $bogus_long_oid >out 2>actual &&
test_must_be_empty out &&
test_cmp expect actual
fi

@ -668,28 +725,28 @@ do
done

test_expect_success '-e is OK with a broken object without --allow-unknown-type' '
git cat-file -e $bogus_short_sha1
git cat-file -e $bogus_short_oid
'

test_expect_success '-e can not be combined with --allow-unknown-type' '
test_expect_code 128 git cat-file -e --allow-unknown-type $bogus_short_sha1
test_expect_code 128 git cat-file -e --allow-unknown-type $bogus_short_oid
'

test_expect_success '-p cannot print a broken object even with --allow-unknown-type' '
test_must_fail git cat-file -p $bogus_short_sha1 &&
test_expect_code 128 git cat-file -p --allow-unknown-type $bogus_short_sha1
test_must_fail git cat-file -p $bogus_short_oid &&
test_expect_code 128 git cat-file -p --allow-unknown-type $bogus_short_oid
'

test_expect_success '<type> <hash> does not work with objects of broken types' '
cat >err.expect <<-\EOF &&
fatal: invalid object type "bogus"
EOF
test_must_fail git cat-file $bogus_short_type $bogus_short_sha1 2>err.actual &&
test_must_fail git cat-file $bogus_short_type $bogus_short_oid 2>err.actual &&
test_cmp err.expect err.actual
'

test_expect_success 'broken types combined with --batch and --batch-check' '
echo $bogus_short_sha1 >bogus-oid &&
echo $bogus_short_oid >bogus-oid &&

cat >err.expect <<-\EOF &&
fatal: invalid object type

@ -711,52 +768,52 @@ test_expect_success 'the --allow-unknown-type option does not consider replaceme
cat >expect <<-EOF &&
$bogus_short_type
EOF
git cat-file -t --allow-unknown-type $bogus_short_sha1 >actual &&
git cat-file -t --allow-unknown-type $bogus_short_oid >actual &&
test_cmp expect actual &&

# Create it manually, as "git replace" will die on bogus
# types.
head=$(git rev-parse --verify HEAD) &&
test_when_finished "test-tool ref-store main delete-refs 0 msg refs/replace/$bogus_short_sha1" &&
test-tool ref-store main update-ref msg "refs/replace/$bogus_short_sha1" $head $ZERO_OID REF_SKIP_OID_VERIFICATION &&
test_when_finished "test-tool ref-store main delete-refs 0 msg refs/replace/$bogus_short_oid" &&
test-tool ref-store main update-ref msg "refs/replace/$bogus_short_oid" $head $ZERO_OID REF_SKIP_OID_VERIFICATION &&

cat >expect <<-EOF &&
commit
EOF
git cat-file -t --allow-unknown-type $bogus_short_sha1 >actual &&
git cat-file -t --allow-unknown-type $bogus_short_oid >actual &&
test_cmp expect actual
'

test_expect_success "Type of broken object is correct" '
echo $bogus_short_type >expect &&
git cat-file -t --allow-unknown-type $bogus_short_sha1 >actual &&
git cat-file -t --allow-unknown-type $bogus_short_oid >actual &&
test_cmp expect actual
'

test_expect_success "Size of broken object is correct" '
echo $bogus_short_size >expect &&
git cat-file -s --allow-unknown-type $bogus_short_sha1 >actual &&
git cat-file -s --allow-unknown-type $bogus_short_oid >actual &&
test_cmp expect actual
'

test_expect_success 'clean up broken object' '
rm .git/objects/$(test_oid_to_path $bogus_short_sha1)
rm .git/objects/$(test_oid_to_path $bogus_short_oid)
'

test_expect_success "Type of broken object is correct when type is large" '
echo $bogus_long_type >expect &&
git cat-file -t --allow-unknown-type $bogus_long_sha1 >actual &&
git cat-file -t --allow-unknown-type $bogus_long_oid >actual &&
test_cmp expect actual
'

test_expect_success "Size of large broken object is correct when type is large" '
echo $bogus_long_size >expect &&
git cat-file -s --allow-unknown-type $bogus_long_sha1 >actual &&
git cat-file -s --allow-unknown-type $bogus_long_oid >actual &&
test_cmp expect actual
'

test_expect_success 'clean up broken object' '
rm .git/objects/$(test_oid_to_path $bogus_long_sha1)
rm .git/objects/$(test_oid_to_path $bogus_long_oid)
'

test_expect_success 'cat-file -t and -s on corrupt loose object' '

@ -853,7 +910,7 @@ test_expect_success 'prep for symlink tests' '
test_ln_s_add loop2 loop1 &&
git add morx dir/subdir/ind2 dir/ind1 &&
git commit -am "test" &&
echo $hello_sha1 blob $hello_size >found
echo $hello_oid blob $hello_size >found
'

test_expect_success 'git cat-file --batch-check --follow-symlinks works for non-links' '

@ -941,7 +998,7 @@ test_expect_success 'git cat-file --batch-check --follow-symlinks works for dir/
echo HEAD:dirlink/morx >>expect &&
echo HEAD:dirlink/morx | git cat-file --batch-check --follow-symlinks >actual &&
test_cmp expect actual &&
echo $hello_sha1 blob $hello_size >expect &&
echo $hello_oid blob $hello_size >expect &&
echo HEAD:dirlink/ind1 | git cat-file --batch-check --follow-symlinks >actual &&
test_cmp expect actual
'
281
t/t1016-compatObjectFormat.sh
Executable file
@ -0,0 +1,281 @@
#!/bin/sh
#
# Copyright (c) 2023 Eric Biederman
#

test_description='Test how well compatObjectFormat works'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh
. "$TEST_DIRECTORY"/lib-gpg.sh

# All of the follow variables must be defined in the environment:
# GIT_AUTHOR_NAME
# GIT_AUTHOR_EMAIL
# GIT_AUTHOR_DATE
# GIT_COMMITTER_NAME
# GIT_COMMITTER_EMAIL
# GIT_COMMITTER_DATE
#
# The test relies on these variables being set so that the two
# different commits in two different repositories encoded with two
# different hash functions result in the same content in the commits.
# This means that when the commit is translated between hash functions
# the commit is identical to the commit in the other repository.

compat_hash () {
case "$1" in
"sha1")
echo "sha256"
;;
"sha256")
echo "sha1"
;;
esac
}

hello_oid () {
case "$1" in
"sha1")
echo "$hello_sha1_oid"
;;
"sha256")
echo "$hello_sha256_oid"
;;
esac
}

tree_oid () {
case "$1" in
"sha1")
echo "$tree_sha1_oid"
;;
"sha256")
echo "$tree_sha256_oid"
;;
esac
}

commit_oid () {
case "$1" in
"sha1")
echo "$commit_sha1_oid"
;;
"sha256")
echo "$commit_sha256_oid"
;;
esac
}

commit2_oid () {
case "$1" in
"sha1")
echo "$commit2_sha1_oid"
;;
"sha256")
echo "$commit2_sha256_oid"
;;
esac
}

del_sigcommit () {
local delete=$1

if test "$delete" = "sha256" ; then
local pattern="gpgsig-sha256"
else
local pattern="gpgsig"
fi
test-tool delete-gpgsig "$pattern"
}


del_sigtag () {
local storage=$1
local delete=$2

if test "$storage" = "$delete" ; then
local pattern="trailer"
elif test "$storage" = "sha256" ; then
local pattern="gpgsig"
else
local pattern="gpgsig-sha256"
fi
test-tool delete-gpgsig "$pattern"
}

base=$(pwd)
for hash in sha1 sha256
do
cd "$base"
mkdir -p repo-$hash
cd repo-$hash

test_expect_success "setup $hash repository" '
git init --object-format=$hash &&
git config core.repositoryformatversion 1 &&
git config extensions.objectformat $hash &&
git config extensions.compatobjectformat $(compat_hash $hash) &&
git config gpg.program $TEST_DIRECTORY/t1016/gpg &&
echo "Hellow World!" > hello &&
eval hello_${hash}_oid=$(git hash-object hello) &&
git update-index --add hello &&
git commit -m "Initial commit" &&
eval commit_${hash}_oid=$(git rev-parse HEAD) &&
eval tree_${hash}_oid=$(git rev-parse HEAD^{tree})
'
test_expect_success "create a $hash tagged blob" '
git tag --no-sign -m "This is a tag" hellotag $(hello_oid $hash) &&
eval hellotag_${hash}_oid=$(git rev-parse hellotag)
'
test_expect_success "create a $hash tagged tree" '
git tag --no-sign -m "This is a tag" treetag $(tree_oid $hash) &&
eval treetag_${hash}_oid=$(git rev-parse treetag)
'
test_expect_success "create a $hash tagged commit" '
git tag --no-sign -m "This is a tag" committag $(commit_oid $hash) &&
eval committag_${hash}_oid=$(git rev-parse committag)
'
test_expect_success GPG2 "create a $hash signed commit" '
git commit --gpg-sign --allow-empty -m "This is a signed commit" &&
eval signedcommit_${hash}_oid=$(git rev-parse HEAD)
'
test_expect_success GPG2 "create a $hash signed tag" '
git tag -s -m "This is a signed tag" signedtag HEAD &&
eval signedtag_${hash}_oid=$(git rev-parse signedtag)
'
test_expect_success "create a $hash branch" '
git checkout -b branch $(commit_oid $hash) &&
echo "More more more give me more!" > more &&
eval more_${hash}_oid=$(git hash-object more) &&
echo "Another and another and another" > another &&
eval another_${hash}_oid=$(git hash-object another) &&
git update-index --add more another &&
git commit -m "Add more files!" &&
eval commit2_${hash}_oid=$(git rev-parse HEAD) &&
eval tree2_${hash}_oid=$(git rev-parse HEAD^{tree})
'
test_expect_success GPG2 "create another $hash signed tag" '
git tag -s -m "This is another signed tag" signedtag2 $(commit2_oid $hash) &&
eval signedtag2_${hash}_oid=$(git rev-parse signedtag2)
'
test_expect_success GPG2 "merge the $hash branches together" '
git merge -S -m "merge some signed tags together" signedtag signedtag2 &&
eval signedcommit2_${hash}_oid=$(git rev-parse HEAD)
'
test_expect_success GPG2 "create additional $hash signed commits" '
git commit --gpg-sign --allow-empty -m "This is an additional signed commit" &&
git cat-file commit HEAD | del_sigcommit sha256 > "../${hash}_signedcommit3" &&
git cat-file commit HEAD | del_sigcommit sha1 > "../${hash}_signedcommit4" &&
eval signedcommit3_${hash}_oid=$(git hash-object -t commit -w ../${hash}_signedcommit3) &&
eval signedcommit4_${hash}_oid=$(git hash-object -t commit -w ../${hash}_signedcommit4)
'
test_expect_success GPG2 "create additional $hash signed tags" '
git tag -s -m "This is an additional signed tag" signedtag34 HEAD &&
git cat-file tag signedtag34 | del_sigtag "${hash}" sha256 > ../${hash}_signedtag3 &&
git cat-file tag signedtag34 | del_sigtag "${hash}" sha1 > ../${hash}_signedtag4 &&
eval signedtag3_${hash}_oid=$(git hash-object -t tag -w ../${hash}_signedtag3) &&
eval signedtag4_${hash}_oid=$(git hash-object -t tag -w ../${hash}_signedtag4)
'
done
cd "$base"

compare_oids () {
test "$#" = 5 && { local PREREQ=$1; shift; } || PREREQ=
local type="$1"
local name="$2"
local sha1_oid="$3"
local sha256_oid="$4"

echo ${sha1_oid} > ${name}_sha1_expected
echo ${sha256_oid} > ${name}_sha256_expected
echo ${type} > ${name}_type_expected

git --git-dir=repo-sha1/.git rev-parse --output-object-format=sha256 ${sha1_oid} > ${name}_sha1_sha256_found
git --git-dir=repo-sha256/.git rev-parse --output-object-format=sha1 ${sha256_oid} > ${name}_sha256_sha1_found
local sha1_sha256_oid=$(cat ${name}_sha1_sha256_found)
local sha256_sha1_oid=$(cat ${name}_sha256_sha1_found)

test_expect_success $PREREQ "Verify ${type} ${name}'s sha1 oid" '
git --git-dir=repo-sha256/.git rev-parse --output-object-format=sha1 ${sha256_oid} > ${name}_sha1 &&
test_cmp ${name}_sha1 ${name}_sha1_expected
'

test_expect_success $PREREQ "Verify ${type} ${name}'s sha256 oid" '
git --git-dir=repo-sha1/.git rev-parse --output-object-format=sha256 ${sha1_oid} > ${name}_sha256 &&
test_cmp ${name}_sha256 ${name}_sha256_expected
'

test_expect_success $PREREQ "Verify ${name}'s sha1 type" '
git --git-dir=repo-sha1/.git cat-file -t ${sha1_oid} > ${name}_type1 &&
git --git-dir=repo-sha256/.git cat-file -t ${sha256_sha1_oid} > ${name}_type2 &&
test_cmp ${name}_type1 ${name}_type2 &&
test_cmp ${name}_type1 ${name}_type_expected
'

test_expect_success $PREREQ "Verify ${name}'s sha256 type" '
git --git-dir=repo-sha256/.git cat-file -t ${sha256_oid} > ${name}_type3 &&
git --git-dir=repo-sha1/.git cat-file -t ${sha1_sha256_oid} > ${name}_type4 &&
test_cmp ${name}_type3 ${name}_type4 &&
test_cmp ${name}_type3 ${name}_type_expected
'

test_expect_success $PREREQ "Verify ${name}'s sha1 size" '
git --git-dir=repo-sha1/.git cat-file -s ${sha1_oid} > ${name}_size1 &&
git --git-dir=repo-sha256/.git cat-file -s ${sha256_sha1_oid} > ${name}_size2 &&
test_cmp ${name}_size1 ${name}_size2
'

test_expect_success $PREREQ "Verify ${name}'s sha256 size" '
git --git-dir=repo-sha256/.git cat-file -s ${sha256_oid} > ${name}_size3 &&
git --git-dir=repo-sha1/.git cat-file -s ${sha1_sha256_oid} > ${name}_size4 &&
test_cmp ${name}_size3 ${name}_size4
'

test_expect_success $PREREQ "Verify ${name}'s sha1 pretty content" '
git --git-dir=repo-sha1/.git cat-file -p ${sha1_oid} > ${name}_content1 &&
git --git-dir=repo-sha256/.git cat-file -p ${sha256_sha1_oid} > ${name}_content2 &&
test_cmp ${name}_content1 ${name}_content2
'

test_expect_success $PREREQ "Verify ${name}'s sha256 pretty content" '
git --git-dir=repo-sha256/.git cat-file -p ${sha256_oid} > ${name}_content3 &&
git --git-dir=repo-sha1/.git cat-file -p ${sha1_sha256_oid} > ${name}_content4 &&
test_cmp ${name}_content3 ${name}_content4
'

test_expect_success $PREREQ "Verify ${name}'s sha1 content" '
git --git-dir=repo-sha1/.git cat-file ${type} ${sha1_oid} > ${name}_content5 &&
git --git-dir=repo-sha256/.git cat-file ${type} ${sha256_sha1_oid} > ${name}_content6 &&
test_cmp ${name}_content5 ${name}_content6
'

test_expect_success $PREREQ "Verify ${name}'s sha256 content" '
git --git-dir=repo-sha256/.git cat-file ${type} ${sha256_oid} > ${name}_content7 &&
git --git-dir=repo-sha1/.git cat-file ${type} ${sha1_sha256_oid} > ${name}_content8 &&
test_cmp ${name}_content7 ${name}_content8
'

}

compare_oids 'blob' hello "$hello_sha1_oid" "$hello_sha256_oid"
compare_oids 'tree' tree "$tree_sha1_oid" "$tree_sha256_oid"
compare_oids 'commit' commit "$commit_sha1_oid" "$commit_sha256_oid"
compare_oids GPG2 'commit' signedcommit "$signedcommit_sha1_oid" "$signedcommit_sha256_oid"
compare_oids 'tag' hellotag "$hellotag_sha1_oid" "$hellotag_sha256_oid"
compare_oids 'tag' treetag "$treetag_sha1_oid" "$treetag_sha256_oid"
compare_oids 'tag' committag "$committag_sha1_oid" "$committag_sha256_oid"
compare_oids GPG2 'tag' signedtag "$signedtag_sha1_oid" "$signedtag_sha256_oid"

compare_oids 'blob' more "$more_sha1_oid" "$more_sha256_oid"
compare_oids 'blob' another "$another_sha1_oid" "$another_sha256_oid"
compare_oids 'tree' tree2 "$tree2_sha1_oid" "$tree2_sha256_oid"
compare_oids 'commit' commit2 "$commit2_sha1_oid" "$commit2_sha256_oid"
compare_oids GPG2 'tag' signedtag2 "$signedtag2_sha1_oid" "$signedtag2_sha256_oid"
compare_oids GPG2 'commit' signedcommit2 "$signedcommit2_sha1_oid" "$signedcommit2_sha256_oid"
compare_oids GPG2 'commit' signedcommit3 "$signedcommit3_sha1_oid" "$signedcommit3_sha256_oid"
compare_oids GPG2 'commit' signedcommit4 "$signedcommit4_sha1_oid" "$signedcommit4_sha256_oid"
compare_oids GPG2 'tag' signedtag3 "$signedtag3_sha1_oid" "$signedtag3_sha256_oid"
compare_oids GPG2 'tag' signedtag4 "$signedtag4_sha1_oid" "$signedtag4_sha256_oid"

test_done
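The compare_oids checks above exercise the heart of the interoperability claim: once a repository records both an objectFormat and a compatObjectFormat, the same object can be named by an oid in either algorithm. A minimal sketch of that round trip, assuming a git built with this series; the demo directory and greeting file are illustrative and not part of the test above:

# compatObjectFormat is set immediately after init, before any objects
# are written, mirroring the setup step in the test above
git init --object-format=sha256 demo &&
cd demo &&
git config extensions.compatObjectFormat sha1 &&
echo hello >greeting &&
git add greeting &&
git commit -m "greeting" &&
sha256_oid=$(git rev-parse HEAD) &&
sha1_oid=$(git rev-parse --output-object-format=sha1 "$sha256_oid") &&
git cat-file -p "$sha1_oid"	# same commit content, named by its sha1 oid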
2
t/t1016/gpg
Executable file
@ -0,0 +1,2 @@
#!/bin/sh
exec gpg --faked-system-time "20230918T154812" "$@"
@ -1655,7 +1655,16 @@ test_set_hash () {

# Detect the hash algorithm in use.
test_detect_hash () {
test_hash_algo="${GIT_TEST_DEFAULT_HASH:-sha1}"
case "$GIT_TEST_DEFAULT_HASH" in
"sha256")
test_hash_algo=sha256
test_compat_hash_algo=sha1
;;
*)
test_hash_algo=sha1
test_compat_hash_algo=sha256
;;
esac
}

# Detect the hash algorithm in use.

@ -1712,6 +1721,12 @@ test_oid () {
local algo="${test_hash_algo}" &&

case "$1" in
--hash=storage)
algo="$test_hash_algo" &&
shift;;
--hash=compat)
algo="$test_compat_hash_algo" &&
shift;;
--hash=*)
algo="${1#--hash=}" &&
shift;;
58
tree-walk.c
@ -11,35 +11,19 @@
#include "json-writer.h"
|
||||
#include "environment.h"
|
||||
|
||||
static const char *get_mode(const char *str, unsigned int *modep)
|
||||
{
|
||||
unsigned char c;
|
||||
unsigned int mode = 0;
|
||||
|
||||
if (*str == ' ')
|
||||
return NULL;
|
||||
|
||||
while ((c = *str++) != ' ') {
|
||||
if (c < '0' || c > '7')
|
||||
return NULL;
|
||||
mode = (mode << 3) + (c - '0');
|
||||
}
|
||||
*modep = mode;
|
||||
return str;
|
||||
}
|
||||
|
||||
static int decode_tree_entry(struct tree_desc *desc, const char *buf, unsigned long size, struct strbuf *err)
|
||||
{
|
||||
const char *path;
|
||||
unsigned int mode, len;
|
||||
const unsigned hashsz = the_hash_algo->rawsz;
|
||||
unsigned int len;
|
||||
uint16_t mode;
|
||||
const unsigned hashsz = desc->algo->rawsz;
|
||||
|
||||
if (size < hashsz + 3 || buf[size - (hashsz + 1)]) {
|
||||
strbuf_addstr(err, _("too-short tree object"));
|
||||
return -1;
|
||||
}
|
||||
|
||||
path = get_mode(buf, &mode);
|
||||
path = parse_mode(buf, &mode);
|
||||
if (!path) {
|
||||
strbuf_addstr(err, _("malformed mode in tree entry"));
|
||||
return -1;
|
||||
|
@ -54,15 +38,19 @@ static int decode_tree_entry(struct tree_desc *desc, const char *buf, unsigned l
|
|||
desc->entry.path = path;
|
||||
desc->entry.mode = (desc->flags & TREE_DESC_RAW_MODES) ? mode : canon_mode(mode);
|
||||
desc->entry.pathlen = len - 1;
|
||||
oidread(&desc->entry.oid, (const unsigned char *)path + len);
|
||||
oidread_algop(&desc->entry.oid, (const unsigned char *)path + len,
|
||||
desc->algo);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int init_tree_desc_internal(struct tree_desc *desc, const void *buffer,
|
||||
unsigned long size, struct strbuf *err,
|
||||
static int init_tree_desc_internal(struct tree_desc *desc,
|
||||
const struct object_id *oid,
|
||||
const void *buffer, unsigned long size,
|
||||
struct strbuf *err,
|
||||
enum tree_desc_flags flags)
|
||||
{
|
||||
desc->algo = (oid && oid->algo) ? &hash_algos[oid->algo] : the_hash_algo;
|
||||
desc->buffer = buffer;
|
||||
desc->size = size;
|
||||
desc->flags = flags;
|
||||
|
@ -71,19 +59,21 @@ static int init_tree_desc_internal(struct tree_desc *desc, const void *buffer,
|
|||
return 0;
|
||||
}
|
||||
|
||||
void init_tree_desc(struct tree_desc *desc, const void *buffer, unsigned long size)
|
||||
void init_tree_desc(struct tree_desc *desc, const struct object_id *tree_oid,
|
||||
const void *buffer, unsigned long size)
|
||||
{
|
||||
struct strbuf err = STRBUF_INIT;
|
||||
if (init_tree_desc_internal(desc, buffer, size, &err, 0))
|
||||
if (init_tree_desc_internal(desc, tree_oid, buffer, size, &err, 0))
|
||||
die("%s", err.buf);
|
||||
strbuf_release(&err);
|
||||
}
|
||||
|
||||
int init_tree_desc_gently(struct tree_desc *desc, const void *buffer, unsigned long size,
|
||||
int init_tree_desc_gently(struct tree_desc *desc, const struct object_id *oid,
|
||||
const void *buffer, unsigned long size,
|
||||
enum tree_desc_flags flags)
|
||||
{
|
||||
struct strbuf err = STRBUF_INIT;
|
||||
int result = init_tree_desc_internal(desc, buffer, size, &err, flags);
|
||||
int result = init_tree_desc_internal(desc, oid, buffer, size, &err, flags);
|
||||
if (result)
|
||||
error("%s", err.buf);
|
||||
strbuf_release(&err);
|
||||
|
@ -102,7 +92,7 @@ void *fill_tree_descriptor(struct repository *r,
|
|||
if (!buf)
|
||||
die(_("unable to read tree (%s)"), oid_to_hex(oid));
|
||||
}
|
||||
init_tree_desc(desc, buf, size);
|
||||
init_tree_desc(desc, oid, buf, size);
|
||||
return buf;
|
||||
}
|
||||
|
||||
|
@ -119,7 +109,7 @@ static void entry_extract(struct tree_desc *t, struct name_entry *a)
|
|||
static int update_tree_entry_internal(struct tree_desc *desc, struct strbuf *err)
|
||||
{
|
||||
const void *buf = desc->buffer;
|
||||
const unsigned char *end = (const unsigned char *)desc->entry.path + desc->entry.pathlen + 1 + the_hash_algo->rawsz;
|
||||
const unsigned char *end = (const unsigned char *)desc->entry.path + desc->entry.pathlen + 1 + desc->algo->rawsz;
|
||||
unsigned long size = desc->size;
|
||||
unsigned long len = end - (const unsigned char *)buf;
|
||||
|
||||
|
@ -633,7 +623,7 @@ int get_tree_entry(struct repository *r,
|
|||
retval = -1;
|
||||
} else {
|
||||
struct tree_desc t;
|
||||
init_tree_desc(&t, tree, size);
|
||||
init_tree_desc(&t, tree_oid, tree, size);
|
||||
retval = find_tree_entry(r, &t, name, oid, mode);
|
||||
}
|
||||
free(tree);
|
||||
|
@ -676,7 +666,7 @@ enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r,
|
|||
struct tree_desc t;
|
||||
int follows_remaining = GET_TREE_ENTRY_FOLLOW_SYMLINKS_MAX_LINKS;
|
||||
|
||||
init_tree_desc(&t, NULL, 0UL);
|
||||
init_tree_desc(&t, NULL, NULL, 0UL);
|
||||
strbuf_addstr(&namebuf, name);
|
||||
oidcpy(¤t_tree_oid, tree_oid);
|
||||
|
||||
|
@ -712,7 +702,7 @@ enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r,
|
|||
goto done;
|
||||
|
||||
/* descend */
|
||||
init_tree_desc(&t, tree, size);
|
||||
init_tree_desc(&t, ¤t_tree_oid, tree, size);
|
||||
}
|
||||
|
||||
/* Handle symlinks to e.g. a//b by removing leading slashes */
|
||||
|
@ -746,7 +736,7 @@ enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r,
|
|||
free(parent->tree);
|
||||
parents_nr--;
|
||||
parent = &parents[parents_nr - 1];
|
||||
init_tree_desc(&t, parent->tree, parent->size);
|
||||
init_tree_desc(&t, &parent->oid, parent->tree, parent->size);
|
||||
strbuf_remove(&namebuf, 0, remainder ? 3 : 2);
|
||||
continue;
|
||||
}
|
||||
|
@ -826,7 +816,7 @@ enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r,
|
|||
contents_start = contents;
|
||||
|
||||
parent = &parents[parents_nr - 1];
|
||||
init_tree_desc(&t, parent->tree, parent->size);
|
||||
init_tree_desc(&t, &parent->oid, parent->tree, parent->size);
|
||||
strbuf_splice(&namebuf, 0, len,
|
||||
contents_start, link_len);
|
||||
if (remainder)
|
||||
|
|
|
tree-walk.h
@ -24,6 +24,7 @@ struct name_entry {
* A semi-opaque data structure used to maintain the current state of the walk.
*/
struct tree_desc {
const struct git_hash_algo *algo;
/*
* pointer into the memory representation of the tree. It always
* points at the current entry being visited.

@ -83,9 +84,11 @@ int update_tree_entry_gently(struct tree_desc *);
* size parameters are assumed to be the same as the buffer and size
* members of `struct tree`.
*/
void init_tree_desc(struct tree_desc *desc, const void *buf, unsigned long size);
void init_tree_desc(struct tree_desc *desc, const struct object_id *tree_oid,
const void *buf, unsigned long size);

int init_tree_desc_gently(struct tree_desc *desc, const void *buf, unsigned long size,
int init_tree_desc_gently(struct tree_desc *desc, const struct object_id *oid,
const void *buf, unsigned long size,
enum tree_desc_flags flags);

/*
2
tree.c
@ -29,7 +29,7 @@ int read_tree_at(struct repository *r,
if (parse_tree(tree))
return -1;

init_tree_desc(&desc, tree->buffer, tree->size);
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);

while (tree_entry(&desc, &entry)) {
if (retval != all_entries_interesting) {
2
walker.c
@ -45,7 +45,7 @@ static int process_tree(struct walker *walker, struct tree *tree)
if (parse_tree(tree))
return -1;

init_tree_desc(&desc, tree->buffer, tree->size);
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
while (tree_entry(&desc, &entry)) {
struct object *obj = NULL;