mirror of
https://github.com/git/git
synced 2024-09-13 05:14:36 +00:00
Merge branch 'jk/lookup-object-prefer-latest'
Optimizes object lookup when the object hashtable starts to become crowded.

* jk/lookup-object-prefer-latest:
  lookup_object: prioritize recently found objects
This commit is contained in:
commit
4818cfcdcc
14
object.c
14
object.c
|
@@ -71,13 +71,13 @@ static unsigned int hashtable_index(const unsigned char *sha1)
|
||||||
|
|
||||||
struct object *lookup_object(const unsigned char *sha1)
|
struct object *lookup_object(const unsigned char *sha1)
|
||||||
{
|
{
|
||||||
unsigned int i;
|
unsigned int i, first;
|
||||||
struct object *obj;
|
struct object *obj;
|
||||||
|
|
||||||
if (!obj_hash)
|
if (!obj_hash)
|
||||||
return NULL;
|
return NULL;
|
||||||
|
|
||||||
i = hashtable_index(sha1);
|
first = i = hashtable_index(sha1);
|
||||||
while ((obj = obj_hash[i]) != NULL) {
|
while ((obj = obj_hash[i]) != NULL) {
|
||||||
if (!hashcmp(sha1, obj->sha1))
|
if (!hashcmp(sha1, obj->sha1))
|
||||||
break;
|
break;
|
||||||
|
@ -85,6 +85,16 @@ struct object *lookup_object(const unsigned char *sha1)
|
||||||
if (i == obj_hash_size)
|
if (i == obj_hash_size)
|
||||||
i = 0;
|
i = 0;
|
||||||
}
|
}
|
||||||
|
if (obj && i != first) {
|
||||||
|
/*
|
||||||
|
* Move object to where we started to look for it so
|
||||||
|
* that we do not need to walk the hash table the next
|
||||||
|
* time we look for it.
|
||||||
|
*/
|
||||||
|
struct object *tmp = obj_hash[i];
|
||||||
|
obj_hash[i] = obj_hash[first];
|
||||||
|
obj_hash[first] = tmp;
|
||||||
|
}
|
||||||
return obj;
|
return obj;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Reference in a new issue