commit df7000cd91

In this developer's tests, producing one gigabyte worth of NULs in a busy loop that writes out individual bytes, unbuffered, took ~27sec. Writing chunked 256kB buffers instead only took ~0.6sec.

This matters because we are about to introduce a pair of test cases that want to be able to produce 5GB of NULs, and we cannot use `/dev/zero` because of the HP NonStop platform's lack of support for that device.

Signed-off-by: Johannes Schindelin <johannes.schindelin@gmx.de>
Signed-off-by: Junio C Hamano <gitster@pobox.com>
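For illustration only (not part of the commit): a minimal sketch contrasting the two strategies the message compares, assuming plain POSIX write(2) to stdout. The function names (write_bytewise, write_chunked) and the 1GB count in main are made up for this sketch; the actual helper below implements only the chunked path.

	#include <unistd.h>

	/* Slow case: one write(2) syscall per NUL byte (the ~27sec loop). */
	int write_bytewise(size_t total)
	{
		const char nul = '\0';
		while (total--)
			if (write(1, &nul, 1) < 0)
				return -1;
		return 0;
	}

	/* Fast case: 256kB chunks amortize the per-syscall cost (~0.6sec). */
	int write_chunked(size_t total)
	{
		static const char zeros[256 * 1024]; /* static, hence NUL-initialized */
		while (total > 0) {
			size_t chunk = total < sizeof(zeros) ? total : sizeof(zeros);
			ssize_t n = write(1, zeros, chunk);
			if (n < 0)
				return -1;
			total -= (size_t)n;
		}
		return 0;
	}

	int main(void)
	{
		/* ~1GB of NULs, the amount used in the timing comparison above. */
		return write_chunked((size_t)1024 * 1024 * 1024) ? 1 : 0;
	}

The actual helper source follows.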
#include "test-tool.h"
|
|
#include "git-compat-util.h"
|
|
|
|
int cmd__genzeros(int argc, const char **argv)
|
|
{
|
|
/* static, so that it is NUL-initialized */
|
|
static const char zeros[256 * 1024];
|
|
intmax_t count;
|
|
ssize_t n;
|
|
|
|
if (argc > 2) {
|
|
fprintf(stderr, "usage: %s [<count>]\n", argv[0]);
|
|
return 1;
|
|
}
|
|
|
|
count = argc > 1 ? strtoimax(argv[1], NULL, 0) : -1;
|
|
|
|
/* Writing out individual NUL bytes is slow... */
|
|
while (count < 0)
|
|
if (write(1, zeros, ARRAY_SIZE(zeros)) < 0)
|
|
return -1;
|
|
|
|
while (count > 0) {
|
|
n = write(1, zeros, count < ARRAY_SIZE(zeros) ?
|
|
count : ARRAY_SIZE(zeros));
|
|
|
|
if (n < 0)
|
|
return -1;
|
|
|
|
count -= n;
|
|
}
|
|
|
|
return 0;
|
|
}
|